From 8705e27b85f2e53e6903196d14869c633d98dda8 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Mon, 11 Mar 2024 09:22:02 -0400 Subject: [PATCH 01/23] WIP implement TOA5 upload http handler, need to hook up to router --- api/internal/handler/handler.go | 2 + api/internal/handler/measurement.go | 53 ++++++++ api/internal/model/datalogger_parser.go | 51 -------- api/internal/service/datalogger_telemetry.go | 129 +++++++++++++++++++ 4 files changed, 184 insertions(+), 51 deletions(-) diff --git a/api/internal/handler/handler.go b/api/internal/handler/handler.go index e7890f71..774fa2b9 100644 --- a/api/internal/handler/handler.go +++ b/api/internal/handler/handler.go @@ -21,6 +21,7 @@ type ApiHandler struct { AwareParameterService service.AwareParameterService CollectionGroupService service.CollectionGroupService DataloggerService service.DataloggerService + DataloggerTelemetryService service.DataloggerTelemetryService DistrictRollupService service.DistrictRollupService DomainService service.DomainService EquivalencyTableService service.EquivalencyTableService @@ -67,6 +68,7 @@ func NewApi(cfg *config.ApiConfig) *ApiHandler { AwareParameterService: service.NewAwareParameterService(db, q), CollectionGroupService: service.NewCollectionGroupService(db, q), DataloggerService: service.NewDataloggerService(db, q), + DataloggerTelemetryService: dataloggerTelemetryService, DistrictRollupService: service.NewDistrictRollupService(db, q), DomainService: service.NewDomainService(db, q), EquivalencyTableService: service.NewEquivalencyTableService(db, q), diff --git a/api/internal/handler/measurement.go b/api/internal/handler/measurement.go index ee9e5270..c952fff4 100644 --- a/api/internal/handler/measurement.go +++ b/api/internal/handler/measurement.go @@ -1,7 +1,9 @@ package handler import ( + "log" "net/http" + "strings" "time" "github.com/USACE/instrumentation-api/api/internal/model" @@ -141,3 +143,54 @@ func (h *ApiHandler) DeleteTimeserieMeasurements(c echo.Context) error { 
} return c.JSON(http.StatusOK, make(map[string]interface{})) } + +// CreateOrUpdateTimeseriesMeasurements godoc +// +// @Summary creates one or more timeseries measurements +// @Tags measurement +// @Accept json,mpfd +// @Produce json +// @Param timeseries_measurement_collections body model.TimeseriesMeasurementCollectionCollection false "json array of timeseries measurement collections" +// @Param timeseries_measurement_collections formData file false "TOA5 file of timeseries measurement collections" +// @Success 200 {array} model.MeasurementCollection +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /timeseries_measurements [post] +// @Security Bearer +func (h *ApiHandler) _CreateOrUpdateTimeseriesMeasurements(c echo.Context) error { + contentType := "application/json" + contentTypeHeader, ok := c.Request().Header["Content-Type"] + if ok && len(contentTypeHeader) > 0 { + contentType = strings.ToLower(contentTypeHeader[0]) + } + + if strings.Contains(contentType, "multipart/form-data") { + return h.createOrUpdateTimeseriesMeasurementsMultipartFormData(c) + } + + return h.CreateOrUpdateTimeseriesMeasurements(c) +} + +func (h *ApiHandler) createOrUpdateTimeseriesMeasurementsMultipartFormData(c echo.Context) error { + file, err := c.FormFile("file") + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + + src, err := file.Open() + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + defer func() { + if err := src.Close(); err != nil { + log.Printf("error closing file: %s", err.Error()) + } + }() + + if err := h.DataloggerTelemetryService.CreateOrUpdateTOA5MeasurementCollection(c.Request().Context(), src); err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusCreated, map[string]interface{}{}) +} diff --git a/api/internal/model/datalogger_parser.go 
b/api/internal/model/datalogger_parser.go index 32dfdc7e..e6ebb2c2 100644 --- a/api/internal/model/datalogger_parser.go +++ b/api/internal/model/datalogger_parser.go @@ -1,11 +1,8 @@ package model import ( - "encoding/csv" "encoding/json" - "log" "math" - "os" ) type DataloggerPayload struct { @@ -61,51 +58,3 @@ func (j *FloatNanInf) UnmarshalJSON(v []byte) error { } return nil } - -// ParseTOA5 parses a Campbell Scientific TOA5 data file that is simlar to a csv. -// The unique properties of TOA5 are that the meatdata are stored in header of file (first 4 lines of csv) -func ParseTOA5(filename string) ([][]string, error) { - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer f.Close() - - r := csv.NewReader(f) - - // read headers - // LINE 1: information about the data logger (e.g. serial number and program name) - header1, err := r.Read() - if err != nil { - return nil, err - } - // LINE 2: data header (names of the variables stored in the table) - header2, err := r.Read() - if err != nil { - return nil, err - } - // LINE 3: units for the variables if they have been defined in the data logger - header3, err := r.Read() - if err != nil { - return nil, err - } - // LINE 4: abbreviation for processing data logger performed - // (e.g. sample, average, standard deviation, maximum, minimum, etc.) 
- header4, err := r.Read() - if err != nil { - return nil, err - } - log.Printf("header1: %v", header1) - log.Printf("header2: %v", header2) - log.Printf("header3: %v", header3) - log.Printf("header4: %v", header4) - - // continue read until EOF - data, err := r.ReadAll() - if err != nil { - return nil, err - } - log.Printf("data: %v", data) - - return data, nil -} diff --git a/api/internal/service/datalogger_telemetry.go b/api/internal/service/datalogger_telemetry.go index aee3e441..962ad0a3 100644 --- a/api/internal/service/datalogger_telemetry.go +++ b/api/internal/service/datalogger_telemetry.go @@ -3,7 +3,13 @@ package service import ( "context" "database/sql" + "encoding/csv" "errors" + "fmt" + "io" + "math" + "strconv" + "time" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" @@ -15,6 +21,7 @@ type DataloggerTelemetryService interface { CreateDataloggerTablePreview(ctx context.Context, prv model.DataloggerTablePreview) error UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error + CreateOrUpdateTOA5MeasurementCollection(ctx context.Context, r io.Reader) error } type dataloggerTelemetryService struct { @@ -79,3 +86,125 @@ func (s dataloggerTelemetryService) UpdateDataloggerTableError(ctx context.Conte return tx.Commit() } + +// ParseTOA5 parses a Campbell Scientific TOA5 data file that is simlar to a csv. 
+// The unique properties of TOA5 are that the meatdata are stored in header of file (first 4 lines of csv) +func (s dataloggerTelemetryService) CreateOrUpdateTOA5MeasurementCollection(ctx context.Context, r io.Reader) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + envHeader, err := reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + unitsHeader, err := reader.Read() + if err != nil { + return err + } + processHeader, err := reader.Read() + if err != nil { + return err + } + + meta := model.Environment{ + StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + OSVersion: envHeader[4], + ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + if err != nil { + return err + } + + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + if err != nil { + return err + } + + em := make([]string, 0) + defer func() { + s.UpdateDataloggerTableError(ctx, dl.ID, &meta.TableName, &model.DataloggerError{Errors: em}) + }() + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]model.Field, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = model.Field{ + Name: fieldHeader[i], + Units: unitsHeader[i], + Process: processHeader[i], + } + } + + eqt, err := qtx.GetEquivalencyTable(ctx, tableID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, eqtRow := range eqt.Rows { + fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID + } + + for { + record, err := reader.Read() + if err == io.EOF { + break + } + if err != nil { + return err + } + + t, err := time.Parse(record[0], time.RFC3339) + if err != nil { + return 
err + } + + for idx, cell := range record[2:] { + fieldName := fields[idx].Name + tsID, ok := fieldNameTimeseriesIDMap[fieldName] + if !ok { + // key error, field_name does not exist for equivalency table + // add error to Measurement payload to report back to user + em = append(em, fmt.Sprintf( + "key error: field_name %s does not exist for equivalency table %s", + fieldName, meta.TableName, + )) + continue + } + + v, err := strconv.ParseFloat(cell, 64) + if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { + // could not parse float + // add error to Measurement payload to report back to user + em = append(em, fmt.Sprintf( + "value error: field_name %s contains invalid value entry at %s", + fieldName, t, + )) + continue + } + + if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, tsID, t, v); err != nil { + return err + } + } + } + + return tx.Commit() +} From 392e22e34373345d25fcefdf6fe5a43a166f23af Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Tue, 22 Oct 2024 08:57:10 -0400 Subject: [PATCH 02/23] wip; create uploader service --- api/internal/handler/measurement.go | 2 +- api/internal/service/datalogger_telemetry.go | 4 +- api/internal/service/uploader.go | 130 +++++++++++++++++++ 3 files changed, 133 insertions(+), 3 deletions(-) create mode 100644 api/internal/service/uploader.go diff --git a/api/internal/handler/measurement.go b/api/internal/handler/measurement.go index 2d17f907..74bf87da 100644 --- a/api/internal/handler/measurement.go +++ b/api/internal/handler/measurement.go @@ -186,7 +186,7 @@ func (h *ApiHandler) createOrUpdateTimeseriesMeasurementsMultipartFormData(c ech } }() - if err := h.DataloggerTelemetryService.CreateOrUpdateTOA5MeasurementCollection(c.Request().Context(), src); err != nil { + if err := h.DataloggerTelemetryService.CreateOrUpdateDataloggerTOA5MeasurementCollection(c.Request().Context(), src); err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } diff --git 
a/api/internal/service/datalogger_telemetry.go b/api/internal/service/datalogger_telemetry.go index 962ad0a3..b55f2063 100644 --- a/api/internal/service/datalogger_telemetry.go +++ b/api/internal/service/datalogger_telemetry.go @@ -21,7 +21,7 @@ type DataloggerTelemetryService interface { CreateDataloggerTablePreview(ctx context.Context, prv model.DataloggerTablePreview) error UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error - CreateOrUpdateTOA5MeasurementCollection(ctx context.Context, r io.Reader) error + CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error } type dataloggerTelemetryService struct { @@ -89,7 +89,7 @@ func (s dataloggerTelemetryService) UpdateDataloggerTableError(ctx context.Conte // ParseTOA5 parses a Campbell Scientific TOA5 data file that is simlar to a csv. 
// The unique properties of TOA5 are that the meatdata are stored in header of file (first 4 lines of csv) -func (s dataloggerTelemetryService) CreateOrUpdateTOA5MeasurementCollection(ctx context.Context, r io.Reader) error { +func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error { tx, err := s.db.BeginTxx(ctx, nil) if err != nil { return err diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go new file mode 100644 index 00000000..7067b794 --- /dev/null +++ b/api/internal/service/uploader.go @@ -0,0 +1,130 @@ +package service + +import ( + "context" + "encoding/csv" + "io" + "math" + "strconv" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type UploaderService interface { + CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader) error + CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error +} + +type uploaderService struct { + db *model.Database + *model.Queries +} + +func NewUploaderService(db *model.Database, q *model.Queries) *uploaderService { + return &uploaderService{db, q} +} + +// TODO: transition away from datalogger equivalency table to different parser that's uploader specific +func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + envHeader, err := reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + unitsHeader, err := reader.Read() + if err != nil { + return err + } + processHeader, err := reader.Read() + if err != nil { + return err + } + + meta := model.Environment{ + StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + OSVersion: envHeader[4], + 
ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + if err != nil { + return err + } + + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + if err != nil { + return err + } + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]model.Field, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = model.Field{ + Name: fieldHeader[i], + Units: unitsHeader[i], + Process: processHeader[i], + } + } + + eqt, err := qtx.GetEquivalencyTable(ctx, tableID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, eqtRow := range eqt.Rows { + fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID + } + + for { + record, err := reader.Read() + if err == io.EOF { + break + } + if err != nil { + return err + } + + t, err := time.Parse(record[0], time.RFC3339) + if err != nil { + return err + } + + for idx, cell := range record[2:] { + fieldName := fields[idx].Name + tsID, ok := fieldNameTimeseriesIDMap[fieldName] + if !ok { + continue + } + + v, err := strconv.ParseFloat(cell, 64) + if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { + continue + } + + if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, tsID, t, v); err != nil { + return err + } + } + } + return nil +} From 525636f011490bacecd138f465997686c32c7633 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Thu, 31 Oct 2024 13:11:19 -0400 Subject: [PATCH 03/23] initial sqlc query migration --- api/go.mod | 4 +- api/go.sum | 3 + api/internal/db/alert.sql_gen.go | 219 +++ api/internal/db/alert_check.sql_gen.go | 128 ++ api/internal/db/alert_config.sql_gen.go | 371 +++++ .../db/alert_measurement_check.sql_gen.go | 79 + api/internal/db/alert_subscription.sql_gen.go | 236 +++ api/internal/db/autocomplete.sql_gen.go | 57 + api/internal/db/aware.sql_gen.go | 95 ++ 
api/internal/db/batch.go | 62 + api/internal/db/collection_group.sql_gen.go | 245 +++ api/internal/db/datalogger.sql_gen.go | 340 +++++ .../db/datalogger_telemetry.sql_gen.go | 138 ++ api/internal/db/db.go | 33 + api/internal/db/district_rollup.sql_gen.go | 106 ++ api/internal/db/domains.sql_gen.go | 63 + api/internal/db/equivalency_table.sql_gen.go | 154 ++ api/internal/db/evaluation.sql_gen.go | 337 +++++ api/internal/db/heartbeat.sql_gen.go | 57 + api/internal/db/home.sql_gen.go | 40 + api/internal/db/instrument.sql_gen.go | 387 +++++ api/internal/db/instrument_assign.sql_gen.go | 203 +++ .../db/instrument_constant.sql_gen.go | 81 + api/internal/db/instrument_group.sql_gen.go | 243 +++ api/internal/db/instrument_ipi.sql_gen.go | 186 +++ api/internal/db/instrument_note.sql_gen.go | 159 ++ api/internal/db/instrument_saa.sql_gen.go | 194 +++ api/internal/db/instrument_status.sql_gen.go | 89 ++ api/internal/db/measurement.sql_gen.go | 255 ++++ api/internal/db/models.go | 1335 +++++++++++++++++ api/internal/db/plot_config.sql_gen.go | 202 +++ .../db/plot_config_bullseye.sql_gen.go | 108 ++ .../db/plot_config_contour.sql_gen.go | 188 +++ .../db/plot_config_profile.sql_gen.go | 41 + .../db/plot_config_scatter_line.sql_gen.go | 172 +++ api/internal/db/profile.sql_gen.go | 271 ++++ api/internal/db/project.sql_gen.go | 444 ++++++ api/internal/db/project_role.sql_gen.go | 171 +++ api/internal/db/querier.go | 270 ++++ api/internal/db/report_config.sql_gen.go | 339 +++++ api/internal/db/submittal.sql_gen.go | 251 ++++ api/internal/db/timeseries.sql_gen.go | 329 ++++ .../db/timeseries_calculated.sql_gen.go | 187 +++ api/internal/db/timeseries_cwms.sql_gen.go | 140 ++ api/internal/db/unit.sql_gen.go | 44 + api/internal/handler/handler.go | 346 ++--- api/internal/handler/handlerv2.go | 174 +++ api/internal/model/common.go | 12 + api/internal/model/instrument.go | 13 - api/internal/server/docs/openapi.json | 37 +- api/internal/server/docs/openapi.yaml | 27 +- 
api/internal/service/uploader.go | 51 +- api/internal/servicev2/alert.go | 96 ++ api/internal/servicev2/alert_check.go | 362 +++++ api/internal/servicev2/alert_config.go | 130 ++ api/internal/servicev2/alert_subscription.go | 231 +++ api/internal/servicev2/autocomplete.go | 20 + api/internal/servicev2/aware.go | 49 + api/internal/servicev2/collection_group.go | 56 + api/internal/servicev2/datalogger.go | 158 ++ .../servicev2/datalogger_telemetry.go | 210 +++ api/internal/servicev2/db.go | 27 + api/internal/servicev2/dcsloader.go | 125 ++ api/internal/servicev2/district_rollup.go | 22 + api/internal/servicev2/domain.go | 21 + api/internal/servicev2/equivalency_table.go | 90 ++ api/internal/servicev2/evaluation.go | 152 ++ api/internal/servicev2/heartbeat.go | 22 + api/internal/servicev2/home.go | 20 + api/internal/servicev2/instrument.go | 167 +++ api/internal/servicev2/instrument_assign.go | 183 +++ api/internal/servicev2/instrument_constant.go | 74 + api/internal/servicev2/instrument_group.go | 54 + api/internal/servicev2/instrument_ipi.go | 48 + api/internal/servicev2/instrument_note.go | 52 + api/internal/servicev2/instrument_opts.go | 130 ++ api/internal/servicev2/instrument_saa.go | 48 + api/internal/servicev2/instrument_status.go | 42 + api/internal/servicev2/measurement.go | 125 ++ .../servicev2/measurement_inclinometer.go | 120 ++ api/internal/servicev2/opendcs.go | 20 + api/internal/servicev2/plot_config.go | 27 + .../servicev2/plot_config_bullseye.go | 81 + api/internal/servicev2/plot_config_contour.go | 123 ++ api/internal/servicev2/plot_config_profile.go | 80 + .../servicev2/plot_config_scatter_line.go | 162 ++ api/internal/servicev2/profile.go | 141 ++ api/internal/servicev2/project.go | 132 ++ api/internal/servicev2/project_role.go | 53 + api/internal/servicev2/report_config.go | 143 ++ api/internal/servicev2/submittal.go | 27 + api/internal/servicev2/timeseries.go | 85 ++ .../servicev2/timeseries_calculated.go | 99 ++ 
api/internal/servicev2/timeseries_cwms.go | 72 + api/internal/servicev2/timeseries_process.go | 21 + api/internal/servicev2/unit.go | 20 + api/internal/servicev2/uploader.go | 143 ++ .../repeat/0030__views_projects.sql | 3 + .../repeat/0040__views_instruments.sql | 20 +- api/migrations/schema/V1.14.00__uploader.sql | 20 + api/queries/alert.sql | 48 + api/queries/alert_check.sql | 31 + api/queries/alert_config.sql | 94 ++ api/queries/alert_measurement_check.sql | 14 + api/queries/alert_subscription.sql | 62 + api/queries/autocomplete.sql | 5 + api/queries/aware.sql | 12 + api/queries/collection_group.sql | 45 + api/queries/datalogger.sql | 84 ++ api/queries/datalogger_telemetry.sql | 33 + api/queries/district_rollup.sql | 14 + api/queries/domains.sql | 6 + api/queries/equivalency_table.sql | 49 + api/queries/evaluation.sql | 93 ++ api/queries/heartbeat.sql | 10 + api/queries/home.sql | 7 + api/queries/instrument.sql | 110 ++ api/queries/instrument_assign.sql | 57 + api/queries/instrument_constant.sql | 11 + api/queries/instrument_group.sql | 53 + api/queries/instrument_ipi.sql | 46 + api/queries/instrument_note.sql | 35 + api/queries/instrument_saa.sql | 48 + api/queries/instrument_status.sql | 20 + api/queries/measurement.sql | 64 + api/queries/plot_config.sql | 43 + api/queries/plot_config_bullseye.sql | 32 + api/queries/plot_config_contour.sql | 52 + api/queries/plot_config_profile.sql | 6 + api/queries/plot_config_scatter_line.sql | 36 + api/queries/profile.sql | 52 + api/queries/project.sql | 65 + api/queries/project_role.sql | 42 + api/queries/report_config.sql | 55 + api/queries/submittal.sql | 57 + api/queries/timeseries.sql | 50 + api/queries/timeseries_calculated.sql | 56 + api/queries/timeseries_cwms.sql | 22 + api/queries/unit.sql | 4 + compose.sh | 66 +- go.work.sum | 1 + report/generated.d.ts | 21 +- sqlc.yml | 54 + 143 files changed, 15497 insertions(+), 290 deletions(-) create mode 100644 api/internal/db/alert.sql_gen.go create mode 100644 
api/internal/db/alert_check.sql_gen.go create mode 100644 api/internal/db/alert_config.sql_gen.go create mode 100644 api/internal/db/alert_measurement_check.sql_gen.go create mode 100644 api/internal/db/alert_subscription.sql_gen.go create mode 100644 api/internal/db/autocomplete.sql_gen.go create mode 100644 api/internal/db/aware.sql_gen.go create mode 100644 api/internal/db/batch.go create mode 100644 api/internal/db/collection_group.sql_gen.go create mode 100644 api/internal/db/datalogger.sql_gen.go create mode 100644 api/internal/db/datalogger_telemetry.sql_gen.go create mode 100644 api/internal/db/db.go create mode 100644 api/internal/db/district_rollup.sql_gen.go create mode 100644 api/internal/db/domains.sql_gen.go create mode 100644 api/internal/db/equivalency_table.sql_gen.go create mode 100644 api/internal/db/evaluation.sql_gen.go create mode 100644 api/internal/db/heartbeat.sql_gen.go create mode 100644 api/internal/db/home.sql_gen.go create mode 100644 api/internal/db/instrument.sql_gen.go create mode 100644 api/internal/db/instrument_assign.sql_gen.go create mode 100644 api/internal/db/instrument_constant.sql_gen.go create mode 100644 api/internal/db/instrument_group.sql_gen.go create mode 100644 api/internal/db/instrument_ipi.sql_gen.go create mode 100644 api/internal/db/instrument_note.sql_gen.go create mode 100644 api/internal/db/instrument_saa.sql_gen.go create mode 100644 api/internal/db/instrument_status.sql_gen.go create mode 100644 api/internal/db/measurement.sql_gen.go create mode 100644 api/internal/db/models.go create mode 100644 api/internal/db/plot_config.sql_gen.go create mode 100644 api/internal/db/plot_config_bullseye.sql_gen.go create mode 100644 api/internal/db/plot_config_contour.sql_gen.go create mode 100644 api/internal/db/plot_config_profile.sql_gen.go create mode 100644 api/internal/db/plot_config_scatter_line.sql_gen.go create mode 100644 api/internal/db/profile.sql_gen.go create mode 100644 api/internal/db/project.sql_gen.go 
create mode 100644 api/internal/db/project_role.sql_gen.go create mode 100644 api/internal/db/querier.go create mode 100644 api/internal/db/report_config.sql_gen.go create mode 100644 api/internal/db/submittal.sql_gen.go create mode 100644 api/internal/db/timeseries.sql_gen.go create mode 100644 api/internal/db/timeseries_calculated.sql_gen.go create mode 100644 api/internal/db/timeseries_cwms.sql_gen.go create mode 100644 api/internal/db/unit.sql_gen.go create mode 100644 api/internal/handler/handlerv2.go create mode 100644 api/internal/servicev2/alert.go create mode 100644 api/internal/servicev2/alert_check.go create mode 100644 api/internal/servicev2/alert_config.go create mode 100644 api/internal/servicev2/alert_subscription.go create mode 100644 api/internal/servicev2/autocomplete.go create mode 100644 api/internal/servicev2/aware.go create mode 100644 api/internal/servicev2/collection_group.go create mode 100644 api/internal/servicev2/datalogger.go create mode 100644 api/internal/servicev2/datalogger_telemetry.go create mode 100644 api/internal/servicev2/db.go create mode 100644 api/internal/servicev2/dcsloader.go create mode 100644 api/internal/servicev2/district_rollup.go create mode 100644 api/internal/servicev2/domain.go create mode 100644 api/internal/servicev2/equivalency_table.go create mode 100644 api/internal/servicev2/evaluation.go create mode 100644 api/internal/servicev2/heartbeat.go create mode 100644 api/internal/servicev2/home.go create mode 100644 api/internal/servicev2/instrument.go create mode 100644 api/internal/servicev2/instrument_assign.go create mode 100644 api/internal/servicev2/instrument_constant.go create mode 100644 api/internal/servicev2/instrument_group.go create mode 100644 api/internal/servicev2/instrument_ipi.go create mode 100644 api/internal/servicev2/instrument_note.go create mode 100644 api/internal/servicev2/instrument_opts.go create mode 100644 api/internal/servicev2/instrument_saa.go create mode 100644 
api/internal/servicev2/instrument_status.go create mode 100644 api/internal/servicev2/measurement.go create mode 100644 api/internal/servicev2/measurement_inclinometer.go create mode 100644 api/internal/servicev2/opendcs.go create mode 100644 api/internal/servicev2/plot_config.go create mode 100644 api/internal/servicev2/plot_config_bullseye.go create mode 100644 api/internal/servicev2/plot_config_contour.go create mode 100644 api/internal/servicev2/plot_config_profile.go create mode 100644 api/internal/servicev2/plot_config_scatter_line.go create mode 100644 api/internal/servicev2/profile.go create mode 100644 api/internal/servicev2/project.go create mode 100644 api/internal/servicev2/project_role.go create mode 100644 api/internal/servicev2/report_config.go create mode 100644 api/internal/servicev2/submittal.go create mode 100644 api/internal/servicev2/timeseries.go create mode 100644 api/internal/servicev2/timeseries_calculated.go create mode 100644 api/internal/servicev2/timeseries_cwms.go create mode 100644 api/internal/servicev2/timeseries_process.go create mode 100644 api/internal/servicev2/unit.go create mode 100644 api/internal/servicev2/uploader.go create mode 100644 api/migrations/schema/V1.14.00__uploader.sql create mode 100644 api/queries/alert.sql create mode 100644 api/queries/alert_check.sql create mode 100644 api/queries/alert_config.sql create mode 100644 api/queries/alert_measurement_check.sql create mode 100644 api/queries/alert_subscription.sql create mode 100644 api/queries/autocomplete.sql create mode 100644 api/queries/aware.sql create mode 100644 api/queries/collection_group.sql create mode 100644 api/queries/datalogger.sql create mode 100644 api/queries/datalogger_telemetry.sql create mode 100644 api/queries/district_rollup.sql create mode 100644 api/queries/domains.sql create mode 100644 api/queries/equivalency_table.sql create mode 100644 api/queries/evaluation.sql create mode 100644 api/queries/heartbeat.sql create mode 100644 
api/queries/home.sql create mode 100644 api/queries/instrument.sql create mode 100644 api/queries/instrument_assign.sql create mode 100644 api/queries/instrument_constant.sql create mode 100644 api/queries/instrument_group.sql create mode 100644 api/queries/instrument_ipi.sql create mode 100644 api/queries/instrument_note.sql create mode 100644 api/queries/instrument_saa.sql create mode 100644 api/queries/instrument_status.sql create mode 100644 api/queries/measurement.sql create mode 100644 api/queries/plot_config.sql create mode 100644 api/queries/plot_config_bullseye.sql create mode 100644 api/queries/plot_config_contour.sql create mode 100644 api/queries/plot_config_profile.sql create mode 100644 api/queries/plot_config_scatter_line.sql create mode 100644 api/queries/profile.sql create mode 100644 api/queries/project.sql create mode 100644 api/queries/project_role.sql create mode 100644 api/queries/report_config.sql create mode 100644 api/queries/submittal.sql create mode 100644 api/queries/timeseries.sql create mode 100644 api/queries/timeseries_calculated.sql create mode 100644 api/queries/timeseries_cwms.sql create mode 100644 api/queries/unit.sql create mode 100644 sqlc.yml diff --git a/api/go.mod b/api/go.mod index 186ebf86..4c8113ab 100644 --- a/api/go.mod +++ b/api/go.mod @@ -20,7 +20,7 @@ require ( github.com/jackc/pgconn v1.14.3 github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 github.com/jackc/pgtype v1.14.3 - github.com/jackc/pgx/v5 v5.7.0 + github.com/jackc/pgx/v5 v5.7.1 github.com/jmoiron/sqlx v1.4.0 github.com/labstack/echo-jwt/v4 v4.2.0 github.com/labstack/echo/v4 v4.12.0 @@ -57,7 +57,7 @@ require ( github.com/jackc/pgproto3/v2 v2.3.3 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect github.com/jackc/pgx/v4 v4.18.3 // indirect - github.com/jackc/puddle/v2 v2.2.1 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/labstack/gommon v0.4.2 
// indirect github.com/mattn/go-colorable v0.1.13 // indirect diff --git a/api/go.sum b/api/go.sum index f7ebf596..1da0e199 100644 --- a/api/go.sum +++ b/api/go.sum @@ -144,12 +144,15 @@ github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= github.com/jackc/pgx/v5 v5.7.0 h1:FG6VLIdzvAPhnYqP14sQ2xhFLkiUQHCs6ySqO91kF4g= github.com/jackc/pgx/v5 v5.7.0/go.mod h1:awP1KNnjylvpxHuHP63gzjhnGkI1iw+PMoIwvoleN/8= +github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs= +github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA= github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= diff --git a/api/internal/db/alert.sql_gen.go b/api/internal/db/alert.sql_gen.go new file mode 100644 index 00000000..72cdcfd9 --- /dev/null +++ b/api/internal/db/alert.sql_gen.go @@ -0,0 +1,219 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: alert.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const createAlert = `-- name: CreateAlert :exec +insert into alert (alert_config_id) values ($1) +` + +func (q *Queries) CreateAlert(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, createAlert, alertConfigID) + return err +} + +const createAlertRead = `-- name: CreateAlertRead :exec +insert into alert_read (profile_id, alert_id) values ($1, $2) +on conflict do nothing +` + +type CreateAlertReadParams struct { + ProfileID uuid.UUID `json:"profile_id"` + AlertID uuid.UUID `json:"alert_id"` +} + +func (q *Queries) CreateAlertRead(ctx context.Context, arg CreateAlertReadParams) error { + _, err := q.db.Exec(ctx, createAlertRead, arg.ProfileID, arg.AlertID) + return err +} + +const deleteAlertRead = `-- name: DeleteAlertRead :exec +delete from alert_read where profile_id = $1 and alert_id = $2 +` + +type DeleteAlertReadParams struct { + ProfileID uuid.UUID `json:"profile_id"` + AlertID uuid.UUID `json:"alert_id"` +} + +func (q *Queries) DeleteAlertRead(ctx context.Context, arg DeleteAlertReadParams) error { + _, err := q.db.Exec(ctx, deleteAlertRead, arg.ProfileID, arg.AlertID) + return err +} + +const getAlert = `-- name: GetAlert :one +select a.id, a.alert_config_id, a.create_date, a.project_id, a.project_name, a.name, a.body, a.instruments, + case when r.alert_id is not null then true else false + end as read +from v_alert a +left join alert_read r on r.alert_id = a.id +inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_config_id +where aps.profile_id = $1 +and a.id = $2 +` + +type GetAlertParams struct { + ProfileID uuid.UUID `json:"profile_id"` + ID uuid.UUID `json:"id"` +} + +type GetAlertRow struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreateDate time.Time `json:"create_date"` + ProjectID uuid.UUID 
`json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + Instruments interface{} `json:"instruments"` + Read bool `json:"read"` +} + +func (q *Queries) GetAlert(ctx context.Context, arg GetAlertParams) (GetAlertRow, error) { + row := q.db.QueryRow(ctx, getAlert, arg.ProfileID, arg.ID) + var i GetAlertRow + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.CreateDate, + &i.ProjectID, + &i.ProjectName, + &i.Name, + &i.Body, + &i.Instruments, + &i.Read, + ) + return i, err +} + +const listAlertsForInstrument = `-- name: ListAlertsForInstrument :many +select id, alert_config_id, create_date, project_id, project_name, name, body, instruments from v_alert +where alert_config_id = any( + select id from alert_config_instrument + where instrument_id = $1 +) +` + +func (q *Queries) ListAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlert, error) { + rows, err := q.db.Query(ctx, listAlertsForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlert{} + for rows.Next() { + var i VAlert + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.CreateDate, + &i.ProjectID, + &i.ProjectName, + &i.Name, + &i.Body, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listAlertsForProfile = `-- name: ListAlertsForProfile :many +select a.id, a.alert_config_id, a.create_date, a.project_id, a.project_name, a.name, a.body, a.instruments, + case when r.alert_id is not null then true else false + end as read +from v_alert a +left join alert_read r on r.alert_id = a.id +inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_config_id +where aps.profile_id = $1 +` + +type ListAlertsForProfileRow struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreateDate time.Time 
`json:"create_date"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + Instruments interface{} `json:"instruments"` + Read bool `json:"read"` +} + +func (q *Queries) ListAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]ListAlertsForProfileRow, error) { + rows, err := q.db.Query(ctx, listAlertsForProfile, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListAlertsForProfileRow{} + for rows.Next() { + var i ListAlertsForProfileRow + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.CreateDate, + &i.ProjectID, + &i.ProjectName, + &i.Name, + &i.Body, + &i.Instruments, + &i.Read, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listAlertsForProject = `-- name: ListAlertsForProject :many +select id, alert_config_id, create_date, project_id, project_name, name, body, instruments from v_alert where project_id = $1 +` + +func (q *Queries) ListAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]VAlert, error) { + rows, err := q.db.Query(ctx, listAlertsForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlert{} + for rows.Next() { + var i VAlert + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.CreateDate, + &i.ProjectID, + &i.ProjectName, + &i.Name, + &i.Body, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/alert_check.sql_gen.go b/api/internal/db/alert_check.sql_gen.go new file mode 100644 index 00000000..5bf521a8 --- /dev/null +++ b/api/internal/db/alert_check.sql_gen.go @@ -0,0 +1,128 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: alert_check.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createNextSubmittalFromNewAlertConfigDate = `-- name: CreateNextSubmittalFromNewAlertConfigDate :exec +insert into submittal (alert_config_id, create_date, due_date) +select + ac.id, + $2::timestamptz, + $2::timestamptz + ac.schedule_interval +from alert_config ac +where ac.id = $1 +` + +type CreateNextSubmittalFromNewAlertConfigDateParams struct { + ID uuid.UUID `json:"id"` + Column2 time.Time `json:"column_2"` +} + +func (q *Queries) CreateNextSubmittalFromNewAlertConfigDate(ctx context.Context, arg CreateNextSubmittalFromNewAlertConfigDateParams) error { + _, err := q.db.Exec(ctx, createNextSubmittalFromNewAlertConfigDate, arg.ID, arg.Column2) + return err +} + +const listAndCheckAlertConfigs = `-- name: ListAndCheckAlertConfigs :many +update alert_config ac1 +set last_checked = now() +from ( + select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, instruments, alert_email_subscriptions + from v_alert_config +) ac2 +where ac1.id = ac2.id +returning ac2.id, ac2.name, ac2.body, ac2.creator, ac2.creator_username, ac2.create_date, ac2.updater, ac2.updater_username, ac2.update_date, ac2.project_id, ac2.project_name, ac2.alert_type_id, ac2.alert_type, ac2.start_date, ac2.schedule_interval, ac2.mute_consecutive_alerts, ac2.remind_interval, ac2.warning_interval, ac2.last_checked, ac2.last_reminded, ac2.instruments, ac2.alert_email_subscriptions +` + +func (q *Queries) ListAndCheckAlertConfigs(ctx context.Context) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, listAndCheckAlertConfigs) + if err != nil { + return nil, err + } + defer 
rows.Close() + items := []VAlertConfig{} + for rows.Next() { + var i VAlertConfig + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartDate, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastChecked, + &i.LastReminded, + &i.Instruments, + &i.AlertEmailSubscriptions, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateAlertConfigLastReminded = `-- name: UpdateAlertConfigLastReminded :exec +update alert_config set last_reminded = $2 where id = $1 +` + +type UpdateAlertConfigLastRemindedParams struct { + ID uuid.UUID `json:"id"` + LastReminded pgtype.Timestamptz `json:"last_reminded"` +} + +func (q *Queries) UpdateAlertConfigLastReminded(ctx context.Context, arg UpdateAlertConfigLastRemindedParams) error { + _, err := q.db.Exec(ctx, updateAlertConfigLastReminded, arg.ID, arg.LastReminded) + return err +} + +const updateSubmittalCompletionDateOrWarningSent = `-- name: UpdateSubmittalCompletionDateOrWarningSent :exec +update submittal set + submittal_status_id = $2, + completion_date = $3, + warning_sent = $4 +where id = $1 +` + +type UpdateSubmittalCompletionDateOrWarningSentParams struct { + ID uuid.UUID `json:"id"` + SubmittalStatusID pgtype.UUID `json:"submittal_status_id"` + CompletionDate pgtype.Timestamptz `json:"completion_date"` + WarningSent bool `json:"warning_sent"` +} + +func (q *Queries) UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, arg UpdateSubmittalCompletionDateOrWarningSentParams) error { + _, err := q.db.Exec(ctx, updateSubmittalCompletionDateOrWarningSent, + arg.ID, + arg.SubmittalStatusID, + arg.CompletionDate, + arg.WarningSent, + ) + return err +} diff --git 
a/api/internal/db/alert_config.sql_gen.go b/api/internal/db/alert_config.sql_gen.go new file mode 100644 index 00000000..3ab4a27f --- /dev/null +++ b/api/internal/db/alert_config.sql_gen.go @@ -0,0 +1,371 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: alert_config.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const assignInstrumentToAlertConfig = `-- name: AssignInstrumentToAlertConfig :exec +insert into alert_config_instrument (alert_config_id, instrument_id) values ($1, $2) +` + +type AssignInstrumentToAlertConfigParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) AssignInstrumentToAlertConfig(ctx context.Context, arg AssignInstrumentToAlertConfigParams) error { + _, err := q.db.Exec(ctx, assignInstrumentToAlertConfig, arg.AlertConfigID, arg.InstrumentID) + return err +} + +const createAlertConfig = `-- name: CreateAlertConfig :one +insert into alert_config ( + project_id, + name, + body, + alert_type_id, + start_date, + schedule_interval, + mute_consecutive_alerts, + remind_interval, + warning_interval, + creator, + create_date +) values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11) +returning id +` + +type CreateAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + StartDate time.Time `json:"start_date"` + ScheduleInterval pgtype.Interval `json:"schedule_interval"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + RemindInterval pgtype.Interval `json:"remind_interval"` + WarningInterval pgtype.Interval `json:"warning_interval"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` +} + +func (q *Queries) CreateAlertConfig(ctx context.Context, arg CreateAlertConfigParams) (uuid.UUID, 
error) { + row := q.db.QueryRow(ctx, createAlertConfig, + arg.ProjectID, + arg.Name, + arg.Body, + arg.AlertTypeID, + arg.StartDate, + arg.ScheduleInterval, + arg.MuteConsecutiveAlerts, + arg.RemindInterval, + arg.WarningInterval, + arg.Creator, + arg.CreateDate, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const createNextSubmittalFromExistingAlertConfigDate = `-- name: CreateNextSubmittalFromExistingAlertConfigDate :exec +insert into submittal (alert_config_id, due_date) +select ac.id, ac.create_date + ac.schedule_interval +from alert_config ac +where ac.id = $1 +` + +func (q *Queries) CreateNextSubmittalFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, createNextSubmittalFromExistingAlertConfigDate, id) + return err +} + +const deleteAlertConfig = `-- name: DeleteAlertConfig :exec +update alert_config set deleted=true where id = $1 +` + +func (q *Queries) DeleteAlertConfig(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteAlertConfig, id) + return err +} + +const getetAlertConfig = `-- name: GetetAlertConfig :one +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, instruments, alert_email_subscriptions from v_alert_config where id = $1 +` + +func (q *Queries) GetetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) { + row := q.db.QueryRow(ctx, getetAlertConfig, id) + var i VAlertConfig + err := row.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartDate, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + 
&i.LastChecked, + &i.LastReminded, + &i.Instruments, + &i.AlertEmailSubscriptions, + ) + return i, err +} + +const listAlertConfigsForInstrument = `-- name: ListAlertConfigsForInstrument :many +select t.id, t.name, t.body, t.creator, t.creator_username, t.create_date, t.updater, t.updater_username, t.update_date, t.project_id, t.project_name, t.alert_type_id, t.alert_type, t.start_date, t.schedule_interval, t.mute_consecutive_alerts, t.remind_interval, t.warning_interval, t.last_checked, t.last_reminded, t.instruments, t.alert_email_subscriptions +from v_alert_config t +inner join alert_config_instrument aci on t.id = aci.alert_config_id +where aci.instrument_id = $1 +order by t.name +` + +func (q *Queries) ListAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, listAlertConfigsForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertConfig{} + for rows.Next() { + var i VAlertConfig + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartDate, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastChecked, + &i.LastReminded, + &i.Instruments, + &i.AlertEmailSubscriptions, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listAlertConfigsForProject = `-- name: ListAlertConfigsForProject :many +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, instruments, alert_email_subscriptions 
+from v_alert_config +where project_id = $1 +order by name +` + +func (q *Queries) ListAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, listAlertConfigsForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertConfig{} + for rows.Next() { + var i VAlertConfig + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartDate, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastChecked, + &i.LastReminded, + &i.Instruments, + &i.AlertEmailSubscriptions, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listAlertConfigsForProjectAndAlertType = `-- name: ListAlertConfigsForProjectAndAlertType :many +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, instruments, alert_email_subscriptions +from v_alert_config +where project_id = $1 +and alert_type_id = $2 +order by name +` + +type ListAlertConfigsForProjectAndAlertTypeParams struct { + ProjectID uuid.UUID `json:"project_id"` + AlertTypeID uuid.UUID `json:"alert_type_id"` +} + +func (q *Queries) ListAlertConfigsForProjectAndAlertType(ctx context.Context, arg ListAlertConfigsForProjectAndAlertTypeParams) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, listAlertConfigsForProjectAndAlertType, arg.ProjectID, arg.AlertTypeID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertConfig{} + for rows.Next() { + var i 
VAlertConfig + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartDate, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastChecked, + &i.LastReminded, + &i.Instruments, + &i.AlertEmailSubscriptions, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const unassignAllInstrumentsFromAlertConfig = `-- name: UnassignAllInstrumentsFromAlertConfig :exec +delete from alert_config_instrument where alert_config_id = $1 +` + +func (q *Queries) UnassignAllInstrumentsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, unassignAllInstrumentsFromAlertConfig, alertConfigID) + return err +} + +const updateAlertConfig = `-- name: UpdateAlertConfig :exec +update alert_config set + name = $3, + body = $4, + start_date = $5, + schedule_interval = $6, + mute_consecutive_alerts = $7, + remind_interval = $8, + warning_interval = $9, + updater = $10, + update_date = $11 +where id = $1 and project_id = $2 +` + +type UpdateAlertConfigParams struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartDate time.Time `json:"start_date"` + ScheduleInterval pgtype.Interval `json:"schedule_interval"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + RemindInterval pgtype.Interval `json:"remind_interval"` + WarningInterval pgtype.Interval `json:"warning_interval"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +func (q *Queries) UpdateAlertConfig(ctx context.Context, arg UpdateAlertConfigParams) error { + _, err := q.db.Exec(ctx, updateAlertConfig, + arg.ID, + arg.ProjectID, 
+ arg.Name, + arg.Body, + arg.StartDate, + arg.ScheduleInterval, + arg.MuteConsecutiveAlerts, + arg.RemindInterval, + arg.WarningInterval, + arg.Updater, + arg.UpdateDate, + ) + return err +} + +const updateFutureSubmittalForAlertConfig = `-- name: UpdateFutureSubmittalForAlertConfig :one +update submittal +set due_date = sq.new_due_date +from ( + select + sub.id as submittal_id, + sub.create_date + ac.schedule_interval as new_due_date + from submittal sub + inner join alert_config ac on sub.alert_config_id = ac.id + where sub.alert_config_id = $1 + and sub.due_date > now() + and sub.completion_date is null + and not sub.marked_as_missing +) sq +where id = sq.submittal_id +and sq.new_due_date > now() +returning id +` + +func (q *Queries) UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID pgtype.UUID) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, updateFutureSubmittalForAlertConfig, alertConfigID) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} diff --git a/api/internal/db/alert_measurement_check.sql_gen.go b/api/internal/db/alert_measurement_check.sql_gen.go new file mode 100644 index 00000000..00b86821 --- /dev/null +++ b/api/internal/db/alert_measurement_check.sql_gen.go @@ -0,0 +1,79 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: alert_measurement_check.sql + +package db + +import ( + "context" +) + +const listIncompleteEvaluationSubmittals = `-- name: ListIncompleteEvaluationSubmittals :many +select alert_config_id, submittal_id, should_warn, should_alert, should_remind from v_alert_check_evaluation_submittal +where submittal_id = any( + select id from submittal + where completion_date is null and not marked_as_missing +) +` + +func (q *Queries) ListIncompleteEvaluationSubmittals(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) { + rows, err := q.db.Query(ctx, listIncompleteEvaluationSubmittals) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertCheckEvaluationSubmittal{} + for rows.Next() { + var i VAlertCheckEvaluationSubmittal + if err := rows.Scan( + &i.AlertConfigID, + &i.SubmittalID, + &i.ShouldWarn, + &i.ShouldAlert, + &i.ShouldRemind, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listIncompleteMeasurementSubmittals = `-- name: ListIncompleteMeasurementSubmittals :many +select alert_config_id, submittal_id, should_warn, should_alert, should_remind, affected_timeseries from v_alert_check_measurement_submittal +where submittal_id = any( + select id from submittal + where completion_date is null and not marked_as_missing +) +` + +func (q *Queries) ListIncompleteMeasurementSubmittals(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) { + rows, err := q.db.Query(ctx, listIncompleteMeasurementSubmittals) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertCheckMeasurementSubmittal{} + for rows.Next() { + var i VAlertCheckMeasurementSubmittal + if err := rows.Scan( + &i.AlertConfigID, + &i.SubmittalID, + &i.ShouldWarn, + &i.ShouldAlert, + &i.ShouldRemind, + &i.AffectedTimeseries, + ); err != nil { + return nil, err + } + items = append(items, i) + } + 
if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/alert_subscription.sql_gen.go b/api/internal/db/alert_subscription.sql_gen.go new file mode 100644 index 00000000..6578fa19 --- /dev/null +++ b/api/internal/db/alert_subscription.sql_gen.go @@ -0,0 +1,236 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: alert_subscription.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const createAlertEmailSubscription = `-- name: CreateAlertEmailSubscription :exec +insert into alert_email_subscription (alert_config_id, email_id) values ($1,$2) +on conflict on constraint email_unique_alert_config do nothing +` + +type CreateAlertEmailSubscriptionParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"email_id"` +} + +func (q *Queries) CreateAlertEmailSubscription(ctx context.Context, arg CreateAlertEmailSubscriptionParams) error { + _, err := q.db.Exec(ctx, createAlertEmailSubscription, arg.AlertConfigID, arg.EmailID) + return err +} + +const createAlertProfileSubscription = `-- name: CreateAlertProfileSubscription :exec +insert into alert_profile_subscription (alert_config_id, profile_id) values ($1,$2) +on conflict on constraint profile_unique_alert_config do nothing +` + +type CreateAlertProfileSubscriptionParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) CreateAlertProfileSubscription(ctx context.Context, arg CreateAlertProfileSubscriptionParams) error { + _, err := q.db.Exec(ctx, createAlertProfileSubscription, arg.AlertConfigID, arg.ProfileID) + return err +} + +const createAlertProfileSubscriptionOnAnyConflictDoNothing = `-- name: CreateAlertProfileSubscriptionOnAnyConflictDoNothing :exec +insert into alert_profile_subscription (alert_config_id, profile_id) +values ($1, $2) +on conflict do nothing +` 
+ +type CreateAlertProfileSubscriptionOnAnyConflictDoNothingParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) CreateAlertProfileSubscriptionOnAnyConflictDoNothing(ctx context.Context, arg CreateAlertProfileSubscriptionOnAnyConflictDoNothingParams) error { + _, err := q.db.Exec(ctx, createAlertProfileSubscriptionOnAnyConflictDoNothing, arg.AlertConfigID, arg.ProfileID) + return err +} + +const deleteAlertEmailSubscription = `-- name: DeleteAlertEmailSubscription :exec +delete from alert_email_subscription where alert_config_id = $1 and email_id = $2 +` + +type DeleteAlertEmailSubscriptionParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"email_id"` +} + +func (q *Queries) DeleteAlertEmailSubscription(ctx context.Context, arg DeleteAlertEmailSubscriptionParams) error { + _, err := q.db.Exec(ctx, deleteAlertEmailSubscription, arg.AlertConfigID, arg.EmailID) + return err +} + +const deleteAlertProfileSubscription = `-- name: DeleteAlertProfileSubscription :exec +delete from alert_profile_subscription where alert_config_id = $1 and profile_id = $2 +` + +type DeleteAlertProfileSubscriptionParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) DeleteAlertProfileSubscription(ctx context.Context, arg DeleteAlertProfileSubscriptionParams) error { + _, err := q.db.Exec(ctx, deleteAlertProfileSubscription, arg.AlertConfigID, arg.ProfileID) + return err +} + +const deleteAllAlertEmailSubscritpionsForAlertConfig = `-- name: DeleteAllAlertEmailSubscritpionsForAlertConfig :exec +delete from alert_email_subscription where alert_config_id = $1 +` + +func (q *Queries) DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteAllAlertEmailSubscritpionsForAlertConfig, alertConfigID) + return err +} + +const 
deleteAllAlertProfileSubscritpionsForAlertConfig = `-- name: DeleteAllAlertProfileSubscritpionsForAlertConfig :exec +delete from alert_profile_subscription where alert_config_id = $1 +` + +func (q *Queries) DeleteAllAlertProfileSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteAllAlertProfileSubscritpionsForAlertConfig, alertConfigID) + return err +} + +const getAlertSubscription = `-- name: GetAlertSubscription :many +select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where alert_config_id = $1 and profile_id = $2 +` + +type GetAlertSubscriptionParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) GetAlertSubscription(ctx context.Context, arg GetAlertSubscriptionParams) ([]AlertProfileSubscription, error) { + rows, err := q.db.Query(ctx, getAlertSubscription, arg.AlertConfigID, arg.ProfileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []AlertProfileSubscription{} + for rows.Next() { + var i AlertProfileSubscription + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.ProfileID, + &i.MuteUi, + &i.MuteNotify, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getAlertSubscriptionByID = `-- name: GetAlertSubscriptionByID :one +select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where id = $1 +` + +func (q *Queries) GetAlertSubscriptionByID(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) { + row := q.db.QueryRow(ctx, getAlertSubscriptionByID, id) + var i AlertProfileSubscription + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.ProfileID, + &i.MuteUi, + &i.MuteNotify, + ) + return i, err +} + +const listMyAlertSubscriptions = `-- name: ListMyAlertSubscriptions :many +select id, 
alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where profile_id = $1 +` + +func (q *Queries) ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) { + rows, err := q.db.Query(ctx, listMyAlertSubscriptions, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []AlertProfileSubscription{} + for rows.Next() { + var i AlertProfileSubscription + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.ProfileID, + &i.MuteUi, + &i.MuteNotify, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const registerEmail = `-- name: RegisterEmail :one +with e as ( + insert into email (email) values ($1) + on conflict on constraint unique_email do nothing + returning id +) +select id from e +union +select id from email where email = $1 +limit 1 +` + +func (q *Queries) RegisterEmail(ctx context.Context, email string) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, registerEmail, email) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const unregisterEmail = `-- name: UnregisterEmail :exec +delete from email where id = $1 +` + +func (q *Queries) UnregisterEmail(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, unregisterEmail, id) + return err +} + +const updateMyAlertSubscription = `-- name: UpdateMyAlertSubscription :exec +update alert_profile_subscription set mute_ui=$1, mute_notify=$2 where alert_config_id=$3 and profile_id=$4 +` + +type UpdateMyAlertSubscriptionParams struct { + MuteUi bool `json:"mute_ui"` + MuteNotify bool `json:"mute_notify"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) UpdateMyAlertSubscription(ctx context.Context, arg UpdateMyAlertSubscriptionParams) error { + _, err := q.db.Exec(ctx, updateMyAlertSubscription, + arg.MuteUi, + 
arg.MuteNotify, + arg.AlertConfigID, + arg.ProfileID, + ) + return err +} diff --git a/api/internal/db/autocomplete.sql_gen.go b/api/internal/db/autocomplete.sql_gen.go new file mode 100644 index 00000000..67529d54 --- /dev/null +++ b/api/internal/db/autocomplete.sql_gen.go @@ -0,0 +1,57 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: autocomplete.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const listEmailAutocomplete = `-- name: ListEmailAutocomplete :many +select id, user_type, username, email +from v_email_autocomplete +where username_email ilike '%'||$1||'%' +limit $2 +` + +type ListEmailAutocompleteParams struct { + Column1 *string `json:"column_1"` + Limit int32 `json:"limit"` +} + +type ListEmailAutocompleteRow struct { + ID uuid.UUID `json:"id"` + UserType string `json:"user_type"` + Username interface{} `json:"username"` + Email string `json:"email"` +} + +func (q *Queries) ListEmailAutocomplete(ctx context.Context, arg ListEmailAutocompleteParams) ([]ListEmailAutocompleteRow, error) { + rows, err := q.db.Query(ctx, listEmailAutocomplete, arg.Column1, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListEmailAutocompleteRow{} + for rows.Next() { + var i ListEmailAutocompleteRow + if err := rows.Scan( + &i.ID, + &i.UserType, + &i.Username, + &i.Email, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/aware.sql_gen.go b/api/internal/db/aware.sql_gen.go new file mode 100644 index 00000000..96e4b4a2 --- /dev/null +++ b/api/internal/db/aware.sql_gen.go @@ -0,0 +1,95 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: aware.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createAwarePlatform = `-- name: CreateAwarePlatform :exec +insert into aware_platform (instrument_id, aware_id) values ($1, $2) +` + +type CreateAwarePlatformParams struct { + InstrumentID pgtype.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` +} + +func (q *Queries) CreateAwarePlatform(ctx context.Context, arg CreateAwarePlatformParams) error { + _, err := q.db.Exec(ctx, createAwarePlatform, arg.InstrumentID, arg.AwareID) + return err +} + +const listAwareParameters = `-- name: ListAwareParameters :many +select id, key, parameter_id, unit_id from aware_parameter +` + +type ListAwareParametersRow struct { + ID uuid.UUID `json:"id"` + Key string `json:"key"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` +} + +func (q *Queries) ListAwareParameters(ctx context.Context) ([]ListAwareParametersRow, error) { + rows, err := q.db.Query(ctx, listAwareParameters) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListAwareParametersRow{} + for rows.Next() { + var i ListAwareParametersRow + if err := rows.Scan( + &i.ID, + &i.Key, + &i.ParameterID, + &i.UnitID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listAwarePlatformParameterEnabled = `-- name: ListAwarePlatformParameterEnabled :many +select instrument_id, aware_id, aware_parameter_key, timeseries_id +from v_aware_platform_parameter_enabled +order by aware_id, aware_parameter_key +` + +func (q *Queries) ListAwarePlatformParameterEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) { + rows, err := q.db.Query(ctx, listAwarePlatformParameterEnabled) + if err != nil { + return nil, err + } + defer rows.Close() + items := 
[]VAwarePlatformParameterEnabled{} + for rows.Next() { + var i VAwarePlatformParameterEnabled + if err := rows.Scan( + &i.InstrumentID, + &i.AwareID, + &i.AwareParameterKey, + &i.TimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go new file mode 100644 index 00000000..59f3ddeb --- /dev/null +++ b/api/internal/db/batch.go @@ -0,0 +1,62 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: batch.go + +package db + +import ( + "context" + "errors" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5" +) + +var ( + ErrBatchAlreadyClosed = errors.New("batch already closed") +) + +const createAlerts = `-- name: CreateAlerts :batchexec +insert into alert (alert_config_id) values ($1) +` + +type CreateAlertsBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +func (q *Queries) CreateAlerts(ctx context.Context, alertConfigID []uuid.UUID) *CreateAlertsBatchResults { + batch := &pgx.Batch{} + for _, a := range alertConfigID { + vals := []interface{}{ + a, + } + batch.Queue(createAlerts, vals...) + } + br := q.db.SendBatch(ctx, batch) + return &CreateAlertsBatchResults{br, len(alertConfigID), false} +} + +func (b *CreateAlertsBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateAlertsBatchResults) Close() error { + b.closed = true + return b.br.Close() +} diff --git a/api/internal/db/collection_group.sql_gen.go b/api/internal/db/collection_group.sql_gen.go new file mode 100644 index 00000000..7894456f --- /dev/null +++ b/api/internal/db/collection_group.sql_gen.go @@ -0,0 +1,245 @@ +// Code generated by sqlc. 
DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: collection_group.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const addTimeseriesToCollectionGroup = `-- name: AddTimeseriesToCollectionGroup :exec +insert into collection_group_timeseries (collection_group_id, timeseries_id) values ($1, $2) +on conflict on constraint collection_group_unique_timeseries do nothing +` + +type AddTimeseriesToCollectionGroupParams struct { + CollectionGroupID uuid.UUID `json:"collection_group_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) AddTimeseriesToCollectionGroup(ctx context.Context, arg AddTimeseriesToCollectionGroupParams) error { + _, err := q.db.Exec(ctx, addTimeseriesToCollectionGroup, arg.CollectionGroupID, arg.TimeseriesID) + return err +} + +const createCollectionGroup = `-- name: CreateCollectionGroup :one +insert into collection_group (project_id, name, slug, creator, create_date, updater, update_date) +values ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5, $6) +returning id, project_id, name, slug, creator, create_date, updater, update_date +` + +type CreateCollectionGroupParams struct { + ProjectID uuid.UUID `json:"project_id"` + Column2 string `json:"column_2"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +func (q *Queries) CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CollectionGroup, error) { + row := q.db.QueryRow(ctx, createCollectionGroup, + arg.ProjectID, + arg.Column2, + arg.Creator, + arg.CreateDate, + arg.Updater, + arg.UpdateDate, + ) + var i CollectionGroup + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.Name, + &i.Slug, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + ) + return i, err +} + +const 
deleteCollectionGroup = `-- name: DeleteCollectionGroup :exec +delete from collection_group where project_id=$1 and id=$2 +` + +type DeleteCollectionGroupParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) DeleteCollectionGroup(ctx context.Context, arg DeleteCollectionGroupParams) error { + _, err := q.db.Exec(ctx, deleteCollectionGroup, arg.ProjectID, arg.ID) + return err +} + +const getCollectionGroupDetailsTimeseries = `-- name: GetCollectionGroupDetailsTimeseries :one +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit, tm.time as latest_time, tm.value as latest_value +from collection_group_timeseries cgt +inner join collection_group cg on cg.id = cgt.collection_group_id +inner join v_timeseries t on t.id = cgt.timeseries_id +left join timeseries_measurement tm on tm.timeseries_id = t.id and tm.time = ( + select time from timeseries_measurement + where timeseries_id = t.id + order by time desc limit 1 +) +inner join project_instrument pi on t.instrument_id = pi.instrument_id +where pi.project_id = $1 +and cgt.collection_group_id = $2 +` + +type GetCollectionGroupDetailsTimeseriesParams struct { + ProjectID uuid.UUID `json:"project_id"` + CollectionGroupID uuid.UUID `json:"collection_group_id"` +} + +type GetCollectionGroupDetailsTimeseriesRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Type NullTimeseriesType `json:"type"` + IsComputed bool `json:"is_computed"` + Variable interface{} `json:"variable"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentSlug string `json:"instrument_slug"` + Instrument string `json:"instrument"` + ParameterID uuid.UUID `json:"parameter_id"` + Parameter string `json:"parameter"` + UnitID uuid.UUID `json:"unit_id"` + Unit string `json:"unit"` + LatestTime pgtype.Timestamptz `json:"latest_time"` + LatestValue 
*float64 `json:"latest_value"` +} + +func (q *Queries) GetCollectionGroupDetailsTimeseries(ctx context.Context, arg GetCollectionGroupDetailsTimeseriesParams) (GetCollectionGroupDetailsTimeseriesRow, error) { + row := q.db.QueryRow(ctx, getCollectionGroupDetailsTimeseries, arg.ProjectID, arg.CollectionGroupID) + var i GetCollectionGroupDetailsTimeseriesRow + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + &i.LatestTime, + &i.LatestValue, + ) + return i, err +} + +const listCollectionGroups = `-- name: ListCollectionGroups :many +select id, project_id, slug, name, creator, create_date, updater, update_date +from collection_group +where project_id = $1 +and ($2 is null or $2 = id) +` + +type ListCollectionGroupsParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID interface{} `json:"id"` +} + +type ListCollectionGroupsRow struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Slug string `json:"slug"` + Name string `json:"name"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +func (q *Queries) ListCollectionGroups(ctx context.Context, arg ListCollectionGroupsParams) ([]ListCollectionGroupsRow, error) { + rows, err := q.db.Query(ctx, listCollectionGroups, arg.ProjectID, arg.ID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListCollectionGroupsRow{} + for rows.Next() { + var i ListCollectionGroupsRow + if err := rows.Scan( + &i.ID, + &i.ProjectID, + &i.Slug, + &i.Name, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const 
removeTimeseriesFromCollectionGroup = `-- name: RemoveTimeseriesFromCollectionGroup :exec +delete from collection_group_timeseries where collection_group_id=$1 and timeseries_id = $2 +` + +type RemoveTimeseriesFromCollectionGroupParams struct { + CollectionGroupID uuid.UUID `json:"collection_group_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) RemoveTimeseriesFromCollectionGroup(ctx context.Context, arg RemoveTimeseriesFromCollectionGroupParams) error { + _, err := q.db.Exec(ctx, removeTimeseriesFromCollectionGroup, arg.CollectionGroupID, arg.TimeseriesID) + return err +} + +const updateCollectionGroup = `-- name: UpdateCollectionGroup :one +update collection_group set name=$3, updater=$4, update_date=$5 +where project_id=$1 and id=$2 +returning id, project_id, name, slug, creator, create_date, updater, update_date +` + +type UpdateCollectionGroupParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +func (q *Queries) UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (CollectionGroup, error) { + row := q.db.QueryRow(ctx, updateCollectionGroup, + arg.ProjectID, + arg.ID, + arg.Name, + arg.Updater, + arg.UpdateDate, + ) + var i CollectionGroup + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.Name, + &i.Slug, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + ) + return i, err +} diff --git a/api/internal/db/datalogger.sql_gen.go b/api/internal/db/datalogger.sql_gen.go new file mode 100644 index 00000000..b73907c0 --- /dev/null +++ b/api/internal/db/datalogger.sql_gen.go @@ -0,0 +1,340 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: datalogger.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const createDatalogger = `-- name: CreateDatalogger :one +insert into datalogger (name, sn, project_id, creator, updater, slug, model_id) +values ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) +returning id +` + +type CreateDataloggerParams struct { + Name string `json:"name"` + Sn string `json:"sn"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + ModelID uuid.UUID `json:"model_id"` +} + +func (q *Queries) CreateDatalogger(ctx context.Context, arg CreateDataloggerParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, createDatalogger, + arg.Name, + arg.Sn, + arg.ProjectID, + arg.Creator, + arg.ModelID, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const createDataloggerHash = `-- name: CreateDataloggerHash :exec +insert into datalogger_hash (datalogger_id, "hash") values ($1, $2) +` + +type CreateDataloggerHashParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + Hash string `json:"hash"` +} + +func (q *Queries) CreateDataloggerHash(ctx context.Context, arg CreateDataloggerHashParams) error { + _, err := q.db.Exec(ctx, createDataloggerHash, arg.DataloggerID, arg.Hash) + return err +} + +const deleteDatalogger = `-- name: DeleteDatalogger :exec +update datalogger set deleted = true, updater = $2, update_date = $3 where id = $1 +` + +type DeleteDataloggerParams struct { + ID uuid.UUID `json:"id"` + Updater uuid.UUID `json:"updater"` + UpdateDate time.Time `json:"update_date"` +} + +func (q *Queries) DeleteDatalogger(ctx context.Context, arg DeleteDataloggerParams) error { + _, err := q.db.Exec(ctx, deleteDatalogger, arg.ID, arg.Updater, arg.UpdateDate) + return err +} + +const deleteDataloggerTable = `-- name: DeleteDataloggerTable :exec +delete from datalogger_table where id = $1 +` + +func (q *Queries) 
DeleteDataloggerTable(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteDataloggerTable, id) + return err +} + +const getDatalogger = `-- name: GetDatalogger :one +select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where id = $1 +` + +func (q *Queries) GetDatalogger(ctx context.Context, id uuid.UUID) (VDatalogger, error) { + row := q.db.QueryRow(ctx, getDatalogger, id) + var i VDatalogger + err := row.Scan( + &i.ID, + &i.Sn, + &i.ProjectID, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.Name, + &i.Slug, + &i.ModelID, + &i.Model, + &i.Errors, + &i.Tables, + ) + return i, err +} + +const getDataloggerIsActive = `-- name: GetDataloggerIsActive :one +select exists (select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where model = $1 and sn = $2)::int +` + +type GetDataloggerIsActiveParams struct { + Model *string `json:"model"` + Sn string `json:"sn"` +} + +func (q *Queries) GetDataloggerIsActive(ctx context.Context, arg GetDataloggerIsActiveParams) (int32, error) { + row := q.db.QueryRow(ctx, getDataloggerIsActive, arg.Model, arg.Sn) + var column_1 int32 + err := row.Scan(&column_1) + return column_1, err +} + +const getDataloggerModelName = `-- name: GetDataloggerModelName :one +select model from datalogger_model where id = $1 +` + +func (q *Queries) GetDataloggerModelName(ctx context.Context, id uuid.UUID) (*string, error) { + row := q.db.QueryRow(ctx, getDataloggerModelName, id) + var model *string + err := row.Scan(&model) + return model, err +} + +const getDataloggerTablePreview = `-- name: GetDataloggerTablePreview :one +select datalogger_table_id, preview, update_date from v_datalogger_preview where datalogger_table_id = $1 limit 1 +` + +func 
(q *Queries) GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) { + row := q.db.QueryRow(ctx, getDataloggerTablePreview, dataloggerTableID) + var i VDataloggerPreview + err := row.Scan(&i.DataloggerTableID, &i.Preview, &i.UpdateDate) + return i, err +} + +const getOrCreateDataloggerTable = `-- name: GetOrCreateDataloggerTable :one +with new_datalogger_table as ( + insert into datalogger_table (datalogger_id, table_name) values ($1, $2) + on conflict on constraint datalogger_table_datalogger_id_table_name_key do nothing + returning id +) +select ndt.id from new_datalogger_table ndt +union +select sdt.id from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +` + +type GetOrCreateDataloggerTableParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` +} + +func (q *Queries) GetOrCreateDataloggerTable(ctx context.Context, arg GetOrCreateDataloggerTableParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, getOrCreateDataloggerTable, arg.DataloggerID, arg.TableName) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const listAllDataloggers = `-- name: ListAllDataloggers :many +select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger +` + +func (q *Queries) ListAllDataloggers(ctx context.Context) ([]VDatalogger, error) { + rows, err := q.db.Query(ctx, listAllDataloggers) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDatalogger{} + for rows.Next() { + var i VDatalogger + if err := rows.Scan( + &i.ID, + &i.Sn, + &i.ProjectID, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.Name, + &i.Slug, + &i.ModelID, + &i.Model, + &i.Errors, + &i.Tables, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); 
err != nil { + return nil, err + } + return items, nil +} + +const listDataloggersForProject = `-- name: ListDataloggersForProject :many +select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where project_id = $1 +` + +func (q *Queries) ListDataloggersForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) { + rows, err := q.db.Query(ctx, listDataloggersForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDatalogger{} + for rows.Next() { + var i VDatalogger + if err := rows.Scan( + &i.ID, + &i.Sn, + &i.ProjectID, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.Name, + &i.Slug, + &i.ModelID, + &i.Model, + &i.Errors, + &i.Tables, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const renameEmptyDataloggerTableName = `-- name: RenameEmptyDataloggerTableName :exec +update datalogger_table dt +set table_name = $2 +where dt.table_name = '' and dt.datalogger_id = $1 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +) +` + +type RenameEmptyDataloggerTableNameParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` +} + +func (q *Queries) RenameEmptyDataloggerTableName(ctx context.Context, arg RenameEmptyDataloggerTableNameParams) error { + _, err := q.db.Exec(ctx, renameEmptyDataloggerTableName, arg.DataloggerID, arg.TableName) + return err +} + +const resetDataloggerTableName = `-- name: ResetDataloggerTableName :exec +update datalogger_table set table_name = '' where id = $1 +` + +func (q *Queries) ResetDataloggerTableName(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, resetDataloggerTableName, id) + 
return err +} + +const updateDatalogger = `-- name: UpdateDatalogger :exec +update datalogger set + name = $2, + updater = $3, + update_date = $4 +where id = $1 +` + +type UpdateDataloggerParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater uuid.UUID `json:"updater"` + UpdateDate time.Time `json:"update_date"` +} + +func (q *Queries) UpdateDatalogger(ctx context.Context, arg UpdateDataloggerParams) error { + _, err := q.db.Exec(ctx, updateDatalogger, + arg.ID, + arg.Name, + arg.Updater, + arg.UpdateDate, + ) + return err +} + +const updateDataloggerHash = `-- name: UpdateDataloggerHash :exec +update datalogger_hash set "hash" = $2 where datalogger_id = $1 +` + +type UpdateDataloggerHashParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + Hash string `json:"hash"` +} + +func (q *Queries) UpdateDataloggerHash(ctx context.Context, arg UpdateDataloggerHashParams) error { + _, err := q.db.Exec(ctx, updateDataloggerHash, arg.DataloggerID, arg.Hash) + return err +} + +const updateDataloggerUpdater = `-- name: UpdateDataloggerUpdater :exec +update datalogger set updater = $2, update_date = $3 where id = $1 +` + +type UpdateDataloggerUpdaterParams struct { + ID uuid.UUID `json:"id"` + Updater uuid.UUID `json:"updater"` + UpdateDate time.Time `json:"update_date"` +} + +func (q *Queries) UpdateDataloggerUpdater(ctx context.Context, arg UpdateDataloggerUpdaterParams) error { + _, err := q.db.Exec(ctx, updateDataloggerUpdater, arg.ID, arg.Updater, arg.UpdateDate) + return err +} + +const verifyDataloggerExists = `-- name: VerifyDataloggerExists :one +select true from v_datalogger where id = $1 +` + +func (q *Queries) VerifyDataloggerExists(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, verifyDataloggerExists, id) + var column_1 bool + err := row.Scan(&column_1) + return column_1, err +} diff --git a/api/internal/db/datalogger_telemetry.sql_gen.go b/api/internal/db/datalogger_telemetry.sql_gen.go new file 
mode 100644 index 00000000..b1912cf4 --- /dev/null +++ b/api/internal/db/datalogger_telemetry.sql_gen.go @@ -0,0 +1,138 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: datalogger_telemetry.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const createDataloggerError = `-- name: CreateDataloggerError :exec +insert into datalogger_error (datalogger_table_id, error_message) +select dt.id, $3 from datalogger_table dt +where dt.datalogger_id = $1 and dt.table_name = $2 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +) +` + +type CreateDataloggerErrorParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` + ErrorMessage *string `json:"error_message"` +} + +func (q *Queries) CreateDataloggerError(ctx context.Context, arg CreateDataloggerErrorParams) error { + _, err := q.db.Exec(ctx, createDataloggerError, arg.DataloggerID, arg.TableName, arg.ErrorMessage) + return err +} + +const createDataloggerTablePreview = `-- name: CreateDataloggerTablePreview :exec +insert into datalogger_preview (datalogger_table_id, preview, update_date) values ($1, $2, $3) +` + +type CreateDataloggerTablePreviewParams struct { + DataloggerTableID uuid.UUID `json:"datalogger_table_id"` + Preview []byte `json:"preview"` + UpdateDate time.Time `json:"update_date"` +} + +func (q *Queries) CreateDataloggerTablePreview(ctx context.Context, arg CreateDataloggerTablePreviewParams) error { + _, err := q.db.Exec(ctx, createDataloggerTablePreview, arg.DataloggerTableID, arg.Preview, arg.UpdateDate) + return err +} + +const deleteDataloggerTableError = `-- name: DeleteDataloggerTableError :exec +delete from datalogger_error +where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) +` + +type DeleteDataloggerTableErrorParams struct { + 
DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` +} + +func (q *Queries) DeleteDataloggerTableError(ctx context.Context, arg DeleteDataloggerTableErrorParams) error { + _, err := q.db.Exec(ctx, deleteDataloggerTableError, arg.DataloggerID, arg.TableName) + return err +} + +const getDataloggerByModelSN = `-- name: GetDataloggerByModelSN :one +select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger +where model = $1 and sn = $2 +limit 1 +` + +type GetDataloggerByModelSNParams struct { + Model *string `json:"model"` + Sn string `json:"sn"` +} + +func (q *Queries) GetDataloggerByModelSN(ctx context.Context, arg GetDataloggerByModelSNParams) (VDatalogger, error) { + row := q.db.QueryRow(ctx, getDataloggerByModelSN, arg.Model, arg.Sn) + var i VDatalogger + err := row.Scan( + &i.ID, + &i.Sn, + &i.ProjectID, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.Name, + &i.Slug, + &i.ModelID, + &i.Model, + &i.Errors, + &i.Tables, + ) + return i, err +} + +const getDataloggerHashByModelSN = `-- name: GetDataloggerHashByModelSN :one +select "hash" from v_datalogger_hash +where model = $1 and sn = $2 +limit 1 +` + +type GetDataloggerHashByModelSNParams struct { + Model *string `json:"model"` + Sn string `json:"sn"` +} + +func (q *Queries) GetDataloggerHashByModelSN(ctx context.Context, arg GetDataloggerHashByModelSNParams) (string, error) { + row := q.db.QueryRow(ctx, getDataloggerHashByModelSN, arg.Model, arg.Sn) + var hash string + err := row.Scan(&hash) + return hash, err +} + +const updateDataloggerTablePreview = `-- name: UpdateDataloggerTablePreview :exec +update datalogger_preview set preview = $3, update_date = $4 +where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) +` + +type 
UpdateDataloggerTablePreviewParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` + Preview []byte `json:"preview"` + UpdateDate time.Time `json:"update_date"` +} + +func (q *Queries) UpdateDataloggerTablePreview(ctx context.Context, arg UpdateDataloggerTablePreviewParams) error { + _, err := q.db.Exec(ctx, updateDataloggerTablePreview, + arg.DataloggerID, + arg.TableName, + arg.Preview, + arg.UpdateDate, + ) + return err +} diff --git a/api/internal/db/db.go b/api/internal/db/db.go new file mode 100644 index 00000000..8c84b4d7 --- /dev/null +++ b/api/internal/db/db.go @@ -0,0 +1,33 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 + +package db + +import ( + "context" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgconn" +) + +type DBTX interface { + Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error) + Query(context.Context, string, ...interface{}) (pgx.Rows, error) + QueryRow(context.Context, string, ...interface{}) pgx.Row + SendBatch(context.Context, *pgx.Batch) pgx.BatchResults +} + +func New(db DBTX) *Queries { + return &Queries{db: db} +} + +type Queries struct { + db DBTX +} + +func (q *Queries) WithTx(tx pgx.Tx) *Queries { + return &Queries{ + db: tx, + } +} diff --git a/api/internal/db/district_rollup.sql_gen.go b/api/internal/db/district_rollup.sql_gen.go new file mode 100644 index 00000000..3b8887e9 --- /dev/null +++ b/api/internal/db/district_rollup.sql_gen.go @@ -0,0 +1,106 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: district_rollup.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const listEvaluationDistrictRollupsForProjectAlertConfig = `-- name: ListEvaluationDistrictRollupsForProjectAlertConfig :many +select alert_type_id, office_id, district_initials, project_name, project_id, the_month, expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup +where alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +and project_id = $1 +and the_month >= date_trunc('month', $2::timestamptz) +and the_month <= date_trunc('month', $3::timestamptz) +` + +type ListEvaluationDistrictRollupsForProjectAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + Column2 time.Time `json:"column_2"` + Column3 time.Time `json:"column_3"` +} + +func (q *Queries) ListEvaluationDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListEvaluationDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) { + rows, err := q.db.Query(ctx, listEvaluationDistrictRollupsForProjectAlertConfig, arg.ProjectID, arg.Column2, arg.Column3) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDistrictRollup{} + for rows.Next() { + var i VDistrictRollup + if err := rows.Scan( + &i.AlertTypeID, + &i.OfficeID, + &i.DistrictInitials, + &i.ProjectName, + &i.ProjectID, + &i.TheMonth, + &i.ExpectedTotalSubmittals, + &i.ActualTotalSubmittals, + &i.RedSubmittals, + &i.YellowSubmittals, + &i.GreenSubmittals, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listMeasurementDistrictRollupsForProjectAlertConfig = `-- name: ListMeasurementDistrictRollupsForProjectAlertConfig :many +select alert_type_id, office_id, district_initials, project_name, project_id, the_month, 
expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup +where alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::uuid +and project_id = $1 +and the_month >= date_trunc('month', $2::timestamptz) +and the_month <= date_trunc('month', $3::timestamptz) +` + +type ListMeasurementDistrictRollupsForProjectAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + Column2 time.Time `json:"column_2"` + Column3 time.Time `json:"column_3"` +} + +func (q *Queries) ListMeasurementDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListMeasurementDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) { + rows, err := q.db.Query(ctx, listMeasurementDistrictRollupsForProjectAlertConfig, arg.ProjectID, arg.Column2, arg.Column3) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDistrictRollup{} + for rows.Next() { + var i VDistrictRollup + if err := rows.Scan( + &i.AlertTypeID, + &i.OfficeID, + &i.DistrictInitials, + &i.ProjectName, + &i.ProjectID, + &i.TheMonth, + &i.ExpectedTotalSubmittals, + &i.ActualTotalSubmittals, + &i.RedSubmittals, + &i.YellowSubmittals, + &i.GreenSubmittals, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/domains.sql_gen.go b/api/internal/db/domains.sql_gen.go new file mode 100644 index 00000000..b84d2c99 --- /dev/null +++ b/api/internal/db/domains.sql_gen.go @@ -0,0 +1,63 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: domains.sql + +package db + +import ( + "context" +) + +const listDomainGroups = `-- name: ListDomainGroups :many +select "group", opts from v_domain_group +` + +func (q *Queries) ListDomainGroups(ctx context.Context) ([]VDomainGroup, error) { + rows, err := q.db.Query(ctx, listDomainGroups) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDomainGroup{} + for rows.Next() { + var i VDomainGroup + if err := rows.Scan(&i.Group, &i.Opts); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listDomains = `-- name: ListDomains :many +select id, "group", value, description from v_domain +` + +func (q *Queries) ListDomains(ctx context.Context) ([]VDomain, error) { + rows, err := q.db.Query(ctx, listDomains) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDomain{} + for rows.Next() { + var i VDomain + if err := rows.Scan( + &i.ID, + &i.Group, + &i.Value, + &i.Description, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/equivalency_table.sql_gen.go b/api/internal/db/equivalency_table.sql_gen.go new file mode 100644 index 00000000..190a8990 --- /dev/null +++ b/api/internal/db/equivalency_table.sql_gen.go @@ -0,0 +1,154 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: equivalency_table.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createOrUpdateEquivalencyTableRow = `-- name: CreateOrUpdateEquivalencyTableRow :exec +insert into datalogger_equivalency_table +(datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) +values ($1, $2, $3, $4, $5, $6) +on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field_name_key +do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id +` + +type CreateOrUpdateEquivalencyTableRowParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + DataloggerTableID pgtype.UUID `json:"datalogger_table_id"` + FieldName string `json:"field_name"` + DisplayName *string `json:"display_name"` + InstrumentID pgtype.UUID `json:"instrument_id"` + TimeseriesID pgtype.UUID `json:"timeseries_id"` +} + +func (q *Queries) CreateOrUpdateEquivalencyTableRow(ctx context.Context, arg CreateOrUpdateEquivalencyTableRowParams) error { + _, err := q.db.Exec(ctx, createOrUpdateEquivalencyTableRow, + arg.DataloggerID, + arg.DataloggerTableID, + arg.FieldName, + arg.DisplayName, + arg.InstrumentID, + arg.TimeseriesID, + ) + return err +} + +const deleteEquivalencyTable = `-- name: DeleteEquivalencyTable :exec +delete from datalogger_equivalency_table where datalogger_table_id = $1 +` + +func (q *Queries) DeleteEquivalencyTable(ctx context.Context, dataloggerTableID pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteEquivalencyTable, dataloggerTableID) + return err +} + +const deleteEquivalencyTableRow = `-- name: DeleteEquivalencyTableRow :exec +delete from datalogger_equivalency_table where id = $1 +` + +func (q *Queries) DeleteEquivalencyTableRow(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteEquivalencyTableRow, 
id) + return err +} + +const getEquivalencyTable = `-- name: GetEquivalencyTable :many +select + datalogger_id, + datalogger_table_id, + datalogger_table_name, + fields +from v_datalogger_equivalency_table +where datalogger_table_id = $1 +` + +func (q *Queries) GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) ([]VDataloggerEquivalencyTable, error) { + rows, err := q.db.Query(ctx, getEquivalencyTable, dataloggerTableID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDataloggerEquivalencyTable{} + for rows.Next() { + var i VDataloggerEquivalencyTable + if err := rows.Scan( + &i.DataloggerID, + &i.DataloggerTableID, + &i.DataloggerTableName, + &i.Fields, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getIsValidDataloggerTable = `-- name: GetIsValidDataloggerTable :one +select not exists ( + select id, datalogger_id, table_name from datalogger_table where id = $1 and table_name = 'preparse' +) +` + +func (q *Queries) GetIsValidDataloggerTable(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, getIsValidDataloggerTable, id) + var not_exists bool + err := row.Scan(¬_exists) + return not_exists, err +} + +const getIsValidEquivalencyTableTimeseries = `-- name: GetIsValidEquivalencyTableTimeseries :one +select not exists ( + select id from v_timeseries_computed + where id = $1 + union all + select timeseries_id from instrument_constants + where timeseries_id = $1 +) +` + +func (q *Queries) GetIsValidEquivalencyTableTimeseries(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, getIsValidEquivalencyTableTimeseries, id) + var not_exists bool + err := row.Scan(¬_exists) + return not_exists, err +} + +const updateEquivalencyTableRow = `-- name: UpdateEquivalencyTableRow :exec +update datalogger_equivalency_table set + field_name = $2, + display_name = $3, + 
instrument_id = $4, + timeseries_id = $5 +where id = $1 +` + +type UpdateEquivalencyTableRowParams struct { + ID uuid.UUID `json:"id"` + FieldName string `json:"field_name"` + DisplayName *string `json:"display_name"` + InstrumentID pgtype.UUID `json:"instrument_id"` + TimeseriesID pgtype.UUID `json:"timeseries_id"` +} + +func (q *Queries) UpdateEquivalencyTableRow(ctx context.Context, arg UpdateEquivalencyTableRowParams) error { + _, err := q.db.Exec(ctx, updateEquivalencyTableRow, + arg.ID, + arg.FieldName, + arg.DisplayName, + arg.InstrumentID, + arg.TimeseriesID, + ) + return err +} diff --git a/api/internal/db/evaluation.sql_gen.go b/api/internal/db/evaluation.sql_gen.go new file mode 100644 index 00000000..9d197daa --- /dev/null +++ b/api/internal/db/evaluation.sql_gen.go @@ -0,0 +1,337 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: evaluation.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const completeEvaluationSubmittal = `-- name: CompleteEvaluationSubmittal :exec +update submittal sub1 set + submittal_status_id = sq.submittal_status_id, + completion_date = now() +from ( + select + sub2.id as submittal_id, + case + -- if completed before due date, mark submittal as green id + when now() <= sub2.due_date then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid + -- if completed after due date, mark as yellow + else 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::uuid + end as submittal_status_id + from submittal sub2 + inner join alert_config ac on sub2.alert_config_id = ac.id + where sub2.id = $1 + and sub2.completion_date is null + and not sub2.marked_as_missing + and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +) sq +where sub1.id = sq.submittal_id +returning sub1.id, sub1.alert_config_id, sub1.submittal_status_id, sub1.completion_date, sub1.create_date, sub1.due_date, sub1.marked_as_missing, 
sub1.warning_sent +` + +func (q *Queries) CompleteEvaluationSubmittal(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, completeEvaluationSubmittal, id) + return err +} + +const createEvalationInstrument = `-- name: CreateEvalationInstrument :exec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) +` + +type CreateEvalationInstrumentParams struct { + EvaluationID pgtype.UUID `json:"evaluation_id"` + InstrumentID pgtype.UUID `json:"instrument_id"` +} + +func (q *Queries) CreateEvalationInstrument(ctx context.Context, arg CreateEvalationInstrumentParams) error { + _, err := q.db.Exec(ctx, createEvalationInstrument, arg.EvaluationID, arg.InstrumentID) + return err +} + +const createEvaluation = `-- name: CreateEvaluation :one +insert into evaluation ( + project_id, + submittal_id, + name, + body, + start_date, + end_date, + creator, + create_date +) values ($1,$2,$3,$4,$5,$6,$7,$8) +returning id +` + +type CreateEvaluationParams struct { + ProjectID uuid.UUID `json:"project_id"` + SubmittalID pgtype.UUID `json:"submittal_id"` + Name string `json:"name"` + Body string `json:"body"` + StartDate time.Time `json:"start_date"` + EndDate time.Time `json:"end_date"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` +} + +func (q *Queries) CreateEvaluation(ctx context.Context, arg CreateEvaluationParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, createEvaluation, + arg.ProjectID, + arg.SubmittalID, + arg.Name, + arg.Body, + arg.StartDate, + arg.EndDate, + arg.Creator, + arg.CreateDate, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const createNextEvaluationSubmittal = `-- name: CreateNextEvaluationSubmittal :exec +insert into submittal (alert_config_id, due_date) +select + ac.id, + now() + ac.schedule_interval +from alert_config ac +where ac.id in (select sub.alert_config_id from submittal sub where sub.id = $1) +` + +func (q *Queries) 
CreateNextEvaluationSubmittal(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, createNextEvaluationSubmittal, id) + return err +} + +const deleteEvaluation = `-- name: DeleteEvaluation :exec +delete from evaluation where id = $1 +` + +func (q *Queries) DeleteEvaluation(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteEvaluation, id) + return err +} + +const getEvaluation = `-- name: GetEvaluation :one +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation where id = $1 +` + +func (q *Queries) GetEvaluation(ctx context.Context, id uuid.UUID) (VEvaluation, error) { + row := q.db.QueryRow(ctx, getEvaluation, id) + var i VEvaluation + err := row.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertConfigID, + &i.AlertConfigName, + &i.SubmittalID, + &i.StartDate, + &i.EndDate, + &i.Instruments, + ) + return i, err +} + +const listInstrumentEvaluations = `-- name: ListInstrumentEvaluations :many +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation +where id = any( + select evaluation_id + from evaluation_instrument + where instrument_id = $1 +) +` + +func (q *Queries) ListInstrumentEvaluations(ctx context.Context, instrumentID pgtype.UUID) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, listInstrumentEvaluations, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VEvaluation{} + for rows.Next() { + var i VEvaluation + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + 
&i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertConfigID, + &i.AlertConfigName, + &i.SubmittalID, + &i.StartDate, + &i.EndDate, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listProjectEvaluations = `-- name: ListProjectEvaluations :many +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments +from v_evaluation +where project_id = $1 +` + +func (q *Queries) ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, listProjectEvaluations, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VEvaluation{} + for rows.Next() { + var i VEvaluation + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertConfigID, + &i.AlertConfigName, + &i.SubmittalID, + &i.StartDate, + &i.EndDate, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listProjectEvaluationsByAlertConfig = `-- name: ListProjectEvaluationsByAlertConfig :many +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation +where project_id = $1 +and alert_config_id is not null +and alert_config_id = $2 +` + +type ListProjectEvaluationsByAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + 
AlertConfigID pgtype.UUID `json:"alert_config_id"` +} + +func (q *Queries) ListProjectEvaluationsByAlertConfig(ctx context.Context, arg ListProjectEvaluationsByAlertConfigParams) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, listProjectEvaluationsByAlertConfig, arg.ProjectID, arg.AlertConfigID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VEvaluation{} + for rows.Next() { + var i VEvaluation + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.ProjectID, + &i.ProjectName, + &i.AlertConfigID, + &i.AlertConfigName, + &i.SubmittalID, + &i.StartDate, + &i.EndDate, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const unassignAllInstrumentsFromEvaluation = `-- name: UnassignAllInstrumentsFromEvaluation :exec +delete from evaluation_instrument where evaluation_id = $1 +` + +func (q *Queries) UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID pgtype.UUID) error { + _, err := q.db.Exec(ctx, unassignAllInstrumentsFromEvaluation, evaluationID) + return err +} + +const updateEvaluation = `-- name: UpdateEvaluation :exec +update evaluation set + name=$3, + body=$4, + start_date=$5, + end_date=$6, + updater=$7, + update_date=$8 +where id=$1 and project_id=$2 +` + +type UpdateEvaluationParams struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartDate time.Time `json:"start_date"` + EndDate time.Time `json:"end_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +func (q *Queries) UpdateEvaluation(ctx context.Context, arg UpdateEvaluationParams) error { + _, err := q.db.Exec(ctx, updateEvaluation, + arg.ID, + arg.ProjectID, + arg.Name, + arg.Body, + 
arg.StartDate, + arg.EndDate, + arg.Updater, + arg.UpdateDate, + ) + return err +} diff --git a/api/internal/db/heartbeat.sql_gen.go b/api/internal/db/heartbeat.sql_gen.go new file mode 100644 index 00000000..882f56b1 --- /dev/null +++ b/api/internal/db/heartbeat.sql_gen.go @@ -0,0 +1,57 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: heartbeat.sql + +package db + +import ( + "context" + "time" +) + +const createHeartbeat = `-- name: CreateHeartbeat :one +insert into heartbeat (time) values ($1) returning time +` + +func (q *Queries) CreateHeartbeat(ctx context.Context, argTime time.Time) (time.Time, error) { + row := q.db.QueryRow(ctx, createHeartbeat, argTime) + var time time.Time + err := row.Scan(&time) + return time, err +} + +const getLatestHeartbeat = `-- name: GetLatestHeartbeat :one +select max(time) as time from heartbeat +` + +func (q *Queries) GetLatestHeartbeat(ctx context.Context) (interface{}, error) { + row := q.db.QueryRow(ctx, getLatestHeartbeat) + var time interface{} + err := row.Scan(&time) + return time, err +} + +const listHeartbeats = `-- name: ListHeartbeats :many +select time from heartbeat +` + +func (q *Queries) ListHeartbeats(ctx context.Context) ([]time.Time, error) { + rows, err := q.db.Query(ctx, listHeartbeats) + if err != nil { + return nil, err + } + defer rows.Close() + items := []time.Time{} + for rows.Next() { + var time time.Time + if err := rows.Scan(&time); err != nil { + return nil, err + } + items = append(items, time) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/home.sql_gen.go b/api/internal/db/home.sql_gen.go new file mode 100644 index 00000000..628dabeb --- /dev/null +++ b/api/internal/db/home.sql_gen.go @@ -0,0 +1,40 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: home.sql + +package db + +import ( + "context" +) + +const getHome = `-- name: GetHome :one +select + (select count(*) from instrument where not deleted) as instrument_count, + (select count(*) from project where not deleted) as project_count, + (select count(*) from instrument_group) as instrument_group_count, + (select count(*) from instrument where not deleted and create_date > now() - '7 days'::interval) as new_instruments_7d, + (select count(*) from timeseries_measurement where time > now() - '2 hours'::interval) as new_measurements_2h +` + +type GetHomeRow struct { + InstrumentCount int64 `json:"instrument_count"` + ProjectCount int64 `json:"project_count"` + InstrumentGroupCount int64 `json:"instrument_group_count"` + NewInstruments7d int64 `json:"new_instruments_7d"` + NewMeasurements2h int64 `json:"new_measurements_2h"` +} + +func (q *Queries) GetHome(ctx context.Context) (GetHomeRow, error) { + row := q.db.QueryRow(ctx, getHome) + var i GetHomeRow + err := row.Scan( + &i.InstrumentCount, + &i.ProjectCount, + &i.InstrumentGroupCount, + &i.NewInstruments7d, + &i.NewMeasurements2h, + ) + return i, err +} diff --git a/api/internal/db/instrument.sql_gen.go b/api/internal/db/instrument.sql_gen.go new file mode 100644 index 00000000..2030190b --- /dev/null +++ b/api/internal/db/instrument.sql_gen.go @@ -0,0 +1,387 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument.sql + +package db + +import ( + "context" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + model "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" + go_geom "github.com/twpayne/go-geom" +) + +const createInstrument = `-- name: CreateInstrument :one +insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) +values (slugify($1, 'instrument'), $1, $2, st_setsrid(ST_GeomFromWKB($3), 4326), $4, $5, $6, $7, $8, $9, $10) +returning id, slug +` + +type CreateInstrumentParams struct { + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + StGeomfromwkb interface{} `json:"st_geomfromwkb"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` +} + +type CreateInstrumentRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +func (q *Queries) CreateInstrument(ctx context.Context, arg CreateInstrumentParams) (CreateInstrumentRow, error) { + row := q.db.QueryRow(ctx, createInstrument, + arg.Name, + arg.TypeID, + arg.StGeomfromwkb, + arg.Station, + arg.StationOffset, + arg.Creator, + arg.CreateDate, + arg.NidID, + arg.UsgsID, + arg.ShowCwmsTab, + ) + var i CreateInstrumentRow + err := row.Scan(&i.ID, &i.Slug) + return i, err +} + +const deleteFlagInstrument = `-- name: DeleteFlagInstrument :exec +update instrument set deleted = true +where id = any( + select instrument_id + from project_instrument + where project_id = $1 +) +and id = $2 +` + +type DeleteFlagInstrumentParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) 
DeleteFlagInstrument(ctx context.Context, arg DeleteFlagInstrumentParams) error { + _, err := q.db.Exec(ctx, deleteFlagInstrument, arg.ProjectID, arg.ID) + return err +} + +const getInstrumentCount = `-- name: GetInstrumentCount :one +select count(*) from instrument where not deleted +` + +func (q *Queries) GetInstrumentCount(ctx context.Context) (int64, error) { + row := q.db.QueryRow(ctx, getInstrumentCount) + var count int64 + err := row.Scan(&count) + return count, err +} + +const getProjectCountForInstrument = `-- name: GetProjectCountForInstrument :one +select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id in ($1::uuid[]) +group by pi.instrument_id, i.name +order by i.name +` + +type GetProjectCountForInstrumentRow struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` + ProjectCount int64 `json:"project_count"` +} + +func (q *Queries) GetProjectCountForInstrument(ctx context.Context, instrumentIds []uuid.UUID) (GetProjectCountForInstrumentRow, error) { + row := q.db.QueryRow(ctx, getProjectCountForInstrument, instrumentIds) + var i GetProjectCountForInstrumentRow + err := row.Scan(&i.InstrumentID, &i.InstrumentName, &i.ProjectCount) + return i, err +} + +const listAdminProjects = `-- name: ListAdminProjects :many +select pr.project_id from profile_project_roles pr +inner join role ro on ro.id = pr.role_id +where pr.profile_id = $1 +and ro.name = 'ADMIN' +` + +func (q *Queries) ListAdminProjects(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) { + rows, err := q.db.Query(ctx, listAdminProjects, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []uuid.UUID{} + for rows.Next() { + var project_id uuid.UUID + if err := rows.Scan(&project_id); err != nil { + return nil, err + } + items = append(items, project_id) + } + if err := 
rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listInstrumentIDNamesByIDs = `-- name: ListInstrumentIDNamesByIDs :many +select id, name +from instrument +where id in ($1::uuid[]) +and not deleted +` + +type ListInstrumentIDNamesByIDsRow struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +func (q *Queries) ListInstrumentIDNamesByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]ListInstrumentIDNamesByIDsRow, error) { + rows, err := q.db.Query(ctx, listInstrumentIDNamesByIDs, instrumentIds) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListInstrumentIDNamesByIDsRow{} + for rows.Next() { + var i ListInstrumentIDNamesByIDsRow + if err := rows.Scan(&i.ID, &i.Name); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listInstrumentProjects = `-- name: ListInstrumentProjects :many +select project_id from project_instrument where instrument_id = $1 +` + +func (q *Queries) ListInstrumentProjects(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) { + rows, err := q.db.Query(ctx, listInstrumentProjects, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []uuid.UUID{} + for rows.Next() { + var project_id uuid.UUID + if err := rows.Scan(&project_id); err != nil { + return nil, err + } + items = append(items, project_id) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listInstruments = `-- name: ListInstruments :many +select id, + status_id, + status, + status_time, + slug, + name, + type_id, + type, + icon, + geometry, + station, + station_offset, + creator, + create_date, + updater, + update_date, + projects, + constants, + groups, + alert_configs, + nid_id, + usgs_id, + has_cwms, + show_cwms_tab, + opts +from v_instrument +where not deleted +and ($1 is not null or $1 = id) +` + +type 
ListInstrumentsRow struct { + ID uuid.UUID `json:"id"` + StatusID uuid.UUID `json:"status_id"` + Status string `json:"status"` + StatusTime time.Time `json:"status_time"` + Slug string `json:"slug"` + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + Type string `json:"type"` + Icon *string `json:"icon"` + Geometry go_geom.T `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + Projects []model.IDSlugName `json:"projects"` + Constants []uuid.UUID `json:"constants"` + Groups []uuid.UUID `json:"groups"` + AlertConfigs []uuid.UUID `json:"alert_configs"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + HasCwms bool `json:"has_cwms"` + ShowCwmsTab bool `json:"show_cwms_tab"` + Opts model.Opts `json:"opts"` +} + +func (q *Queries) ListInstruments(ctx context.Context, id interface{}) ([]ListInstrumentsRow, error) { + rows, err := q.db.Query(ctx, listInstruments, id) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListInstrumentsRow{} + for rows.Next() { + var i ListInstrumentsRow + if err := rows.Scan( + &i.ID, + &i.StatusID, + &i.Status, + &i.StatusTime, + &i.Slug, + &i.Name, + &i.TypeID, + &i.Type, + &i.Icon, + &i.Geometry, + &i.Station, + &i.StationOffset, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.Projects, + &i.Constants, + &i.Groups, + &i.AlertConfigs, + &i.NidID, + &i.UsgsID, + &i.HasCwms, + &i.ShowCwmsTab, + &i.Opts, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateInstrument = `-- name: UpdateInstrument :exec +update instrument set + name = $3, + type_id = $4, + geometry = ST_GeomFromWKB($5), + updater = $6, + update_date = $7, + station = $8, + 
station_offset = $9, + nid_id = $10, + usgs_id = $11, + show_cwms_tab = $12 +where id = $2 +and id in ( + select instrument_id + from project_instrument + where project_id = $1 +) +` + +type UpdateInstrumentParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + StGeomfromwkb interface{} `json:"st_geomfromwkb"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` +} + +func (q *Queries) UpdateInstrument(ctx context.Context, arg UpdateInstrumentParams) error { + _, err := q.db.Exec(ctx, updateInstrument, + arg.ProjectID, + arg.ID, + arg.Name, + arg.TypeID, + arg.StGeomfromwkb, + arg.Updater, + arg.UpdateDate, + arg.Station, + arg.StationOffset, + arg.NidID, + arg.UsgsID, + arg.ShowCwmsTab, + ) + return err +} + +const updateInstrumentGeometry = `-- name: UpdateInstrumentGeometry :one +update instrument set + geometry = st_geomfromwkb($3), + updater = $4, + update_date = now() +where id = $2 +and id in ( + select instrument_id + from project_instrument + where project_id = $1 +) +returning id +` + +type UpdateInstrumentGeometryParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + StGeomfromwkb interface{} `json:"st_geomfromwkb"` + Updater pgtype.UUID `json:"updater"` +} + +func (q *Queries) UpdateInstrumentGeometry(ctx context.Context, arg UpdateInstrumentGeometryParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, updateInstrumentGeometry, + arg.ProjectID, + arg.ID, + arg.StGeomfromwkb, + arg.Updater, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} diff --git a/api/internal/db/instrument_assign.sql_gen.go b/api/internal/db/instrument_assign.sql_gen.go new file mode 100644 index 
00000000..7fb9ef3f --- /dev/null +++ b/api/internal/db/instrument_assign.sql_gen.go @@ -0,0 +1,203 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: instrument_assign.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const assignInstrumentToProject = `-- name: AssignInstrumentToProject :exec +insert into project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing +` + +type AssignInstrumentToProjectParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) AssignInstrumentToProject(ctx context.Context, arg AssignInstrumentToProjectParams) error { + _, err := q.db.Exec(ctx, assignInstrumentToProject, arg.ProjectID, arg.InstrumentID) + return err +} + +const unassignInstrumentFromProject = `-- name: UnassignInstrumentFromProject :exec +delete from project_instrument where project_id = $1 and instrument_id = $2 +` + +type UnassignInstrumentFromProjectParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) UnassignInstrumentFromProject(ctx context.Context, arg UnassignInstrumentFromProjectParams) error { + _, err := q.db.Exec(ctx, unassignInstrumentFromProject, arg.ProjectID, arg.InstrumentID) + return err +} + +const validateInstrumentNamesProjectUnique = `-- name: ValidateInstrumentNamesProjectUnique :many +select i.name +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.project_id = $1 +and i.name in ($2::text[]) +and not i.deleted +` + +type ValidateInstrumentNamesProjectUniqueParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentName []string `json:"instrument_name"` +} + +func (q *Queries) ValidateInstrumentNamesProjectUnique(ctx context.Context, arg 
ValidateInstrumentNamesProjectUniqueParams) ([]string, error) { + rows, err := q.db.Query(ctx, validateInstrumentNamesProjectUnique, arg.ProjectID, arg.InstrumentName) + if err != nil { + return nil, err + } + defer rows.Close() + items := []string{} + for rows.Next() { + var name string + if err := rows.Scan(&name); err != nil { + return nil, err + } + items = append(items, name) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const validateInstrumentsAssignerAuthorized = `-- name: ValidateInstrumentsAssignerAuthorized :many +select p.name as project_name, i.name as instrument_name +from project_instrument pi +inner join project p on pi.project_id = p.id +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id in ($1::uuid[]) +and not exists ( + select 1 from v_profile_project_roles ppr + where ppr.profile_id = $2 + and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) +) +and not i.deleted +` + +type ValidateInstrumentsAssignerAuthorizedParams struct { + InstrumentIds []uuid.UUID `json:"instrument_ids"` + ProfileID uuid.UUID `json:"profile_id"` +} + +type ValidateInstrumentsAssignerAuthorizedRow struct { + ProjectName string `json:"project_name"` + InstrumentName string `json:"instrument_name"` +} + +func (q *Queries) ValidateInstrumentsAssignerAuthorized(ctx context.Context, arg ValidateInstrumentsAssignerAuthorizedParams) ([]ValidateInstrumentsAssignerAuthorizedRow, error) { + rows, err := q.db.Query(ctx, validateInstrumentsAssignerAuthorized, arg.InstrumentIds, arg.ProfileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ValidateInstrumentsAssignerAuthorizedRow{} + for rows.Next() { + var i ValidateInstrumentsAssignerAuthorizedRow + if err := rows.Scan(&i.ProjectName, &i.InstrumentName); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const 
validateProjectsAssignerAuthorized = `-- name: ValidateProjectsAssignerAuthorized :many +select p.name +from project_instrument pi +inner join project p on pi.project_id = p.id +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = $1 +and pi.project_id in ($2::uuid[]) +and not exists ( + select 1 from v_profile_project_roles ppr + where profile_id = $3 + and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) +) +and not i.deleted +order by p.name +` + +type ValidateProjectsAssignerAuthorizedParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ProjectIds []uuid.UUID `json:"project_ids"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) ValidateProjectsAssignerAuthorized(ctx context.Context, arg ValidateProjectsAssignerAuthorizedParams) ([]string, error) { + rows, err := q.db.Query(ctx, validateProjectsAssignerAuthorized, arg.InstrumentID, arg.ProjectIds, arg.ProfileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []string{} + for rows.Next() { + var name string + if err := rows.Scan(&name); err != nil { + return nil, err + } + items = append(items, name) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const validateProjectsInstrumentNameUnique = `-- name: ValidateProjectsInstrumentNameUnique :many +select p.name, i.name +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +inner join project p on pi.project_id = p.id +where i.name = $1 +and pi.instrument_id in ($2::uuid[]) +and not i.deleted +order by pi.project_id +` + +type ValidateProjectsInstrumentNameUniqueParams struct { + Name string `json:"name"` + InstrumentID []uuid.UUID `json:"instrument_id"` +} + +type ValidateProjectsInstrumentNameUniqueRow struct { + Name string `json:"name"` + Name_2 string `json:"name_2"` +} + +func (q *Queries) ValidateProjectsInstrumentNameUnique(ctx context.Context, arg ValidateProjectsInstrumentNameUniqueParams) 
([]ValidateProjectsInstrumentNameUniqueRow, error) { + rows, err := q.db.Query(ctx, validateProjectsInstrumentNameUnique, arg.Name, arg.InstrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ValidateProjectsInstrumentNameUniqueRow{} + for rows.Next() { + var i ValidateProjectsInstrumentNameUniqueRow + if err := rows.Scan(&i.Name, &i.Name_2); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/instrument_constant.sql_gen.go b/api/internal/db/instrument_constant.sql_gen.go new file mode 100644 index 00000000..41412e6b --- /dev/null +++ b/api/internal/db/instrument_constant.sql_gen.go @@ -0,0 +1,81 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: instrument_constant.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const createInstrumentConstant = `-- name: CreateInstrumentConstant :exec +insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2) +` + +type CreateInstrumentConstantParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) CreateInstrumentConstant(ctx context.Context, arg CreateInstrumentConstantParams) error { + _, err := q.db.Exec(ctx, createInstrumentConstant, arg.InstrumentID, arg.TimeseriesID) + return err +} + +const deleteInstrumentConstant = `-- name: DeleteInstrumentConstant :exec +delete from instrument_constants where instrument_id = $1 and timeseries_id = $2 +` + +type DeleteInstrumentConstantParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) DeleteInstrumentConstant(ctx context.Context, arg DeleteInstrumentConstantParams) error { + _, err := q.db.Exec(ctx, deleteInstrumentConstant, arg.InstrumentID, 
arg.TimeseriesID) + return err +} + +const listInstrumentConstants = `-- name: ListInstrumentConstants :many +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join instrument_constants ic on ic.timeseries_id = t.id +where ic.instrument_id = $1 +` + +func (q *Queries) ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, listInstrumentConstants, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/instrument_group.sql_gen.go b/api/internal/db/instrument_group.sql_gen.go new file mode 100644 index 00000000..1145940c --- /dev/null +++ b/api/internal/db/instrument_group.sql_gen.go @@ -0,0 +1,243 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument_group.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createInstrumentGroup = `-- name: CreateInstrumentGroup :one +insert into instrument_group (slug, name, description, creator, create_date, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, slug, name, description, creator, create_date, updater, update_date, project_id +` + +type CreateInstrumentGroupParams struct { + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + ProjectID pgtype.UUID `json:"project_id"` +} + +type CreateInstrumentGroupRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID pgtype.UUID `json:"project_id"` +} + +func (q *Queries) CreateInstrumentGroup(ctx context.Context, arg CreateInstrumentGroupParams) (CreateInstrumentGroupRow, error) { + row := q.db.QueryRow(ctx, createInstrumentGroup, + arg.Name, + arg.Description, + arg.Creator, + arg.CreateDate, + arg.ProjectID, + ) + var i CreateInstrumentGroupRow + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ProjectID, + ) + return i, err +} + +const createInstrumentGroupInstruments = `-- name: CreateInstrumentGroupInstruments :exec +insert into instrument_group_instruments (instrument_group_id, instrument_id) values ($1, $2) +` + +type CreateInstrumentGroupInstrumentsParams struct { + InstrumentGroupID uuid.UUID `json:"instrument_group_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func 
(q *Queries) CreateInstrumentGroupInstruments(ctx context.Context, arg CreateInstrumentGroupInstrumentsParams) error { + _, err := q.db.Exec(ctx, createInstrumentGroupInstruments, arg.InstrumentGroupID, arg.InstrumentID) + return err +} + +const deleteFlagInstrumentGroup = `-- name: DeleteFlagInstrumentGroup :exec +update instrument_group set deleted = true where id = $1 +` + +func (q *Queries) DeleteFlagInstrumentGroup(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteFlagInstrumentGroup, id) + return err +} + +const deleteInstrumentGroupInstruments = `-- name: DeleteInstrumentGroupInstruments :exec +delete from instrument_group_instruments where instrument_group_id = $1 and instrument_id = $2 +` + +type DeleteInstrumentGroupInstrumentsParams struct { + InstrumentGroupID uuid.UUID `json:"instrument_group_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) DeleteInstrumentGroupInstruments(ctx context.Context, arg DeleteInstrumentGroupInstrumentsParams) error { + _, err := q.db.Exec(ctx, deleteInstrumentGroupInstruments, arg.InstrumentGroupID, arg.InstrumentID) + return err +} + +const listInstrumentGroupInstruments = `-- name: ListInstrumentGroupInstruments :many +select +from instrument_group_instruments igi +inner join v_instrument_group on igi.instrument_id = inst.id +where igi.instrument_group_id = $1 and inst.deleted = false +` + +type ListInstrumentGroupInstrumentsRow struct { +} + +func (q *Queries) ListInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID) ([]ListInstrumentGroupInstrumentsRow, error) { + rows, err := q.db.Query(ctx, listInstrumentGroupInstruments, instrumentGroupID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListInstrumentGroupInstrumentsRow{} + for rows.Next() { + var i ListInstrumentGroupInstrumentsRow + if err := rows.Scan(); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return 
nil, err + } + return items, nil +} + +const listInstrumentGroups = `-- name: ListInstrumentGroups :many +select + id, + slug, + name, + description, + creator, + create_date, + updater, + update_date, + project_id, + instrument_count, + timeseries_count +from v_instrument_group +where not deleted +and ($1 is not null or $1 = id) +` + +type ListInstrumentGroupsRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID pgtype.UUID `json:"project_id"` + InstrumentCount int64 `json:"instrument_count"` + TimeseriesCount interface{} `json:"timeseries_count"` +} + +func (q *Queries) ListInstrumentGroups(ctx context.Context, id interface{}) ([]ListInstrumentGroupsRow, error) { + rows, err := q.db.Query(ctx, listInstrumentGroups, id) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListInstrumentGroupsRow{} + for rows.Next() { + var i ListInstrumentGroupsRow + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ProjectID, + &i.InstrumentCount, + &i.TimeseriesCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateInstrumentGroup = `-- name: UpdateInstrumentGroup :one +update instrument_group set + name = $2, + deleted = $3, + description = $4, + updater = $5, + update_date = $6, + project_id = $7 + where id = $1 + returning id, deleted, slug, name, description, creator, create_date, updater, update_date, project_id +` + +type UpdateInstrumentGroupParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Deleted bool `json:"deleted"` + Description *string 
`json:"description"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID pgtype.UUID `json:"project_id"` +} + +func (q *Queries) UpdateInstrumentGroup(ctx context.Context, arg UpdateInstrumentGroupParams) (InstrumentGroup, error) { + row := q.db.QueryRow(ctx, updateInstrumentGroup, + arg.ID, + arg.Name, + arg.Deleted, + arg.Description, + arg.Updater, + arg.UpdateDate, + arg.ProjectID, + ) + var i InstrumentGroup + err := row.Scan( + &i.ID, + &i.Deleted, + &i.Slug, + &i.Name, + &i.Description, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ProjectID, + ) + return i, err +} diff --git a/api/internal/db/instrument_ipi.sql_gen.go b/api/internal/db/instrument_ipi.sql_gen.go new file mode 100644 index 00000000..7d8e11e6 --- /dev/null +++ b/api/internal/db/instrument_ipi.sql_gen.go @@ -0,0 +1,186 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: instrument_ipi.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createIpiOpts = `-- name: CreateIpiOpts :exec +insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type CreateIpiOptsParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime pgtype.Timestamptz `json:"initial_time"` +} + +func (q *Queries) CreateIpiOpts(ctx context.Context, arg CreateIpiOptsParams) error { + _, err := q.db.Exec(ctx, createIpiOpts, + arg.InstrumentID, + arg.NumSegments, + arg.BottomElevationTimeseriesID, + arg.InitialTime, + ) + return err +} + +const createIpiSegment = `-- name: CreateIpiSegment :exec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, 
+ temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6) +` + +type CreateIpiSegmentParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` + TiltTimeseriesID pgtype.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID pgtype.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) CreateIpiSegment(ctx context.Context, arg CreateIpiSegmentParams) error { + _, err := q.db.Exec(ctx, createIpiSegment, + arg.ID, + arg.InstrumentID, + arg.LengthTimeseriesID, + arg.TiltTimeseriesID, + arg.IncDevTimeseriesID, + arg.TempTimeseriesID, + ) + return err +} + +const getAllIpiSegmentsForInstrument = `-- name: GetAllIpiSegmentsForInstrument :many +select id, instrument_id, length_timeseries_id, length, tilt_timeseries_id, inc_dev_timeseries_id from v_ipi_segment where instrument_id = $1 +` + +func (q *Queries) GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) { + rows, err := q.db.Query(ctx, getAllIpiSegmentsForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VIpiSegment{} + for rows.Next() { + var i VIpiSegment + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.LengthTimeseriesID, + &i.Length, + &i.TiltTimeseriesID, + &i.IncDevTimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getIpiMeasurementsForInstrument = `-- name: GetIpiMeasurementsForInstrument :many +select m1.instrument_id, m1.time, m1.measurements +from v_ipi_measurement m1 +where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_ipi_measurement m2 +where m2.time in (select o.initial_time from ipi_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order 
by time asc +` + +type GetIpiMeasurementsForInstrumentParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Time time.Time `json:"time"` + Time_2 time.Time `json:"time_2"` +} + +func (q *Queries) GetIpiMeasurementsForInstrument(ctx context.Context, arg GetIpiMeasurementsForInstrumentParams) ([]VIpiMeasurement, error) { + rows, err := q.db.Query(ctx, getIpiMeasurementsForInstrument, arg.InstrumentID, arg.Time, arg.Time_2) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VIpiMeasurement{} + for rows.Next() { + var i VIpiMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateIpiOpts = `-- name: UpdateIpiOpts :exec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type UpdateIpiOptsParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime pgtype.Timestamptz `json:"initial_time"` +} + +func (q *Queries) UpdateIpiOpts(ctx context.Context, arg UpdateIpiOptsParams) error { + _, err := q.db.Exec(ctx, updateIpiOpts, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const updateIpiSegment = `-- name: UpdateIpiSegment :exec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2 +` + +type UpdateIpiSegmentParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` + TiltTimeseriesID pgtype.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID pgtype.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` +} + +func (q 
*Queries) UpdateIpiSegment(ctx context.Context, arg UpdateIpiSegmentParams) error { + _, err := q.db.Exec(ctx, updateIpiSegment, + arg.ID, + arg.InstrumentID, + arg.LengthTimeseriesID, + arg.TiltTimeseriesID, + arg.IncDevTimeseriesID, + arg.TempTimeseriesID, + ) + return err +} diff --git a/api/internal/db/instrument_note.sql_gen.go b/api/internal/db/instrument_note.sql_gen.go new file mode 100644 index 00000000..a98d6fa6 --- /dev/null +++ b/api/internal/db/instrument_note.sql_gen.go @@ -0,0 +1,159 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: instrument_note.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createInstrumentNote = `-- name: CreateInstrumentNote :one +insert into instrument_note (instrument_id, title, body, time, creator, create_date) +values ($1, $2, $3, $4, $5, $6) +returning id, instrument_id, title, body, time, creator, create_date, updater, update_date +` + +type CreateInstrumentNoteParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` +} + +func (q *Queries) CreateInstrumentNote(ctx context.Context, arg CreateInstrumentNoteParams) (InstrumentNote, error) { + row := q.db.QueryRow(ctx, createInstrumentNote, + arg.InstrumentID, + arg.Title, + arg.Body, + arg.Time, + arg.Creator, + arg.CreateDate, + ) + var i InstrumentNote + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + ) + return i, err +} + +const deleteInstrumentNote = `-- name: DeleteInstrumentNote :exec +delete from instrument_note where id = $1 +` + +func (q *Queries) DeleteInstrumentNote(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, 
deleteInstrumentNote, id) + return err +} + +const listInstrumentNotes = `-- name: ListInstrumentNotes :many +select + id, + instrument_id, + title, + body, + time, + creator, + create_date, + updater, + update_date +from instrument_note +where ($2 is null or $2 = $1) +and ($3 is null or $3 = $1) +` + +type ListInstrumentNotesParams struct { + Column1 interface{} `json:"column_1"` + InstrumentID interface{} `json:"instrument_id"` + ID interface{} `json:"id"` +} + +func (q *Queries) ListInstrumentNotes(ctx context.Context, arg ListInstrumentNotesParams) ([]InstrumentNote, error) { + rows, err := q.db.Query(ctx, listInstrumentNotes, arg.Column1, arg.InstrumentID, arg.ID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []InstrumentNote{} + for rows.Next() { + var i InstrumentNote + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateInstrumentNote = `-- name: UpdateInstrumentNote :one +update instrument_note set + title = $2, + body = $3, + time = $4, + updater = $5, + update_date = $6 +where id = $1 +returning id, instrument_id, title, body, time, creator, create_date, updater, update_date +` + +type UpdateInstrumentNoteParams struct { + ID uuid.UUID `json:"id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +func (q *Queries) UpdateInstrumentNote(ctx context.Context, arg UpdateInstrumentNoteParams) (InstrumentNote, error) { + row := q.db.QueryRow(ctx, updateInstrumentNote, + arg.ID, + arg.Title, + arg.Body, + arg.Time, + arg.Updater, + arg.UpdateDate, + ) + var i InstrumentNote + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + 
&i.Time, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + ) + return i, err +} diff --git a/api/internal/db/instrument_saa.sql_gen.go b/api/internal/db/instrument_saa.sql_gen.go new file mode 100644 index 00000000..25a04b08 --- /dev/null +++ b/api/internal/db/instrument_saa.sql_gen.go @@ -0,0 +1,194 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: instrument_saa.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createSaaOpts = `-- name: CreateSaaOpts :exec +insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type CreateSaaOptsParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime pgtype.Timestamptz `json:"initial_time"` +} + +func (q *Queries) CreateSaaOpts(ctx context.Context, arg CreateSaaOptsParams) error { + _, err := q.db.Exec(ctx, createSaaOpts, + arg.InstrumentID, + arg.NumSegments, + arg.BottomElevationTimeseriesID, + arg.InitialTime, + ) + return err +} + +const createSaaSegment = `-- name: CreateSaaSegment :exec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type CreateSaaSegmentParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` + XTimeseriesID pgtype.UUID `json:"x_timeseries_id"` + YTimeseriesID pgtype.UUID `json:"y_timeseries_id"` + ZTimeseriesID pgtype.UUID `json:"z_timeseries_id"` + TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) CreateSaaSegment(ctx context.Context, arg CreateSaaSegmentParams) 
error { + _, err := q.db.Exec(ctx, createSaaSegment, + arg.ID, + arg.InstrumentID, + arg.LengthTimeseriesID, + arg.XTimeseriesID, + arg.YTimeseriesID, + arg.ZTimeseriesID, + arg.TempTimeseriesID, + ) + return err +} + +const getAllSaaSegmentsForInstrument = `-- name: GetAllSaaSegmentsForInstrument :many +select id, instrument_id, length_timeseries_id, length, x_timeseries_id, y_timeseries_id, z_timeseries_id, temp_timeseries_id from v_saa_segment where instrument_id = $1 +` + +func (q *Queries) GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VSaaSegment, error) { + rows, err := q.db.Query(ctx, getAllSaaSegmentsForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSaaSegment{} + for rows.Next() { + var i VSaaSegment + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.LengthTimeseriesID, + &i.Length, + &i.XTimeseriesID, + &i.YTimeseriesID, + &i.ZTimeseriesID, + &i.TempTimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getSaaMeasurementsForInstrument = `-- name: GetSaaMeasurementsForInstrument :many +select m1.instrument_id, m1.time, m1.measurements +from v_saa_measurement m1 +where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_saa_measurement m2 +where m2.time in (select o.initial_time from saa_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc +` + +type GetSaaMeasurementsForInstrumentParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Time time.Time `json:"time"` + Time_2 time.Time `json:"time_2"` +} + +func (q *Queries) GetSaaMeasurementsForInstrument(ctx context.Context, arg GetSaaMeasurementsForInstrumentParams) ([]VSaaMeasurement, error) { + rows, err := q.db.Query(ctx, getSaaMeasurementsForInstrument, arg.InstrumentID, arg.Time, 
arg.Time_2) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSaaMeasurement{} + for rows.Next() { + var i VSaaMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateSaaOpts = `-- name: UpdateSaaOpts :exec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type UpdateSaaOptsParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime pgtype.Timestamptz `json:"initial_time"` +} + +func (q *Queries) UpdateSaaOpts(ctx context.Context, arg UpdateSaaOptsParams) error { + _, err := q.db.Exec(ctx, updateSaaOpts, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const updateSaaSegment = `-- name: UpdateSaaSegment :exec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2 +` + +type UpdateSaaSegmentParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` + XTimeseriesID pgtype.UUID `json:"x_timeseries_id"` + YTimeseriesID pgtype.UUID `json:"y_timeseries_id"` + ZTimeseriesID pgtype.UUID `json:"z_timeseries_id"` + TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) UpdateSaaSegment(ctx context.Context, arg UpdateSaaSegmentParams) error { + _, err := q.db.Exec(ctx, updateSaaSegment, + arg.ID, + arg.InstrumentID, + arg.LengthTimeseriesID, + arg.XTimeseriesID, + arg.YTimeseriesID, + arg.ZTimeseriesID, + arg.TempTimeseriesID, + ) + return err +} diff --git a/api/internal/db/instrument_status.sql_gen.go 
b/api/internal/db/instrument_status.sql_gen.go new file mode 100644 index 00000000..a12dbd09 --- /dev/null +++ b/api/internal/db/instrument_status.sql_gen.go @@ -0,0 +1,89 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: instrument_status.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const createOrUpdateInstrumentStatus = `-- name: CreateOrUpdateInstrumentStatus :exec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id +` + +type CreateOrUpdateInstrumentStatusParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StatusID uuid.UUID `json:"status_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) CreateOrUpdateInstrumentStatus(ctx context.Context, arg CreateOrUpdateInstrumentStatusParams) error { + _, err := q.db.Exec(ctx, createOrUpdateInstrumentStatus, arg.InstrumentID, arg.StatusID, arg.Time) + return err +} + +const deleteInstrumentStatus = `-- name: DeleteInstrumentStatus :exec +delete from instrument_status where id = $1 +` + +func (q *Queries) DeleteInstrumentStatus(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteInstrumentStatus, id) + return err +} + +const listInstrumentStatuses = `-- name: ListInstrumentStatuses :many +select + s.id, + s.status_id, + d.name status, + s.time +from instrument_status s +inner join status d on d.id = s.status_id +where ($1 is null or $1 = s.instrument_id) +and ($2 is null or $2 = s.id) +order by time desc +` + +type ListInstrumentStatusesParams struct { + InstrumentID interface{} `json:"instrument_id"` + ID interface{} `json:"id"` +} + +type ListInstrumentStatusesRow struct { + ID uuid.UUID `json:"id"` + StatusID uuid.UUID `json:"status_id"` + Status string `json:"status"` + Time time.Time `json:"time"` +} + +func (q *Queries) 
ListInstrumentStatuses(ctx context.Context, arg ListInstrumentStatusesParams) ([]ListInstrumentStatusesRow, error) { + rows, err := q.db.Query(ctx, listInstrumentStatuses, arg.InstrumentID, arg.ID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListInstrumentStatusesRow{} + for rows.Next() { + var i ListInstrumentStatusesRow + if err := rows.Scan( + &i.ID, + &i.StatusID, + &i.Status, + &i.Time, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/measurement.sql_gen.go b/api/internal/db/measurement.sql_gen.go new file mode 100644 index 00000000..5690fb3e --- /dev/null +++ b/api/internal/db/measurement.sql_gen.go @@ -0,0 +1,255 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: measurement.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const createOrUpdateTimeseriesMeasurement = `-- name: CreateOrUpdateTimeseriesMeasurement :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do update set value = excluded.value +` + +type CreateOrUpdateTimeseriesMeasurementParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) CreateOrUpdateTimeseriesMeasurement(ctx context.Context, arg CreateOrUpdateTimeseriesMeasurementParams) error { + _, err := q.db.Exec(ctx, createOrUpdateTimeseriesMeasurement, arg.TimeseriesID, arg.Time, arg.Value) + return err +} + +const createOrUpdateTimeseriesNote = `-- name: CreateOrUpdateTimeseriesNote :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = 
excluded.validated, annotation = excluded.annotation +` + +type CreateOrUpdateTimeseriesNoteParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) CreateOrUpdateTimeseriesNote(ctx context.Context, arg CreateOrUpdateTimeseriesNoteParams) error { + _, err := q.db.Exec(ctx, createOrUpdateTimeseriesNote, + arg.TimeseriesID, + arg.Time, + arg.Masked, + arg.Validated, + arg.Annotation, + ) + return err +} + +const createTimeseriesMeasruement = `-- name: CreateTimeseriesMeasruement :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do nothing +` + +type CreateTimeseriesMeasruementParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) CreateTimeseriesMeasruement(ctx context.Context, arg CreateTimeseriesMeasruementParams) error { + _, err := q.db.Exec(ctx, createTimeseriesMeasruement, arg.TimeseriesID, arg.Time, arg.Value) + return err +} + +const createTimeseriesNote = `-- name: CreateTimeseriesNote :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing +` + +type CreateTimeseriesNoteParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) CreateTimeseriesNote(ctx context.Context, arg CreateTimeseriesNoteParams) error { + _, err := q.db.Exec(ctx, createTimeseriesNote, + arg.TimeseriesID, + arg.Time, + arg.Masked, + arg.Validated, + arg.Annotation, + ) + return err +} + +const deleteTimeseriesMeasurement = `-- name: 
DeleteTimeseriesMeasurement :exec +delete from timeseries_measurement where timeseries_id = $1 and time = $2 +` + +type DeleteTimeseriesMeasurementParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) DeleteTimeseriesMeasurement(ctx context.Context, arg DeleteTimeseriesMeasurementParams) error { + _, err := q.db.Exec(ctx, deleteTimeseriesMeasurement, arg.TimeseriesID, arg.Time) + return err +} + +const deleteTimeseriesMeasurements = `-- name: DeleteTimeseriesMeasurements :exec +delete from timeseries_measurement where timeseries_id = $1 and time = $2 +` + +type DeleteTimeseriesMeasurementsParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) DeleteTimeseriesMeasurements(ctx context.Context, arg DeleteTimeseriesMeasurementsParams) error { + _, err := q.db.Exec(ctx, deleteTimeseriesMeasurements, arg.TimeseriesID, arg.Time) + return err +} + +const deleteTimeseriesMeasurementsRange = `-- name: DeleteTimeseriesMeasurementsRange :exec +delete from timeseries_measurement where timeseries_id = $1 and time > $2 and time < $3 +` + +type DeleteTimeseriesMeasurementsRangeParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Time_2 time.Time `json:"time_2"` +} + +func (q *Queries) DeleteTimeseriesMeasurementsRange(ctx context.Context, arg DeleteTimeseriesMeasurementsRangeParams) error { + _, err := q.db.Exec(ctx, deleteTimeseriesMeasurementsRange, arg.TimeseriesID, arg.Time, arg.Time_2) + return err +} + +const deleteTimeseriesNote = `-- name: DeleteTimeseriesNote :exec +delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3 +` + +type DeleteTimeseriesNoteParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Time_2 time.Time `json:"time_2"` +} + +func (q *Queries) DeleteTimeseriesNote(ctx context.Context, arg DeleteTimeseriesNoteParams) 
error { + _, err := q.db.Exec(ctx, deleteTimeseriesNote, arg.TimeseriesID, arg.Time, arg.Time_2) + return err +} + +const getTimeseriesConstantMeasurement = `-- name: GetTimeseriesConstantMeasurement :many +select + m.timeseries_id, + m.time, + m.value +from timeseries_measurement m +inner join v_timeseries_stored t on t.id = m.timeseries_id +inner join parameter p on p.id = t.parameter_id +where t.instrument_id in ( + select instrument_id + from v_timeseries_stored t + where t.id= $1 +) +and p.name = $2 +` + +type GetTimeseriesConstantMeasurementParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +type GetTimeseriesConstantMeasurementRow struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) GetTimeseriesConstantMeasurement(ctx context.Context, arg GetTimeseriesConstantMeasurementParams) ([]GetTimeseriesConstantMeasurementRow, error) { + rows, err := q.db.Query(ctx, getTimeseriesConstantMeasurement, arg.ID, arg.Name) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetTimeseriesConstantMeasurementRow{} + for rows.Next() { + var i GetTimeseriesConstantMeasurementRow + if err := rows.Scan(&i.TimeseriesID, &i.Time, &i.Value); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listTimeseriesMeasurements = `-- name: ListTimeseriesMeasurements :many +select + m.timeseries_id, + m.time, + m.value, + n.masked, + n.validated, + n.annotation +from timeseries_measurement m +left join timeseries_notes n on m.timeseries_id = n.timeseries_id and m.time = n.time +inner join timeseries t on t.id = m.timeseries_id +where t.id = $1 and m.time > $2 and m.time < $3 order by m.time asc +` + +type ListTimeseriesMeasurementsParams struct { + ID uuid.UUID `json:"id"` + Time time.Time `json:"time"` + Time_2 time.Time `json:"time_2"` +} + +type 
ListTimeseriesMeasurementsRow struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) ListTimeseriesMeasurements(ctx context.Context, arg ListTimeseriesMeasurementsParams) ([]ListTimeseriesMeasurementsRow, error) { + rows, err := q.db.Query(ctx, listTimeseriesMeasurements, arg.ID, arg.Time, arg.Time_2) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListTimeseriesMeasurementsRow{} + for rows.Next() { + var i ListTimeseriesMeasurementsRow + if err := rows.Scan( + &i.TimeseriesID, + &i.Time, + &i.Value, + &i.Masked, + &i.Validated, + &i.Annotation, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/models.go b/api/internal/db/models.go new file mode 100644 index 00000000..87aae2b8 --- /dev/null +++ b/api/internal/db/models.go @@ -0,0 +1,1335 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 + +package db + +import ( + "database/sql/driver" + "fmt" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + model "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" + go_geom "github.com/twpayne/go-geom" +) + +type JobStatus string + +const ( + JobStatusSUCCESS JobStatus = "SUCCESS" + JobStatusFAIL JobStatus = "FAIL" + JobStatusINIT JobStatus = "INIT" +) + +func (e *JobStatus) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = JobStatus(s) + case string: + *e = JobStatus(s) + default: + return fmt.Errorf("unsupported scan type for JobStatus: %T", src) + } + return nil +} + +type NullJobStatus struct { + JobStatus JobStatus `json:"job_status"` + Valid bool `json:"valid"` // Valid is true if JobStatus is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullJobStatus) Scan(value interface{}) error { + if value == nil { + ns.JobStatus, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.JobStatus.Scan(value) +} + +// Value implements the driver Valuer interface. 
+func (ns NullJobStatus) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.JobStatus), nil +} + +type LineStyle string + +const ( + LineStyleSolid LineStyle = "solid" + LineStyleDot LineStyle = "dot" + LineStyleDash LineStyle = "dash" + LineStyleLongdash LineStyle = "longdash" + LineStyleDashdot LineStyle = "dashdot" + LineStyleLongdashdot LineStyle = "longdashdot" +) + +func (e *LineStyle) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = LineStyle(s) + case string: + *e = LineStyle(s) + default: + return fmt.Errorf("unsupported scan type for LineStyle: %T", src) + } + return nil +} + +type NullLineStyle struct { + LineStyle LineStyle `json:"line_style"` + Valid bool `json:"valid"` // Valid is true if LineStyle is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullLineStyle) Scan(value interface{}) error { + if value == nil { + ns.LineStyle, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.LineStyle.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullLineStyle) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.LineStyle), nil +} + +type PlotType string + +const ( + PlotTypeScatterLine PlotType = "scatter-line" + PlotTypeProfile PlotType = "profile" + PlotTypeContour PlotType = "contour" + PlotTypeBullseye PlotType = "bullseye" +) + +func (e *PlotType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = PlotType(s) + case string: + *e = PlotType(s) + default: + return fmt.Errorf("unsupported scan type for PlotType: %T", src) + } + return nil +} + +type NullPlotType struct { + PlotType PlotType `json:"plot_type"` + Valid bool `json:"valid"` // Valid is true if PlotType is not NULL +} + +// Scan implements the Scanner interface. 
+func (ns *NullPlotType) Scan(value interface{}) error { + if value == nil { + ns.PlotType, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.PlotType.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullPlotType) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.PlotType), nil +} + +type TimeseriesType string + +const ( + TimeseriesTypeStandard TimeseriesType = "standard" + TimeseriesTypeConstant TimeseriesType = "constant" + TimeseriesTypeComputed TimeseriesType = "computed" + TimeseriesTypeCwms TimeseriesType = "cwms" +) + +func (e *TimeseriesType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = TimeseriesType(s) + case string: + *e = TimeseriesType(s) + default: + return fmt.Errorf("unsupported scan type for TimeseriesType: %T", src) + } + return nil +} + +type NullTimeseriesType struct { + TimeseriesType TimeseriesType `json:"timeseries_type"` + Valid bool `json:"valid"` // Valid is true if TimeseriesType is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullTimeseriesType) Scan(value interface{}) error { + if value == nil { + ns.TimeseriesType, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.TimeseriesType.Scan(value) +} + +// Value implements the driver Valuer interface. 
+func (ns NullTimeseriesType) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.TimeseriesType), nil +} + +type TraceType string + +const ( + TraceTypeBar TraceType = "bar" + TraceTypeScattergl TraceType = "scattergl" +) + +func (e *TraceType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = TraceType(s) + case string: + *e = TraceType(s) + default: + return fmt.Errorf("unsupported scan type for TraceType: %T", src) + } + return nil +} + +type NullTraceType struct { + TraceType TraceType `json:"trace_type"` + Valid bool `json:"valid"` // Valid is true if TraceType is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullTraceType) Scan(value interface{}) error { + if value == nil { + ns.TraceType, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.TraceType.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullTraceType) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.TraceType), nil +} + +type UploaderConfigType string + +const ( + UploaderConfigTypeCsv UploaderConfigType = "csv" + UploaderConfigTypeDux UploaderConfigType = "dux" + UploaderConfigTypeToa5 UploaderConfigType = "toa5" +) + +func (e *UploaderConfigType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = UploaderConfigType(s) + case string: + *e = UploaderConfigType(s) + default: + return fmt.Errorf("unsupported scan type for UploaderConfigType: %T", src) + } + return nil +} + +type NullUploaderConfigType struct { + UploaderConfigType UploaderConfigType `json:"uploader_config_type"` + Valid bool `json:"valid"` // Valid is true if UploaderConfigType is not NULL +} + +// Scan implements the Scanner interface. 
+func (ns *NullUploaderConfigType) Scan(value interface{}) error { + if value == nil { + ns.UploaderConfigType, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.UploaderConfigType.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullUploaderConfigType) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.UploaderConfigType), nil +} + +type YAxis string + +const ( + YAxisY1 YAxis = "y1" + YAxisY2 YAxis = "y2" +) + +func (e *YAxis) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = YAxis(s) + case string: + *e = YAxis(s) + default: + return fmt.Errorf("unsupported scan type for YAxis: %T", src) + } + return nil +} + +type NullYAxis struct { + YAxis YAxis `json:"y_axis"` + Valid bool `json:"valid"` // Valid is true if YAxis is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullYAxis) Scan(value interface{}) error { + if value == nil { + ns.YAxis, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.YAxis.Scan(value) +} + +// Value implements the driver Valuer interface. 
+func (ns NullYAxis) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.YAxis), nil +} + +type Agency struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +type Alert struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreateDate time.Time `json:"create_date"` +} + +type AlertConfig struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + StartDate time.Time `json:"start_date"` + ScheduleInterval pgtype.Interval `json:"schedule_interval"` + NMissedBeforeAlert int32 `json:"n_missed_before_alert"` + WarningInterval pgtype.Interval `json:"warning_interval"` + RemindInterval pgtype.Interval `json:"remind_interval"` + LastChecked pgtype.Timestamptz `json:"last_checked"` + LastReminded pgtype.Timestamptz `json:"last_reminded"` + Deleted bool `json:"deleted"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` +} + +type AlertConfigInstrument struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +type AlertEmailSubscription struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"email_id"` + MuteNotify bool `json:"mute_notify"` +} + +type AlertProfileSubscription struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` + MuteUi bool `json:"mute_ui"` + MuteNotify bool `json:"mute_notify"` +} + +type AlertRead struct { + AlertID uuid.UUID `json:"alert_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +type AlertType struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +type 
AwareParameter struct { + ID uuid.UUID `json:"id"` + Key string `json:"key"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + TimeseriesSlug string `json:"timeseries_slug"` + TimeseriesName string `json:"timeseries_name"` +} + +type AwarePlatform struct { + ID uuid.UUID `json:"id"` + AwareID uuid.UUID `json:"aware_id"` + InstrumentID pgtype.UUID `json:"instrument_id"` +} + +type AwarePlatformParameterEnabled struct { + AwarePlatformID uuid.UUID `json:"aware_platform_id"` + AwareParameterID uuid.UUID `json:"aware_parameter_id"` +} + +type Calculation struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Contents *string `json:"contents"` +} + +type CollectionGroup struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +type CollectionGroupTimeseries struct { + CollectionGroupID uuid.UUID `json:"collection_group_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +type Config struct { + StaticHost string `json:"static_host"` + StaticPrefix string `json:"static_prefix"` +} + +type Datalogger struct { + ID uuid.UUID `json:"id"` + Sn string `json:"sn"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater uuid.UUID `json:"updater"` + UpdateDate time.Time `json:"update_date"` + Name string `json:"name"` + Slug string `json:"slug"` + ModelID uuid.UUID `json:"model_id"` + Deleted bool `json:"deleted"` +} + +type DataloggerEquivalencyTable struct { + ID uuid.UUID `json:"id"` + DataloggerID uuid.UUID `json:"datalogger_id"` + DataloggerDeleted bool `json:"datalogger_deleted"` + FieldName string `json:"field_name"` + DisplayName *string `json:"display_name"` + InstrumentID pgtype.UUID 
`json:"instrument_id"` + TimeseriesID pgtype.UUID `json:"timeseries_id"` + DataloggerTableID pgtype.UUID `json:"datalogger_table_id"` +} + +type DataloggerError struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + ErrorMessage *string `json:"error_message"` + DataloggerTableID pgtype.UUID `json:"datalogger_table_id"` +} + +type DataloggerHash struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + Hash string `json:"hash"` +} + +type DataloggerModel struct { + ID uuid.UUID `json:"id"` + Model *string `json:"model"` +} + +type DataloggerPreview struct { + Preview []byte `json:"preview"` + UpdateDate time.Time `json:"update_date"` + DataloggerTableID uuid.UUID `json:"datalogger_table_id"` +} + +type DataloggerTable struct { + ID uuid.UUID `json:"id"` + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` +} + +type District struct { + ID uuid.UUID `json:"id"` + DivisionID uuid.UUID `json:"division_id"` + Name *string `json:"name"` + Initials *string `json:"initials"` + OfficeID pgtype.UUID `json:"office_id"` +} + +type Division struct { + ID uuid.UUID `json:"id"` + Name *string `json:"name"` + Initials *string `json:"initials"` + AgencyID uuid.UUID `json:"agency_id"` +} + +type Email struct { + ID uuid.UUID `json:"id"` + Email string `json:"email"` +} + +type Evaluation struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartDate time.Time `json:"start_date"` + EndDate time.Time `json:"end_date"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + SubmittalID pgtype.UUID `json:"submittal_id"` +} + +type EvaluationInstrument struct { + EvaluationID pgtype.UUID `json:"evaluation_id"` + InstrumentID pgtype.UUID `json:"instrument_id"` +} + +type Heartbeat struct { + Time time.Time `json:"time"` +} + +type 
InclinometerMeasurement struct { + Time time.Time `json:"time"` + Values []byte `json:"values"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +type Instrument struct { + ID uuid.UUID `json:"id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string `json:"name"` + Geometry interface{} `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + TypeID uuid.UUID `json:"type_id"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` +} + +type InstrumentConstant struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +type InstrumentGroup struct { + ID uuid.UUID `json:"id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID pgtype.UUID `json:"project_id"` +} + +type InstrumentGroupInstrument struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentGroupID uuid.UUID `json:"instrument_group_id"` +} + +type InstrumentNote struct { + ID uuid.UUID `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +type InstrumentStatus struct { + ID uuid.UUID `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + 
StatusID uuid.UUID `json:"status_id"` + Time time.Time `json:"time"` +} + +type InstrumentTelemetry struct { + ID uuid.UUID `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + TelemetryTypeID uuid.UUID `json:"telemetry_type_id"` + TelemetryID uuid.UUID `json:"telemetry_id"` +} + +type InstrumentType struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Icon *string `json:"icon"` +} + +type IpiOpt struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime pgtype.Timestamptz `json:"initial_time"` +} + +type IpiSegment struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ID int32 `json:"id"` + LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` + TiltTimeseriesID pgtype.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID pgtype.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` +} + +type Measure struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +type Office struct { + ID uuid.UUID `json:"id"` +} + +type Parameter struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +type PlotBullseyeConfig struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID pgtype.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID pgtype.UUID `json:"y_axis_timeseries_id"` +} + +type PlotConfiguration struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + PlotType PlotType `json:"plot_type"` +} + +type PlotConfigurationCustomShape struct { + PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint 
float32 `json:"data_point"` + Color string `json:"color"` +} + +type PlotConfigurationSetting struct { + ID uuid.UUID `json:"id"` + ShowMasked bool `json:"show_masked"` + ShowNonvalidated bool `json:"show_nonvalidated"` + ShowComments bool `json:"show_comments"` + AutoRange bool `json:"auto_range"` + DateRange string `json:"date_range"` + Threshold int32 `json:"threshold"` +} + +type PlotConfigurationTimeseriesTrace struct { + PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` + TimeseriesID pgtype.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + TraceType TraceType `json:"trace_type"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` +} + +type PlotContourConfig struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time pgtype.Timestamptz `json:"time"` + LocfBackfill pgtype.Interval `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` +} + +type PlotContourConfigTimeseries struct { + PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +type PlotProfileConfig struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +type PlotScatterLineConfig struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +type Profile struct { + ID uuid.UUID `json:"id"` + Edipi int64 `json:"edipi"` + Username string `json:"username"` + Email string `json:"email"` + IsAdmin bool `json:"is_admin"` + DisplayName string `json:"display_name"` +} + +type ProfileProjectRole struct { + ID uuid.UUID `json:"id"` + ProfileID uuid.UUID `json:"profile_id"` + RoleID uuid.UUID `json:"role_id"` + ProjectID uuid.UUID 
`json:"project_id"` + GrantedBy pgtype.UUID `json:"granted_by"` + GrantedDate time.Time `json:"granted_date"` +} + +type ProfileToken struct { + ID uuid.UUID `json:"id"` + TokenID string `json:"token_id"` + ProfileID uuid.UUID `json:"profile_id"` + Issued time.Time `json:"issued"` + Hash string `json:"hash"` +} + +type Project struct { + ID uuid.UUID `json:"id"` + Image *string `json:"image"` + FederalID *string `json:"federal_id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string `json:"name"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + DistrictID pgtype.UUID `json:"district_id"` +} + +type ProjectInstrument struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +type ReportConfig struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + DateRange *string `json:"date_range"` + DateRangeEnabled *bool `json:"date_range_enabled"` + ShowMasked *bool `json:"show_masked"` + ShowMaskedEnabled *bool `json:"show_masked_enabled"` + ShowNonvalidated *bool `json:"show_nonvalidated"` + ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` +} + +type ReportConfigPlotConfig struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +type ReportDownloadJob struct { + ID uuid.UUID `json:"id"` + ReportConfigID pgtype.UUID `json:"report_config_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Status JobStatus `json:"status"` + FileKey *string `json:"file_key"` + FileExpiry pgtype.Timestamptz 
`json:"file_expiry"` + Progress int32 `json:"progress"` + ProgressUpdateDate time.Time `json:"progress_update_date"` +} + +type Role struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Deleted bool `json:"deleted"` +} + +type SaaOpt struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime pgtype.Timestamptz `json:"initial_time"` +} + +type SaaSegment struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ID int32 `json:"id"` + LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` + XTimeseriesID pgtype.UUID `json:"x_timeseries_id"` + YTimeseriesID pgtype.UUID `json:"y_timeseries_id"` + ZTimeseriesID pgtype.UUID `json:"z_timeseries_id"` + TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` +} + +type Status struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Description *string `json:"description"` +} + +type Submittal struct { + ID uuid.UUID `json:"id"` + AlertConfigID pgtype.UUID `json:"alert_config_id"` + SubmittalStatusID pgtype.UUID `json:"submittal_status_id"` + CompletionDate pgtype.Timestamptz `json:"completion_date"` + CreateDate time.Time `json:"create_date"` + DueDate time.Time `json:"due_date"` + MarkedAsMissing bool `json:"marked_as_missing"` + WarningSent bool `json:"warning_sent"` +} + +type SubmittalStatus struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +type TelemetryGo struct { + ID uuid.UUID `json:"id"` + NesdisID string `json:"nesdis_id"` +} + +type TelemetryIridium struct { + ID uuid.UUID `json:"id"` + Imei string `json:"imei"` +} + +type TelemetryType struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` +} + +type Timeseries struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + InstrumentID pgtype.UUID `json:"instrument_id"` + ParameterID uuid.UUID 
`json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type NullTimeseriesType `json:"type"` +} + +type TimeseriesCwm struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime pgtype.Timestamptz `json:"cwms_extent_latest_time"` +} + +type TimeseriesMeasurement struct { + Time time.Time `json:"time"` + Value float64 `json:"value"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +type TimeseriesNote struct { + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` +} + +type Unit struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Abbreviation string `json:"abbreviation"` + UnitFamilyID pgtype.UUID `json:"unit_family_id"` + MeasureID pgtype.UUID `json:"measure_id"` +} + +type UnitFamily struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +type UploaderConfig struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Description string `json:"description"` + CreateDate time.Time `json:"create_date"` + Creator uuid.UUID `json:"creator"` + Type UploaderConfigType `json:"type"` +} + +type UploaderConfigMapping struct { + UploaderConfigID uuid.UUID `json:"uploader_config_id"` + FieldName string `json:"field_name"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +type VAlert struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreateDate time.Time `json:"create_date"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + Instruments interface{} `json:"instruments"` +} + +type VAlertCheckEvaluationSubmittal struct { + AlertConfigID uuid.UUID 
`json:"alert_config_id"` + SubmittalID uuid.UUID `json:"submittal_id"` + ShouldWarn interface{} `json:"should_warn"` + ShouldAlert interface{} `json:"should_alert"` + ShouldRemind interface{} `json:"should_remind"` +} + +type VAlertCheckMeasurementSubmittal struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + SubmittalID uuid.UUID `json:"submittal_id"` + ShouldWarn interface{} `json:"should_warn"` + ShouldAlert interface{} `json:"should_alert"` + ShouldRemind interface{} `json:"should_remind"` + AffectedTimeseries string `json:"affected_timeseries"` +} + +type VAlertConfig struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Body string `json:"body"` + Creator pgtype.UUID `json:"creator"` + CreatorUsername string `json:"creator_username"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdaterUsername *string `json:"updater_username"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + AlertType string `json:"alert_type"` + StartDate time.Time `json:"start_date"` + ScheduleInterval string `json:"schedule_interval"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + RemindInterval string `json:"remind_interval"` + WarningInterval string `json:"warning_interval"` + LastChecked pgtype.Timestamptz `json:"last_checked"` + LastReminded pgtype.Timestamptz `json:"last_reminded"` + Instruments interface{} `json:"instruments"` + AlertEmailSubscriptions interface{} `json:"alert_email_subscriptions"` +} + +type VAwarePlatformParameterEnabled struct { + InstrumentID uuid.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` + AwareParameterKey string `json:"aware_parameter_key"` + TimeseriesID pgtype.UUID `json:"timeseries_id"` +} + +type VDatalogger struct { + ID uuid.UUID `json:"id"` + Sn string `json:"sn"` + ProjectID uuid.UUID `json:"project_id"` + Creator 
uuid.UUID `json:"creator"` + CreatorUsername string `json:"creator_username"` + CreateDate time.Time `json:"create_date"` + Updater uuid.UUID `json:"updater"` + UpdaterUsername string `json:"updater_username"` + UpdateDate time.Time `json:"update_date"` + Name string `json:"name"` + Slug string `json:"slug"` + ModelID uuid.UUID `json:"model_id"` + Model *string `json:"model"` + Errors interface{} `json:"errors"` + Tables string `json:"tables"` +} + +type VDataloggerEquivalencyTable struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + DataloggerTableID uuid.UUID `json:"datalogger_table_id"` + DataloggerTableName string `json:"datalogger_table_name"` + Fields string `json:"fields"` +} + +type VDataloggerHash struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + Hash string `json:"hash"` + Model *string `json:"model"` + Sn string `json:"sn"` +} + +type VDataloggerPreview struct { + DataloggerTableID uuid.UUID `json:"datalogger_table_id"` + Preview []byte `json:"preview"` + UpdateDate time.Time `json:"update_date"` +} + +type VDistrict struct { + Agency string `json:"agency"` + ID uuid.UUID `json:"id"` + Name *string `json:"name"` + Initials *string `json:"initials"` + DivisionName *string `json:"division_name"` + DivisionInitials *string `json:"division_initials"` + OfficeID pgtype.UUID `json:"office_id"` +} + +type VDistrictRollup struct { + AlertTypeID uuid.UUID `json:"alert_type_id"` + OfficeID pgtype.UUID `json:"office_id"` + DistrictInitials *string `json:"district_initials"` + ProjectName string `json:"project_name"` + ProjectID uuid.UUID `json:"project_id"` + TheMonth pgtype.Interval `json:"the_month"` + ExpectedTotalSubmittals int64 `json:"expected_total_submittals"` + ActualTotalSubmittals int64 `json:"actual_total_submittals"` + RedSubmittals int64 `json:"red_submittals"` + YellowSubmittals int64 `json:"yellow_submittals"` + GreenSubmittals int64 `json:"green_submittals"` +} + +type VDomain struct { + ID uuid.UUID `json:"id"` + Group string 
`json:"group"` + Value string `json:"value"` + Description *string `json:"description"` +} + +type VDomainGroup struct { + Group string `json:"group"` + Opts string `json:"opts"` +} + +type VEmailAutocomplete struct { + ID uuid.UUID `json:"id"` + UserType string `json:"user_type"` + Username interface{} `json:"username"` + Email string `json:"email"` + UsernameEmail string `json:"username_email"` +} + +type VEvaluation struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Body string `json:"body"` + Creator pgtype.UUID `json:"creator"` + CreatorUsername string `json:"creator_username"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdaterUsername *string `json:"updater_username"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + AlertConfigID pgtype.UUID `json:"alert_config_id"` + AlertConfigName *string `json:"alert_config_name"` + SubmittalID pgtype.UUID `json:"submittal_id"` + StartDate time.Time `json:"start_date"` + EndDate time.Time `json:"end_date"` + Instruments interface{} `json:"instruments"` +} + +type VInstrument struct { + ID uuid.UUID `json:"id"` + Deleted bool `json:"deleted"` + StatusID uuid.UUID `json:"status_id"` + Status string `json:"status"` + StatusTime time.Time `json:"status_time"` + Slug string `json:"slug"` + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` + Type string `json:"type"` + Icon *string `json:"icon"` + Geometry go_geom.T `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + Telemetry model.Opts `json:"telemetry"` + HasCwms bool `json:"has_cwms"` + 
Projects []model.IDSlugName `json:"projects"` + Constants []uuid.UUID `json:"constants"` + Groups []uuid.UUID `json:"groups"` + AlertConfigs []uuid.UUID `json:"alert_configs"` + Opts model.Opts `json:"opts"` +} + +type VInstrumentGroup struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID pgtype.UUID `json:"project_id"` + Deleted bool `json:"deleted"` + InstrumentCount int64 `json:"instrument_count"` + TimeseriesCount interface{} `json:"timeseries_count"` +} + +type VInstrumentTelemetry struct { + ID uuid.UUID `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + TelemetryTypeID uuid.UUID `json:"telemetry_type_id"` + TelemetryTypeSlug string `json:"telemetry_type_slug"` + TelemetryTypeName string `json:"telemetry_type_name"` +} + +type VIpiMeasurement struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Time time.Time `json:"time"` + Measurements string `json:"measurements"` +} + +type VIpiSegment struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` + Length float64 `json:"length"` + TiltTimeseriesID pgtype.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID pgtype.UUID `json:"inc_dev_timeseries_id"` +} + +type VPlotConfiguration struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ShowMasked bool `json:"show_masked"` + ShowNonvalidated bool `json:"show_nonvalidated"` + ShowComments bool `json:"show_comments"` + AutoRange bool `json:"auto_range"` + 
DateRange string `json:"date_range"` + Threshold int32 `json:"threshold"` + ReportConfigs string `json:"report_configs"` + PlotType PlotType `json:"plot_type"` + Display interface{} `json:"display"` +} + +type VProfile struct { + ID uuid.UUID `json:"id"` + Edipi int64 `json:"edipi"` + Username string `json:"username"` + DisplayName string `json:"display_name"` + Email string `json:"email"` + IsAdmin bool `json:"is_admin"` + Roles interface{} `json:"roles"` +} + +type VProfileProjectRole struct { + ID uuid.UUID `json:"id"` + ProfileID uuid.UUID `json:"profile_id"` + Edipi int64 `json:"edipi"` + Username string `json:"username"` + DisplayName string `json:"display_name"` + Email string `json:"email"` + IsAdmin bool `json:"is_admin"` + ProjectID uuid.UUID `json:"project_id"` + RoleID uuid.UUID `json:"role_id"` + Role string `json:"role"` + Rolename interface{} `json:"rolename"` +} + +type VProject struct { + ID uuid.UUID `json:"id"` + FederalID *string `json:"federal_id"` + Image interface{} `json:"image"` + DistrictID pgtype.UUID `json:"district_id"` + OfficeID pgtype.UUID `json:"office_id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string `json:"name"` + Creator uuid.UUID `json:"creator"` + CreatorUsername *string `json:"creator_username"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdaterUsername *string `json:"updater_username"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + InstrumentCount int64 `json:"instrument_count"` + InstrumentGroupCount int64 `json:"instrument_group_count"` +} + +type VReportConfig struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + DistrictName *string `json:"district_name"` + Creator uuid.UUID `json:"creator"` + CreatorUsername string `json:"creator_username"` + CreateDate time.Time 
`json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdaterUsername *string `json:"updater_username"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + PlotConfigs string `json:"plot_configs"` + GlobalOverrides string `json:"global_overrides"` +} + +type VSaaMeasurement struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Time time.Time `json:"time"` + Measurements string `json:"measurements"` +} + +type VSaaSegment struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` + Length float64 `json:"length"` + XTimeseriesID pgtype.UUID `json:"x_timeseries_id"` + YTimeseriesID pgtype.UUID `json:"y_timeseries_id"` + ZTimeseriesID pgtype.UUID `json:"z_timeseries_id"` + TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` +} + +type VSubmittal struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + AlertConfigName string `json:"alert_config_name"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + AlertTypeName string `json:"alert_type_name"` + ProjectID uuid.UUID `json:"project_id"` + SubmittalStatusID uuid.UUID `json:"submittal_status_id"` + SubmittalStatusName string `json:"submittal_status_name"` + CompletionDate pgtype.Timestamptz `json:"completion_date"` + CreateDate time.Time `json:"create_date"` + DueDate time.Time `json:"due_date"` + MarkedAsMissing bool `json:"marked_as_missing"` + WarningSent bool `json:"warning_sent"` +} + +type VTimeseries struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Type NullTimeseriesType `json:"type"` + IsComputed bool `json:"is_computed"` + Variable interface{} `json:"variable"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentSlug string `json:"instrument_slug"` + Instrument string `json:"instrument"` + ParameterID uuid.UUID `json:"parameter_id"` + Parameter string `json:"parameter"` + UnitID uuid.UUID `json:"unit_id"` + Unit string 
`json:"unit"` +} + +type VTimeseriesComputed struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + InstrumentID pgtype.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type NullTimeseriesType `json:"type"` + Contents *string `json:"contents"` +} + +type VTimeseriesCwm struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Type NullTimeseriesType `json:"type"` + IsComputed bool `json:"is_computed"` + Variable interface{} `json:"variable"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentSlug string `json:"instrument_slug"` + Instrument string `json:"instrument"` + ParameterID uuid.UUID `json:"parameter_id"` + Parameter string `json:"parameter"` + UnitID uuid.UUID `json:"unit_id"` + Unit string `json:"unit"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime pgtype.Timestamptz `json:"cwms_extent_latest_time"` +} + +type VTimeseriesDependency struct { + ID uuid.UUID `json:"id"` + InstrumentID pgtype.UUID `json:"instrument_id"` + ParsedVariable interface{} `json:"parsed_variable"` + DependencyTimeseriesID pgtype.UUID `json:"dependency_timeseries_id"` +} + +type VTimeseriesProjectMap struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + ProjectID pgtype.UUID `json:"project_id"` +} + +type VTimeseriesStored struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + InstrumentID pgtype.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type NullTimeseriesType `json:"type"` +} + +type VUnit struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Abbreviation string `json:"abbreviation"` + UnitFamilyID pgtype.UUID `json:"unit_family_id"` + UnitFamily string 
`json:"unit_family"` + MeasureID pgtype.UUID `json:"measure_id"` + Measure string `json:"measure"` +} diff --git a/api/internal/db/plot_config.sql_gen.go b/api/internal/db/plot_config.sql_gen.go new file mode 100644 index 00000000..f963f736 --- /dev/null +++ b/api/internal/db/plot_config.sql_gen.go @@ -0,0 +1,202 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: plot_config.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createPlotConfig = `-- name: CreatePlotConfig :one +insert into plot_configuration (slug, name, project_id, creator, create_date, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) +returning id +` + +type CreatePlotConfigParams struct { + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + PlotType PlotType `json:"plot_type"` +} + +func (q *Queries) CreatePlotConfig(ctx context.Context, arg CreatePlotConfigParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, createPlotConfig, + arg.Name, + arg.ProjectID, + arg.Creator, + arg.CreateDate, + arg.PlotType, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const createPlotConfigSettings = `-- name: CreatePlotConfigSettings :exec +insert into plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold) +values ($1, $2, $3, $4, $5, $6, $7) +` + +type CreatePlotConfigSettingsParams struct { + ID uuid.UUID `json:"id"` + ShowMasked bool `json:"show_masked"` + ShowNonvalidated bool `json:"show_nonvalidated"` + ShowComments bool `json:"show_comments"` + AutoRange bool `json:"auto_range"` + DateRange string `json:"date_range"` + Threshold int32 `json:"threshold"` +} + +func (q *Queries) CreatePlotConfigSettings(ctx context.Context, arg CreatePlotConfigSettingsParams) 
error { + _, err := q.db.Exec(ctx, createPlotConfigSettings, + arg.ID, + arg.ShowMasked, + arg.ShowNonvalidated, + arg.ShowComments, + arg.AutoRange, + arg.DateRange, + arg.Threshold, + ) + return err +} + +const deletePlotConfig = `-- name: DeletePlotConfig :exec +delete from plot_configuration where project_id = $1 and id = $2 +` + +type DeletePlotConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) DeletePlotConfig(ctx context.Context, arg DeletePlotConfigParams) error { + _, err := q.db.Exec(ctx, deletePlotConfig, arg.ProjectID, arg.ID) + return err +} + +const deletePlotConfigSettings = `-- name: DeletePlotConfigSettings :exec +delete from plot_configuration_settings where id = $1 +` + +func (q *Queries) DeletePlotConfigSettings(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deletePlotConfigSettings, id) + return err +} + +const listPlotConfigs = `-- name: ListPlotConfigs :many +select + id, + slug, + name, + project_id, + report_configs, + creator, + create_date, + updater, + update_date, + show_masked, + show_nonvalidated, + show_comments, + auto_range, + date_range, + threshold, + plot_type, + display +from v_plot_configuration +where ($1 is null or $1 = project_id) +and ($2 is null or $2 = id) +` + +type ListPlotConfigsParams struct { + ProjectID interface{} `json:"project_id"` + ID interface{} `json:"id"` +} + +type ListPlotConfigsRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + ReportConfigs string `json:"report_configs"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + ShowMasked bool `json:"show_masked"` + ShowNonvalidated bool `json:"show_nonvalidated"` + ShowComments bool `json:"show_comments"` + AutoRange bool `json:"auto_range"` + DateRange string 
`json:"date_range"` + Threshold int32 `json:"threshold"` + PlotType PlotType `json:"plot_type"` + Display interface{} `json:"display"` +} + +func (q *Queries) ListPlotConfigs(ctx context.Context, arg ListPlotConfigsParams) ([]ListPlotConfigsRow, error) { + rows, err := q.db.Query(ctx, listPlotConfigs, arg.ProjectID, arg.ID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListPlotConfigsRow{} + for rows.Next() { + var i ListPlotConfigsRow + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.ProjectID, + &i.ReportConfigs, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ShowMasked, + &i.ShowNonvalidated, + &i.ShowComments, + &i.AutoRange, + &i.DateRange, + &i.Threshold, + &i.PlotType, + &i.Display, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updatePlotConfig = `-- name: UpdatePlotConfig :exec +update plot_configuration set name = $3, updater = $4, update_date = $5 where project_id = $1 and id = $2 +` + +type UpdatePlotConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` +} + +func (q *Queries) UpdatePlotConfig(ctx context.Context, arg UpdatePlotConfigParams) error { + _, err := q.db.Exec(ctx, updatePlotConfig, + arg.ProjectID, + arg.ID, + arg.Name, + arg.Updater, + arg.UpdateDate, + ) + return err +} diff --git a/api/internal/db/plot_config_bullseye.sql_gen.go b/api/internal/db/plot_config_bullseye.sql_gen.go new file mode 100644 index 00000000..d1485949 --- /dev/null +++ b/api/internal/db/plot_config_bullseye.sql_gen.go @@ -0,0 +1,108 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: plot_config_bullseye.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createPlotBullseyeConfig = `-- name: CreatePlotBullseyeConfig :exec +insert into plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_timeseries_id) values ($1, $2, $3) +` + +type CreatePlotBullseyeConfigParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID pgtype.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID pgtype.UUID `json:"y_axis_timeseries_id"` +} + +func (q *Queries) CreatePlotBullseyeConfig(ctx context.Context, arg CreatePlotBullseyeConfigParams) error { + _, err := q.db.Exec(ctx, createPlotBullseyeConfig, arg.PlotConfigID, arg.XAxisTimeseriesID, arg.YAxisTimeseriesID) + return err +} + +const deletePlotBullseyeConfig = `-- name: DeletePlotBullseyeConfig :exec +delete from plot_bullseye_config where plot_config_id = $1 +` + +func (q *Queries) DeletePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, deletePlotBullseyeConfig, plotConfigID) + return err +} + +const listPlotConfigMeasurementsBullseyePlot = `-- name: ListPlotConfigMeasurementsBullseyePlot :many +select + t.time, + locf(xm.value) as x, + locf(ym.value) as y +from plot_bullseye_config pc +inner join timeseries_measurement t +on t.timeseries_id = pc.x_axis_timeseries_id +or t.timeseries_id = pc.y_axis_timeseries_id +left join timeseries_measurement xm +on xm.timeseries_id = pc.x_axis_timeseries_id +and xm.time = t.time +left join timeseries_measurement ym +on ym.timeseries_id = pc.y_axis_timeseries_id +and ym.time = t.time +where pc.plot_config_id = $1 +and t.time > $2 +and t.time < $3 +group by t.time +order by t.time asc +` + +type ListPlotConfigMeasurementsBullseyePlotParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time time.Time `json:"time"` + Time_2 
time.Time `json:"time_2"` +} + +type ListPlotConfigMeasurementsBullseyePlotRow struct { + Time time.Time `json:"time"` + X interface{} `json:"x"` + Y interface{} `json:"y"` +} + +func (q *Queries) ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, arg ListPlotConfigMeasurementsBullseyePlotParams) ([]ListPlotConfigMeasurementsBullseyePlotRow, error) { + rows, err := q.db.Query(ctx, listPlotConfigMeasurementsBullseyePlot, arg.PlotConfigID, arg.Time, arg.Time_2) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListPlotConfigMeasurementsBullseyePlotRow{} + for rows.Next() { + var i ListPlotConfigMeasurementsBullseyePlotRow + if err := rows.Scan(&i.Time, &i.X, &i.Y); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updatePlotBullseyeConfig = `-- name: UpdatePlotBullseyeConfig :exec +UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1 +` + +type UpdatePlotBullseyeConfigParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID pgtype.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID pgtype.UUID `json:"y_axis_timeseries_id"` +} + +func (q *Queries) UpdatePlotBullseyeConfig(ctx context.Context, arg UpdatePlotBullseyeConfigParams) error { + _, err := q.db.Exec(ctx, updatePlotBullseyeConfig, arg.PlotConfigID, arg.XAxisTimeseriesID, arg.YAxisTimeseriesID) + return err +} diff --git a/api/internal/db/plot_config_contour.sql_gen.go b/api/internal/db/plot_config_contour.sql_gen.go new file mode 100644 index 00000000..105539c0 --- /dev/null +++ b/api/internal/db/plot_config_contour.sql_gen.go @@ -0,0 +1,188 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: plot_config_contour.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createPlotContourConfig = `-- name: CreatePlotContourConfig :exec +insert into plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) +values ($1, $2, $3, $4, $5, $6) +` + +type CreatePlotContourConfigParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time pgtype.Timestamptz `json:"time"` + LocfBackfill pgtype.Interval `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` +} + +func (q *Queries) CreatePlotContourConfig(ctx context.Context, arg CreatePlotContourConfigParams) error { + _, err := q.db.Exec(ctx, createPlotContourConfig, + arg.PlotConfigID, + arg.Time, + arg.LocfBackfill, + arg.GradientSmoothing, + arg.ContourSmoothing, + arg.ShowLabels, + ) + return err +} + +const createPlotContourConfigTimeseries = `-- name: CreatePlotContourConfigTimeseries :exec +insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing +` + +type CreatePlotContourConfigTimeseriesParams struct { + PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) CreatePlotContourConfigTimeseries(ctx context.Context, arg CreatePlotContourConfigTimeseriesParams) error { + _, err := q.db.Exec(ctx, createPlotContourConfigTimeseries, arg.PlotContourConfigID, arg.TimeseriesID) + return err +} + +const deleteAllPlotContourConfigTimeseries = `-- name: DeleteAllPlotContourConfigTimeseries :exec +delete from plot_contour_config_timeseries where plot_contour_config_id = $1 +` + +func (q *Queries) 
DeleteAllPlotContourConfigTimeseries(ctx context.Context, plotContourConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteAllPlotContourConfigTimeseries, plotContourConfigID) + return err +} + +const deletePlotContourConfig = `-- name: DeletePlotContourConfig :exec +delete from plot_contour_config where plot_config_id = $1 +` + +func (q *Queries) DeletePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, deletePlotContourConfig, plotConfigID) + return err +} + +const listPlotConfigMeasurementsContourPlot = `-- name: ListPlotConfigMeasurementsContourPlot :many +select + oi.x, + oi.y, + locf(mm.value) as z +from plot_contour_config pc +left join plot_contour_config_timeseries pcts on pcts.plot_contour_config_id = pc.plot_config_id +left join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id +inner join timeseries ts on ts.id = pcts.timeseries_id +inner join ( + select + ii.id, + st_x(st_centroid(ii.geometry)) as x, + st_y(st_centroid(ii.geometry)) as y + from instrument ii +) oi on oi.id = ts.instrument_id +where plot_config_id = $1 +and mm.time = $2 +group by pc.plot_config_id, pcts.timeseries_id, oi.x, oi.y +` + +type ListPlotConfigMeasurementsContourPlotParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time time.Time `json:"time"` +} + +type ListPlotConfigMeasurementsContourPlotRow struct { + X interface{} `json:"x"` + Y interface{} `json:"y"` + Z interface{} `json:"z"` +} + +func (q *Queries) ListPlotConfigMeasurementsContourPlot(ctx context.Context, arg ListPlotConfigMeasurementsContourPlotParams) ([]ListPlotConfigMeasurementsContourPlotRow, error) { + rows, err := q.db.Query(ctx, listPlotConfigMeasurementsContourPlot, arg.PlotConfigID, arg.Time) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListPlotConfigMeasurementsContourPlotRow{} + for rows.Next() { + var i ListPlotConfigMeasurementsContourPlotRow + if err := rows.Scan(&i.X, &i.Y, &i.Z); err != 
nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listPlotContourConfigTimes = `-- name: ListPlotContourConfigTimes :many +select distinct mm.time +from plot_contour_config_timeseries pcts +inner join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id +where pcts.plot_contour_config_id = $1 +and mm.time > $2 +and mm.time < $3 +order by time asc +` + +type ListPlotContourConfigTimesParams struct { + PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` + Time time.Time `json:"time"` + Time_2 time.Time `json:"time_2"` +} + +func (q *Queries) ListPlotContourConfigTimes(ctx context.Context, arg ListPlotContourConfigTimesParams) ([]time.Time, error) { + rows, err := q.db.Query(ctx, listPlotContourConfigTimes, arg.PlotContourConfigID, arg.Time, arg.Time_2) + if err != nil { + return nil, err + } + defer rows.Close() + items := []time.Time{} + for rows.Next() { + var time time.Time + if err := rows.Scan(&time); err != nil { + return nil, err + } + items = append(items, time) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updatePlotContourConfig = `-- name: UpdatePlotContourConfig :exec +update plot_contour_config set "time"=$2, locf_backfill=$3, gradient_smoothing=$4, contour_smoothing=$5, show_labels=$6 +where plot_config_id=$1 +` + +type UpdatePlotContourConfigParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time pgtype.Timestamptz `json:"time"` + LocfBackfill pgtype.Interval `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` +} + +func (q *Queries) UpdatePlotContourConfig(ctx context.Context, arg UpdatePlotContourConfigParams) error { + _, err := q.db.Exec(ctx, updatePlotContourConfig, + arg.PlotConfigID, + arg.Time, + arg.LocfBackfill, + arg.GradientSmoothing, + 
arg.ContourSmoothing, + arg.ShowLabels, + ) + return err +} diff --git a/api/internal/db/plot_config_profile.sql_gen.go b/api/internal/db/plot_config_profile.sql_gen.go new file mode 100644 index 00000000..8da61f70 --- /dev/null +++ b/api/internal/db/plot_config_profile.sql_gen.go @@ -0,0 +1,41 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: plot_config_profile.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" +) + +const createPlotProfileConfig = `-- name: CreatePlotProfileConfig :exec +insert into plot_profile_config (plot_config_id, instrument_id) values ($1, $2) +` + +type CreatePlotProfileConfigParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) CreatePlotProfileConfig(ctx context.Context, arg CreatePlotProfileConfigParams) error { + _, err := q.db.Exec(ctx, createPlotProfileConfig, arg.PlotConfigID, arg.InstrumentID) + return err +} + +const updatePlotProfileConfig = `-- name: UpdatePlotProfileConfig :exec +update plot_profile_config set instrument_id=$2 where plot_config_id=$1 +` + +type UpdatePlotProfileConfigParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) UpdatePlotProfileConfig(ctx context.Context, arg UpdatePlotProfileConfigParams) error { + _, err := q.db.Exec(ctx, updatePlotProfileConfig, arg.PlotConfigID, arg.InstrumentID) + return err +} diff --git a/api/internal/db/plot_config_scatter_line.sql_gen.go b/api/internal/db/plot_config_scatter_line.sql_gen.go new file mode 100644 index 00000000..4ca69794 --- /dev/null +++ b/api/internal/db/plot_config_scatter_line.sql_gen.go @@ -0,0 +1,172 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: plot_config_scatter_line.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createPlotConfigCustomShape = `-- name: CreatePlotConfigCustomShape :exec +insert into plot_configuration_custom_shape +(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5) +` + +type CreatePlotConfigCustomShapeParams struct { + PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` +} + +func (q *Queries) CreatePlotConfigCustomShape(ctx context.Context, arg CreatePlotConfigCustomShapeParams) error { + _, err := q.db.Exec(ctx, createPlotConfigCustomShape, + arg.PlotConfigurationID, + arg.Enabled, + arg.Name, + arg.DataPoint, + arg.Color, + ) + return err +} + +const createPlotConfigScatterLineLayout = `-- name: CreatePlotConfigScatterLineLayout :exec +insert into plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) values ($1, $2, $3) +` + +type CreatePlotConfigScatterLineLayoutParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +func (q *Queries) CreatePlotConfigScatterLineLayout(ctx context.Context, arg CreatePlotConfigScatterLineLayoutParams) error { + _, err := q.db.Exec(ctx, createPlotConfigScatterLineLayout, arg.PlotConfigID, arg.YAxisTitle, arg.Y2AxisTitle) + return err +} + +const createPlotConfigTimeseriesTrace = `-- name: CreatePlotConfigTimeseriesTrace :exec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8) +` + +type CreatePlotConfigTimeseriesTraceParams struct { + PlotConfigurationID pgtype.UUID 
`json:"plot_configuration_id"` + TimeseriesID pgtype.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` +} + +func (q *Queries) CreatePlotConfigTimeseriesTrace(ctx context.Context, arg CreatePlotConfigTimeseriesTraceParams) error { + _, err := q.db.Exec(ctx, createPlotConfigTimeseriesTrace, + arg.PlotConfigurationID, + arg.TimeseriesID, + arg.TraceOrder, + arg.Color, + arg.LineStyle, + arg.Width, + arg.ShowMarkers, + arg.YAxis, + ) + return err +} + +const deleteAllPlotConfigCustomShapes = `-- name: DeleteAllPlotConfigCustomShapes :exec +delete from plot_configuration_custom_shape where plot_configuration_id=$1 +` + +func (q *Queries) DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteAllPlotConfigCustomShapes, plotConfigurationID) + return err +} + +const deleteAllPlotConfigTimeseriesTraces = `-- name: DeleteAllPlotConfigTimeseriesTraces :exec +delete from plot_configuration_timeseries_trace where plot_configuration_id=$1 +` + +func (q *Queries) DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteAllPlotConfigTimeseriesTraces, plotConfigurationID) + return err +} + +const updatePlotConfigCustomShape = `-- name: UpdatePlotConfigCustomShape :exec +update plot_configuration_custom_shape +set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1 +` + +type UpdatePlotConfigCustomShapeParams struct { + PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` +} + +func (q *Queries) UpdatePlotConfigCustomShape(ctx context.Context, arg UpdatePlotConfigCustomShapeParams) error { 
+ _, err := q.db.Exec(ctx, updatePlotConfigCustomShape, + arg.PlotConfigurationID, + arg.Enabled, + arg.Name, + arg.DataPoint, + arg.Color, + ) + return err +} + +const updatePlotConfigScatterLineLayout = `-- name: UpdatePlotConfigScatterLineLayout :exec +update plot_scatter_line_config set y_axis_title=$2, y2_axis_title=$3 where plot_config_id=$1 +` + +type UpdatePlotConfigScatterLineLayoutParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +func (q *Queries) UpdatePlotConfigScatterLineLayout(ctx context.Context, arg UpdatePlotConfigScatterLineLayoutParams) error { + _, err := q.db.Exec(ctx, updatePlotConfigScatterLineLayout, arg.PlotConfigID, arg.YAxisTitle, arg.Y2AxisTitle) + return err +} + +const updatePlotConfigTimeseriesTrace = `-- name: UpdatePlotConfigTimeseriesTrace :exec +update plot_configuration_timeseries_trace +set trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 +where plot_configuration_id=$1 and timeseries_id=$2 +` + +type UpdatePlotConfigTimeseriesTraceParams struct { + PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` + TimeseriesID pgtype.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` +} + +func (q *Queries) UpdatePlotConfigTimeseriesTrace(ctx context.Context, arg UpdatePlotConfigTimeseriesTraceParams) error { + _, err := q.db.Exec(ctx, updatePlotConfigTimeseriesTrace, + arg.PlotConfigurationID, + arg.TimeseriesID, + arg.TraceOrder, + arg.Color, + arg.LineStyle, + arg.Width, + arg.ShowMarkers, + arg.YAxis, + ) + return err +} diff --git a/api/internal/db/profile.sql_gen.go b/api/internal/db/profile.sql_gen.go new file mode 100644 index 00000000..18ba8640 --- /dev/null +++ b/api/internal/db/profile.sql_gen.go 
@@ -0,0 +1,271 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: profile.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createProfile = `-- name: CreateProfile :exec +insert into profile (edipi, username, email, display_name) values ($1, $2, $3, $4) returning id, username, email, display_name +` + +type CreateProfileParams struct { + Edipi int64 `json:"edipi"` + Username string `json:"username"` + Email string `json:"email"` + DisplayName string `json:"display_name"` +} + +func (q *Queries) CreateProfile(ctx context.Context, arg CreateProfileParams) error { + _, err := q.db.Exec(ctx, createProfile, + arg.Edipi, + arg.Username, + arg.Email, + arg.DisplayName, + ) + return err +} + +const createProfileToken = `-- name: CreateProfileToken :one +insert into profile_token (token_id, profile_id, hash) values ($1,$2,$3) returning id, token_id, profile_id, issued, hash +` + +type CreateProfileTokenParams struct { + TokenID string `json:"token_id"` + ProfileID uuid.UUID `json:"profile_id"` + Hash string `json:"hash"` +} + +func (q *Queries) CreateProfileToken(ctx context.Context, arg CreateProfileTokenParams) (ProfileToken, error) { + row := q.db.QueryRow(ctx, createProfileToken, arg.TokenID, arg.ProfileID, arg.Hash) + var i ProfileToken + err := row.Scan( + &i.ID, + &i.TokenID, + &i.ProfileID, + &i.Issued, + &i.Hash, + ) + return i, err +} + +const deleteToken = `-- name: DeleteToken :exec +delete from profile_token where profile_id=$1 and token_id=$2 +` + +type DeleteTokenParams struct { + ProfileID uuid.UUID `json:"profile_id"` + TokenID string `json:"token_id"` +} + +func (q *Queries) DeleteToken(ctx context.Context, arg DeleteTokenParams) error { + _, err := q.db.Exec(ctx, deleteToken, arg.ProfileID, arg.TokenID) + return err +} + +const getIssuedTokens = `-- name: GetIssuedTokens :many +select token_id, issued from 
profile_token where profile_id = $1 +` + +type GetIssuedTokensRow struct { + TokenID string `json:"token_id"` + Issued time.Time `json:"issued"` +} + +func (q *Queries) GetIssuedTokens(ctx context.Context, profileID uuid.UUID) ([]GetIssuedTokensRow, error) { + rows, err := q.db.Query(ctx, getIssuedTokens, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetIssuedTokensRow{} + for rows.Next() { + var i GetIssuedTokensRow + if err := rows.Scan(&i.TokenID, &i.Issued); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getProfileForEDIPI = `-- name: GetProfileForEDIPI :many +select id, edipi, username, display_name, email, is_admin, roles from v_profile where edipi = $1 +` + +func (q *Queries) GetProfileForEDIPI(ctx context.Context, edipi int64) ([]VProfile, error) { + rows, err := q.db.Query(ctx, getProfileForEDIPI, edipi) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProfile{} + for rows.Next() { + var i VProfile + if err := rows.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.DisplayName, + &i.Email, + &i.IsAdmin, + &i.Roles, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getProfileForEmail = `-- name: GetProfileForEmail :one +select id, edipi, username, display_name, email, is_admin, roles from v_profile where email ilike $1 +limit 1 +` + +func (q *Queries) GetProfileForEmail(ctx context.Context, email string) (VProfile, error) { + row := q.db.QueryRow(ctx, getProfileForEmail, email) + var i VProfile + err := row.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.DisplayName, + &i.Email, + &i.IsAdmin, + &i.Roles, + ) + return i, err +} + +const getProfileForTokenID = `-- name: GetProfileForTokenID :one +select p.id, p.edipi, p.username, p.email, p.is_admin +from profile_token t +left join 
v_profile p on p.id = t.profile_id +where t.token_id = $1 +limit 1 +` + +type GetProfileForTokenIDRow struct { + ID pgtype.UUID `json:"id"` + Edipi *int64 `json:"edipi"` + Username *string `json:"username"` + Email *string `json:"email"` + IsAdmin *bool `json:"is_admin"` +} + +func (q *Queries) GetProfileForTokenID(ctx context.Context, tokenID string) (GetProfileForTokenIDRow, error) { + row := q.db.QueryRow(ctx, getProfileForTokenID, tokenID) + var i GetProfileForTokenIDRow + err := row.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.Email, + &i.IsAdmin, + ) + return i, err +} + +const getProfileForUsername = `-- name: GetProfileForUsername :one +select id, edipi, username, display_name, email, is_admin, roles from v_profile where username = $1 +limit 1 +` + +func (q *Queries) GetProfileForUsername(ctx context.Context, username string) (VProfile, error) { + row := q.db.QueryRow(ctx, getProfileForUsername, username) + var i VProfile + err := row.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.DisplayName, + &i.Email, + &i.IsAdmin, + &i.Roles, + ) + return i, err +} + +const getTokenInfo = `-- name: GetTokenInfo :one +select id, token_id, profile_id, issued, hash from profile_token where token_id=$1 limit 1 +` + +func (q *Queries) GetTokenInfo(ctx context.Context, tokenID string) (ProfileToken, error) { + row := q.db.QueryRow(ctx, getTokenInfo, tokenID) + var i ProfileToken + err := row.Scan( + &i.ID, + &i.TokenID, + &i.ProfileID, + &i.Issued, + &i.Hash, + ) + return i, err +} + +const updateProfileForEDIPI = `-- name: UpdateProfileForEDIPI :exec +UPDATE profile SET username=$1, email=$2, display_name=$3 WHERE edipi=$4 +` + +type UpdateProfileForEDIPIParams struct { + Username string `json:"username"` + Email string `json:"email"` + DisplayName string `json:"display_name"` + Edipi int64 `json:"edipi"` +} + +func (q *Queries) UpdateProfileForEDIPI(ctx context.Context, arg UpdateProfileForEDIPIParams) error { + _, err := q.db.Exec(ctx, updateProfileForEDIPI, + 
arg.Username, + arg.Email, + arg.DisplayName, + arg.Edipi, + ) + return err +} + +const updateProfileForEmail = `-- name: UpdateProfileForEmail :exec +update profile set username=$1, display_name=$2 where email ilike $3 +` + +type UpdateProfileForEmailParams struct { + Username string `json:"username"` + DisplayName string `json:"display_name"` + Email string `json:"email"` +} + +func (q *Queries) UpdateProfileForEmail(ctx context.Context, arg UpdateProfileForEmailParams) error { + _, err := q.db.Exec(ctx, updateProfileForEmail, arg.Username, arg.DisplayName, arg.Email) + return err +} + +const updateProfileForUsername = `-- name: UpdateProfileForUsername :exec +update profile set email=$1, display_name=$2 where username=$3 +` + +type UpdateProfileForUsernameParams struct { + Email string `json:"email"` + DisplayName string `json:"display_name"` + Username string `json:"username"` +} + +func (q *Queries) UpdateProfileForUsername(ctx context.Context, arg UpdateProfileForUsernameParams) error { + _, err := q.db.Exec(ctx, updateProfileForUsername, arg.Email, arg.DisplayName, arg.Username) + return err +} diff --git a/api/internal/db/project.sql_gen.go b/api/internal/db/project.sql_gen.go new file mode 100644 index 00000000..3ac2b996 --- /dev/null +++ b/api/internal/db/project.sql_gen.go @@ -0,0 +1,444 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: project.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createProject = `-- name: CreateProject :one +insert into project (federal_id, slug, name, district_id, creator, create_date) +values ($1, slugify($2, 'project'), $2, $3, $4, $5) +returning id, slug +` + +type CreateProjectParams struct { + FederalID *string `json:"federal_id"` + Name string `json:"name"` + DistrictID pgtype.UUID `json:"district_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` +} + +type CreateProjectRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +func (q *Queries) CreateProject(ctx context.Context, arg CreateProjectParams) (CreateProjectRow, error) { + row := q.db.QueryRow(ctx, createProject, + arg.FederalID, + arg.Name, + arg.DistrictID, + arg.Creator, + arg.CreateDate, + ) + var i CreateProjectRow + err := row.Scan(&i.ID, &i.Slug) + return i, err +} + +const deleteFlagProject = `-- name: DeleteFlagProject :exec +update project set deleted=true where id = $1 +` + +func (q *Queries) DeleteFlagProject(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteFlagProject, id) + return err +} + +const getProject = `-- name: GetProject :one +select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project where id = $1 +` + +func (q *Queries) GetProject(ctx context.Context, id uuid.UUID) (VProject, error) { + row := q.db.QueryRow(ctx, getProject, id) + var i VProject + err := row.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Deleted, + &i.Slug, + &i.Name, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.InstrumentCount, + 
&i.InstrumentGroupCount, + ) + return i, err +} + +const getProjectCount = `-- name: GetProjectCount :one +select count(*) from project where not deleted +` + +func (q *Queries) GetProjectCount(ctx context.Context) (int64, error) { + row := q.db.QueryRow(ctx, getProjectCount) + var count int64 + err := row.Scan(&count) + return count, err +} + +const listDistricts = `-- name: ListDistricts :many +select agency, id, name, initials, division_name, division_initials, office_id from v_district +` + +func (q *Queries) ListDistricts(ctx context.Context) ([]VDistrict, error) { + rows, err := q.db.Query(ctx, listDistricts) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDistrict{} + for rows.Next() { + var i VDistrict + if err := rows.Scan( + &i.Agency, + &i.ID, + &i.Name, + &i.Initials, + &i.DivisionName, + &i.DivisionInitials, + &i.OfficeID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listInstrumentGroupsForProject = `-- name: ListInstrumentGroupsForProject :many +select ig.id, ig.slug, ig.name, ig.description, ig.creator, ig.create_date, ig.updater, ig.update_date, ig.project_id, ig.deleted, ig.instrument_count, ig.timeseries_count +from v_instrument_group ig +where ig.project_id = $1 +` + +func (q *Queries) ListInstrumentGroupsForProject(ctx context.Context, projectID pgtype.UUID) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, listInstrumentGroupsForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrumentGroup{} + for rows.Next() { + var i VInstrumentGroup + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ProjectID, + &i.Deleted, + &i.InstrumentCount, + &i.TimeseriesCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil 
{ + return nil, err + } + return items, nil +} + +const listInstrumentsForProject = `-- name: ListInstrumentsForProject :many +select i.id, i.deleted, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i.station_offset, i.creator, i.create_date, i.updater, i.update_date, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts +from v_instrument i +inner join project_instrument pi on pi.instrument_id = i.id +where pi.project_id = $1 +` + +func (q *Queries) ListInstrumentsForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) { + rows, err := q.db.Query(ctx, listInstrumentsForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrument{} + for rows.Next() { + var i VInstrument + if err := rows.Scan( + &i.ID, + &i.Deleted, + &i.StatusID, + &i.Status, + &i.StatusTime, + &i.Slug, + &i.Name, + &i.TypeID, + &i.ShowCwmsTab, + &i.Type, + &i.Icon, + &i.Geometry, + &i.Station, + &i.StationOffset, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.NidID, + &i.UsgsID, + &i.Telemetry, + &i.HasCwms, + &i.Projects, + &i.Constants, + &i.Groups, + &i.AlertConfigs, + &i.Opts, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listProjects = `-- name: ListProjects :many +select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project +` + +func (q *Queries) ListProjects(ctx context.Context) ([]VProject, error) { + rows, err := q.db.Query(ctx, listProjects) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProject{} + for rows.Next() { + var i VProject + if err := rows.Scan( + &i.ID, + &i.FederalID, + &i.Image, 
+ &i.DistrictID, + &i.OfficeID, + &i.Deleted, + &i.Slug, + &i.Name, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listProjectsForFederalID = `-- name: ListProjectsForFederalID :many +select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project +where federal_id = sqc.arg(federal_id) +` + +func (q *Queries) ListProjectsForFederalID(ctx context.Context) ([]VProject, error) { + rows, err := q.db.Query(ctx, listProjectsForFederalID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProject{} + for rows.Next() { + var i VProject + if err := rows.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Deleted, + &i.Slug, + &i.Name, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listProjectsForProfileRole = `-- name: ListProjectsForProfileRole :many +select p.id, p.federal_id, p.image, p.district_id, p.office_id, p.deleted, p.slug, p.name, p.creator, p.creator_username, p.create_date, p.updater, p.updater_username, p.update_date, p.instrument_count, p.instrument_group_count +from v_project p +inner join profile_project_roles pr on pr.project_id = p.id +inner join role r on r.id = pr.role_id +where pr.profile_id = $1 +and r.name = $2 +` + +type ListProjectsForProfileRoleParams struct { + ProfileID uuid.UUID `json:"profile_id"` + Name string 
`json:"name"` +} + +func (q *Queries) ListProjectsForProfileRole(ctx context.Context, arg ListProjectsForProfileRoleParams) ([]VProject, error) { + rows, err := q.db.Query(ctx, listProjectsForProfileRole, arg.ProfileID, arg.Name) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProject{} + for rows.Next() { + var i VProject + if err := rows.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Deleted, + &i.Slug, + &i.Name, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const searchProjects = `-- name: SearchProjects :many +select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project +where name ilike '%'||$1||'%' +limit $2 +` + +type SearchProjectsParams struct { + Name *string `json:"name"` + ResultLimit int32 `json:"result_limit"` +} + +func (q *Queries) SearchProjects(ctx context.Context, arg SearchProjectsParams) ([]VProject, error) { + rows, err := q.db.Query(ctx, searchProjects, arg.Name, arg.ResultLimit) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProject{} + for rows.Next() { + var i VProject + if err := rows.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Deleted, + &i.Slug, + &i.Name, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateProject = `-- name: UpdateProject 
:one +update project set name=$2, updater=$3, update_date=$4, district_id=$5, federal_id=$6 where id=$1 returning id +` + +type UpdateProjectParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + DistrictID pgtype.UUID `json:"district_id"` + FederalID *string `json:"federal_id"` +} + +func (q *Queries) UpdateProject(ctx context.Context, arg UpdateProjectParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, updateProject, + arg.ID, + arg.Name, + arg.Updater, + arg.UpdateDate, + arg.DistrictID, + arg.FederalID, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const updateProjectImage = `-- name: UpdateProjectImage :exec +update project set image = $1 where id = $2 +` + +type UpdateProjectImageParams struct { + Image *string `json:"image"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) UpdateProjectImage(ctx context.Context, arg UpdateProjectImageParams) error { + _, err := q.db.Exec(ctx, updateProjectImage, arg.Image, arg.ID) + return err +} diff --git a/api/internal/db/project_role.sql_gen.go b/api/internal/db/project_role.sql_gen.go new file mode 100644 index 00000000..3ed77cfb --- /dev/null +++ b/api/internal/db/project_role.sql_gen.go @@ -0,0 +1,171 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: project_role.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createProfileProjectRole = `-- name: CreateProfileProjectRole :one +insert into profile_project_roles (project_id, profile_id, role_id, granted_by) +values ($1, $2, $3, $4) +on conflict on constraint unique_profile_project_role do update set project_id = excluded.project_id +returning id +` + +type CreateProfileProjectRoleParams struct { + ProjectID uuid.UUID `json:"project_id"` + ProfileID uuid.UUID `json:"profile_id"` + RoleID uuid.UUID `json:"role_id"` + GrantedBy pgtype.UUID `json:"granted_by"` +} + +func (q *Queries) CreateProfileProjectRole(ctx context.Context, arg CreateProfileProjectRoleParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, createProfileProjectRole, + arg.ProjectID, + arg.ProfileID, + arg.RoleID, + arg.GrantedBy, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const deleteProfileProjectRole = `-- name: DeleteProfileProjectRole :exec +delete from profile_project_roles where project_id = $1 and profile_id = $2 and role_id = $3 +` + +type DeleteProfileProjectRoleParams struct { + ProjectID uuid.UUID `json:"project_id"` + ProfileID uuid.UUID `json:"profile_id"` + RoleID uuid.UUID `json:"role_id"` +} + +func (q *Queries) DeleteProfileProjectRole(ctx context.Context, arg DeleteProfileProjectRoleParams) error { + _, err := q.db.Exec(ctx, deleteProfileProjectRole, arg.ProjectID, arg.ProfileID, arg.RoleID) + return err +} + +const getProjectMembership = `-- name: GetProjectMembership :one +select id, profile_id, username, email, role_id, role +from v_profile_project_roles +where id = $1 +` + +type GetProjectMembershipRow struct { + ID uuid.UUID `json:"id"` + ProfileID uuid.UUID `json:"profile_id"` + Username string `json:"username"` + Email string `json:"email"` + RoleID uuid.UUID `json:"role_id"` + Role string `json:"role"` 
+} + +func (q *Queries) GetProjectMembership(ctx context.Context, id uuid.UUID) (GetProjectMembershipRow, error) { + row := q.db.QueryRow(ctx, getProjectMembership, id) + var i GetProjectMembershipRow + err := row.Scan( + &i.ID, + &i.ProfileID, + &i.Username, + &i.Email, + &i.RoleID, + &i.Role, + ) + return i, err +} + +const isProjectAdmin = `-- name: IsProjectAdmin :one +select exists ( + select 1 from profile_project_roles pr + inner join role r on r.id = pr.role_id + where pr.profile_id = $1 + and pr.project_id = $2 + and r.name = 'ADMIN' +) +` + +type IsProjectAdminParams struct { + ProfileID uuid.UUID `json:"profile_id"` + ProjectID uuid.UUID `json:"project_id"` +} + +func (q *Queries) IsProjectAdmin(ctx context.Context, arg IsProjectAdminParams) (bool, error) { + row := q.db.QueryRow(ctx, isProjectAdmin, arg.ProfileID, arg.ProjectID) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const isProjectMember = `-- name: IsProjectMember :one +select exists ( + select 1 from profile_project_roles pr + inner join role r on r.id = pr.role_id + where pr.profile_id = $1 + and pr.project_id = $2 + and (r.name = 'MEMBER' or r.name = 'ADMIN') +) +` + +type IsProjectMemberParams struct { + ProfileID uuid.UUID `json:"profile_id"` + ProjectID uuid.UUID `json:"project_id"` +} + +func (q *Queries) IsProjectMember(ctx context.Context, arg IsProjectMemberParams) (bool, error) { + row := q.db.QueryRow(ctx, isProjectMember, arg.ProfileID, arg.ProjectID) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const listProjectMembers = `-- name: ListProjectMembers :many +select id, profile_id, username, email, role_id, role +from v_profile_project_roles +where project_id = $1 +order by email +` + +type ListProjectMembersRow struct { + ID uuid.UUID `json:"id"` + ProfileID uuid.UUID `json:"profile_id"` + Username string `json:"username"` + Email string `json:"email"` + RoleID uuid.UUID `json:"role_id"` + Role string `json:"role"` +} + +func (q 
*Queries) ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]ListProjectMembersRow, error) { + rows, err := q.db.Query(ctx, listProjectMembers, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListProjectMembersRow{} + for rows.Next() { + var i ListProjectMembersRow + if err := rows.Scan( + &i.ID, + &i.ProfileID, + &i.Username, + &i.Email, + &i.RoleID, + &i.Role, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go new file mode 100644 index 00000000..fd72735d --- /dev/null +++ b/api/internal/db/querier.go @@ -0,0 +1,270 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +type Querier interface { + AddTimeseriesToCollectionGroup(ctx context.Context, arg AddTimeseriesToCollectionGroupParams) error + AssignInstrumentToAlertConfig(ctx context.Context, arg AssignInstrumentToAlertConfigParams) error + AssignInstrumentToProject(ctx context.Context, arg AssignInstrumentToProjectParams) error + AssignReportConfigPlotConfig(ctx context.Context, arg AssignReportConfigPlotConfigParams) error + CompleteEvaluationSubmittal(ctx context.Context, id uuid.UUID) error + CreateAlert(ctx context.Context, alertConfigID uuid.UUID) error + CreateAlertConfig(ctx context.Context, arg CreateAlertConfigParams) (uuid.UUID, error) + CreateAlertEmailSubscription(ctx context.Context, arg CreateAlertEmailSubscriptionParams) error + CreateAlertProfileSubscription(ctx context.Context, arg CreateAlertProfileSubscriptionParams) error + CreateAlertProfileSubscriptionOnAnyConflictDoNothing(ctx context.Context, arg CreateAlertProfileSubscriptionOnAnyConflictDoNothingParams) error + CreateAlertRead(ctx context.Context, 
arg CreateAlertReadParams) error + CreateAlerts(ctx context.Context, alertConfigID []uuid.UUID) *CreateAlertsBatchResults + CreateAwarePlatform(ctx context.Context, arg CreateAwarePlatformParams) error + CreateCalculatedTimeseries(ctx context.Context, arg CreateCalculatedTimeseriesParams) (uuid.UUID, error) + CreateCalculation(ctx context.Context, arg CreateCalculationParams) error + CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CollectionGroup, error) + CreateDatalogger(ctx context.Context, arg CreateDataloggerParams) (uuid.UUID, error) + CreateDataloggerError(ctx context.Context, arg CreateDataloggerErrorParams) error + CreateDataloggerHash(ctx context.Context, arg CreateDataloggerHashParams) error + CreateDataloggerTablePreview(ctx context.Context, arg CreateDataloggerTablePreviewParams) error + CreateEvalationInstrument(ctx context.Context, arg CreateEvalationInstrumentParams) error + CreateEvaluation(ctx context.Context, arg CreateEvaluationParams) (uuid.UUID, error) + CreateHeartbeat(ctx context.Context, argTime time.Time) (time.Time, error) + CreateInstrument(ctx context.Context, arg CreateInstrumentParams) (CreateInstrumentRow, error) + CreateInstrumentConstant(ctx context.Context, arg CreateInstrumentConstantParams) error + CreateInstrumentGroup(ctx context.Context, arg CreateInstrumentGroupParams) (CreateInstrumentGroupRow, error) + CreateInstrumentGroupInstruments(ctx context.Context, arg CreateInstrumentGroupInstrumentsParams) error + CreateInstrumentNote(ctx context.Context, arg CreateInstrumentNoteParams) (InstrumentNote, error) + CreateIpiOpts(ctx context.Context, arg CreateIpiOptsParams) error + CreateIpiSegment(ctx context.Context, arg CreateIpiSegmentParams) error + CreateNextEvaluationSubmittal(ctx context.Context, id uuid.UUID) error + CreateNextSubmittalFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error + CreateNextSubmittalFromNewAlertConfigDate(ctx context.Context, arg 
CreateNextSubmittalFromNewAlertConfigDateParams) error + CreateOrUpdateCalculatedTimeseries(ctx context.Context, arg CreateOrUpdateCalculatedTimeseriesParams) error + CreateOrUpdateCalculation(ctx context.Context, arg CreateOrUpdateCalculationParams) error + CreateOrUpdateEquivalencyTableRow(ctx context.Context, arg CreateOrUpdateEquivalencyTableRowParams) error + CreateOrUpdateInstrumentStatus(ctx context.Context, arg CreateOrUpdateInstrumentStatusParams) error + CreateOrUpdateTimeseriesMeasurement(ctx context.Context, arg CreateOrUpdateTimeseriesMeasurementParams) error + CreateOrUpdateTimeseriesNote(ctx context.Context, arg CreateOrUpdateTimeseriesNoteParams) error + CreatePlotBullseyeConfig(ctx context.Context, arg CreatePlotBullseyeConfigParams) error + CreatePlotConfig(ctx context.Context, arg CreatePlotConfigParams) (uuid.UUID, error) + CreatePlotConfigCustomShape(ctx context.Context, arg CreatePlotConfigCustomShapeParams) error + CreatePlotConfigScatterLineLayout(ctx context.Context, arg CreatePlotConfigScatterLineLayoutParams) error + CreatePlotConfigSettings(ctx context.Context, arg CreatePlotConfigSettingsParams) error + CreatePlotConfigTimeseriesTrace(ctx context.Context, arg CreatePlotConfigTimeseriesTraceParams) error + CreatePlotContourConfig(ctx context.Context, arg CreatePlotContourConfigParams) error + CreatePlotContourConfigTimeseries(ctx context.Context, arg CreatePlotContourConfigTimeseriesParams) error + CreatePlotProfileConfig(ctx context.Context, arg CreatePlotProfileConfigParams) error + CreateProfile(ctx context.Context, arg CreateProfileParams) error + CreateProfileProjectRole(ctx context.Context, arg CreateProfileProjectRoleParams) (uuid.UUID, error) + CreateProfileToken(ctx context.Context, arg CreateProfileTokenParams) (ProfileToken, error) + CreateProject(ctx context.Context, arg CreateProjectParams) (CreateProjectRow, error) + CreateReportConfig(ctx context.Context, arg CreateReportConfigParams) (uuid.UUID, error) + 
CreateReportDownloadJob(ctx context.Context, arg CreateReportDownloadJobParams) (ReportDownloadJob, error) + CreateSaaOpts(ctx context.Context, arg CreateSaaOptsParams) error + CreateSaaSegment(ctx context.Context, arg CreateSaaSegmentParams) error + CreateTimeseries(ctx context.Context, arg CreateTimeseriesParams) (CreateTimeseriesRow, error) + CreateTimeseriesCwms(ctx context.Context, arg CreateTimeseriesCwmsParams) error + CreateTimeseriesMeasruement(ctx context.Context, arg CreateTimeseriesMeasruementParams) error + CreateTimeseriesNote(ctx context.Context, arg CreateTimeseriesNoteParams) error + DeleteAlertConfig(ctx context.Context, id uuid.UUID) error + DeleteAlertEmailSubscription(ctx context.Context, arg DeleteAlertEmailSubscriptionParams) error + DeleteAlertProfileSubscription(ctx context.Context, arg DeleteAlertProfileSubscriptionParams) error + DeleteAlertRead(ctx context.Context, arg DeleteAlertReadParams) error + DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + DeleteAllAlertProfileSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID pgtype.UUID) error + DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID pgtype.UUID) error + DeleteAllPlotContourConfigTimeseries(ctx context.Context, plotContourConfigID uuid.UUID) error + DeleteCalculatedTimeseries(ctx context.Context, id uuid.UUID) error + DeleteCollectionGroup(ctx context.Context, arg DeleteCollectionGroupParams) error + DeleteDatalogger(ctx context.Context, arg DeleteDataloggerParams) error + DeleteDataloggerTable(ctx context.Context, id uuid.UUID) error + DeleteDataloggerTableError(ctx context.Context, arg DeleteDataloggerTableErrorParams) error + DeleteEquivalencyTable(ctx context.Context, dataloggerTableID pgtype.UUID) error + DeleteEquivalencyTableRow(ctx context.Context, id uuid.UUID) error + DeleteEvaluation(ctx 
context.Context, id uuid.UUID) error + DeleteFlagInstrument(ctx context.Context, arg DeleteFlagInstrumentParams) error + DeleteFlagInstrumentGroup(ctx context.Context, id uuid.UUID) error + DeleteFlagProject(ctx context.Context, id uuid.UUID) error + DeleteInstrumentConstant(ctx context.Context, arg DeleteInstrumentConstantParams) error + DeleteInstrumentGroupInstruments(ctx context.Context, arg DeleteInstrumentGroupInstrumentsParams) error + DeleteInstrumentNote(ctx context.Context, id uuid.UUID) error + DeleteInstrumentStatus(ctx context.Context, id uuid.UUID) error + DeletePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID) error + DeletePlotConfig(ctx context.Context, arg DeletePlotConfigParams) error + DeletePlotConfigSettings(ctx context.Context, id uuid.UUID) error + DeletePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID) error + DeleteProfileProjectRole(ctx context.Context, arg DeleteProfileProjectRoleParams) error + DeleteReportConfig(ctx context.Context, id uuid.UUID) error + DeleteTimeseries(ctx context.Context, id uuid.UUID) error + DeleteTimeseriesMeasurement(ctx context.Context, arg DeleteTimeseriesMeasurementParams) error + DeleteTimeseriesMeasurements(ctx context.Context, arg DeleteTimeseriesMeasurementsParams) error + DeleteTimeseriesMeasurementsRange(ctx context.Context, arg DeleteTimeseriesMeasurementsRangeParams) error + DeleteTimeseriesNote(ctx context.Context, arg DeleteTimeseriesNoteParams) error + DeleteToken(ctx context.Context, arg DeleteTokenParams) error + GetAlert(ctx context.Context, arg GetAlertParams) (GetAlertRow, error) + GetAlertSubscription(ctx context.Context, arg GetAlertSubscriptionParams) ([]AlertProfileSubscription, error) + GetAlertSubscriptionByID(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) + GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) + GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) 
([]VSaaSegment, error) + GetCollectionGroupDetailsTimeseries(ctx context.Context, arg GetCollectionGroupDetailsTimeseriesParams) (GetCollectionGroupDetailsTimeseriesRow, error) + GetDatalogger(ctx context.Context, id uuid.UUID) (VDatalogger, error) + GetDataloggerByModelSN(ctx context.Context, arg GetDataloggerByModelSNParams) (VDatalogger, error) + GetDataloggerHashByModelSN(ctx context.Context, arg GetDataloggerHashByModelSNParams) (string, error) + GetDataloggerIsActive(ctx context.Context, arg GetDataloggerIsActiveParams) (int32, error) + GetDataloggerModelName(ctx context.Context, id uuid.UUID) (*string, error) + GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) + GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) ([]VDataloggerEquivalencyTable, error) + GetEvaluation(ctx context.Context, id uuid.UUID) (VEvaluation, error) + GetHome(ctx context.Context) (GetHomeRow, error) + GetInstrumentCount(ctx context.Context) (int64, error) + GetIpiMeasurementsForInstrument(ctx context.Context, arg GetIpiMeasurementsForInstrumentParams) ([]VIpiMeasurement, error) + GetIsValidDataloggerTable(ctx context.Context, id uuid.UUID) (bool, error) + GetIsValidEquivalencyTableTimeseries(ctx context.Context, id uuid.UUID) (bool, error) + GetIssuedTokens(ctx context.Context, profileID uuid.UUID) ([]GetIssuedTokensRow, error) + GetLatestHeartbeat(ctx context.Context) (interface{}, error) + GetOrCreateDataloggerTable(ctx context.Context, arg GetOrCreateDataloggerTableParams) (uuid.UUID, error) + GetProfileForEDIPI(ctx context.Context, edipi int64) ([]VProfile, error) + GetProfileForEmail(ctx context.Context, email string) (VProfile, error) + GetProfileForTokenID(ctx context.Context, tokenID string) (GetProfileForTokenIDRow, error) + GetProfileForUsername(ctx context.Context, username string) (VProfile, error) + GetProject(ctx context.Context, id uuid.UUID) (VProject, error) + GetProjectCount(ctx context.Context) 
(int64, error) + GetProjectCountForInstrument(ctx context.Context, instrumentIds []uuid.UUID) (GetProjectCountForInstrumentRow, error) + GetProjectMembership(ctx context.Context, id uuid.UUID) (GetProjectMembershipRow, error) + GetReportConfigByID(ctx context.Context, id uuid.UUID) ([]VReportConfig, error) + GetReportDownloadJob(ctx context.Context, arg GetReportDownloadJobParams) (ReportDownloadJob, error) + GetSaaMeasurementsForInstrument(ctx context.Context, arg GetSaaMeasurementsForInstrumentParams) ([]VSaaMeasurement, error) + GetStoredTimeseriesExists(ctx context.Context, id uuid.UUID) (bool, error) + GetTimeseriesConstantMeasurement(ctx context.Context, arg GetTimeseriesConstantMeasurementParams) ([]GetTimeseriesConstantMeasurementRow, error) + GetTimeseriesCwms(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) + GetTimeseriesProjectMap(ctx context.Context, timeseriesIds []uuid.UUID) ([]VTimeseriesProjectMap, error) + GetTokenInfo(ctx context.Context, tokenID string) (ProfileToken, error) + GetetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) + IsProjectAdmin(ctx context.Context, arg IsProjectAdminParams) (bool, error) + IsProjectMember(ctx context.Context, arg IsProjectMemberParams) (bool, error) + ListAdminProjects(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) + ListAlertConfigSubmittals(ctx context.Context, arg ListAlertConfigSubmittalsParams) ([]VSubmittal, error) + ListAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) + ListAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) + ListAlertConfigsForProjectAndAlertType(ctx context.Context, arg ListAlertConfigsForProjectAndAlertTypeParams) ([]VAlertConfig, error) + ListAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlert, error) + ListAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]ListAlertsForProfileRow, error) + ListAlertsForProject(ctx 
context.Context, projectID uuid.UUID) ([]VAlert, error) + ListAllDataloggers(ctx context.Context) ([]VDatalogger, error) + ListAndCheckAlertConfigs(ctx context.Context) ([]VAlertConfig, error) + ListAwareParameters(ctx context.Context) ([]ListAwareParametersRow, error) + ListAwarePlatformParameterEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) + ListCalculatedTimeseries(ctx context.Context, arg ListCalculatedTimeseriesParams) ([]ListCalculatedTimeseriesRow, error) + ListCollectionGroups(ctx context.Context, arg ListCollectionGroupsParams) ([]ListCollectionGroupsRow, error) + ListDataloggersForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) + ListDistricts(ctx context.Context) ([]VDistrict, error) + ListDomainGroups(ctx context.Context) ([]VDomainGroup, error) + ListDomains(ctx context.Context) ([]VDomain, error) + ListEmailAutocomplete(ctx context.Context, arg ListEmailAutocompleteParams) ([]ListEmailAutocompleteRow, error) + ListEvaluationDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListEvaluationDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) + ListHeartbeats(ctx context.Context) ([]time.Time, error) + ListIncompleteEvaluationSubmittals(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) + ListIncompleteMeasurementSubmittals(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) + ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) + ListInstrumentEvaluations(ctx context.Context, instrumentID pgtype.UUID) ([]VEvaluation, error) + ListInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID) ([]ListInstrumentGroupInstrumentsRow, error) + ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) + ListInstrumentGroups(ctx context.Context, id interface{}) ([]ListInstrumentGroupsRow, error) + ListInstrumentGroupsForProject(ctx context.Context, projectID 
pgtype.UUID) ([]VInstrumentGroup, error) + ListInstrumentIDNamesByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]ListInstrumentIDNamesByIDsRow, error) + ListInstrumentNotes(ctx context.Context, arg ListInstrumentNotesParams) ([]InstrumentNote, error) + ListInstrumentProjects(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) + ListInstrumentStatuses(ctx context.Context, arg ListInstrumentStatusesParams) ([]ListInstrumentStatusesRow, error) + ListInstrumentSubmittals(ctx context.Context, arg ListInstrumentSubmittalsParams) ([]VSubmittal, error) + ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) + ListInstruments(ctx context.Context, id interface{}) ([]ListInstrumentsRow, error) + ListInstrumentsForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) + ListMeasurementDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListMeasurementDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) + ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) + ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, arg ListPlotConfigMeasurementsBullseyePlotParams) ([]ListPlotConfigMeasurementsBullseyePlotRow, error) + ListPlotConfigMeasurementsContourPlot(ctx context.Context, arg ListPlotConfigMeasurementsContourPlotParams) ([]ListPlotConfigMeasurementsContourPlotRow, error) + ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID pgtype.UUID) ([]VTimeseries, error) + ListPlotConfigs(ctx context.Context, arg ListPlotConfigsParams) ([]ListPlotConfigsRow, error) + ListPlotContourConfigTimes(ctx context.Context, arg ListPlotContourConfigTimesParams) ([]time.Time, error) + ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) + ListProjectEvaluationsByAlertConfig(ctx context.Context, arg ListProjectEvaluationsByAlertConfigParams) ([]VEvaluation, error) + ListProjectMembers(ctx 
context.Context, projectID uuid.UUID) ([]ListProjectMembersRow, error) + ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) + ListProjectSubmittals(ctx context.Context, arg ListProjectSubmittalsParams) ([]VSubmittal, error) + ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) + ListProjects(ctx context.Context) ([]VProject, error) + ListProjectsForFederalID(ctx context.Context) ([]VProject, error) + ListProjectsForProfileRole(ctx context.Context, arg ListProjectsForProfileRoleParams) ([]VProject, error) + ListReportConfigPlotConfigs(ctx context.Context, reportConfigID uuid.UUID) ([]VPlotConfiguration, error) + ListTimeseries(ctx context.Context, id uuid.UUID) ([]VTimeseries, error) + ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) + ListTimeseriesMeasurements(ctx context.Context, arg ListTimeseriesMeasurementsParams) ([]ListTimeseriesMeasurementsRow, error) + ListUnits(ctx context.Context) ([]VUnit, error) + ListUnverifiedMissingSubmittals(ctx context.Context) ([]VSubmittal, error) + RegisterEmail(ctx context.Context, email string) (uuid.UUID, error) + RemoveTimeseriesFromCollectionGroup(ctx context.Context, arg RemoveTimeseriesFromCollectionGroupParams) error + RenameEmptyDataloggerTableName(ctx context.Context, arg RenameEmptyDataloggerTableNameParams) error + ResetDataloggerTableName(ctx context.Context, id uuid.UUID) error + SearchProjects(ctx context.Context, arg SearchProjectsParams) ([]VProject, error) + UnassignAllInstrumentsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID pgtype.UUID) error + UnassignAllReportConfigPlotConfig(ctx context.Context, reportConfigID uuid.UUID) error + UnassignInstrumentFromProject(ctx context.Context, arg UnassignInstrumentFromProjectParams) error + UnassignReportConfigPlotConfig(ctx context.Context, arg 
UnassignReportConfigPlotConfigParams) error + UnregisterEmail(ctx context.Context, id uuid.UUID) error + UpdateAlertConfig(ctx context.Context, arg UpdateAlertConfigParams) error + UpdateAlertConfigLastReminded(ctx context.Context, arg UpdateAlertConfigLastRemindedParams) error + UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (CollectionGroup, error) + UpdateDatalogger(ctx context.Context, arg UpdateDataloggerParams) error + UpdateDataloggerHash(ctx context.Context, arg UpdateDataloggerHashParams) error + UpdateDataloggerTablePreview(ctx context.Context, arg UpdateDataloggerTablePreviewParams) error + UpdateDataloggerUpdater(ctx context.Context, arg UpdateDataloggerUpdaterParams) error + UpdateEquivalencyTableRow(ctx context.Context, arg UpdateEquivalencyTableRowParams) error + UpdateEvaluation(ctx context.Context, arg UpdateEvaluationParams) error + UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID pgtype.UUID) (uuid.UUID, error) + UpdateInstrument(ctx context.Context, arg UpdateInstrumentParams) error + UpdateInstrumentGeometry(ctx context.Context, arg UpdateInstrumentGeometryParams) (uuid.UUID, error) + UpdateInstrumentGroup(ctx context.Context, arg UpdateInstrumentGroupParams) (InstrumentGroup, error) + UpdateInstrumentNote(ctx context.Context, arg UpdateInstrumentNoteParams) (InstrumentNote, error) + UpdateIpiOpts(ctx context.Context, arg UpdateIpiOptsParams) error + UpdateIpiSegment(ctx context.Context, arg UpdateIpiSegmentParams) error + UpdateMyAlertSubscription(ctx context.Context, arg UpdateMyAlertSubscriptionParams) error + UpdatePlotBullseyeConfig(ctx context.Context, arg UpdatePlotBullseyeConfigParams) error + UpdatePlotConfig(ctx context.Context, arg UpdatePlotConfigParams) error + UpdatePlotConfigCustomShape(ctx context.Context, arg UpdatePlotConfigCustomShapeParams) error + UpdatePlotConfigScatterLineLayout(ctx context.Context, arg UpdatePlotConfigScatterLineLayoutParams) error + 
UpdatePlotConfigTimeseriesTrace(ctx context.Context, arg UpdatePlotConfigTimeseriesTraceParams) error + UpdatePlotContourConfig(ctx context.Context, arg UpdatePlotContourConfigParams) error + UpdatePlotProfileConfig(ctx context.Context, arg UpdatePlotProfileConfigParams) error + UpdateProfileForEDIPI(ctx context.Context, arg UpdateProfileForEDIPIParams) error + UpdateProfileForEmail(ctx context.Context, arg UpdateProfileForEmailParams) error + UpdateProfileForUsername(ctx context.Context, arg UpdateProfileForUsernameParams) error + UpdateProject(ctx context.Context, arg UpdateProjectParams) (uuid.UUID, error) + UpdateProjectImage(ctx context.Context, arg UpdateProjectImageParams) error + UpdateReportConfig(ctx context.Context, arg UpdateReportConfigParams) error + UpdateReportDownloadJob(ctx context.Context, arg UpdateReportDownloadJobParams) error + UpdateSaaOpts(ctx context.Context, arg UpdateSaaOptsParams) error + UpdateSaaSegment(ctx context.Context, arg UpdateSaaSegmentParams) error + UpdateSubmittal(ctx context.Context, arg UpdateSubmittalParams) error + UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, arg UpdateSubmittalCompletionDateOrWarningSentParams) error + UpdateTimeseries(ctx context.Context, arg UpdateTimeseriesParams) (uuid.UUID, error) + UpdateTimeseriesCwms(ctx context.Context, arg UpdateTimeseriesCwmsParams) error + ValidateInstrumentNamesProjectUnique(ctx context.Context, arg ValidateInstrumentNamesProjectUniqueParams) ([]string, error) + ValidateInstrumentsAssignerAuthorized(ctx context.Context, arg ValidateInstrumentsAssignerAuthorizedParams) ([]ValidateInstrumentsAssignerAuthorizedRow, error) + ValidateProjectsAssignerAuthorized(ctx context.Context, arg ValidateProjectsAssignerAuthorizedParams) ([]string, error) + ValidateProjectsInstrumentNameUnique(ctx context.Context, arg ValidateProjectsInstrumentNameUniqueParams) ([]ValidateProjectsInstrumentNameUniqueRow, error) + VerifyDataloggerExists(ctx context.Context, id uuid.UUID) 
(bool, error) + VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID pgtype.UUID) error + VerifyMissingSubmittal(ctx context.Context, id uuid.UUID) error +} + +var _ Querier = (*Queries)(nil) diff --git a/api/internal/db/report_config.sql_gen.go b/api/internal/db/report_config.sql_gen.go new file mode 100644 index 00000000..1a491523 --- /dev/null +++ b/api/internal/db/report_config.sql_gen.go @@ -0,0 +1,339 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: report_config.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const assignReportConfigPlotConfig = `-- name: AssignReportConfigPlotConfig :exec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2) +` + +type AssignReportConfigPlotConfigParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) AssignReportConfigPlotConfig(ctx context.Context, arg AssignReportConfigPlotConfigParams) error { + _, err := q.db.Exec(ctx, assignReportConfigPlotConfig, arg.ReportConfigID, arg.PlotConfigID) + return err +} + +const createReportConfig = `-- name: CreateReportConfig :one +insert into report_config ( + name, slug, project_id, creator, description, date_range, date_range_enabled, + show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled +) +values ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) +returning id +` + +type CreateReportConfigParams struct { + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + Description string `json:"description"` + DateRange *string `json:"date_range"` + DateRangeEnabled *bool `json:"date_range_enabled"` + ShowMasked *bool `json:"show_masked"` + ShowMaskedEnabled *bool `json:"show_masked_enabled"` + ShowNonvalidated 
*bool `json:"show_nonvalidated"` + ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` +} + +func (q *Queries) CreateReportConfig(ctx context.Context, arg CreateReportConfigParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, createReportConfig, + arg.Name, + arg.ProjectID, + arg.Creator, + arg.Description, + arg.DateRange, + arg.DateRangeEnabled, + arg.ShowMasked, + arg.ShowMaskedEnabled, + arg.ShowNonvalidated, + arg.ShowNonvalidatedEnabled, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const createReportDownloadJob = `-- name: CreateReportDownloadJob :one +insert into report_download_job (report_config_id, creator) values ($1, $2) returning id, report_config_id, creator, create_date, status, file_key, file_expiry, progress, progress_update_date +` + +type CreateReportDownloadJobParams struct { + ReportConfigID pgtype.UUID `json:"report_config_id"` + Creator uuid.UUID `json:"creator"` +} + +func (q *Queries) CreateReportDownloadJob(ctx context.Context, arg CreateReportDownloadJobParams) (ReportDownloadJob, error) { + row := q.db.QueryRow(ctx, createReportDownloadJob, arg.ReportConfigID, arg.Creator) + var i ReportDownloadJob + err := row.Scan( + &i.ID, + &i.ReportConfigID, + &i.Creator, + &i.CreateDate, + &i.Status, + &i.FileKey, + &i.FileExpiry, + &i.Progress, + &i.ProgressUpdateDate, + ) + return i, err +} + +const deleteReportConfig = `-- name: DeleteReportConfig :exec +delete from report_config where id=$1 +` + +func (q *Queries) DeleteReportConfig(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteReportConfig, id) + return err +} + +const getReportConfigByID = `-- name: GetReportConfigByID :many +select id, slug, name, description, project_id, project_name, district_name, creator, creator_username, create_date, updater, updater_username, update_date, plot_configs, global_overrides from v_report_config where id = $1 +` + +func (q *Queries) GetReportConfigByID(ctx context.Context, id uuid.UUID) 
([]VReportConfig, error) { + rows, err := q.db.Query(ctx, getReportConfigByID, id) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VReportConfig{} + for rows.Next() { + var i VReportConfig + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.ProjectID, + &i.ProjectName, + &i.DistrictName, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.PlotConfigs, + &i.GlobalOverrides, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getReportDownloadJob = `-- name: GetReportDownloadJob :one +select id, report_config_id, creator, create_date, status, file_key, file_expiry, progress, progress_update_date from report_download_job where id=$1 and creator=$2 +` + +type GetReportDownloadJobParams struct { + ID uuid.UUID `json:"id"` + Creator uuid.UUID `json:"creator"` +} + +func (q *Queries) GetReportDownloadJob(ctx context.Context, arg GetReportDownloadJobParams) (ReportDownloadJob, error) { + row := q.db.QueryRow(ctx, getReportDownloadJob, arg.ID, arg.Creator) + var i ReportDownloadJob + err := row.Scan( + &i.ID, + &i.ReportConfigID, + &i.Creator, + &i.CreateDate, + &i.Status, + &i.FileKey, + &i.FileExpiry, + &i.Progress, + &i.ProgressUpdateDate, + ) + return i, err +} + +const listProjectReportConfigs = `-- name: ListProjectReportConfigs :many +select id, slug, name, description, project_id, project_name, district_name, creator, creator_username, create_date, updater, updater_username, update_date, plot_configs, global_overrides from v_report_config where project_id = $1 +` + +func (q *Queries) ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) { + rows, err := q.db.Query(ctx, listProjectReportConfigs, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VReportConfig{} + for rows.Next() { 
+ var i VReportConfig + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.ProjectID, + &i.ProjectName, + &i.DistrictName, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.PlotConfigs, + &i.GlobalOverrides, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listReportConfigPlotConfigs = `-- name: ListReportConfigPlotConfigs :many +select id, slug, name, project_id, creator, create_date, updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where id = any( + select plot_config_id from report_config_plot_config where report_config_id = $1 +) +` + +func (q *Queries) ListReportConfigPlotConfigs(ctx context.Context, reportConfigID uuid.UUID) ([]VPlotConfiguration, error) { + rows, err := q.db.Query(ctx, listReportConfigPlotConfigs, reportConfigID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VPlotConfiguration{} + for rows.Next() { + var i VPlotConfiguration + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.ProjectID, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ShowMasked, + &i.ShowNonvalidated, + &i.ShowComments, + &i.AutoRange, + &i.DateRange, + &i.Threshold, + &i.ReportConfigs, + &i.PlotType, + &i.Display, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const unassignAllReportConfigPlotConfig = `-- name: UnassignAllReportConfigPlotConfig :exec +delete from report_config_plot_config where report_config_id=$1 +` + +func (q *Queries) UnassignAllReportConfigPlotConfig(ctx context.Context, reportConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, unassignAllReportConfigPlotConfig, reportConfigID) + 
return err +} + +const unassignReportConfigPlotConfig = `-- name: UnassignReportConfigPlotConfig :exec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2 +` + +type UnassignReportConfigPlotConfigParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) UnassignReportConfigPlotConfig(ctx context.Context, arg UnassignReportConfigPlotConfigParams) error { + _, err := q.db.Exec(ctx, unassignReportConfigPlotConfig, arg.ReportConfigID, arg.PlotConfigID) + return err +} + +const updateReportConfig = `-- name: UpdateReportConfig :exec +update report_config set name=$2, +updater=$3, update_date=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, +show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 where id=$1 +` + +type UpdateReportConfigParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater pgtype.UUID `json:"updater"` + UpdateDate pgtype.Timestamptz `json:"update_date"` + Description string `json:"description"` + DateRange *string `json:"date_range"` + DateRangeEnabled *bool `json:"date_range_enabled"` + ShowMasked *bool `json:"show_masked"` + ShowMaskedEnabled *bool `json:"show_masked_enabled"` + ShowNonvalidated *bool `json:"show_nonvalidated"` + ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` +} + +func (q *Queries) UpdateReportConfig(ctx context.Context, arg UpdateReportConfigParams) error { + _, err := q.db.Exec(ctx, updateReportConfig, + arg.ID, + arg.Name, + arg.Updater, + arg.UpdateDate, + arg.Description, + arg.DateRange, + arg.DateRangeEnabled, + arg.ShowMasked, + arg.ShowMaskedEnabled, + arg.ShowNonvalidated, + arg.ShowNonvalidatedEnabled, + ) + return err +} + +const updateReportDownloadJob = `-- name: UpdateReportDownloadJob :exec +update report_download_job set status=$2, progress=$3, progress_update_date=$4, file_key=$5, file_expiry=$6 where id=$1 +` 
+ +type UpdateReportDownloadJobParams struct { + ID uuid.UUID `json:"id"` + Status JobStatus `json:"status"` + Progress int32 `json:"progress"` + ProgressUpdateDate time.Time `json:"progress_update_date"` + FileKey *string `json:"file_key"` + FileExpiry pgtype.Timestamptz `json:"file_expiry"` +} + +func (q *Queries) UpdateReportDownloadJob(ctx context.Context, arg UpdateReportDownloadJobParams) error { + _, err := q.db.Exec(ctx, updateReportDownloadJob, + arg.ID, + arg.Status, + arg.Progress, + arg.ProgressUpdateDate, + arg.FileKey, + arg.FileExpiry, + ) + return err +} diff --git a/api/internal/db/submittal.sql_gen.go b/api/internal/db/submittal.sql_gen.go new file mode 100644 index 00000000..e2a7f51f --- /dev/null +++ b/api/internal/db/submittal.sql_gen.go @@ -0,0 +1,251 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: submittal.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const listAlertConfigSubmittals = `-- name: ListAlertConfigSubmittals :many +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, create_date, due_date, marked_as_missing, warning_sent +from v_submittal +where alert_config_id = $1 +and ($2 = false or (completion_date is null and not marked_as_missing)) +order by due_date desc +` + +type ListAlertConfigSubmittalsParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ShowIncompleteMissing interface{} `json:"show_incomplete_missing"` +} + +func (q *Queries) ListAlertConfigSubmittals(ctx context.Context, arg ListAlertConfigSubmittalsParams) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, listAlertConfigSubmittals, arg.AlertConfigID, arg.ShowIncompleteMissing) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSubmittal{} + for rows.Next() { + var i VSubmittal + if err := 
rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.AlertConfigName, + &i.AlertTypeID, + &i.AlertTypeName, + &i.ProjectID, + &i.SubmittalStatusID, + &i.SubmittalStatusName, + &i.CompletionDate, + &i.CreateDate, + &i.DueDate, + &i.MarkedAsMissing, + &i.WarningSent, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listInstrumentSubmittals = `-- name: ListInstrumentSubmittals :many +select sub.id, sub.alert_config_id, sub.alert_config_name, sub.alert_type_id, sub.alert_type_name, sub.project_id, sub.submittal_status_id, sub.submittal_status_name, sub.completion_date, sub.create_date, sub.due_date, sub.marked_as_missing, sub.warning_sent +from v_submittal sub +inner join alert_config_instrument aci on aci.alert_config_id = sub.alert_config_id +where aci.instrument_id = $1 +and ($2 = false or (completion_date is null and not marked_as_missing)) +order by sub.due_date desc +` + +type ListInstrumentSubmittalsParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ShowIncompleteMissing interface{} `json:"show_incomplete_missing"` +} + +func (q *Queries) ListInstrumentSubmittals(ctx context.Context, arg ListInstrumentSubmittalsParams) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, listInstrumentSubmittals, arg.InstrumentID, arg.ShowIncompleteMissing) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSubmittal{} + for rows.Next() { + var i VSubmittal + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.AlertConfigName, + &i.AlertTypeID, + &i.AlertTypeName, + &i.ProjectID, + &i.SubmittalStatusID, + &i.SubmittalStatusName, + &i.CompletionDate, + &i.CreateDate, + &i.DueDate, + &i.MarkedAsMissing, + &i.WarningSent, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listProjectSubmittals = `-- name: ListProjectSubmittals 
:many +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, create_date, due_date, marked_as_missing, warning_sent +from v_submittal +where project_id = $1 +and ($2 = false or (completion_date is null and not marked_as_missing)) +order by due_date desc, alert_type_name asc +` + +type ListProjectSubmittalsParams struct { + ProjectID uuid.UUID `json:"project_id"` + ShowIncompleteMissing interface{} `json:"show_incomplete_missing"` +} + +func (q *Queries) ListProjectSubmittals(ctx context.Context, arg ListProjectSubmittalsParams) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, listProjectSubmittals, arg.ProjectID, arg.ShowIncompleteMissing) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSubmittal{} + for rows.Next() { + var i VSubmittal + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.AlertConfigName, + &i.AlertTypeID, + &i.AlertTypeName, + &i.ProjectID, + &i.SubmittalStatusID, + &i.SubmittalStatusName, + &i.CompletionDate, + &i.CreateDate, + &i.DueDate, + &i.MarkedAsMissing, + &i.WarningSent, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listUnverifiedMissingSubmittals = `-- name: ListUnverifiedMissingSubmittals :many +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, create_date, due_date, marked_as_missing, warning_sent +from v_submittal +where completion_date is null +and not marked_as_missing +order by due_date desc +` + +func (q *Queries) ListUnverifiedMissingSubmittals(ctx context.Context) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, listUnverifiedMissingSubmittals) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSubmittal{} + for rows.Next() { + var i VSubmittal + if err 
:= rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.AlertConfigName, + &i.AlertTypeID, + &i.AlertTypeName, + &i.ProjectID, + &i.SubmittalStatusID, + &i.SubmittalStatusName, + &i.CompletionDate, + &i.CreateDate, + &i.DueDate, + &i.MarkedAsMissing, + &i.WarningSent, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateSubmittal = `-- name: UpdateSubmittal :exec +update submittal set + submittal_status_id = $2, + completion_date = $3, + warning_sent = $4 +where id = $1 +` + +type UpdateSubmittalParams struct { + ID uuid.UUID `json:"id"` + SubmittalStatusID pgtype.UUID `json:"submittal_status_id"` + CompletionDate pgtype.Timestamptz `json:"completion_date"` + WarningSent bool `json:"warning_sent"` +} + +func (q *Queries) UpdateSubmittal(ctx context.Context, arg UpdateSubmittalParams) error { + _, err := q.db.Exec(ctx, updateSubmittal, + arg.ID, + arg.SubmittalStatusID, + arg.CompletionDate, + arg.WarningSent, + ) + return err +} + +const verifyMissingAlertConfigSubmittals = `-- name: VerifyMissingAlertConfigSubmittals :exec +update submittal set + submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, + marked_as_missing = true +where alert_config_id = $1 +and completion_date is null +and now() > due_date +` + +func (q *Queries) VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID pgtype.UUID) error { + _, err := q.db.Exec(ctx, verifyMissingAlertConfigSubmittals, alertConfigID) + return err +} + +const verifyMissingSubmittal = `-- name: VerifyMissingSubmittal :exec +update submittal set + submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, + marked_as_missing = true +where id = $1 +and completion_date is null +and now() > due_date +` + +func (q *Queries) VerifyMissingSubmittal(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, verifyMissingSubmittal, id) + return err +} diff --git 
a/api/internal/db/timeseries.sql_gen.go b/api/internal/db/timeseries.sql_gen.go new file mode 100644 index 00000000..49f001a2 --- /dev/null +++ b/api/internal/db/timeseries.sql_gen.go @@ -0,0 +1,329 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: timeseries.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createTimeseries = `-- name: CreateTimeseries :one +insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) +values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) +returning id, instrument_id, slug, name, parameter_id, unit_id, type +` + +type CreateTimeseriesParams struct { + InstrumentID pgtype.UUID `json:"instrument_id"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type NullTimeseriesType `json:"type"` +} + +type CreateTimeseriesRow struct { + ID uuid.UUID `json:"id"` + InstrumentID pgtype.UUID `json:"instrument_id"` + Slug string `json:"slug"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type NullTimeseriesType `json:"type"` +} + +func (q *Queries) CreateTimeseries(ctx context.Context, arg CreateTimeseriesParams) (CreateTimeseriesRow, error) { + row := q.db.QueryRow(ctx, createTimeseries, + arg.InstrumentID, + arg.Name, + arg.ParameterID, + arg.UnitID, + arg.Type, + ) + var i CreateTimeseriesRow + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Slug, + &i.Name, + &i.ParameterID, + &i.UnitID, + &i.Type, + ) + return i, err +} + +const deleteTimeseries = `-- name: DeleteTimeseries :exec +delete from timeseries where id = $1 +` + +func (q *Queries) DeleteTimeseries(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteTimeseries, id) + return err +} + +const getStoredTimeseriesExists = `-- name: GetStoredTimeseriesExists :one +select exists 
(select id from v_timeseries_stored where id = $1) +` + +func (q *Queries) GetStoredTimeseriesExists(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, getStoredTimeseriesExists, id) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const getTimeseriesProjectMap = `-- name: GetTimeseriesProjectMap :many +select timeseries_id, project_id +from v_timeseries_project_map +where timeseries_id in ($1::uuid[]) +` + +func (q *Queries) GetTimeseriesProjectMap(ctx context.Context, timeseriesIds []uuid.UUID) ([]VTimeseriesProjectMap, error) { + rows, err := q.db.Query(ctx, getTimeseriesProjectMap, timeseriesIds) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseriesProjectMap{} + for rows.Next() { + var i VTimeseriesProjectMap + if err := rows.Scan(&i.TimeseriesID, &i.ProjectID); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listInstrumentGroupTimeseries = `-- name: ListInstrumentGroupTimeseries :many +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join instrument_group_instruments gi on gi.instrument_id = t.instrument_id +where gi.instrument_group_id = $1 +` + +func (q *Queries) ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, listInstrumentGroupTimeseries, instrumentGroupID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = 
append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listInstrumentTimeseries = `-- name: ListInstrumentTimeseries :many +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries +where instrument_id = $1 +` + +func (q *Queries) ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, listInstrumentTimeseries, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listPlotConfigTimeseries = `-- name: ListPlotConfigTimeseries :many +SELECT t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit FROM v_timeseries t +INNER JOIN plot_configuration_timeseries_trace pct ON pct.timeseries_id = t.id +WHERE pct.plot_configuration_id = $1 +` + +func (q *Queries) ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID pgtype.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, listPlotConfigTimeseries, plotConfigurationID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, 
err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listProjectTimeseries = `-- name: ListProjectTimeseries :many +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join project_instrument p on p.instrument_id = t.instrument_id +where p.project_id = $1 +` + +func (q *Queries) ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, listProjectTimeseries, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listTimeseries = `-- name: ListTimeseries :many +SELECT id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit FROM v_timeseries WHERE id = $1 +` + +func (q *Queries) ListTimeseries(ctx context.Context, id uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, listTimeseries, id) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return 
items, nil +} + +const updateTimeseries = `-- name: UpdateTimeseries :one +update timeseries set name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 +where id = $1 +returning id +` + +type UpdateTimeseriesParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + InstrumentID pgtype.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` +} + +func (q *Queries) UpdateTimeseries(ctx context.Context, arg UpdateTimeseriesParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, updateTimeseries, + arg.ID, + arg.Name, + arg.InstrumentID, + arg.ParameterID, + arg.UnitID, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} diff --git a/api/internal/db/timeseries_calculated.sql_gen.go b/api/internal/db/timeseries_calculated.sql_gen.go new file mode 100644 index 00000000..3c944d74 --- /dev/null +++ b/api/internal/db/timeseries_calculated.sql_gen.go @@ -0,0 +1,187 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: timeseries_calculated.sql + +package db + +import ( + "context" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createCalculatedTimeseries = `-- name: CreateCalculatedTimeseries :one +INSERT INTO timeseries ( + instrument_id, + parameter_id, + unit_id, + slug, + name, + type +) VALUES ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') +RETURNING id +` + +type CreateCalculatedTimeseriesParams struct { + InstrumentID pgtype.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Name string `json:"name"` +} + +func (q *Queries) CreateCalculatedTimeseries(ctx context.Context, arg CreateCalculatedTimeseriesParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, createCalculatedTimeseries, + arg.InstrumentID, + arg.ParameterID, + arg.UnitID, + arg.Name, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const createCalculation = `-- name: CreateCalculation :exec +insert into calculation (timeseries_id, contents) values ($1,$2) +` + +type CreateCalculationParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Contents *string `json:"contents"` +} + +func (q *Queries) CreateCalculation(ctx context.Context, arg CreateCalculationParams) error { + _, err := q.db.Exec(ctx, createCalculation, arg.TimeseriesID, arg.Contents) + return err +} + +const createOrUpdateCalculatedTimeseries = `-- name: CreateOrUpdateCalculatedTimeseries :exec +insert into timeseries ( + id, + instrument_id, + parameter_id, + unit_id, + slug, + name, + type +) values ($1, $2, $3, $4, slugify($5, 'timeseries'), $5, 'computed') +on conflict (id) do update set + instrument_id = coalesce(excluded.instrument_id, $6), + parameter_id = coalesce(excluded.parameter_id, $7), + unit_id = coalesce(excluded.unit_id, $8), + slug = coalesce(excluded.slug, slugify($9, 'timeseries')), + name = coalesce(excluded.name, $9), + type 
= 'computed' +` + +type CreateOrUpdateCalculatedTimeseriesParams struct { + ID uuid.UUID `json:"id"` + InstrumentID pgtype.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Name string `json:"name"` + InstrumentID_2 pgtype.UUID `json:"instrument_id_2"` + ParameterID_2 uuid.UUID `json:"parameter_id_2"` + UnitID_2 uuid.UUID `json:"unit_id_2"` + Rawname string `json:"rawname"` +} + +func (q *Queries) CreateOrUpdateCalculatedTimeseries(ctx context.Context, arg CreateOrUpdateCalculatedTimeseriesParams) error { + _, err := q.db.Exec(ctx, createOrUpdateCalculatedTimeseries, + arg.ID, + arg.InstrumentID, + arg.ParameterID, + arg.UnitID, + arg.Name, + arg.InstrumentID_2, + arg.ParameterID_2, + arg.UnitID_2, + arg.Rawname, + ) + return err +} + +const createOrUpdateCalculation = `-- name: CreateOrUpdateCalculation :exec +insert into calculation (timeseries_id, contents) values ($1, $2) +on conflict (timeseries_id) do update set contents = coalesce(excluded.contents, $3) +` + +type CreateOrUpdateCalculationParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Contents *string `json:"contents"` + Contents_2 *string `json:"contents_2"` +} + +func (q *Queries) CreateOrUpdateCalculation(ctx context.Context, arg CreateOrUpdateCalculationParams) error { + _, err := q.db.Exec(ctx, createOrUpdateCalculation, arg.TimeseriesID, arg.Contents, arg.Contents_2) + return err +} + +const deleteCalculatedTimeseries = `-- name: DeleteCalculatedTimeseries :exec +delete from timeseries where id = $1 and id in (select timeseries_id from calculation) +` + +func (q *Queries) DeleteCalculatedTimeseries(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteCalculatedTimeseries, id) + return err +} + +const listCalculatedTimeseries = `-- name: ListCalculatedTimeseries :many +select + id, + instrument_id, + parameter_id, + unit_id, + slug, + name as formula_name, + coalesce(contents, '') as formula +from 
v_timeseries_computed +where ($1 is null or instrument_id = $1) +and ($2 is null or id = $2) +` + +type ListCalculatedTimeseriesParams struct { + InstrumentID interface{} `json:"instrument_id"` + ID interface{} `json:"id"` +} + +type ListCalculatedTimeseriesRow struct { + ID uuid.UUID `json:"id"` + InstrumentID pgtype.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Slug string `json:"slug"` + FormulaName string `json:"formula_name"` + Formula string `json:"formula"` +} + +func (q *Queries) ListCalculatedTimeseries(ctx context.Context, arg ListCalculatedTimeseriesParams) ([]ListCalculatedTimeseriesRow, error) { + rows, err := q.db.Query(ctx, listCalculatedTimeseries, arg.InstrumentID, arg.ID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListCalculatedTimeseriesRow{} + for rows.Next() { + var i ListCalculatedTimeseriesRow + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.ParameterID, + &i.UnitID, + &i.Slug, + &i.FormulaName, + &i.Formula, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/timeseries_cwms.sql_gen.go b/api/internal/db/timeseries_cwms.sql_gen.go new file mode 100644 index 00000000..399075c2 --- /dev/null +++ b/api/internal/db/timeseries_cwms.sql_gen.go @@ -0,0 +1,140 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: timeseries_cwms.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" + uuid "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const createTimeseriesCwms = `-- name: CreateTimeseriesCwms :exec +insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values +($1, $2, $3, $4, $5) +` + +type CreateTimeseriesCwmsParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime pgtype.Timestamptz `json:"cwms_extent_latest_time"` +} + +func (q *Queries) CreateTimeseriesCwms(ctx context.Context, arg CreateTimeseriesCwmsParams) error { + _, err := q.db.Exec(ctx, createTimeseriesCwms, + arg.TimeseriesID, + arg.CwmsTimeseriesID, + arg.CwmsOfficeID, + arg.CwmsExtentEarliestTime, + arg.CwmsExtentLatestTime, + ) + return err +} + +const getTimeseriesCwms = `-- name: GetTimeseriesCwms :one +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time from v_timeseries_cwms +where id = $1 +` + +func (q *Queries) GetTimeseriesCwms(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) { + row := q.db.QueryRow(ctx, getTimeseriesCwms, id) + var i VTimeseriesCwm + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + &i.CwmsTimeseriesID, + &i.CwmsOfficeID, + &i.CwmsExtentEarliestTime, + &i.CwmsExtentLatestTime, + ) + return i, err +} + +const listTimeseriesCwms = `-- name: ListTimeseriesCwms :many +select id, slug, name, 
type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time from v_timeseries_cwms +where instrument_id = $1 +` + +func (q *Queries) ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) { + rows, err := q.db.Query(ctx, listTimeseriesCwms, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseriesCwm{} + for rows.Next() { + var i VTimeseriesCwm + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + &i.CwmsTimeseriesID, + &i.CwmsOfficeID, + &i.CwmsExtentEarliestTime, + &i.CwmsExtentLatestTime, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateTimeseriesCwms = `-- name: UpdateTimeseriesCwms :exec +update timeseries_cwms set + cwms_timeseries_id=$2, + cwms_office_id=$3, + cwms_extent_earliest_time=$4, + cwms_extent_latest_time=$5 +where timeseries_id=$1 +` + +type UpdateTimeseriesCwmsParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime pgtype.Timestamptz `json:"cwms_extent_latest_time"` +} + +func (q *Queries) UpdateTimeseriesCwms(ctx context.Context, arg UpdateTimeseriesCwmsParams) error { + _, err := q.db.Exec(ctx, updateTimeseriesCwms, + arg.TimeseriesID, + arg.CwmsTimeseriesID, + arg.CwmsOfficeID, + arg.CwmsExtentEarliestTime, + arg.CwmsExtentLatestTime, + ) + return err +} diff --git a/api/internal/db/unit.sql_gen.go b/api/internal/db/unit.sql_gen.go new file mode 100644 index 
00000000..9a1010c9 --- /dev/null +++ b/api/internal/db/unit.sql_gen.go @@ -0,0 +1,44 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: unit.sql + +package db + +import ( + "context" +) + +const listUnits = `-- name: ListUnits :many +select id, name, abbreviation, unit_family_id, unit_family, measure_id, measure +from v_unit +order by name +` + +func (q *Queries) ListUnits(ctx context.Context) ([]VUnit, error) { + rows, err := q.db.Query(ctx, listUnits) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VUnit{} + for rows.Next() { + var i VUnit + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Abbreviation, + &i.UnitFamilyID, + &i.UnitFamily, + &i.MeasureID, + &i.Measure, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/handler/handler.go b/api/internal/handler/handler.go index 6eae2c25..e0e96f7b 100644 --- a/api/internal/handler/handler.go +++ b/api/internal/handler/handler.go @@ -1,173 +1,173 @@ -package handler - -import ( - "net/http" - "strings" - "time" - - "github.com/USACE/instrumentation-api/api/internal/cloud" - "github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/middleware" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/USACE/instrumentation-api/api/internal/service" -) - -func newHttpClient() *http.Client { - return &http.Client{ - Timeout: time.Second * 60, - CheckRedirect: func(req *http.Request, via []*http.Request) error { - return nil - }, - } -} - -type ApiHandler struct { - Middleware middleware.Middleware - BlobService cloud.Blob - AlertService service.AlertService - AlertConfigService service.AlertConfigService - AlertSubscriptionService service.AlertSubscriptionService - EmailAutocompleteService service.EmailAutocompleteService - AwareParameterService 
service.AwareParameterService - CollectionGroupService service.CollectionGroupService - DataloggerService service.DataloggerService - DataloggerTelemetryService service.DataloggerTelemetryService - DistrictRollupService service.DistrictRollupService - DomainService service.DomainService - EquivalencyTableService service.EquivalencyTableService - EvaluationService service.EvaluationService - HeartbeatService service.HeartbeatService - HomeService service.HomeService - InstrumentService service.InstrumentService - InstrumentAssignService service.InstrumentAssignService - InstrumentConstantService service.InstrumentConstantService - InstrumentGroupService service.InstrumentGroupService - InstrumentNoteService service.InstrumentNoteService - InstrumentStatusService service.InstrumentStatusService - IpiInstrumentService service.IpiInstrumentService - MeasurementService service.MeasurementService - InclinometerMeasurementService service.InclinometerMeasurementService - OpendcsService service.OpendcsService - PlotConfigService service.PlotConfigService - ProfileService service.ProfileService - ProjectRoleService service.ProjectRoleService - ProjectService service.ProjectService - ReportConfigService service.ReportConfigService - SaaInstrumentService service.SaaInstrumentService - SubmittalService service.SubmittalService - TimeseriesService service.TimeseriesService - TimeseriesCwmsService service.TimeseriesCwmsService - CalculatedTimeseriesService service.CalculatedTimeseriesService - ProcessTimeseriesService service.ProcessTimeseriesService - UnitService service.UnitService -} - -func NewApi(cfg *config.ApiConfig) *ApiHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig) - - profileService := service.NewProfileService(db, q) - projectRoleService := service.NewProjectRoleService(db, q) - dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) - mw := 
middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) - - return &ApiHandler{ - Middleware: mw, - BlobService: cloud.NewS3Blob(&cfg.AWSS3Config, "/instrumentation", cfg.RoutePrefix), - AlertService: service.NewAlertService(db, q), - AlertConfigService: service.NewAlertConfigService(db, q), - AlertSubscriptionService: service.NewAlertSubscriptionService(db, q), - EmailAutocompleteService: service.NewEmailAutocompleteService(db, q), - AwareParameterService: service.NewAwareParameterService(db, q), - CollectionGroupService: service.NewCollectionGroupService(db, q), - DataloggerService: service.NewDataloggerService(db, q), - DataloggerTelemetryService: dataloggerTelemetryService, - DistrictRollupService: service.NewDistrictRollupService(db, q), - DomainService: service.NewDomainService(db, q), - EquivalencyTableService: service.NewEquivalencyTableService(db, q), - EvaluationService: service.NewEvaluationService(db, q), - HeartbeatService: service.NewHeartbeatService(db, q), - HomeService: service.NewHomeService(db, q), - InstrumentService: service.NewInstrumentService(db, q), - InstrumentAssignService: service.NewInstrumentAssignService(db, q), - InstrumentConstantService: service.NewInstrumentConstantService(db, q), - InstrumentGroupService: service.NewInstrumentGroupService(db, q), - InstrumentNoteService: service.NewInstrumentNoteService(db, q), - InstrumentStatusService: service.NewInstrumentStatusService(db, q), - IpiInstrumentService: service.NewIpiInstrumentService(db, q), - MeasurementService: service.NewMeasurementService(db, q), - InclinometerMeasurementService: service.NewInclinometerMeasurementService(db, q), - OpendcsService: service.NewOpendcsService(db, q), - PlotConfigService: service.NewPlotConfigService(db, q), - ProfileService: profileService, - ProjectRoleService: service.NewProjectRoleService(db, q), - ProjectService: service.NewProjectService(db, q), - ReportConfigService: 
service.NewReportConfigService(db, q, ps, cfg.AuthJWTMocked), - SaaInstrumentService: service.NewSaaInstrumentService(db, q), - SubmittalService: service.NewSubmittalService(db, q), - TimeseriesService: service.NewTimeseriesService(db, q), - TimeseriesCwmsService: service.NewTimeseriesCwmsService(db, q), - CalculatedTimeseriesService: service.NewCalculatedTimeseriesService(db, q), - ProcessTimeseriesService: service.NewProcessTimeseriesService(db, q), - UnitService: service.NewUnitService(db, q), - } -} - -type TelemetryHandler struct { - Middleware middleware.Middleware - DataloggerService service.DataloggerService - DataloggerTelemetryService service.DataloggerTelemetryService - EquivalencyTableService service.EquivalencyTableService - MeasurementService service.MeasurementService -} - -func NewTelemetry(cfg *config.TelemetryConfig) *TelemetryHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - - profileService := service.NewProfileService(db, q) - projectRoleService := service.NewProjectRoleService(db, q) - dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) - mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) - - return &TelemetryHandler{ - Middleware: mw, - DataloggerService: service.NewDataloggerService(db, q), - DataloggerTelemetryService: dataloggerTelemetryService, - EquivalencyTableService: service.NewEquivalencyTableService(db, q), - MeasurementService: service.NewMeasurementService(db, q), - } -} - -type AlertCheckHandler struct { - AlertCheckService service.AlertCheckService -} - -func NewAlertCheck(cfg *config.AlertCheckConfig) *AlertCheckHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - - return &AlertCheckHandler{ - AlertCheckService: service.NewAlertCheckService(db, q, cfg), - } -} - -type DcsLoaderHandler struct { - PubsubService cloud.Pubsub - DcsLoaderService service.DcsLoaderService -} - -func NewDcsLoader(cfg 
*config.DcsLoaderConfig) *DcsLoaderHandler { - if !strings.HasPrefix(cfg.AWSSQSEndpoint, "https://") || !strings.HasPrefix(cfg.AWSSQSEndpoint, "http://") { - cfg.AWSSQSEndpoint = "https://" + cfg.AWSSQSEndpoint - } - s3Blob := cloud.NewS3Blob(&cfg.AWSS3Config, "", "") - ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig).WithBlob(s3Blob) - apiClient := newHttpClient() - - return &DcsLoaderHandler{ - PubsubService: ps, - DcsLoaderService: service.NewDcsLoaderService(apiClient, cfg), - } -} +// package handler +// +// import ( +// "net/http" +// "strings" +// "time" +// +// "github.com/USACE/instrumentation-api/api/internal/cloud" +// "github.com/USACE/instrumentation-api/api/internal/config" +// "github.com/USACE/instrumentation-api/api/internal/middleware" +// "github.com/USACE/instrumentation-api/api/internal/model" +// "github.com/USACE/instrumentation-api/api/internal/service" +// ) +// +// func newHttpClient() *http.Client { +// return &http.Client{ +// Timeout: time.Second * 60, +// CheckRedirect: func(req *http.Request, via []*http.Request) error { +// return nil +// }, +// } +// } +// +// type ApiHandler struct { +// Middleware middleware.Middleware +// BlobService cloud.Blob +// AlertService service.AlertService +// AlertConfigService service.AlertConfigService +// AlertSubscriptionService service.AlertSubscriptionService +// EmailAutocompleteService service.EmailAutocompleteService +// AwareParameterService service.AwareParameterService +// CollectionGroupService service.CollectionGroupService +// DataloggerService service.DataloggerService +// DataloggerTelemetryService service.DataloggerTelemetryService +// DistrictRollupService service.DistrictRollupService +// DomainService service.DomainService +// EquivalencyTableService service.EquivalencyTableService +// EvaluationService service.EvaluationService +// HeartbeatService service.HeartbeatService +// HomeService service.HomeService +// InstrumentService service.InstrumentService +// InstrumentAssignService 
service.InstrumentAssignService +// InstrumentConstantService service.InstrumentConstantService +// InstrumentGroupService service.InstrumentGroupService +// InstrumentNoteService service.InstrumentNoteService +// InstrumentStatusService service.InstrumentStatusService +// IpiInstrumentService service.IpiInstrumentService +// MeasurementService service.MeasurementService +// InclinometerMeasurementService service.InclinometerMeasurementService +// OpendcsService service.OpendcsService +// PlotConfigService service.PlotConfigService +// ProfileService service.ProfileService +// ProjectRoleService service.ProjectRoleService +// ProjectService service.ProjectService +// ReportConfigService service.ReportConfigService +// SaaInstrumentService service.SaaInstrumentService +// SubmittalService service.SubmittalService +// TimeseriesService service.TimeseriesService +// TimeseriesCwmsService service.TimeseriesCwmsService +// CalculatedTimeseriesService service.CalculatedTimeseriesService +// ProcessTimeseriesService service.ProcessTimeseriesService +// UnitService service.UnitService +// } +// +// func NewApi(cfg *config.ApiConfig) *ApiHandler { +// db := model.NewDatabase(&cfg.DBConfig) +// q := db.Queries() +// ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig) +// +// profileService := service.NewProfileService(db, q) +// projectRoleService := service.NewProjectRoleService(db, q) +// dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) +// mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) +// +// return &ApiHandler{ +// Middleware: mw, +// BlobService: cloud.NewS3Blob(&cfg.AWSS3Config, "/instrumentation", cfg.RoutePrefix), +// AlertService: service.NewAlertService(db, q), +// AlertConfigService: service.NewAlertConfigService(db, q), +// AlertSubscriptionService: service.NewAlertSubscriptionService(db, q), +// EmailAutocompleteService: service.NewEmailAutocompleteService(db, q), +// 
AwareParameterService: service.NewAwareParameterService(db, q), +// CollectionGroupService: service.NewCollectionGroupService(db, q), +// DataloggerService: service.NewDataloggerService(db, q), +// DataloggerTelemetryService: dataloggerTelemetryService, +// DistrictRollupService: service.NewDistrictRollupService(db, q), +// DomainService: service.NewDomainService(db, q), +// EquivalencyTableService: service.NewEquivalencyTableService(db, q), +// EvaluationService: service.NewEvaluationService(db, q), +// HeartbeatService: service.NewHeartbeatService(db, q), +// HomeService: service.NewHomeService(db, q), +// InstrumentService: service.NewInstrumentService(db, q), +// InstrumentAssignService: service.NewInstrumentAssignService(db, q), +// InstrumentConstantService: service.NewInstrumentConstantService(db, q), +// InstrumentGroupService: service.NewInstrumentGroupService(db, q), +// InstrumentNoteService: service.NewInstrumentNoteService(db, q), +// InstrumentStatusService: service.NewInstrumentStatusService(db, q), +// IpiInstrumentService: service.NewIpiInstrumentService(db, q), +// MeasurementService: service.NewMeasurementService(db, q), +// InclinometerMeasurementService: service.NewInclinometerMeasurementService(db, q), +// OpendcsService: service.NewOpendcsService(db, q), +// PlotConfigService: service.NewPlotConfigService(db, q), +// ProfileService: profileService, +// ProjectRoleService: service.NewProjectRoleService(db, q), +// ProjectService: service.NewProjectService(db, q), +// ReportConfigService: service.NewReportConfigService(db, q, ps, cfg.AuthJWTMocked), +// SaaInstrumentService: service.NewSaaInstrumentService(db, q), +// SubmittalService: service.NewSubmittalService(db, q), +// TimeseriesService: service.NewTimeseriesService(db, q), +// TimeseriesCwmsService: service.NewTimeseriesCwmsService(db, q), +// CalculatedTimeseriesService: service.NewCalculatedTimeseriesService(db, q), +// ProcessTimeseriesService: service.NewProcessTimeseriesService(db, 
q), +// UnitService: service.NewUnitService(db, q), +// } +// } +// +// type TelemetryHandler struct { +// Middleware middleware.Middleware +// DataloggerService service.DataloggerService +// DataloggerTelemetryService service.DataloggerTelemetryService +// EquivalencyTableService service.EquivalencyTableService +// MeasurementService service.MeasurementService +// } +// +// func NewTelemetry(cfg *config.TelemetryConfig) *TelemetryHandler { +// db := model.NewDatabase(&cfg.DBConfig) +// q := db.Queries() +// +// profileService := service.NewProfileService(db, q) +// projectRoleService := service.NewProjectRoleService(db, q) +// dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) +// mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) +// +// return &TelemetryHandler{ +// Middleware: mw, +// DataloggerService: service.NewDataloggerService(db, q), +// DataloggerTelemetryService: dataloggerTelemetryService, +// EquivalencyTableService: service.NewEquivalencyTableService(db, q), +// MeasurementService: service.NewMeasurementService(db, q), +// } +// } +// +// type AlertCheckHandler struct { +// AlertCheckService service.AlertCheckService +// } +// +// func NewAlertCheck(cfg *config.AlertCheckConfig) *AlertCheckHandler { +// db := model.NewDatabase(&cfg.DBConfig) +// q := db.Queries() +// +// return &AlertCheckHandler{ +// AlertCheckService: service.NewAlertCheckService(db, q, cfg), +// } +// } +// +// type DcsLoaderHandler struct { +// PubsubService cloud.Pubsub +// DcsLoaderService service.DcsLoaderService +// } +// +// func NewDcsLoader(cfg *config.DcsLoaderConfig) *DcsLoaderHandler { +// if !strings.HasPrefix(cfg.AWSSQSEndpoint, "https://") || !strings.HasPrefix(cfg.AWSSQSEndpoint, "http://") { +// cfg.AWSSQSEndpoint = "https://" + cfg.AWSSQSEndpoint +// } +// s3Blob := cloud.NewS3Blob(&cfg.AWSS3Config, "", "") +// ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig).WithBlob(s3Blob) +// 
apiClient := newHttpClient() +// +// return &DcsLoaderHandler{ +// PubsubService: ps, +// DcsLoaderService: service.NewDcsLoaderService(apiClient, cfg), +// } +// } diff --git a/api/internal/handler/handlerv2.go b/api/internal/handler/handlerv2.go new file mode 100644 index 00000000..e7982b1f --- /dev/null +++ b/api/internal/handler/handlerv2.go @@ -0,0 +1,174 @@ +package handler + +import ( + "net/http" + "strings" + "time" + + "github.com/USACE/instrumentation-api/api/internal/cloud" + "github.com/USACE/instrumentation-api/api/internal/config" + "github.com/USACE/instrumentation-api/api/internal/middleware" + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/service" +) + +func newHttpClient() *http.Client { + return &http.Client{ + Timeout: time.Second * 60, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + return nil + }, + } +} + +type ApiHandler struct { + Middleware middleware.Middleware + BlobService cloud.Blob + AlertService service.AlertService + AlertConfigService service.AlertConfigService + AlertSubscriptionService service.AlertSubscriptionService + EmailAutocompleteService service.EmailAutocompleteService + AwareParameterService service.AwareParameterService + CollectionGroupService service.CollectionGroupService + DataloggerService service.DataloggerService + DataloggerTelemetryService service.DataloggerTelemetryService + DistrictRollupService service.DistrictRollupService + DomainService service.DomainService + EquivalencyTableService service.EquivalencyTableService + EvaluationService service.EvaluationService + HeartbeatService service.HeartbeatService + HomeService service.HomeService + InstrumentService service.InstrumentService + InstrumentAssignService service.InstrumentAssignService + InstrumentConstantService service.InstrumentConstantService + InstrumentGroupService service.InstrumentGroupService + InstrumentNoteService service.InstrumentNoteService + 
InstrumentStatusService        service.InstrumentStatusService
+	IpiInstrumentService           service.IpiInstrumentService
+	MeasurementService             service.MeasurementService
+	InclinometerMeasurementService service.InclinometerMeasurementService
+	OpendcsService                 service.OpendcsService
+	PlotConfigService              service.PlotConfigService
+	ProfileService                 service.ProfileService
+	ProjectRoleService             service.ProjectRoleService
+	ProjectService                 service.ProjectService
+	ReportConfigService            service.ReportConfigService
+	SaaInstrumentService           service.SaaInstrumentService
+	SubmittalService               service.SubmittalService
+	TimeseriesService              service.TimeseriesService
+	TimeseriesCwmsService          service.TimeseriesCwmsService
+	CalculatedTimeseriesService    service.CalculatedTimeseriesService
+	ProcessTimeseriesService       service.ProcessTimeseriesService
+	UnitService                    service.UnitService
+}
+
+func NewApi(cfg *config.ApiConfig) *ApiHandler {
+	db := model.NewDatabase(&cfg.DBConfig)
+	q := db.Queries()
+	ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig)
+
+
+	profileService := service.NewProfileService(db, q)
+	projectRoleService := service.NewProjectRoleService(db, q)
+	dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q)
+	mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService)
+
+	return &ApiHandler{
+		Middleware:                 mw,
+		BlobService:                cloud.NewS3Blob(&cfg.AWSS3Config, "/instrumentation", cfg.RoutePrefix),
+		AlertService:               service.NewAlertService(db, q),
+		AlertConfigService:         service.NewAlertConfigService(db, q),
+		AlertSubscriptionService:   service.NewAlertSubscriptionService(db, q),
+		EmailAutocompleteService:   service.NewEmailAutocompleteService(db, q),
+		AwareParameterService:      service.NewAwareParameterService(db, q),
+		CollectionGroupService:     service.NewCollectionGroupService(db, q),
+		DataloggerService:          service.NewDataloggerService(db, q),
+		DataloggerTelemetryService: dataloggerTelemetryService,
+
DistrictRollupService: service.NewDistrictRollupService(db, q), + DomainService: service.NewDomainService(db, q), + EquivalencyTableService: service.NewEquivalencyTableService(db, q), + EvaluationService: service.NewEvaluationService(db, q), + HeartbeatService: service.NewHeartbeatService(db, q), + HomeService: service.NewHomeService(db, q), + InstrumentService: service.NewInstrumentService(db, q), + InstrumentAssignService: service.NewInstrumentAssignService(db, q), + InstrumentConstantService: service.NewInstrumentConstantService(db, q), + InstrumentGroupService: service.NewInstrumentGroupService(db, q), + InstrumentNoteService: service.NewInstrumentNoteService(db, q), + InstrumentStatusService: service.NewInstrumentStatusService(db, q), + IpiInstrumentService: service.NewIpiInstrumentService(db, q), + MeasurementService: service.NewMeasurementService(db, q), + InclinometerMeasurementService: service.NewInclinometerMeasurementService(db, q), + OpendcsService: service.NewOpendcsService(db, q), + PlotConfigService: service.NewPlotConfigService(db, q), + ProfileService: profileService, + ProjectRoleService: service.NewProjectRoleService(db, q), + ProjectService: service.NewProjectService(db, q), + ReportConfigService: service.NewReportConfigService(db, q, ps, cfg.AuthJWTMocked), + SaaInstrumentService: service.NewSaaInstrumentService(db, q), + SubmittalService: service.NewSubmittalService(db, q), + TimeseriesService: service.NewTimeseriesService(db, q), + TimeseriesCwmsService: service.NewTimeseriesCwmsService(db, q), + CalculatedTimeseriesService: service.NewCalculatedTimeseriesService(db, q), + ProcessTimeseriesService: service.NewProcessTimeseriesService(db, q), + UnitService: service.NewUnitService(db, q), + } +} + +type TelemetryHandler struct { + Middleware middleware.Middleware + DataloggerService service.DataloggerService + DataloggerTelemetryService service.DataloggerTelemetryService + EquivalencyTableService service.EquivalencyTableService + 
MeasurementService         service.MeasurementService
+}
+
+func NewTelemetry(cfg *config.TelemetryConfig) *TelemetryHandler {
+	db := model.NewDatabase(&cfg.DBConfig)
+	q := db.Queries()
+
+	profileService := service.NewProfileService(db, q)
+	projectRoleService := service.NewProjectRoleService(db, q)
+	dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q)
+	mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService)
+
+	return &TelemetryHandler{
+		Middleware:                 mw,
+		DataloggerService:          service.NewDataloggerService(db, q),
+		DataloggerTelemetryService: dataloggerTelemetryService,
+		EquivalencyTableService:    service.NewEquivalencyTableService(db, q),
+		MeasurementService:         service.NewMeasurementService(db, q),
+	}
+}
+
+type AlertCheckHandler struct {
+	AlertCheckService service.AlertCheckService
+}
+
+func NewAlertCheck(cfg *config.AlertCheckConfig) *AlertCheckHandler {
+	db := model.NewDatabase(&cfg.DBConfig)
+	q := db.Queries()
+
+	return &AlertCheckHandler{
+		AlertCheckService: service.NewAlertCheckService(db, q, cfg),
+	}
+}
+
+type DcsLoaderHandler struct {
+	PubsubService    cloud.Pubsub
+	DcsLoaderService service.DcsLoaderService
+}
+
+func NewDcsLoader(cfg *config.DcsLoaderConfig) *DcsLoaderHandler {
+	if !strings.HasPrefix(cfg.AWSSQSEndpoint, "https://") && !strings.HasPrefix(cfg.AWSSQSEndpoint, "http://") {
+		cfg.AWSSQSEndpoint = "https://" + cfg.AWSSQSEndpoint
+	}
+	s3Blob := cloud.NewS3Blob(&cfg.AWSS3Config, "", "")
+	ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig).WithBlob(s3Blob)
+	apiClient := newHttpClient()
+
+	return &DcsLoaderHandler{
+		PubsubService:    ps,
+		DcsLoaderService: service.NewDcsLoaderService(apiClient, cfg),
+	}
+}
diff --git a/api/internal/model/common.go b/api/internal/model/common.go
index 66432b87..d18a2410 100644
--- a/api/internal/model/common.go
+++ b/api/internal/model/common.go
@@ -1,6 +1,8 @@
 package model
 
 import (
+	"encoding/json"
+	"fmt"
 	"time"
"github.com/google/uuid" @@ -16,6 +18,16 @@ type AuditInfo struct { UpdateDate *time.Time `json:"update_date" db:"update_date"` } +type Opts map[string]interface{} + +func (o *Opts) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), o) +} + type IDSlug struct { ID uuid.UUID `json:"id"` Slug string `json:"slug"` diff --git a/api/internal/model/instrument.go b/api/internal/model/instrument.go index dfc9e75f..66e00afc 100644 --- a/api/internal/model/instrument.go +++ b/api/internal/model/instrument.go @@ -3,7 +3,6 @@ package model import ( "context" "database/sql/driver" - "encoding/json" "fmt" "time" @@ -42,18 +41,6 @@ type Instrument struct { AuditInfo } -// Optional instrument metadata based on type -// If there are no options defined for the instrument type, the object will be empty -type Opts map[string]interface{} - -func (o *Opts) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), o) -} - // InstrumentCollection is a collection of Instrument items type InstrumentCollection []Instrument diff --git a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index e62c791f..886ec8db 100644 --- a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -12200,25 +12200,19 @@ }, "/timeseries_measurements" : { "post" : { - "parameters" : [ { - "description" : "api key", - "in" : "query", - "name" : "key", - "required" : true, - "schema" : { - "type" : "string" - } - } ], "requestBody" : { "content" : { - "*/*" : { + "application/json" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesMeasurementCollectionCollection" + "$ref" : "#/components/schemas/_timeseries_measurements_post_request" + } + }, + "multipart/form-data" : { + "schema" : { + "$ref" : "#/components/schemas/_timeseries_measurements_post_request" } } - }, 
- "description" : "array of timeseries measurement collections", - "required" : true + } }, "responses" : { "200" : { @@ -12265,7 +12259,10 @@ "description" : "Internal Server Error" } }, - "summary" : "creates or updates one or more timeseries measurements", + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "creates one or more timeseries measurements", "tags" : [ "measurement" ], "x-codegen-request-body-name" : "timeseries_measurement_collections" } @@ -14723,6 +14720,16 @@ "enum" : [ 0, 1, 2 ], "type" : "integer", "x-enum-varnames" : [ "Undefined", "Null", "Present" ] + }, + "_timeseries_measurements_post_request" : { + "properties" : { + "timeseries_measurement_collections" : { + "description" : "TOA5 file of timeseries measurement collections", + "format" : "binary", + "type" : "string" + } + }, + "type" : "object" } }, "securitySchemes" : { diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml index e6bde2c5..bf12a73a 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -8157,20 +8157,14 @@ paths: - timeseries /timeseries_measurements: post: - parameters: - - description: api key - in: query - name: key - required: true - schema: - type: string requestBody: content: - '*/*': + application/json: schema: - $ref: '#/components/schemas/TimeseriesMeasurementCollectionCollection' - description: array of timeseries measurement collections - required: true + $ref: '#/components/schemas/_timeseries_measurements_post_request' + multipart/form-data: + schema: + $ref: '#/components/schemas/_timeseries_measurements_post_request' responses: "200": content: @@ -8198,7 +8192,9 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: creates or updates one or more timeseries measurements + security: + - Bearer: [] + summary: creates one or more timeseries measurements tags: - measurement x-codegen-request-body-name: 
timeseries_measurement_collections @@ -11177,6 +11173,13 @@ components: - Undefined - "Null" - Present + _timeseries_measurements_post_request: + properties: + timeseries_measurement_collections: + description: TOA5 file of timeseries measurement collections + format: binary + type: string + type: object securitySchemes: Bearer: description: Type "Bearer" followed by a space and access token. diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index 7067b794..c28a2800 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -13,8 +13,9 @@ import ( ) type UploaderService interface { - CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader) error - CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error + CreateTimeseriesMeasurementsFromCSVFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error + CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error + CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) error } type uploaderService struct { @@ -26,8 +27,20 @@ func NewUploaderService(db *model.Database, q *model.Queries) *uploaderService { return &uploaderService{db, q} } -// TODO: transition away from datalogger equivalency table to different parser that's uploader specific -func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error { +func CreateTimeseriesMeasurementsFromCSVFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { + // TODO + return nil +} + +func CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { + // TODO + return nil +} + +// TODO transition away from datalogger equivalency table to different parser that's uploader specific +func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) 
error { + // TODO Get mapper by id + tx, err := s.db.BeginTxx(ctx, nil) if err != nil { return err @@ -56,23 +69,23 @@ func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Co } meta := model.Environment{ - StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - OSVersion: envHeader[4], - ProgName: envHeader[5], - TableName: envHeader[6], + // StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + // OSVersion: envHeader[4], + // ProgName: envHeader[5], + TableName: envHeader[6], } - dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) - if err != nil { - return err - } - - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) - if err != nil { - return err - } + // dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + // if err != nil { + // return err + // } + // + // tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + // if err != nil { + // return err + // } // first two columns are timestamp and record number // we only want to collect the measurement fields here diff --git a/api/internal/servicev2/alert.go b/api/internal/servicev2/alert.go new file mode 100644 index 00000000..6c318ceb --- /dev/null +++ b/api/internal/servicev2/alert.go @@ -0,0 +1,96 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type AlertService interface { + CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error + GetAllAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]model.Alert, error) + GetAllAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.Alert, error) + GetAllAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]model.Alert, error) + GetOneAlertForProfile(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, 
error) + DoAlertRead(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) + DoAlertUnread(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) +} + +// Create creates one or more new alerts +func (s dbStore) CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error { + var err error + s.Queries.CreateAlerts(ctx, alertConfigIDs).Exec(func(_ int, e error) { + err = e + }) + return err +} + +// DoAlertRead marks an alert as read for a profile +func (s dbStore) DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) (model.Alert, error) { + tx, err := s.db.Begin(ctx) + if err != nil { + return model.Alert{}, err + } + defer txDo(ctx, tx.Rollback) + + qtx := s.WithTx(tx) + if err := qtx.CreateAlertRead(ctx, db.CreateAlertReadParams{ + ProfileID: profileID, + AlertID: alertID, + }); err != nil { + return model.Alert{}, err + } + b, err := qtx.GetAlert(ctx, db.GetAlertParams{ + ProfileID: profileID, + ID: alertID, + }) + if err != nil { + return model.Alert{}, err + } + if err := tx.Commit(ctx); err != nil { + return model.Alert{}, err + } + + return model.Alert{ + Read: &b.Read, + ID: b.ID, + AlertConfigID: b.AlertConfigID, + ProjectID: b.ProjectID, + ProjectName: b.ProjectName, + Name: b.Name, + Body: b.Body, + CreateDate: b.CreateDate, + Instruments: b.Instruments, + }, nil +} + +// DoAlertUnread marks an alert as unread for a profile +func (s dbStore) DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) (model.Alert, error) { + tx, err := s.db.Begin(ctx) + if err != nil { + return model.Alert{}, err + } + defer txDo(ctx, tx.Rollback) + + qtx := s.WithTx(tx) + if err := qtx.DeleteAlertRead(ctx, db.DeleteAlertReadParams{ + ProfileID: profileID, + AlertID: alertID, + }); err != nil { + return model.Alert{}, err + } + a, err := qtx.GetAlert(ctx, db.GetAlertParams{ + ProfileID: profileID, + ID: alertID, + }) + if err != nil { + return model.Alert{}, err + } + if err := tx.Commit(ctx); 
err != nil { + return model.Alert{}, err + } + + return a, nil +} diff --git a/api/internal/servicev2/alert_check.go b/api/internal/servicev2/alert_check.go new file mode 100644 index 00000000..0f618a85 --- /dev/null +++ b/api/internal/servicev2/alert_check.go @@ -0,0 +1,362 @@ +package servicev2 + +import ( + "context" + "errors" + "fmt" + "log" + "sync" + "time" + + "github.com/USACE/instrumentation-api/api/internal/config" + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +var ( + GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") + YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") + RedSubmittalStatusID uuid.UUID = uuid.MustParse("84a0f437-a20a-4ac2-8a5b-f8dc35e8489b") + + MeasurementSubmittalAlertTypeID uuid.UUID = uuid.MustParse("97e7a25c-d5c7-4ded-b272-1bb6e5914fe3") + EvaluationSubmittalAlertTypeID uuid.UUID = uuid.MustParse("da6ee89e-58cc-4d85-8384-43c3c33a68bd") +) + +const ( + warning = "Warning" + alert = "Alert" + reminder = "Reminder" +) + +type AlertCheckService interface { + DoAlertChecks(ctx context.Context) error +} + +type alertConfigChecker[T alertChecker] interface { + GetAlertConfig() model.AlertConfig + SetAlertConfig(model.AlertConfig) + GetChecks() []T + SetChecks([]T) + DoEmail(string, config.AlertCheckConfig) error +} + +type alertChecker interface { + GetShouldWarn() bool + GetShouldAlert() bool + GetShouldRemind() bool + GetSubmittal() model.Submittal + SetSubmittal(model.Submittal) +} + +type alertCheckService struct { + db *model.Database + *model.Queries + cfg *config.AlertCheckConfig +} + +func NewAlertCheckService(db *model.Database, q *model.Queries, cfg *config.AlertCheckConfig) *alertCheckService { + return &alertCheckService{db, q, cfg} +} + +func (s alertCheckService) DoAlertChecks(ctx context.Context) error { + if s.cfg == nil { + return 
fmt.Errorf("missing config") + } + + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + subs, err := qtx.ListUnverifiedMissingSubmittals(ctx) + if err != nil { + return err + } + acs, err := qtx.ListAndCheckAlertConfigs(ctx) + if err != nil { + return err + } + if len(acs) == 0 { + log.Println("no alert configs to check") + return nil + } + + subMap := make(map[uuid.UUID]model.Submittal) + for _, s := range subs { + subMap[s.ID] = s + } + acMap := make(map[uuid.UUID]model.AlertConfig) + for _, a := range acs { + acMap[a.ID] = a + } + + errs := make([]error, 0) + + if err := checkMeasurements(ctx, qtx, subMap, acMap, *s.cfg); err != nil { + errs = append(errs, err) + } + if err := checkEvaluations(ctx, qtx, subMap, acMap, *s.cfg); err != nil { + errs = append(errs, err) + } + + if err := tx.Commit(); err != nil { + errs = append(errs, err) + } + + if len(errs) > 0 { + return errors.Join(errs...) + } + + return nil +} + +func checkEvaluations(ctx context.Context, q *model.Queries, subMap model.SubmittalMap, acMap model.AlertConfigMap, cfg config.AlertCheckConfig) error { + accs := make([]*model.AlertConfigEvaluationCheck, 0) + ecs, err := q.GetAllIncompleteEvaluationSubmittals(ctx) + if err != nil { + return err + } + + ecMap := make(map[uuid.UUID][]*model.EvaluationCheck) + for k := range acMap { + ecMap[k] = make([]*model.EvaluationCheck, 0) + } + for idx := range ecs { + if sub, ok := subMap[ecs[idx].SubmittalID]; ok { + ecs[idx].Submittal = sub + ecMap[ecs[idx].AlertConfigID] = append(ecMap[ecs[idx].AlertConfigID], ecs[idx]) + } + } + for k, v := range acMap { + if v.AlertTypeID != EvaluationSubmittalAlertTypeID { + continue + } + acc := model.AlertConfigEvaluationCheck{ + AlertConfig: v, + AlertChecks: ecMap[k], + } + accs = append(accs, &acc) + } + + // handleChecks should not rollback txn but should bubble up errors after txn committed + alertCheckErr := handleChecks(ctx, q, accs, 
cfg) + if alertCheckErr != nil { + return alertCheckErr + } + + return nil +} + +func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.SubmittalMap, acMap model.AlertConfigMap, cfg config.AlertCheckConfig) error { + accs := make([]*model.AlertConfigMeasurementCheck, 0) + mcs, err := q.GetAllIncompleteMeasurementSubmittals(ctx) + if err != nil { + return err + } + + mcMap := make(map[uuid.UUID][]*model.MeasurementCheck) + for k := range acMap { + mcMap[k] = make([]*model.MeasurementCheck, 0) + } + + for idx := range mcs { + if sub, ok := subMap[mcs[idx].SubmittalID]; ok { + mcs[idx].Submittal = sub + mcMap[mcs[idx].AlertConfigID] = append(mcMap[mcs[idx].AlertConfigID], mcs[idx]) + } + } + + for k, v := range acMap { + if v.AlertTypeID != MeasurementSubmittalAlertTypeID { + continue + } + acc := model.AlertConfigMeasurementCheck{ + AlertConfig: v, + AlertChecks: mcMap[k], + } + accs = append(accs, &acc) + } + + alertCheckErr := handleChecks(ctx, q, accs, cfg) + if alertCheckErr != nil { + return alertCheckErr + } + + return nil +} + +func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *model.Queries, accs []PT) error { + for _, acc := range accs { + ac := acc.GetAlertConfig() + if err := q.UpdateAlertConfigLastReminded(ctx, ac); err != nil { + return err + } + checks := acc.GetChecks() + for _, c := range checks { + sub := c.GetSubmittal() + if err := q.UpdateSubmittalCompletionDateOrWarningSent(ctx, sub); err != nil { + return err + } + } + if ac.CreateNextSubmittalFrom != nil { + if err := q.CreateNextSubmittalFromNewAlertConfigDate(ctx, ac); err != nil { + return err + } + } + } + return nil +} + +// there should always be at least one "missing" submittal within an alert config. Submittals are created: +// 1. when an alert config is created (first submittal) +// 2. when a submittal is completed (next submittal created) +// 3. 
when a submittals due date has passed if it is not completed +// +// for evaluations, the next is submittal created manually when the evaluation is made +// for measurements, the next submittal is created the first time this function runs after the due date +// +// No "Yellow" Status Submittals should be passed to this function as it implies the submittal has been completed +// +// TODO: smtp.SendMail esablishes a new connection for each batch of emails sent. I would be better to aggregate +// the contents of each email, then create a connection pool to reuse and send all emails at once, with any errors wrapped and returned +// p.s. Dear future me/someone else: I'm sorry +func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *model.Queries, accs []PT, cfg config.AlertCheckConfig) error { + defer util.Timer()() + + mu := &sync.Mutex{} + aaccs := make([]PT, len(accs)) + errs := make([]error, 0) + t := time.Now() + + wg := sync.WaitGroup{} + for i, p := range accs { + wg.Add(1) + go func(idx int, acc PT) { + defer wg.Done() + + ac := acc.GetAlertConfig() + checks := acc.GetChecks() + + // If ANY "missing" submittals are within an alert config, aggregate missing submittals and send an alert + acAlert := false + sendAlertEmail := false + // If ANY missing submittals previously existed within an alert config, send them in a "reminder" instead of an alert + acReminder := false + sendReminderEmail := false + // If a reminder exists when at least one submittal "shouldAlert", the alert should be aggregated into the next reminder + // instead of sending a new reminder email. If NO alerts exist for an alert config, the reminder can be reset to NULL. 
+ // Reminders should be set when the first alert for an alert config is triggered, or at each reminder interval + resetReminders := len(checks) != 0 + + for j, c := range checks { + shouldWarn := c.GetShouldWarn() + shouldAlert := c.GetShouldAlert() + shouldRemind := c.GetShouldRemind() + sub := c.GetSubmittal() + + // if no submittal alerts or warnings are found, no emails should be sent + if !shouldAlert && !shouldWarn { + // if submittal status was previously red, update status to yellow and + // completion_date to current timestamp + if sub.SubmittalStatusID == RedSubmittalStatusID { + sub.SubmittalStatusID = YellowSubmittalStatusID + sub.CompletionDate = &t + ac.CreateNextSubmittalFrom = &t + } else + + // if submittal status is green and the current time is not before the submittal due date, + // complete the submittal at that due date and prepare the next submittal interval + if sub.SubmittalStatusID == GreenSubmittalStatusID && !t.Before(sub.DueDate) { + sub.CompletionDate = &sub.DueDate + ac.CreateNextSubmittalFrom = &sub.DueDate + } + } else + + // if any submittal warning is triggered, immediately send a + // warning email, since submittal due dates are unique within alert configs + if shouldWarn && !sub.WarningSent { + if !ac.MuteConsecutiveAlerts || ac.LastReminded == nil { + mu.Lock() + if err := acc.DoEmail(warning, cfg); err != nil { + errs = append(errs, err) + } + mu.Unlock() + } + sub.SubmittalStatusID = GreenSubmittalStatusID + sub.WarningSent = true + } else + + // if any submittal alert is triggered after a warning has been sent within an + // alert config, aggregate missing submittals and send their contents in an alert email + if shouldAlert { + if sub.SubmittalStatusID != RedSubmittalStatusID { + sub.SubmittalStatusID = RedSubmittalStatusID + acAlert = true + ac.CreateNextSubmittalFrom = &sub.DueDate + } + resetReminders = false + } + + // if any reminder is triggered, aggregate missing + // submittals and send their contents in an email + 
if shouldRemind { + acReminder = true + } + + c.SetSubmittal(sub) + checks[j] = c + } + + // if there are no alerts, there should also be no reminders sent. "last_reminded" is used to determine + // if an alert has already been sent for an alert config, and send a reminder if so + if resetReminders { + ac.LastReminded = nil + } + + // if there are any reminders within an alert config, they will override the alerts if MuteConsecutiveAlerts is true + if acAlert && ((!acReminder && ac.LastReminded == nil) || !ac.MuteConsecutiveAlerts) { + ac.LastReminded = &t + sendAlertEmail = true + } + if acReminder && ac.LastReminded != nil { + ac.LastReminded = &t + sendReminderEmail = true + } + + acc.SetAlertConfig(ac) + acc.SetChecks(checks) + + if sendAlertEmail { + mu.Lock() + if err := acc.DoEmail(alert, cfg); err != nil { + errs = append(errs, err) + } + mu.Unlock() + } + if sendReminderEmail { + mu.Lock() + if err := acc.DoEmail(reminder, cfg); err != nil { + errs = append(errs, err) + } + mu.Unlock() + } + + aaccs[idx] = acc + }(i, p) + } + wg.Wait() + + if err := updateAlertConfigChecks(ctx, q, aaccs); err != nil { + errs = append(errs, err) + return errors.Join(errs...) + } + if len(errs) > 0 { + return errors.Join(errs...) 
+ } + + return nil +} diff --git a/api/internal/servicev2/alert_config.go b/api/internal/servicev2/alert_config.go new file mode 100644 index 00000000..ae984b7b --- /dev/null +++ b/api/internal/servicev2/alert_config.go @@ -0,0 +1,130 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type AlertConfigService interface { + GetAllAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]model.AlertConfig, error) + GetAllAlertConfigsForProjectAndAlertType(ctx context.Context, projectID, alertTypeID uuid.UUID) ([]model.AlertConfig, error) + GetAllAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.AlertConfig, error) + GetOneAlertConfig(ctx context.Context, alertConfigID uuid.UUID) (model.AlertConfig, error) + CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (model.AlertConfig, error) + UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (model.AlertConfig, error) + DeleteAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error +} + +type alertConfigService struct { + db *model.Database + *model.Queries +} + +func NewAlertConfigService(db *model.Database, q *model.Queries) *alertConfigService { + return &alertConfigService{db, q} +} + +// CreateAlertConfig creates one new alert configuration +func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (model.AlertConfig, error) { + var a model.AlertConfig + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + if ac.RemindInterval == "" { + ac.RemindInterval = "PT0" + } + if ac.WarningInterval == "" { + ac.WarningInterval = "PT0" + } + + qtx := s.WithTx(tx) + + acID, err := qtx.CreateAlertConfig(ctx, ac) + if err != nil { + return a, err + } + + for _, aci := range ac.Instruments { + if err := qtx.AssignInstrumentToAlertConfig(ctx, acID, aci.InstrumentID); 
err != nil { + return a, err + } + } + + if err := registerAndSubscribe(ctx, qtx, acID, ac.AlertEmailSubscriptions); err != nil { + return a, err + } + + if err := qtx.CreateNextSubmittalFromExistingAlertConfigDate(ctx, acID); err != nil { + return a, err + } + + acNew, err := qtx.GetOneAlertConfig(ctx, acID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + return acNew, nil +} + +// UpdateAlertConfig updates an alert config +func (s alertConfigService) UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (model.AlertConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.AlertConfig{}, err + } + defer model.TxDo(tx.Rollback) + + if ac.RemindInterval == "" { + ac.RemindInterval = "PT0" + } + if ac.WarningInterval == "" { + ac.WarningInterval = "PT0" + } + + qtx := s.WithTx(tx) + + if err := qtx.UpdateAlertConfig(ctx, ac); err != nil { + return model.AlertConfig{}, err + } + + if err := qtx.UnassignAllInstrumentsFromAlertConfig(ctx, alertConfigID); err != nil { + return model.AlertConfig{}, err + } + + for _, aci := range ac.Instruments { + if err := qtx.AssignInstrumentToAlertConfig(ctx, alertConfigID, aci.InstrumentID); err != nil { + return model.AlertConfig{}, err + } + } + + if err := qtx.UnsubscribeAllEmailsFromAlertConfig(ctx, alertConfigID); err != nil { + return model.AlertConfig{}, err + } + if err := registerAndSubscribe(ctx, qtx, alertConfigID, ac.AlertEmailSubscriptions); err != nil { + return model.AlertConfig{}, err + } + + if err := qtx.UpdateFutureSubmittalForAlertConfig(ctx, alertConfigID); err != nil { + return model.AlertConfig{}, err + } + + acNew, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + if err != nil { + return model.AlertConfig{}, err + } + + if err := tx.Commit(); err != nil { + return model.AlertConfig{}, err + } + + return acNew, nil +} diff --git a/api/internal/servicev2/alert_subscription.go 
b/api/internal/servicev2/alert_subscription.go new file mode 100644 index 00000000..ffe73dec --- /dev/null +++ b/api/internal/servicev2/alert_subscription.go @@ -0,0 +1,231 @@ +package servicev2 + +import ( + "context" + "fmt" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +const ( + unknown = "" + email = "email" + profile = "profile" +) + +type AlertSubscriptionService interface { + SubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) (model.AlertSubscription, error) + UnsubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) error + GetAlertSubscription(ctx context.Context, alertConfigID, profileID uuid.UUID) (model.AlertSubscription, error) + GetAlertSubscriptionByID(ctx context.Context, subscriptionID uuid.UUID) (model.AlertSubscription, error) + ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]model.AlertSubscription, error) + UpdateMyAlertSubscription(ctx context.Context, s model.AlertSubscription) (model.AlertSubscription, error) + SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) + UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) + UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + UnregisterEmail(ctx context.Context, emailID uuid.UUID) error +} + +type alertSubscriptionService struct { + db *model.Database + *model.Queries +} + +func NewAlertSubscriptionService(db *model.Database, q *model.Queries) *alertSubscriptionService { + return &alertSubscriptionService{db, q} +} + +// SubscribeProfileToAlerts subscribes a profile to an instrument alert +func (s alertSubscriptionService) SubscribeProfileToAlerts(ctx context.Context, alertConfigID uuid.UUID, profileID uuid.UUID) (model.AlertSubscription, error) { + var a 
model.AlertSubscription + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.SubscribeProfileToAlerts(ctx, alertConfigID, profileID); err != nil { + return a, err + } + + updated, err := qtx.GetAlertSubscription(ctx, alertConfigID, profileID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + return updated, nil +} + +// UpdateMyAlertSubscription updates properties on a AlertSubscription +func (s alertSubscriptionService) UpdateMyAlertSubscription(ctx context.Context, sub model.AlertSubscription) (model.AlertSubscription, error) { + var a model.AlertSubscription + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdateMyAlertSubscription(ctx, sub); err != nil { + return a, err + } + + updated, err := qtx.GetAlertSubscription(ctx, sub.AlertConfigID, sub.ProfileID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + return updated, nil +} + +func (s alertSubscriptionService) SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) { + var a model.AlertConfig + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := registerAndSubscribe(ctx, qtx, alertConfigID, emails); err != nil { + return a, err + } + + // Register any emails that are not yet in system + for idx, em := range emails { + if em.UserType == unknown || em.UserType == email { + newID, err := qtx.RegisterEmail(ctx, em.Email) + if err != nil { + return a, err + } + emails[idx].ID = newID + emails[idx].UserType = email + } + } + // Subscribe emails + for _, em := range emails { + if em.UserType == email { + if err := 
qtx.SubscribeEmailToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + return a, err + } + } else if em.UserType == profile { + if err := qtx.SubscribeProfileToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + return a, err + } + } else { + return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) + } + } + + acUpdated, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + return acUpdated, nil +} + +func (s alertSubscriptionService) UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) { + var a model.AlertConfig + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + for _, em := range emails { + if em.UserType == unknown { + return a, fmt.Errorf("required field user_type is null, aborting transaction") + } else if em.UserType == email { + if err := qtx.UnsubscribeEmailFromAlertConfig(ctx, alertConfigID, em.ID); err != nil { + return a, err + } + } else if em.UserType == profile { + if err := qtx.UnsubscribeProfileFromAlertConfig(ctx, alertConfigID, em.ID); err != nil { + return a, err + } + } else { + return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) + } + } + + acUpdated, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + return acUpdated, nil +} + +func (s alertSubscriptionService) UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := 
qtx.UnsubscribeAllEmailsFromAlertConfig(ctx, alertConfigID); err != nil { + return err + } + + if err := qtx.UnsubscribeAllProfilesFromAlertConfig(ctx, alertConfigID); err != nil { + return err + } + + if err := tx.Commit(); err != nil { + return err + } + return nil +} + +func registerAndSubscribe(ctx context.Context, q *model.Queries, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) error { + for idx, em := range emails { + if em.UserType == unknown || em.UserType == email { + newID, err := q.RegisterEmail(ctx, em.Email) + if err != nil { + return err + } + emails[idx].ID = newID + emails[idx].UserType = email + } + } + for _, em := range emails { + if em.UserType == email { + if err := q.SubscribeEmailToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + return err + } + } else if em.UserType == profile { + if err := q.SubscribeProfileToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + return err + } + } else { + return fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) + } + } + return nil +} diff --git a/api/internal/servicev2/autocomplete.go b/api/internal/servicev2/autocomplete.go new file mode 100644 index 00000000..c821e69c --- /dev/null +++ b/api/internal/servicev2/autocomplete.go @@ -0,0 +1,20 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" +) + +type EmailAutocompleteService interface { + ListEmailAutocomplete(ctx context.Context, emailInput string, limit int) ([]model.EmailAutocompleteResult, error) +} + +type emailAutocompleteService struct { + db *model.Database + *model.Queries +} + +func NewEmailAutocompleteService(db *model.Database, q *model.Queries) *emailAutocompleteService { + return &emailAutocompleteService{db, q} +} diff --git a/api/internal/servicev2/aware.go b/api/internal/servicev2/aware.go new file mode 100644 index 00000000..4e9fbd40 --- /dev/null +++ b/api/internal/servicev2/aware.go 
@@ -0,0 +1,49 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type AwareParameterService interface { + ListAwareParameters(ctx context.Context) ([]model.AwareParameter, error) + ListAwarePlatformParameterConfig(ctx context.Context) ([]model.AwarePlatformParameterConfig, error) +} + +type awareParameterService struct { + db *model.Database + *model.Queries +} + +func NewAwareParameterService(db *model.Database, q *model.Queries) *awareParameterService { + return &awareParameterService{db, q} +} + +// ListAwarePlatformParameterConfig returns aware platform parameter configs +func (s awareParameterService) ListAwarePlatformParameterConfig(ctx context.Context) ([]model.AwarePlatformParameterConfig, error) { + aa := make([]model.AwarePlatformParameterConfig, 0) + ee, err := s.ListAwarePlatformParameterEnabled(ctx) + if err != nil { + return aa, err + } + // reorganize aware_parameter_key, timeseries_id into map for each instrument + // Map of aware parameters to timeseries + m1 := make(map[uuid.UUID]model.AwarePlatformParameterConfig) + for _, e := range ee { + if _, ok := m1[e.InstrumentID]; !ok { + m1[e.InstrumentID] = model.AwarePlatformParameterConfig{ + InstrumentID: e.InstrumentID, + AwareID: e.AwareID, + AwareParameters: make(map[string]*uuid.UUID), + } + } + m1[e.InstrumentID].AwareParameters[e.AwareParameterKey] = e.TimeseriesID + } + + for k := range m1 { + aa = append(aa, m1[k]) + } + return aa, nil +} diff --git a/api/internal/servicev2/collection_group.go b/api/internal/servicev2/collection_group.go new file mode 100644 index 00000000..8023900d --- /dev/null +++ b/api/internal/servicev2/collection_group.go @@ -0,0 +1,56 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type CollectionGroupService interface { + ListCollectionGroups(ctx context.Context, projectID uuid.UUID) 
([]model.CollectionGroup, error) + GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (model.CollectionGroupDetails, error) + CreateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) + UpdateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) + DeleteCollectionGroup(ctx context.Context, projectID, collectionGroupID uuid.UUID) error + AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error + RemoveTimeseriesFromCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error +} + +type collectionGroupService struct { + db *model.Database + *model.Queries +} + +func NewCollectionGroupService(db *model.Database, q *model.Queries) *collectionGroupService { + return &collectionGroupService{db, q} +} + +// GetCollectionGroupDetails returns details for a single CollectionGroup +func (s collectionGroupService) GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (model.CollectionGroupDetails, error) { + var a model.CollectionGroupDetails + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + cg, err := qtx.GetCollectionGroupDetails(ctx, projectID, collectionGroupID) + if err != nil { + return a, err + } + ts, err := qtx.GetCollectionGroupDetailsTimeseries(ctx, projectID, collectionGroupID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + cg.Timeseries = ts + + return cg, nil +} diff --git a/api/internal/servicev2/datalogger.go b/api/internal/servicev2/datalogger.go new file mode 100644 index 00000000..4f59ff2d --- /dev/null +++ b/api/internal/servicev2/datalogger.go @@ -0,0 +1,158 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type 
DataloggerService interface { + GetDataloggerModelName(ctx context.Context, modelID uuid.UUID) (string, error) + ListProjectDataloggers(ctx context.Context, projectID uuid.UUID) ([]model.Datalogger, error) + ListAllDataloggers(ctx context.Context) ([]model.Datalogger, error) + GetDataloggerIsActive(ctx context.Context, modelName, sn string) (bool, error) + VerifyDataloggerExists(ctx context.Context, dlID uuid.UUID) error + CreateDatalogger(ctx context.Context, n model.Datalogger) (model.DataloggerWithKey, error) + CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) + GetOneDatalogger(ctx context.Context, dataloggerID uuid.UUID) (model.Datalogger, error) + UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) + DeleteDatalogger(ctx context.Context, d model.Datalogger) error + GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (model.DataloggerTablePreview, error) + ResetDataloggerTableName(ctx context.Context, dataloggerTableID uuid.UUID) error + GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) + DeleteDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error +} + +type dataloggerService struct { + db *model.Database + *model.Queries +} + +func NewDataloggerService(db *model.Database, q *model.Queries) *dataloggerService { + return &dataloggerService{db, q} +} + +func (s dataloggerService) CreateDatalogger(ctx context.Context, n model.Datalogger) (model.DataloggerWithKey, error) { + var a model.DataloggerWithKey + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + dataloggerID, err := qtx.CreateDatalogger(ctx, n) + if err != nil { + return a, err + } + + key, err := qtx.CreateDataloggerHash(ctx, dataloggerID) + if err != nil { + return a, err + } + + dl, err := qtx.GetOneDatalogger(ctx, dataloggerID) + if err != nil { 
+ return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + dk := model.DataloggerWithKey{ + Datalogger: dl, + Key: key, + } + + return dk, nil +} + +func (s dataloggerService) CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) { + var a model.DataloggerWithKey + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + key, err := qtx.UpdateDataloggerHash(ctx, u.ID) + if err != nil { + return a, err + } + + if err := qtx.UpdateDataloggerUpdater(ctx, u); err != nil { + return a, err + } + + dl, err := qtx.GetOneDatalogger(ctx, u.ID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + dk := model.DataloggerWithKey{ + Datalogger: dl, + Key: key, + } + + return dk, nil +} + +func (s dataloggerService) UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) { + var a model.Datalogger + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdateDatalogger(ctx, u); err != nil { + return a, err + } + + dlUpdated, err := qtx.GetOneDatalogger(ctx, u.ID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + return dlUpdated, nil +} + +func (s dataloggerTelemetryService) GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return uuid.Nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.RenameEmptyDataloggerTableName(ctx, dataloggerID, tableName); err != nil { + return uuid.Nil, err + } + + dataloggerTableID, err := qtx.GetOrCreateDataloggerTable(ctx, dataloggerID, tableName) + if err != nil { + return uuid.Nil, err + } + + if err := tx.Commit(); err != nil { + return 
uuid.Nil, err + } + + return dataloggerTableID, nil +} diff --git a/api/internal/servicev2/datalogger_telemetry.go b/api/internal/servicev2/datalogger_telemetry.go new file mode 100644 index 00000000..28278e69 --- /dev/null +++ b/api/internal/servicev2/datalogger_telemetry.go @@ -0,0 +1,210 @@ +package servicev2 + +import ( + "context" + "database/sql" + "encoding/csv" + "errors" + "fmt" + "io" + "math" + "strconv" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type DataloggerTelemetryService interface { + GetDataloggerByModelSN(ctx context.Context, modelName, sn string) (model.Datalogger, error) + GetDataloggerHashByModelSN(ctx context.Context, modelName, sn string) (string, error) + CreateDataloggerTablePreview(ctx context.Context, prv model.DataloggerTablePreview) error + UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) + UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error + CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error +} + +type dataloggerTelemetryService struct { + db *model.Database + *model.Queries +} + +func NewDataloggerTelemetryService(db *model.Database, q *model.Queries) *dataloggerTelemetryService { + return &dataloggerTelemetryService{db, q} +} + +// UpdateDataloggerTablePreview attempts to update a table preview by datalogger_id and table_name, creates the +// datalogger table and corresponding preview if it doesn't exist +func (s dataloggerTelemetryService) UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return uuid.Nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + // replace empty datalogger table name with 
most recent payload + if err := qtx.RenameEmptyDataloggerTableName(ctx, dataloggerID, tableName); err != nil { + return uuid.Nil, err + } + + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dataloggerID, tableName) + if err != nil { + return uuid.Nil, err + } + if err := qtx.UpdateDataloggerTablePreview(ctx, dataloggerID, tableName, prv); err != nil { + if !errors.Is(err, sql.ErrNoRows) { + return uuid.Nil, err + } + prv.DataloggerTableID = tableID + if err := qtx.CreateDataloggerTablePreview(ctx, prv); err != nil { + } + } + + return tableID, tx.Commit() +} + +func (s dataloggerTelemetryService) UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.DeleteDataloggerTableError(ctx, dataloggerID, tableName); err != nil { + return err + } + + for _, m := range e.Errors { + if err := qtx.CreateDataloggerTableError(ctx, dataloggerID, tableName, m); err != nil { + return err + } + } + + return tx.Commit() +} + +// ParseTOA5 parses a Campbell Scientific TOA5 data file that is simlar to a csv. 
+// The unique properties of TOA5 are that the meatdata are stored in header of file (first 4 lines of csv) +func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + envHeader, err := reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + unitsHeader, err := reader.Read() + if err != nil { + return err + } + processHeader, err := reader.Read() + if err != nil { + return err + } + + meta := model.Environment{ + StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + OSVersion: envHeader[4], + ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + if err != nil { + return err + } + + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + if err != nil { + return err + } + + em := make([]string, 0) + defer func() { + s.UpdateDataloggerTableError(ctx, dl.ID, &meta.TableName, &model.DataloggerError{Errors: em}) + }() + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]model.Field, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = model.Field{ + Name: fieldHeader[i], + Units: unitsHeader[i], + Process: processHeader[i], + } + } + + eqt, err := qtx.GetEquivalencyTable(ctx, tableID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, eqtRow := range eqt.Rows { + fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID + } + + for { + record, err := reader.Read() + if err == io.EOF { + break + } + if err != nil { + return err + } + + t, err := time.Parse(record[0], time.RFC3339) + if err != nil { 
+ return err + } + + for idx, cell := range record[2:] { + fieldName := fields[idx].Name + tsID, ok := fieldNameTimeseriesIDMap[fieldName] + if !ok { + // key error, field_name does not exist for equivalency table + // add error to Measurement payload to report back to user + em = append(em, fmt.Sprintf( + "key error: field_name %s does not exist for equivalency table %s", + fieldName, meta.TableName, + )) + continue + } + + v, err := strconv.ParseFloat(cell, 64) + if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { + // could not parse float + // add error to Measurement payload to report back to user + em = append(em, fmt.Sprintf( + "value error: field_name %s contains invalid value entry at %s", + fieldName, t, + )) + continue + } + + if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, tsID, t, v); err != nil { + return err + } + } + } + + return tx.Commit() +} diff --git a/api/internal/servicev2/db.go b/api/internal/servicev2/db.go new file mode 100644 index 00000000..d486e0c9 --- /dev/null +++ b/api/internal/servicev2/db.go @@ -0,0 +1,27 @@ +package servicev2 + +import ( + "context" + "database/sql" + "errors" + "log" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/jackc/pgx/v5/pgxpool" +) + +type dbStore struct { + db *pgxpool.Pool + *db.Queries +} + +func NewDbStore(db *pgxpool.Pool, q *db.Queries) *dbStore { + return &dbStore{db, q} +} + +func txDo(ctx context.Context, rollback func(ctx context.Context) error) { + err := rollback(ctx) + if err != nil && !errors.Is(err, sql.ErrTxDone) { + log.Print(err.Error()) + } +} diff --git a/api/internal/servicev2/dcsloader.go b/api/internal/servicev2/dcsloader.go new file mode 100644 index 00000000..bcf13b9c --- /dev/null +++ b/api/internal/servicev2/dcsloader.go @@ -0,0 +1,125 @@ +package servicev2 + +import ( + "bytes" + "encoding/csv" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "net/url" + "strconv" + "time" + + "github.com/USACE/instrumentation-api/api/internal/config" + 
"github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type DcsLoaderService interface { + ParseCsvMeasurementCollection(r io.Reader) ([]model.MeasurementCollection, int, error) + PostMeasurementCollectionToApi(mcs []model.MeasurementCollection) error +} + +type dcsLoaderService struct { + apiClient *http.Client + cfg *config.DcsLoaderConfig +} + +func NewDcsLoaderService(apiClient *http.Client, cfg *config.DcsLoaderConfig) *dcsLoaderService { + return &dcsLoaderService{apiClient, cfg} +} + +func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.MeasurementCollection, int, error) { + mcs := make([]model.MeasurementCollection, 0) + mCount := 0 + reader := csv.NewReader(r) + + rows := make([][]string, 0) + for { + row, err := reader.Read() + if err == io.EOF { + break + } + if err != nil { + return mcs, mCount, err + } + rows = append(rows, row) + } + + mcMap := make(map[uuid.UUID]*model.MeasurementCollection) + for _, row := range rows { + // 0=timeseries_id, 1=time, 2=value + tsid, err := uuid.Parse(row[0]) + if err != nil { + return mcs, mCount, err + } + t, err := time.Parse(time.RFC3339, row[1]) + if err != nil { + return mcs, mCount, err + } + v, err := strconv.ParseFloat(row[2], 32) + if err != nil { + return mcs, mCount, err + } + + if _, ok := mcMap[tsid]; !ok { + mcMap[tsid] = &model.MeasurementCollection{ + TimeseriesID: tsid, + Items: make([]model.Measurement, 0), + } + } + mcMap[tsid].Items = append(mcMap[tsid].Items, model.Measurement{TimeseriesID: tsid, Time: t, Value: model.FloatNanInf(v)}) + mCount++ + } + + mcs = make([]model.MeasurementCollection, len(mcMap)) + idx := 0 + for _, v := range mcMap { + mcs[idx] = *v + idx++ + } + + return mcs, mCount, nil +} + +func (s dcsLoaderService) PostMeasurementCollectionToApi(mcs []model.MeasurementCollection) error { + requestBodyBytes, err := json.Marshal(mcs) + if err != nil { + return err + 
} + + req, err := http.NewRequest("POST", fmt.Sprintf("%s?key=%s", s.cfg.PostURL, s.cfg.APIKey), bytes.NewReader(requestBodyBytes)) + if err != nil { + return err + } + defer req.Body.Close() + + req.Header.Add("Content-Type", "application/json") + + resp, err := s.apiClient.Do(req) + if err != nil { + urlErr := err.(*url.Error) + urlRedact := util.RedactRequest{URL: urlErr.URL} + if err := urlRedact.RedactQueryParam("key"); err != nil { + return err + } + urlErr.URL = urlRedact.URL + log.Printf("\n\t*** Error; unable to make request; %s", urlErr.Error()) + return urlErr + } + defer resp.Body.Close() + + if resp.StatusCode != 201 { + log.Printf("\n\t*** Error; Status Code: %d ***\n", resp.StatusCode) + body, err := io.ReadAll(resp.Body) + if err != nil { + log.Println("Error reading response body") + return err + } + log.Printf("%s\n", body) + } + return nil +} diff --git a/api/internal/servicev2/district_rollup.go b/api/internal/servicev2/district_rollup.go new file mode 100644 index 00000000..e78ef5e8 --- /dev/null +++ b/api/internal/servicev2/district_rollup.go @@ -0,0 +1,22 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type DistrictRollupService interface { + ListEvaluationDistrictRollup(ctx context.Context, opID uuid.UUID, tw model.TimeWindow) ([]model.DistrictRollup, error) + ListMeasurementDistrictRollup(ctx context.Context, opID uuid.UUID, tw model.TimeWindow) ([]model.DistrictRollup, error) +} + +type districtRollupService struct { + db *model.Database + *model.Queries +} + +func NewDistrictRollupService(db *model.Database, q *model.Queries) *districtRollupService { + return &districtRollupService{db, q} +} diff --git a/api/internal/servicev2/domain.go b/api/internal/servicev2/domain.go new file mode 100644 index 00000000..91de8966 --- /dev/null +++ b/api/internal/servicev2/domain.go @@ -0,0 +1,21 @@ +package servicev2 + +import ( + "context" + + 
"github.com/USACE/instrumentation-api/api/internal/model" +) + +type DomainService interface { + GetDomains(ctx context.Context) ([]model.Domain, error) + GetDomainMap(ctx context.Context) (model.DomainMap, error) +} + +type domainService struct { + db *model.Database + *model.Queries +} + +func NewDomainService(db *model.Database, q *model.Queries) *domainService { + return &domainService{db, q} +} diff --git a/api/internal/servicev2/equivalency_table.go b/api/internal/servicev2/equivalency_table.go new file mode 100644 index 00000000..2ac6f34b --- /dev/null +++ b/api/internal/servicev2/equivalency_table.go @@ -0,0 +1,90 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type EquivalencyTableService interface { + GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (model.EquivalencyTable, error) + CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) + UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) + DeleteEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) error + DeleteEquivalencyTableRow(ctx context.Context, rowID uuid.UUID) error + GetIsValidDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error +} + +type equivalencyTableService struct { + db *model.Database + *model.Queries +} + +func NewEquivalencyTableService(db *model.Database, q *model.Queries) *equivalencyTableService { + return &equivalencyTableService{db, q} +} + +// CreateEquivalencyTable creates EquivalencyTable rows +// If a row with the given datalogger id or field name already exists the row will be ignored +func (s equivalencyTableService) CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.EquivalencyTable{}, err + } + 
defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + for _, r := range t.Rows { + if r.TimeseriesID != nil { + if err = qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID); err != nil { + return model.EquivalencyTable{}, err + } + } + if err := qtx.CreateOrUpdateEquivalencyTableRow(ctx, t.DataloggerID, t.DataloggerTableID, r); err != nil { + return model.EquivalencyTable{}, err + } + } + + eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) + if err != nil { + return model.EquivalencyTable{}, err + } + + if err := tx.Commit(); err != nil { + return model.EquivalencyTable{}, err + } + + return eqt, nil +} + +// UpdateEquivalencyTable updates rows of an EquivalencyTable +func (s equivalencyTableService) UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.EquivalencyTable{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + for _, r := range t.Rows { + if r.TimeseriesID != nil { + if err = qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID); err != nil { + return model.EquivalencyTable{}, err + } + } + if err := qtx.UpdateEquivalencyTableRow(ctx, r); err != nil { + return model.EquivalencyTable{}, err + } + } + + eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) + + if err := tx.Commit(); err != nil { + return model.EquivalencyTable{}, err + } + + return eqt, nil +} diff --git a/api/internal/servicev2/evaluation.go b/api/internal/servicev2/evaluation.go new file mode 100644 index 00000000..8b0b8473 --- /dev/null +++ b/api/internal/servicev2/evaluation.go @@ -0,0 +1,152 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type EvaluationService interface { + ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]model.Evaluation, error) + ListProjectEvaluationsByAlertConfig(ctx 
context.Context, projectID, alertConfigID uuid.UUID) ([]model.Evaluation, error) + ListInstrumentEvaluations(ctx context.Context, instrumentID uuid.UUID) ([]model.Evaluation, error) + GetEvaluation(ctx context.Context, evaluationID uuid.UUID) (model.Evaluation, error) + RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error + CreateEvaluation(ctx context.Context, ev model.Evaluation) (model.Evaluation, error) + UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (model.Evaluation, error) + DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error +} + +type evaluationService struct { + db *model.Database + *model.Queries +} + +func NewEvaluationService(db *model.Database, q *model.Queries) *evaluationService { + return &evaluationService{db, q} +} + +func (s evaluationService) RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + sub, err := qtx.CompleteEvaluationSubmittal(ctx, subID) + if err != nil { + return err + } + + // Create next submittal if submitted on-time + // late submittals will have already generated next submittal + if sub.SubmittalStatusID == GreenSubmittalStatusID { + if err := qtx.CreateNextEvaluationSubmittal(ctx, subID); err != nil { + return err + } + } + return tx.Commit() +} + +func (s evaluationService) CreateEvaluation(ctx context.Context, ev model.Evaluation) (model.Evaluation, error) { + var a model.Evaluation + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if ev.SubmittalID != nil { + sub, err := qtx.CompleteEvaluationSubmittal(ctx, *ev.SubmittalID) + if err != nil { + return a, err + } + // Create next submittal if submitted on-time + // late submittals will have already generated next submittal + if sub.SubmittalStatusID == GreenSubmittalStatusID 
{ + qtx.CreateNextEvaluationSubmittal(ctx, *ev.SubmittalID) + } + } + + evID, err := qtx.CreateEvaluation(ctx, ev) + if err != nil { + return a, err + } + + for _, aci := range ev.Instruments { + if err := qtx.CreateEvaluationInstrument(ctx, evID, aci.InstrumentID); err != nil { + return a, err + } + } + + evNew, err := qtx.GetEvaluation(ctx, evID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + + return evNew, nil +} + +func (s evaluationService) UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (model.Evaluation, error) { + var a model.Evaluation + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return a, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdateEvaluation(ctx, ev); err != nil { + return a, err + } + + if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, ev.ID); err != nil { + return a, err + } + + for _, aci := range ev.Instruments { + if err := qtx.CreateEvaluationInstrument(ctx, ev.ID, aci.InstrumentID); err != nil { + return a, err + } + } + + evUpdated, err := qtx.GetEvaluation(ctx, ev.ID) + if err != nil { + return a, err + } + + if err := tx.Commit(); err != nil { + return a, err + } + return evUpdated, nil +} + +func (s evaluationService) DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, evaluationID); err != nil { + return err + } + + if err := qtx.DeleteEvaluation(ctx, evaluationID); err != nil { + return err + } + + return nil +} diff --git a/api/internal/servicev2/heartbeat.go b/api/internal/servicev2/heartbeat.go new file mode 100644 index 00000000..9df692b7 --- /dev/null +++ b/api/internal/servicev2/heartbeat.go @@ -0,0 +1,22 @@ +package servicev2 + +import ( + "context" + + 
	"github.com/USACE/instrumentation-api/api/internal/model"
)

// HeartbeatService records and reads service heartbeats used to verify the
// application (and its timer jobs) are alive.
type HeartbeatService interface {
	DoHeartbeat(ctx context.Context) (model.Heartbeat, error)
	GetLatestHeartbeat(ctx context.Context) (model.Heartbeat, error)
	ListHeartbeats(ctx context.Context) ([]model.Heartbeat, error)
}

// heartbeatService implements HeartbeatService; all methods come from the
// embedded *model.Queries, db is retained for future transactional use.
type heartbeatService struct {
	db *model.Database
	*model.Queries
}

// NewHeartbeatService returns a heartbeatService backed by db and q.
func NewHeartbeatService(db *model.Database, q *model.Queries) *heartbeatService {
	return &heartbeatService{db, q}
}

package servicev2

import (
	"context"

	"github.com/USACE/instrumentation-api/api/internal/model"
)

// HomeService serves the aggregate metrics shown on the application home page.
type HomeService interface {
	GetHome(ctx context.Context) (model.Home, error)
}

// homeService implements HomeService; all methods come from the embedded
// *model.Queries, db is retained for future transactional use.
type homeService struct {
	db *model.Database
	*model.Queries
}

// NewHomeService returns a homeService backed by db and q.
func NewHomeService(db *model.Database, q *model.Queries) *homeService {
	return &homeService{db, q}
}

package servicev2

import (
	"context"

	"github.com/USACE/instrumentation-api/api/internal/model"
	"github.com/google/uuid"
	"github.com/paulmach/orb/geojson"
)

// InstrumentService provides CRUD operations for instruments.
type InstrumentService interface {
	ListInstruments(ctx context.Context) ([]model.Instrument, error)
	GetInstrument(ctx context.Context, instrumentID uuid.UUID) (model.Instrument, error)
	GetInstrumentCount(ctx context.Context) (model.InstrumentCount, error)
	CreateInstrument(ctx context.Context, i model.Instrument) (model.IDSlugName, error)
	CreateInstruments(ctx context.Context, instruments []model.Instrument) ([]model.IDSlugName, error)
	UpdateInstrument(ctx context.Context, projectID uuid.UUID, i model.Instrument) (model.Instrument, error)
UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p model.Profile) (model.Instrument, error) + DeleteFlagInstrument(ctx context.Context, projectID, instrumentID uuid.UUID) error +} + +type instrumentService struct { + db *model.Database + *model.Queries +} + +func NewInstrumentService(db *model.Database, q *model.Queries) *instrumentService { + return &instrumentService{db, q} +} + +var ( + saaTypeID = uuid.MustParse("07b91c5c-c1c5-428d-8bb9-e4c93ab2b9b9") + ipiTypeID = uuid.MustParse("c81f3a5d-fc5f-47fd-b545-401fe6ee63bb") +) + +type requestType int + +const ( + create requestType = iota + update +) + +func createInstrument(ctx context.Context, q *model.Queries, instrument model.Instrument) (model.IDSlugName, error) { + newInstrument, err := q.CreateInstrument(ctx, instrument) + if err != nil { + return model.IDSlugName{}, err + } + for _, prj := range instrument.Projects { + if err := q.AssignInstrumentToProject(ctx, prj.ID, newInstrument.ID); err != nil { + return model.IDSlugName{}, err + } + } + if err := q.CreateOrUpdateInstrumentStatus(ctx, newInstrument.ID, instrument.StatusID, instrument.StatusTime); err != nil { + return model.IDSlugName{}, err + } + if instrument.AwareID != nil { + if err := q.CreateAwarePlatform(ctx, newInstrument.ID, *instrument.AwareID); err != nil { + return model.IDSlugName{}, err + } + } + instrument.ID = newInstrument.ID + if err := handleOpts(ctx, q, instrument, create); err != nil { + return model.IDSlugName{}, err + } + return newInstrument, nil +} + +func (s instrumentService) CreateInstrument(ctx context.Context, instrument model.Instrument) (model.IDSlugName, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.IDSlugName{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + newInstrument, err := createInstrument(ctx, qtx, instrument) + if err != nil { + return model.IDSlugName{}, err + } + + if err := tx.Commit(); err != nil { + 
return model.IDSlugName{}, err + } + return newInstrument, nil +} + +func (s instrumentService) CreateInstruments(ctx context.Context, instruments []model.Instrument) ([]model.IDSlugName, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + ii := make([]model.IDSlugName, len(instruments)) + for idx, i := range instruments { + newInstrument, err := createInstrument(ctx, qtx, i) + if err != nil { + return nil, err + } + ii[idx] = newInstrument + } + if err := tx.Commit(); err != nil { + return nil, err + } + return ii, nil +} + +// UpdateInstrument updates a single instrument +func (s instrumentService) UpdateInstrument(ctx context.Context, projectID uuid.UUID, i model.Instrument) (model.Instrument, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.Instrument{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdateInstrument(ctx, projectID, i); err != nil { + return model.Instrument{}, err + } + if err := qtx.CreateOrUpdateInstrumentStatus(ctx, i.ID, i.StatusID, i.StatusTime); err != nil { + return model.Instrument{}, err + } + + if err := handleOpts(ctx, qtx, i, update); err != nil { + return model.Instrument{}, err + } + + aa, err := qtx.GetInstrument(ctx, i.ID) + if err != nil { + return model.Instrument{}, err + } + + if err := tx.Commit(); err != nil { + return model.Instrument{}, err + } + + return aa, nil +} + +func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p model.Profile) (model.Instrument, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.Instrument{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdateInstrumentGeometry(ctx, projectID, instrumentID, geom, p); err != nil { + return model.Instrument{}, err + } + + aa, err := qtx.GetInstrument(ctx, 
instrumentID) + if err != nil { + return model.Instrument{}, err + } + + if err := tx.Commit(); err != nil { + return model.Instrument{}, err + } + + return aa, nil +} diff --git a/api/internal/servicev2/instrument_assign.go b/api/internal/servicev2/instrument_assign.go new file mode 100644 index 00000000..d169b02e --- /dev/null +++ b/api/internal/servicev2/instrument_assign.go @@ -0,0 +1,183 @@ +package servicev2 + +import ( + "context" + "fmt" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type InstrumentAssignService interface { + AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) + UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) + AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) + UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) + ValidateInstrumentNamesProjectUnique(ctx context.Context, projectID uuid.UUID, instrumentNames []string) (model.InstrumentsValidation, error) + ValidateProjectsInstrumentNameUnique(ctx context.Context, instrumentName string, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) +} + +type instrumentAssignService struct { + db *model.Database + *model.Queries +} + +func NewInstrumentAssignService(db *model.Database, q *model.Queries) *instrumentAssignService { + return &instrumentAssignService{db, q} +} + +func validateAssignProjectsToInstrument(ctx context.Context, q *model.Queries, profileID uuid.UUID, instrument model.Instrument, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { + v, err := q.ValidateProjectsAssignerAuthorized(ctx, 
profileID, instrument.ID, projectIDs) + if err != nil || !v.IsValid { + return v, err + } + return q.ValidateProjectsInstrumentNameUnique(ctx, instrument.Name, projectIDs) +} + +func validateAssignInstrumentsToProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { + iIDNames, err := q.ListInstrumentIDNamesByIDs(ctx, instrumentIDs) + if err != nil { + return model.InstrumentsValidation{}, err + } + iIDs := make([]uuid.UUID, len(iIDNames)) + iNames := make([]string, len(iIDNames)) + for idx := range iIDNames { + iIDs[idx] = iIDNames[idx].ID + iNames[idx] = iIDNames[idx].Name + } + v, err := q.ValidateInstrumentsAssignerAuthorized(ctx, profileID, iIDs) + if err != nil || !v.IsValid { + return v, err + } + return q.ValidateInstrumentNamesProjectUnique(ctx, projectID, iNames) +} + +func assignProjectsToInstrument(ctx context.Context, q *model.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { + instrument, err := q.GetInstrument(ctx, instrumentID) + if err != nil { + return model.InstrumentsValidation{}, err + } + v, err := validateAssignProjectsToInstrument(ctx, q, profileID, instrument, projectIDs) + if err != nil || !v.IsValid { + return v, err + } + for _, pID := range projectIDs { + if err := q.AssignInstrumentToProject(ctx, pID, instrumentID); err != nil { + return model.InstrumentsValidation{}, err + } + } + return v, nil +} + +func unassignProjectsFromInstrument(ctx context.Context, q *model.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { + v, err := q.ValidateProjectsAssignerAuthorized(ctx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid { + return v, err + } + for _, pID := range projectIDs { + if err := q.UnassignInstrumentFromProject(ctx, pID, instrumentID); err != nil { + return v, err + } + } + return v, nil +} + +func 
assignInstrumentsToProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { + v, err := validateAssignInstrumentsToProject(ctx, q, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid { + return v, err + } + for _, iID := range instrumentIDs { + if err := q.AssignInstrumentToProject(ctx, projectID, iID); err != nil { + return v, err + } + } + return v, nil +} + +func unassignInstrumentsFromProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { + v, err := q.ValidateInstrumentsAssignerAuthorized(ctx, profileID, instrumentIDs) + if err != nil || !v.IsValid { + return v, err + } + cc, err := q.GetProjectCountForInstruments(ctx, instrumentIDs) + if err != nil { + return model.InstrumentsValidation{}, err + } + + for _, count := range cc { + if count.ProjectCount < 1 { + // invalid instrument, skipping + continue + } + if count.ProjectCount == 1 { + v.IsValid = false + v.ReasonCode = model.InvalidUnassign + v.Errors = append(v.Errors, fmt.Sprintf("cannot unassign instruments from project, all instruments must have at least one project assinment (%s is only assign to this project)", count.InstrumentName)) + } + if err := q.UnassignInstrumentFromProject(ctx, projectID, count.InstrumentID); err != nil { + return v, err + } + } + return v, nil +} + +func (s instrumentAssignService) AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.InstrumentsValidation{}, err + } + defer model.TxDo(tx.Rollback) + qtx := s.WithTx(tx) + + v, err := assignProjectsToInstrument(ctx, qtx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid || dryRun { + return v, err + } + return v, tx.Commit() +} + +func (s 
instrumentAssignService) UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.InstrumentsValidation{}, err + } + defer model.TxDo(tx.Rollback) + qtx := s.WithTx(tx) + + v, err := unassignProjectsFromInstrument(ctx, qtx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid || dryRun { + return v, err + } + return v, tx.Commit() +} + +func (s instrumentAssignService) AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.InstrumentsValidation{}, err + } + defer model.TxDo(tx.Rollback) + qtx := s.WithTx(tx) + + v, err := assignInstrumentsToProject(ctx, qtx, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid || dryRun { + return v, err + } + return v, tx.Commit() +} + +func (s instrumentAssignService) UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.InstrumentsValidation{}, err + } + defer model.TxDo(tx.Rollback) + qtx := s.WithTx(tx) + + v, err := unassignInstrumentsFromProject(ctx, qtx, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid || dryRun { + return v, err + } + return v, tx.Commit() +} diff --git a/api/internal/servicev2/instrument_constant.go b/api/internal/servicev2/instrument_constant.go new file mode 100644 index 00000000..ddf8180e --- /dev/null +++ b/api/internal/servicev2/instrument_constant.go @@ -0,0 +1,74 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type InstrumentConstantService 
interface { + ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]model.Timeseries, error) + CreateInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error + CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) + DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error +} + +type instrumentConstantService struct { + db *model.Database + *model.Queries +} + +func NewInstrumentConstantService(db *model.Database, q *model.Queries) *instrumentConstantService { + return &instrumentConstantService{db, q} +} + +// CreateInstrumentConstants creates many instrument constants from an array of instrument constants +// An InstrumentConstant is structurally the same as a timeseries and saved in the same tables +func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + uu := make([]model.Timeseries, len(tt)) + for idx, t := range tt { + t.Type = model.ConstantTimeseriesType + tsNew, err := qtx.CreateTimeseries(ctx, t) + if err != nil { + return nil, err + } + if err := qtx.CreateInstrumentConstant(ctx, tsNew.InstrumentID, tsNew.ID); err != nil { + return nil, err + } + uu[idx] = tsNew + } + if err := tx.Commit(); err != nil { + return nil, err + } + return uu, nil +} + +// DeleteInstrumentConstant removes a timeseries as an Instrument Constant; Does not delete underlying timeseries +func (s instrumentConstantService) DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.DeleteInstrumentConstant(ctx, instrumentID, timeseriesID); err != nil { + return err + } + + if 
err := qtx.DeleteTimeseries(ctx, timeseriesID); err != nil { + return err + } + + return tx.Commit() +} diff --git a/api/internal/servicev2/instrument_group.go b/api/internal/servicev2/instrument_group.go new file mode 100644 index 00000000..ea4ee34e --- /dev/null +++ b/api/internal/servicev2/instrument_group.go @@ -0,0 +1,54 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type InstrumentGroupService interface { + ListInstrumentGroups(ctx context.Context) ([]model.InstrumentGroup, error) + GetInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) (model.InstrumentGroup, error) + CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]model.InstrumentGroup, error) + UpdateInstrumentGroup(ctx context.Context, group model.InstrumentGroup) (model.InstrumentGroup, error) + DeleteFlagInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) error + ListInstrumentGroupInstruments(ctx context.Context, groupID uuid.UUID) ([]model.Instrument, error) + CreateInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error + DeleteInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error +} + +type instrumentGroupService struct { + db *model.Database + *model.Queries +} + +func NewInstrumentGroupService(db *model.Database, q *model.Queries) *instrumentGroupService { + return &instrumentGroupService{db, q} +} + +// CreateInstrumentGroup creates many instruments from an array of instruments +func (s instrumentGroupService) CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]model.InstrumentGroup, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + q := s.WithTx(tx) + + gg := make([]model.InstrumentGroup, len(groups)) + for idx, g := range groups { + gNew, err := 
q.CreateInstrumentGroup(ctx, g) + if err != nil { + return nil, err + } + gg[idx] = gNew + } + + if err := tx.Commit(); err != nil { + return nil, err + } + + return gg, nil +} diff --git a/api/internal/servicev2/instrument_ipi.go b/api/internal/servicev2/instrument_ipi.go new file mode 100644 index 00000000..7d2169ab --- /dev/null +++ b/api/internal/servicev2/instrument_ipi.go @@ -0,0 +1,48 @@ +package servicev2 + +import ( + "context" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type IpiInstrumentService interface { + GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.IpiSegment, error) + UpdateIpiSegment(ctx context.Context, seg model.IpiSegment) error + UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error + GetIpiMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.IpiMeasurements, error) +} + +type ipiInstrumentService struct { + db *model.Database + *model.Queries +} + +func NewIpiInstrumentService(db *model.Database, q *model.Queries) *ipiInstrumentService { + return &ipiInstrumentService{db, q} +} + +func (s ipiInstrumentService) UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + for _, seg := range segs { + if err := qtx.UpdateIpiSegment(ctx, seg); err != nil { + return err + } + if seg.Length == nil { + continue + } + if err := qtx.CreateTimeseriesMeasurement(ctx, seg.LengthTimeseriesID, time.Now(), *seg.Length); err != nil { + return err + } + } + return tx.Commit() +} diff --git a/api/internal/servicev2/instrument_note.go b/api/internal/servicev2/instrument_note.go new file mode 100644 index 00000000..5d607fda --- /dev/null +++ b/api/internal/servicev2/instrument_note.go @@ -0,0 +1,52 @@ +package servicev2 + +import ( + "context" + + 
"github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type InstrumentNoteService interface { + ListInstrumentNotes(ctx context.Context) ([]model.InstrumentNote, error) + ListInstrumentInstrumentNotes(ctx context.Context, instrumentID uuid.UUID) ([]model.InstrumentNote, error) + GetInstrumentNote(ctx context.Context, noteID uuid.UUID) (model.InstrumentNote, error) + CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]model.InstrumentNote, error) + UpdateInstrumentNote(ctx context.Context, n model.InstrumentNote) (model.InstrumentNote, error) + DeleteInstrumentNote(ctx context.Context, noteID uuid.UUID) error +} + +type instrumentNoteService struct { + db *model.Database + *model.Queries +} + +func NewInstrumentNoteService(db *model.Database, q *model.Queries) *instrumentNoteService { + return &instrumentNoteService{db, q} +} + +// CreateInstrumentNote creates many instrument notes from an array of instrument notes +func (s instrumentNoteService) CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]model.InstrumentNote, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + nn := make([]model.InstrumentNote, len(notes)) + for idx, n := range notes { + noteNew, err := qtx.CreateInstrumentNote(ctx, n) + if err != nil { + return nil, err + } + nn[idx] = noteNew + } + + if err := tx.Commit(); err != nil { + return nil, err + } + + return nn, nil +} diff --git a/api/internal/servicev2/instrument_opts.go b/api/internal/servicev2/instrument_opts.go new file mode 100644 index 00000000..adca2fdf --- /dev/null +++ b/api/internal/servicev2/instrument_opts.go @@ -0,0 +1,130 @@ +package servicev2 + +import ( + "context" + "fmt" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" +) + +func handleOpts(ctx context.Context, q *model.Queries, inst model.Instrument, rt requestType) error { + 
switch inst.TypeID { + case saaTypeID: + opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) + if err != nil { + return err + } + if rt == create { + for i := 1; i <= opts.NumSegments; i++ { + tsConstant := model.Timeseries{ + InstrumentID: inst.ID, + ParameterID: model.SaaParameterID, + UnitID: model.FeetUnitID, + } + tsConstant.Slug = inst.Slug + fmt.Sprintf("segment-%d-length", i) + tsConstant.Name = inst.Slug + fmt.Sprintf("segment-%d-length", i) + + tsConstant.Type = model.ConstantTimeseriesType + tsNew, err := q.CreateTimeseries(ctx, tsConstant) + if err != nil { + return err + } + if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { + return err + } + if err := q.CreateSaaSegment(ctx, model.SaaSegment{ID: i, InstrumentID: inst.ID, LengthTimeseriesID: tsNew.ID}); err != nil { + return err + } + } + + tsConstant := model.Timeseries{ + InstrumentID: inst.ID, + ParameterID: model.SaaParameterID, + UnitID: model.FeetUnitID, + } + tsConstant.Slug = inst.Slug + "-bottom-elevation" + tsConstant.Name = inst.Slug + "-bottom-elevation" + + tsConstant.Type = model.ConstantTimeseriesType + tsNew, err := q.CreateTimeseries(ctx, tsConstant) + if err != nil { + return err + } + if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { + return err + } + opts.BottomElevationTimeseriesID = tsNew.ID + if err := q.CreateSaaOpts(ctx, inst.ID, opts); err != nil { + return err + } + } + if rt == update { + if err := q.UpdateSaaOpts(ctx, inst.ID, opts); err != nil { + return err + } + } + if err := q.CreateTimeseriesMeasurement(ctx, opts.BottomElevationTimeseriesID, time.Now(), opts.BottomElevation); err != nil { + return err + } + case ipiTypeID: + opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) + if err != nil { + return err + } + if rt == create { + for i := 1; i <= opts.NumSegments; i++ { + tsConstant := model.Timeseries{ + InstrumentID: inst.ID, + ParameterID: model.IpiParameterID, + UnitID: model.FeetUnitID, + } + 
tsConstant.Slug = inst.Slug + fmt.Sprintf("segment-%d-length", i) + tsConstant.Name = inst.Slug + fmt.Sprintf("segment-%d-length", i) + + tsConstant.Type = model.ConstantTimeseriesType + tsNew, err := q.CreateTimeseries(ctx, tsConstant) + if err != nil { + return err + } + if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { + return err + } + if err := q.CreateIpiSegment(ctx, model.IpiSegment{ID: i, InstrumentID: inst.ID, LengthTimeseriesID: tsNew.ID}); err != nil { + return err + } + } + + tsConstant := model.Timeseries{ + InstrumentID: inst.ID, + ParameterID: model.IpiParameterID, + UnitID: model.FeetUnitID, + } + tsConstant.Slug = inst.Slug + "-bottom-elevation" + tsConstant.Name = inst.Slug + "-bottom-elevation" + + tsConstant.Type = model.ConstantTimeseriesType + tsNew, err := q.CreateTimeseries(ctx, tsConstant) + if err != nil { + return err + } + if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { + return err + } + opts.BottomElevationTimeseriesID = tsNew.ID + if err := q.CreateIpiOpts(ctx, inst.ID, opts); err != nil { + return err + } + } + if rt == update { + if err := q.UpdateIpiOpts(ctx, inst.ID, opts); err != nil { + return err + } + } + if err := q.CreateTimeseriesMeasurement(ctx, opts.BottomElevationTimeseriesID, time.Now(), opts.BottomElevation); err != nil { + return err + } + default: + } + return nil +} diff --git a/api/internal/servicev2/instrument_saa.go b/api/internal/servicev2/instrument_saa.go new file mode 100644 index 00000000..9983bf56 --- /dev/null +++ b/api/internal/servicev2/instrument_saa.go @@ -0,0 +1,48 @@ +package servicev2 + +import ( + "context" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type SaaInstrumentService interface { + GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.SaaSegment, error) + UpdateSaaSegment(ctx context.Context, seg model.SaaSegment) error + UpdateSaaSegments(ctx 
context.Context, segs []model.SaaSegment) error + GetSaaMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.SaaMeasurements, error) +} + +type saaInstrumentService struct { + db *model.Database + *model.Queries +} + +func NewSaaInstrumentService(db *model.Database, q *model.Queries) *saaInstrumentService { + return &saaInstrumentService{db, q} +} + +func (s saaInstrumentService) UpdateSaaSegments(ctx context.Context, segs []model.SaaSegment) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + for _, seg := range segs { + if err := qtx.UpdateSaaSegment(ctx, seg); err != nil { + return err + } + if seg.Length == nil { + continue + } + if err := qtx.CreateTimeseriesMeasurement(ctx, seg.LengthTimeseriesID, time.Now(), *seg.Length); err != nil { + return err + } + } + return tx.Commit() +} diff --git a/api/internal/servicev2/instrument_status.go b/api/internal/servicev2/instrument_status.go new file mode 100644 index 00000000..d3396146 --- /dev/null +++ b/api/internal/servicev2/instrument_status.go @@ -0,0 +1,42 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type InstrumentStatusService interface { + ListInstrumentStatus(ctx context.Context, instrumentID uuid.UUID) ([]model.InstrumentStatus, error) + GetInstrumentStatus(ctx context.Context, statusID uuid.UUID) (model.InstrumentStatus, error) + CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error + DeleteInstrumentStatus(ctx context.Context, statusID uuid.UUID) error +} + +type instrumentStatusService struct { + db *model.Database + *model.Queries +} + +func NewInstrumentStatusService(db *model.Database, q *model.Queries) *instrumentStatusService { + return &instrumentStatusService{db, q} +} + +func (s instrumentStatusService) 
CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + for _, updateStatus := range ss { + if err := qtx.CreateOrUpdateInstrumentStatus(ctx, instrumentID, updateStatus.StatusID, updateStatus.Time); err != nil { + return err + } + } + + return tx.Commit() +} diff --git a/api/internal/servicev2/measurement.go b/api/internal/servicev2/measurement.go new file mode 100644 index 00000000..05124fdc --- /dev/null +++ b/api/internal/servicev2/measurement.go @@ -0,0 +1,125 @@ +package servicev2 + +import ( + "context" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type MeasurementService interface { + ListTimeseriesMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw model.TimeWindow, threshold int) (*model.MeasurementCollection, error) + DeleteTimeserieMeasurements(ctx context.Context, timeseriesID uuid.UUID, t time.Time) error + GetTimeseriesConstantMeasurement(ctx context.Context, timeseriesID uuid.UUID, constantName string) (model.Measurement, error) + CreateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error + CreateOrUpdateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error + CreateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n model.TimeseriesNote) error + CreateOrUpdateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n model.TimeseriesNote) error + CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) + CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) + UpdateTimeseriesMeasurements(ctx context.Context, mc 
[]model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) + DeleteTimeseriesMeasurementsByRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error + DeleteTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error +} + +type measurementService struct { + db *model.Database + *model.Queries +} + +func NewMeasurementService(db *model.Database, q *model.Queries) *measurementService { + return &measurementService{db, q} +} + +type mmtCbk func(context.Context, uuid.UUID, time.Time, float64) error +type noteCbk func(context.Context, uuid.UUID, time.Time, model.TimeseriesNote) error + +func createMeasurements(ctx context.Context, mc []model.MeasurementCollection, mmtFn mmtCbk, noteFn noteCbk) error { + for _, c := range mc { + for _, m := range c.Items { + if err := mmtFn(ctx, c.TimeseriesID, m.Time, float64(m.Value)); err != nil { + return err + } + if m.Masked != nil || m.Validated != nil || m.Annotation != nil { + if err := noteFn(ctx, c.TimeseriesID, m.Time, m.TimeseriesNote); err != nil { + return err + } + } + } + } + return nil +} + +// CreateTimeseriesMeasurements creates many timeseries from an array of timeseries +func (s measurementService) CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := createMeasurements(ctx, mc, qtx.CreateTimeseriesMeasurement, qtx.CreateTimeseriesNote); err != nil { + return nil, err + } + + if err := tx.Commit(); err != nil { + return nil, err + } + + return mc, nil +} + +// CreateOrUpdateTimeseriesMeasurements creates many timeseries from an array of timeseries +// If a timeseries measurement already exists for a given timeseries_id and time, the value is updated +func (s measurementService) CreateOrUpdateTimeseriesMeasurements(ctx 
context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := createMeasurements(ctx, mc, qtx.CreateOrUpdateTimeseriesMeasurement, qtx.CreateOrUpdateTimeseriesNote); err != nil { + return nil, err + } + + if err := tx.Commit(); err != nil { + return nil, err + } + + return mc, nil +} + +// UpdateTimeseriesMeasurements updates many timeseries measurements, "overwriting" time and values to match paylaod +func (s measurementService) UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + for _, c := range mc { + if err := qtx.DeleteTimeseriesMeasurementsByRange(ctx, c.TimeseriesID, tw.After, tw.Before); err != nil { + return nil, err + } + if err := qtx.DeleteTimeseriesNote(ctx, c.TimeseriesID, tw.After, tw.Before); err != nil { + return nil, err + } + } + + if err := createMeasurements(ctx, mc, qtx.CreateTimeseriesMeasurement, qtx.CreateTimeseriesNote); err != nil { + return nil, err + } + + if err := tx.Commit(); err != nil { + return nil, err + } + + return mc, nil +} diff --git a/api/internal/servicev2/measurement_inclinometer.go b/api/internal/servicev2/measurement_inclinometer.go new file mode 100644 index 00000000..ed3e139c --- /dev/null +++ b/api/internal/servicev2/measurement_inclinometer.go @@ -0,0 +1,120 @@ +package servicev2 + +import ( + "context" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type InclinometerMeasurementService interface { + ListInclinometerMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw model.TimeWindow) (*model.InclinometerMeasurementCollection, error) + 
ListInclinometerMeasurementValues(ctx context.Context, timeseriesID uuid.UUID, time time.Time, inclConstant float64) ([]*model.InclinometerMeasurementValues, error) + DeleteInclinometerMeasurement(ctx context.Context, timeseriesID uuid.UUID, time time.Time) error + CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) + ListInstrumentIDsFromTimeseriesID(ctx context.Context, timeseriesID uuid.UUID) ([]uuid.UUID, error) + CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error +} + +type inclinometerMeasurementService struct { + db *model.Database + *model.Queries +} + +func NewInclinometerMeasurementService(db *model.Database, q *model.Queries) *inclinometerMeasurementService { + return &inclinometerMeasurementService{db, q} +} + +// CreateInclinometerMeasurements creates many inclinometer from an array of inclinometer +// If a inclinometer measurement already exists for a given timeseries_id and time, the values is updated +func (s inclinometerMeasurementService) CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + // Iterate All inclinometer Measurements + for idx := range im { + for i := range im[idx].Inclinometers { + im[idx].Inclinometers[i].Creator = p.ID + im[idx].Inclinometers[i].CreateDate = createDate + if err := qtx.CreateOrUpdateInclinometerMeasurement(ctx, im[idx].TimeseriesID, im[idx].Inclinometers[i].Time, im[idx].Inclinometers[i].Values, p.ID, createDate); err != nil { + return nil, err + } + } + } + if err := tx.Commit(); err != nil { + return nil, err + } + + 
return im, nil +} + +// CreateTimeseriesConstant creates timeseries constant +func (s inclinometerMeasurementService) CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + instrumentIDs, err := qtx.ListInstrumentIDsFromTimeseriesID(ctx, timeseriesID) + if err != nil { + return err + } + + parameterIDs, err := qtx.ListParameterIDsFromParameterName(ctx, parameterName) + if err != nil { + return err + } + + unitIDs, err := qtx.ListUnitIDsFromUnitName(ctx, unitName) + if err != nil { + return err + } + + if len(instrumentIDs) > 0 && len(parameterIDs) > 0 && len(unitIDs) > 0 { + t := model.Timeseries{} + measurement := model.Measurement{} + measurements := []model.Measurement{} + mc := model.MeasurementCollection{} + mcs := []model.MeasurementCollection{} + ts := []model.Timeseries{} + + t.InstrumentID = instrumentIDs[0] + t.Slug = parameterName + t.Name = parameterName + t.ParameterID = parameterIDs[0] + t.UnitID = unitIDs[0] + ts = append(ts, t) + + t.Type = model.ConstantTimeseriesType + tsNew, err := qtx.CreateTimeseries(ctx, t) + if err != nil { + return err + } + // Assign timeseries + if err := qtx.CreateInstrumentConstant(ctx, t.InstrumentID, t.ID); err != nil { + return err + } + + measurement.Time = time.Now() + measurement.Value = model.FloatNanInf(value) + measurements = append(measurements, measurement) + mc.TimeseriesID = tsNew.ID + mc.Items = measurements + mcs = append(mcs, mc) + + if err = createMeasurements(ctx, mcs, qtx.CreateOrUpdateTimeseriesMeasurement, qtx.CreateOrUpdateTimeseriesNote); err != nil { + return err + } + } + + return nil +} diff --git a/api/internal/servicev2/opendcs.go b/api/internal/servicev2/opendcs.go new file mode 100644 index 00000000..8361a2dc --- /dev/null +++ b/api/internal/servicev2/opendcs.go @@ -0,0 +1,20 @@ 
+package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" +) + +type OpendcsService interface { + ListOpendcsSites(ctx context.Context) ([]model.Site, error) +} + +type opendcsService struct { + db *model.Database + *model.Queries +} + +func NewOpendcsService(db *model.Database, q *model.Queries) *opendcsService { + return &opendcsService{db, q} +} diff --git a/api/internal/servicev2/plot_config.go b/api/internal/servicev2/plot_config.go new file mode 100644 index 00000000..a244b539 --- /dev/null +++ b/api/internal/servicev2/plot_config.go @@ -0,0 +1,27 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type PlotConfigService interface { + ListPlotConfigs(ctx context.Context, projectID uuid.UUID) ([]model.PlotConfig, error) + GetPlotConfig(ctx context.Context, plotconfigID uuid.UUID) (model.PlotConfig, error) + DeletePlotConfig(ctx context.Context, projectID, plotConfigID uuid.UUID) error + plotConfigBullseyePlotService + plotConfigContourPlotService + plotConfigProfilePlotService + plotConfigScatterLinePlotService +} + +type plotConfigService struct { + db *model.Database + *model.Queries +} + +func NewPlotConfigService(db *model.Database, q *model.Queries) *plotConfigService { + return &plotConfigService{db, q} +} diff --git a/api/internal/servicev2/plot_config_bullseye.go b/api/internal/servicev2/plot_config_bullseye.go new file mode 100644 index 00000000..50fff7ce --- /dev/null +++ b/api/internal/servicev2/plot_config_bullseye.go @@ -0,0 +1,81 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type plotConfigBullseyePlotService interface { + CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) + UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) 
(model.PlotConfig, error) + ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]model.PlotConfigMeasurementBullseyePlot, error) +} + +func (s plotConfigService) CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.PlotConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + pc.PlotType = model.BullseyePlotType + pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + if err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotBullseyeConfig(ctx, pcID, pc.Display); err != nil { + return model.PlotConfig{}, err + } + + pcNew, err := qtx.GetPlotConfig(ctx, pcID) + if err != nil { + return model.PlotConfig{}, err + } + + err = tx.Commit() + + return pcNew, err +} + +func (s plotConfigService) UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.PlotConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.UpdatePlotBullseyeConfig(ctx, pc.ID, pc.Display); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { + return model.PlotConfig{}, err + } + + pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + if err != nil { + return model.PlotConfig{}, err + } + err = tx.Commit() + + return pcNew, err +} diff --git a/api/internal/servicev2/plot_config_contour.go 
b/api/internal/servicev2/plot_config_contour.go new file mode 100644 index 00000000..30779136 --- /dev/null +++ b/api/internal/servicev2/plot_config_contour.go @@ -0,0 +1,123 @@ +package servicev2 + +import ( + "context" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type plotConfigContourPlotService interface { + CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) + UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) + ListPlotConfigTimesContourPlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]time.Time, error) + GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) +} + +func (s plotConfigService) CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.PlotConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + pc.PlotType = model.ContourPlotType + pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + if err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotContourConfig(ctx, pcID, pc.Display); err != nil { + return model.PlotConfig{}, err + } + + for _, tsID := range pc.Display.TimeseriesIDs { + if err := qtx.CreatePlotContourConfigTimeseries(ctx, pcID, tsID); err != nil { + return model.PlotConfig{}, err + } + } + + pcNew, err := qtx.GetPlotConfig(ctx, pcID) + if err != nil { + return model.PlotConfig{}, err + } + + err = tx.Commit() + + return pcNew, err +} + +func (s plotConfigService) UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) 
(model.PlotConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.PlotConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.UpdatePlotContourConfig(ctx, pc.ID, pc.Display); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.DeleteAllPlotContourConfigTimeseries(ctx, pc.ID); err != nil { + return model.PlotConfig{}, err + } + + for _, tsID := range pc.Display.TimeseriesIDs { + if err := qtx.CreatePlotContourConfigTimeseries(ctx, pc.ID, tsID); err != nil { + return model.PlotConfig{}, err + } + } + + pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + if err != nil { + return model.PlotConfig{}, err + } + + err = tx.Commit() + + return pcNew, err +} + +func (s plotConfigService) GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) { + q := s.db.Queries() + + mm, err := q.ListPlotConfigMeasurementsContourPlot(ctx, plotConfigID, t) + if err != nil { + return model.AggregatePlotConfigMeasurementsContourPlot{}, err + } + + am := model.AggregatePlotConfigMeasurementsContourPlot{ + X: make([]float64, len(mm)), + Y: make([]float64, len(mm)), + Z: make([]*float64, len(mm)), + } + + for idx := range mm { + am.X[idx] = mm[idx].X + am.Y[idx] = mm[idx].Y + am.Z[idx] = mm[idx].Z + } + + return am, nil +} diff --git a/api/internal/servicev2/plot_config_profile.go b/api/internal/servicev2/plot_config_profile.go new file mode 100644 index 00000000..cd6adf44 --- /dev/null +++ b/api/internal/servicev2/plot_config_profile.go @@ -0,0 +1,80 @@ +package servicev2 + +import ( + 
"context" + + "github.com/USACE/instrumentation-api/api/internal/model" +) + +type plotConfigProfilePlotService interface { + CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) + UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) +} + +func (s plotConfigService) CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.PlotConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + pc.PlotType = model.ProfilePlotType + pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + if err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotProfileConfig(ctx, pcID, pc.Display); err != nil { + return model.PlotConfig{}, err + } + + pcNew, err := qtx.GetPlotConfig(ctx, pcID) + if err != nil { + return model.PlotConfig{}, err + } + + err = tx.Commit() + + return pcNew, err +} + +func (s plotConfigService) UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.PlotConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.UpdatePlotProfileConfig(ctx, pc.ID, pc.Display); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { + return model.PlotConfig{}, err + } + + pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + if err != nil { + return 
model.PlotConfig{}, err + } + + err = tx.Commit() + + return pcNew, err +} diff --git a/api/internal/servicev2/plot_config_scatter_line.go b/api/internal/servicev2/plot_config_scatter_line.go new file mode 100644 index 00000000..ac983a72 --- /dev/null +++ b/api/internal/servicev2/plot_config_scatter_line.go @@ -0,0 +1,162 @@ +package servicev2 + +import ( + "context" + "fmt" + "log" + "strings" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type plotConfigScatterLinePlotService interface { + CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) + UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) +} + +func (s plotConfigService) CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.PlotConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + pc.PlotType = model.ScatterLinePlotType + pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + if err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { + return model.PlotConfig{}, err + } + + if err := validateCreateTraces(ctx, qtx, pcID, pc.Display.Traces); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigScatterLineLayout(ctx, pcID, pc.Display.Layout); err != nil { + return model.PlotConfig{}, err + } + + if err := validateCreateCustomShapes(ctx, qtx, pcID, pc.Display.Layout.CustomShapes); err != nil { + return model.PlotConfig{}, err + } + pcNew, err := qtx.GetPlotConfig(ctx, pcID) + if err != nil { + return model.PlotConfig{}, err + } + + err = tx.Commit() + + return pcNew, err +} + +func (s plotConfigService) UpdatePlotConfigScatterLinePlot(ctx context.Context, pc 
model.PlotConfigScatterLinePlot) (model.PlotConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.PlotConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { + log.Printf("fails on delete %s", pc.ID) + return model.PlotConfig{}, err + } + + if err := qtx.DeleteAllPlotConfigTimeseriesTraces(ctx, pc.ID); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.DeleteAllPlotConfigCustomShapes(ctx, pc.ID); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { + log.Printf("fails on create %s, %+v", pc.ID, pc.PlotConfigSettings) + return model.PlotConfig{}, err + } + + if err := validateCreateTraces(ctx, qtx, pc.ID, pc.Display.Traces); err != nil { + return model.PlotConfig{}, err + } + + if err := qtx.UpdatePlotConfigScatterLineLayout(ctx, pc.ID, pc.Display.Layout); err != nil { + return model.PlotConfig{}, err + } + + if err := validateCreateCustomShapes(ctx, qtx, pc.ID, pc.Display.Layout.CustomShapes); err != nil { + return model.PlotConfig{}, err + } + + pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + if err != nil { + return model.PlotConfig{}, err + } + + err = tx.Commit() + + return pcNew, err +} + +func validateCreateTraces(ctx context.Context, q *model.Queries, pcID uuid.UUID, trs []model.PlotConfigScatterLineTimeseriesTrace) error { + for _, tr := range trs { + tr.PlotConfigurationID = pcID + + if err := validateColor(tr.Color); err != nil { + return err + } + if tr.LineStyle == "" { + tr.LineStyle = "solid" + } + if tr.YAxis == "" { + tr.YAxis = "y1" + } + + if err := q.CreatePlotConfigTimeseriesTrace(ctx, tr); err != nil { + return err + } + } + return nil +} + +func validateCreateCustomShapes(ctx context.Context, q *model.Queries, pcID 
// validateColor checks that colorHex is a 7-character hex color string of
// the exact form "#RRGGBB" (case-insensitive hex digits, e.g. "#0aF3b9").
// It returns a descriptive error when the input does not match.
func validateColor(colorHex string) error {
	invalidHexErr := fmt.Errorf("invalid hex code format: %s; format must be '#000000'", colorHex)
	// BUG FIX: the original split on "#" and required
	// len(parts[0]) != 1 && len(parts[1]) != 6 (logical AND), which
	// accepted malformed inputs such as "#00000" (five digits) and
	// "x#000000" (leading junk). Require exactly '#' + six hex digits.
	if len(colorHex) != 7 || !strings.HasPrefix(colorHex, "#") {
		return invalidHexErr
	}
	for _, r := range colorHex[1:] {
		if !(r >= '0' && r <= '9' || r >= 'a' && r <= 'f' || r >= 'A' && r <= 'F') {
			return invalidHexErr
		}
	}
	return nil
}
DeleteToken(ctx context.Context, profileID uuid.UUID, tokenID string) error +} + +type profileService struct { + db *model.Database + *model.Queries +} + +func NewProfileService(db *model.Database, q *model.Queries) *profileService { + return &profileService{db, q} +} + +func (s profileService) GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (model.Profile, error) { + var p model.Profile + var err error + if claims.CacUID != nil { + p, err = s.GetProfileWithTokensForEDIPI(ctx, *claims.CacUID) + } else { + p, err = s.GetProfileWithTokensForEmail(ctx, claims.Email) + } + if err != nil { + return model.Profile{}, err + } + return p, nil +} + +func (s profileService) GetProfileWithTokensForEDIPI(ctx context.Context, edipi int) (model.Profile, error) { + p, err := s.GetProfileForEDIPI(ctx, edipi) + if err != nil { + return model.Profile{}, err + } + tokens, err := s.GetIssuedTokens(ctx, p.ID) + if err != nil { + return model.Profile{}, err + } + p.Tokens = tokens + return p, nil +} + +func (s profileService) GetProfileWithTokensForEmail(ctx context.Context, email string) (model.Profile, error) { + p, err := s.GetProfileForEmail(ctx, email) + if err != nil { + return model.Profile{}, err + } + tokens, err := s.GetIssuedTokens(ctx, p.ID) + if err != nil { + return model.Profile{}, err + } + p.Tokens = tokens + return p, nil +} + +func (s profileService) GetProfileWithTokensForUsername(ctx context.Context, username string) (model.Profile, error) { + p, err := s.GetProfileForUsername(ctx, username) + if err != nil { + return model.Profile{}, err + } + tokens, err := s.GetIssuedTokens(ctx, p.ID) + if err != nil { + return model.Profile{}, err + } + p.Tokens = tokens + return p, nil +} + +// GetProfileForTokenID returns a profile given a token ID +func (s profileService) GetProfileWithTokensForTokenID(ctx context.Context, tokenID string) (model.Profile, error) { + p, err := s.GetProfileForTokenID(ctx, tokenID) + if err != nil { + return 
model.Profile{}, err + } + tokens, err := s.GetIssuedTokens(ctx, p.ID) + if err != nil { + return model.Profile{}, err + } + p.Tokens = tokens + return p, nil +} + +// UpdateProfileForClaims syncs a database profile to the provided token claims +// THe order of precence in which the function will attepmt to update profiles is edipi, email, username +func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Profile, claims model.ProfileClaims) (model.Profile, error) { + var claimsMatchProfile bool = p.Username == claims.PreferredUsername && + strings.ToLower(p.Email) == strings.ToLower(claims.Email) && + p.DisplayName == claims.Name + + if claimsMatchProfile { + return p, nil + } + + if claims.CacUID != nil && !claimsMatchProfile { + if err := s.UpdateProfileForEDIPI(ctx, *claims.CacUID, model.ProfileInfo{ + Username: claims.PreferredUsername, + DisplayName: claims.Name, + Email: claims.Email, + }); err != nil { + return p, err + } + p.Username = claims.PreferredUsername + p.DisplayName = claims.Name + p.Email = claims.Email + + return p, nil + } + + if strings.ToLower(p.Email) == strings.ToLower(claims.Email) && !claimsMatchProfile { + if err := s.UpdateProfileForEmail(ctx, claims.Email, model.ProfileInfo{ + Username: claims.PreferredUsername, + DisplayName: claims.Name, + }); err != nil { + return p, err + } + p.Username = claims.PreferredUsername + p.DisplayName = claims.Name + + return p, nil + } + + return p, errors.New("claims did not match profile and could not be updated") +} diff --git a/api/internal/servicev2/project.go b/api/internal/servicev2/project.go new file mode 100644 index 00000000..41e29e6e --- /dev/null +++ b/api/internal/servicev2/project.go @@ -0,0 +1,132 @@ +package servicev2 + +import ( + "context" + "image" + "io" + "mime/multipart" + "os" + + "github.com/USACE/instrumentation-api/api/internal/img" + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type ProjectService interface { + 
SearchProjects(ctx context.Context, searchInput string, limit int) ([]model.SearchResult, error) + ListDistricts(ctx context.Context) ([]model.District, error) + ListProjects(ctx context.Context) ([]model.Project, error) + ListProjectsByFederalID(ctx context.Context, federalID string) ([]model.Project, error) + ListProjectsForProfile(ctx context.Context, profileID uuid.UUID) ([]model.Project, error) + ListProjectsForProfileRole(ctx context.Context, profileID uuid.UUID, role string) ([]model.Project, error) + ListProjectInstruments(ctx context.Context, projectID uuid.UUID) ([]model.Instrument, error) + ListProjectInstrumentGroups(ctx context.Context, projectID uuid.UUID) ([]model.InstrumentGroup, error) + GetProjectCount(ctx context.Context) (model.ProjectCount, error) + GetProject(ctx context.Context, projectID uuid.UUID) (model.Project, error) + CreateProject(ctx context.Context, p model.Project) (model.IDSlugName, error) + CreateProjectBulk(ctx context.Context, projects []model.Project) ([]model.IDSlugName, error) + UpdateProject(ctx context.Context, p model.Project) (model.Project, error) + UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error + DeleteFlagProject(ctx context.Context, projectID uuid.UUID) error +} + +type projectService struct { + db *model.Database + *model.Queries +} + +func NewProjectService(db *model.Database, q *model.Queries) *projectService { + return &projectService{db, q} +} + +type uploader func(ctx context.Context, r io.Reader, rawPath, bucketName string) error + +// CreateProjectBulk creates one or more projects from an array of projects +func (s projectService) CreateProjectBulk(ctx context.Context, projects []model.Project) ([]model.IDSlugName, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + pp := make([]model.IDSlugName, len(projects)) + for idx, p := range projects { + aa, err := 
qtx.CreateProject(ctx, p) + if err != nil { + return nil, err + } + pp[idx] = aa + } + if err := tx.Commit(); err != nil { + return nil, err + } + return pp, nil +} + +// UpdateProject updates a project +func (s projectService) UpdateProject(ctx context.Context, p model.Project) (model.Project, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.Project{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdateProject(ctx, p); err != nil { + return model.Project{}, err + } + + updated, err := qtx.GetProject(ctx, p.ID) + if err != nil { + return model.Project{}, err + } + + if err := tx.Commit(); err != nil { + return model.Project{}, err + } + + return updated, nil +} + +func (s projectService) UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + p, err := qtx.GetProject(ctx, projectID) + if err != nil { + return err + } + + src, err := file.Open() + if err != nil { + return err + } + defer src.Close() + dst, err := os.Create(file.Filename) + if err != nil { + return err + } + defer dst.Close() + + if err := img.Resize(src, dst, image.Rect(0, 0, 480, 480)); err != nil { + return err + } + + if err := qtx.UpdateProjectImage(ctx, file.Filename, projectID); err != nil { + return err + } + + if err := u(ctx, src, "/projects/"+p.Slug+"/"+file.Filename, ""); err != nil { + return err + } + + return tx.Commit() +} diff --git a/api/internal/servicev2/project_role.go b/api/internal/servicev2/project_role.go new file mode 100644 index 00000000..b29dba51 --- /dev/null +++ b/api/internal/servicev2/project_role.go @@ -0,0 +1,53 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type ProjectRoleService interface { + ListProjectMembers(ctx 
context.Context, projectID uuid.UUID) ([]model.ProjectMembership, error) + GetProjectMembership(ctx context.Context, roleID uuid.UUID) (model.ProjectMembership, error) + AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (model.ProjectMembership, error) + RemoveProjectMemberRole(ctx context.Context, projectID, profileID, roleID uuid.UUID) error + IsProjectAdmin(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) + IsProjectMember(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) +} + +type projectRoleService struct { + db *model.Database + *model.Queries +} + +func NewProjectRoleService(db *model.Database, q *model.Queries) *projectRoleService { + return &projectRoleService{db, q} +} + +// AddProjectMemberRole adds a role to a user for a specific project +func (s projectRoleService) AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (model.ProjectMembership, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.ProjectMembership{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + pprID, err := qtx.AddProjectMemberRole(ctx, projectID, profileID, roleID, grantedBy) + if err != nil { + return model.ProjectMembership{}, err + } + + pm, err := qtx.GetProjectMembership(ctx, pprID) + if err != nil { + return model.ProjectMembership{}, err + } + + if err := tx.Commit(); err != nil { + return model.ProjectMembership{}, err + } + + return pm, nil +} diff --git a/api/internal/servicev2/report_config.go b/api/internal/servicev2/report_config.go new file mode 100644 index 00000000..86f42e4c --- /dev/null +++ b/api/internal/servicev2/report_config.go @@ -0,0 +1,143 @@ +package servicev2 + +import ( + "context" + "encoding/json" + + "github.com/USACE/instrumentation-api/api/internal/cloud" + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type ReportConfigService interface { + 
ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]model.ReportConfig, error) + CreateReportConfig(ctx context.Context, rc model.ReportConfig) (model.ReportConfig, error) + UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error + DeleteReportConfig(ctx context.Context, rcID uuid.UUID) error + GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (model.ReportConfigWithPlotConfigs, error) + CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (model.ReportDownloadJob, error) + GetReportDownloadJob(ctx context.Context, jobID, profileID uuid.UUID) (model.ReportDownloadJob, error) + UpdateReportDownloadJob(ctx context.Context, j model.ReportDownloadJob) error +} + +type reportConfigService struct { + db *model.Database + *model.Queries + pubsub cloud.Pubsub + mockQueue bool +} + +func NewReportConfigService(db *model.Database, q *model.Queries, ps cloud.Pubsub, mockQueue bool) *reportConfigService { + return &reportConfigService{db, q, ps, mockQueue} +} + +func (s reportConfigService) CreateReportConfig(ctx context.Context, rc model.ReportConfig) (model.ReportConfig, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.ReportConfig{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + rcID, err := qtx.CreateReportConfig(ctx, rc) + if err != nil { + return model.ReportConfig{}, err + } + + for _, pc := range rc.PlotConfigs { + if err := qtx.AssignReportConfigPlotConfig(ctx, rcID, pc.ID); err != nil { + return model.ReportConfig{}, err + } + } + + rcNew, err := qtx.GetReportConfigByID(ctx, rcID) + if err != nil { + return model.ReportConfig{}, err + } + + if err := tx.Commit(); err != nil { + return model.ReportConfig{}, err + } + return rcNew, nil +} + +func (s reportConfigService) UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer 
model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if err := qtx.UpdateReportConfig(ctx, rc); err != nil { + return err + } + + if err := qtx.UnassignAllReportConfigPlotConfig(ctx, rc.ID); err != nil { + return err + } + + for _, pc := range rc.PlotConfigs { + if err := qtx.AssignReportConfigPlotConfig(ctx, rc.ID, pc.ID); err != nil { + return err + } + } + + return tx.Commit() +} + +func (s reportConfigService) GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (model.ReportConfigWithPlotConfigs, error) { + q := s.db.Queries() + + rc, err := q.GetReportConfigByID(ctx, rcID) + if err != nil { + return model.ReportConfigWithPlotConfigs{}, err + } + pcs, err := q.ListReportConfigPlotConfigs(ctx, rcID) + if err != nil { + return model.ReportConfigWithPlotConfigs{}, err + } + return model.ReportConfigWithPlotConfigs{ + ReportConfig: rc, + PlotConfigs: pcs, + }, nil +} + +func (s reportConfigService) CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (model.ReportDownloadJob, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return model.ReportDownloadJob{}, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + j, err := qtx.CreateReportDownloadJob(ctx, rcID, profileID) + if err != nil { + return model.ReportDownloadJob{}, err + } + + msg := model.ReportConfigJobMessage{ReportConfigID: rcID, JobID: j.ID, IsLandscape: isLandscape} + b, err := json.Marshal(msg) + if err != nil { + return model.ReportDownloadJob{}, err + } + + // NOTE: Depending on how long this takes, possibly invoke the lambdas directly + if _, err := s.pubsub.PublishMessage(ctx, b); err != nil { + return model.ReportDownloadJob{}, err + } + + if err := tx.Commit(); err != nil { + return model.ReportDownloadJob{}, err + } + + if s.mockQueue { + if _, err := s.pubsub.MockPublishMessage(ctx, b); err != nil { + return model.ReportDownloadJob{}, err + } + } + + return j, nil +} diff --git 
a/api/internal/servicev2/submittal.go b/api/internal/servicev2/submittal.go new file mode 100644 index 00000000..3b6b3c6a --- /dev/null +++ b/api/internal/servicev2/submittal.go @@ -0,0 +1,27 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type SubmittalService interface { + ListProjectSubmittals(ctx context.Context, projectID uuid.UUID, showMissing bool) ([]model.Submittal, error) + ListInstrumentSubmittals(ctx context.Context, instrumentID uuid.UUID, showMissing bool) ([]model.Submittal, error) + ListAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID, showMissing bool) ([]model.Submittal, error) + ListUnverifiedMissingSubmittals(ctx context.Context) ([]model.Submittal, error) + UpdateSubmittal(ctx context.Context, sub model.Submittal) error + VerifyMissingSubmittal(ctx context.Context, submittalID uuid.UUID) error + VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID) error +} + +type submittalService struct { + db *model.Database + *model.Queries +} + +func NewSubmittalService(db *model.Database, q *model.Queries) *submittalService { + return &submittalService{db, q} +} diff --git a/api/internal/servicev2/timeseries.go b/api/internal/servicev2/timeseries.go new file mode 100644 index 00000000..92f30bdd --- /dev/null +++ b/api/internal/servicev2/timeseries.go @@ -0,0 +1,85 @@ +package servicev2 + +import ( + "context" + "errors" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type TimeseriesService interface { + GetStoredTimeseriesExists(ctx context.Context, timeseriesID uuid.UUID) (bool, error) + AssertTimeseriesLinkedToProject(ctx context.Context, projectID uuid.UUID, dd map[uuid.UUID]struct{}) error + ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]model.Timeseries, error) + ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) 
([]model.Timeseries, error) + ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]model.Timeseries, error) + GetTimeseries(ctx context.Context, timeseriesID uuid.UUID) (model.Timeseries, error) + CreateTimeseries(ctx context.Context, ts model.Timeseries) (model.Timeseries, error) + CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) + UpdateTimeseries(ctx context.Context, ts model.Timeseries) (uuid.UUID, error) + DeleteTimeseries(ctx context.Context, timeseriesID uuid.UUID) error +} + +type timeseriesService struct { + db *model.Database + *model.Queries +} + +func NewTimeseriesService(db *model.Database, q *model.Queries) *timeseriesService { + return &timeseriesService{db, q} +} + +func (s timeseriesService) CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return nil, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + uu := make([]model.Timeseries, len(tt)) + for idx, ts := range tt { + ts.Type = model.StandardTimeseriesType + tsNew, err := qtx.CreateTimeseries(ctx, ts) + if err != nil { + return nil, err + } + uu[idx] = tsNew + } + + if err := tx.Commit(); err != nil { + return nil, err + } + + return uu, nil +} + +func (s timeseriesService) AssertTimeseriesLinkedToProject(ctx context.Context, projectID uuid.UUID, dd map[uuid.UUID]struct{}) error { + ddc := make(map[uuid.UUID]struct{}, len(dd)) + dds := make([]uuid.UUID, len(dd)) + idx := 0 + for k := range dd { + ddc[k] = struct{}{} + dds[idx] = k + idx++ + } + + q := s.db.Queries() + + m, err := q.GetTimeseriesProjectMap(ctx, dds) + if err != nil { + return err + } + for tID := range ddc { + ppID, ok := m[tID] + if ok && ppID == projectID { + delete(ddc, tID) + } + } + if len(ddc) != 0 { + return errors.New("instruments for all timeseries must be linked to project") + } + return nil +} diff --git 
a/api/internal/servicev2/timeseries_calculated.go b/api/internal/servicev2/timeseries_calculated.go new file mode 100644 index 00000000..e623e377 --- /dev/null +++ b/api/internal/servicev2/timeseries_calculated.go @@ -0,0 +1,99 @@ +package servicev2 + +import ( + "context" + "database/sql" + "errors" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type CalculatedTimeseriesService interface { + GetAllCalculatedTimeseriesForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.CalculatedTimeseries, error) + CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error + UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error + DeleteCalculatedTimeseries(ctx context.Context, ctsID uuid.UUID) error +} + +type calculatedTimeseriesService struct { + db *model.Database + *model.Queries +} + +func NewCalculatedTimeseriesService(db *model.Database, q *model.Queries) *calculatedTimeseriesService { + return &calculatedTimeseriesService{db, q} +} + +func (s calculatedTimeseriesService) CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + tsID, err := qtx.CreateCalculatedTimeseries(ctx, cc) + if err != nil { + return err + } + + if err := qtx.CreateCalculation(ctx, tsID, cc.Formula); err != nil { + return err + } + + if err := tx.Commit(); err != nil { + return err + } + + return nil +} + +func (s calculatedTimeseriesService) UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + defaultCts, err := qtx.GetOneCalculation(ctx, &cts.ID) + if err != nil { + return err + } + + if cts.InstrumentID == uuid.Nil { + cts.InstrumentID = 
defaultCts.InstrumentID + } + if cts.ParameterID == uuid.Nil { + cts.ParameterID = defaultCts.ParameterID + } + if cts.UnitID == uuid.Nil { + cts.UnitID = defaultCts.UnitID + } + if cts.Slug == "" { + cts.Slug = defaultCts.Slug + } + if cts.FormulaName == "" { + cts.FormulaName = defaultCts.FormulaName + } + if cts.Formula == "" { + cts.Formula = defaultCts.Formula + } + + if err := qtx.CreateOrUpdateCalculatedTimeseries(ctx, cts, defaultCts); err != nil && !errors.Is(err, sql.ErrNoRows) { + return err + } + + if err := qtx.CreateOrUpdateCalculation(ctx, cts.ID, cts.Formula, defaultCts.Formula); err != nil && !errors.Is(err, sql.ErrNoRows) { + return err + } + + if err := tx.Commit(); err != nil { + return err + } + + return nil +} diff --git a/api/internal/servicev2/timeseries_cwms.go b/api/internal/servicev2/timeseries_cwms.go new file mode 100644 index 00000000..f11185ac --- /dev/null +++ b/api/internal/servicev2/timeseries_cwms.go @@ -0,0 +1,72 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type TimeseriesCwmsService interface { + ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]model.TimeseriesCwms, error) + CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) ([]model.TimeseriesCwms, error) + UpdateTimeseriesCwms(ctx context.Context, tsCwms model.TimeseriesCwms) error +} + +type timeseriesCwmsService struct { + db *model.Database + *model.Queries +} + +func NewTimeseriesCwmsService(db *model.Database, q *model.Queries) *timeseriesCwmsService { + return &timeseriesCwmsService{db, q} +} + +func (s timeseriesCwmsService) CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) ([]model.TimeseriesCwms, error) { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return tcc, err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + for idx := range tcc { + 
tcc[idx].Type = model.CwmsTimeseriesType + tcc[idx].InstrumentID = instrumentID + tsNew, err := qtx.CreateTimeseries(ctx, tcc[idx].Timeseries) + if err != nil { + return tcc, err + } + tcc[idx].Timeseries = tsNew + if err := qtx.CreateTimeseriesCwms(ctx, tcc[idx]); err != nil { + return tcc, err + } + } + + if err := tx.Commit(); err != nil { + return tcc, err + } + + return tcc, nil +} + +func (s timeseriesCwmsService) UpdateTimeseriesCwms(ctx context.Context, tsCwms model.TimeseriesCwms) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + if _, err := qtx.UpdateTimeseries(ctx, tsCwms.Timeseries); err != nil { + return err + } + + if err := qtx.UpdateTimeseriesCwms(ctx, tsCwms); err != nil { + return err + } + + return tx.Commit() +} diff --git a/api/internal/servicev2/timeseries_process.go b/api/internal/servicev2/timeseries_process.go new file mode 100644 index 00000000..12a4e916 --- /dev/null +++ b/api/internal/servicev2/timeseries_process.go @@ -0,0 +1,21 @@ +package servicev2 + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/model" +) + +type ProcessTimeseriesService interface { + SelectMeasurements(ctx context.Context, f model.ProcessMeasurementFilter) (model.ProcessTimeseriesResponseCollection, error) + SelectInclinometerMeasurements(ctx context.Context, f model.ProcessMeasurementFilter) (model.ProcessInclinometerTimeseriesResponseCollection, error) +} + +type processTimeseriesService struct { + db *model.Database + *model.Queries +} + +func NewProcessTimeseriesService(db *model.Database, q *model.Queries) *processTimeseriesService { + return &processTimeseriesService{db, q} +} diff --git a/api/internal/servicev2/unit.go b/api/internal/servicev2/unit.go new file mode 100644 index 00000000..0a9def77 --- /dev/null +++ b/api/internal/servicev2/unit.go @@ -0,0 +1,20 @@ +package servicev2 + +import ( + "context" + + 
"github.com/USACE/instrumentation-api/api/internal/model" +) + +type UnitService interface { + ListUnits(ctx context.Context) ([]model.Unit, error) +} + +type unitService struct { + db *model.Database + *model.Queries +} + +func NewUnitService(db *model.Database, q *model.Queries) *unitService { + return &unitService{db, q} +} diff --git a/api/internal/servicev2/uploader.go b/api/internal/servicev2/uploader.go new file mode 100644 index 00000000..18d8432d --- /dev/null +++ b/api/internal/servicev2/uploader.go @@ -0,0 +1,143 @@ +package servicev2 + +import ( + "context" + "encoding/csv" + "io" + "math" + "strconv" + "time" + + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type UploaderService interface { + CreateTimeseriesMeasurementsFromCSVFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error + CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error + CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) error +} + +type uploaderService struct { + db *model.Database + *model.Queries +} + +func NewUploaderService(db *model.Database, q *model.Queries) *uploaderService { + return &uploaderService{db, q} +} + +func (s uploaderService) CreateTimeseriesMeasurementsFromCSVFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { + // TODO + return nil +} + +func (s uploaderService) CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { + // TODO + return nil +} + +// TODO transition away from datalogger equivalency table to different parser that's uploader specific +func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { + // TODO Get mapper by id + + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + envHeader, err := reader.Read() + if 
err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + unitsHeader, err := reader.Read() + if err != nil { + return err + } + processHeader, err := reader.Read() + if err != nil { + return err + } + + meta := model.Environment{ + // StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + // OSVersion: envHeader[4], + // ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + if err != nil { + return err + } + + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + if err != nil { + return err + } + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]model.Field, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i-2] = model.Field{ + Name: fieldHeader[i], + Units: unitsHeader[i], + Process: processHeader[i], + } + } + + eqt, err := qtx.GetEquivalencyTable(ctx, tableID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, eqtRow := range eqt.Rows { + fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID + } + + for { + record, err := reader.Read() + if err == io.EOF { + break + } + if err != nil { + return err + } + + t, err := time.Parse(time.RFC3339, record[0]) + if err != nil { + return err + } + + for idx, cell := range record[2:] { + fieldName := fields[idx].Name + tsID, ok := fieldNameTimeseriesIDMap[fieldName] + if !ok { + continue + } + + v, err := strconv.ParseFloat(cell, 64) + if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { + continue + } + + if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, tsID, t, v); err != nil { + return err + } + } + } + return nil +} diff --git a/api/migrations/repeat/0030__views_projects.sql b/api/migrations/repeat/0030__views_projects.sql index 199c6af6..66a865ea 100644 --- 
a/api/migrations/repeat/0030__views_projects.sql +++ b/api/migrations/repeat/0030__views_projects.sql @@ -42,6 +42,8 @@ CREATE OR REPLACE VIEW v_project AS ( SELECT static_host FROM config LIMIT 1 ) cfg ON true + WHERE NOT deleted + ORDER BY name ); CREATE OR REPLACE VIEW v_district AS ( @@ -56,4 +58,5 @@ CREATE OR REPLACE VIEW v_district AS ( FROM district dis INNER JOIN division div ON dis.division_id = div.id INNER JOIN agency ag ON ag.id = div.agency_id + ORDER BY ag.name, div.name, dis.name ); diff --git a/api/migrations/repeat/0040__views_instruments.sql b/api/migrations/repeat/0040__views_instruments.sql index 3d9fdedd..8e4734c2 100644 --- a/api/migrations/repeat/0040__views_instruments.sql +++ b/api/migrations/repeat/0040__views_instruments.sql @@ -21,9 +21,9 @@ CREATE OR REPLACE VIEW v_instrument AS ( i.name, i.type_id, i.show_cwms_tab, - t.name AS type, - t.icon AS icon, - ST_AsBinary(i.geometry) AS geometry, + t.name type, + t.icon, + i.geometry, i.station, i.station_offset, i.creator, @@ -32,18 +32,18 @@ CREATE OR REPLACE VIEW v_instrument AS ( i.update_date, i.nid_id, i.usgs_id, - tel.telemetry AS telemetry, + tel.telemetry, cwms.has_cwms, - COALESCE(op.parr::TEXT, '[]'::TEXT) AS projects, - COALESCE(c.constants, '{}') AS constants, - COALESCE(g.groups, '{}') AS groups, - COALESCE(a.alert_configs, '{}') AS alert_configs, - COALESCE(o.opts, '{}'::JSON)::TEXT AS opts + COALESCE(op.parr, '[]'::json) projects, + COALESCE(c.constants, '{}') constants, + COALESCE(g.groups, '{}') groups, + COALESCE(a.alert_configs, '{}') alert_configs, + COALESCE(o.opts, '{}'::json) opts FROM instrument i INNER JOIN instrument_type t ON t.id = i.type_id LEFT JOIN LATERAL ( SELECT - JSON_AGG(JSON_BUILD_OBJECT( + json_agg(json_build_object( 'id', p.id, 'name', p.name, 'slug', p.slug diff --git a/api/migrations/schema/V1.14.00__uploader.sql b/api/migrations/schema/V1.14.00__uploader.sql new file mode 100644 index 00000000..f4c01901 --- /dev/null +++ 
b/api/migrations/schema/V1.14.00__uploader.sql @@ -0,0 +1,20 @@ +CREATE TYPE uploader_config_type AS ENUM ('csv', 'dux', 'toa5'); + + +CREATE TABLE uploader_config ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + project_id uuid NOT NULL REFERENCES project(id), + name text NOT NULL, + description text NOT NULL, + create_date timestamptz NOT NULL DEFAULT now(), + creator uuid NOT NULL REFERENCES profile(id), + type uploader_config_type NOT NULL +); + + +CREATE TABLE uploader_config_mapping ( + uploader_config_id uuid NOT NULL REFERENCES uploader_config(id), + field_name text NOT NULL, + timeseries_id uuid UNIQUE NOT NULL REFERENCES timeseries(id), + CONSTRAINT uploader_config_mapping_uploader_config_id_field_name UNIQUE(uploader_config_id, field_name) +); diff --git a/api/queries/alert.sql b/api/queries/alert.sql new file mode 100644 index 00000000..752d1ba9 --- /dev/null +++ b/api/queries/alert.sql @@ -0,0 +1,48 @@ +-- name: CreateAlert :exec +insert into alert (alert_config_id) values ($1); + + +-- name: CreateAlerts :batchexec +insert into alert (alert_config_id) values ($1); + + +-- name: ListAlertsForProject :many +select * from v_alert where project_id = $1; + + +-- name: ListAlertsForInstrument :many +select * from v_alert +where alert_config_id = any( + select id from alert_config_instrument + where instrument_id = $1 +); + + +-- name: ListAlertsForProfile :many +select a.*, + case when r.alert_id is not null then true else false + end as read +from v_alert a +left join alert_read r on r.alert_id = a.id +inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_config_id +where aps.profile_id = $1; + + +-- name: GetAlert :one +select a.*, + case when r.alert_id is not null then true else false + end as read +from v_alert a +left join alert_read r on r.alert_id = a.id +inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_config_id +where aps.profile_id = $1 +and a.id = $2; + + +-- name: CreateAlertRead :exec +insert 
into alert_read (profile_id, alert_id) values ($1, $2) +on conflict do nothing; + + +-- name: DeleteAlertRead :exec +delete from alert_read where profile_id = $1 and alert_id = $2; diff --git a/api/queries/alert_check.sql b/api/queries/alert_check.sql new file mode 100644 index 00000000..14400e44 --- /dev/null +++ b/api/queries/alert_check.sql @@ -0,0 +1,31 @@ +-- name: ListAndCheckAlertConfigs :many +update alert_config ac1 +set last_checked = now() +from ( + select * + from v_alert_config +) ac2 +where ac1.id = ac2.id +returning ac2.*; + + +-- name: UpdateAlertConfigLastReminded :exec +update alert_config set last_reminded = $2 where id = $1; + + +-- name: UpdateSubmittalCompletionDateOrWarningSent :exec +update submittal set + submittal_status_id = $2, + completion_date = $3, + warning_sent = $4 +where id = $1; + + +-- name: CreateNextSubmittalFromNewAlertConfigDate :exec +insert into submittal (alert_config_id, create_date, due_date) +select + ac.id, + $2::timestamptz, + $2::timestamptz + ac.schedule_interval +from alert_config ac +where ac.id = $1; diff --git a/api/queries/alert_config.sql b/api/queries/alert_config.sql new file mode 100644 index 00000000..71d5c837 --- /dev/null +++ b/api/queries/alert_config.sql @@ -0,0 +1,94 @@ +-- name: ListAlertConfigsForProject :many +select * +from v_alert_config +where project_id = $1 +order by name; + + +-- name: ListAlertConfigsForProjectAndAlertType :many +select * +from v_alert_config +where project_id = $1 +and alert_type_id = $2 +order by name; + + +-- name: ListAlertConfigsForInstrument :many +select t.* +from v_alert_config t +inner join alert_config_instrument aci on t.id = aci.alert_config_id +where aci.instrument_id = $1 +order by t.name; + + +-- name: GetAlertConfig :one +select * from v_alert_config where id = $1; + + +-- name: CreateAlertConfig :one +insert into alert_config ( + project_id, + name, + body, + alert_type_id, + start_date, + schedule_interval, + mute_consecutive_alerts, + remind_interval, + 
warning_interval, + creator, + create_date +) values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11) +returning id; + + +-- name: AssignInstrumentToAlertConfig :exec +insert into alert_config_instrument (alert_config_id, instrument_id) values ($1, $2); + + +-- name: UnassignAllInstrumentsFromAlertConfig :exec +delete from alert_config_instrument where alert_config_id = $1; + + +-- name: CreateNextSubmittalFromExistingAlertConfigDate :exec +insert into submittal (alert_config_id, due_date) +select ac.id, ac.create_date + ac.schedule_interval +from alert_config ac +where ac.id = $1; + + +-- name: UpdateAlertConfig :exec +update alert_config set + name = $3, + body = $4, + start_date = $5, + schedule_interval = $6, + mute_consecutive_alerts = $7, + remind_interval = $8, + warning_interval = $9, + updater = $10, + update_date = $11 +where id = $1 and project_id = $2; + + +-- name: UpdateFutureSubmittalForAlertConfig :one +update submittal +set due_date = sq.new_due_date +from ( + select + sub.id as submittal_id, + sub.create_date + ac.schedule_interval as new_due_date + from submittal sub + inner join alert_config ac on sub.alert_config_id = ac.id + where sub.alert_config_id = $1 + and sub.due_date > now() + and sub.completion_date is null + and not sub.marked_as_missing +) sq +where id = sq.submittal_id +and sq.new_due_date > now() +returning id; + + +-- name: DeleteAlertConfig :exec +update alert_config set deleted=true where id = $1; diff --git a/api/queries/alert_measurement_check.sql b/api/queries/alert_measurement_check.sql new file mode 100644 index 00000000..87c8b244 --- /dev/null +++ b/api/queries/alert_measurement_check.sql @@ -0,0 +1,14 @@ +-- name: ListIncompleteEvaluationSubmittals :many +select * from v_alert_check_evaluation_submittal +where submittal_id = any( + select id from submittal + where completion_date is null and not marked_as_missing +); + + +-- name: ListIncompleteMeasurementSubmittals :many +select * from v_alert_check_measurement_submittal +where 
submittal_id = any( + select id from submittal + where completion_date is null and not marked_as_missing +); diff --git a/api/queries/alert_subscription.sql b/api/queries/alert_subscription.sql new file mode 100644 index 00000000..054e0f4a --- /dev/null +++ b/api/queries/alert_subscription.sql @@ -0,0 +1,62 @@ +-- name: CreateAlertProfileSubscriptionOnAnyConflictDoNothing :exec +insert into alert_profile_subscription (alert_config_id, profile_id) +values ($1, $2) +on conflict do nothing; + + +-- name: DeleteAlertProfileSubscription :exec +delete from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; + + +-- name: GetAlertSubscription :many +select * from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; + + +-- name: GetAlertSubscriptionByID :one +select * from alert_profile_subscription where id = $1; + + +-- name: ListMyAlertSubscriptions :many +select * from alert_profile_subscription where profile_id = $1; + + +-- name: UpdateMyAlertSubscription :exec +update alert_profile_subscription set mute_ui=$1, mute_notify=$2 where alert_config_id=$3 and profile_id=$4; + + +-- name: RegisterEmail :one +with e as ( + insert into email (email) values ($1) + on conflict on constraint unique_email do nothing + returning id +) +select id from e +union +select id from email where email = $1 +limit 1; + + +-- name: UnregisterEmail :exec +delete from email where id = $1; + + +-- name: CreateAlertEmailSubscription :exec +insert into alert_email_subscription (alert_config_id, email_id) values ($1,$2) +on conflict on constraint email_unique_alert_config do nothing; + + +-- name: CreateAlertProfileSubscription :exec +insert into alert_profile_subscription (alert_config_id, profile_id) values ($1,$2) +on conflict on constraint profile_unique_alert_config do nothing; + + +-- name: DeleteAlertEmailSubscription :exec +delete from alert_email_subscription where alert_config_id = $1 and email_id = $2; + + +-- name: 
DeleteAllAlertEmailSubscriptionsForAlertConfig :exec +delete from alert_email_subscription where alert_config_id = $1; + + +-- name: DeleteAllAlertProfileSubscriptionsForAlertConfig :exec +delete from alert_profile_subscription where alert_config_id = $1; diff --git a/api/queries/autocomplete.sql b/api/queries/autocomplete.sql new file mode 100644 index 00000000..12bbdfe6 --- /dev/null +++ b/api/queries/autocomplete.sql @@ -0,0 +1,5 @@ +-- name: ListEmailAutocomplete :many +select id, user_type, username, email +from v_email_autocomplete +where username_email ilike '%'||$1||'%' +limit $2; diff --git a/api/queries/aware.sql b/api/queries/aware.sql new file mode 100644 index 00000000..0881e406 --- /dev/null +++ b/api/queries/aware.sql @@ -0,0 +1,12 @@ +-- name: ListAwareParameters :many +select id, key, parameter_id, unit_id from aware_parameter; + + +-- name: ListAwarePlatformParameterEnabled :many +select instrument_id, aware_id, aware_parameter_key, timeseries_id +from v_aware_platform_parameter_enabled +order by aware_id, aware_parameter_key; + + +-- name: CreateAwarePlatform :exec +insert into aware_platform (instrument_id, aware_id) values ($1, $2); diff --git a/api/queries/collection_group.sql b/api/queries/collection_group.sql new file mode 100644 index 00000000..91bca958 --- /dev/null +++ b/api/queries/collection_group.sql @@ -0,0 +1,45 @@ +-- name: ListCollectionGroups :many +select id, project_id, slug, name, creator, create_date, updater, update_date +from collection_group +where project_id = sqlc.arg(project_id) +and (sqlc.narg(id) is null or sqlc.narg(id) = id); + + +-- name: GetCollectionGroupDetailsTimeseries :one +select t.*, tm.time as latest_time, tm.value as latest_value +from collection_group_timeseries cgt +inner join collection_group cg on cg.id = cgt.collection_group_id +inner join v_timeseries t on t.id = cgt.timeseries_id +left join timeseries_measurement tm on tm.timeseries_id = t.id and tm.time = ( + select time from timeseries_measurement 
+ where timeseries_id = t.id + order by time desc limit 1 +) +inner join project_instrument pi on t.instrument_id = pi.instrument_id +where pi.project_id = $1 +and cgt.collection_group_id = $2; + + +-- name: CreateCollectionGroup :one +insert into collection_group (project_id, name, slug, creator, create_date, updater, update_date) +values ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5, $6) +returning id, project_id, name, slug, creator, create_date, updater, update_date; + + +-- name: UpdateCollectionGroup :one +update collection_group set name=$3, updater=$4, update_date=$5 +where project_id=$1 and id=$2 +returning id, project_id, name, slug, creator, create_date, updater, update_date; + + +-- name: DeleteCollectionGroup :exec +delete from collection_group where project_id=$1 and id=$2; + + +-- name: AddTimeseriesToCollectionGroup :exec +insert into collection_group_timeseries (collection_group_id, timeseries_id) values ($1, $2) +on conflict on constraint collection_group_unique_timeseries do nothing; + + +-- name: RemoveTimeseriesFromCollectionGroup :exec +delete from collection_group_timeseries where collection_group_id=$1 and timeseries_id = $2; diff --git a/api/queries/datalogger.sql b/api/queries/datalogger.sql new file mode 100644 index 00000000..02189a69 --- /dev/null +++ b/api/queries/datalogger.sql @@ -0,0 +1,84 @@ +-- name: GetDataloggerModelName :one +select model from datalogger_model where id = $1; + + +-- name: ListDataloggersForProject :many +select * from v_datalogger where project_id = $1; + + +-- name: ListAllDataloggers :many +select * from v_datalogger; + + +-- name: GetDataloggerIsActive :one +select exists (select * from v_datalogger where model = $1 and sn = $2)::int; + + +-- name: VerifyDataloggerExists :one +select true from v_datalogger where id = $1; + + +-- name: CreateDataloggerHash :exec +insert into datalogger_hash (datalogger_id, "hash") values ($1, $2); + + +-- name: GetDatalogger :one +select * from 
v_datalogger where id = $1; + + +-- name: CreateDatalogger :one +insert into datalogger (name, sn, project_id, creator, updater, slug, model_id) +values ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) +returning id; + + +-- name: UpdateDatalogger :exec +update datalogger set + name = $2, + updater = $3, + update_date = $4 +where id = $1; + + +-- name: UpdateDataloggerHash :exec +update datalogger_hash set "hash" = $2 where datalogger_id = $1; + + +-- name: UpdateDataloggerUpdater :exec +update datalogger set updater = $2, update_date = $3 where id = $1; + + +-- name: DeleteDatalogger :exec +update datalogger set deleted = true, updater = $2, update_date = $3 where id = $1; + + +-- name: GetDataloggerTablePreview :one +select * from v_datalogger_preview where datalogger_table_id = $1 limit 1; + + +-- name: ResetDataloggerTableName :exec +update datalogger_table set table_name = '' where id = $1; + + +-- name: RenameEmptyDataloggerTableName :exec +update datalogger_table dt +set table_name = $2 +where dt.table_name = '' and dt.datalogger_id = $1 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +); + + +-- name: GetOrCreateDataloggerTable :one +with new_datalogger_table as ( + insert into datalogger_table (datalogger_id, table_name) values ($1, $2) + on conflict on constraint datalogger_table_datalogger_id_table_name_key do nothing + returning id +) +select ndt.id from new_datalogger_table ndt +union +select sdt.id from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2; + + +-- name: DeleteDataloggerTable :exec +delete from datalogger_table where id = $1; diff --git a/api/queries/datalogger_telemetry.sql b/api/queries/datalogger_telemetry.sql new file mode 100644 index 00000000..4c2ec97b --- /dev/null +++ b/api/queries/datalogger_telemetry.sql @@ -0,0 +1,33 @@ +-- name: GetDataloggerByModelSN :one +select * from v_datalogger +where model = $1 and sn = $2 +limit 1; + + +-- name: 
GetDataloggerHashByModelSN :one +select "hash" from v_datalogger_hash +where model = $1 and sn = $2 +limit 1; + + +-- name: CreateDataloggerTablePreview :exec +insert into datalogger_preview (datalogger_table_id, preview, update_date) values ($1, $2, $3); + + +-- name: UpdateDataloggerTablePreview :exec +update datalogger_preview set preview = $3, update_date = $4 +where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); + + +-- name: DeleteDataloggerTableError :exec +delete from datalogger_error +where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); + + +-- name: CreateDataloggerError :exec +insert into datalogger_error (datalogger_table_id, error_message) +select dt.id, $3 from datalogger_table dt +where dt.datalogger_id = $1 and dt.table_name = $2 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +); diff --git a/api/queries/district_rollup.sql b/api/queries/district_rollup.sql new file mode 100644 index 00000000..c9f7e951 --- /dev/null +++ b/api/queries/district_rollup.sql @@ -0,0 +1,14 @@ +-- name: ListEvaluationDistrictRollupsForProjectAlertConfig :many +select * from v_district_rollup +where alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +and project_id = $1 +and the_month >= date_trunc('month', $2::timestamptz) +and the_month <= date_trunc('month', $3::timestamptz); + + +-- name: ListMeasurementDistrictRollupsForProjectAlertConfig :many +select * from v_district_rollup +where alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::uuid +and project_id = $1 +and the_month >= date_trunc('month', $2::timestamptz) +and the_month <= date_trunc('month', $3::timestamptz); diff --git a/api/queries/domains.sql b/api/queries/domains.sql new file mode 100644 index 00000000..5514bcf5 --- /dev/null +++ b/api/queries/domains.sql @@ -0,0 +1,6 @@ +-- name: ListDomains :many 
+select * from v_domain; + + +-- name: ListDomainGroups :many +select * from v_domain_group; diff --git a/api/queries/equivalency_table.sql b/api/queries/equivalency_table.sql new file mode 100644 index 00000000..0f9c6a2e --- /dev/null +++ b/api/queries/equivalency_table.sql @@ -0,0 +1,49 @@ +-- name: GetIsValidDataloggerTable :one +select not exists ( + select * from datalogger_table where id = $1 and table_name = 'preparse' +); + + +-- name: GetIsValidEquivalencyTableTimeseries :one +select not exists ( + select id from v_timeseries_computed + where id = $1 + union all + select timeseries_id from instrument_constants + where timeseries_id = $1 +); + + +-- name: GetEquivalencyTable :many +select + datalogger_id, + datalogger_table_id, + datalogger_table_name, + fields +from v_datalogger_equivalency_table +where datalogger_table_id = $1; + + +-- name: CreateOrUpdateEquivalencyTableRow :exec +insert into datalogger_equivalency_table +(datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) +values ($1, $2, $3, $4, $5, $6) +on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field_name_key +do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id; + + +-- name: UpdateEquivalencyTableRow :exec +update datalogger_equivalency_table set + field_name = $2, + display_name = $3, + instrument_id = $4, + timeseries_id = $5 +where id = $1; + + +-- name: DeleteEquivalencyTable :exec +delete from datalogger_equivalency_table where datalogger_table_id = $1; + + +-- name: DeleteEquivalencyTableRow :exec +delete from datalogger_equivalency_table where id = $1; diff --git a/api/queries/evaluation.sql b/api/queries/evaluation.sql new file mode 100644 index 00000000..00c841a8 --- /dev/null +++ b/api/queries/evaluation.sql @@ -0,0 +1,93 @@ +-- name: ListProjectEvaluations :many +select * +from v_evaluation +where project_id = $1; + + +-- name: 
ListProjectEvaluationsByAlertConfig :many +select * from v_evaluation +where project_id = $1 +and alert_config_id is not null +and alert_config_id = $2; + + +-- name: ListInstrumentEvaluations :many +select * from v_evaluation +where id = any( + select evaluation_id + from evaluation_instrument + where instrument_id = $1 +); + + +-- name: GetEvaluation :one +select * from v_evaluation where id = $1; + + +-- name: CompleteEvaluationSubmittal :exec +update submittal sub1 set + submittal_status_id = sq.submittal_status_id, + completion_date = now() +from ( + select + sub2.id as submittal_id, + case + -- if completed before due date, mark submittal as green id + when now() <= sub2.due_date then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid + -- if completed after due date, mark as yellow + else 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::uuid + end as submittal_status_id + from submittal sub2 + inner join alert_config ac on sub2.alert_config_id = ac.id + where sub2.id = $1 + and sub2.completion_date is null + and not sub2.marked_as_missing + and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +) sq +where sub1.id = sq.submittal_id +returning sub1.*; + + +-- name: CreateNextEvaluationSubmittal :exec +insert into submittal (alert_config_id, due_date) +select + ac.id, + now() + ac.schedule_interval +from alert_config ac +where ac.id in (select sub.alert_config_id from submittal sub where sub.id = $1); + + +-- name: CreateEvaluation :one +insert into evaluation ( + project_id, + submittal_id, + name, + body, + start_date, + end_date, + creator, + create_date +) values ($1,$2,$3,$4,$5,$6,$7,$8) +returning id; + + +-- name: CreateEvalationInstrument :exec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2); + + +-- name: UpdateEvaluation :exec +update evaluation set + name=$3, + body=$4, + start_date=$5, + end_date=$6, + updater=$7, + update_date=$8 +where id=$1 and project_id=$2; + + +-- name: UnassignAllInstrumentsFromEvaluation 
:exec +delete from evaluation_instrument where evaluation_id = $1; + +-- name: DeleteEvaluation :exec +delete from evaluation where id = $1; diff --git a/api/queries/heartbeat.sql b/api/queries/heartbeat.sql new file mode 100644 index 00000000..b93a4293 --- /dev/null +++ b/api/queries/heartbeat.sql @@ -0,0 +1,10 @@ +-- name: CreateHeartbeat :one +insert into heartbeat (time) values ($1) returning *; + + +-- name: GetLatestHeartbeat :one +select max(time) as time from heartbeat; + + +-- name: ListHeartbeats :many +select * from heartbeat; diff --git a/api/queries/home.sql b/api/queries/home.sql new file mode 100644 index 00000000..ec8ef97f --- /dev/null +++ b/api/queries/home.sql @@ -0,0 +1,7 @@ +-- name: GetHome :one +select + (select count(*) from instrument where not deleted) as instrument_count, + (select count(*) from project where not deleted) as project_count, + (select count(*) from instrument_group) as instrument_group_count, + (select count(*) from instrument where not deleted and create_date > now() - '7 days'::interval) as new_instruments_7d, + (select count(*) from timeseries_measurement where time > now() - '2 hours'::interval) as new_measurements_2h; diff --git a/api/queries/instrument.sql b/api/queries/instrument.sql new file mode 100644 index 00000000..2370b4a2 --- /dev/null +++ b/api/queries/instrument.sql @@ -0,0 +1,110 @@ +-- name: ListInstruments :many +select id, + status_id, + status, + status_time, + slug, + name, + type_id, + type, + icon, + geometry, + station, + station_offset, + creator, + create_date, + updater, + update_date, + projects, + constants, + groups, + alert_configs, + nid_id, + usgs_id, + has_cwms, + show_cwms_tab, + opts +from v_instrument +where not deleted +and (sqlc.narg(id) is null or sqlc.narg(id) = id); + + +-- name: GetInstrumentCount :one +select count(*) from instrument where not deleted; + + +-- name: CreateInstrument :one +insert into instrument (slug, name, type_id, geometry, station, station_offset, creator,
create_date, nid_id, usgs_id, show_cwms_tab) +values (slugify($1, 'instrument'), $1, $2, st_setsrid(ST_GeomFromWKB($3), 4326), $4, $5, $6, $7, $8, $9, $10) +returning id, slug; + + +-- name: ListAdminProjects :many +select pr.project_id from profile_project_roles pr +inner join role ro on ro.id = pr.role_id +where pr.profile_id = $1 +and ro.name = 'ADMIN'; + + +-- name: ListInstrumentProjects :many +select project_id from project_instrument where instrument_id = $1; + + +-- name: GetProjectCountForInstrument :one +select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = any(sqlc.arg(instrument_ids)::uuid[]) +group by pi.instrument_id, i.name +order by i.name; + + +-- name: UpdateInstrument :exec +update instrument set + name = $3, + type_id = $4, + geometry = ST_GeomFromWKB($5), + updater = $6, + update_date = $7, + station = $8, + station_offset = $9, + nid_id = $10, + usgs_id = $11, + show_cwms_tab = $12 +where id = $2 +and id in ( + select instrument_id + from project_instrument + where project_id = $1 +); + + +-- name: UpdateInstrumentGeometry :one +update instrument set + geometry = st_geomfromwkb($3), + updater = $4, + update_date = now() +where id = $2 +and id in ( + select instrument_id + from project_instrument + where project_id = $1 +) +returning id; + + +-- name: DeleteFlagInstrument :exec +update instrument set deleted = true +where id = any( + select instrument_id + from project_instrument + where project_id = $1 +) +and id = $2; + + +-- name: ListInstrumentIDNamesByIDs :many +select id, name +from instrument +where id = any(sqlc.arg(instrument_ids)::uuid[]) +and not deleted; diff --git a/api/queries/instrument_assign.sql b/api/queries/instrument_assign.sql new file mode 100644 index 00000000..8d320f8d --- /dev/null +++ b/api/queries/instrument_assign.sql @@ -0,0 +1,57 @@ +-- name: AssignInstrumentToProject :exec +insert into
project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing; + + +-- name: UnassignInstrumentFromProject :exec +delete from project_instrument where project_id = $1 and instrument_id = $2; + + +-- name: ValidateInstrumentNamesProjectUnique :many +select i.name +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.project_id = sqlc.arg(project_id) +and i.name = any(sqlc.arg(instrument_name)::text[]) +and not i.deleted; + + +-- name: ValidateProjectsInstrumentNameUnique :many +select p.name, i.name +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +inner join project p on pi.project_id = p.id +where i.name = sqlc.arg(name) +and pi.instrument_id = any(sqlc.arg(instrument_id)::uuid[]) +and not i.deleted +order by pi.project_id; + + +-- name: ValidateInstrumentsAssignerAuthorized :many +select p.name as project_name, i.name as instrument_name +from project_instrument pi +inner join project p on pi.project_id = p.id +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = any(sqlc.arg(instrument_ids)::uuid[]) +and not exists ( + select 1 from v_profile_project_roles ppr + where ppr.profile_id = sqlc.arg(profile_id) + and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) +) +and not i.deleted; + + +-- name: ValidateProjectsAssignerAuthorized :many +select p.name +from project_instrument pi +inner join project p on pi.project_id = p.id +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = sqlc.arg(instrument_id) +and pi.project_id = any(sqlc.arg(project_ids)::uuid[]) +and not exists ( + select 1 from v_profile_project_roles ppr + where profile_id = sqlc.arg(profile_id) + and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) +) +and not i.deleted +order by p.name; diff --git a/api/queries/instrument_constant.sql
b/api/queries/instrument_constant.sql new file mode 100644 index 00000000..85b313eb --- /dev/null +++ b/api/queries/instrument_constant.sql @@ -0,0 +1,11 @@ +-- name: ListInstrumentConstants :many +select t.* from v_timeseries t +inner join instrument_constants ic on ic.timeseries_id = t.id +where ic.instrument_id = $1; + +-- name: CreateInstrumentConstant :exec +insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2); + + +-- name: DeleteInstrumentConstant :exec +delete from instrument_constants where instrument_id = $1 and timeseries_id = $2; diff --git a/api/queries/instrument_group.sql b/api/queries/instrument_group.sql new file mode 100644 index 00000000..fac9b5f6 --- /dev/null +++ b/api/queries/instrument_group.sql @@ -0,0 +1,53 @@ +-- name: ListInstrumentGroups :many +select + id, + slug, + name, + description, + creator, + create_date, + updater, + update_date, + project_id, + instrument_count, + timeseries_count +from v_instrument_group +where not deleted +and (sqlc.narg(id) is null or sqlc.narg(id) = id); + + +-- name: CreateInstrumentGroup :one +insert into instrument_group (slug, name, description, creator, create_date, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, slug, name, description, creator, create_date, updater, update_date, project_id; + + +-- name: UpdateInstrumentGroup :one +update instrument_group set + name = $2, + deleted = $3, + description = $4, + updater = $5, + update_date = $6, + project_id = $7 + where id = $1 + returning *; + + +-- name: DeleteFlagInstrumentGroup :exec +update instrument_group set deleted = true where id = $1; + + +-- name: ListInstrumentGroupInstruments :many +select inst.* +from instrument_group_instruments igi +inner join v_instrument inst on igi.instrument_id = inst.id +where igi.instrument_group_id = $1 and inst.deleted = false; + + +-- name: CreateInstrumentGroupInstruments :exec +insert into instrument_group_instruments
(instrument_group_id, instrument_id) values ($1, $2); + + +-- name: DeleteInstrumentGroupInstruments :exec +delete from instrument_group_instruments where instrument_group_id = $1 and instrument_id = $2; diff --git a/api/queries/instrument_ipi.sql b/api/queries/instrument_ipi.sql new file mode 100644 index 00000000..33ef2dc6 --- /dev/null +++ b/api/queries/instrument_ipi.sql @@ -0,0 +1,46 @@ +-- name: CreateIpiOpts :exec +insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: UpdateIpiOpts :exec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: GetAllIpiSegmentsForInstrument :many +select * from v_ipi_segment where instrument_id = $1; + + +-- name: CreateIpiSegment :exec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6); + + +-- name: UpdateIpiSegment :exec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2; + + +-- name: GetIpiMeasurementsForInstrument :many +select m1.instrument_id, m1.time, m1.measurements +from v_ipi_measurement m1 +where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_ipi_measurement m2 +where m2.time in (select o.initial_time from ipi_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc; diff --git a/api/queries/instrument_note.sql b/api/queries/instrument_note.sql new file mode 100644 index 00000000..4dca597e --- /dev/null +++ b/api/queries/instrument_note.sql @@ -0,0 +1,35 @@ +-- name: ListInstrumentNotes :many +select + id, + instrument_id, + title, + body, + time, + creator, + create_date, + updater, + update_date +from instrument_note 
+where (sqlc.narg(instrument_id) is null or sqlc.narg(instrument_id) = instrument_id) +and (sqlc.narg(id) is null or sqlc.narg(id) = id); + + +-- name: CreateInstrumentNote :one +insert into instrument_note (instrument_id, title, body, time, creator, create_date) +values ($1, $2, $3, $4, $5, $6) +returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; + + +-- name: UpdateInstrumentNote :one +update instrument_note set + title = $2, + body = $3, + time = $4, + updater = $5, + update_date = $6 +where id = $1 +returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; + + +-- name: DeleteInstrumentNote :exec +delete from instrument_note where id = $1; diff --git a/api/queries/instrument_saa.sql b/api/queries/instrument_saa.sql new file mode 100644 index 00000000..cedac8bd --- /dev/null +++ b/api/queries/instrument_saa.sql @@ -0,0 +1,48 @@ +-- name: CreateSaaOpts :exec +insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: UpdateSaaOpts :exec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: GetAllSaaSegmentsForInstrument :many +select * from v_saa_segment where instrument_id = $1; + + +-- name: CreateSaaSegment :exec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: UpdateSaaSegment :exec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2; + + +-- name: GetSaaMeasurementsForInstrument :many +select m1.instrument_id, m1.time, m1.measurements +from v_saa_measurement m1 +where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time,
m2.measurements +from v_saa_measurement m2 +where m2.time in (select o.initial_time from saa_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc; diff --git a/api/queries/instrument_status.sql b/api/queries/instrument_status.sql new file mode 100644 index 00000000..86f14fb6 --- /dev/null +++ b/api/queries/instrument_status.sql @@ -0,0 +1,20 @@ +-- name: ListInstrumentStatuses :many +select + s.id, + s.status_id, + d.name status, + s.time +from instrument_status s +inner join status d on d.id = s.status_id +where (sqlc.narg(instrument_id) is null or sqlc.narg(instrument_id) = s.instrument_id) +and (sqlc.narg(id) is null or sqlc.narg(id) = s.id) +order by time desc; + + +-- name: CreateOrUpdateInstrumentStatus :exec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id; + + +-- name: DeleteInstrumentStatus :exec +delete from instrument_status where id = $1; diff --git a/api/queries/measurement.sql b/api/queries/measurement.sql new file mode 100644 index 00000000..5a153454 --- /dev/null +++ b/api/queries/measurement.sql @@ -0,0 +1,64 @@ +-- name: ListTimeseriesMeasurements :many +select + m.timeseries_id, + m.time, + m.value, + n.masked, + n.validated, + n.annotation +from timeseries_measurement m +left join timeseries_notes n on m.timeseries_id = n.timeseries_id and m.time = n.time +inner join timeseries t on t.id = m.timeseries_id +where t.id = $1 and m.time > $2 and m.time < $3 order by m.time asc; + + +-- name: DeleteTimeseriesMeasurements :exec +delete from timeseries_measurement where timeseries_id = $1 and time = $2; + + +-- name: GetTimeseriesConstantMeasurement :many +select + m.timeseries_id, + m.time, + m.value +from timeseries_measurement m +inner join v_timeseries_stored t on t.id = m.timeseries_id +inner join parameter p on p.id = t.parameter_id +where t.instrument_id in ( + select 
instrument_id + from v_timeseries_stored t + where t.id= $1 +) +and p.name = $2; + + +-- name: CreateTimeseriesMeasruement :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do nothing; + + +-- name: CreateOrUpdateTimeseriesMeasurement :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do update set value = excluded.value; + + +-- name: CreateTimeseriesNote :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing; + + +-- name: CreateOrUpdateTimeseriesNote :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; + + +-- name: DeleteTimeseriesMeasurement :exec +delete from timeseries_measurement where timeseries_id = $1 and time = $2; + + +-- name: DeleteTimeseriesMeasurementsRange :exec +delete from timeseries_measurement where timeseries_id = $1 and time > $2 and time < $3; + + +-- name: DeleteTimeseriesNote :exec +delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3; diff --git a/api/queries/plot_config.sql b/api/queries/plot_config.sql new file mode 100644 index 00000000..414d6fb4 --- /dev/null +++ b/api/queries/plot_config.sql @@ -0,0 +1,43 @@ +-- name: ListPlotConfigs :many +select + id, + slug, + name, + project_id, + report_configs, + creator, + create_date, + updater, + update_date, + show_masked, + show_nonvalidated, + show_comments, + auto_range, + date_range, + threshold, + plot_type, + display +from v_plot_configuration +where (sqlc.narg(project_id) is null or sqlc.narg(project_id) = project_id) +and (sqlc.narg(id) is null or 
sqlc.narg(id) = id); + + +-- name: CreatePlotConfig :one +insert into plot_configuration (slug, name, project_id, creator, create_date, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) +returning id; + +-- name: CreatePlotConfigSettings :exec +insert into plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold) +values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: UpdatePlotConfig :exec +update plot_configuration set name = $3, updater = $4, update_date = $5 where project_id = $1 and id = $2; + + +-- name: DeletePlotConfig :exec +delete from plot_configuration where project_id = $1 and id = $2; + + +-- name: DeletePlotConfigSettings :exec +delete from plot_configuration_settings where id = $1; diff --git a/api/queries/plot_config_bullseye.sql b/api/queries/plot_config_bullseye.sql new file mode 100644 index 00000000..7324a23a --- /dev/null +++ b/api/queries/plot_config_bullseye.sql @@ -0,0 +1,32 @@ +-- name: CreatePlotBullseyeConfig :exec +insert into plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_timeseries_id) values ($1, $2, $3); + + +-- name: UpdatePlotBullseyeConfig :exec +UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1; + + +-- name: DeletePlotBullseyeConfig :exec +delete from plot_bullseye_config where plot_config_id = $1; + + +-- name: ListPlotConfigMeasurementsBullseyePlot :many +select + t.time, + locf(xm.value) as x, + locf(ym.value) as y +from plot_bullseye_config pc +inner join timeseries_measurement t +on t.timeseries_id = pc.x_axis_timeseries_id +or t.timeseries_id = pc.y_axis_timeseries_id +left join timeseries_measurement xm +on xm.timeseries_id = pc.x_axis_timeseries_id +and xm.time = t.time +left join timeseries_measurement ym +on ym.timeseries_id = pc.y_axis_timeseries_id +and ym.time = t.time +where pc.plot_config_id = $1 +and t.time > $2 +and t.time < $3 +group by t.time +order by t.time 
asc; diff --git a/api/queries/plot_config_contour.sql b/api/queries/plot_config_contour.sql new file mode 100644 index 00000000..a04aa733 --- /dev/null +++ b/api/queries/plot_config_contour.sql @@ -0,0 +1,52 @@ +-- name: CreatePlotContourConfig :exec +insert into plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) +values ($1, $2, $3, $4, $5, $6); + + +-- name: UpdatePlotContourConfig :exec +update plot_contour_config set "time"=$2, locf_backfill=$3, gradient_smoothing=$4, contour_smoothing=$5, show_labels=$6 +where plot_config_id=$1; + + +-- name: DeletePlotContourConfig :exec +delete from plot_contour_config where plot_config_id = $1; + + +-- name: CreatePlotContourConfigTimeseries :exec +insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing; + + +-- name: DeleteAllPlotContourConfigTimeseries :exec +delete from plot_contour_config_timeseries where plot_contour_config_id = $1; + + +-- name: ListPlotContourConfigTimes :many +select distinct mm.time +from plot_contour_config_timeseries pcts +inner join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id +where pcts.plot_contour_config_id = $1 +and mm.time > $2 +and mm.time < $3 +order by time asc; + + +-- name: ListPlotConfigMeasurementsContourPlot :many +select + oi.x, + oi.y, + locf(mm.value) as z +from plot_contour_config pc +left join plot_contour_config_timeseries pcts on pcts.plot_contour_config_id = pc.plot_config_id +left join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id +inner join timeseries ts on ts.id = pcts.timeseries_id +inner join ( + select + ii.id, + st_x(st_centroid(ii.geometry)) as x, + st_y(st_centroid(ii.geometry)) as y + from instrument ii +) oi on oi.id = ts.instrument_id +where plot_config_id = $1 +and mm.time = $2 +group by pc.plot_config_id, pcts.timeseries_id, oi.x, oi.y; diff --git 
a/api/queries/plot_config_profile.sql b/api/queries/plot_config_profile.sql new file mode 100644 index 00000000..73ead8f8 --- /dev/null +++ b/api/queries/plot_config_profile.sql @@ -0,0 +1,6 @@ +-- name: CreatePlotProfileConfig :exec +insert into plot_profile_config (plot_config_id, instrument_id) values ($1, $2); + + +-- name: UpdatePlotProfileConfig :exec +update plot_profile_config set instrument_id=$2 where plot_config_id=$1; diff --git a/api/queries/plot_config_scatter_line.sql b/api/queries/plot_config_scatter_line.sql new file mode 100644 index 00000000..2b41b4bc --- /dev/null +++ b/api/queries/plot_config_scatter_line.sql @@ -0,0 +1,36 @@ +-- name: CreatePlotConfigScatterLineLayout :exec +insert into plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) values ($1, $2, $3); + + +-- name: UpdatePlotConfigScatterLineLayout :exec +update plot_scatter_line_config set y_axis_title=$2, y2_axis_title=$3 where plot_config_id=$1; + + +-- name: CreatePlotConfigTimeseriesTrace :exec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8); + + +-- name: UpdatePlotConfigTimeseriesTrace :exec +update plot_configuration_timeseries_trace +set trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 +where plot_configuration_id=$1 and timeseries_id=$2; + + +-- name: DeleteAllPlotConfigTimeseriesTraces :exec +delete from plot_configuration_timeseries_trace where plot_configuration_id=$1; + + +-- name: CreatePlotConfigCustomShape :exec +insert into plot_configuration_custom_shape +(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5); + + +-- name: UpdatePlotConfigCustomShape :exec +update plot_configuration_custom_shape +set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1; + + +-- name: DeleteAllPlotConfigCustomShapes :exec +delete from 
plot_configuration_custom_shape where plot_configuration_id=$1; diff --git a/api/queries/profile.sql b/api/queries/profile.sql new file mode 100644 index 00000000..489daecc --- /dev/null +++ b/api/queries/profile.sql @@ -0,0 +1,52 @@ +-- name: GetProfileForEDIPI :many +select * from v_profile where edipi = $1; + + +-- name: GetProfileForEmail :one +select * from v_profile where email ilike $1 +limit 1; + + +-- name: GetProfileForUsername :one +select * from v_profile where username = $1 +limit 1; + + +-- name: GetIssuedTokens :many +select token_id, issued from profile_token where profile_id = $1; + + +-- name: GetProfileForTokenID :one +select p.id, p.edipi, p.username, p.email, p.is_admin +from profile_token t +left join v_profile p on p.id = t.profile_id +where t.token_id = $1 +limit 1; + + +-- name: CreateProfile :exec +insert into profile (edipi, username, email, display_name) values ($1, $2, $3, $4) returning id, username, email, display_name; + + +-- name: CreateProfileToken :one +insert into profile_token (token_id, profile_id, hash) values ($1,$2,$3) returning *; + + +-- name: GetTokenInfo :one +select id, token_id, profile_id, issued, hash from profile_token where token_id=$1 limit 1; + + +-- name: UpdateProfileForEDIPI :exec +UPDATE profile SET username=$1, email=$2, display_name=$3 WHERE edipi=$4; + + +-- name: UpdateProfileForEmail :exec +update profile set username=$1, display_name=$2 where email ilike $3; + + +-- name: UpdateProfileForUsername :exec +update profile set email=$1, display_name=$2 where username=$3; + + +-- name: DeleteToken :exec +delete from profile_token where profile_id=$1 and token_id=$2; diff --git a/api/queries/project.sql b/api/queries/project.sql new file mode 100644 index 00000000..69fb81db --- /dev/null +++ b/api/queries/project.sql @@ -0,0 +1,65 @@ +-- name: ListProjects :many +select * from v_project; + + +-- name: SearchProjects :many +select * from v_project +where name ilike '%'||sqlc.arg(name)||'%' +limit 
sqlc.arg(result_limit); + + +-- name: ListProjectsForFederalID :many +select * from v_project +where federal_id = sqlc.arg(federal_id); + + +-- name: ListDistricts :many +select * from v_district; + + +-- name: ListProjectsForProfileRole :many +select p.* +from v_project p +inner join profile_project_roles pr on pr.project_id = p.id +inner join role r on r.id = pr.role_id +where pr.profile_id = $1 +and r.name = $2; + + +-- name: ListInstrumentsForProject :many +select i.* +from v_instrument i +inner join project_instrument pi on pi.instrument_id = i.id +where pi.project_id = $1; + + +-- name: ListInstrumentGroupsForProject :many +select ig.* +from v_instrument_group ig +where ig.project_id = $1; + + +-- name: GetProjectCount :one +select count(*) from project where not deleted; + + +-- name: GetProject :one +select * from v_project where id = $1; + + +-- name: CreateProject :one +insert into project (federal_id, slug, name, district_id, creator, create_date) +values ($1, slugify($2, 'project'), $2, $3, $4, $5) +returning id, slug; + + +-- name: UpdateProject :one +update project set name=$2, updater=$3, update_date=$4, district_id=$5, federal_id=$6 where id=$1 returning id; + + +-- name: UpdateProjectImage :exec +update project set image = $1 where id = $2; + + +-- name: DeleteFlagProject :exec +update project set deleted=true where id = $1; diff --git a/api/queries/project_role.sql b/api/queries/project_role.sql new file mode 100644 index 00000000..85db6d7e --- /dev/null +++ b/api/queries/project_role.sql @@ -0,0 +1,42 @@ +-- name: ListProjectMembers :many +select id, profile_id, username, email, role_id, role +from v_profile_project_roles +where project_id = $1 +order by email; + + +-- name: GetProjectMembership :one +select id, profile_id, username, email, role_id, role +from v_profile_project_roles +where id = $1; + + +-- name: CreateProfileProjectRole :one +insert into profile_project_roles (project_id, profile_id, role_id, granted_by) +values ($1, $2, $3, $4)
+on conflict on constraint unique_profile_project_role do update set project_id = excluded.project_id +returning id; + + +-- name: DeleteProfileProjectRole :exec +delete from profile_project_roles where project_id = $1 and profile_id = $2 and role_id = $3; + + +-- name: IsProjectAdmin :one +select exists ( + select 1 from profile_project_roles pr + inner join role r on r.id = pr.role_id + where pr.profile_id = $1 + and pr.project_id = $2 + and r.name = 'ADMIN' +); + + +-- name: IsProjectMember :one +select exists ( + select 1 from profile_project_roles pr + inner join role r on r.id = pr.role_id + where pr.profile_id = $1 + and pr.project_id = $2 + and (r.name = 'MEMBER' or r.name = 'ADMIN') +); diff --git a/api/queries/report_config.sql b/api/queries/report_config.sql new file mode 100644 index 00000000..34511968 --- /dev/null +++ b/api/queries/report_config.sql @@ -0,0 +1,55 @@ +-- name: CreateReportConfig :one +insert into report_config ( + name, slug, project_id, creator, description, date_range, date_range_enabled, + show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled +) +values ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) +returning id; + + +-- name: ListProjectReportConfigs :many +select * from v_report_config where project_id = $1; + + +-- name: ListReportConfigPlotConfigs :many +select * from v_plot_configuration where id = any( + select plot_config_id from report_config_plot_config where report_config_id = $1 +); + + +-- name: GetReportConfigByID :many +select * from v_report_config where id = $1; + + +-- name: UpdateReportConfig :exec +update report_config set name=$2, +updater=$3, update_date=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, +show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 where id=$1; + + +-- name: DeleteReportConfig :exec +delete from report_config where id=$1; + + +-- name: AssignReportConfigPlotConfig :exec +insert into 
report_config_plot_config (report_config_id, plot_config_id) values ($1, $2); + + +-- name: UnassignReportConfigPlotConfig :exec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2; + + +-- name: UnassignAllReportConfigPlotConfig :exec +delete from report_config_plot_config where report_config_id=$1; + + +-- name: GetReportDownloadJob :one +select * from report_download_job where id=$1 and creator=$2; + + +-- name: CreateReportDownloadJob :one +insert into report_download_job (report_config_id, creator) values ($1, $2) returning *; + + +-- name: UpdateReportDownloadJob :exec +update report_download_job set status=$2, progress=$3, progress_update_date=$4, file_key=$5, file_expiry=$6 where id=$1; diff --git a/api/queries/submittal.sql b/api/queries/submittal.sql new file mode 100644 index 00000000..786b9cd3 --- /dev/null +++ b/api/queries/submittal.sql @@ -0,0 +1,57 @@ +-- name: ListProjectSubmittals :many +select * +from v_submittal +where project_id = sqlc.arg(project_id) +and (sqlc.arg(show_incomplete_missing) = false or (completion_date is null and not marked_as_missing)) +order by due_date desc, alert_type_name asc; + + +-- name: ListInstrumentSubmittals :many +select sub.* +from v_submittal sub +inner join alert_config_instrument aci on aci.alert_config_id = sub.alert_config_id +where aci.instrument_id = sqlc.arg(instrument_id) +and (sqlc.arg(show_incomplete_missing) = false or (completion_date is null and not marked_as_missing)) +order by sub.due_date desc; + + +-- name: ListAlertConfigSubmittals :many +select * +from v_submittal +where alert_config_id = $1 +and (sqlc.arg(show_incomplete_missing) = false or (completion_date is null and not marked_as_missing)) +order by due_date desc; + + +-- name: ListUnverifiedMissingSubmittals :many +select * +from v_submittal +where completion_date is null +and not marked_as_missing +order by due_date desc; + + +-- name: UpdateSubmittal :exec +update submittal set + submittal_status_id = 
$2, + completion_date = $3, + warning_sent = $4 +where id = $1; + + +-- name: VerifyMissingSubmittal :exec +update submittal set + submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, + marked_as_missing = true +where id = $1 +and completion_date is null +and now() > due_date; + + +-- name: VerifyMissingAlertConfigSubmittals :exec +update submittal set + submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, + marked_as_missing = true +where alert_config_id = $1 +and completion_date is null +and now() > due_date; diff --git a/api/queries/timeseries.sql b/api/queries/timeseries.sql new file mode 100644 index 00000000..74955da4 --- /dev/null +++ b/api/queries/timeseries.sql @@ -0,0 +1,50 @@ +-- name: GetStoredTimeseriesExists :one +select exists (select id from v_timeseries_stored where id = $1); + + +-- name: GetTimeseriesProjectMap :many +select timeseries_id, project_id +from v_timeseries_project_map +where timeseries_id in (sqlc.arg(timeseries_ids)::uuid[]); + +-- name: ListProjectTimeseries :many +select t.* from v_timeseries t +inner join project_instrument p on p.instrument_id = t.instrument_id +where p.project_id = $1; + + +-- name: ListInstrumentTimeseries :many +select * from v_timeseries +where instrument_id = $1; + + +-- name: ListPlotConfigTimeseries :many +SELECT t.* FROM v_timeseries t +INNER JOIN plot_configuration_timeseries_trace pct ON pct.timeseries_id = t.id +WHERE pct.plot_configuration_id = $1; + + +-- name: ListInstrumentGroupTimeseries :many +select t.* from v_timeseries t +inner join instrument_group_instruments gi on gi.instrument_id = t.instrument_id +where gi.instrument_group_id = $1; + + +-- name: ListTimeseries :many +SELECT * FROM v_timeseries WHERE id = $1; + + +-- name: CreateTimeseries :one +insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) +values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) +returning id, instrument_id, slug, name, parameter_id, unit_id, type; + + +-- 
name: UpdateTimeseries :one +update timeseries set name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 +where id = $1 +returning id; + + +-- name: DeleteTimeseries :exec +delete from timeseries where id = $1; diff --git a/api/queries/timeseries_calculated.sql b/api/queries/timeseries_calculated.sql new file mode 100644 index 00000000..509c7ecd --- /dev/null +++ b/api/queries/timeseries_calculated.sql @@ -0,0 +1,56 @@ +-- name: ListCalculatedTimeseries :many +select + id, + instrument_id, + parameter_id, + unit_id, + slug, + name as formula_name, + coalesce(contents, '') as formula +from v_timeseries_computed +where (sqlc.narg(instrument_id) is null or instrument_id = sqlc.narg(instrument_id)) +and (sqlc.narg(id) is null or id = sqlc.narg(id)); + + +-- name: CreateCalculatedTimeseries :one +INSERT INTO timeseries ( + instrument_id, + parameter_id, + unit_id, + slug, + name, + type +) VALUES ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') +RETURNING id; + + +-- name: CreateCalculation :exec +insert into calculation (timeseries_id, contents) values ($1,$2); + + +-- name: CreateOrUpdateCalculation :exec +insert into calculation (timeseries_id, contents) values ($1, $2) +on conflict (timeseries_id) do update set contents = coalesce(excluded.contents, $3); + + +-- name: DeleteCalculatedTimeseries :exec +delete from timeseries where id = $1 and id in (select timeseries_id from calculation); + + +-- name: CreateOrUpdateCalculatedTimeseries :exec +insert into timeseries ( + id, + instrument_id, + parameter_id, + unit_id, + slug, + name, + type +) values ($1, $2, $3, $4, slugify($5, 'timeseries'), $5, 'computed') +on conflict (id) do update set + instrument_id = coalesce(excluded.instrument_id, $6), + parameter_id = coalesce(excluded.parameter_id, $7), + unit_id = coalesce(excluded.unit_id, $8), + slug = coalesce(excluded.slug, slugify($9, 'timeseries')), + name = coalesce(excluded.name, $9), + type = 'computed'; diff --git 
a/api/queries/timeseries_cwms.sql b/api/queries/timeseries_cwms.sql new file mode 100644 index 00000000..019c2bfb --- /dev/null +++ b/api/queries/timeseries_cwms.sql @@ -0,0 +1,22 @@ +-- name: ListTimeseriesCwms :many +select * from v_timeseries_cwms +where instrument_id = $1; + + +-- name: GetTimeseriesCwms :one +select * from v_timeseries_cwms +where id = $1; + + +-- name: CreateTimeseriesCwms :exec +insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values +($1, $2, $3, $4, $5); + + +-- name: UpdateTimeseriesCwms :exec +update timeseries_cwms set + cwms_timeseries_id=$2, + cwms_office_id=$3, + cwms_extent_earliest_time=$4, + cwms_extent_latest_time=$5 +where timeseries_id=$1; diff --git a/api/queries/unit.sql b/api/queries/unit.sql new file mode 100644 index 00000000..aa69dc56 --- /dev/null +++ b/api/queries/unit.sql @@ -0,0 +1,4 @@ +-- name: ListUnits :many +select id, name, abbreviation, unit_family_id, unit_family, measure_id, measure +from v_unit +order by name; diff --git a/compose.sh b/compose.sh index 4ce826ef..7fcd26d6 100755 --- a/compose.sh +++ b/compose.sh @@ -2,7 +2,10 @@ set -Eeo pipefail -parent_path=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) +parent_path=$( + cd "$(dirname "${BASH_SOURCE[0]}")" + pwd -P +) cd "$parent_path" COMPOSECMD="docker compose -f docker-compose.yml" @@ -10,12 +13,14 @@ COMPOSECMD="docker compose -f docker-compose.yml" mkdocs() { ( DOCKER_BUILDKIT=1 docker build --file api/Dockerfile.openapi --output api/internal/server/docs api - cd report && npm run generate >/dev/null; + cd report && npm run generate >/dev/null ) } +if [ "$1" = "gen" ]; then + docker run --rm -v $(pwd):/src -w /src sqlc/sqlc generate -if [ "$1" = "watch" ]; then +elif [ "$1" = "watch" ]; then mkdocs -q if [ "$2" = "mock" ]; then DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml --profile=mock watch @@ -23,7 +28,6 @@ if [ "$1" = "watch" ]; then DOCKER_BUILDKIT=1 
$COMPOSECMD -f docker-compose.dev.yml watch fi - elif [ "$1" = "up" ]; then mkdocs -q if [ "$2" = "mock" ]; then @@ -31,7 +35,6 @@ elif [ "$1" = "up" ]; then else DOCKER_BUILDKIT=1 $COMPOSECMD up -d --build fi - elif [ "$1" = "build" ]; then if [ "$2" = "local" ] || [ "$2" = "develop" ] || [ "$2" = "test" ] || [ "$2" = "prod" ]; then @@ -46,20 +49,19 @@ elif [ "$1" = "build" ]; then AMD64_TARGET_PLATFORM=true fi - for BUILD_TARGET in midas-api midas-sql midas-telemetry midas-alert midas-dcs-loader - do - docker build \ - ${AMD64_TARGET_PLATFORM:+--platform=linux/amd64} \ - --build-arg="BASE_IMAGE=${SCRATCH_BASE_IMAGE}" \ - --build-arg="GO_VERSION=1.23" \ - --build-arg="BUILD_TAG=$2" \ - --build-arg="BUILD_TARGET=${BUILD_TARGET}" \ - -t $BUILD_TARGET:"$2" api + for BUILD_TARGET in midas-api midas-sql midas-telemetry midas-alert midas-dcs-loader; do + docker build \ + ${AMD64_TARGET_PLATFORM:+--platform=linux/amd64} \ + --build-arg="BASE_IMAGE=${SCRATCH_BASE_IMAGE}" \ + --build-arg="GO_VERSION=1.23" \ + --build-arg="BUILD_TAG=$2" \ + --build-arg="BUILD_TARGET=${BUILD_TARGET}" \ + -t $BUILD_TARGET:"$2" api done docker build \ - --build-arg="BASE_IMAGE=${ALPINE_BASE_IMAGE}" \ - -t midas-report:$2 report + --build-arg="BASE_IMAGE=${ALPINE_BASE_IMAGE}" \ + -t midas-report:$2 report else echo -e "usage:\n\t./compose.sh build [local,develop,test,prod]" exit 1 @@ -74,8 +76,7 @@ elif [ "$1" = "build" ]; then declare -a REGISTRIES=("midas-api" "midas-telemetry" "midas-alert" "midas-dcs-loader" "midas-sql") # tag - for IMAGE in "${REGISTRIES[@]}" - do + for IMAGE in "${REGISTRIES[@]}"; do docker tag $IMAGE:"$2" $4/$IMAGE:"$2" done if [ "$2" = "develop" ]; then @@ -83,8 +84,7 @@ elif [ "$1" = "build" ]; then fi # push - for IMAGE in "${REGISTRIES[@]}" - do + for IMAGE in "${REGISTRIES[@]}"; do docker push $4/$IMAGE:"$2" done if [ "$2" = "develop" ]; then @@ -92,20 +92,16 @@ elif [ "$1" = "build" ]; then fi fi - elif [ "$1" = "authdbdump" ]; then - $COMPOSECMD exec authdb 
pg_dump postgres > auth/initdb/init2.sql - + $COMPOSECMD exec authdb pg_dump postgres >auth/initdb/init2.sql elif [ "$1" = "down" ]; then mkdocs -q $COMPOSECMD -f docker-compose.dev.yml --profile=mock down - elif [ "$1" = "clean" ]; then $COMPOSECMD -f docker-compose.dev.yml --profile=mock down -v - elif [ "$1" = "test" ]; then docker compose build shift @@ -115,21 +111,21 @@ elif [ "$1" = "test" ]; then while [[ $# -gt 0 ]]; do case $1 in - -rm) - TEARDOWN=true - shift - ;; - *) - REST_ARGS+=("$1") - shift - ;; + -rm) + TEARDOWN=true + shift + ;; + *) + REST_ARGS+=("$1") + shift + ;; esac done GOCMD="go test ${REST_ARGS[@]} github.com/USACE/instrumentation-api/api/internal/handler" if [ "$REPORT" = true ]; then - docker compose run -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api > $(pwd)/test.log + docker compose run -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api >$(pwd)/test.log else docker compose run -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api fi @@ -138,11 +134,9 @@ elif [ "$1" = "test" ]; then docker compose --profile=mock down -v fi - elif [ "$1" = "mkdocs" ]; then mkdocs - else echo -e "usage:\n\t./compose.sh watch\n\t./compose.sh up\n\t./compose.sh down\n\t./compose.sh clean\n\t./compose.sh test\n\t./compose.sh mkdocs" fi diff --git a/go.work.sum b/go.work.sum index 4ab4a8b4..7afc1998 100644 --- a/go.work.sum +++ b/go.work.sum @@ -18,6 +18,7 @@ github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/joeshaw/multierror 
v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak= github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= diff --git a/report/generated.d.ts b/report/generated.d.ts index 0f48113c..aea27d27 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -7030,18 +7030,12 @@ export interface paths { }; }; "/timeseries_measurements": { - /** creates or updates one or more timeseries measurements */ + /** creates one or more timeseries measurements */ post: { - parameters: { - query: { - /** @description api key */ - key: string; - }; - }; - /** @description array of timeseries measurement collections */ - requestBody: { + requestBody?: { content: { - "*/*": components["schemas"]["TimeseriesMeasurementCollectionCollection"]; + "application/json": components["schemas"]["_timeseries_measurements_post_request"]; + "multipart/form-data": components["schemas"]["_timeseries_measurements_post_request"]; }; }; responses: { @@ -9813,6 +9807,13 @@ export interface components { }; /** @enum {integer} */ "pgtype.Status": 0 | 1 | 2; + _timeseries_measurements_post_request: { + /** + * Format: binary + * @description TOA5 file of timeseries measurement collections + */ + timeseries_measurement_collections?: string; + }; }; responses: never; parameters: never; diff --git a/sqlc.yml b/sqlc.yml new file mode 100644 index 00000000..b2d428e9 --- /dev/null +++ b/sqlc.yml @@ -0,0 +1,54 @@ +version: "2" +sql: + - engine: "postgresql" + queries: "api/queries/*.sql" + schema: + - "api/migrations/schema/*.sql" + - "api/migrations/repeat/*.sql" + gen: + go: + package: "db" + out: "api/internal/db" + sql_package: "pgx/v5" + output_files_suffix: "_gen" + emit_json_tags: true + emit_interface: true + emit_empty_slices: true + emit_pointers_for_null_types: true + overrides: + - db_type: "uuid" + go_type: "github.com/google/uuid.UUID" + - db_type: "geometry" + go_type: "github.com/twpayne/go-geom.T" + - db_type: 
"timestamptz" + go_type: "time.Time" + - db_type: "json" + go_type: "github.com/USACE/instrumentation-api/api/internal/model.Opts" + - column: "v_instrument.projects" + go_type: + import: "github.com/USACE/instrumentation-api/api/internal/model" + package: "model" + type: "IDSlugName" + slice: true + - column: "v_instrument.constants" + go_type: + import: "github.com/google/uuid" + package: "uuid" + type: "UUID" + slice: true + - column: "v_instrument.groups" + go_type: + import: "github.com/google/uuid" + package: "uuid" + type: "UUID" + slice: true + - column: "v_instrument.alert_configs" + go_type: + import: "github.com/google/uuid" + package: "uuid" + type: "UUID" + slice: true + - column: "v_instrument.geometry" + go_type: + import: "github.com/twpayne/go-geom" + type: "T" From 13b806c755242a969f4b57d1da1f6e0724472a73 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Thu, 31 Oct 2024 14:21:17 -0400 Subject: [PATCH 04/23] wip --- api/internal/model/uploader.go | 56 ++++++++++++++++++++ api/migrations/schema/V1.14.00__uploader.sql | 20 +++++++ 2 files changed, 76 insertions(+) create mode 100644 api/internal/model/uploader.go create mode 100644 api/migrations/schema/V1.14.00__uploader.sql diff --git a/api/internal/model/uploader.go b/api/internal/model/uploader.go new file mode 100644 index 00000000..953f7cf7 --- /dev/null +++ b/api/internal/model/uploader.go @@ -0,0 +1,56 @@ +package model + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +type UploaderConfigType string + +const ( + CSV, DUX, TOA5 UploaderConfigType = "csv", "dux", "toa5" +) + +type UploaderConfig struct { + ID uuid.UUID + ProjectID uuid.UUID + Name string + Discription string + CreateDate time.Time + Creator uuid.UUID + Type UploaderConfigType +} + +type UploaderConfigMapping struct { + UploaderConfigID uuid.UUID + FieldName string + TimeseriesID uuid.UUID +} + +const createUploaderConfig = ` + INSERT INTO uploader_config VALUES ($1, $2, $3, $4, $5, $6) +` + +func (q Queries) 
CreateUploaderConfig(ctx context.Context, uc UploaderConfig) error { + +} + +// CREATE TABLE uploader_config ( +// id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), +// project_id uuid NOT NULL REFERENCES project(id), +// name text NOT NULL, +// description text NOT NULL, +// create_date timestamptz NOT NULL DEFAULT now(), +// creator uuid NOT NULL REFERENCES profile(id), +// type uploader_config_type NOT NULL +// ); +// +// +// CREATE TABLE uploader_config_mapping ( +// uploader_config_id uuid NOT NULL REFERENCES uploader_config(id), +// field_name text NOT NULL, +// timeseries_id uuid UNIQUE NOT NULL REFERENCES timeseries(id), +// CONSTRAINT uploader_config_mapping_uploader_config_id_field_name UNIQUE(uploader_config_id, field_name) +// ); diff --git a/api/migrations/schema/V1.14.00__uploader.sql b/api/migrations/schema/V1.14.00__uploader.sql new file mode 100644 index 00000000..f4c01901 --- /dev/null +++ b/api/migrations/schema/V1.14.00__uploader.sql @@ -0,0 +1,20 @@ +CREATE TYPE uploader_config_type AS ENUM ('csv', 'dux', 'toa5'); + + +CREATE TABLE uploader_config ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + project_id uuid NOT NULL REFERENCES project(id), + name text NOT NULL, + description text NOT NULL, + create_date timestamptz NOT NULL DEFAULT now(), + creator uuid NOT NULL REFERENCES profile(id), + type uploader_config_type NOT NULL +); + + +CREATE TABLE uploader_config_mapping ( + uploader_config_id uuid NOT NULL REFERENCES uploader_config(id), + field_name text NOT NULL, + timeseries_id uuid UNIQUE NOT NULL REFERENCES timeseries(id), + CONSTRAINT uploader_config_mapping_uploader_config_id_field_name UNIQUE(uploader_config_id, field_name) +); From 2253490595cd6a7d213c5aeee1b51fe4e8b52268 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 6 Nov 2024 14:41:55 -0500 Subject: [PATCH 05/23] wip; sqlc queries --- api/go.mod | 1 + api/go.sum | 2 + api/internal/db/alert.sql_gen.go | 37 +- api/internal/db/alert_check.sql_gen.go | 25 +- 
api/internal/db/alert_config.sql_gen.go | 74 +- .../db/alert_measurement_check.sql_gen.go | 6 +- api/internal/db/alert_subscription.sql_gen.go | 56 +- api/internal/db/autocomplete.sql_gen.go | 1 - api/internal/db/aware.sql_gen.go | 6 +- api/internal/db/batch.go | 1615 ++++++++++++++++- api/internal/db/collection_group.sql_gen.go | 122 +- api/internal/db/datalogger.sql_gen.go | 1 - .../db/datalogger_telemetry.sql_gen.go | 1 - api/internal/db/district_rollup.sql_gen.go | 1 - api/internal/db/equivalency_table.sql_gen.go | 61 +- api/internal/db/evaluation.sql_gen.go | 89 +- api/internal/db/instrument.sql_gen.go | 317 ++-- api/internal/db/instrument_assign.sql_gen.go | 32 +- .../db/instrument_constant.sql_gen.go | 1 - api/internal/db/instrument_group.sql_gen.go | 156 +- api/internal/db/instrument_ipi.sql_gen.go | 40 +- api/internal/db/instrument_note.sql_gen.go | 14 +- api/internal/db/instrument_saa.sql_gen.go | 44 +- api/internal/db/instrument_status.sql_gen.go | 1 - api/internal/db/measurement.sql_gen.go | 27 +- api/internal/db/models.go | 868 ++++----- api/internal/db/overrides.go | 132 ++ api/internal/db/plot_config.sql_gen.go | 96 +- .../db/plot_config_bullseye.sql_gen.go | 14 +- .../db/plot_config_contour.sql_gen.go | 36 +- .../db/plot_config_profile.sql_gen.go | 1 - .../db/plot_config_scatter_line.sql_gen.go | 58 +- api/internal/db/profile.sql_gen.go | 62 +- api/internal/db/project.sql_gen.go | 157 +- api/internal/db/project_role.sql_gen.go | 10 +- api/internal/db/querier.go | 88 +- api/internal/db/report_config.sql_gen.go | 97 +- api/internal/db/submittal.sql_gen.go | 13 +- api/internal/db/timeseries.sql_gen.go | 18 +- .../db/timeseries_calculated.sql_gen.go | 42 +- api/internal/db/timeseries_cwms.sql_gen.go | 22 +- api/internal/handler/handlerv2.go | 1 - api/internal/model/common.go | 79 + api/internal/model/datalogger.go | 8 +- api/internal/model/instrument.go | 55 +- api/internal/model/report_config.go | 16 - api/internal/model/timeseries.go | 12 +- 
api/internal/model/timeseries_calculated.go | 4 +- api/internal/servicev2/alert.go | 71 +- api/internal/servicev2/alert_check.go | 305 +++- api/internal/servicev2/alert_config.go | 104 +- api/internal/servicev2/alert_subscription.go | 160 +- api/internal/servicev2/autocomplete.go | 9 - api/internal/servicev2/collection_group.go | 51 +- api/internal/servicev2/datalogger.go | 148 +- .../servicev2/datalogger_telemetry.go | 85 +- api/internal/servicev2/db.go | 10 +- api/internal/servicev2/district_rollup.go | 7 +- api/internal/servicev2/domain.go | 7 +- api/internal/servicev2/equivalency_table.go | 73 +- api/internal/servicev2/evaluation.go | 123 +- api/internal/servicev2/heartbeat.go | 7 +- api/internal/servicev2/home.go | 7 +- api/internal/servicev2/instrument.go | 204 ++- api/internal/servicev2/instrument_assign.go | 365 ++-- api/internal/servicev2/instrument_constant.go | 19 +- api/internal/servicev2/instrument_group.go | 42 +- api/internal/servicev2/instrument_ipi.go | 52 +- api/internal/servicev2/instrument_note.go | 41 +- api/internal/servicev2/instrument_opts.go | 427 ++++- api/internal/servicev2/instrument_saa.go | 13 +- api/internal/servicev2/instrument_status.go | 13 +- api/internal/servicev2/measurement.go | 147 +- .../servicev2/measurement_inclinometer.go | 17 +- api/internal/servicev2/opendcs.go | 7 +- api/internal/servicev2/plot_config.go | 58 +- .../servicev2/plot_config_bullseye.go | 79 +- api/internal/servicev2/plot_config_contour.go | 140 +- api/internal/servicev2/plot_config_profile.go | 77 +- .../servicev2/plot_config_scatter_line.go | 136 +- api/internal/servicev2/profile.go | 82 +- api/internal/servicev2/project.go | 88 +- api/internal/servicev2/project_role.go | 46 +- api/internal/servicev2/report_config.go | 167 +- api/internal/servicev2/submittal.go | 7 +- api/internal/servicev2/timeseries.go | 79 +- .../servicev2/timeseries_calculated.go | 19 +- api/internal/servicev2/timeseries_cwms.go | 84 +- api/internal/servicev2/timeseries_process.go | 7 
+- api/internal/servicev2/unit.go | 10 +- api/internal/servicev2/uploader.go | 13 +- .../repeat/0020__views_profiles.sql | 7 +- .../repeat/0040__views_instruments.sql | 22 +- .../repeat/0050__views_timeseries.sql | 43 + api/migrations/repeat/0060__views_alerts.sql | 55 +- api/migrations/repeat/0090__views_plots.sql | 22 +- .../repeat/0100__views_datalogger.sql | 18 +- .../repeat/0110__views_evaluations.sql | 48 +- .../repeat/0120__views_alert_check.sql | 20 +- .../0140__views_depth_based_instruments.sql | 62 +- api/migrations/repeat/0150__views_domain.sql | 4 +- .../repeat/0160__views_report_config.sql | 14 +- api/migrations/schema/V1.14.00__uploader.sql | 5 + api/queries/alert_check.sql | 4 +- api/queries/alert_config.sql | 4 +- api/queries/alert_subscription.sql | 8 +- api/queries/aware.sql | 3 + api/queries/collection_group.sql | 24 +- api/queries/equivalency_table.sql | 2 +- api/queries/evaluation.sql | 8 +- api/queries/instrument.sql | 58 +- api/queries/instrument_assign.sql | 17 +- api/queries/instrument_constant.sql | 5 + api/queries/instrument_group.sql | 33 +- api/queries/instrument_ipi.sql | 32 + api/queries/instrument_note.sql | 6 + api/queries/instrument_saa.sql | 24 + api/queries/instrument_status.sql | 5 + api/queries/measurement.sql | 28 +- api/queries/plot_config.sql | 31 +- api/queries/plot_config_contour.sql | 11 +- api/queries/plot_config_scatter_line.sql | 11 + api/queries/profile.sql | 2 +- api/queries/project.sql | 18 +- api/queries/report_config.sql | 10 +- api/queries/timeseries.sql | 6 + api/queries/timeseries_cwms.sql | 5 + go.work.sum | 47 + sqlc.yml | 152 +- 129 files changed, 6006 insertions(+), 3093 deletions(-) create mode 100644 api/internal/db/overrides.go diff --git a/api/go.mod b/api/go.mod index 4c8113ab..6fec5963 100644 --- a/api/go.mod +++ b/api/go.mod @@ -64,6 +64,7 @@ require ( github.com/mattn/go-isatty v0.0.20 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rogpeppe/go-internal v1.11.0 // indirect + 
github.com/twpayne/go-geom v1.5.7 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect diff --git a/api/go.sum b/api/go.sum index 1da0e199..c46a7ff8 100644 --- a/api/go.sum +++ b/api/go.sum @@ -238,6 +238,8 @@ github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8 github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI= github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/twpayne/go-geom v1.5.7 h1:7fdceDUr03/MP7rAKOaTV6x9njMiQdxB/D0PDzMTCDc= +github.com/twpayne/go-geom v1.5.7/go.mod h1:y4fTAQtLedXW8eG2Yo4tYrIGN1yIwwKkmA+K3iSHKBA= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= diff --git a/api/internal/db/alert.sql_gen.go b/api/internal/db/alert.sql_gen.go index 72cdcfd9..ff95ac9c 100644 --- a/api/internal/db/alert.sql_gen.go +++ b/api/internal/db/alert.sql_gen.go @@ -10,7 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const createAlert = `-- name: CreateAlert :exec @@ -68,15 +67,15 @@ type GetAlertParams struct { } type GetAlertRow struct { - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id"` - CreateDate time.Time `json:"create_date"` - ProjectID uuid.UUID `json:"project_id"` - ProjectName string `json:"project_name"` - Name string `json:"name"` - Body string `json:"body"` - Instruments interface{} `json:"instruments"` - Read bool `json:"read"` + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreateDate time.Time 
`json:"create_date"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + Instruments []InstrumentIDName `json:"instruments"` + Read bool `json:"read"` } func (q *Queries) GetAlert(ctx context.Context, arg GetAlertParams) (GetAlertRow, error) { @@ -144,15 +143,15 @@ where aps.profile_id = $1 ` type ListAlertsForProfileRow struct { - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id"` - CreateDate time.Time `json:"create_date"` - ProjectID uuid.UUID `json:"project_id"` - ProjectName string `json:"project_name"` - Name string `json:"name"` - Body string `json:"body"` - Instruments interface{} `json:"instruments"` - Read bool `json:"read"` + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreateDate time.Time `json:"create_date"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + Instruments []InstrumentIDName `json:"instruments"` + Read bool `json:"read"` } func (q *Queries) ListAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]ListAlertsForProfileRow, error) { diff --git a/api/internal/db/alert_check.sql_gen.go b/api/internal/db/alert_check.sql_gen.go index 5bf521a8..8ec1c556 100644 --- a/api/internal/db/alert_check.sql_gen.go +++ b/api/internal/db/alert_check.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createNextSubmittalFromNewAlertConfigDate = `-- name: CreateNextSubmittalFromNewAlertConfigDate :exec @@ -25,12 +23,12 @@ where ac.id = $1 ` type CreateNextSubmittalFromNewAlertConfigDateParams struct { - ID uuid.UUID `json:"id"` - Column2 time.Time `json:"column_2"` + ID uuid.UUID `json:"id"` + Date time.Time `json:"date"` } func (q *Queries) CreateNextSubmittalFromNewAlertConfigDate(ctx context.Context, arg 
CreateNextSubmittalFromNewAlertConfigDateParams) error { - _, err := q.db.Exec(ctx, createNextSubmittalFromNewAlertConfigDate, arg.ID, arg.Column2) + _, err := q.db.Exec(ctx, createNextSubmittalFromNewAlertConfigDate, arg.ID, arg.Date) return err } @@ -38,11 +36,11 @@ const listAndCheckAlertConfigs = `-- name: ListAndCheckAlertConfigs :many update alert_config ac1 set last_checked = now() from ( - select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, instruments, alert_email_subscriptions + select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config ) ac2 where ac1.id = ac2.id -returning ac2.id, ac2.name, ac2.body, ac2.creator, ac2.creator_username, ac2.create_date, ac2.updater, ac2.updater_username, ac2.update_date, ac2.project_id, ac2.project_name, ac2.alert_type_id, ac2.alert_type, ac2.start_date, ac2.schedule_interval, ac2.mute_consecutive_alerts, ac2.remind_interval, ac2.warning_interval, ac2.last_checked, ac2.last_reminded, ac2.instruments, ac2.alert_email_subscriptions +returning ac2.id, ac2.name, ac2.body, ac2.creator, ac2.creator_username, ac2.create_date, ac2.updater, ac2.updater_username, ac2.update_date, ac2.project_id, ac2.project_name, ac2.alert_type_id, ac2.alert_type, ac2.start_date, ac2.schedule_interval, ac2.mute_consecutive_alerts, ac2.remind_interval, ac2.warning_interval, ac2.last_checked, ac2.last_reminded, ac2.create_next_submittal_from, ac2.instruments, ac2.alert_email_subscriptions ` func (q *Queries) 
ListAndCheckAlertConfigs(ctx context.Context) ([]VAlertConfig, error) { @@ -75,6 +73,7 @@ func (q *Queries) ListAndCheckAlertConfigs(ctx context.Context) ([]VAlertConfig, &i.WarningInterval, &i.LastChecked, &i.LastReminded, + &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, ); err != nil { @@ -93,8 +92,8 @@ update alert_config set last_reminded = $2 where id = $1 ` type UpdateAlertConfigLastRemindedParams struct { - ID uuid.UUID `json:"id"` - LastReminded pgtype.Timestamptz `json:"last_reminded"` + ID uuid.UUID `json:"id"` + LastReminded *time.Time `json:"last_reminded"` } func (q *Queries) UpdateAlertConfigLastReminded(ctx context.Context, arg UpdateAlertConfigLastRemindedParams) error { @@ -111,10 +110,10 @@ where id = $1 ` type UpdateSubmittalCompletionDateOrWarningSentParams struct { - ID uuid.UUID `json:"id"` - SubmittalStatusID pgtype.UUID `json:"submittal_status_id"` - CompletionDate pgtype.Timestamptz `json:"completion_date"` - WarningSent bool `json:"warning_sent"` + ID uuid.UUID `json:"id"` + SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` + CompletionDate *time.Time `json:"completion_date"` + WarningSent bool `json:"warning_sent"` } func (q *Queries) UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, arg UpdateSubmittalCompletionDateOrWarningSentParams) error { diff --git a/api/internal/db/alert_config.sql_gen.go b/api/internal/db/alert_config.sql_gen.go index 3ab4a27f..1dabfc9a 100644 --- a/api/internal/db/alert_config.sql_gen.go +++ b/api/internal/db/alert_config.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const assignInstrumentToAlertConfig = `-- name: AssignInstrumentToAlertConfig :exec @@ -46,17 +44,17 @@ returning id ` type CreateAlertConfigParams struct { - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Body string `json:"body"` - AlertTypeID uuid.UUID `json:"alert_type_id"` - StartDate 
time.Time `json:"start_date"` - ScheduleInterval pgtype.Interval `json:"schedule_interval"` - MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` - RemindInterval pgtype.Interval `json:"remind_interval"` - WarningInterval pgtype.Interval `json:"warning_interval"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + StartDate time.Time `json:"start_date"` + ScheduleInterval string `json:"schedule_interval"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + RemindInterval string `json:"remind_interval"` + WarningInterval string `json:"warning_interval"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` } func (q *Queries) CreateAlertConfig(ctx context.Context, arg CreateAlertConfigParams) (uuid.UUID, error) { @@ -99,12 +97,12 @@ func (q *Queries) DeleteAlertConfig(ctx context.Context, id uuid.UUID) error { return err } -const getetAlertConfig = `-- name: GetetAlertConfig :one -select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, instruments, alert_email_subscriptions from v_alert_config where id = $1 +const getAlertConfig = `-- name: GetAlertConfig :one +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where id = $1 ` -func (q *Queries) GetetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) { - row 
:= q.db.QueryRow(ctx, getetAlertConfig, id) +func (q *Queries) GetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) { + row := q.db.QueryRow(ctx, getAlertConfig, id) var i VAlertConfig err := row.Scan( &i.ID, @@ -127,6 +125,7 @@ func (q *Queries) GetetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertCon &i.WarningInterval, &i.LastChecked, &i.LastReminded, + &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, ) @@ -134,7 +133,7 @@ func (q *Queries) GetetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertCon } const listAlertConfigsForInstrument = `-- name: ListAlertConfigsForInstrument :many -select t.id, t.name, t.body, t.creator, t.creator_username, t.create_date, t.updater, t.updater_username, t.update_date, t.project_id, t.project_name, t.alert_type_id, t.alert_type, t.start_date, t.schedule_interval, t.mute_consecutive_alerts, t.remind_interval, t.warning_interval, t.last_checked, t.last_reminded, t.instruments, t.alert_email_subscriptions +select t.id, t.name, t.body, t.creator, t.creator_username, t.create_date, t.updater, t.updater_username, t.update_date, t.project_id, t.project_name, t.alert_type_id, t.alert_type, t.start_date, t.schedule_interval, t.mute_consecutive_alerts, t.remind_interval, t.warning_interval, t.last_checked, t.last_reminded, t.create_next_submittal_from, t.instruments, t.alert_email_subscriptions from v_alert_config t inner join alert_config_instrument aci on t.id = aci.alert_config_id where aci.instrument_id = $1 @@ -171,6 +170,7 @@ func (q *Queries) ListAlertConfigsForInstrument(ctx context.Context, instrumentI &i.WarningInterval, &i.LastChecked, &i.LastReminded, + &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, ); err != nil { @@ -185,7 +185,7 @@ func (q *Queries) ListAlertConfigsForInstrument(ctx context.Context, instrumentI } const listAlertConfigsForProject = `-- name: ListAlertConfigsForProject :many -select id, name, body, creator, creator_username, 
create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, instruments, alert_email_subscriptions +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where project_id = $1 order by name @@ -221,6 +221,7 @@ func (q *Queries) ListAlertConfigsForProject(ctx context.Context, projectID uuid &i.WarningInterval, &i.LastChecked, &i.LastReminded, + &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, ); err != nil { @@ -234,21 +235,21 @@ func (q *Queries) ListAlertConfigsForProject(ctx context.Context, projectID uuid return items, nil } -const listAlertConfigsForProjectAndAlertType = `-- name: ListAlertConfigsForProjectAndAlertType :many -select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, instruments, alert_email_subscriptions +const listAlertConfigsForProjectAlertType = `-- name: ListAlertConfigsForProjectAlertType :many +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where project_id = $1 and alert_type_id = $2 order by name ` -type 
ListAlertConfigsForProjectAndAlertTypeParams struct { +type ListAlertConfigsForProjectAlertTypeParams struct { ProjectID uuid.UUID `json:"project_id"` AlertTypeID uuid.UUID `json:"alert_type_id"` } -func (q *Queries) ListAlertConfigsForProjectAndAlertType(ctx context.Context, arg ListAlertConfigsForProjectAndAlertTypeParams) ([]VAlertConfig, error) { - rows, err := q.db.Query(ctx, listAlertConfigsForProjectAndAlertType, arg.ProjectID, arg.AlertTypeID) +func (q *Queries) ListAlertConfigsForProjectAlertType(ctx context.Context, arg ListAlertConfigsForProjectAlertTypeParams) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, listAlertConfigsForProjectAlertType, arg.ProjectID, arg.AlertTypeID) if err != nil { return nil, err } @@ -277,6 +278,7 @@ func (q *Queries) ListAlertConfigsForProjectAndAlertType(ctx context.Context, ar &i.WarningInterval, &i.LastChecked, &i.LastReminded, + &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, ); err != nil { @@ -314,17 +316,17 @@ where id = $1 and project_id = $2 ` type UpdateAlertConfigParams struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Body string `json:"body"` - StartDate time.Time `json:"start_date"` - ScheduleInterval pgtype.Interval `json:"schedule_interval"` - MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` - RemindInterval pgtype.Interval `json:"remind_interval"` - WarningInterval pgtype.Interval `json:"warning_interval"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartDate time.Time `json:"start_date"` + ScheduleInterval string `json:"schedule_interval"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + RemindInterval string `json:"remind_interval"` + WarningInterval string `json:"warning_interval"` + Updater *uuid.UUID `json:"updater"` 
+ UpdateDate *time.Time `json:"update_date"` } func (q *Queries) UpdateAlertConfig(ctx context.Context, arg UpdateAlertConfigParams) error { @@ -363,7 +365,7 @@ and sq.new_due_date > now() returning id ` -func (q *Queries) UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID pgtype.UUID) (uuid.UUID, error) { +func (q *Queries) UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) { row := q.db.QueryRow(ctx, updateFutureSubmittalForAlertConfig, alertConfigID) var id uuid.UUID err := row.Scan(&id) diff --git a/api/internal/db/alert_measurement_check.sql_gen.go b/api/internal/db/alert_measurement_check.sql_gen.go index 00b86821..cee589ba 100644 --- a/api/internal/db/alert_measurement_check.sql_gen.go +++ b/api/internal/db/alert_measurement_check.sql_gen.go @@ -10,7 +10,7 @@ import ( ) const listIncompleteEvaluationSubmittals = `-- name: ListIncompleteEvaluationSubmittals :many -select alert_config_id, submittal_id, should_warn, should_alert, should_remind from v_alert_check_evaluation_submittal +select alert_config_id, submittal_id, submittal, should_warn, should_alert, should_remind from v_alert_check_evaluation_submittal where submittal_id = any( select id from submittal where completion_date is null and not marked_as_missing @@ -29,6 +29,7 @@ func (q *Queries) ListIncompleteEvaluationSubmittals(ctx context.Context) ([]VAl if err := rows.Scan( &i.AlertConfigID, &i.SubmittalID, + &i.Submittal, &i.ShouldWarn, &i.ShouldAlert, &i.ShouldRemind, @@ -44,7 +45,7 @@ func (q *Queries) ListIncompleteEvaluationSubmittals(ctx context.Context) ([]VAl } const listIncompleteMeasurementSubmittals = `-- name: ListIncompleteMeasurementSubmittals :many -select alert_config_id, submittal_id, should_warn, should_alert, should_remind, affected_timeseries from v_alert_check_measurement_submittal +select alert_config_id, submittal_id, submittal, should_warn, should_alert, should_remind, affected_timeseries from 
v_alert_check_measurement_submittal where submittal_id = any( select id from submittal where completion_date is null and not marked_as_missing @@ -63,6 +64,7 @@ func (q *Queries) ListIncompleteMeasurementSubmittals(ctx context.Context) ([]VA if err := rows.Scan( &i.AlertConfigID, &i.SubmittalID, + &i.Submittal, &i.ShouldWarn, &i.ShouldAlert, &i.ShouldRemind, diff --git a/api/internal/db/alert_subscription.sql_gen.go b/api/internal/db/alert_subscription.sql_gen.go index 6578fa19..b45fe8df 100644 --- a/api/internal/db/alert_subscription.sql_gen.go +++ b/api/internal/db/alert_subscription.sql_gen.go @@ -9,7 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const createAlertEmailSubscription = `-- name: CreateAlertEmailSubscription :exec @@ -104,47 +103,34 @@ func (q *Queries) DeleteAllAlertProfileSubscritpionsForAlertConfig(ctx context.C return err } -const getAlertSubscription = `-- name: GetAlertSubscription :many +const getAlertSubscription = `-- name: GetAlertSubscription :one +select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where id = $1 +` + +func (q *Queries) GetAlertSubscription(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) { + row := q.db.QueryRow(ctx, getAlertSubscription, id) + var i AlertProfileSubscription + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.ProfileID, + &i.MuteUi, + &i.MuteNotify, + ) + return i, err +} + +const getAlertSubscriptionForAlertConfig = `-- name: GetAlertSubscriptionForAlertConfig :one select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where alert_config_id = $1 and profile_id = $2 ` -type GetAlertSubscriptionParams struct { +type GetAlertSubscriptionForAlertConfigParams struct { AlertConfigID uuid.UUID `json:"alert_config_id"` ProfileID uuid.UUID `json:"profile_id"` } -func (q *Queries) GetAlertSubscription(ctx context.Context, arg GetAlertSubscriptionParams) 
([]AlertProfileSubscription, error) { - rows, err := q.db.Query(ctx, getAlertSubscription, arg.AlertConfigID, arg.ProfileID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []AlertProfileSubscription{} - for rows.Next() { - var i AlertProfileSubscription - if err := rows.Scan( - &i.ID, - &i.AlertConfigID, - &i.ProfileID, - &i.MuteUi, - &i.MuteNotify, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const getAlertSubscriptionByID = `-- name: GetAlertSubscriptionByID :one -select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where id = $1 -` - -func (q *Queries) GetAlertSubscriptionByID(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) { - row := q.db.QueryRow(ctx, getAlertSubscriptionByID, id) +func (q *Queries) GetAlertSubscriptionForAlertConfig(ctx context.Context, arg GetAlertSubscriptionForAlertConfigParams) (AlertProfileSubscription, error) { + row := q.db.QueryRow(ctx, getAlertSubscriptionForAlertConfig, arg.AlertConfigID, arg.ProfileID) var i AlertProfileSubscription err := row.Scan( &i.ID, diff --git a/api/internal/db/autocomplete.sql_gen.go b/api/internal/db/autocomplete.sql_gen.go index 67529d54..e45d04ce 100644 --- a/api/internal/db/autocomplete.sql_gen.go +++ b/api/internal/db/autocomplete.sql_gen.go @@ -9,7 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const listEmailAutocomplete = `-- name: ListEmailAutocomplete :many diff --git a/api/internal/db/aware.sql_gen.go b/api/internal/db/aware.sql_gen.go index 96e4b4a2..5ec50a8e 100644 --- a/api/internal/db/aware.sql_gen.go +++ b/api/internal/db/aware.sql_gen.go @@ -9,8 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createAwarePlatform = `-- name: CreateAwarePlatform :exec @@ -18,8 +16,8 @@ insert into 
aware_platform (instrument_id, aware_id) values ($1, $2) ` type CreateAwarePlatformParams struct { - InstrumentID pgtype.UUID `json:"instrument_id"` - AwareID uuid.UUID `json:"aware_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` } func (q *Queries) CreateAwarePlatform(ctx context.Context, arg CreateAwarePlatformParams) error { diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go index 59f3ddeb..1f757ea4 100644 --- a/api/internal/db/batch.go +++ b/api/internal/db/batch.go @@ -8,9 +8,9 @@ package db import ( "context" "errors" + "time" "github.com/google/uuid" - uuid "github.com/google/uuid" "github.com/jackc/pgx/v5" ) @@ -18,6 +18,105 @@ var ( ErrBatchAlreadyClosed = errors.New("batch already closed") ) +const assignInstrumentToProjectBatch = `-- name: AssignInstrumentToProjectBatch :batchexec +insert into project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing +` + +type AssignInstrumentToProjectBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type AssignInstrumentToProjectBatchParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) AssignInstrumentToProjectBatch(ctx context.Context, arg []AssignInstrumentToProjectBatchParams) *AssignInstrumentToProjectBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ProjectID, + a.InstrumentID, + } + batch.Queue(assignInstrumentToProjectBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &AssignInstrumentToProjectBatchBatchResults{br, len(arg), false} +} + +func (b *AssignInstrumentToProjectBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *AssignInstrumentToProjectBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const assignReportConfigPlotConfigBatch = `-- name: AssignReportConfigPlotConfigBatch :batchexec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2) +` + +type AssignReportConfigPlotConfigBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type AssignReportConfigPlotConfigBatchParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) AssignReportConfigPlotConfigBatch(ctx context.Context, arg []AssignReportConfigPlotConfigBatchParams) *AssignReportConfigPlotConfigBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ReportConfigID, + a.PlotConfigID, + } + batch.Queue(assignReportConfigPlotConfigBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &AssignReportConfigPlotConfigBatchBatchResults{br, len(arg), false} +} + +func (b *AssignReportConfigPlotConfigBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *AssignReportConfigPlotConfigBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const createAlerts = `-- name: CreateAlerts :batchexec insert into alert (alert_config_id) values ($1) ` @@ -60,3 +159,1517 @@ func (b *CreateAlertsBatchResults) Close() error { b.closed = true return b.br.Close() } + +const createAwarePlatformBatch = `-- name: CreateAwarePlatformBatch :batchexec +insert into aware_platform (instrument_id, aware_id) values ($1, $2) +` + +type CreateAwarePlatformBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateAwarePlatformBatchParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` +} + +func (q *Queries) CreateAwarePlatformBatch(ctx context.Context, arg []CreateAwarePlatformBatchParams) *CreateAwarePlatformBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.AwareID, + } + batch.Queue(createAwarePlatformBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateAwarePlatformBatchBatchResults{br, len(arg), false} +} + +func (b *CreateAwarePlatformBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateAwarePlatformBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createEvaluationInstrumentsBatch = `-- name: CreateEvaluationInstrumentsBatch :batchexec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) +` + +type CreateEvaluationInstrumentsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateEvaluationInstrumentsBatchParams struct { + EvaluationID *uuid.UUID `json:"evaluation_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) CreateEvaluationInstrumentsBatch(ctx context.Context, arg []CreateEvaluationInstrumentsBatchParams) *CreateEvaluationInstrumentsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.EvaluationID, + a.InstrumentID, + } + batch.Queue(createEvaluationInstrumentsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateEvaluationInstrumentsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateEvaluationInstrumentsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateEvaluationInstrumentsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createInstrumentConstantBatch = `-- name: CreateInstrumentConstantBatch :batchexec +insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2) +` + +type CreateInstrumentConstantBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateInstrumentConstantBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) CreateInstrumentConstantBatch(ctx context.Context, arg []CreateInstrumentConstantBatchParams) *CreateInstrumentConstantBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.TimeseriesID, + } + batch.Queue(createInstrumentConstantBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateInstrumentConstantBatchBatchResults{br, len(arg), false} +} + +func (b *CreateInstrumentConstantBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateInstrumentConstantBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createInstrumentGroupsBatch = `-- name: CreateInstrumentGroupsBatch :batchone +insert into instrument_group (slug, name, description, creator, create_date, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, slug, name, description, creator, create_date, updater, update_date, project_id +` + +type CreateInstrumentGroupsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateInstrumentGroupsBatchParams struct { + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + ProjectID *uuid.UUID `json:"project_id"` +} + +type CreateInstrumentGroupsBatchRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + ProjectID *uuid.UUID `json:"project_id"` +} + +func (q *Queries) CreateInstrumentGroupsBatch(ctx context.Context, arg []CreateInstrumentGroupsBatchParams) *CreateInstrumentGroupsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.Name, + a.Description, + a.Creator, + a.CreateDate, + a.ProjectID, + } + batch.Queue(createInstrumentGroupsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateInstrumentGroupsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateInstrumentGroupsBatchBatchResults) QueryRow(f func(int, CreateInstrumentGroupsBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i CreateInstrumentGroupsBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ProjectID, + ) + if f != nil { + f(t, i, err) + } + } +} + +func (b *CreateInstrumentGroupsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createInstrumentNoteBatch = `-- name: CreateInstrumentNoteBatch :batchone +insert into instrument_note (instrument_id, title, body, time, creator, create_date) +values ($1, $2, $3, $4, $5, $6) +returning id, instrument_id, title, body, time, creator, create_date, updater, update_date +` + +type CreateInstrumentNoteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateInstrumentNoteBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` +} + +func (q *Queries) CreateInstrumentNoteBatch(ctx context.Context, arg []CreateInstrumentNoteBatchParams) *CreateInstrumentNoteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.Title, + a.Body, + a.Time, + a.Creator, + a.CreateDate, + } + batch.Queue(createInstrumentNoteBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateInstrumentNoteBatchBatchResults{br, len(arg), false} +} + +func (b *CreateInstrumentNoteBatchBatchResults) QueryRow(f func(int, InstrumentNote, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i InstrumentNote + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + ) + if f != nil { + f(t, i, err) + } + } +} + +func (b *CreateInstrumentNoteBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createInstrumentsBatch = `-- name: CreateInstrumentsBatch :batchone +insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) +values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) +returning id, slug +` + +type CreateInstrumentsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateInstrumentsBatchParams struct { + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + Geometry Geometry `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` +} + +type CreateInstrumentsBatchRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +func (q *Queries) CreateInstrumentsBatch(ctx context.Context, arg []CreateInstrumentsBatchParams) *CreateInstrumentsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.Name, + a.TypeID, + a.Geometry, + a.Station, + a.StationOffset, + a.Creator, + a.CreateDate, + a.NidID, + a.UsgsID, + a.ShowCwmsTab, + } + 
batch.Queue(createInstrumentsBatch, vals...) + } + br := q.db.SendBatch(ctx, batch) + return &CreateInstrumentsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateInstrumentsBatchBatchResults) QueryRow(f func(int, CreateInstrumentsBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i CreateInstrumentsBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan(&i.ID, &i.Slug) + if f != nil { + f(t, i, err) + } + } +} + +func (b *CreateInstrumentsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createIpiOptsBatch = `-- name: CreateIpiOptsBatch :batchexec +insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type CreateIpiOptsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateIpiOptsBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) CreateIpiOptsBatch(ctx context.Context, arg []CreateIpiOptsBatchParams) *CreateIpiOptsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.NumSegments, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(createIpiOptsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateIpiOptsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateIpiOptsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateIpiOptsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createIpiSegmentBatch = `-- name: CreateIpiSegmentBatch :batchexec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6) +` + +type CreateIpiSegmentBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateIpiSegmentBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) CreateIpiSegmentBatch(ctx context.Context, arg []CreateIpiSegmentBatchParams) *CreateIpiSegmentBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.TiltTimeseriesID, + a.IncDevTimeseriesID, + a.TempTimeseriesID, + } + batch.Queue(createIpiSegmentBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateIpiSegmentBatchBatchResults{br, len(arg), false} +} + +func (b *CreateIpiSegmentBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateIpiSegmentBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createOrUpdateInstrumentStatusBatch = `-- name: CreateOrUpdateInstrumentStatusBatch :batchexec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id +` + +type CreateOrUpdateInstrumentStatusBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateOrUpdateInstrumentStatusBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StatusID uuid.UUID `json:"status_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) CreateOrUpdateInstrumentStatusBatch(ctx context.Context, arg []CreateOrUpdateInstrumentStatusBatchParams) *CreateOrUpdateInstrumentStatusBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.StatusID, + a.Time, + } + batch.Queue(createOrUpdateInstrumentStatusBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateOrUpdateInstrumentStatusBatchBatchResults{br, len(arg), false} +} + +func (b *CreateOrUpdateInstrumentStatusBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateOrUpdateInstrumentStatusBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createOrUpdateTimeseriesMeasurementsBatch = `-- name: CreateOrUpdateTimeseriesMeasurementsBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do update set value = excluded.value +` + +type CreateOrUpdateTimeseriesMeasurementsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateOrUpdateTimeseriesMeasurementsBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) CreateOrUpdateTimeseriesMeasurementsBatch(ctx context.Context, arg []CreateOrUpdateTimeseriesMeasurementsBatchParams) *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + a.Value, + } + batch.Queue(createOrUpdateTimeseriesMeasurementsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateOrUpdateTimeseriesMeasurementsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createOrUpdateTimeseriesNoteBatch = `-- name: CreateOrUpdateTimeseriesNoteBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation +` + +type CreateOrUpdateTimeseriesNoteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateOrUpdateTimeseriesNoteBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) CreateOrUpdateTimeseriesNoteBatch(ctx context.Context, arg []CreateOrUpdateTimeseriesNoteBatchParams) *CreateOrUpdateTimeseriesNoteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + a.Masked, + a.Validated, + a.Annotation, + } + batch.Queue(createOrUpdateTimeseriesNoteBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateOrUpdateTimeseriesNoteBatchBatchResults{br, len(arg), false} +} + +func (b *CreateOrUpdateTimeseriesNoteBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateOrUpdateTimeseriesNoteBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createPlotConfigCustomShapesBatch = `-- name: CreatePlotConfigCustomShapesBatch :batchexec +insert into plot_configuration_custom_shape +(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5) +` + +type CreatePlotConfigCustomShapesBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreatePlotConfigCustomShapesBatchParams struct { + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` +} + +func (q *Queries) CreatePlotConfigCustomShapesBatch(ctx context.Context, arg []CreatePlotConfigCustomShapesBatchParams) *CreatePlotConfigCustomShapesBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.PlotConfigurationID, + a.Enabled, + a.Name, + a.DataPoint, + a.Color, + } + batch.Queue(createPlotConfigCustomShapesBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreatePlotConfigCustomShapesBatchBatchResults{br, len(arg), false} +} + +func (b *CreatePlotConfigCustomShapesBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreatePlotConfigCustomShapesBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createPlotConfigTimeseriesTracesBatch = `-- name: CreatePlotConfigTimeseriesTracesBatch :batchexec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8) +` + +type CreatePlotConfigTimeseriesTracesBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreatePlotConfigTimeseriesTracesBatchParams struct { + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` +} + +func (q *Queries) CreatePlotConfigTimeseriesTracesBatch(ctx context.Context, arg []CreatePlotConfigTimeseriesTracesBatchParams) *CreatePlotConfigTimeseriesTracesBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.PlotConfigurationID, + a.TimeseriesID, + a.TraceOrder, + a.Color, + a.LineStyle, + a.Width, + a.ShowMarkers, + a.YAxis, + } + batch.Queue(createPlotConfigTimeseriesTracesBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreatePlotConfigTimeseriesTracesBatchBatchResults{br, len(arg), false} +} + +func (b *CreatePlotConfigTimeseriesTracesBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreatePlotConfigTimeseriesTracesBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createPlotContourConfigTimeseriesBatch = `-- name: CreatePlotContourConfigTimeseriesBatch :batchexec +insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing +` + +type CreatePlotContourConfigTimeseriesBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreatePlotContourConfigTimeseriesBatchParams struct { + PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) CreatePlotContourConfigTimeseriesBatch(ctx context.Context, arg []CreatePlotContourConfigTimeseriesBatchParams) *CreatePlotContourConfigTimeseriesBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.PlotContourConfigID, + a.TimeseriesID, + } + batch.Queue(createPlotContourConfigTimeseriesBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreatePlotContourConfigTimeseriesBatchBatchResults{br, len(arg), false} +} + +func (b *CreatePlotContourConfigTimeseriesBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreatePlotContourConfigTimeseriesBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createProjectsBatch = `-- name: CreateProjectsBatch :batchone +insert into project (federal_id, slug, name, district_id, creator, create_date) +values ($1, slugify($2, 'project'), $2, $3, $4, $5) +returning id, slug +` + +type CreateProjectsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateProjectsBatchParams struct { + FederalID *string `json:"federal_id"` + Name string `json:"name"` + DistrictID *uuid.UUID `json:"district_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` +} + +type CreateProjectsBatchRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +func (q *Queries) CreateProjectsBatch(ctx context.Context, arg []CreateProjectsBatchParams) *CreateProjectsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.FederalID, + a.Name, + a.DistrictID, + a.Creator, + a.CreateDate, + } + batch.Queue(createProjectsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateProjectsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateProjectsBatchBatchResults) QueryRow(f func(int, CreateProjectsBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i CreateProjectsBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan(&i.ID, &i.Slug) + if f != nil { + f(t, i, err) + } + } +} + +func (b *CreateProjectsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createSaaOptsBatch = `-- name: CreateSaaOptsBatch :batchexec +insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type CreateSaaOptsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateSaaOptsBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) CreateSaaOptsBatch(ctx context.Context, arg []CreateSaaOptsBatchParams) *CreateSaaOptsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.NumSegments, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(createSaaOptsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateSaaOptsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateSaaOptsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateSaaOptsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createSaaSegmentBatch = `-- name: CreateSaaSegmentBatch :batchexec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type CreateSaaSegmentBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateSaaSegmentBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) CreateSaaSegmentBatch(ctx context.Context, arg []CreateSaaSegmentBatchParams) *CreateSaaSegmentBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.XTimeseriesID, + a.YTimeseriesID, + a.ZTimeseriesID, + a.TempTimeseriesID, + } + batch.Queue(createSaaSegmentBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateSaaSegmentBatchBatchResults{br, len(arg), false} +} + +func (b *CreateSaaSegmentBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateSaaSegmentBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createTimeseriesBatch = `-- name: CreateTimeseriesBatch :batchone +insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) +values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) +returning id, instrument_id, slug, name, parameter_id, unit_id, type +` + +type CreateTimeseriesBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateTimeseriesBatchParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type NullTimeseriesType `json:"type"` +} + +type CreateTimeseriesBatchRow struct { + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + Slug string `json:"slug"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type NullTimeseriesType `json:"type"` +} + +func (q *Queries) CreateTimeseriesBatch(ctx context.Context, arg []CreateTimeseriesBatchParams) *CreateTimeseriesBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.Name, + a.ParameterID, + a.UnitID, + a.Type, + } + batch.Queue(createTimeseriesBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateTimeseriesBatchBatchResults{br, len(arg), false} +} + +func (b *CreateTimeseriesBatchBatchResults) QueryRow(f func(int, CreateTimeseriesBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i CreateTimeseriesBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Slug, + &i.Name, + &i.ParameterID, + &i.UnitID, + &i.Type, + ) + if f != nil { + f(t, i, err) + } + } +} + +func (b *CreateTimeseriesBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createTimeseriesCwmsBatch = `-- name: CreateTimeseriesCwmsBatch :batchexec +insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values +($1, $2, $3, $4, $5) +` + +type CreateTimeseriesCwmsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateTimeseriesCwmsBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` +} + +func (q *Queries) CreateTimeseriesCwmsBatch(ctx context.Context, arg []CreateTimeseriesCwmsBatchParams) *CreateTimeseriesCwmsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.CwmsTimeseriesID, + a.CwmsOfficeID, + a.CwmsExtentEarliestTime, + a.CwmsExtentLatestTime, + } + batch.Queue(createTimeseriesCwmsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateTimeseriesCwmsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateTimeseriesCwmsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateTimeseriesCwmsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createTimeseriesMeasurementsBatch = `-- name: CreateTimeseriesMeasurementsBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do nothing +` + +type CreateTimeseriesMeasurementsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateTimeseriesMeasurementsBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) CreateTimeseriesMeasurementsBatch(ctx context.Context, arg []CreateTimeseriesMeasurementsBatchParams) *CreateTimeseriesMeasurementsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + a.Value, + } + batch.Queue(createTimeseriesMeasurementsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateTimeseriesMeasurementsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateTimeseriesMeasurementsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateTimeseriesMeasurementsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createTimeseriesNotesBatch = `-- name: CreateTimeseriesNotesBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing +` + +type CreateTimeseriesNotesBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateTimeseriesNotesBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) CreateTimeseriesNotesBatch(ctx context.Context, arg []CreateTimeseriesNotesBatchParams) *CreateTimeseriesNotesBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + a.Masked, + a.Validated, + a.Annotation, + } + batch.Queue(createTimeseriesNotesBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateTimeseriesNotesBatchBatchResults{br, len(arg), false} +} + +func (b *CreateTimeseriesNotesBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateTimeseriesNotesBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const unassignInstrumentFromProjectBatch = `-- name: UnassignInstrumentFromProjectBatch :batchexec +delete from project_instrument where project_id = $1 and instrument_id = $2 +` + +type UnassignInstrumentFromProjectBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UnassignInstrumentFromProjectBatchParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) UnassignInstrumentFromProjectBatch(ctx context.Context, arg []UnassignInstrumentFromProjectBatchParams) *UnassignInstrumentFromProjectBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ProjectID, + a.InstrumentID, + } + batch.Queue(unassignInstrumentFromProjectBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UnassignInstrumentFromProjectBatchBatchResults{br, len(arg), false} +} + +func (b *UnassignInstrumentFromProjectBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UnassignInstrumentFromProjectBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const unassignReportConfigPlotConfigBatch = `-- name: UnassignReportConfigPlotConfigBatch :batchexec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2 +` + +type UnassignReportConfigPlotConfigBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UnassignReportConfigPlotConfigBatchParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) UnassignReportConfigPlotConfigBatch(ctx context.Context, arg []UnassignReportConfigPlotConfigBatchParams) *UnassignReportConfigPlotConfigBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ReportConfigID, + a.PlotConfigID, + } + batch.Queue(unassignReportConfigPlotConfigBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UnassignReportConfigPlotConfigBatchBatchResults{br, len(arg), false} +} + +func (b *UnassignReportConfigPlotConfigBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UnassignReportConfigPlotConfigBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const updateIpiOptsBatch = `-- name: UpdateIpiOptsBatch :batchexec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type UpdateIpiOptsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UpdateIpiOptsBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) UpdateIpiOptsBatch(ctx context.Context, arg []UpdateIpiOptsBatchParams) *UpdateIpiOptsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(updateIpiOptsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UpdateIpiOptsBatchBatchResults{br, len(arg), false} +} + +func (b *UpdateIpiOptsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UpdateIpiOptsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const updateIpiSegmentsBatch = `-- name: UpdateIpiSegmentsBatch :batchexec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2 +` + +type UpdateIpiSegmentsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UpdateIpiSegmentsBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) UpdateIpiSegmentsBatch(ctx context.Context, arg []UpdateIpiSegmentsBatchParams) *UpdateIpiSegmentsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.TiltTimeseriesID, + a.IncDevTimeseriesID, + a.TempTimeseriesID, + } + batch.Queue(updateIpiSegmentsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UpdateIpiSegmentsBatchBatchResults{br, len(arg), false} +} + +func (b *UpdateIpiSegmentsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UpdateIpiSegmentsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const updateSaaOptsBatch = `-- name: UpdateSaaOptsBatch :batchexec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type UpdateSaaOptsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UpdateSaaOptsBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) UpdateSaaOptsBatch(ctx context.Context, arg []UpdateSaaOptsBatchParams) *UpdateSaaOptsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(updateSaaOptsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UpdateSaaOptsBatchBatchResults{br, len(arg), false} +} + +func (b *UpdateSaaOptsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UpdateSaaOptsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} diff --git a/api/internal/db/collection_group.sql_gen.go b/api/internal/db/collection_group.sql_gen.go index 7894456f..016fd79b 100644 --- a/api/internal/db/collection_group.sql_gen.go +++ b/api/internal/db/collection_group.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const addTimeseriesToCollectionGroup = `-- name: AddTimeseriesToCollectionGroup :exec @@ -36,12 +34,12 @@ returning id, project_id, name, slug, creator, create_date, updater, update_date ` type CreateCollectionGroupParams struct { - ProjectID uuid.UUID `json:"project_id"` - Column2 string `json:"column_2"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID uuid.UUID `json:"project_id"` + Column2 string `json:"column_2"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` } func (q *Queries) CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CollectionGroup, error) { @@ -81,99 +79,53 @@ func (q *Queries) DeleteCollectionGroup(ctx context.Context, arg DeleteCollectio return err } -const getCollectionGroupDetailsTimeseries = `-- name: GetCollectionGroupDetailsTimeseries :one -select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, 
t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit, tm.time as latest_time, tm.value as latest_value -from collection_group_timeseries cgt -inner join collection_group cg on cg.id = cgt.collection_group_id -inner join v_timeseries t on t.id = cgt.timeseries_id -left join timeseries_measurement tm on tm.timeseries_id = t.id and tm.time = ( - select time from timeseries_measurement - where timeseries_id = t.id - order by time desc limit 1 -) -inner join project_instrument pi on t.instrument_id = pi.instrument_id -where pi.project_id = $1 -and cgt.collection_group_id = $2 +const getCollectionGroupDetails = `-- name: GetCollectionGroupDetails :one +select id, project_id, name, slug, creator, create_date, updater, update_date, timeseries from v_collection_group_details where id = $1 ` -type GetCollectionGroupDetailsTimeseriesParams struct { - ProjectID uuid.UUID `json:"project_id"` - CollectionGroupID uuid.UUID `json:"collection_group_id"` -} - -type GetCollectionGroupDetailsTimeseriesRow struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Type NullTimeseriesType `json:"type"` - IsComputed bool `json:"is_computed"` - Variable interface{} `json:"variable"` - InstrumentID uuid.UUID `json:"instrument_id"` - InstrumentSlug string `json:"instrument_slug"` - Instrument string `json:"instrument"` - ParameterID uuid.UUID `json:"parameter_id"` - Parameter string `json:"parameter"` - UnitID uuid.UUID `json:"unit_id"` - Unit string `json:"unit"` - LatestTime pgtype.Timestamptz `json:"latest_time"` - LatestValue *float64 `json:"latest_value"` -} - -func (q *Queries) GetCollectionGroupDetailsTimeseries(ctx context.Context, arg GetCollectionGroupDetailsTimeseriesParams) (GetCollectionGroupDetailsTimeseriesRow, error) { - row := q.db.QueryRow(ctx, getCollectionGroupDetailsTimeseries, arg.ProjectID, arg.CollectionGroupID) - var i GetCollectionGroupDetailsTimeseriesRow +func (q *Queries) GetCollectionGroupDetails(ctx context.Context, 
id uuid.UUID) (VCollectionGroupDetail, error) { + row := q.db.QueryRow(ctx, getCollectionGroupDetails, id) + var i VCollectionGroupDetail err := row.Scan( &i.ID, - &i.Slug, + &i.ProjectID, &i.Name, - &i.Type, - &i.IsComputed, - &i.Variable, - &i.InstrumentID, - &i.InstrumentSlug, - &i.Instrument, - &i.ParameterID, - &i.Parameter, - &i.UnitID, - &i.Unit, - &i.LatestTime, - &i.LatestValue, + &i.Slug, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.Timeseries, ) return i, err } -const listCollectionGroups = `-- name: ListCollectionGroups :many +const listCollectionGroupsForProject = `-- name: ListCollectionGroupsForProject :many select id, project_id, slug, name, creator, create_date, updater, update_date from collection_group where project_id = $1 -and ($2 is null or $2 = id) ` -type ListCollectionGroupsParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID interface{} `json:"id"` -} - -type ListCollectionGroupsRow struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Slug string `json:"slug"` - Name string `json:"name"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` +type ListCollectionGroupsForProjectRow struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Slug string `json:"slug"` + Name string `json:"name"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` } -func (q *Queries) ListCollectionGroups(ctx context.Context, arg ListCollectionGroupsParams) ([]ListCollectionGroupsRow, error) { - rows, err := q.db.Query(ctx, listCollectionGroups, arg.ProjectID, arg.ID) +func (q *Queries) ListCollectionGroupsForProject(ctx context.Context, projectID uuid.UUID) ([]ListCollectionGroupsForProjectRow, error) { + rows, err := q.db.Query(ctx, 
listCollectionGroupsForProject, projectID) if err != nil { return nil, err } defer rows.Close() - items := []ListCollectionGroupsRow{} + items := []ListCollectionGroupsForProjectRow{} for rows.Next() { - var i ListCollectionGroupsRow + var i ListCollectionGroupsForProjectRow if err := rows.Scan( &i.ID, &i.ProjectID, @@ -215,11 +167,11 @@ returning id, project_id, name, slug, creator, create_date, updater, update_date ` type UpdateCollectionGroupParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` } func (q *Queries) UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (CollectionGroup, error) { diff --git a/api/internal/db/datalogger.sql_gen.go b/api/internal/db/datalogger.sql_gen.go index b73907c0..cb84cd77 100644 --- a/api/internal/db/datalogger.sql_gen.go +++ b/api/internal/db/datalogger.sql_gen.go @@ -10,7 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const createDatalogger = `-- name: CreateDatalogger :one diff --git a/api/internal/db/datalogger_telemetry.sql_gen.go b/api/internal/db/datalogger_telemetry.sql_gen.go index b1912cf4..5a06791c 100644 --- a/api/internal/db/datalogger_telemetry.sql_gen.go +++ b/api/internal/db/datalogger_telemetry.sql_gen.go @@ -10,7 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const createDataloggerError = `-- name: CreateDataloggerError :exec diff --git a/api/internal/db/district_rollup.sql_gen.go b/api/internal/db/district_rollup.sql_gen.go index 3b8887e9..c45d3c4d 100644 --- a/api/internal/db/district_rollup.sql_gen.go +++ b/api/internal/db/district_rollup.sql_gen.go @@ -10,7 +10,6 @@ import ( "time" 
"github.com/google/uuid" - uuid "github.com/google/uuid" ) const listEvaluationDistrictRollupsForProjectAlertConfig = `-- name: ListEvaluationDistrictRollupsForProjectAlertConfig :many diff --git a/api/internal/db/equivalency_table.sql_gen.go b/api/internal/db/equivalency_table.sql_gen.go index 190a8990..e477f735 100644 --- a/api/internal/db/equivalency_table.sql_gen.go +++ b/api/internal/db/equivalency_table.sql_gen.go @@ -9,8 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createOrUpdateEquivalencyTableRow = `-- name: CreateOrUpdateEquivalencyTableRow :exec @@ -22,12 +20,12 @@ do update set display_name = excluded.display_name, instrument_id = excluded.ins ` type CreateOrUpdateEquivalencyTableRowParams struct { - DataloggerID uuid.UUID `json:"datalogger_id"` - DataloggerTableID pgtype.UUID `json:"datalogger_table_id"` - FieldName string `json:"field_name"` - DisplayName *string `json:"display_name"` - InstrumentID pgtype.UUID `json:"instrument_id"` - TimeseriesID pgtype.UUID `json:"timeseries_id"` + DataloggerID uuid.UUID `json:"datalogger_id"` + DataloggerTableID *uuid.UUID `json:"datalogger_table_id"` + FieldName string `json:"field_name"` + DisplayName *string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` } func (q *Queries) CreateOrUpdateEquivalencyTableRow(ctx context.Context, arg CreateOrUpdateEquivalencyTableRowParams) error { @@ -46,7 +44,7 @@ const deleteEquivalencyTable = `-- name: DeleteEquivalencyTable :exec delete from datalogger_equivalency_table where datalogger_table_id = $1 ` -func (q *Queries) DeleteEquivalencyTable(ctx context.Context, dataloggerTableID pgtype.UUID) error { +func (q *Queries) DeleteEquivalencyTable(ctx context.Context, dataloggerTableID *uuid.UUID) error { _, err := q.db.Exec(ctx, deleteEquivalencyTable, dataloggerTableID) return err } @@ -60,7 +58,7 @@ func (q *Queries) 
DeleteEquivalencyTableRow(ctx context.Context, id uuid.UUID) e return err } -const getEquivalencyTable = `-- name: GetEquivalencyTable :many +const getEquivalencyTable = `-- name: GetEquivalencyTable :one select datalogger_id, datalogger_table_id, @@ -70,29 +68,16 @@ from v_datalogger_equivalency_table where datalogger_table_id = $1 ` -func (q *Queries) GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) ([]VDataloggerEquivalencyTable, error) { - rows, err := q.db.Query(ctx, getEquivalencyTable, dataloggerTableID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VDataloggerEquivalencyTable{} - for rows.Next() { - var i VDataloggerEquivalencyTable - if err := rows.Scan( - &i.DataloggerID, - &i.DataloggerTableID, - &i.DataloggerTableName, - &i.Fields, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *Queries) GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) { + row := q.db.QueryRow(ctx, getEquivalencyTable, dataloggerTableID) + var i VDataloggerEquivalencyTable + err := row.Scan( + &i.DataloggerID, + &i.DataloggerTableID, + &i.DataloggerTableName, + &i.Fields, + ) + return i, err } const getIsValidDataloggerTable = `-- name: GetIsValidDataloggerTable :one @@ -135,11 +120,11 @@ where id = $1 ` type UpdateEquivalencyTableRowParams struct { - ID uuid.UUID `json:"id"` - FieldName string `json:"field_name"` - DisplayName *string `json:"display_name"` - InstrumentID pgtype.UUID `json:"instrument_id"` - TimeseriesID pgtype.UUID `json:"timeseries_id"` + ID uuid.UUID `json:"id"` + FieldName string `json:"field_name"` + DisplayName *string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` } func (q *Queries) UpdateEquivalencyTableRow(ctx context.Context, arg UpdateEquivalencyTableRowParams) error { 
diff --git a/api/internal/db/evaluation.sql_gen.go b/api/internal/db/evaluation.sql_gen.go index 9d197daa..c204d78f 100644 --- a/api/internal/db/evaluation.sql_gen.go +++ b/api/internal/db/evaluation.sql_gen.go @@ -10,11 +10,9 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) -const completeEvaluationSubmittal = `-- name: CompleteEvaluationSubmittal :exec +const completeEvaluationSubmittal = `-- name: CompleteEvaluationSubmittal :one update submittal sub1 set submittal_status_id = sq.submittal_status_id, completion_date = now() @@ -38,23 +36,20 @@ where sub1.id = sq.submittal_id returning sub1.id, sub1.alert_config_id, sub1.submittal_status_id, sub1.completion_date, sub1.create_date, sub1.due_date, sub1.marked_as_missing, sub1.warning_sent ` -func (q *Queries) CompleteEvaluationSubmittal(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, completeEvaluationSubmittal, id) - return err -} - -const createEvalationInstrument = `-- name: CreateEvalationInstrument :exec -insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) -` - -type CreateEvalationInstrumentParams struct { - EvaluationID pgtype.UUID `json:"evaluation_id"` - InstrumentID pgtype.UUID `json:"instrument_id"` -} - -func (q *Queries) CreateEvalationInstrument(ctx context.Context, arg CreateEvalationInstrumentParams) error { - _, err := q.db.Exec(ctx, createEvalationInstrument, arg.EvaluationID, arg.InstrumentID) - return err +func (q *Queries) CompleteEvaluationSubmittal(ctx context.Context, id uuid.UUID) (Submittal, error) { + row := q.db.QueryRow(ctx, completeEvaluationSubmittal, id) + var i Submittal + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.SubmittalStatusID, + &i.CompletionDate, + &i.CreateDate, + &i.DueDate, + &i.MarkedAsMissing, + &i.WarningSent, + ) + return i, err } const createEvaluation = `-- name: CreateEvaluation :one @@ -72,14 +67,14 @@ returning id ` type CreateEvaluationParams 
struct { - ProjectID uuid.UUID `json:"project_id"` - SubmittalID pgtype.UUID `json:"submittal_id"` - Name string `json:"name"` - Body string `json:"body"` - StartDate time.Time `json:"start_date"` - EndDate time.Time `json:"end_date"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + ProjectID uuid.UUID `json:"project_id"` + SubmittalID *uuid.UUID `json:"submittal_id"` + Name string `json:"name"` + Body string `json:"body"` + StartDate time.Time `json:"start_date"` + EndDate time.Time `json:"end_date"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` } func (q *Queries) CreateEvaluation(ctx context.Context, arg CreateEvaluationParams) (uuid.UUID, error) { @@ -98,6 +93,20 @@ func (q *Queries) CreateEvaluation(ctx context.Context, arg CreateEvaluationPara return id, err } +const createEvaluationInstrument = `-- name: CreateEvaluationInstrument :exec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) +` + +type CreateEvaluationInstrumentParams struct { + EvaluationID *uuid.UUID `json:"evaluation_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) CreateEvaluationInstrument(ctx context.Context, arg CreateEvaluationInstrumentParams) error { + _, err := q.db.Exec(ctx, createEvaluationInstrument, arg.EvaluationID, arg.InstrumentID) + return err +} + const createNextEvaluationSubmittal = `-- name: CreateNextEvaluationSubmittal :exec insert into submittal (alert_config_id, due_date) select @@ -159,7 +168,7 @@ where id = any( ) ` -func (q *Queries) ListInstrumentEvaluations(ctx context.Context, instrumentID pgtype.UUID) ([]VEvaluation, error) { +func (q *Queries) ListInstrumentEvaluations(ctx context.Context, instrumentID *uuid.UUID) ([]VEvaluation, error) { rows, err := q.db.Query(ctx, listInstrumentEvaluations, instrumentID) if err != nil { return nil, err @@ -249,8 +258,8 @@ and alert_config_id = $2 ` type ListProjectEvaluationsByAlertConfigParams 
struct { - ProjectID uuid.UUID `json:"project_id"` - AlertConfigID pgtype.UUID `json:"alert_config_id"` + ProjectID uuid.UUID `json:"project_id"` + AlertConfigID *uuid.UUID `json:"alert_config_id"` } func (q *Queries) ListProjectEvaluationsByAlertConfig(ctx context.Context, arg ListProjectEvaluationsByAlertConfigParams) ([]VEvaluation, error) { @@ -295,7 +304,7 @@ const unassignAllInstrumentsFromEvaluation = `-- name: UnassignAllInstrumentsFro delete from evaluation_instrument where evaluation_id = $1 ` -func (q *Queries) UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID pgtype.UUID) error { +func (q *Queries) UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID *uuid.UUID) error { _, err := q.db.Exec(ctx, unassignAllInstrumentsFromEvaluation, evaluationID) return err } @@ -312,14 +321,14 @@ where id=$1 and project_id=$2 ` type UpdateEvaluationParams struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Body string `json:"body"` - StartDate time.Time `json:"start_date"` - EndDate time.Time `json:"end_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartDate time.Time `json:"start_date"` + EndDate time.Time `json:"end_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` } func (q *Queries) UpdateEvaluation(ctx context.Context, arg UpdateEvaluationParams) error { diff --git a/api/internal/db/instrument.sql_gen.go b/api/internal/db/instrument.sql_gen.go index 2030190b..9a03b833 100644 --- a/api/internal/db/instrument.sql_gen.go +++ b/api/internal/db/instrument.sql_gen.go @@ -9,31 +9,26 @@ import ( "context" "time" - "github.com/USACE/instrumentation-api/api/internal/model" - model "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" - 
uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" - go_geom "github.com/twpayne/go-geom" ) const createInstrument = `-- name: CreateInstrument :one insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) -values (slugify($1, 'instrument'), $1, $2, st_setsrid(ST_GeomFromWKB($3), 4326), $4, $5, $6, $7, $8, $9, $10) +values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) returning id, slug ` type CreateInstrumentParams struct { - Name string `json:"name"` - TypeID uuid.UUID `json:"type_id"` - StGeomfromwkb interface{} `json:"st_geomfromwkb"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - NidID *string `json:"nid_id"` - UsgsID *string `json:"usgs_id"` - ShowCwmsTab bool `json:"show_cwms_tab"` + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + Geometry Geometry `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` } type CreateInstrumentRow struct { @@ -45,7 +40,7 @@ func (q *Queries) CreateInstrument(ctx context.Context, arg CreateInstrumentPara row := q.db.QueryRow(ctx, createInstrument, arg.Name, arg.TypeID, - arg.StGeomfromwkb, + arg.Geometry, arg.Station, arg.StationOffset, arg.Creator, @@ -79,6 +74,48 @@ func (q *Queries) DeleteFlagInstrument(ctx context.Context, arg DeleteFlagInstru return err } +const getInstrument = `-- name: GetInstrument :one +select id, deleted, status_id, status, status_time, slug, name, type_id, show_cwms_tab, type, icon, geometry, station, station_offset, creator, create_date, updater, update_date, nid_id, usgs_id, telemetry, has_cwms, projects, constants, groups, 
alert_configs, opts +from v_instrument +where not deleted +and id=$1 +` + +func (q *Queries) GetInstrument(ctx context.Context, id uuid.UUID) (VInstrument, error) { + row := q.db.QueryRow(ctx, getInstrument, id) + var i VInstrument + err := row.Scan( + &i.ID, + &i.Deleted, + &i.StatusID, + &i.Status, + &i.StatusTime, + &i.Slug, + &i.Name, + &i.TypeID, + &i.ShowCwmsTab, + &i.Type, + &i.Icon, + &i.Geometry, + &i.Station, + &i.StationOffset, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.NidID, + &i.UsgsID, + &i.Telemetry, + &i.HasCwms, + &i.Projects, + &i.Constants, + &i.Groups, + &i.AlertConfigs, + &i.Opts, + ) + return i, err +} + const getInstrumentCount = `-- name: GetInstrumentCount :one select count(*) from instrument where not deleted ` @@ -90,55 +127,6 @@ func (q *Queries) GetInstrumentCount(ctx context.Context) (int64, error) { return count, err } -const getProjectCountForInstrument = `-- name: GetProjectCountForInstrument :one -select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count -from project_instrument pi -inner join instrument i on pi.instrument_id = i.id -where pi.instrument_id in ($1::uuid[]) -group by pi.instrument_id, i.name -order by i.name -` - -type GetProjectCountForInstrumentRow struct { - InstrumentID uuid.UUID `json:"instrument_id"` - InstrumentName string `json:"instrument_name"` - ProjectCount int64 `json:"project_count"` -} - -func (q *Queries) GetProjectCountForInstrument(ctx context.Context, instrumentIds []uuid.UUID) (GetProjectCountForInstrumentRow, error) { - row := q.db.QueryRow(ctx, getProjectCountForInstrument, instrumentIds) - var i GetProjectCountForInstrumentRow - err := row.Scan(&i.InstrumentID, &i.InstrumentName, &i.ProjectCount) - return i, err -} - -const listAdminProjects = `-- name: ListAdminProjects :many -select pr.project_id from profile_project_roles pr -inner join role ro on ro.id = pr.role_id -where pr.profile_id = $1 -and ro.name = 'ADMIN' -` - -func (q *Queries) 
ListAdminProjects(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) { - rows, err := q.db.Query(ctx, listAdminProjects, profileID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []uuid.UUID{} - for rows.Next() { - var project_id uuid.UUID - if err := rows.Scan(&project_id); err != nil { - return nil, err - } - items = append(items, project_id) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - const listInstrumentIDNamesByIDs = `-- name: ListInstrumentIDNamesByIDs :many select id, name from instrument @@ -196,81 +184,30 @@ func (q *Queries) ListInstrumentProjects(ctx context.Context, instrumentID uuid. } const listInstruments = `-- name: ListInstruments :many -select id, - status_id, - status, - status_time, - slug, - name, - type_id, - type, - icon, - geometry, - station, - station_offset, - creator, - create_date, - updater, - update_date, - projects, - constants, - groups, - alert_configs, - nid_id, - usgs_id, - has_cwms, - show_cwms_tab, - opts +select id, deleted, status_id, status, status_time, slug, name, type_id, show_cwms_tab, type, icon, geometry, station, station_offset, creator, create_date, updater, update_date, nid_id, usgs_id, telemetry, has_cwms, projects, constants, groups, alert_configs, opts from v_instrument where not deleted -and ($1 is not null or $1 = id) ` -type ListInstrumentsRow struct { - ID uuid.UUID `json:"id"` - StatusID uuid.UUID `json:"status_id"` - Status string `json:"status"` - StatusTime time.Time `json:"status_time"` - Slug string `json:"slug"` - Name string `json:"name"` - TypeID uuid.UUID `json:"type_id"` - Type string `json:"type"` - Icon *string `json:"icon"` - Geometry go_geom.T `json:"geometry"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - 
Projects []model.IDSlugName `json:"projects"` - Constants []uuid.UUID `json:"constants"` - Groups []uuid.UUID `json:"groups"` - AlertConfigs []uuid.UUID `json:"alert_configs"` - NidID *string `json:"nid_id"` - UsgsID *string `json:"usgs_id"` - HasCwms bool `json:"has_cwms"` - ShowCwmsTab bool `json:"show_cwms_tab"` - Opts model.Opts `json:"opts"` -} - -func (q *Queries) ListInstruments(ctx context.Context, id interface{}) ([]ListInstrumentsRow, error) { - rows, err := q.db.Query(ctx, listInstruments, id) +func (q *Queries) ListInstruments(ctx context.Context) ([]VInstrument, error) { + rows, err := q.db.Query(ctx, listInstruments) if err != nil { return nil, err } defer rows.Close() - items := []ListInstrumentsRow{} + items := []VInstrument{} for rows.Next() { - var i ListInstrumentsRow + var i VInstrument if err := rows.Scan( &i.ID, + &i.Deleted, &i.StatusID, &i.Status, &i.StatusTime, &i.Slug, &i.Name, &i.TypeID, + &i.ShowCwmsTab, &i.Type, &i.Icon, &i.Geometry, @@ -280,14 +217,69 @@ func (q *Queries) ListInstruments(ctx context.Context, id interface{}) ([]ListIn &i.CreateDate, &i.Updater, &i.UpdateDate, + &i.NidID, + &i.UsgsID, + &i.Telemetry, + &i.HasCwms, &i.Projects, &i.Constants, &i.Groups, &i.AlertConfigs, + &i.Opts, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listInstrumentsForProject = `-- name: ListInstrumentsForProject :many +select i.id, i.deleted, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i.station_offset, i.creator, i.create_date, i.updater, i.update_date, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts +from v_instrument i +inner join project_instrument pi on pi.instrument_id = i.id +where pi.project_id = $1 +` + +func (q *Queries) ListInstrumentsForProject(ctx context.Context, projectID uuid.UUID) 
([]VInstrument, error) { + rows, err := q.db.Query(ctx, listInstrumentsForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrument{} + for rows.Next() { + var i VInstrument + if err := rows.Scan( + &i.ID, + &i.Deleted, + &i.StatusID, + &i.Status, + &i.StatusTime, + &i.Slug, + &i.Name, + &i.TypeID, + &i.ShowCwmsTab, + &i.Type, + &i.Icon, + &i.Geometry, + &i.Station, + &i.StationOffset, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, &i.NidID, &i.UsgsID, + &i.Telemetry, &i.HasCwms, - &i.ShowCwmsTab, + &i.Projects, + &i.Constants, + &i.Groups, + &i.AlertConfigs, &i.Opts, ); err != nil { return nil, err @@ -300,11 +292,46 @@ func (q *Queries) ListInstruments(ctx context.Context, id interface{}) ([]ListIn return items, nil } +const listProjectCountForInstruments = `-- name: ListProjectCountForInstruments :many +select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id in ($1::uuid[]) +group by pi.instrument_id, i.name +order by i.name +` + +type ListProjectCountForInstrumentsRow struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` + ProjectCount int64 `json:"project_count"` +} + +func (q *Queries) ListProjectCountForInstruments(ctx context.Context, instrumentIds []uuid.UUID) ([]ListProjectCountForInstrumentsRow, error) { + rows, err := q.db.Query(ctx, listProjectCountForInstruments, instrumentIds) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ListProjectCountForInstrumentsRow{} + for rows.Next() { + var i ListProjectCountForInstrumentsRow + if err := rows.Scan(&i.InstrumentID, &i.InstrumentName, &i.ProjectCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const updateInstrument = `-- name: 
UpdateInstrument :exec update instrument set name = $3, type_id = $4, - geometry = ST_GeomFromWKB($5), + geometry = $5, updater = $6, update_date = $7, station = $8, @@ -321,18 +348,18 @@ and id in ( ` type UpdateInstrumentParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID uuid.UUID `json:"id"` - Name string `json:"name"` - TypeID uuid.UUID `json:"type_id"` - StGeomfromwkb interface{} `json:"st_geomfromwkb"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"station_offset"` - NidID *string `json:"nid_id"` - UsgsID *string `json:"usgs_id"` - ShowCwmsTab bool `json:"show_cwms_tab"` + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + Geometry Geometry `json:"geometry"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` } func (q *Queries) UpdateInstrument(ctx context.Context, arg UpdateInstrumentParams) error { @@ -341,7 +368,7 @@ func (q *Queries) UpdateInstrument(ctx context.Context, arg UpdateInstrumentPara arg.ID, arg.Name, arg.TypeID, - arg.StGeomfromwkb, + arg.Geometry, arg.Updater, arg.UpdateDate, arg.Station, @@ -355,7 +382,7 @@ func (q *Queries) UpdateInstrument(ctx context.Context, arg UpdateInstrumentPara const updateInstrumentGeometry = `-- name: UpdateInstrumentGeometry :one update instrument set - geometry = st_geomfromwkb($3), + geometry = $3, updater = $4, update_date = now() where id = $2 @@ -368,17 +395,17 @@ returning id ` type UpdateInstrumentGeometryParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID uuid.UUID `json:"id"` - StGeomfromwkb interface{} `json:"st_geomfromwkb"` - Updater pgtype.UUID `json:"updater"` + ProjectID 
uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Geometry Geometry `json:"geometry"` + Updater *uuid.UUID `json:"updater"` } func (q *Queries) UpdateInstrumentGeometry(ctx context.Context, arg UpdateInstrumentGeometryParams) (uuid.UUID, error) { row := q.db.QueryRow(ctx, updateInstrumentGeometry, arg.ProjectID, arg.ID, - arg.StGeomfromwkb, + arg.Geometry, arg.Updater, ) var id uuid.UUID diff --git a/api/internal/db/instrument_assign.sql_gen.go b/api/internal/db/instrument_assign.sql_gen.go index 7fb9ef3f..35d989a8 100644 --- a/api/internal/db/instrument_assign.sql_gen.go +++ b/api/internal/db/instrument_assign.sql_gen.go @@ -9,7 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const assignInstrumentToProject = `-- name: AssignInstrumentToProject :exec @@ -51,12 +50,12 @@ and not i.deleted ` type ValidateInstrumentNamesProjectUniqueParams struct { - ProjectID uuid.UUID `json:"project_id"` - InstrumentName []string `json:"instrument_name"` + ProjectID uuid.UUID `json:"project_id"` + InstrumentNames []string `json:"instrument_names"` } func (q *Queries) ValidateInstrumentNamesProjectUnique(ctx context.Context, arg ValidateInstrumentNamesProjectUniqueParams) ([]string, error) { - rows, err := q.db.Query(ctx, validateInstrumentNamesProjectUnique, arg.ProjectID, arg.InstrumentName) + rows, err := q.db.Query(ctx, validateInstrumentNamesProjectUnique, arg.ProjectID, arg.InstrumentNames) if err != nil { return nil, err } @@ -162,39 +161,34 @@ func (q *Queries) ValidateProjectsAssignerAuthorized(ctx context.Context, arg Va } const validateProjectsInstrumentNameUnique = `-- name: ValidateProjectsInstrumentNameUnique :many -select p.name, i.name +select i.name instrument_name from project_instrument pi inner join instrument i on pi.instrument_id = i.id inner join project p on pi.project_id = p.id where i.name = $1 -and pi.instrument_id in ($2::uuid[]) +and pi.project_id in ($2::uuid[]) and not i.deleted order by pi.project_id ` 
type ValidateProjectsInstrumentNameUniqueParams struct { - Name string `json:"name"` - InstrumentID []uuid.UUID `json:"instrument_id"` -} - -type ValidateProjectsInstrumentNameUniqueRow struct { - Name string `json:"name"` - Name_2 string `json:"name_2"` + InstrumentName string `json:"instrument_name"` + ProjectIds []uuid.UUID `json:"project_ids"` } -func (q *Queries) ValidateProjectsInstrumentNameUnique(ctx context.Context, arg ValidateProjectsInstrumentNameUniqueParams) ([]ValidateProjectsInstrumentNameUniqueRow, error) { - rows, err := q.db.Query(ctx, validateProjectsInstrumentNameUnique, arg.Name, arg.InstrumentID) +func (q *Queries) ValidateProjectsInstrumentNameUnique(ctx context.Context, arg ValidateProjectsInstrumentNameUniqueParams) ([]string, error) { + rows, err := q.db.Query(ctx, validateProjectsInstrumentNameUnique, arg.InstrumentName, arg.ProjectIds) if err != nil { return nil, err } defer rows.Close() - items := []ValidateProjectsInstrumentNameUniqueRow{} + items := []string{} for rows.Next() { - var i ValidateProjectsInstrumentNameUniqueRow - if err := rows.Scan(&i.Name, &i.Name_2); err != nil { + var instrument_name string + if err := rows.Scan(&instrument_name); err != nil { return nil, err } - items = append(items, i) + items = append(items, instrument_name) } if err := rows.Err(); err != nil { return nil, err diff --git a/api/internal/db/instrument_constant.sql_gen.go b/api/internal/db/instrument_constant.sql_gen.go index 41412e6b..9e66e1f2 100644 --- a/api/internal/db/instrument_constant.sql_gen.go +++ b/api/internal/db/instrument_constant.sql_gen.go @@ -9,7 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const createInstrumentConstant = `-- name: CreateInstrumentConstant :exec diff --git a/api/internal/db/instrument_group.sql_gen.go b/api/internal/db/instrument_group.sql_gen.go index 1145940c..fd319db8 100644 --- a/api/internal/db/instrument_group.sql_gen.go +++ 
b/api/internal/db/instrument_group.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createInstrumentGroup = `-- name: CreateInstrumentGroup :one @@ -21,23 +19,23 @@ returning id, slug, name, description, creator, create_date, updater, update_dat ` type CreateInstrumentGroupParams struct { - Name string `json:"name"` - Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - ProjectID pgtype.UUID `json:"project_id"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + ProjectID *uuid.UUID `json:"project_id"` } type CreateInstrumentGroupRow struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - ProjectID pgtype.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + ProjectID *uuid.UUID `json:"project_id"` } func (q *Queries) CreateInstrumentGroup(ctx context.Context, arg CreateInstrumentGroupParams) (CreateInstrumentGroupRow, error) { @@ -100,6 +98,46 @@ func (q *Queries) DeleteInstrumentGroupInstruments(ctx context.Context, arg Dele return err } +const getInstrumentGroup = `-- name: GetInstrumentGroup :many +select id, slug, name, description, creator, create_date, updater, update_date, project_id, deleted, instrument_count, timeseries_count +from v_instrument_group +where not deleted +and id=$1 +` + 
+func (q *Queries) GetInstrumentGroup(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, getInstrumentGroup, id) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrumentGroup{} + for rows.Next() { + var i VInstrumentGroup + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ProjectID, + &i.Deleted, + &i.InstrumentCount, + &i.TimeseriesCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const listInstrumentGroupInstruments = `-- name: ListInstrumentGroupInstruments :many select from instrument_group_instruments igi @@ -131,46 +169,59 @@ func (q *Queries) ListInstrumentGroupInstruments(ctx context.Context, instrument } const listInstrumentGroups = `-- name: ListInstrumentGroups :many -select - id, - slug, - name, - description, - creator, - create_date, - updater, - update_date, - project_id, - instrument_count, - timeseries_count +select id, slug, name, description, creator, create_date, updater, update_date, project_id, deleted, instrument_count, timeseries_count from v_instrument_group where not deleted -and ($1 is not null or $1 = id) ` -type ListInstrumentGroupsRow struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - ProjectID pgtype.UUID `json:"project_id"` - InstrumentCount int64 `json:"instrument_count"` - TimeseriesCount interface{} `json:"timeseries_count"` +func (q *Queries) ListInstrumentGroups(ctx context.Context) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, listInstrumentGroups) + if err != nil { + return nil, err + } + 
defer rows.Close() + items := []VInstrumentGroup{} + for rows.Next() { + var i VInstrumentGroup + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ProjectID, + &i.Deleted, + &i.InstrumentCount, + &i.TimeseriesCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil } -func (q *Queries) ListInstrumentGroups(ctx context.Context, id interface{}) ([]ListInstrumentGroupsRow, error) { - rows, err := q.db.Query(ctx, listInstrumentGroups, id) +const listInstrumentGroupsForProject = `-- name: ListInstrumentGroupsForProject :many +select ig.id, ig.slug, ig.name, ig.description, ig.creator, ig.create_date, ig.updater, ig.update_date, ig.project_id, ig.deleted, ig.instrument_count, ig.timeseries_count +from v_instrument_group ig +where ig.project_id = $1 +` + +func (q *Queries) ListInstrumentGroupsForProject(ctx context.Context, projectID *uuid.UUID) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, listInstrumentGroupsForProject, projectID) if err != nil { return nil, err } defer rows.Close() - items := []ListInstrumentGroupsRow{} + items := []VInstrumentGroup{} for rows.Next() { - var i ListInstrumentGroupsRow + var i VInstrumentGroup if err := rows.Scan( &i.ID, &i.Slug, @@ -181,6 +232,7 @@ func (q *Queries) ListInstrumentGroups(ctx context.Context, id interface{}) ([]L &i.Updater, &i.UpdateDate, &i.ProjectID, + &i.Deleted, &i.InstrumentCount, &i.TimeseriesCount, ); err != nil { @@ -207,13 +259,13 @@ update instrument_group set ` type UpdateInstrumentGroupParams struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Deleted bool `json:"deleted"` - Description *string `json:"description"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - ProjectID pgtype.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string 
`json:"name"` + Deleted bool `json:"deleted"` + Description *string `json:"description"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + ProjectID *uuid.UUID `json:"project_id"` } func (q *Queries) UpdateInstrumentGroup(ctx context.Context, arg UpdateInstrumentGroupParams) (InstrumentGroup, error) { diff --git a/api/internal/db/instrument_ipi.sql_gen.go b/api/internal/db/instrument_ipi.sql_gen.go index 7d8e11e6..bfc859ea 100644 --- a/api/internal/db/instrument_ipi.sql_gen.go +++ b/api/internal/db/instrument_ipi.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createIpiOpts = `-- name: CreateIpiOpts :exec @@ -20,10 +18,10 @@ values ($1, $2, $3, $4) ` type CreateIpiOptsParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - NumSegments int32 `json:"num_segments"` - BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime pgtype.Timestamptz `json:"initial_time"` + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` } func (q *Queries) CreateIpiOpts(ctx context.Context, arg CreateIpiOptsParams) error { @@ -48,12 +46,12 @@ insert into ipi_segment ( ` type CreateIpiSegmentParams struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` - TiltTimeseriesID pgtype.UUID `json:"tilt_timeseries_id"` - IncDevTimeseriesID pgtype.UUID `json:"inc_dev_timeseries_id"` - TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID 
`json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } func (q *Queries) CreateIpiSegment(ctx context.Context, arg CreateIpiSegmentParams) error { @@ -145,9 +143,9 @@ where instrument_id = $1 ` type UpdateIpiOptsParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime pgtype.Timestamptz `json:"initial_time"` + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` } func (q *Queries) UpdateIpiOpts(ctx context.Context, arg UpdateIpiOptsParams) error { @@ -165,12 +163,12 @@ where id = $1 and instrument_id = $2 ` type UpdateIpiSegmentParams struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` - TiltTimeseriesID pgtype.UUID `json:"tilt_timeseries_id"` - IncDevTimeseriesID pgtype.UUID `json:"inc_dev_timeseries_id"` - TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } func (q *Queries) UpdateIpiSegment(ctx context.Context, arg UpdateIpiSegmentParams) error { diff --git a/api/internal/db/instrument_note.sql_gen.go b/api/internal/db/instrument_note.sql_gen.go index a98d6fa6..94eb74e8 100644 --- a/api/internal/db/instrument_note.sql_gen.go +++ b/api/internal/db/instrument_note.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createInstrumentNote = `-- name: CreateInstrumentNote :one @@ -126,12 +124,12 @@ returning id, 
instrument_id, title, body, time, creator, create_date, updater, u ` type UpdateInstrumentNoteParams struct { - ID uuid.UUID `json:"id"` - Title string `json:"title"` - Body string `json:"body"` - Time time.Time `json:"time"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + ID uuid.UUID `json:"id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` } func (q *Queries) UpdateInstrumentNote(ctx context.Context, arg UpdateInstrumentNoteParams) (InstrumentNote, error) { diff --git a/api/internal/db/instrument_saa.sql_gen.go b/api/internal/db/instrument_saa.sql_gen.go index 25a04b08..7d141014 100644 --- a/api/internal/db/instrument_saa.sql_gen.go +++ b/api/internal/db/instrument_saa.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createSaaOpts = `-- name: CreateSaaOpts :exec @@ -20,10 +18,10 @@ values ($1, $2, $3, $4) ` type CreateSaaOptsParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - NumSegments int32 `json:"num_segments"` - BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime pgtype.Timestamptz `json:"initial_time"` + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` } func (q *Queries) CreateSaaOpts(ctx context.Context, arg CreateSaaOptsParams) error { @@ -49,13 +47,13 @@ insert into saa_segment ( ` type CreateSaaSegmentParams struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` - XTimeseriesID pgtype.UUID `json:"x_timeseries_id"` - YTimeseriesID pgtype.UUID `json:"y_timeseries_id"` - ZTimeseriesID 
pgtype.UUID `json:"z_timeseries_id"` - TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } func (q *Queries) CreateSaaSegment(ctx context.Context, arg CreateSaaSegmentParams) error { @@ -150,9 +148,9 @@ where instrument_id = $1 ` type UpdateSaaOptsParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime pgtype.Timestamptz `json:"initial_time"` + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` } func (q *Queries) UpdateSaaOpts(ctx context.Context, arg UpdateSaaOptsParams) error { @@ -171,13 +169,13 @@ where id = $1 and instrument_id = $2 ` type UpdateSaaSegmentParams struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` - XTimeseriesID pgtype.UUID `json:"x_timeseries_id"` - YTimeseriesID pgtype.UUID `json:"y_timeseries_id"` - ZTimeseriesID pgtype.UUID `json:"z_timeseries_id"` - TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } func (q *Queries) UpdateSaaSegment(ctx context.Context, arg UpdateSaaSegmentParams) error { diff --git 
a/api/internal/db/instrument_status.sql_gen.go b/api/internal/db/instrument_status.sql_gen.go index a12dbd09..a982de3e 100644 --- a/api/internal/db/instrument_status.sql_gen.go +++ b/api/internal/db/instrument_status.sql_gen.go @@ -10,7 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const createOrUpdateInstrumentStatus = `-- name: CreateOrUpdateInstrumentStatus :exec diff --git a/api/internal/db/measurement.sql_gen.go b/api/internal/db/measurement.sql_gen.go index 5690fb3e..f195fba5 100644 --- a/api/internal/db/measurement.sql_gen.go +++ b/api/internal/db/measurement.sql_gen.go @@ -10,7 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const createOrUpdateTimeseriesMeasurement = `-- name: CreateOrUpdateTimeseriesMeasurement :exec @@ -53,19 +52,19 @@ func (q *Queries) CreateOrUpdateTimeseriesNote(ctx context.Context, arg CreateOr return err } -const createTimeseriesMeasruement = `-- name: CreateTimeseriesMeasruement :exec +const createTimeseriesMeasurement = `-- name: CreateTimeseriesMeasurement :exec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) on conflict on constraint timeseries_unique_time do nothing ` -type CreateTimeseriesMeasruementParams struct { +type CreateTimeseriesMeasurementParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Time time.Time `json:"time"` Value float64 `json:"value"` } -func (q *Queries) CreateTimeseriesMeasruement(ctx context.Context, arg CreateTimeseriesMeasruementParams) error { - _, err := q.db.Exec(ctx, createTimeseriesMeasruement, arg.TimeseriesID, arg.Time, arg.Value) +func (q *Queries) CreateTimeseriesMeasurement(ctx context.Context, arg CreateTimeseriesMeasurementParams) error { + _, err := q.db.Exec(ctx, createTimeseriesMeasurement, arg.TimeseriesID, arg.Time, arg.Value) return err } @@ -127,27 +126,27 @@ delete from timeseries_measurement where timeseries_id = $1 and time > $2 and ti type 
DeleteTimeseriesMeasurementsRangeParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` - Time time.Time `json:"time"` - Time_2 time.Time `json:"time_2"` + After time.Time `json:"after"` + Before time.Time `json:"before"` } func (q *Queries) DeleteTimeseriesMeasurementsRange(ctx context.Context, arg DeleteTimeseriesMeasurementsRangeParams) error { - _, err := q.db.Exec(ctx, deleteTimeseriesMeasurementsRange, arg.TimeseriesID, arg.Time, arg.Time_2) + _, err := q.db.Exec(ctx, deleteTimeseriesMeasurementsRange, arg.TimeseriesID, arg.After, arg.Before) return err } -const deleteTimeseriesNote = `-- name: DeleteTimeseriesNote :exec +const deleteTimeseriesNoteRange = `-- name: DeleteTimeseriesNoteRange :exec delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3 ` -type DeleteTimeseriesNoteParams struct { +type DeleteTimeseriesNoteRangeParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` - Time time.Time `json:"time"` - Time_2 time.Time `json:"time_2"` + After time.Time `json:"after"` + Before time.Time `json:"before"` } -func (q *Queries) DeleteTimeseriesNote(ctx context.Context, arg DeleteTimeseriesNoteParams) error { - _, err := q.db.Exec(ctx, deleteTimeseriesNote, arg.TimeseriesID, arg.Time, arg.Time_2) +func (q *Queries) DeleteTimeseriesNoteRange(ctx context.Context, arg DeleteTimeseriesNoteRangeParams) error { + _, err := q.db.Exec(ctx, deleteTimeseriesNoteRange, arg.TimeseriesID, arg.After, arg.Before) return err } diff --git a/api/internal/db/models.go b/api/internal/db/models.go index 87aae2b8..dfa8c986 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -9,12 +9,8 @@ import ( "fmt" "time" - "github.com/USACE/instrumentation-api/api/internal/model" - model "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" - uuid "github.com/google/uuid" "github.com/jackc/pgx/v5/pgtype" - go_geom "github.com/twpayne/go-geom" ) type JobStatus string @@ -333,24 +329,24 @@ type Alert 
struct { } type AlertConfig struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Body string `json:"body"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - AlertTypeID uuid.UUID `json:"alert_type_id"` - StartDate time.Time `json:"start_date"` - ScheduleInterval pgtype.Interval `json:"schedule_interval"` - NMissedBeforeAlert int32 `json:"n_missed_before_alert"` - WarningInterval pgtype.Interval `json:"warning_interval"` - RemindInterval pgtype.Interval `json:"remind_interval"` - LastChecked pgtype.Timestamptz `json:"last_checked"` - LastReminded pgtype.Timestamptz `json:"last_reminded"` - Deleted bool `json:"deleted"` - MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + StartDate time.Time `json:"start_date"` + ScheduleInterval string `json:"schedule_interval"` + NMissedBeforeAlert int32 `json:"n_missed_before_alert"` + WarningInterval string `json:"warning_interval"` + RemindInterval string `json:"remind_interval"` + LastChecked *time.Time `json:"last_checked"` + LastReminded *time.Time `json:"last_reminded"` + Deleted bool `json:"deleted"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` } type AlertConfigInstrument struct { @@ -393,9 +389,9 @@ type AwareParameter struct { } type AwarePlatform struct { - ID uuid.UUID `json:"id"` - AwareID uuid.UUID `json:"aware_id"` - InstrumentID pgtype.UUID `json:"instrument_id"` + ID uuid.UUID `json:"id"` + AwareID uuid.UUID `json:"aware_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` } type 
AwarePlatformParameterEnabled struct { @@ -409,14 +405,14 @@ type Calculation struct { } type CollectionGroup struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Slug string `json:"slug"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` } type CollectionGroupTimeseries struct { @@ -444,20 +440,20 @@ type Datalogger struct { } type DataloggerEquivalencyTable struct { - ID uuid.UUID `json:"id"` - DataloggerID uuid.UUID `json:"datalogger_id"` - DataloggerDeleted bool `json:"datalogger_deleted"` - FieldName string `json:"field_name"` - DisplayName *string `json:"display_name"` - InstrumentID pgtype.UUID `json:"instrument_id"` - TimeseriesID pgtype.UUID `json:"timeseries_id"` - DataloggerTableID pgtype.UUID `json:"datalogger_table_id"` + ID uuid.UUID `json:"id"` + DataloggerID uuid.UUID `json:"datalogger_id"` + DataloggerDeleted bool `json:"datalogger_deleted"` + FieldName string `json:"field_name"` + DisplayName *string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + DataloggerTableID *uuid.UUID `json:"datalogger_table_id"` } type DataloggerError struct { - DataloggerID uuid.UUID `json:"datalogger_id"` - ErrorMessage *string `json:"error_message"` - DataloggerTableID pgtype.UUID `json:"datalogger_table_id"` + DataloggerID uuid.UUID `json:"datalogger_id"` + ErrorMessage *string `json:"error_message"` + DataloggerTableID *uuid.UUID `json:"datalogger_table_id"` } type DataloggerHash struct { @@ -483,11 +479,11 @@ type DataloggerTable 
struct { } type District struct { - ID uuid.UUID `json:"id"` - DivisionID uuid.UUID `json:"division_id"` - Name *string `json:"name"` - Initials *string `json:"initials"` - OfficeID pgtype.UUID `json:"office_id"` + ID uuid.UUID `json:"id"` + DivisionID uuid.UUID `json:"division_id"` + Name *string `json:"name"` + Initials *string `json:"initials"` + OfficeID *uuid.UUID `json:"office_id"` } type Division struct { @@ -503,22 +499,22 @@ type Email struct { } type Evaluation struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Body string `json:"body"` - StartDate time.Time `json:"start_date"` - EndDate time.Time `json:"end_date"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - SubmittalID pgtype.UUID `json:"submittal_id"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartDate time.Time `json:"start_date"` + EndDate time.Time `json:"end_date"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + SubmittalID *uuid.UUID `json:"submittal_id"` } type EvaluationInstrument struct { - EvaluationID pgtype.UUID `json:"evaluation_id"` - InstrumentID pgtype.UUID `json:"instrument_id"` + EvaluationID *uuid.UUID `json:"evaluation_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` } type Heartbeat struct { @@ -534,21 +530,21 @@ type InclinometerMeasurement struct { } type Instrument struct { - ID uuid.UUID `json:"id"` - Deleted bool `json:"deleted"` - Slug string `json:"slug"` - Name string `json:"name"` - Geometry interface{} `json:"geometry"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - 
Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - TypeID uuid.UUID `json:"type_id"` - NidID *string `json:"nid_id"` - UsgsID *string `json:"usgs_id"` - ShowCwmsTab bool `json:"show_cwms_tab"` + ID uuid.UUID `json:"id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string `json:"name"` + Geometry Geometry `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + TypeID uuid.UUID `json:"type_id"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` } type InstrumentConstant struct { @@ -557,16 +553,16 @@ type InstrumentConstant struct { } type InstrumentGroup struct { - ID uuid.UUID `json:"id"` - Deleted bool `json:"deleted"` - Slug string `json:"slug"` - Name string `json:"name"` - Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - ProjectID pgtype.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + ProjectID *uuid.UUID `json:"project_id"` } type InstrumentGroupInstrument struct { @@ -575,15 +571,15 @@ type InstrumentGroupInstrument struct { } type InstrumentNote struct { - ID uuid.UUID `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - Title string `json:"title"` - Body string `json:"body"` - Time time.Time `json:"time"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time 
`json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + ID uuid.UUID `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` } type InstrumentStatus struct { @@ -607,19 +603,19 @@ type InstrumentType struct { } type IpiOpt struct { - InstrumentID uuid.UUID `json:"instrument_id"` - NumSegments int32 `json:"num_segments"` - BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime pgtype.Timestamptz `json:"initial_time"` + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` } type IpiSegment struct { - InstrumentID uuid.UUID `json:"instrument_id"` - ID int32 `json:"id"` - LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` - TiltTimeseriesID pgtype.UUID `json:"tilt_timeseries_id"` - IncDevTimeseriesID pgtype.UUID `json:"inc_dev_timeseries_id"` - TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` + InstrumentID uuid.UUID `json:"instrument_id"` + ID int32 `json:"id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } type Measure struct { @@ -637,29 +633,29 @@ type Parameter struct { } type PlotBullseyeConfig struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - XAxisTimeseriesID pgtype.UUID `json:"x_axis_timeseries_id"` - YAxisTimeseriesID pgtype.UUID `json:"y_axis_timeseries_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID 
*uuid.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"` } type PlotConfiguration struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - PlotType PlotType `json:"plot_type"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + PlotType PlotType `json:"plot_type"` } type PlotConfigurationCustomShape struct { - PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` - Enabled bool `json:"enabled"` - Name string `json:"name"` - DataPoint float32 `json:"data_point"` - Color string `json:"color"` + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` } type PlotConfigurationSetting struct { @@ -673,24 +669,24 @@ type PlotConfigurationSetting struct { } type PlotConfigurationTimeseriesTrace struct { - PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` - TimeseriesID pgtype.UUID `json:"timeseries_id"` - TraceOrder int32 `json:"trace_order"` - TraceType TraceType `json:"trace_type"` - Color string `json:"color"` - LineStyle LineStyle `json:"line_style"` - Width float32 `json:"width"` - ShowMarkers bool `json:"show_markers"` - YAxis YAxis `json:"y_axis"` + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + TraceType TraceType `json:"trace_type"` + Color string `json:"color"` + LineStyle 
LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` } type PlotContourConfig struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - Time pgtype.Timestamptz `json:"time"` - LocfBackfill pgtype.Interval `json:"locf_backfill"` - GradientSmoothing bool `json:"gradient_smoothing"` - ContourSmoothing bool `json:"contour_smoothing"` - ShowLabels bool `json:"show_labels"` + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time *time.Time `json:"time"` + LocfBackfill string `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` } type PlotContourConfigTimeseries struct { @@ -719,12 +715,12 @@ type Profile struct { } type ProfileProjectRole struct { - ID uuid.UUID `json:"id"` - ProfileID uuid.UUID `json:"profile_id"` - RoleID uuid.UUID `json:"role_id"` - ProjectID uuid.UUID `json:"project_id"` - GrantedBy pgtype.UUID `json:"granted_by"` - GrantedDate time.Time `json:"granted_date"` + ID uuid.UUID `json:"id"` + ProfileID uuid.UUID `json:"profile_id"` + RoleID uuid.UUID `json:"role_id"` + ProjectID uuid.UUID `json:"project_id"` + GrantedBy *uuid.UUID `json:"granted_by"` + GrantedDate time.Time `json:"granted_date"` } type ProfileToken struct { @@ -736,17 +732,17 @@ type ProfileToken struct { } type Project struct { - ID uuid.UUID `json:"id"` - Image *string `json:"image"` - FederalID *string `json:"federal_id"` - Deleted bool `json:"deleted"` - Slug string `json:"slug"` - Name string `json:"name"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - DistrictID pgtype.UUID `json:"district_id"` + ID uuid.UUID `json:"id"` + Image *string `json:"image"` + FederalID *string `json:"federal_id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string 
`json:"name"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + DistrictID *uuid.UUID `json:"district_id"` } type ProjectInstrument struct { @@ -755,21 +751,21 @@ type ProjectInstrument struct { } type ReportConfig struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Slug string `json:"slug"` - Name string `json:"name"` - Description string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - DateRange *string `json:"date_range"` - DateRangeEnabled *bool `json:"date_range_enabled"` - ShowMasked *bool `json:"show_masked"` - ShowMaskedEnabled *bool `json:"show_masked_enabled"` - ShowNonvalidated *bool `json:"show_nonvalidated"` - ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + DateRange *string `json:"date_range"` + DateRangeEnabled *bool `json:"date_range_enabled"` + ShowMasked *bool `json:"show_masked"` + ShowMaskedEnabled *bool `json:"show_masked_enabled"` + ShowNonvalidated *bool `json:"show_nonvalidated"` + ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` } type ReportConfigPlotConfig struct { @@ -778,15 +774,15 @@ type ReportConfigPlotConfig struct { } type ReportDownloadJob struct { - ID uuid.UUID `json:"id"` - ReportConfigID pgtype.UUID `json:"report_config_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Status JobStatus `json:"status"` - FileKey *string `json:"file_key"` - 
FileExpiry pgtype.Timestamptz `json:"file_expiry"` - Progress int32 `json:"progress"` - ProgressUpdateDate time.Time `json:"progress_update_date"` + ID uuid.UUID `json:"id"` + ReportConfigID *uuid.UUID `json:"report_config_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Status JobStatus `json:"status"` + FileKey *string `json:"file_key"` + FileExpiry *time.Time `json:"file_expiry"` + Progress int32 `json:"progress"` + ProgressUpdateDate time.Time `json:"progress_update_date"` } type Role struct { @@ -796,20 +792,20 @@ type Role struct { } type SaaOpt struct { - InstrumentID uuid.UUID `json:"instrument_id"` - NumSegments int32 `json:"num_segments"` - BottomElevationTimeseriesID pgtype.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime pgtype.Timestamptz `json:"initial_time"` + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` } type SaaSegment struct { - InstrumentID uuid.UUID `json:"instrument_id"` - ID int32 `json:"id"` - LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` - XTimeseriesID pgtype.UUID `json:"x_timeseries_id"` - YTimeseriesID pgtype.UUID `json:"y_timeseries_id"` - ZTimeseriesID pgtype.UUID `json:"z_timeseries_id"` - TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` + InstrumentID uuid.UUID `json:"instrument_id"` + ID int32 `json:"id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } type Status struct { @@ -819,14 +815,14 @@ type Status struct { } type Submittal struct { - ID uuid.UUID `json:"id"` - AlertConfigID pgtype.UUID `json:"alert_config_id"` - SubmittalStatusID pgtype.UUID 
`json:"submittal_status_id"` - CompletionDate pgtype.Timestamptz `json:"completion_date"` - CreateDate time.Time `json:"create_date"` - DueDate time.Time `json:"due_date"` - MarkedAsMissing bool `json:"marked_as_missing"` - WarningSent bool `json:"warning_sent"` + ID uuid.UUID `json:"id"` + AlertConfigID *uuid.UUID `json:"alert_config_id"` + SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` + CompletionDate *time.Time `json:"completion_date"` + CreateDate time.Time `json:"create_date"` + DueDate time.Time `json:"due_date"` + MarkedAsMissing bool `json:"marked_as_missing"` + WarningSent bool `json:"warning_sent"` } type SubmittalStatus struct { @@ -854,18 +850,18 @@ type Timeseries struct { ID uuid.UUID `json:"id"` Slug string `json:"slug"` Name string `json:"name"` - InstrumentID pgtype.UUID `json:"instrument_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` ParameterID uuid.UUID `json:"parameter_id"` UnitID uuid.UUID `json:"unit_id"` Type NullTimeseriesType `json:"type"` } type TimeseriesCwm struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - CwmsTimeseriesID string `json:"cwms_timeseries_id"` - CwmsOfficeID string `json:"cwms_office_id"` - CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` - CwmsExtentLatestTime pgtype.Timestamptz `json:"cwms_extent_latest_time"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` } type TimeseriesMeasurement struct { @@ -883,11 +879,11 @@ type TimeseriesNote struct { } type Unit struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Abbreviation string `json:"abbreviation"` - UnitFamilyID pgtype.UUID `json:"unit_family_id"` - MeasureID pgtype.UUID `json:"measure_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Abbreviation string 
`json:"abbreviation"` + UnitFamilyID *uuid.UUID `json:"unit_family_id"` + MeasureID *uuid.UUID `json:"measure_id"` } type UnitFamily struct { @@ -912,88 +908,103 @@ type UploaderConfigMapping struct { } type VAlert struct { - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id"` - CreateDate time.Time `json:"create_date"` - ProjectID uuid.UUID `json:"project_id"` - ProjectName string `json:"project_name"` - Name string `json:"name"` - Body string `json:"body"` - Instruments interface{} `json:"instruments"` + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreateDate time.Time `json:"create_date"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + Instruments []InstrumentIDName `json:"instruments"` } type VAlertCheckEvaluationSubmittal struct { AlertConfigID uuid.UUID `json:"alert_config_id"` SubmittalID uuid.UUID `json:"submittal_id"` - ShouldWarn interface{} `json:"should_warn"` - ShouldAlert interface{} `json:"should_alert"` - ShouldRemind interface{} `json:"should_remind"` + Submittal *VSubmittal `json:"submittal"` + ShouldWarn bool `json:"should_warn"` + ShouldAlert bool `json:"should_alert"` + ShouldRemind bool `json:"should_remind"` } type VAlertCheckMeasurementSubmittal struct { - AlertConfigID uuid.UUID `json:"alert_config_id"` - SubmittalID uuid.UUID `json:"submittal_id"` - ShouldWarn interface{} `json:"should_warn"` - ShouldAlert interface{} `json:"should_alert"` - ShouldRemind interface{} `json:"should_remind"` - AffectedTimeseries string `json:"affected_timeseries"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + SubmittalID uuid.UUID `json:"submittal_id"` + Submittal *VSubmittal `json:"submittal"` + ShouldWarn bool `json:"should_warn"` + ShouldAlert bool `json:"should_alert"` + ShouldRemind bool `json:"should_remind"` + AffectedTimeseries []AlertCheckMeasurementSubmittalAffectedTimeseries 
`json:"affected_timeseries"` } type VAlertConfig struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Body string `json:"body"` - Creator pgtype.UUID `json:"creator"` - CreatorUsername string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdaterUsername *string `json:"updater_username"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - ProjectID uuid.UUID `json:"project_id"` - ProjectName string `json:"project_name"` - AlertTypeID uuid.UUID `json:"alert_type_id"` - AlertType string `json:"alert_type"` - StartDate time.Time `json:"start_date"` - ScheduleInterval string `json:"schedule_interval"` - MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` - RemindInterval string `json:"remind_interval"` - WarningInterval string `json:"warning_interval"` - LastChecked pgtype.Timestamptz `json:"last_checked"` - LastReminded pgtype.Timestamptz `json:"last_reminded"` - Instruments interface{} `json:"instruments"` - AlertEmailSubscriptions interface{} `json:"alert_email_subscriptions"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Body string `json:"body"` + Creator *uuid.UUID `json:"creator"` + CreatorUsername string `json:"creator_username"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdaterUsername *string `json:"updater_username"` + UpdateDate *time.Time `json:"update_date"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + AlertType string `json:"alert_type"` + StartDate time.Time `json:"start_date"` + ScheduleInterval string `json:"schedule_interval"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + RemindInterval string `json:"remind_interval"` + WarningInterval string `json:"warning_interval"` + LastChecked *time.Time `json:"last_checked"` + LastReminded *time.Time `json:"last_reminded"` + CreateNextSubmittalFrom *time.Time 
`json:"create_next_submittal_from"` + Instruments []InstrumentIDName `json:"instruments"` + AlertEmailSubscriptions []EmailAutocompleteResult `json:"alert_email_subscriptions"` } type VAwarePlatformParameterEnabled struct { - InstrumentID uuid.UUID `json:"instrument_id"` - AwareID uuid.UUID `json:"aware_id"` - AwareParameterKey string `json:"aware_parameter_key"` - TimeseriesID pgtype.UUID `json:"timeseries_id"` + InstrumentID uuid.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` + AwareParameterKey string `json:"aware_parameter_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +type VCollectionGroupDetail struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + Timeseries []CollectionGroupDetailsTimeseries `json:"timeseries"` } type VDatalogger struct { - ID uuid.UUID `json:"id"` - Sn string `json:"sn"` - ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` - CreatorUsername string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater uuid.UUID `json:"updater"` - UpdaterUsername string `json:"updater_username"` - UpdateDate time.Time `json:"update_date"` - Name string `json:"name"` - Slug string `json:"slug"` - ModelID uuid.UUID `json:"model_id"` - Model *string `json:"model"` - Errors interface{} `json:"errors"` - Tables string `json:"tables"` + ID uuid.UUID `json:"id"` + Sn string `json:"sn"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + CreatorUsername string `json:"creator_username"` + CreateDate time.Time `json:"create_date"` + Updater uuid.UUID `json:"updater"` + UpdaterUsername string `json:"updater_username"` + UpdateDate time.Time `json:"update_date"` + Name string `json:"name"` + Slug string `json:"slug"` 
+ ModelID uuid.UUID `json:"model_id"` + Model *string `json:"model"` + Errors []string `json:"errors"` + Tables []DataloggerTableIDName `json:"tables"` } type VDataloggerEquivalencyTable struct { - DataloggerID uuid.UUID `json:"datalogger_id"` - DataloggerTableID uuid.UUID `json:"datalogger_table_id"` - DataloggerTableName string `json:"datalogger_table_name"` - Fields string `json:"fields"` + DataloggerID uuid.UUID `json:"datalogger_id"` + DataloggerTableID uuid.UUID `json:"datalogger_table_id"` + DataloggerTableName string `json:"datalogger_table_name"` + Fields []DataloggerEquivalencyTableField `json:"fields"` } type VDataloggerHash struct { @@ -1010,18 +1021,18 @@ type VDataloggerPreview struct { } type VDistrict struct { - Agency string `json:"agency"` - ID uuid.UUID `json:"id"` - Name *string `json:"name"` - Initials *string `json:"initials"` - DivisionName *string `json:"division_name"` - DivisionInitials *string `json:"division_initials"` - OfficeID pgtype.UUID `json:"office_id"` + Agency string `json:"agency"` + ID uuid.UUID `json:"id"` + Name *string `json:"name"` + Initials *string `json:"initials"` + DivisionName *string `json:"division_name"` + DivisionInitials *string `json:"division_initials"` + OfficeID *uuid.UUID `json:"office_id"` } type VDistrictRollup struct { AlertTypeID uuid.UUID `json:"alert_type_id"` - OfficeID pgtype.UUID `json:"office_id"` + OfficeID *uuid.UUID `json:"office_id"` DistrictInitials *string `json:"district_initials"` ProjectName string `json:"project_name"` ProjectID uuid.UUID `json:"project_id"` @@ -1041,8 +1052,8 @@ type VDomain struct { } type VDomainGroup struct { - Group string `json:"group"` - Opts string `json:"opts"` + Group string `json:"group"` + Opts []DomainGroupOpt `json:"opts"` } type VEmailAutocomplete struct { @@ -1057,65 +1068,65 @@ type VEvaluation struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Body string `json:"body"` - Creator pgtype.UUID `json:"creator"` + Creator *uuid.UUID `json:"creator"` 
CreatorUsername string `json:"creator_username"` CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` + Updater *uuid.UUID `json:"updater"` UpdaterUsername *string `json:"updater_username"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + UpdateDate *time.Time `json:"update_date"` ProjectID uuid.UUID `json:"project_id"` ProjectName string `json:"project_name"` - AlertConfigID pgtype.UUID `json:"alert_config_id"` + AlertConfigID *uuid.UUID `json:"alert_config_id"` AlertConfigName *string `json:"alert_config_name"` - SubmittalID pgtype.UUID `json:"submittal_id"` + SubmittalID *uuid.UUID `json:"submittal_id"` StartDate time.Time `json:"start_date"` EndDate time.Time `json:"end_date"` - Instruments interface{} `json:"instruments"` + Instruments []InstrumentIDName `json:"instruments"` } type VInstrument struct { - ID uuid.UUID `json:"id"` - Deleted bool `json:"deleted"` - StatusID uuid.UUID `json:"status_id"` - Status string `json:"status"` - StatusTime time.Time `json:"status_time"` - Slug string `json:"slug"` - Name string `json:"name"` - TypeID uuid.UUID `json:"type_id"` - ShowCwmsTab bool `json:"show_cwms_tab"` - Type string `json:"type"` - Icon *string `json:"icon"` - Geometry go_geom.T `json:"geometry"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - NidID *string `json:"nid_id"` - UsgsID *string `json:"usgs_id"` - Telemetry model.Opts `json:"telemetry"` - HasCwms bool `json:"has_cwms"` - Projects []model.IDSlugName `json:"projects"` - Constants []uuid.UUID `json:"constants"` - Groups []uuid.UUID `json:"groups"` - AlertConfigs []uuid.UUID `json:"alert_configs"` - Opts model.Opts `json:"opts"` + ID uuid.UUID `json:"id"` + Deleted bool `json:"deleted"` + StatusID uuid.UUID `json:"status_id"` + Status string `json:"status"` + 
StatusTime time.Time `json:"status_time"` + Slug string `json:"slug"` + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` + Type string `json:"type"` + Icon *string `json:"icon"` + Geometry Geometry `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + Telemetry []IDSlugName `json:"telemetry"` + HasCwms bool `json:"has_cwms"` + Projects []IDSlugName `json:"projects"` + Constants []uuid.UUID `json:"constants"` + Groups []uuid.UUID `json:"groups"` + AlertConfigs []uuid.UUID `json:"alert_configs"` + Opts interface{} `json:"opts"` } type VInstrumentGroup struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - ProjectID pgtype.UUID `json:"project_id"` - Deleted bool `json:"deleted"` - InstrumentCount int64 `json:"instrument_count"` - TimeseriesCount interface{} `json:"timeseries_count"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + ProjectID *uuid.UUID `json:"project_id"` + Deleted bool `json:"deleted"` + InstrumentCount int64 `json:"instrument_count"` + TimeseriesCount interface{} `json:"timeseries_count"` } type VInstrumentTelemetry struct { @@ -1127,48 +1138,49 @@ type VInstrumentTelemetry struct { } type VIpiMeasurement struct { - InstrumentID 
uuid.UUID `json:"instrument_id"` - Time time.Time `json:"time"` - Measurements string `json:"measurements"` + InstrumentID uuid.UUID `json:"instrument_id"` + Time time.Time `json:"time"` + Measurements []IpiMeasurement `json:"measurements"` } type VIpiSegment struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` - Length float64 `json:"length"` - TiltTimeseriesID pgtype.UUID `json:"tilt_timeseries_id"` - IncDevTimeseriesID pgtype.UUID `json:"inc_dev_timeseries_id"` + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + Length float64 `json:"length"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` } type VPlotConfiguration struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - ShowMasked bool `json:"show_masked"` - ShowNonvalidated bool `json:"show_nonvalidated"` - ShowComments bool `json:"show_comments"` - AutoRange bool `json:"auto_range"` - DateRange string `json:"date_range"` - Threshold int32 `json:"threshold"` - ReportConfigs string `json:"report_configs"` - PlotType PlotType `json:"plot_type"` - Display interface{} `json:"display"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + ShowMasked bool `json:"show_masked"` + ShowNonvalidated bool `json:"show_nonvalidated"` + ShowComments bool `json:"show_comments"` + AutoRange bool 
`json:"auto_range"` + DateRange string `json:"date_range"` + Threshold int32 `json:"threshold"` + ReportConfigs []IDSlugName `json:"report_configs"` + PlotType PlotType `json:"plot_type"` + Display interface{} `json:"display"` } type VProfile struct { - ID uuid.UUID `json:"id"` - Edipi int64 `json:"edipi"` - Username string `json:"username"` - DisplayName string `json:"display_name"` - Email string `json:"email"` - IsAdmin bool `json:"is_admin"` - Roles interface{} `json:"roles"` + ID uuid.UUID `json:"id"` + Edipi int64 `json:"edipi"` + Username string `json:"username"` + DisplayName string `json:"display_name"` + Email string `json:"email"` + IsAdmin bool `json:"is_admin"` + Roles []string `json:"roles"` + Tokens []VProfileToken `json:"tokens"` } type VProfileProjectRole struct { @@ -1186,73 +1198,73 @@ type VProfileProjectRole struct { } type VProject struct { - ID uuid.UUID `json:"id"` - FederalID *string `json:"federal_id"` - Image interface{} `json:"image"` - DistrictID pgtype.UUID `json:"district_id"` - OfficeID pgtype.UUID `json:"office_id"` - Deleted bool `json:"deleted"` - Slug string `json:"slug"` - Name string `json:"name"` - Creator uuid.UUID `json:"creator"` - CreatorUsername *string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdaterUsername *string `json:"updater_username"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - InstrumentCount int64 `json:"instrument_count"` - InstrumentGroupCount int64 `json:"instrument_group_count"` + ID uuid.UUID `json:"id"` + FederalID *string `json:"federal_id"` + Image interface{} `json:"image"` + DistrictID *uuid.UUID `json:"district_id"` + OfficeID *uuid.UUID `json:"office_id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string `json:"name"` + Creator uuid.UUID `json:"creator"` + CreatorUsername *string `json:"creator_username"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + 
UpdaterUsername *string `json:"updater_username"` + UpdateDate *time.Time `json:"update_date"` + InstrumentCount int64 `json:"instrument_count"` + InstrumentGroupCount int64 `json:"instrument_group_count"` } type VReportConfig struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Description string `json:"description"` - ProjectID uuid.UUID `json:"project_id"` - ProjectName string `json:"project_name"` - DistrictName *string `json:"district_name"` - Creator uuid.UUID `json:"creator"` - CreatorUsername string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdaterUsername *string `json:"updater_username"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - PlotConfigs string `json:"plot_configs"` - GlobalOverrides string `json:"global_overrides"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + DistrictName *string `json:"district_name"` + Creator uuid.UUID `json:"creator"` + CreatorUsername string `json:"creator_username"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdaterUsername *string `json:"updater_username"` + UpdateDate *time.Time `json:"update_date"` + PlotConfigs []IDSlugName `json:"plot_configs"` + GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides"` } type VSaaMeasurement struct { - InstrumentID uuid.UUID `json:"instrument_id"` - Time time.Time `json:"time"` - Measurements string `json:"measurements"` + InstrumentID uuid.UUID `json:"instrument_id"` + Time time.Time `json:"time"` + Measurements []SaaMeasurement `json:"measurements"` } type VSaaSegment struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID pgtype.UUID `json:"length_timeseries_id"` - Length float64 `json:"length"` - 
XTimeseriesID pgtype.UUID `json:"x_timeseries_id"` - YTimeseriesID pgtype.UUID `json:"y_timeseries_id"` - ZTimeseriesID pgtype.UUID `json:"z_timeseries_id"` - TempTimeseriesID pgtype.UUID `json:"temp_timeseries_id"` + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + Length float64 `json:"length"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } type VSubmittal struct { - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id"` - AlertConfigName string `json:"alert_config_name"` - AlertTypeID uuid.UUID `json:"alert_type_id"` - AlertTypeName string `json:"alert_type_name"` - ProjectID uuid.UUID `json:"project_id"` - SubmittalStatusID uuid.UUID `json:"submittal_status_id"` - SubmittalStatusName string `json:"submittal_status_name"` - CompletionDate pgtype.Timestamptz `json:"completion_date"` - CreateDate time.Time `json:"create_date"` - DueDate time.Time `json:"due_date"` - MarkedAsMissing bool `json:"marked_as_missing"` - WarningSent bool `json:"warning_sent"` + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + AlertConfigName string `json:"alert_config_name"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + AlertTypeName string `json:"alert_type_name"` + ProjectID uuid.UUID `json:"project_id"` + SubmittalStatusID uuid.UUID `json:"submittal_status_id"` + SubmittalStatusName string `json:"submittal_status_name"` + CompletionDate *time.Time `json:"completion_date"` + CreateDate time.Time `json:"create_date"` + DueDate time.Time `json:"due_date"` + MarkedAsMissing bool `json:"marked_as_missing"` + WarningSent bool `json:"warning_sent"` } type VTimeseries struct { @@ -1275,7 +1287,7 @@ type VTimeseriesComputed struct { ID uuid.UUID `json:"id"` Slug string `json:"slug"` Name 
string `json:"name"` - InstrumentID pgtype.UUID `json:"instrument_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` ParameterID uuid.UUID `json:"parameter_id"` UnitID uuid.UUID `json:"unit_id"` Type NullTimeseriesType `json:"type"` @@ -1299,37 +1311,37 @@ type VTimeseriesCwm struct { CwmsTimeseriesID string `json:"cwms_timeseries_id"` CwmsOfficeID string `json:"cwms_office_id"` CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` - CwmsExtentLatestTime pgtype.Timestamptz `json:"cwms_extent_latest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` } type VTimeseriesDependency struct { ID uuid.UUID `json:"id"` - InstrumentID pgtype.UUID `json:"instrument_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` ParsedVariable interface{} `json:"parsed_variable"` - DependencyTimeseriesID pgtype.UUID `json:"dependency_timeseries_id"` + DependencyTimeseriesID *uuid.UUID `json:"dependency_timeseries_id"` } type VTimeseriesProjectMap struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - ProjectID pgtype.UUID `json:"project_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + ProjectID *uuid.UUID `json:"project_id"` } type VTimeseriesStored struct { ID uuid.UUID `json:"id"` Slug string `json:"slug"` Name string `json:"name"` - InstrumentID pgtype.UUID `json:"instrument_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` ParameterID uuid.UUID `json:"parameter_id"` UnitID uuid.UUID `json:"unit_id"` Type NullTimeseriesType `json:"type"` } type VUnit struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Abbreviation string `json:"abbreviation"` - UnitFamilyID pgtype.UUID `json:"unit_family_id"` - UnitFamily string `json:"unit_family"` - MeasureID pgtype.UUID `json:"measure_id"` - Measure string `json:"measure"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Abbreviation string `json:"abbreviation"` + UnitFamilyID *uuid.UUID `json:"unit_family_id"` + UnitFamily string `json:"unit_family"` + MeasureID 
*uuid.UUID `json:"measure_id"` + Measure string `json:"measure"` } diff --git a/api/internal/db/overrides.go b/api/internal/db/overrides.go new file mode 100644 index 00000000..0119424d --- /dev/null +++ b/api/internal/db/overrides.go @@ -0,0 +1,132 @@ +package db + +// database overrides for scanning json into nested structs +// see sqlc.yml overrides + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" + "github.com/twpayne/go-geom/encoding/geojson" + "github.com/twpayne/go-geom/encoding/wkb" +) + +type Opts map[string]interface{} + +func (o *Opts) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), o) +} + +type AlertCheckMeasurementSubmittalAffectedTimeseries struct { + InstrumentName string `json:"instrument_name"` + TimeseriesName string `json:"timeseries_name"` + Status string `json:"status"` +} + +type DataloggerEquivalencyTableField struct { + ID uuid.UUID `json:"id"` + FieldName string `json:"field_name"` + DisplayName string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +type DataloggerTableIDName struct { + ID uuid.UUID `json:"id"` + TableName string `json:"table_name"` +} + +type DomainGroupOpt struct { + ID uuid.UUID `json:"id" db:"id"` + Value string `json:"value" db:"value"` + Description *string `json:"description" db:"description"` +} + +type CollectionGroupDetailsTimeseries struct { + VTimeseries + LatestTime *time.Time `json:"latest_time" db:"latest_time"` + LatestValue *float32 `json:"latest_value" db:"latest_value"` +} + +type EmailAutocompleteResult struct { + ID uuid.UUID `json:"id"` + UserType string `json:"user_type"` + Username *string `json:"username"` + Email string `json:"email"` +} + +type Geometry struct { + *wkb.Geom +} + +func (g Geometry) MarshalJSON() ([]byte, error) { + return geojson.Marshal(g.T) +} + +func (g *Geometry) 
UnmarshalJSON(v []byte) error { + return geojson.Unmarshal(v, &g.T) +} + +type IDSlugName struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` +} + +type InstrumentIDName struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` +} + +type IpiMeasurement struct { + SegmentID int `json:"segment_id"` + Tilt *float64 `json:"tilt"` + IncDev *float64 `json:"inc_dev"` + CumDev *float64 `json:"cum_dev"` + Temp *float64 `json:"temp"` + Elelvation *float64 `json:"elevation"` +} + +type VProfileToken struct { + TokenID string `json:"token_id"` + Issued time.Time `json:"issued"` +} + +type ReportConfigGlobalOverrides struct { + DateRange TextOption `json:"date_range"` + ShowMasked ToggleOption `json:"show_masked"` + ShowNonvalidated ToggleOption `json:"show_nonvalidated"` +} + +type SaaMeasurement struct { + SegmentID int `json:"segment_id"` + X *float64 `json:"x"` + Y *float64 `json:"y"` + Z *float64 `json:"z"` + Temp *float64 `json:"temp"` + XIncrement *float64 `json:"x_increment"` + YIncrement *float64 `json:"y_increment"` + ZIncrement *float64 `json:"z_increment"` + TempIncrement *float64 `json:"temp_increment"` + XCumDev *float64 `json:"x_cum_dev"` + YCumDev *float64 `json:"y_cum_dev"` + ZCumDev *float64 `json:"z_cum_dev"` + TempCumDev *float64 `json:"temp_cum_dev"` + Elevation *float64 `json:"elevation"` +} + +type TextOption struct { + Enabled bool `json:"enabled"` + Value string `json:"value"` +} + +type ToggleOption struct { + Enabled bool `json:"enabled"` + Value bool `json:"value"` +} diff --git a/api/internal/db/plot_config.sql_gen.go b/api/internal/db/plot_config.sql_gen.go index f963f736..110d2a61 100644 --- a/api/internal/db/plot_config.sql_gen.go +++ b/api/internal/db/plot_config.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createPlotConfig = `-- name: CreatePlotConfig :one @@ 
-91,70 +89,57 @@ func (q *Queries) DeletePlotConfigSettings(ctx context.Context, id uuid.UUID) er return err } -const listPlotConfigs = `-- name: ListPlotConfigs :many -select - id, - slug, - name, - project_id, - report_configs, - creator, - create_date, - updater, - update_date, - show_masked, - show_nonvalidated, - show_comments, - auto_range, - date_range, - threshold, - plot_type, - display +const getPlotConfig = `-- name: GetPlotConfig :one +select id, slug, name, project_id, creator, create_date, updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration -where ($1 is null or $1 = project_id) -and ($2 is null or $2 = id) +where id = $1 ` -type ListPlotConfigsParams struct { - ProjectID interface{} `json:"project_id"` - ID interface{} `json:"id"` +func (q *Queries) GetPlotConfig(ctx context.Context, id uuid.UUID) (VPlotConfiguration, error) { + row := q.db.QueryRow(ctx, getPlotConfig, id) + var i VPlotConfiguration + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.ProjectID, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.ShowMasked, + &i.ShowNonvalidated, + &i.ShowComments, + &i.AutoRange, + &i.DateRange, + &i.Threshold, + &i.ReportConfigs, + &i.PlotType, + &i.Display, + ) + return i, err } -type ListPlotConfigsRow struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - ProjectID uuid.UUID `json:"project_id"` - ReportConfigs string `json:"report_configs"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - ShowMasked bool `json:"show_masked"` - ShowNonvalidated bool `json:"show_nonvalidated"` - ShowComments bool `json:"show_comments"` - AutoRange bool `json:"auto_range"` - DateRange string `json:"date_range"` - Threshold int32 `json:"threshold"` - PlotType PlotType 
`json:"plot_type"` - Display interface{} `json:"display"` -} +const listPlotConfigsForProject = `-- name: ListPlotConfigsForProject :many +select id, slug, name, project_id, creator, create_date, updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display +from v_plot_configuration +where project_id = $1 +` -func (q *Queries) ListPlotConfigs(ctx context.Context, arg ListPlotConfigsParams) ([]ListPlotConfigsRow, error) { - rows, err := q.db.Query(ctx, listPlotConfigs, arg.ProjectID, arg.ID) +func (q *Queries) ListPlotConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VPlotConfiguration, error) { + rows, err := q.db.Query(ctx, listPlotConfigsForProject, projectID) if err != nil { return nil, err } defer rows.Close() - items := []ListPlotConfigsRow{} + items := []VPlotConfiguration{} for rows.Next() { - var i ListPlotConfigsRow + var i VPlotConfiguration if err := rows.Scan( &i.ID, &i.Slug, &i.Name, &i.ProjectID, - &i.ReportConfigs, &i.Creator, &i.CreateDate, &i.Updater, @@ -165,6 +150,7 @@ func (q *Queries) ListPlotConfigs(ctx context.Context, arg ListPlotConfigsParams &i.AutoRange, &i.DateRange, &i.Threshold, + &i.ReportConfigs, &i.PlotType, &i.Display, ); err != nil { @@ -183,11 +169,11 @@ update plot_configuration set name = $3, updater = $4, update_date = $5 where pr ` type UpdatePlotConfigParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` } func (q *Queries) UpdatePlotConfig(ctx context.Context, arg UpdatePlotConfigParams) error { diff --git a/api/internal/db/plot_config_bullseye.sql_gen.go b/api/internal/db/plot_config_bullseye.sql_gen.go index 
d1485949..86653b42 100644 --- a/api/internal/db/plot_config_bullseye.sql_gen.go +++ b/api/internal/db/plot_config_bullseye.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createPlotBullseyeConfig = `-- name: CreatePlotBullseyeConfig :exec @@ -19,9 +17,9 @@ insert into plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_t ` type CreatePlotBullseyeConfigParams struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - XAxisTimeseriesID pgtype.UUID `json:"x_axis_timeseries_id"` - YAxisTimeseriesID pgtype.UUID `json:"y_axis_timeseries_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID *uuid.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"` } func (q *Queries) CreatePlotBullseyeConfig(ctx context.Context, arg CreatePlotBullseyeConfigParams) error { @@ -97,9 +95,9 @@ UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 ` type UpdatePlotBullseyeConfigParams struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - XAxisTimeseriesID pgtype.UUID `json:"x_axis_timeseries_id"` - YAxisTimeseriesID pgtype.UUID `json:"y_axis_timeseries_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID *uuid.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"` } func (q *Queries) UpdatePlotBullseyeConfig(ctx context.Context, arg UpdatePlotBullseyeConfigParams) error { diff --git a/api/internal/db/plot_config_contour.sql_gen.go b/api/internal/db/plot_config_contour.sql_gen.go index 105539c0..9e8b1f44 100644 --- a/api/internal/db/plot_config_contour.sql_gen.go +++ b/api/internal/db/plot_config_contour.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createPlotContourConfig = `-- name: CreatePlotContourConfig :exec @@ -20,12 +18,12 
@@ values ($1, $2, $3, $4, $5, $6) ` type CreatePlotContourConfigParams struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - Time pgtype.Timestamptz `json:"time"` - LocfBackfill pgtype.Interval `json:"locf_backfill"` - GradientSmoothing bool `json:"gradient_smoothing"` - ContourSmoothing bool `json:"contour_smoothing"` - ShowLabels bool `json:"show_labels"` + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time *time.Time `json:"time"` + LocfBackfill string `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` } func (q *Queries) CreatePlotContourConfig(ctx context.Context, arg CreatePlotContourConfigParams) error { @@ -75,9 +73,9 @@ func (q *Queries) DeletePlotContourConfig(ctx context.Context, plotConfigID uuid const listPlotConfigMeasurementsContourPlot = `-- name: ListPlotConfigMeasurementsContourPlot :many select - oi.x, - oi.y, - locf(mm.value) as z + oi.x::double precision x, + oi.y::double precision y, + locf(mm.value) z from plot_contour_config pc left join plot_contour_config_timeseries pcts on pcts.plot_contour_config_id = pc.plot_config_id left join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id @@ -100,8 +98,8 @@ type ListPlotConfigMeasurementsContourPlotParams struct { } type ListPlotConfigMeasurementsContourPlotRow struct { - X interface{} `json:"x"` - Y interface{} `json:"y"` + X float64 `json:"x"` + Y float64 `json:"y"` Z interface{} `json:"z"` } @@ -167,12 +165,12 @@ where plot_config_id=$1 ` type UpdatePlotContourConfigParams struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - Time pgtype.Timestamptz `json:"time"` - LocfBackfill pgtype.Interval `json:"locf_backfill"` - GradientSmoothing bool `json:"gradient_smoothing"` - ContourSmoothing bool `json:"contour_smoothing"` - ShowLabels bool `json:"show_labels"` + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time *time.Time `json:"time"` + LocfBackfill 
string `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` } func (q *Queries) UpdatePlotContourConfig(ctx context.Context, arg UpdatePlotContourConfigParams) error { diff --git a/api/internal/db/plot_config_profile.sql_gen.go b/api/internal/db/plot_config_profile.sql_gen.go index 8da61f70..b930f047 100644 --- a/api/internal/db/plot_config_profile.sql_gen.go +++ b/api/internal/db/plot_config_profile.sql_gen.go @@ -9,7 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" ) const createPlotProfileConfig = `-- name: CreatePlotProfileConfig :exec diff --git a/api/internal/db/plot_config_scatter_line.sql_gen.go b/api/internal/db/plot_config_scatter_line.sql_gen.go index 4ca69794..2260cf93 100644 --- a/api/internal/db/plot_config_scatter_line.sql_gen.go +++ b/api/internal/db/plot_config_scatter_line.sql_gen.go @@ -9,8 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createPlotConfigCustomShape = `-- name: CreatePlotConfigCustomShape :exec @@ -19,11 +17,11 @@ insert into plot_configuration_custom_shape ` type CreatePlotConfigCustomShapeParams struct { - PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` - Enabled bool `json:"enabled"` - Name string `json:"name"` - DataPoint float32 `json:"data_point"` - Color string `json:"color"` + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` } func (q *Queries) CreatePlotConfigCustomShape(ctx context.Context, arg CreatePlotConfigCustomShapeParams) error { @@ -59,14 +57,14 @@ insert into plot_configuration_timeseries_trace ` type CreatePlotConfigTimeseriesTraceParams struct { - PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` - TimeseriesID 
pgtype.UUID `json:"timeseries_id"` - TraceOrder int32 `json:"trace_order"` - Color string `json:"color"` - LineStyle LineStyle `json:"line_style"` - Width float32 `json:"width"` - ShowMarkers bool `json:"show_markers"` - YAxis YAxis `json:"y_axis"` + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` } func (q *Queries) CreatePlotConfigTimeseriesTrace(ctx context.Context, arg CreatePlotConfigTimeseriesTraceParams) error { @@ -87,7 +85,7 @@ const deleteAllPlotConfigCustomShapes = `-- name: DeleteAllPlotConfigCustomShape delete from plot_configuration_custom_shape where plot_configuration_id=$1 ` -func (q *Queries) DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID pgtype.UUID) error { +func (q *Queries) DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID *uuid.UUID) error { _, err := q.db.Exec(ctx, deleteAllPlotConfigCustomShapes, plotConfigurationID) return err } @@ -96,7 +94,7 @@ const deleteAllPlotConfigTimeseriesTraces = `-- name: DeleteAllPlotConfigTimeser delete from plot_configuration_timeseries_trace where plot_configuration_id=$1 ` -func (q *Queries) DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID pgtype.UUID) error { +func (q *Queries) DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID *uuid.UUID) error { _, err := q.db.Exec(ctx, deleteAllPlotConfigTimeseriesTraces, plotConfigurationID) return err } @@ -107,11 +105,11 @@ set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1 ` type UpdatePlotConfigCustomShapeParams struct { - PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` - Enabled bool `json:"enabled"` - Name string `json:"name"` - DataPoint float32 
`json:"data_point"` - Color string `json:"color"` + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` } func (q *Queries) UpdatePlotConfigCustomShape(ctx context.Context, arg UpdatePlotConfigCustomShapeParams) error { @@ -147,14 +145,14 @@ where plot_configuration_id=$1 and timeseries_id=$2 ` type UpdatePlotConfigTimeseriesTraceParams struct { - PlotConfigurationID pgtype.UUID `json:"plot_configuration_id"` - TimeseriesID pgtype.UUID `json:"timeseries_id"` - TraceOrder int32 `json:"trace_order"` - Color string `json:"color"` - LineStyle LineStyle `json:"line_style"` - Width float32 `json:"width"` - ShowMarkers bool `json:"show_markers"` - YAxis YAxis `json:"y_axis"` + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` } func (q *Queries) UpdatePlotConfigTimeseriesTrace(ctx context.Context, arg UpdatePlotConfigTimeseriesTraceParams) error { diff --git a/api/internal/db/profile.sql_gen.go b/api/internal/db/profile.sql_gen.go index 18ba8640..c7df60b5 100644 --- a/api/internal/db/profile.sql_gen.go +++ b/api/internal/db/profile.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createProfile = `-- name: CreateProfile :exec @@ -101,40 +99,28 @@ func (q *Queries) GetIssuedTokens(ctx context.Context, profileID uuid.UUID) ([]G return items, nil } -const getProfileForEDIPI = `-- name: GetProfileForEDIPI :many -select id, edipi, username, display_name, email, is_admin, roles from v_profile where edipi = $1 +const getProfileForEDIPI = `-- name: GetProfileForEDIPI :one +select 
id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where edipi = $1 ` -func (q *Queries) GetProfileForEDIPI(ctx context.Context, edipi int64) ([]VProfile, error) { - rows, err := q.db.Query(ctx, getProfileForEDIPI, edipi) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VProfile{} - for rows.Next() { - var i VProfile - if err := rows.Scan( - &i.ID, - &i.Edipi, - &i.Username, - &i.DisplayName, - &i.Email, - &i.IsAdmin, - &i.Roles, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *Queries) GetProfileForEDIPI(ctx context.Context, edipi int64) (VProfile, error) { + row := q.db.QueryRow(ctx, getProfileForEDIPI, edipi) + var i VProfile + err := row.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.DisplayName, + &i.Email, + &i.IsAdmin, + &i.Roles, + &i.Tokens, + ) + return i, err } const getProfileForEmail = `-- name: GetProfileForEmail :one -select id, edipi, username, display_name, email, is_admin, roles from v_profile where email ilike $1 +select id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where email ilike $1 limit 1 ` @@ -149,6 +135,7 @@ func (q *Queries) GetProfileForEmail(ctx context.Context, email string) (VProfil &i.Email, &i.IsAdmin, &i.Roles, + &i.Tokens, ) return i, err } @@ -162,11 +149,11 @@ limit 1 ` type GetProfileForTokenIDRow struct { - ID pgtype.UUID `json:"id"` - Edipi *int64 `json:"edipi"` - Username *string `json:"username"` - Email *string `json:"email"` - IsAdmin *bool `json:"is_admin"` + ID *uuid.UUID `json:"id"` + Edipi *int64 `json:"edipi"` + Username *string `json:"username"` + Email *string `json:"email"` + IsAdmin *bool `json:"is_admin"` } func (q *Queries) GetProfileForTokenID(ctx context.Context, tokenID string) (GetProfileForTokenIDRow, error) { @@ -183,7 +170,7 @@ func (q *Queries) GetProfileForTokenID(ctx context.Context, tokenID string) 
(Get } const getProfileForUsername = `-- name: GetProfileForUsername :one -select id, edipi, username, display_name, email, is_admin, roles from v_profile where username = $1 +select id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where username = $1 limit 1 ` @@ -198,6 +185,7 @@ func (q *Queries) GetProfileForUsername(ctx context.Context, username string) (V &i.Email, &i.IsAdmin, &i.Roles, + &i.Tokens, ) return i, err } diff --git a/api/internal/db/project.sql_gen.go b/api/internal/db/project.sql_gen.go index 3ac2b996..6d5a11b1 100644 --- a/api/internal/db/project.sql_gen.go +++ b/api/internal/db/project.sql_gen.go @@ -10,42 +10,8 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) -const createProject = `-- name: CreateProject :one -insert into project (federal_id, slug, name, district_id, creator, create_date) -values ($1, slugify($2, 'project'), $2, $3, $4, $5) -returning id, slug -` - -type CreateProjectParams struct { - FederalID *string `json:"federal_id"` - Name string `json:"name"` - DistrictID pgtype.UUID `json:"district_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` -} - -type CreateProjectRow struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` -} - -func (q *Queries) CreateProject(ctx context.Context, arg CreateProjectParams) (CreateProjectRow, error) { - row := q.db.QueryRow(ctx, createProject, - arg.FederalID, - arg.Name, - arg.DistrictID, - arg.Creator, - arg.CreateDate, - ) - var i CreateProjectRow - err := row.Scan(&i.ID, &i.Slug) - return i, err -} - const deleteFlagProject = `-- name: DeleteFlagProject :exec update project set deleted=true where id = $1 ` @@ -94,70 +60,26 @@ func (q *Queries) GetProjectCount(ctx context.Context) (int64, error) { return count, err } -const listDistricts = `-- name: ListDistricts :many -select agency, id, name, initials, division_name, division_initials, office_id from 
v_district -` - -func (q *Queries) ListDistricts(ctx context.Context) ([]VDistrict, error) { - rows, err := q.db.Query(ctx, listDistricts) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VDistrict{} - for rows.Next() { - var i VDistrict - if err := rows.Scan( - &i.Agency, - &i.ID, - &i.Name, - &i.Initials, - &i.DivisionName, - &i.DivisionInitials, - &i.OfficeID, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listInstrumentGroupsForProject = `-- name: ListInstrumentGroupsForProject :many -select ig.id, ig.slug, ig.name, ig.description, ig.creator, ig.create_date, ig.updater, ig.update_date, ig.project_id, ig.deleted, ig.instrument_count, ig.timeseries_count -from v_instrument_group ig -where ig.project_id = $1 +const listAdminProjects = `-- name: ListAdminProjects :many +select pr.project_id from profile_project_roles pr +inner join role ro on ro.id = pr.role_id +where pr.profile_id = $1 +and ro.name = 'ADMIN' ` -func (q *Queries) ListInstrumentGroupsForProject(ctx context.Context, projectID pgtype.UUID) ([]VInstrumentGroup, error) { - rows, err := q.db.Query(ctx, listInstrumentGroupsForProject, projectID) +func (q *Queries) ListAdminProjects(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) { + rows, err := q.db.Query(ctx, listAdminProjects, profileID) if err != nil { return nil, err } defer rows.Close() - items := []VInstrumentGroup{} + items := []uuid.UUID{} for rows.Next() { - var i VInstrumentGroup - if err := rows.Scan( - &i.ID, - &i.Slug, - &i.Name, - &i.Description, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, - &i.ProjectID, - &i.Deleted, - &i.InstrumentCount, - &i.TimeseriesCount, - ); err != nil { + var project_id uuid.UUID + if err := rows.Scan(&project_id); err != nil { return nil, err } - items = append(items, i) + items = append(items, project_id) } if err := rows.Err(); err != nil { 
return nil, err @@ -165,50 +87,27 @@ func (q *Queries) ListInstrumentGroupsForProject(ctx context.Context, projectID return items, nil } -const listInstrumentsForProject = `-- name: ListInstrumentsForProject :many -select i.id, i.deleted, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i.station_offset, i.creator, i.create_date, i.updater, i.update_date, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts -from v_instrument i -inner join project_instrument pi on pi.instrument_id = i.id -where pi.project_id = $1 +const listDistricts = `-- name: ListDistricts :many +select agency, id, name, initials, division_name, division_initials, office_id from v_district ` -func (q *Queries) ListInstrumentsForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) { - rows, err := q.db.Query(ctx, listInstrumentsForProject, projectID) +func (q *Queries) ListDistricts(ctx context.Context) ([]VDistrict, error) { + rows, err := q.db.Query(ctx, listDistricts) if err != nil { return nil, err } defer rows.Close() - items := []VInstrument{} + items := []VDistrict{} for rows.Next() { - var i VInstrument + var i VDistrict if err := rows.Scan( + &i.Agency, &i.ID, - &i.Deleted, - &i.StatusID, - &i.Status, - &i.StatusTime, - &i.Slug, &i.Name, - &i.TypeID, - &i.ShowCwmsTab, - &i.Type, - &i.Icon, - &i.Geometry, - &i.Station, - &i.StationOffset, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, - &i.NidID, - &i.UsgsID, - &i.Telemetry, - &i.HasCwms, - &i.Projects, - &i.Constants, - &i.Groups, - &i.AlertConfigs, - &i.Opts, + &i.Initials, + &i.DivisionName, + &i.DivisionInitials, + &i.OfficeID, ); err != nil { return nil, err } @@ -407,12 +306,12 @@ update project set name=$2, updater=$3, update_date=$4, district_id=$5, federal_ ` type UpdateProjectParams struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Updater pgtype.UUID 
`json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - DistrictID pgtype.UUID `json:"district_id"` - FederalID *string `json:"federal_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + DistrictID *uuid.UUID `json:"district_id"` + FederalID *string `json:"federal_id"` } func (q *Queries) UpdateProject(ctx context.Context, arg UpdateProjectParams) (uuid.UUID, error) { diff --git a/api/internal/db/project_role.sql_gen.go b/api/internal/db/project_role.sql_gen.go index 3ed77cfb..a89b31a7 100644 --- a/api/internal/db/project_role.sql_gen.go +++ b/api/internal/db/project_role.sql_gen.go @@ -9,8 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createProfileProjectRole = `-- name: CreateProfileProjectRole :one @@ -21,10 +19,10 @@ returning id ` type CreateProfileProjectRoleParams struct { - ProjectID uuid.UUID `json:"project_id"` - ProfileID uuid.UUID `json:"profile_id"` - RoleID uuid.UUID `json:"role_id"` - GrantedBy pgtype.UUID `json:"granted_by"` + ProjectID uuid.UUID `json:"project_id"` + ProfileID uuid.UUID `json:"profile_id"` + RoleID uuid.UUID `json:"role_id"` + GrantedBy *uuid.UUID `json:"granted_by"` } func (q *Queries) CreateProfileProjectRole(ctx context.Context, arg CreateProfileProjectRoleParams) (uuid.UUID, error) { diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index fd72735d..66bfec93 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -9,16 +9,16 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) type Querier interface { AddTimeseriesToCollectionGroup(ctx context.Context, arg AddTimeseriesToCollectionGroupParams) error AssignInstrumentToAlertConfig(ctx context.Context, arg AssignInstrumentToAlertConfigParams) error AssignInstrumentToProject(ctx context.Context, arg 
AssignInstrumentToProjectParams) error + AssignInstrumentToProjectBatch(ctx context.Context, arg []AssignInstrumentToProjectBatchParams) *AssignInstrumentToProjectBatchBatchResults AssignReportConfigPlotConfig(ctx context.Context, arg AssignReportConfigPlotConfigParams) error - CompleteEvaluationSubmittal(ctx context.Context, id uuid.UUID) error + AssignReportConfigPlotConfigBatch(ctx context.Context, arg []AssignReportConfigPlotConfigBatchParams) *AssignReportConfigPlotConfigBatchBatchResults + CompleteEvaluationSubmittal(ctx context.Context, id uuid.UUID) (Submittal, error) CreateAlert(ctx context.Context, alertConfigID uuid.UUID) error CreateAlertConfig(ctx context.Context, arg CreateAlertConfigParams) (uuid.UUID, error) CreateAlertEmailSubscription(ctx context.Context, arg CreateAlertEmailSubscriptionParams) error @@ -27,6 +27,7 @@ type Querier interface { CreateAlertRead(ctx context.Context, arg CreateAlertReadParams) error CreateAlerts(ctx context.Context, alertConfigID []uuid.UUID) *CreateAlertsBatchResults CreateAwarePlatform(ctx context.Context, arg CreateAwarePlatformParams) error + CreateAwarePlatformBatch(ctx context.Context, arg []CreateAwarePlatformBatchParams) *CreateAwarePlatformBatchBatchResults CreateCalculatedTimeseries(ctx context.Context, arg CreateCalculatedTimeseriesParams) (uuid.UUID, error) CreateCalculation(ctx context.Context, arg CreateCalculationParams) error CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CollectionGroup, error) @@ -34,16 +35,23 @@ type Querier interface { CreateDataloggerError(ctx context.Context, arg CreateDataloggerErrorParams) error CreateDataloggerHash(ctx context.Context, arg CreateDataloggerHashParams) error CreateDataloggerTablePreview(ctx context.Context, arg CreateDataloggerTablePreviewParams) error - CreateEvalationInstrument(ctx context.Context, arg CreateEvalationInstrumentParams) error CreateEvaluation(ctx context.Context, arg CreateEvaluationParams) (uuid.UUID, error) + 
CreateEvaluationInstrument(ctx context.Context, arg CreateEvaluationInstrumentParams) error + CreateEvaluationInstrumentsBatch(ctx context.Context, arg []CreateEvaluationInstrumentsBatchParams) *CreateEvaluationInstrumentsBatchBatchResults CreateHeartbeat(ctx context.Context, argTime time.Time) (time.Time, error) CreateInstrument(ctx context.Context, arg CreateInstrumentParams) (CreateInstrumentRow, error) CreateInstrumentConstant(ctx context.Context, arg CreateInstrumentConstantParams) error + CreateInstrumentConstantBatch(ctx context.Context, arg []CreateInstrumentConstantBatchParams) *CreateInstrumentConstantBatchBatchResults CreateInstrumentGroup(ctx context.Context, arg CreateInstrumentGroupParams) (CreateInstrumentGroupRow, error) CreateInstrumentGroupInstruments(ctx context.Context, arg CreateInstrumentGroupInstrumentsParams) error + CreateInstrumentGroupsBatch(ctx context.Context, arg []CreateInstrumentGroupsBatchParams) *CreateInstrumentGroupsBatchBatchResults CreateInstrumentNote(ctx context.Context, arg CreateInstrumentNoteParams) (InstrumentNote, error) + CreateInstrumentNoteBatch(ctx context.Context, arg []CreateInstrumentNoteBatchParams) *CreateInstrumentNoteBatchBatchResults + CreateInstrumentsBatch(ctx context.Context, arg []CreateInstrumentsBatchParams) *CreateInstrumentsBatchBatchResults CreateIpiOpts(ctx context.Context, arg CreateIpiOptsParams) error + CreateIpiOptsBatch(ctx context.Context, arg []CreateIpiOptsBatchParams) *CreateIpiOptsBatchBatchResults CreateIpiSegment(ctx context.Context, arg CreateIpiSegmentParams) error + CreateIpiSegmentBatch(ctx context.Context, arg []CreateIpiSegmentBatchParams) *CreateIpiSegmentBatchBatchResults CreateNextEvaluationSubmittal(ctx context.Context, id uuid.UUID) error CreateNextSubmittalFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error CreateNextSubmittalFromNewAlertConfigDate(ctx context.Context, arg CreateNextSubmittalFromNewAlertConfigDateParams) error @@ -51,44 +59,56 @@ type Querier 
interface { CreateOrUpdateCalculation(ctx context.Context, arg CreateOrUpdateCalculationParams) error CreateOrUpdateEquivalencyTableRow(ctx context.Context, arg CreateOrUpdateEquivalencyTableRowParams) error CreateOrUpdateInstrumentStatus(ctx context.Context, arg CreateOrUpdateInstrumentStatusParams) error + CreateOrUpdateInstrumentStatusBatch(ctx context.Context, arg []CreateOrUpdateInstrumentStatusBatchParams) *CreateOrUpdateInstrumentStatusBatchBatchResults CreateOrUpdateTimeseriesMeasurement(ctx context.Context, arg CreateOrUpdateTimeseriesMeasurementParams) error + CreateOrUpdateTimeseriesMeasurementsBatch(ctx context.Context, arg []CreateOrUpdateTimeseriesMeasurementsBatchParams) *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults CreateOrUpdateTimeseriesNote(ctx context.Context, arg CreateOrUpdateTimeseriesNoteParams) error + CreateOrUpdateTimeseriesNoteBatch(ctx context.Context, arg []CreateOrUpdateTimeseriesNoteBatchParams) *CreateOrUpdateTimeseriesNoteBatchBatchResults CreatePlotBullseyeConfig(ctx context.Context, arg CreatePlotBullseyeConfigParams) error CreatePlotConfig(ctx context.Context, arg CreatePlotConfigParams) (uuid.UUID, error) CreatePlotConfigCustomShape(ctx context.Context, arg CreatePlotConfigCustomShapeParams) error + CreatePlotConfigCustomShapesBatch(ctx context.Context, arg []CreatePlotConfigCustomShapesBatchParams) *CreatePlotConfigCustomShapesBatchBatchResults CreatePlotConfigScatterLineLayout(ctx context.Context, arg CreatePlotConfigScatterLineLayoutParams) error CreatePlotConfigSettings(ctx context.Context, arg CreatePlotConfigSettingsParams) error CreatePlotConfigTimeseriesTrace(ctx context.Context, arg CreatePlotConfigTimeseriesTraceParams) error + CreatePlotConfigTimeseriesTracesBatch(ctx context.Context, arg []CreatePlotConfigTimeseriesTracesBatchParams) *CreatePlotConfigTimeseriesTracesBatchBatchResults CreatePlotContourConfig(ctx context.Context, arg CreatePlotContourConfigParams) error CreatePlotContourConfigTimeseries(ctx 
context.Context, arg CreatePlotContourConfigTimeseriesParams) error + CreatePlotContourConfigTimeseriesBatch(ctx context.Context, arg []CreatePlotContourConfigTimeseriesBatchParams) *CreatePlotContourConfigTimeseriesBatchBatchResults CreatePlotProfileConfig(ctx context.Context, arg CreatePlotProfileConfigParams) error CreateProfile(ctx context.Context, arg CreateProfileParams) error CreateProfileProjectRole(ctx context.Context, arg CreateProfileProjectRoleParams) (uuid.UUID, error) CreateProfileToken(ctx context.Context, arg CreateProfileTokenParams) (ProfileToken, error) - CreateProject(ctx context.Context, arg CreateProjectParams) (CreateProjectRow, error) + CreateProjectsBatch(ctx context.Context, arg []CreateProjectsBatchParams) *CreateProjectsBatchBatchResults CreateReportConfig(ctx context.Context, arg CreateReportConfigParams) (uuid.UUID, error) CreateReportDownloadJob(ctx context.Context, arg CreateReportDownloadJobParams) (ReportDownloadJob, error) CreateSaaOpts(ctx context.Context, arg CreateSaaOptsParams) error + CreateSaaOptsBatch(ctx context.Context, arg []CreateSaaOptsBatchParams) *CreateSaaOptsBatchBatchResults CreateSaaSegment(ctx context.Context, arg CreateSaaSegmentParams) error + CreateSaaSegmentBatch(ctx context.Context, arg []CreateSaaSegmentBatchParams) *CreateSaaSegmentBatchBatchResults CreateTimeseries(ctx context.Context, arg CreateTimeseriesParams) (CreateTimeseriesRow, error) + CreateTimeseriesBatch(ctx context.Context, arg []CreateTimeseriesBatchParams) *CreateTimeseriesBatchBatchResults CreateTimeseriesCwms(ctx context.Context, arg CreateTimeseriesCwmsParams) error - CreateTimeseriesMeasruement(ctx context.Context, arg CreateTimeseriesMeasruementParams) error + CreateTimeseriesCwmsBatch(ctx context.Context, arg []CreateTimeseriesCwmsBatchParams) *CreateTimeseriesCwmsBatchBatchResults + CreateTimeseriesMeasurement(ctx context.Context, arg CreateTimeseriesMeasurementParams) error + CreateTimeseriesMeasurementsBatch(ctx context.Context, 
arg []CreateTimeseriesMeasurementsBatchParams) *CreateTimeseriesMeasurementsBatchBatchResults CreateTimeseriesNote(ctx context.Context, arg CreateTimeseriesNoteParams) error + CreateTimeseriesNotesBatch(ctx context.Context, arg []CreateTimeseriesNotesBatchParams) *CreateTimeseriesNotesBatchBatchResults DeleteAlertConfig(ctx context.Context, id uuid.UUID) error DeleteAlertEmailSubscription(ctx context.Context, arg DeleteAlertEmailSubscriptionParams) error DeleteAlertProfileSubscription(ctx context.Context, arg DeleteAlertProfileSubscriptionParams) error DeleteAlertRead(ctx context.Context, arg DeleteAlertReadParams) error DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error DeleteAllAlertProfileSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error - DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID pgtype.UUID) error - DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID pgtype.UUID) error + DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID *uuid.UUID) error + DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID *uuid.UUID) error DeleteAllPlotContourConfigTimeseries(ctx context.Context, plotContourConfigID uuid.UUID) error DeleteCalculatedTimeseries(ctx context.Context, id uuid.UUID) error DeleteCollectionGroup(ctx context.Context, arg DeleteCollectionGroupParams) error DeleteDatalogger(ctx context.Context, arg DeleteDataloggerParams) error DeleteDataloggerTable(ctx context.Context, id uuid.UUID) error DeleteDataloggerTableError(ctx context.Context, arg DeleteDataloggerTableErrorParams) error - DeleteEquivalencyTable(ctx context.Context, dataloggerTableID pgtype.UUID) error + DeleteEquivalencyTable(ctx context.Context, dataloggerTableID *uuid.UUID) error DeleteEquivalencyTableRow(ctx context.Context, id uuid.UUID) error DeleteEvaluation(ctx context.Context, id uuid.UUID) error DeleteFlagInstrument(ctx 
context.Context, arg DeleteFlagInstrumentParams) error @@ -108,39 +128,42 @@ type Querier interface { DeleteTimeseriesMeasurement(ctx context.Context, arg DeleteTimeseriesMeasurementParams) error DeleteTimeseriesMeasurements(ctx context.Context, arg DeleteTimeseriesMeasurementsParams) error DeleteTimeseriesMeasurementsRange(ctx context.Context, arg DeleteTimeseriesMeasurementsRangeParams) error - DeleteTimeseriesNote(ctx context.Context, arg DeleteTimeseriesNoteParams) error + DeleteTimeseriesNoteRange(ctx context.Context, arg DeleteTimeseriesNoteRangeParams) error DeleteToken(ctx context.Context, arg DeleteTokenParams) error GetAlert(ctx context.Context, arg GetAlertParams) (GetAlertRow, error) - GetAlertSubscription(ctx context.Context, arg GetAlertSubscriptionParams) ([]AlertProfileSubscription, error) - GetAlertSubscriptionByID(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) + GetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) + GetAlertSubscription(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) + GetAlertSubscriptionForAlertConfig(ctx context.Context, arg GetAlertSubscriptionForAlertConfigParams) (AlertProfileSubscription, error) GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VSaaSegment, error) - GetCollectionGroupDetailsTimeseries(ctx context.Context, arg GetCollectionGroupDetailsTimeseriesParams) (GetCollectionGroupDetailsTimeseriesRow, error) + GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) GetDatalogger(ctx context.Context, id uuid.UUID) (VDatalogger, error) GetDataloggerByModelSN(ctx context.Context, arg GetDataloggerByModelSNParams) (VDatalogger, error) GetDataloggerHashByModelSN(ctx context.Context, arg GetDataloggerHashByModelSNParams) (string, error) GetDataloggerIsActive(ctx context.Context, arg 
GetDataloggerIsActiveParams) (int32, error) GetDataloggerModelName(ctx context.Context, id uuid.UUID) (*string, error) GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) - GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) ([]VDataloggerEquivalencyTable, error) + GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) GetEvaluation(ctx context.Context, id uuid.UUID) (VEvaluation, error) GetHome(ctx context.Context) (GetHomeRow, error) + GetInstrument(ctx context.Context, id uuid.UUID) (VInstrument, error) GetInstrumentCount(ctx context.Context) (int64, error) + GetInstrumentGroup(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) GetIpiMeasurementsForInstrument(ctx context.Context, arg GetIpiMeasurementsForInstrumentParams) ([]VIpiMeasurement, error) GetIsValidDataloggerTable(ctx context.Context, id uuid.UUID) (bool, error) GetIsValidEquivalencyTableTimeseries(ctx context.Context, id uuid.UUID) (bool, error) GetIssuedTokens(ctx context.Context, profileID uuid.UUID) ([]GetIssuedTokensRow, error) GetLatestHeartbeat(ctx context.Context) (interface{}, error) GetOrCreateDataloggerTable(ctx context.Context, arg GetOrCreateDataloggerTableParams) (uuid.UUID, error) - GetProfileForEDIPI(ctx context.Context, edipi int64) ([]VProfile, error) + GetPlotConfig(ctx context.Context, id uuid.UUID) (VPlotConfiguration, error) + GetProfileForEDIPI(ctx context.Context, edipi int64) (VProfile, error) GetProfileForEmail(ctx context.Context, email string) (VProfile, error) GetProfileForTokenID(ctx context.Context, tokenID string) (GetProfileForTokenIDRow, error) GetProfileForUsername(ctx context.Context, username string) (VProfile, error) GetProject(ctx context.Context, id uuid.UUID) (VProject, error) GetProjectCount(ctx context.Context) (int64, error) - GetProjectCountForInstrument(ctx context.Context, instrumentIds []uuid.UUID) 
(GetProjectCountForInstrumentRow, error) GetProjectMembership(ctx context.Context, id uuid.UUID) (GetProjectMembershipRow, error) - GetReportConfigByID(ctx context.Context, id uuid.UUID) ([]VReportConfig, error) + GetReportConfig(ctx context.Context, id uuid.UUID) (VReportConfig, error) GetReportDownloadJob(ctx context.Context, arg GetReportDownloadJobParams) (ReportDownloadJob, error) GetSaaMeasurementsForInstrument(ctx context.Context, arg GetSaaMeasurementsForInstrumentParams) ([]VSaaMeasurement, error) GetStoredTimeseriesExists(ctx context.Context, id uuid.UUID) (bool, error) @@ -148,14 +171,13 @@ type Querier interface { GetTimeseriesCwms(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) GetTimeseriesProjectMap(ctx context.Context, timeseriesIds []uuid.UUID) ([]VTimeseriesProjectMap, error) GetTokenInfo(ctx context.Context, tokenID string) (ProfileToken, error) - GetetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) IsProjectAdmin(ctx context.Context, arg IsProjectAdminParams) (bool, error) IsProjectMember(ctx context.Context, arg IsProjectMemberParams) (bool, error) ListAdminProjects(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) ListAlertConfigSubmittals(ctx context.Context, arg ListAlertConfigSubmittalsParams) ([]VSubmittal, error) ListAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) ListAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) - ListAlertConfigsForProjectAndAlertType(ctx context.Context, arg ListAlertConfigsForProjectAndAlertTypeParams) ([]VAlertConfig, error) + ListAlertConfigsForProjectAlertType(ctx context.Context, arg ListAlertConfigsForProjectAlertTypeParams) ([]VAlertConfig, error) ListAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlert, error) ListAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]ListAlertsForProfileRow, error) ListAlertsForProject(ctx context.Context, projectID 
uuid.UUID) ([]VAlert, error) @@ -164,7 +186,7 @@ type Querier interface { ListAwareParameters(ctx context.Context) ([]ListAwareParametersRow, error) ListAwarePlatformParameterEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) ListCalculatedTimeseries(ctx context.Context, arg ListCalculatedTimeseriesParams) ([]ListCalculatedTimeseriesRow, error) - ListCollectionGroups(ctx context.Context, arg ListCollectionGroupsParams) ([]ListCollectionGroupsRow, error) + ListCollectionGroupsForProject(ctx context.Context, projectID uuid.UUID) ([]ListCollectionGroupsForProjectRow, error) ListDataloggersForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) ListDistricts(ctx context.Context) ([]VDistrict, error) ListDomainGroups(ctx context.Context) ([]VDomainGroup, error) @@ -175,26 +197,27 @@ type Querier interface { ListIncompleteEvaluationSubmittals(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) ListIncompleteMeasurementSubmittals(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) - ListInstrumentEvaluations(ctx context.Context, instrumentID pgtype.UUID) ([]VEvaluation, error) + ListInstrumentEvaluations(ctx context.Context, instrumentID *uuid.UUID) ([]VEvaluation, error) ListInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID) ([]ListInstrumentGroupInstrumentsRow, error) ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) - ListInstrumentGroups(ctx context.Context, id interface{}) ([]ListInstrumentGroupsRow, error) - ListInstrumentGroupsForProject(ctx context.Context, projectID pgtype.UUID) ([]VInstrumentGroup, error) + ListInstrumentGroups(ctx context.Context) ([]VInstrumentGroup, error) + ListInstrumentGroupsForProject(ctx context.Context, projectID *uuid.UUID) ([]VInstrumentGroup, error) ListInstrumentIDNamesByIDs(ctx context.Context, 
instrumentIds []uuid.UUID) ([]ListInstrumentIDNamesByIDsRow, error) ListInstrumentNotes(ctx context.Context, arg ListInstrumentNotesParams) ([]InstrumentNote, error) ListInstrumentProjects(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) ListInstrumentStatuses(ctx context.Context, arg ListInstrumentStatusesParams) ([]ListInstrumentStatusesRow, error) ListInstrumentSubmittals(ctx context.Context, arg ListInstrumentSubmittalsParams) ([]VSubmittal, error) ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) - ListInstruments(ctx context.Context, id interface{}) ([]ListInstrumentsRow, error) + ListInstruments(ctx context.Context) ([]VInstrument, error) ListInstrumentsForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) ListMeasurementDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListMeasurementDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, arg ListPlotConfigMeasurementsBullseyePlotParams) ([]ListPlotConfigMeasurementsBullseyePlotRow, error) ListPlotConfigMeasurementsContourPlot(ctx context.Context, arg ListPlotConfigMeasurementsContourPlotParams) ([]ListPlotConfigMeasurementsContourPlotRow, error) - ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID pgtype.UUID) ([]VTimeseries, error) - ListPlotConfigs(ctx context.Context, arg ListPlotConfigsParams) ([]ListPlotConfigsRow, error) + ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) + ListPlotConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VPlotConfiguration, error) ListPlotContourConfigTimes(ctx context.Context, arg ListPlotContourConfigTimesParams) ([]time.Time, error) + ListProjectCountForInstruments(ctx context.Context, instrumentIds []uuid.UUID) 
([]ListProjectCountForInstrumentsRow, error) ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) ListProjectEvaluationsByAlertConfig(ctx context.Context, arg ListProjectEvaluationsByAlertConfigParams) ([]VEvaluation, error) ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]ListProjectMembersRow, error) @@ -216,10 +239,12 @@ type Querier interface { ResetDataloggerTableName(ctx context.Context, id uuid.UUID) error SearchProjects(ctx context.Context, arg SearchProjectsParams) ([]VProject, error) UnassignAllInstrumentsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error - UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID pgtype.UUID) error + UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID *uuid.UUID) error UnassignAllReportConfigPlotConfig(ctx context.Context, reportConfigID uuid.UUID) error UnassignInstrumentFromProject(ctx context.Context, arg UnassignInstrumentFromProjectParams) error + UnassignInstrumentFromProjectBatch(ctx context.Context, arg []UnassignInstrumentFromProjectBatchParams) *UnassignInstrumentFromProjectBatchBatchResults UnassignReportConfigPlotConfig(ctx context.Context, arg UnassignReportConfigPlotConfigParams) error + UnassignReportConfigPlotConfigBatch(ctx context.Context, arg []UnassignReportConfigPlotConfigBatchParams) *UnassignReportConfigPlotConfigBatchBatchResults UnregisterEmail(ctx context.Context, id uuid.UUID) error UpdateAlertConfig(ctx context.Context, arg UpdateAlertConfigParams) error UpdateAlertConfigLastReminded(ctx context.Context, arg UpdateAlertConfigLastRemindedParams) error @@ -230,13 +255,15 @@ type Querier interface { UpdateDataloggerUpdater(ctx context.Context, arg UpdateDataloggerUpdaterParams) error UpdateEquivalencyTableRow(ctx context.Context, arg UpdateEquivalencyTableRowParams) error UpdateEvaluation(ctx context.Context, arg UpdateEvaluationParams) error - UpdateFutureSubmittalForAlertConfig(ctx 
context.Context, alertConfigID pgtype.UUID) (uuid.UUID, error) + UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) UpdateInstrument(ctx context.Context, arg UpdateInstrumentParams) error UpdateInstrumentGeometry(ctx context.Context, arg UpdateInstrumentGeometryParams) (uuid.UUID, error) UpdateInstrumentGroup(ctx context.Context, arg UpdateInstrumentGroupParams) (InstrumentGroup, error) UpdateInstrumentNote(ctx context.Context, arg UpdateInstrumentNoteParams) (InstrumentNote, error) UpdateIpiOpts(ctx context.Context, arg UpdateIpiOptsParams) error + UpdateIpiOptsBatch(ctx context.Context, arg []UpdateIpiOptsBatchParams) *UpdateIpiOptsBatchBatchResults UpdateIpiSegment(ctx context.Context, arg UpdateIpiSegmentParams) error + UpdateIpiSegmentsBatch(ctx context.Context, arg []UpdateIpiSegmentsBatchParams) *UpdateIpiSegmentsBatchBatchResults UpdateMyAlertSubscription(ctx context.Context, arg UpdateMyAlertSubscriptionParams) error UpdatePlotBullseyeConfig(ctx context.Context, arg UpdatePlotBullseyeConfigParams) error UpdatePlotConfig(ctx context.Context, arg UpdatePlotConfigParams) error @@ -253,6 +280,7 @@ type Querier interface { UpdateReportConfig(ctx context.Context, arg UpdateReportConfigParams) error UpdateReportDownloadJob(ctx context.Context, arg UpdateReportDownloadJobParams) error UpdateSaaOpts(ctx context.Context, arg UpdateSaaOptsParams) error + UpdateSaaOptsBatch(ctx context.Context, arg []UpdateSaaOptsBatchParams) *UpdateSaaOptsBatchBatchResults UpdateSaaSegment(ctx context.Context, arg UpdateSaaSegmentParams) error UpdateSubmittal(ctx context.Context, arg UpdateSubmittalParams) error UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, arg UpdateSubmittalCompletionDateOrWarningSentParams) error @@ -261,9 +289,9 @@ type Querier interface { ValidateInstrumentNamesProjectUnique(ctx context.Context, arg ValidateInstrumentNamesProjectUniqueParams) ([]string, error) 
ValidateInstrumentsAssignerAuthorized(ctx context.Context, arg ValidateInstrumentsAssignerAuthorizedParams) ([]ValidateInstrumentsAssignerAuthorizedRow, error) ValidateProjectsAssignerAuthorized(ctx context.Context, arg ValidateProjectsAssignerAuthorizedParams) ([]string, error) - ValidateProjectsInstrumentNameUnique(ctx context.Context, arg ValidateProjectsInstrumentNameUniqueParams) ([]ValidateProjectsInstrumentNameUniqueRow, error) + ValidateProjectsInstrumentNameUnique(ctx context.Context, arg ValidateProjectsInstrumentNameUniqueParams) ([]string, error) VerifyDataloggerExists(ctx context.Context, id uuid.UUID) (bool, error) - VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID pgtype.UUID) error + VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID *uuid.UUID) error VerifyMissingSubmittal(ctx context.Context, id uuid.UUID) error } diff --git a/api/internal/db/report_config.sql_gen.go b/api/internal/db/report_config.sql_gen.go index 1a491523..acf75cf2 100644 --- a/api/internal/db/report_config.sql_gen.go +++ b/api/internal/db/report_config.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const assignReportConfigPlotConfig = `-- name: AssignReportConfigPlotConfig :exec @@ -73,8 +71,8 @@ insert into report_download_job (report_config_id, creator) values ($1, $2) retu ` type CreateReportDownloadJobParams struct { - ReportConfigID pgtype.UUID `json:"report_config_id"` - Creator uuid.UUID `json:"creator"` + ReportConfigID *uuid.UUID `json:"report_config_id"` + Creator uuid.UUID `json:"creator"` } func (q *Queries) CreateReportDownloadJob(ctx context.Context, arg CreateReportDownloadJobParams) (ReportDownloadJob, error) { @@ -103,44 +101,31 @@ func (q *Queries) DeleteReportConfig(ctx context.Context, id uuid.UUID) error { return err } -const getReportConfigByID = `-- name: GetReportConfigByID :many +const getReportConfig = `-- name: 
GetReportConfig :one select id, slug, name, description, project_id, project_name, district_name, creator, creator_username, create_date, updater, updater_username, update_date, plot_configs, global_overrides from v_report_config where id = $1 ` -func (q *Queries) GetReportConfigByID(ctx context.Context, id uuid.UUID) ([]VReportConfig, error) { - rows, err := q.db.Query(ctx, getReportConfigByID, id) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VReportConfig{} - for rows.Next() { - var i VReportConfig - if err := rows.Scan( - &i.ID, - &i.Slug, - &i.Name, - &i.Description, - &i.ProjectID, - &i.ProjectName, - &i.DistrictName, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, - &i.PlotConfigs, - &i.GlobalOverrides, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *Queries) GetReportConfig(ctx context.Context, id uuid.UUID) (VReportConfig, error) { + row := q.db.QueryRow(ctx, getReportConfig, id) + var i VReportConfig + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.ProjectID, + &i.ProjectName, + &i.DistrictName, + &i.Creator, + &i.CreatorUsername, + &i.CreateDate, + &i.Updater, + &i.UpdaterUsername, + &i.UpdateDate, + &i.PlotConfigs, + &i.GlobalOverrides, + ) + return i, err } const getReportDownloadJob = `-- name: GetReportDownloadJob :one @@ -283,17 +268,17 @@ show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 whe ` type UpdateReportConfigParams struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Updater pgtype.UUID `json:"updater"` - UpdateDate pgtype.Timestamptz `json:"update_date"` - Description string `json:"description"` - DateRange *string `json:"date_range"` - DateRangeEnabled *bool `json:"date_range_enabled"` - ShowMasked *bool `json:"show_masked"` - ShowMaskedEnabled *bool `json:"show_masked_enabled"` - 
ShowNonvalidated *bool `json:"show_nonvalidated"` - ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` + Description string `json:"description"` + DateRange *string `json:"date_range"` + DateRangeEnabled *bool `json:"date_range_enabled"` + ShowMasked *bool `json:"show_masked"` + ShowMaskedEnabled *bool `json:"show_masked_enabled"` + ShowNonvalidated *bool `json:"show_nonvalidated"` + ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` } func (q *Queries) UpdateReportConfig(ctx context.Context, arg UpdateReportConfigParams) error { @@ -318,12 +303,12 @@ update report_download_job set status=$2, progress=$3, progress_update_date=$4, ` type UpdateReportDownloadJobParams struct { - ID uuid.UUID `json:"id"` - Status JobStatus `json:"status"` - Progress int32 `json:"progress"` - ProgressUpdateDate time.Time `json:"progress_update_date"` - FileKey *string `json:"file_key"` - FileExpiry pgtype.Timestamptz `json:"file_expiry"` + ID uuid.UUID `json:"id"` + Status JobStatus `json:"status"` + Progress int32 `json:"progress"` + ProgressUpdateDate time.Time `json:"progress_update_date"` + FileKey *string `json:"file_key"` + FileExpiry *time.Time `json:"file_expiry"` } func (q *Queries) UpdateReportDownloadJob(ctx context.Context, arg UpdateReportDownloadJobParams) error { diff --git a/api/internal/db/submittal.sql_gen.go b/api/internal/db/submittal.sql_gen.go index e2a7f51f..eff482e2 100644 --- a/api/internal/db/submittal.sql_gen.go +++ b/api/internal/db/submittal.sql_gen.go @@ -7,10 +7,9 @@ package db import ( "context" + "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const listAlertConfigSubmittals = `-- name: ListAlertConfigSubmittals :many @@ -206,10 +205,10 @@ where id = $1 ` type UpdateSubmittalParams struct { - ID uuid.UUID `json:"id"` - SubmittalStatusID 
pgtype.UUID `json:"submittal_status_id"` - CompletionDate pgtype.Timestamptz `json:"completion_date"` - WarningSent bool `json:"warning_sent"` + ID uuid.UUID `json:"id"` + SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` + CompletionDate *time.Time `json:"completion_date"` + WarningSent bool `json:"warning_sent"` } func (q *Queries) UpdateSubmittal(ctx context.Context, arg UpdateSubmittalParams) error { @@ -231,7 +230,7 @@ and completion_date is null and now() > due_date ` -func (q *Queries) VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID pgtype.UUID) error { +func (q *Queries) VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID *uuid.UUID) error { _, err := q.db.Exec(ctx, verifyMissingAlertConfigSubmittals, alertConfigID) return err } diff --git a/api/internal/db/timeseries.sql_gen.go b/api/internal/db/timeseries.sql_gen.go index 49f001a2..cee569b6 100644 --- a/api/internal/db/timeseries.sql_gen.go +++ b/api/internal/db/timeseries.sql_gen.go @@ -9,8 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createTimeseries = `-- name: CreateTimeseries :one @@ -20,7 +18,7 @@ returning id, instrument_id, slug, name, parameter_id, unit_id, type ` type CreateTimeseriesParams struct { - InstrumentID pgtype.UUID `json:"instrument_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` Name string `json:"name"` ParameterID uuid.UUID `json:"parameter_id"` UnitID uuid.UUID `json:"unit_id"` @@ -29,7 +27,7 @@ type CreateTimeseriesParams struct { type CreateTimeseriesRow struct { ID uuid.UUID `json:"id"` - InstrumentID pgtype.UUID `json:"instrument_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` Slug string `json:"slug"` Name string `json:"name"` ParameterID uuid.UUID `json:"parameter_id"` @@ -189,7 +187,7 @@ INNER JOIN plot_configuration_timeseries_trace pct ON pct.timeseries_id = t.id WHERE pct.plot_configuration_id = $1 ` -func (q *Queries) 
ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID pgtype.UUID) ([]VTimeseries, error) { +func (q *Queries) ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) { rows, err := q.db.Query(ctx, listPlotConfigTimeseries, plotConfigurationID) if err != nil { return nil, err @@ -308,11 +306,11 @@ returning id ` type UpdateTimeseriesParams struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - InstrumentID pgtype.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` } func (q *Queries) UpdateTimeseries(ctx context.Context, arg UpdateTimeseriesParams) (uuid.UUID, error) { diff --git a/api/internal/db/timeseries_calculated.sql_gen.go b/api/internal/db/timeseries_calculated.sql_gen.go index 3c944d74..8fc927e5 100644 --- a/api/internal/db/timeseries_calculated.sql_gen.go +++ b/api/internal/db/timeseries_calculated.sql_gen.go @@ -9,8 +9,6 @@ import ( "context" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const createCalculatedTimeseries = `-- name: CreateCalculatedTimeseries :one @@ -26,10 +24,10 @@ RETURNING id ` type CreateCalculatedTimeseriesParams struct { - InstrumentID pgtype.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Name string `json:"name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Name string `json:"name"` } func (q *Queries) CreateCalculatedTimeseries(ctx context.Context, arg CreateCalculatedTimeseriesParams) (uuid.UUID, error) { @@ -78,15 +76,15 @@ on conflict (id) do update set ` type CreateOrUpdateCalculatedTimeseriesParams struct { - ID uuid.UUID 
`json:"id"` - InstrumentID pgtype.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Name string `json:"name"` - InstrumentID_2 pgtype.UUID `json:"instrument_id_2"` - ParameterID_2 uuid.UUID `json:"parameter_id_2"` - UnitID_2 uuid.UUID `json:"unit_id_2"` - Rawname string `json:"rawname"` + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Name string `json:"name"` + InstrumentID_2 *uuid.UUID `json:"instrument_id_2"` + ParameterID_2 uuid.UUID `json:"parameter_id_2"` + UnitID_2 uuid.UUID `json:"unit_id_2"` + Rawname string `json:"rawname"` } func (q *Queries) CreateOrUpdateCalculatedTimeseries(ctx context.Context, arg CreateOrUpdateCalculatedTimeseriesParams) error { @@ -149,13 +147,13 @@ type ListCalculatedTimeseriesParams struct { } type ListCalculatedTimeseriesRow struct { - ID uuid.UUID `json:"id"` - InstrumentID pgtype.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Slug string `json:"slug"` - FormulaName string `json:"formula_name"` - Formula string `json:"formula"` + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Slug string `json:"slug"` + FormulaName string `json:"formula_name"` + Formula string `json:"formula"` } func (q *Queries) ListCalculatedTimeseries(ctx context.Context, arg ListCalculatedTimeseriesParams) ([]ListCalculatedTimeseriesRow, error) { diff --git a/api/internal/db/timeseries_cwms.sql_gen.go b/api/internal/db/timeseries_cwms.sql_gen.go index 399075c2..468caa47 100644 --- a/api/internal/db/timeseries_cwms.sql_gen.go +++ b/api/internal/db/timeseries_cwms.sql_gen.go @@ -10,8 +10,6 @@ import ( "time" "github.com/google/uuid" - uuid "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) const 
createTimeseriesCwms = `-- name: CreateTimeseriesCwms :exec @@ -20,11 +18,11 @@ insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, ` type CreateTimeseriesCwmsParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - CwmsTimeseriesID string `json:"cwms_timeseries_id"` - CwmsOfficeID string `json:"cwms_office_id"` - CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` - CwmsExtentLatestTime pgtype.Timestamptz `json:"cwms_extent_latest_time"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` } func (q *Queries) CreateTimeseriesCwms(ctx context.Context, arg CreateTimeseriesCwmsParams) error { @@ -121,11 +119,11 @@ where timeseries_id=$1 ` type UpdateTimeseriesCwmsParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - CwmsTimeseriesID string `json:"cwms_timeseries_id"` - CwmsOfficeID string `json:"cwms_office_id"` - CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` - CwmsExtentLatestTime pgtype.Timestamptz `json:"cwms_extent_latest_time"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` } func (q *Queries) UpdateTimeseriesCwms(ctx context.Context, arg UpdateTimeseriesCwmsParams) error { diff --git a/api/internal/handler/handlerv2.go b/api/internal/handler/handlerv2.go index e7982b1f..6eae2c25 100644 --- a/api/internal/handler/handlerv2.go +++ b/api/internal/handler/handlerv2.go @@ -67,7 +67,6 @@ func NewApi(cfg *config.ApiConfig) *ApiHandler { q := db.Queries() ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig) - dbStore := 
service.NewDatabaseStore( profileService := service.NewProfileService(db, q) projectRoleService := service.NewProjectRoleService(db, q) dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) diff --git a/api/internal/model/common.go b/api/internal/model/common.go index d18a2410..3a3bc0f0 100644 --- a/api/internal/model/common.go +++ b/api/internal/model/common.go @@ -43,6 +43,85 @@ type IDSlugName struct { Name string `json:"name,omitempty"` } +type InstrumentIDName struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` +} + +type DataloggerTable struct { + ID uuid.UUID `json:"id"` + TableName string `json:"table_name"` +} + +type DataloggerEquivalencyTableField struct { + ID uuid.UUID `json:"id"` + FieldName string `json:"field_name"` + DisplayName string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +type DomainGroupOpt struct { + ID uuid.UUID `json:"id" db:"id"` + Value string `json:"value" db:"value"` + Description *string `json:"description" db:"description"` +} + +type AlertEmailSubscription struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"email_id"` + MuteNotify bool `json:"mute_notify"` +} + +type AlertCheckMeasurementSubmittalAffectedTimeseries struct { + InstrumentName string `json:"instrument_name"` + TimeseriesName string `json:"timeseries_name"` + Status string `json:"status"` +} + +type IpiMeasurement struct { + SegmentID int `json:"segment_id"` + Tilt *float64 `json:"tilt"` + IncDev *float64 `json:"inc_dev"` + CumDev *float64 `json:"cum_dev"` + Temp *float64 `json:"temp"` + Elelvation *float64 `json:"elevation"` +} + +type SaaMeasurement struct { + SegmentID int `json:"segment_id"` + X *float64 `json:"x"` + Y *float64 `json:"y"` + Z *float64 `json:"z"` + Temp *float64 `json:"temp"` + XIncrement *float64 `json:"x_increment"` + YIncrement 
*float64 `json:"y_increment"` + ZIncrement *float64 `json:"z_increment"` + TempIncrement *float64 `json:"temp_increment"` + XCumDev *float64 `json:"x_cum_dev"` + YCumDev *float64 `json:"y_cum_dev"` + ZCumDev *float64 `json:"z_cum_dev"` + TempCumDev *float64 `json:"temp_cum_dev"` + Elevation *float64 `json:"elevation"` +} + +type ReportConfigGlobalOverrides struct { + DateRange TextOption `json:"date_range"` + ShowMasked ToggleOption `json:"show_masked"` + ShowNonvalidated ToggleOption `json:"show_nonvalidated"` +} + +type TextOption struct { + Enabled bool `json:"enabled"` + Value string `json:"value"` +} + +type ToggleOption struct { + Enabled bool `json:"enabled"` + Value bool `json:"value"` +} + type IDSlugCollection struct { Items []IDSlug `json:"items"` } diff --git a/api/internal/model/datalogger.go b/api/internal/model/datalogger.go index a1c6fa1a..fd153f03 100644 --- a/api/internal/model/datalogger.go +++ b/api/internal/model/datalogger.go @@ -38,10 +38,10 @@ type DataloggerWithKey struct { Key string `json:"key"` } -type DataloggerTable struct { - ID uuid.UUID `json:"id" db:"id"` - TableName string `json:"table_name" db:"table_name"` -} +// type DataloggerTable struct { +// ID uuid.UUID `json:"id" db:"id"` +// TableName string `json:"table_name" db:"table_name"` +// } type DataloggerTablePreview struct { DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` diff --git a/api/internal/model/instrument.go b/api/internal/model/instrument.go index 66e00afc..361a3bb7 100644 --- a/api/internal/model/instrument.go +++ b/api/internal/model/instrument.go @@ -2,15 +2,11 @@ package model import ( "context" - "database/sql/driver" - "fmt" "time" "github.com/google/uuid" - - "github.com/paulmach/orb" - "github.com/paulmach/orb/encoding/wkb" - "github.com/paulmach/orb/geojson" + "github.com/twpayne/go-geom/encoding/geojson" + "github.com/twpayne/go-geom/encoding/wkb" ) // Instrument is an instrument @@ -29,9 +25,9 @@ type Instrument struct { 
TypeID uuid.UUID `json:"type_id" db:"type_id"` Type string `json:"type"` Icon *string `json:"icon" db:"icon"` - Geometry Geometry `json:"geometry,omitempty"` - Station *int `json:"station"` - StationOffset *int `json:"offset" db:"station_offset"` + Geometry db.Geometry `json:"geometry,omitempty"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"offset" db:"station_offset"` Projects dbJSONSlice[IDSlugName] `json:"projects" db:"projects"` NIDID *string `json:"nid_id" db:"nid_id"` USGSID *string `json:"usgs_id" db:"usgs_id"` @@ -65,43 +61,10 @@ type InstrumentsProjectCount struct { ProjectCount int `json:"project_count" db:"project_count"` } -type Geometry geojson.Geometry - -func (g Geometry) Value() (driver.Value, error) { - og := geojson.Geometry(g) - return wkb.Value(og.Geometry()), nil -} - -func (g *Geometry) Scan(src interface{}) error { - var p orb.Point - if err := wkb.Scanner(&p).Scan(src); err != nil { - return err - } - *g = Geometry(*geojson.NewGeometry(p)) - return nil -} - -func (g Geometry) MarshalJSON() ([]byte, error) { - gj := geojson.Geometry(g) - return gj.MarshalJSON() -} - -func (g *Geometry) UnmarshalJSON(data []byte) error { - gj, err := geojson.UnmarshalGeometry(data) - if err != nil { - return err - } - if gj == nil { - return fmt.Errorf("unable to unmarshal: geojson geometry is nil") - } - *g = Geometry(*gj) - return nil -} - -type InstrumentIDName struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` -} +// type InstrumentIDName struct { +// ID uuid.UUID `json:"id"` +// Name string `json:"name"` +// } const listInstrumentsSQL = ` SELECT diff --git a/api/internal/model/report_config.go b/api/internal/model/report_config.go index af5e38c5..f49f9f0a 100644 --- a/api/internal/model/report_config.go +++ b/api/internal/model/report_config.go @@ -22,22 +22,6 @@ type ReportConfig struct { AuditInfo } -type ReportConfigGlobalOverrides struct { - DateRange TextOption `json:"date_range" db:"date_range"` - ShowMasked 
ToggleOption `json:"show_masked" db:"show_masked"` - ShowNonvalidated ToggleOption `json:"show_nonvalidated" db:"show_nonvalidated"` -} - -type TextOption struct { - Enabled bool `json:"enabled" db:"enabled"` - Value string `json:"value" db:"value"` -} - -type ToggleOption struct { - Enabled bool `json:"enabled" db:"enabled"` - Value bool `json:"value" db:"value"` -} - type ReportDownloadJob struct { ID uuid.UUID `json:"id" db:"id"` ReportConfigID uuid.UUID `json:"report_config_id" db:"report_config_id"` diff --git a/api/internal/model/timeseries.go b/api/internal/model/timeseries.go index 95e98a4b..8f2c00b5 100644 --- a/api/internal/model/timeseries.go +++ b/api/internal/model/timeseries.go @@ -61,8 +61,8 @@ func (c *TimeseriesCollectionItems) UnmarshalJSON(b []byte) error { } var ( - unknownParameterID = uuid.MustParse("2b7f96e1-820f-4f61-ba8f-861640af6232") - unknownUnitID = uuid.MustParse("4a999277-4cf5-4282-93ce-23b33c65e2c8") + UnknownParameterID = uuid.MustParse("2b7f96e1-820f-4f61-ba8f-861640af6232") + UnknownUnitID = uuid.MustParse("4a999277-4cf5-4282-93ce-23b33c65e2c8") ) const getStoredTimeseriesExists = ` @@ -179,10 +179,10 @@ const createTimeseries = ` func (q *Queries) CreateTimeseries(ctx context.Context, ts Timeseries) (Timeseries, error) { if ts.ParameterID == uuid.Nil { - ts.ParameterID = unknownParameterID + ts.ParameterID = UnknownParameterID } if ts.UnitID == uuid.Nil { - ts.UnitID = unknownUnitID + ts.UnitID = UnknownUnitID } if ts.Type == "" { ts.Type = StandardTimeseriesType @@ -200,10 +200,10 @@ const updateTimeseries = ` func (q *Queries) UpdateTimeseries(ctx context.Context, ts Timeseries) (uuid.UUID, error) { if ts.ParameterID == uuid.Nil { - ts.ParameterID = unknownParameterID + ts.ParameterID = UnknownParameterID } if ts.UnitID == uuid.Nil { - ts.UnitID = unknownUnitID + ts.UnitID = UnknownUnitID } var tID uuid.UUID err := q.db.GetContext(ctx, &tID, updateTimeseries, ts.ID, ts.Name, ts.InstrumentID, ts.ParameterID, ts.UnitID) diff 
--git a/api/internal/model/timeseries_calculated.go b/api/internal/model/timeseries_calculated.go index c97318dd..3344117d 100644 --- a/api/internal/model/timeseries_calculated.go +++ b/api/internal/model/timeseries_calculated.go @@ -55,10 +55,10 @@ const createCalculatedTimeseries = ` func (q *Queries) CreateCalculatedTimeseries(ctx context.Context, cc CalculatedTimeseries) (uuid.UUID, error) { if cc.ParameterID == uuid.Nil { - cc.ParameterID = unknownParameterID + cc.ParameterID = UnknownParameterID } if cc.UnitID == uuid.Nil { - cc.UnitID = unknownUnitID + cc.UnitID = UnknownUnitID } var tsID uuid.UUID err := q.db.GetContext(ctx, &tsID, createCalculatedTimeseries, &cc.InstrumentID, &cc.ParameterID, &cc.UnitID, &cc.FormulaName) diff --git a/api/internal/servicev2/alert.go b/api/internal/servicev2/alert.go index 6c318ceb..355e3954 100644 --- a/api/internal/servicev2/alert.go +++ b/api/internal/servicev2/alert.go @@ -4,93 +4,92 @@ import ( "context" "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type AlertService interface { CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error - GetAllAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]model.Alert, error) - GetAllAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.Alert, error) - GetAllAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]model.Alert, error) - GetOneAlertForProfile(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) - DoAlertRead(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) - DoAlertUnread(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) + ListAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]db.VAlert, error) + ListAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]db.VAlert, error) + ListAlertsForProfile(ctx 
context.Context, profileID uuid.UUID) ([]db.VAlert, error) + GetAlert(ctx context.Context, arg db.GetAlertParams) (db.GetAlertRow, error) + DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) (db.GetAlertRow, error) + DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) (db.GetAlertRow, error) +} + +type alertService struct { + db *Database + *db.Queries +} + +func NewAlertService(db *Database, q *db.Queries) *alertService { + return &alertService{db, q} } // Create creates one or more new alerts -func (s dbStore) CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error { +func (s alertService) CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error { var err error s.Queries.CreateAlerts(ctx, alertConfigIDs).Exec(func(_ int, e error) { - err = e + if err != nil { + err = e + return + } }) return err } // DoAlertRead marks an alert as read for a profile -func (s dbStore) DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) (model.Alert, error) { +func (s alertService) DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) (db.GetAlertRow, error) { + var a db.GetAlertRow tx, err := s.db.Begin(ctx) if err != nil { - return model.Alert{}, err + return a, err } defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) if err := qtx.CreateAlertRead(ctx, db.CreateAlertReadParams{ ProfileID: profileID, AlertID: alertID, }); err != nil { - return model.Alert{}, err + return a, err } - b, err := qtx.GetAlert(ctx, db.GetAlertParams{ + a, err = qtx.GetAlert(ctx, db.GetAlertParams{ ProfileID: profileID, ID: alertID, }) if err != nil { - return model.Alert{}, err + return a, err } if err := tx.Commit(ctx); err != nil { - return model.Alert{}, err + return a, err } - - return model.Alert{ - Read: &b.Read, - ID: b.ID, - AlertConfigID: b.AlertConfigID, - ProjectID: b.ProjectID, - ProjectName: b.ProjectName, - Name: b.Name, - Body: b.Body, - CreateDate: b.CreateDate, - Instruments: b.Instruments, - }, nil + return a, nil } // 
DoAlertUnread marks an alert as unread for a profile -func (s dbStore) DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) (model.Alert, error) { +func (s alertService) DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) (db.GetAlertRow, error) { + var a db.GetAlertRow tx, err := s.db.Begin(ctx) if err != nil { - return model.Alert{}, err + return a, err } defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) if err := qtx.DeleteAlertRead(ctx, db.DeleteAlertReadParams{ ProfileID: profileID, AlertID: alertID, }); err != nil { - return model.Alert{}, err + return a, err } - a, err := qtx.GetAlert(ctx, db.GetAlertParams{ + a, err = qtx.GetAlert(ctx, db.GetAlertParams{ ProfileID: profileID, ID: alertID, }) if err != nil { - return model.Alert{}, err + return a, err } if err := tx.Commit(ctx); err != nil { - return model.Alert{}, err + return a, err } - return a, nil } diff --git a/api/internal/servicev2/alert_check.go b/api/internal/servicev2/alert_check.go index 0f618a85..6aa86770 100644 --- a/api/internal/servicev2/alert_check.go +++ b/api/internal/servicev2/alert_check.go @@ -9,11 +9,26 @@ import ( "time" "github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/email" "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" ) +type AlertCheckService interface { + DoAlertChecks(ctx context.Context) error +} + +type alertCheckService struct { + db *Database + *db.Queries + cfg *config.AlertCheckConfig +} + +func NewAlertCheckService(db *Database, q *db.Queries, cfg *config.AlertCheckConfig) *alertCheckService { + return &alertCheckService{db, q, cfg} +} + var ( GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") @@ -29,49 
+44,33 @@ const ( reminder = "Reminder" ) -type AlertCheckService interface { - DoAlertChecks(ctx context.Context) error -} +type alertConfigMap map[uuid.UUID]db.VAlertConfig + +type submittalMap map[uuid.UUID]db.VSubmittal type alertConfigChecker[T alertChecker] interface { - GetAlertConfig() model.AlertConfig - SetAlertConfig(model.AlertConfig) + GetAlertConfig() db.VAlertConfig + SetAlertConfig(ac db.VAlertConfig) GetChecks() []T - SetChecks([]T) - DoEmail(string, config.AlertCheckConfig) error + SetChecks(checks []T) + DoEmail(content string, cfg config.AlertCheckConfig) error } type alertChecker interface { GetShouldWarn() bool GetShouldAlert() bool GetShouldRemind() bool - GetSubmittal() model.Submittal - SetSubmittal(model.Submittal) -} - -type alertCheckService struct { - db *model.Database - *model.Queries - cfg *config.AlertCheckConfig -} - -func NewAlertCheckService(db *model.Database, q *model.Queries, cfg *config.AlertCheckConfig) *alertCheckService { - return &alertCheckService{db, q, cfg} + GetSubmittal() *db.VSubmittal + SetSubmittal(sub db.VSubmittal) } -func (s alertCheckService) DoAlertChecks(ctx context.Context) error { - if s.cfg == nil { - return fmt.Errorf("missing config") - } - - tx, err := s.db.BeginTxx(ctx, nil) +func (s alertService) DoAlertChecks(ctx context.Context, cfg config.AlertCheckConfig) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - subs, err := qtx.ListUnverifiedMissingSubmittals(ctx) if err != nil { return err @@ -85,25 +84,25 @@ func (s alertCheckService) DoAlertChecks(ctx context.Context) error { return nil } - subMap := make(map[uuid.UUID]model.Submittal) + subMap := make(map[uuid.UUID]db.VSubmittal) for _, s := range subs { subMap[s.ID] = s } - acMap := make(map[uuid.UUID]model.AlertConfig) + acMap := make(map[uuid.UUID]db.VAlertConfig) for _, a := range acs { acMap[a.ID] = a } errs := make([]error, 0) - if err := 
checkMeasurements(ctx, qtx, subMap, acMap, *s.cfg); err != nil { + if err := checkMeasurements(ctx, qtx, subMap, acMap, cfg); err != nil { errs = append(errs, err) } - if err := checkEvaluations(ctx, qtx, subMap, acMap, *s.cfg); err != nil { + if err := checkEvaluations(ctx, qtx, subMap, acMap, cfg); err != nil { errs = append(errs, err) } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { errs = append(errs, err) } @@ -114,29 +113,38 @@ func (s alertCheckService) DoAlertChecks(ctx context.Context) error { return nil } -func checkEvaluations(ctx context.Context, q *model.Queries, subMap model.SubmittalMap, acMap model.AlertConfigMap, cfg config.AlertCheckConfig) error { - accs := make([]*model.AlertConfigEvaluationCheck, 0) - ecs, err := q.GetAllIncompleteEvaluationSubmittals(ctx) +func checkEvaluations(ctx context.Context, q *db.Queries, subMap submittalMap, acMap alertConfigMap, cfg config.AlertCheckConfig) error { + accs := make([]*AlertConfigEvaluationCheck, 0) + ecs, err := q.ListIncompleteEvaluationSubmittals(ctx) if err != nil { return err } - ecMap := make(map[uuid.UUID][]*model.EvaluationCheck) + ecMap := make(map[uuid.UUID][]*EvaluationCheck) for k := range acMap { - ecMap[k] = make([]*model.EvaluationCheck, 0) + ecMap[k] = make([]*EvaluationCheck, 0) } for idx := range ecs { + ck := ecs[idx] + check := EvaluationCheck{ + AlertCheck: AlertCheck{ + AlertConfigID: ck.AlertConfigID, + SubmittalID: ck.SubmittalID, + ShouldWarn: ck.ShouldWarn, + ShouldAlert: ck.ShouldAlert, + ShouldRemind: ck.ShouldRemind, + }} if sub, ok := subMap[ecs[idx].SubmittalID]; ok { - ecs[idx].Submittal = sub - ecMap[ecs[idx].AlertConfigID] = append(ecMap[ecs[idx].AlertConfigID], ecs[idx]) + ecs[idx].Submittal = &sub + ecMap[ecs[idx].AlertConfigID] = append(ecMap[ecs[idx].AlertConfigID], &check) } } for k, v := range acMap { if v.AlertTypeID != EvaluationSubmittalAlertTypeID { continue } - acc := model.AlertConfigEvaluationCheck{ - AlertConfig: v, + acc := 
AlertConfigEvaluationCheck{ + AlertConfig: AlertConfig(v), AlertChecks: ecMap[k], } accs = append(accs, &acc) @@ -151,22 +159,31 @@ func checkEvaluations(ctx context.Context, q *model.Queries, subMap model.Submit return nil } -func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.SubmittalMap, acMap model.AlertConfigMap, cfg config.AlertCheckConfig) error { - accs := make([]*model.AlertConfigMeasurementCheck, 0) - mcs, err := q.GetAllIncompleteMeasurementSubmittals(ctx) +func checkMeasurements(ctx context.Context, q *db.Queries, subMap submittalMap, acMap alertConfigMap, cfg config.AlertCheckConfig) error { + accs := make([]*AlertConfigMeasurementCheck, 0) + mcs, err := q.ListIncompleteMeasurementSubmittals(ctx) if err != nil { return err } - mcMap := make(map[uuid.UUID][]*model.MeasurementCheck) + mcMap := make(map[uuid.UUID][]*MeasurementCheck) for k := range acMap { - mcMap[k] = make([]*model.MeasurementCheck, 0) + mcMap[k] = make([]*MeasurementCheck, 0) } for idx := range mcs { if sub, ok := subMap[mcs[idx].SubmittalID]; ok { - mcs[idx].Submittal = sub - mcMap[mcs[idx].AlertConfigID] = append(mcMap[mcs[idx].AlertConfigID], mcs[idx]) + ck := mcs[idx] + check := MeasurementCheck{ + AlertCheck: AlertCheck{ + AlertConfigID: ck.AlertConfigID, + SubmittalID: ck.SubmittalID, + ShouldWarn: ck.ShouldWarn, + ShouldAlert: ck.ShouldAlert, + ShouldRemind: ck.ShouldRemind, + }} + mcs[idx].Submittal = &sub + mcMap[mcs[idx].AlertConfigID] = append(mcMap[mcs[idx].AlertConfigID], &check) } } @@ -174,8 +191,8 @@ func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.Submi if v.AlertTypeID != MeasurementSubmittalAlertTypeID { continue } - acc := model.AlertConfigMeasurementCheck{ - AlertConfig: v, + acc := AlertConfigMeasurementCheck{ + AlertConfig: AlertConfig(v), AlertChecks: mcMap[k], } accs = append(accs, &acc) @@ -189,21 +206,35 @@ func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.Submi return nil } -func 
updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *model.Queries, accs []PT) error { +func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT) error { for _, acc := range accs { ac := acc.GetAlertConfig() - if err := q.UpdateAlertConfigLastReminded(ctx, ac); err != nil { + if err := q.UpdateAlertConfigLastReminded(ctx, db.UpdateAlertConfigLastRemindedParams{ + ID: ac.ID, + LastReminded: ac.LastReminded, + }); err != nil { return err } checks := acc.GetChecks() for _, c := range checks { sub := c.GetSubmittal() - if err := q.UpdateSubmittalCompletionDateOrWarningSent(ctx, sub); err != nil { + if sub == nil { + continue + } + if err := q.UpdateSubmittalCompletionDateOrWarningSent(ctx, db.UpdateSubmittalCompletionDateOrWarningSentParams{ + ID: sub.ID, + SubmittalStatusID: &sub.SubmittalStatusID, + CompletionDate: sub.CompletionDate, + WarningSent: sub.WarningSent, + }); err != nil { return err } } if ac.CreateNextSubmittalFrom != nil { - if err := q.CreateNextSubmittalFromNewAlertConfigDate(ctx, ac); err != nil { + if err := q.CreateNextSubmittalFromNewAlertConfigDate(ctx, db.CreateNextSubmittalFromNewAlertConfigDateParams{ + ID: ac.ID, + Date: *ac.CreateNextSubmittalFrom, + }); err != nil { return err } } @@ -224,7 +255,7 @@ func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx conte // TODO: smtp.SendMail esablishes a new connection for each batch of emails sent. I would be better to aggregate // the contents of each email, then create a connection pool to reuse and send all emails at once, with any errors wrapped and returned // p.s. 
Dear future me/someone else: I'm sorry -func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *model.Queries, accs []PT, cfg config.AlertCheckConfig) error { +func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT, cfg config.AlertCheckConfig) error { defer util.Timer()() mu := &sync.Mutex{} @@ -307,7 +338,11 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, acReminder = true } - c.SetSubmittal(sub) + if sub == nil { + continue + } + + c.SetSubmittal(*sub) checks[j] = c } @@ -360,3 +395,157 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, return nil } + +type AlertCheck struct { + AlertConfigID uuid.UUID + SubmittalID uuid.UUID + ShouldWarn bool + ShouldAlert bool + ShouldRemind bool + Submittal *db.VSubmittal +} + +func (ck AlertCheck) GetShouldWarn() bool { + return ck.ShouldWarn +} + +func (ck AlertCheck) GetShouldAlert() bool { + return ck.ShouldAlert +} + +func (ck AlertCheck) GetShouldRemind() bool { + return ck.ShouldRemind +} + +func (ck AlertCheck) GetSubmittal() *db.VSubmittal { + return ck.Submittal +} + +func (ck *AlertCheck) SetSubmittal(sub db.VSubmittal) { + ck.Submittal = &sub +} + +type AlertConfig db.VAlertConfig + +func (a *AlertConfig) GetToAddresses() []string { + emails := make([]string, len(a.AlertEmailSubscriptions)) + for idx := range a.AlertEmailSubscriptions { + emails[idx] = a.AlertEmailSubscriptions[idx].Email + } + return emails +} + +type AlertConfigEvaluationCheck struct { + AlertConfig + AlertChecks []*EvaluationCheck +} + +type EvaluationCheck struct { + AlertCheck +} + +func (a AlertConfigEvaluationCheck) GetAlertConfig() db.VAlertConfig { + return db.VAlertConfig(a.AlertConfig) +} + +func (a *AlertConfigEvaluationCheck) SetAlertConfig(ac db.VAlertConfig) { + a.AlertConfig = AlertConfig(ac) +} + +func (a AlertConfigEvaluationCheck) GetChecks() []*EvaluationCheck { + return 
a.AlertChecks +} + +func (a *AlertConfigEvaluationCheck) SetChecks(ec []*EvaluationCheck) { + a.AlertChecks = ec +} + +func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg config.AlertCheckConfig) error { + if emailType == "" { + return fmt.Errorf("must provide emailType") + } + preformatted := email.EmailContent{ + TextSubject: "-- DO NOT REPLY -- MIDAS " + emailType + ": Evaluation Submittal", + TextBody: "The following " + emailType + " has been triggered:\r\n\r\n" + + "Project: {{.AlertConfig.ProjectName}}\r\n" + + "Alert Type: Evaluation Submittal\r\n" + + "Alert Name: \"{{.AlertConfig.Name}}\"\r\n" + + "Description: \"{{.AlertConfig.Body}}\"\r\n" + + "Expected Evaluation Submittals:\r\n" + + "{{range .AlertChecks}}{{if or .ShouldAlert .ShouldWarn}}" + + "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + + "{{if .ShouldAlert}} (missing) {{else if .ShouldWarn}} (warning) {{end}}\r\n{{end}}{{end}}", + } + templContent, err := email.CreateEmailTemplateContent(preformatted) + if err != nil { + return err + } + content, err := email.FormatAlertConfigTemplates(templContent, acc) + if err != nil { + return err + } + content.To = acc.AlertConfig.GetToAddresses() + if err := email.ConstructAndSendEmail(content, cfg); err != nil { + return err + } + return nil +} + +type AlertConfigMeasurementCheck struct { + AlertConfig AlertConfig + AlertChecks []*MeasurementCheck +} + +type MeasurementCheck struct { + AlertCheck + AffectedTimeseries []db.AlertCheckMeasurementSubmittalAffectedTimeseries +} + +func (a AlertConfigMeasurementCheck) GetAlertConfig() db.VAlertConfig { + return db.VAlertConfig(a.AlertConfig) +} + +func (a *AlertConfigMeasurementCheck) SetAlertConfig(ac db.VAlertConfig) { + a.AlertConfig = AlertConfig(ac) +} + +func (a AlertConfigMeasurementCheck) GetChecks() []*MeasurementCheck { + return a.AlertChecks +} + +func (a *AlertConfigMeasurementCheck) SetChecks(mc 
[]*MeasurementCheck) { + a.AlertChecks = mc +} + +func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg config.AlertCheckConfig) error { + if emailType == "" { + return fmt.Errorf("must provide emailType") + } + preformatted := email.EmailContent{ + TextSubject: "-- DO NOT REPLY -- MIDAS " + emailType + ": Timeseries Measurement Submittal", + TextBody: "The following " + emailType + " has been triggered:\r\n\r\n" + + "Project: {{.AlertConfig.ProjectName}}\r\n" + + "Alert Type: Measurement Submittal\r\n" + + "Alert Name: \"{{.AlertConfig.Name}}\"\r\n" + + "Description: \"{{.AlertConfig.Body}}\"\r\n" + + "Expected Measurement Submittals:\r\n" + + "{{range .AlertChecks}}" + + "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + + "{{range .AffectedTimeseries}}" + + "\t\t• {{.InstrumentName}}: {{.TimeseriesName}} ({{.Status}})\r\n" + + "{{end}}\r\n{{end}}", + } + templContent, err := email.CreateEmailTemplateContent(preformatted) + if err != nil { + return err + } + content, err := email.FormatAlertConfigTemplates(templContent, ms) + if err != nil { + return err + } + content.To = ms.AlertConfig.GetToAddresses() + if err := email.ConstructAndSendEmail(content, cfg); err != nil { + return err + } + return nil +} diff --git a/api/internal/servicev2/alert_config.go b/api/internal/servicev2/alert_config.go index ae984b7b..01965a09 100644 --- a/api/internal/servicev2/alert_config.go +++ b/api/internal/servicev2/alert_config.go @@ -3,37 +3,38 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type AlertConfigService interface { - GetAllAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]model.AlertConfig, error) - GetAllAlertConfigsForProjectAndAlertType(ctx context.Context, projectID, alertTypeID uuid.UUID) ([]model.AlertConfig, error) 
- GetAllAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.AlertConfig, error) - GetOneAlertConfig(ctx context.Context, alertConfigID uuid.UUID) (model.AlertConfig, error) - CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (model.AlertConfig, error) - UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (model.AlertConfig, error) + ListAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]db.VAlertConfig, error) + ListAlertConfigsForProjectAlertType(ctx context.Context, projectID, alertTypeID uuid.UUID) ([]db.VAlertConfig, error) + ListAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]db.VAlertConfig, error) + GetAlertConfig(ctx context.Context, alertConfigID uuid.UUID) (db.VAlertConfig, error) + CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (db.VAlertConfig, error) + UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (db.VAlertConfig, error) DeleteAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error } type alertConfigService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewAlertConfigService(db *model.Database, q *model.Queries) *alertConfigService { +func NewAlertConfigService(db *Database, q *db.Queries) *alertConfigService { return &alertConfigService{db, q} } // CreateAlertConfig creates one new alert configuration -func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) if ac.RemindInterval == "" { ac.RemindInterval = "PT0" @@ -44,13 +45,28 @@ func (s 
alertConfigService) CreateAlertConfig(ctx context.Context, ac model.Aler qtx := s.WithTx(tx) - acID, err := qtx.CreateAlertConfig(ctx, ac) + acID, err := qtx.CreateAlertConfig(ctx, db.CreateAlertConfigParams{ + ProjectID: ac.ProjectID, + Name: ac.Name, + Body: ac.Body, + AlertTypeID: ac.AlertTypeID, + StartDate: ac.StartDate, + ScheduleInterval: ac.ScheduleInterval, + MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, + RemindInterval: ac.RemindInterval, + WarningInterval: ac.WarningInterval, + Creator: ac.CreatorID, + CreateDate: ac.CreateDate, + }) if err != nil { return a, err } for _, aci := range ac.Instruments { - if err := qtx.AssignInstrumentToAlertConfig(ctx, acID, aci.InstrumentID); err != nil { + if err := qtx.AssignInstrumentToAlertConfig(ctx, db.AssignInstrumentToAlertConfigParams{ + AlertConfigID: acID, + InstrumentID: aci.InstrumentID, + }); err != nil { return a, err } } @@ -63,12 +79,12 @@ func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.Aler return a, err } - acNew, err := qtx.GetOneAlertConfig(ctx, acID) + acNew, err := qtx.GetAlertConfig(ctx, acID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } @@ -76,12 +92,13 @@ func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.Aler } // UpdateAlertConfig updates an alert config -func (s alertConfigService) UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (model.AlertConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s alertConfigService) UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { - return model.AlertConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) if ac.RemindInterval == "" { ac.RemindInterval = "PT0" @@ -92,39 +109,54 @@ func (s alertConfigService) 
UpdateAlertConfig(ctx context.Context, alertConfigID qtx := s.WithTx(tx) - if err := qtx.UpdateAlertConfig(ctx, ac); err != nil { - return model.AlertConfig{}, err + if err := qtx.UpdateAlertConfig(ctx, db.UpdateAlertConfigParams{ + ID: ac.ID, + ProjectID: ac.ProjectID, + Name: ac.Name, + Body: ac.Body, + StartDate: ac.StartDate, + ScheduleInterval: ac.ScheduleInterval, + MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, + RemindInterval: ac.RemindInterval, + WarningInterval: ac.WarningInterval, + Updater: ac.UpdaterID, + UpdateDate: ac.UpdateDate, + }); err != nil { + return a, err } if err := qtx.UnassignAllInstrumentsFromAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + return a, err } for _, aci := range ac.Instruments { - if err := qtx.AssignInstrumentToAlertConfig(ctx, alertConfigID, aci.InstrumentID); err != nil { - return model.AlertConfig{}, err + if err := qtx.AssignInstrumentToAlertConfig(ctx, db.AssignInstrumentToAlertConfigParams{ + AlertConfigID: alertConfigID, + InstrumentID: aci.InstrumentID, + }); err != nil { + return a, err } } - if err := qtx.UnsubscribeAllEmailsFromAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + if err := qtx.DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx, alertConfigID); err != nil { + return a, err } if err := registerAndSubscribe(ctx, qtx, alertConfigID, ac.AlertEmailSubscriptions); err != nil { - return model.AlertConfig{}, err + return a, err } - if err := qtx.UpdateFutureSubmittalForAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + if _, err := qtx.UpdateFutureSubmittalForAlertConfig(ctx, &alertConfigID); err != nil { + return a, err } - acNew, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + a, err = qtx.GetAlertConfig(ctx, alertConfigID) if err != nil { - return model.AlertConfig{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.AlertConfig{}, err + if err := tx.Commit(ctx); err != nil { + 
return a, err } - return acNew, nil + return a, nil } diff --git a/api/internal/servicev2/alert_subscription.go b/api/internal/servicev2/alert_subscription.go index ffe73dec..441f0fd2 100644 --- a/api/internal/servicev2/alert_subscription.go +++ b/api/internal/servicev2/alert_subscription.go @@ -4,14 +4,15 @@ import ( "context" "fmt" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) const ( - unknown = "" - email = "email" - profile = "profile" + unknownUserType = "" + emailUserType = "email" + profileUserType = "profile" ) type AlertSubscriptionService interface { @@ -28,35 +29,41 @@ type AlertSubscriptionService interface { } type alertSubscriptionService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewAlertSubscriptionService(db *model.Database, q *model.Queries) *alertSubscriptionService { +func NewAlertSubscriptionService(db *Database, q *db.Queries) *alertSubscriptionService { return &alertSubscriptionService{db, q} } // SubscribeProfileToAlerts subscribes a profile to an instrument alert -func (s alertSubscriptionService) SubscribeProfileToAlerts(ctx context.Context, alertConfigID uuid.UUID, profileID uuid.UUID) (model.AlertSubscription, error) { - var a model.AlertSubscription - tx, err := s.db.BeginTxx(ctx, nil) +func (s alertSubscriptionService) SubscribeProfileToAlerts(ctx context.Context, alertConfigID uuid.UUID, profileID uuid.UUID) (db.AlertProfileSubscription, error) { + var a db.AlertProfileSubscription + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.SubscribeProfileToAlerts(ctx, alertConfigID, profileID); err != nil { + if err := qtx.CreateAlertProfileSubscriptionOnAnyConflictDoNothing(ctx, db.CreateAlertProfileSubscriptionOnAnyConflictDoNothingParams{ + AlertConfigID: alertConfigID, + ProfileID: profileID, + 
}); err != nil { return a, err } - updated, err := qtx.GetAlertSubscription(ctx, alertConfigID, profileID) + updated, err := qtx.GetAlertSubscription(ctx, db.GetAlertSubscriptionParams{ + AlertConfigID: alertConfigID, + ProfileID: profileID, + }) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } @@ -64,163 +71,166 @@ func (s alertSubscriptionService) SubscribeProfileToAlerts(ctx context.Context, } // UpdateMyAlertSubscription updates properties on a AlertSubscription -func (s alertSubscriptionService) UpdateMyAlertSubscription(ctx context.Context, sub model.AlertSubscription) (model.AlertSubscription, error) { - var a model.AlertSubscription - tx, err := s.db.BeginTxx(ctx, nil) +func (s alertSubscriptionService) UpdateMyAlertSubscription(ctx context.Context, sub model.AlertSubscription) (db.AlertProfileSubscription, error) { + var a db.AlertProfileSubscription + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - if err := qtx.UpdateMyAlertSubscription(ctx, sub); err != nil { + if err := qtx.UpdateMyAlertSubscription(ctx, db.UpdateMyAlertSubscriptionParams{ + MuteUi: sub.MuteUI, + MuteNotify: sub.MuteNotify, + AlertConfigID: sub.AlertConfigID, + ProfileID: sub.ProfileID, + }); err != nil { return a, err } - - updated, err := qtx.GetAlertSubscription(ctx, sub.AlertConfigID, sub.ProfileID) + updated, err := qtx.GetAlertSubscription(ctx, sub.ID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return updated, nil } -func (s alertSubscriptionService) SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s alertSubscriptionService) 
SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := registerAndSubscribe(ctx, qtx, alertConfigID, emails); err != nil { return a, err } - // Register any emails that are not yet in system for idx, em := range emails { - if em.UserType == unknown || em.UserType == email { + if em.UserType == unknownUserType || em.UserType == emailUserType { newID, err := qtx.RegisterEmail(ctx, em.Email) if err != nil { return a, err } emails[idx].ID = newID - emails[idx].UserType = email + emails[idx].UserType = emailUserType } } // Subscribe emails for _, em := range emails { - if em.UserType == email { - if err := qtx.SubscribeEmailToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + if em.UserType == emailUserType { + if err := qtx.CreateAlertEmailSubscription(ctx, db.CreateAlertEmailSubscriptionParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return a, err } - } else if em.UserType == profile { - if err := qtx.SubscribeProfileToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := qtx.CreateAlertProfileSubscription(ctx, db.CreateAlertProfileSubscriptionParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return a, err } } else { return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) } } - - acUpdated, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + acUpdated, err := qtx.GetAlertConfig(ctx, alertConfigID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return acUpdated, nil } -func (s alertSubscriptionService) 
UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s alertSubscriptionService) UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for _, em := range emails { - if em.UserType == unknown { + if em.UserType == unknownUserType { return a, fmt.Errorf("required field user_type is null, aborting transaction") - } else if em.UserType == email { - if err := qtx.UnsubscribeEmailFromAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == emailUserType { + if err := qtx.DeleteAlertEmailSubscription(ctx, db.DeleteAlertEmailSubscriptionParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return a, err } - } else if em.UserType == profile { - if err := qtx.UnsubscribeProfileFromAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := qtx.DeleteAlertProfileSubscription(ctx, db.DeleteAlertProfileSubscriptionParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return a, err } } else { return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) } } - - acUpdated, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + acUpdated, err := qtx.GetAlertConfig(ctx, alertConfigID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return acUpdated, nil } func (s alertSubscriptionService) UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - tx, err := 
s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - if err := qtx.UnsubscribeAllEmailsFromAlertConfig(ctx, alertConfigID); err != nil { + if err := qtx.DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx, alertConfigID); err != nil { return err } - - if err := qtx.UnsubscribeAllProfilesFromAlertConfig(ctx, alertConfigID); err != nil { + if err := qtx.DeleteAllAlertProfileSubscritpionsForAlertConfig(ctx, alertConfigID); err != nil { return err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return err } return nil } -func registerAndSubscribe(ctx context.Context, q *model.Queries, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) error { +func registerAndSubscribe(ctx context.Context, q *db.Queries, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) error { for idx, em := range emails { - if em.UserType == unknown || em.UserType == email { + if em.UserType == unknownUserType || em.UserType == emailUserType { newID, err := q.RegisterEmail(ctx, em.Email) if err != nil { return err } emails[idx].ID = newID - emails[idx].UserType = email + emails[idx].UserType = emailUserType } } for _, em := range emails { - if em.UserType == email { - if err := q.SubscribeEmailToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + if em.UserType == emailUserType { + if err := q.CreateAlertEmailSubscription(ctx, db.CreateAlertEmailSubscriptionParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return err } - } else if em.UserType == profile { - if err := q.SubscribeProfileToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := q.CreateAlertProfileSubscription(ctx, db.CreateAlertProfileSubscriptionParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return err } } else { diff --git 
a/api/internal/servicev2/autocomplete.go b/api/internal/servicev2/autocomplete.go index c821e69c..8eb581d1 100644 --- a/api/internal/servicev2/autocomplete.go +++ b/api/internal/servicev2/autocomplete.go @@ -9,12 +9,3 @@ import ( type EmailAutocompleteService interface { ListEmailAutocomplete(ctx context.Context, emailInput string, limit int) ([]model.EmailAutocompleteResult, error) } - -type emailAutocompleteService struct { - db *model.Database - *model.Queries -} - -func NewEmailAutocompleteService(db *model.Database, q *model.Queries) *emailAutocompleteService { - return &emailAutocompleteService{db, q} -} diff --git a/api/internal/servicev2/collection_group.go b/api/internal/servicev2/collection_group.go index 8023900d..0f13d512 100644 --- a/api/internal/servicev2/collection_group.go +++ b/api/internal/servicev2/collection_group.go @@ -3,54 +3,17 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type CollectionGroupService interface { - ListCollectionGroups(ctx context.Context, projectID uuid.UUID) ([]model.CollectionGroup, error) - GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (model.CollectionGroupDetails, error) - CreateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) - UpdateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) - DeleteCollectionGroup(ctx context.Context, projectID, collectionGroupID uuid.UUID) error - AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error + ListCollectionGroupsForProject(ctx context.Context, projectID uuid.UUID) ([]db.ListCollectionGroupsForProjectRow, error) + GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) (db.VCollectionGroupDetail, error) + CreateCollectionGroup(ctx context.Context, cg 
model.CollectionGroup) (db.CollectionGroup, error) + UpdateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (db.CollectionGroup, error) + DeleteCollectionGroup(ctx context.Context, projectID, id uuid.UUID) error + AddTimeseriesToCollectionGroup(ctx context.Context, arg db.AddTimeseriesToCollectionGroupParams) error RemoveTimeseriesFromCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error } - -type collectionGroupService struct { - db *model.Database - *model.Queries -} - -func NewCollectionGroupService(db *model.Database, q *model.Queries) *collectionGroupService { - return &collectionGroupService{db, q} -} - -// GetCollectionGroupDetails returns details for a single CollectionGroup -func (s collectionGroupService) GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (model.CollectionGroupDetails, error) { - var a model.CollectionGroupDetails - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return a, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - cg, err := qtx.GetCollectionGroupDetails(ctx, projectID, collectionGroupID) - if err != nil { - return a, err - } - ts, err := qtx.GetCollectionGroupDetailsTimeseries(ctx, projectID, collectionGroupID) - if err != nil { - return a, err - } - - if err := tx.Commit(); err != nil { - return a, err - } - - cg.Timeseries = ts - - return cg, nil -} diff --git a/api/internal/servicev2/datalogger.go b/api/internal/servicev2/datalogger.go index 4f59ff2d..f34c4f77 100644 --- a/api/internal/servicev2/datalogger.go +++ b/api/internal/servicev2/datalogger.go @@ -2,155 +2,195 @@ package servicev2 import ( "context" + "errors" + "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/password" "github.com/google/uuid" ) type DataloggerService interface { - GetDataloggerModelName(ctx context.Context, modelID 
uuid.UUID) (string, error) - ListProjectDataloggers(ctx context.Context, projectID uuid.UUID) ([]model.Datalogger, error) - ListAllDataloggers(ctx context.Context) ([]model.Datalogger, error) - GetDataloggerIsActive(ctx context.Context, modelName, sn string) (bool, error) - VerifyDataloggerExists(ctx context.Context, dlID uuid.UUID) error + GetDatalogger(ctx context.Context, dataloggerID uuid.UUID) (db.VDatalogger, error) + ListDataloggers(ctx context.Context) ([]db.VDatalogger, error) + ListDataloggersForProject(ctx context.Context, projectID uuid.UUID) ([]db.VDatalogger, error) CreateDatalogger(ctx context.Context, n model.Datalogger) (model.DataloggerWithKey, error) - CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) - GetOneDatalogger(ctx context.Context, dataloggerID uuid.UUID) (model.Datalogger, error) UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) DeleteDatalogger(ctx context.Context, d model.Datalogger) error - GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (model.DataloggerTablePreview, error) - ResetDataloggerTableName(ctx context.Context, dataloggerTableID uuid.UUID) error GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) DeleteDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error + ResetDataloggerTableName(ctx context.Context, dataloggerTableID uuid.UUID) error + VerifyDataloggerExists(ctx context.Context, dlID uuid.UUID) error + GetDataloggerModelName(ctx context.Context, modelID uuid.UUID) (string, error) + GetDataloggerIsActive(ctx context.Context, modelName, sn string) (bool, error) + CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) + GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (model.DataloggerTablePreview, error) } type dataloggerService struct { - db *model.Database - *model.Queries + db 
*Database + *db.Queries } -func NewDataloggerService(db *model.Database, q *model.Queries) *dataloggerService { +func NewDataloggerService(db *Database, q *db.Queries) *dataloggerService { return &dataloggerService{db, q} } -func (s dataloggerService) CreateDatalogger(ctx context.Context, n model.Datalogger) (model.DataloggerWithKey, error) { - var a model.DataloggerWithKey - tx, err := s.db.BeginTxx(ctx, nil) +type DataloggerWithKey struct { + db.VDatalogger + Key string `json:"key"` +} + +func (s dataloggerService) CreateDatalogger(ctx context.Context, n model.Datalogger) (DataloggerWithKey, error) { + var a DataloggerWithKey + + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - dataloggerID, err := qtx.CreateDatalogger(ctx, n) + dataloggerID, err := qtx.CreateDatalogger(ctx, db.CreateDataloggerParams{ + Name: n.Name, + Sn: n.SN, + ProjectID: n.ProjectID, + Creator: n.CreatorID, + ModelID: n.ModelID, + }) if err != nil { return a, err } - key, err := qtx.CreateDataloggerHash(ctx, dataloggerID) - if err != nil { + key := password.GenerateRandom(40) + hash := password.MustCreateHash(key, password.DefaultParams) + + if err := qtx.CreateDataloggerHash(ctx, db.CreateDataloggerHashParams{ + DataloggerID: dataloggerID, + Hash: hash, + }); err != nil { return a, err } - - dl, err := qtx.GetOneDatalogger(ctx, dataloggerID) + dl, err := qtx.GetDatalogger(ctx, dataloggerID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - - dk := model.DataloggerWithKey{ - Datalogger: dl, - Key: key, + dk := DataloggerWithKey{ + VDatalogger: dl, + Key: key, } - return dk, nil } -func (s dataloggerService) CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) { - var a model.DataloggerWithKey - tx, err := s.db.BeginTxx(ctx, nil) +func (s dataloggerService) CycleDataloggerKey(ctx 
context.Context, profileID, dataloggerID uuid.UUID) (DataloggerWithKey, error) { + var a DataloggerWithKey + + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) + key := password.GenerateRandom(40) + hash := password.MustCreateHash(key, password.DefaultParams) - key, err := qtx.UpdateDataloggerHash(ctx, u.ID) - if err != nil { + if err := qtx.UpdateDataloggerHash(ctx, db.UpdateDataloggerHashParams{ + DataloggerID: dataloggerID, + Hash: hash, + }); err != nil { return a, err } - if err := qtx.UpdateDataloggerUpdater(ctx, u); err != nil { + if err := qtx.UpdateDataloggerUpdater(ctx, db.UpdateDataloggerUpdaterParams{ + ID: dataloggerID, + Updater: profileID, + UpdateDate: time.Now(), + }); err != nil { return a, err } - dl, err := qtx.GetOneDatalogger(ctx, u.ID) + dl, err := qtx.GetDatalogger(ctx, dataloggerID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - dk := model.DataloggerWithKey{ - Datalogger: dl, - Key: key, + dk := DataloggerWithKey{ + VDatalogger: dl, + Key: key, } return dk, nil } -func (s dataloggerService) UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) { - var a model.Datalogger - tx, err := s.db.BeginTxx(ctx, nil) +func (s dataloggerService) UpdateDatalogger(ctx context.Context, u model.Datalogger) (db.VDatalogger, error) { + var a db.VDatalogger + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateDatalogger(ctx, u); err != nil { + if u.UpdaterID == nil { + return a, errors.New("must set updater id") + } + + if err := qtx.UpdateDatalogger(ctx, db.UpdateDataloggerParams{ + ID: u.ID, + Name: u.Name, + Updater: *u.UpdaterID, + UpdateDate: time.Now(), + }); err != nil { return a, err } - dlUpdated, err := qtx.GetOneDatalogger(ctx, u.ID) + 
dlUpdated, err := qtx.GetDatalogger(ctx, u.ID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } return dlUpdated, nil } -func (s dataloggerTelemetryService) GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s dataloggerService) GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { + tx, err := s.db.Begin(ctx) if err != nil { return uuid.Nil, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.RenameEmptyDataloggerTableName(ctx, dataloggerID, tableName); err != nil { + if err := qtx.RenameEmptyDataloggerTableName(ctx, db.RenameEmptyDataloggerTableNameParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }); err != nil { return uuid.Nil, err } - dataloggerTableID, err := qtx.GetOrCreateDataloggerTable(ctx, dataloggerID, tableName) + dataloggerTableID, err := qtx.GetOrCreateDataloggerTable(ctx, db.GetOrCreateDataloggerTableParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }) if err != nil { return uuid.Nil, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return uuid.Nil, err } diff --git a/api/internal/servicev2/datalogger_telemetry.go b/api/internal/servicev2/datalogger_telemetry.go index 28278e69..800ddd0e 100644 --- a/api/internal/servicev2/datalogger_telemetry.go +++ b/api/internal/servicev2/datalogger_telemetry.go @@ -11,12 +11,13 @@ import ( "strconv" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type DataloggerTelemetryService interface { - GetDataloggerByModelSN(ctx context.Context, modelName, sn string) (model.Datalogger, error) + GetDataloggerByModelSN(ctx context.Context, modelName, sn string) 
(db.VDatalogger, error) GetDataloggerHashByModelSN(ctx context.Context, modelName, sn string) (string, error) CreateDataloggerTablePreview(ctx context.Context, prv model.DataloggerTablePreview) error UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) @@ -25,77 +26,101 @@ type DataloggerTelemetryService interface { } type dataloggerTelemetryService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewDataloggerTelemetryService(db *model.Database, q *model.Queries) *dataloggerTelemetryService { +func NewDataloggerTelemetryService(db *Database, q *db.Queries) *dataloggerTelemetryService { return &dataloggerTelemetryService{db, q} } // UpdateDataloggerTablePreview attempts to update a table preview by datalogger_id and table_name, creates the // datalogger table and corresponding preview if it doesn't exist func (s dataloggerTelemetryService) UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return uuid.Nil, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) // replace empty datalogger table name with most recent payload - if err := qtx.RenameEmptyDataloggerTableName(ctx, dataloggerID, tableName); err != nil { + if err := qtx.RenameEmptyDataloggerTableName(ctx, db.RenameEmptyDataloggerTableNameParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }); err != nil { return uuid.Nil, err } - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dataloggerID, tableName) + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, db.GetOrCreateDataloggerTableParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }) if err != nil { return uuid.Nil, err } - if err := qtx.UpdateDataloggerTablePreview(ctx, dataloggerID, 
tableName, prv); err != nil { + if err := qtx.UpdateDataloggerTablePreview(ctx, db.UpdateDataloggerTablePreviewParams{ + DataloggerID: dataloggerID, + TableName: tableName, + Preview: prv.Preview.Bytes, + UpdateDate: prv.UpdateDate, + }); err != nil { if !errors.Is(err, sql.ErrNoRows) { return uuid.Nil, err } prv.DataloggerTableID = tableID - if err := qtx.CreateDataloggerTablePreview(ctx, prv); err != nil { + if err := qtx.CreateDataloggerTablePreview(ctx, db.CreateDataloggerTablePreviewParams{ + DataloggerTableID: prv.DataloggerTableID, + Preview: prv.Preview.Bytes, + UpdateDate: prv.UpdateDate, + }); err != nil { } } - return tableID, tx.Commit() + return tableID, tx.Commit(ctx) } func (s dataloggerTelemetryService) UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error { - tx, err := s.db.BeginTxx(ctx, nil) + if tableName == nil { + return errors.New("table name must not be nil") + } + + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.DeleteDataloggerTableError(ctx, dataloggerID, tableName); err != nil { + if err := qtx.DeleteDataloggerTableError(ctx, db.DeleteDataloggerTableErrorParams{ + DataloggerID: dataloggerID, + TableName: *tableName, + }); err != nil { return err } for _, m := range e.Errors { - if err := qtx.CreateDataloggerTableError(ctx, dataloggerID, tableName, m); err != nil { + if err := qtx.CreateDataloggerError(ctx, db.CreateDataloggerErrorParams{ + DataloggerID: dataloggerID, + TableName: *tableName, + ErrorMessage: &m, + }); err != nil { return err } } - return tx.Commit() + return tx.Commit(ctx) } // ParseTOA5 parses a Campbell Scientific TOA5 data file that is simlar to a csv. 
// The unique properties of TOA5 are that the meatdata are stored in header of file (first 4 lines of csv) func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) reader := csv.NewReader(r) @@ -126,12 +151,18 @@ func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementColle TableName: envHeader[6], } - dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + dl, err := qtx.GetDataloggerByModelSN(ctx, db.GetDataloggerByModelSNParams{ + Model: &meta.Model, + Sn: meta.SerialNo, + }) if err != nil { return err } - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, db.GetOrCreateDataloggerTableParams{ + DataloggerID: dl.ID, + TableName: meta.TableName, + }) if err != nil { return err } @@ -158,7 +189,7 @@ func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementColle } fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, eqtRow := range eqt.Rows { + for _, eqtRow := range eqt.Fields { fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID } @@ -200,11 +231,15 @@ func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementColle continue } - if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, tsID, t, v); err != nil { + if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, db.CreateOrUpdateTimeseriesMeasurementParams{ + TimeseriesID: tsID, + Time: t, + Value: v, + }); err != nil { return err } } } - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/db.go b/api/internal/servicev2/db.go index d486e0c9..1d9e6f2e 100644 --- a/api/internal/servicev2/db.go +++ b/api/internal/servicev2/db.go @@ -6,17 +6,11 @@ import ( "errors" "log" - 
"github.com/USACE/instrumentation-api/api/internal/db" "github.com/jackc/pgx/v5/pgxpool" ) -type dbStore struct { - db *pgxpool.Pool - *db.Queries -} - -func NewDbStore(db *pgxpool.Pool, q *db.Queries) *dbStore { - return &dbStore{db, q} +type Database struct { + *pgxpool.Pool } func txDo(ctx context.Context, rollback func(ctx context.Context) error) { diff --git a/api/internal/servicev2/district_rollup.go b/api/internal/servicev2/district_rollup.go index e78ef5e8..00299620 100644 --- a/api/internal/servicev2/district_rollup.go +++ b/api/internal/servicev2/district_rollup.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -13,10 +14,10 @@ type DistrictRollupService interface { } type districtRollupService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewDistrictRollupService(db *model.Database, q *model.Queries) *districtRollupService { +func NewDistrictRollupService(db *Database, q *db.Queries) *districtRollupService { return &districtRollupService{db, q} } diff --git a/api/internal/servicev2/domain.go b/api/internal/servicev2/domain.go index 91de8966..04725563 100644 --- a/api/internal/servicev2/domain.go +++ b/api/internal/servicev2/domain.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" ) @@ -12,10 +13,10 @@ type DomainService interface { } type domainService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewDomainService(db *model.Database, q *model.Queries) *domainService { +func NewDomainService(db *Database, q *db.Queries) *domainService { return &domainService{db, q} } diff --git a/api/internal/servicev2/equivalency_table.go b/api/internal/servicev2/equivalency_table.go index 2ac6f34b..812b86dc 100644 --- 
a/api/internal/servicev2/equivalency_table.go +++ b/api/internal/servicev2/equivalency_table.go @@ -2,7 +2,9 @@ package servicev2 import ( "context" + "errors" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -17,73 +19,94 @@ type EquivalencyTableService interface { } type equivalencyTableService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewEquivalencyTableService(db *model.Database, q *model.Queries) *equivalencyTableService { +func NewEquivalencyTableService(db *Database, q *db.Queries) *equivalencyTableService { return &equivalencyTableService{db, q} } // CreateEquivalencyTable creates EquivalencyTable rows // If a row with the given datalogger id or field name already exists the row will be ignored -func (s equivalencyTableService) CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s equivalencyTableService) CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { + var a db.VDataloggerEquivalencyTable + + tx, err := s.db.Begin(ctx) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) for _, r := range t.Rows { if r.TimeseriesID != nil { - if err = qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID); err != nil { - return model.EquivalencyTable{}, err + valid, err := qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID) + if err != nil { + return a, err + } + if !valid { + return a, errors.New("equivalency table timeseries invalid") } } - if err := qtx.CreateOrUpdateEquivalencyTableRow(ctx, t.DataloggerID, t.DataloggerTableID, r); err != nil { - return model.EquivalencyTable{}, err + if err := qtx.CreateOrUpdateEquivalencyTableRow(ctx, 
db.CreateOrUpdateEquivalencyTableRowParams{ + DataloggerID: t.DataloggerID, + DataloggerTableID: &t.DataloggerTableID, + FieldName: r.FieldName, + DisplayName: &r.DisplayName, + InstrumentID: r.InstrumentID, + TimeseriesID: r.TimeseriesID, + }); err != nil { + return a, err } } eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.EquivalencyTable{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } return eqt, nil } // UpdateEquivalencyTable updates rows of an EquivalencyTable -func (s equivalencyTableService) UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s equivalencyTableService) UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { + var a db.VDataloggerEquivalencyTable + tx, err := s.db.Begin(ctx) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) for _, r := range t.Rows { if r.TimeseriesID != nil { - if err = qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID); err != nil { - return model.EquivalencyTable{}, err + valid, err := qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID) + if err != nil { + return a, err + } + if !valid { + return a, errors.New("equivalency table timeseries invalid") } } - if err := qtx.UpdateEquivalencyTableRow(ctx, r); err != nil { - return model.EquivalencyTable{}, err + if err := qtx.UpdateEquivalencyTableRow(ctx, db.UpdateEquivalencyTableRowParams{ + ID: r.ID, + FieldName: r.FieldName, + DisplayName: &r.DisplayName, + }); err != nil { + return a, err } } eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) - if err := tx.Commit(); err != nil { - return model.EquivalencyTable{}, err + if err := 
tx.Commit(ctx); err != nil { + return a, err } return eqt, nil diff --git a/api/internal/servicev2/evaluation.go b/api/internal/servicev2/evaluation.go index 8b0b8473..f8579aca 100644 --- a/api/internal/servicev2/evaluation.go +++ b/api/internal/servicev2/evaluation.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -19,46 +20,43 @@ type EvaluationService interface { } type evaluationService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewEvaluationService(db *model.Database, q *model.Queries) *evaluationService { +func NewEvaluationService(db *Database, q *db.Queries) *evaluationService { return &evaluationService{db, q} } func (s evaluationService) RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) sub, err := qtx.CompleteEvaluationSubmittal(ctx, subID) if err != nil { return err } - // Create next submittal if submitted on-time // late submittals will have already generated next submittal - if sub.SubmittalStatusID == GreenSubmittalStatusID { + if sub.SubmittalStatusID != nil && *sub.SubmittalStatusID == model.GreenSubmittalStatusID { if err := qtx.CreateNextEvaluationSubmittal(ctx, subID); err != nil { return err } } - return tx.Commit() + return tx.Commit(ctx) } -func (s evaluationService) CreateEvaluation(ctx context.Context, ev model.Evaluation) (model.Evaluation, error) { - var a model.Evaluation - tx, err := s.db.BeginTxx(ctx, nil) +func (s evaluationService) CreateEvaluation(ctx context.Context, ev model.Evaluation) (db.VEvaluation, error) { + var a db.VEvaluation + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer 
txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) if ev.SubmittalID != nil { @@ -68,85 +66,116 @@ func (s evaluationService) CreateEvaluation(ctx context.Context, ev model.Evalua } // Create next submittal if submitted on-time // late submittals will have already generated next submittal - if sub.SubmittalStatusID == GreenSubmittalStatusID { + if sub.SubmittalStatusID != nil && *sub.SubmittalStatusID == model.GreenSubmittalStatusID { qtx.CreateNextEvaluationSubmittal(ctx, *ev.SubmittalID) } } - - evID, err := qtx.CreateEvaluation(ctx, ev) + evID, err := qtx.CreateEvaluation(ctx, db.CreateEvaluationParams{ + ProjectID: ev.ProjectID, + SubmittalID: ev.SubmittalID, + Name: ev.Name, + Body: ev.Body, + StartDate: ev.StartDate, + EndDate: ev.EndDate, + Creator: ev.CreatorID, + CreateDate: ev.CreateDate, + }) if err != nil { return a, err } - - for _, aci := range ev.Instruments { - if err := qtx.CreateEvaluationInstrument(ctx, evID, aci.InstrumentID); err != nil { - return a, err + args := make([]db.CreateEvaluationInstrumentsBatchParams, len(ev.Instruments)) + for idx, aci := range ev.Instruments { + args[idx] = db.CreateEvaluationInstrumentsBatchParams{ + EvaluationID: &evID, + InstrumentID: &aci.InstrumentID, } } - - evNew, err := qtx.GetEvaluation(ctx, evID) + qtx.CreateEvaluationInstrumentsBatch(ctx, args).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + a, err = qtx.GetEvaluation(ctx, evID) + if err != nil { + return a, err + } + if err := tx.Commit(ctx); err != nil { return a, err } - return evNew, nil + return a, nil } -func (s evaluationService) UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (model.Evaluation, error) { - var a model.Evaluation - tx, err := s.db.BeginTxx(ctx, nil) +func (s evaluationService) UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (db.VEvaluation, error) { + var a db.VEvaluation + 
tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateEvaluation(ctx, ev); err != nil { + if err := qtx.UpdateEvaluation(ctx, db.UpdateEvaluationParams{ + ID: ev.ID, + ProjectID: ev.ProjectID, + Name: ev.Name, + Body: ev.Body, + StartDate: ev.StartDate, + EndDate: ev.EndDate, + Updater: ev.UpdaterID, + UpdateDate: ev.UpdateDate, + }); err != nil { return a, err } - - if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, ev.ID); err != nil { + if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, &ev.ID); err != nil { return a, err } - - for _, aci := range ev.Instruments { - if err := qtx.CreateEvaluationInstrument(ctx, ev.ID, aci.InstrumentID); err != nil { - return a, err + args := make([]db.CreateEvaluationInstrumentsBatchParams, len(ev.Instruments)) + for idx, aci := range ev.Instruments { + args[idx] = db.CreateEvaluationInstrumentsBatchParams{ + EvaluationID: &evaluationID, + InstrumentID: &aci.InstrumentID, } } + qtx.CreateEvaluationInstrumentsBatch(ctx, args).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return a, err + } - evUpdated, err := qtx.GetEvaluation(ctx, ev.ID) + a, err = qtx.GetEvaluation(ctx, ev.ID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return evUpdated, nil + return a, nil } func (s evaluationService) DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, evaluationID); err != nil { + if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, &evaluationID); err != nil { return err } - if err := qtx.DeleteEvaluation(ctx, evaluationID); err != nil { 
return err } - return nil + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/heartbeat.go b/api/internal/servicev2/heartbeat.go index 9df692b7..0c9727f0 100644 --- a/api/internal/servicev2/heartbeat.go +++ b/api/internal/servicev2/heartbeat.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" ) @@ -13,10 +14,10 @@ type HeartbeatService interface { } type heartbeatService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewHeartbeatService(db *model.Database, q *model.Queries) *heartbeatService { +func NewHeartbeatService(db *Database, q *db.Queries) *heartbeatService { return &heartbeatService{db, q} } diff --git a/api/internal/servicev2/home.go b/api/internal/servicev2/home.go index 0427135a..23e57cb4 100644 --- a/api/internal/servicev2/home.go +++ b/api/internal/servicev2/home.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" ) @@ -11,10 +12,10 @@ type HomeService interface { } type homeService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewHomeService(db *model.Database, q *model.Queries) *homeService { +func NewHomeService(db *Database, q *db.Queries) *homeService { return &homeService{db, q} } diff --git a/api/internal/servicev2/instrument.go b/api/internal/servicev2/instrument.go index 4724ef18..87548ba6 100644 --- a/api/internal/servicev2/instrument.go +++ b/api/internal/servicev2/instrument.go @@ -2,10 +2,14 @@ package servicev2 import ( "context" + "encoding/json" + "slices" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" - "github.com/paulmach/orb/geojson" + "github.com/twpayne/go-geom" + "github.com/twpayne/go-geom/encoding/geojson" ) type 
InstrumentService interface { @@ -20,11 +24,11 @@ type InstrumentService interface { } type instrumentService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewInstrumentService(db *model.Database, q *model.Queries) *instrumentService { +func NewInstrumentService(db *Database, q *db.Queries) *instrumentService { return &instrumentService{db, q} } @@ -40,7 +44,7 @@ const ( update ) -func createInstrument(ctx context.Context, q *model.Queries, instrument model.Instrument) (model.IDSlugName, error) { +func createInstrument(ctx context.Context, q *db.Queries, instrument model.Instrument) (model.IDSlugName, error) { newInstrument, err := q.CreateInstrument(ctx, instrument) if err != nil { return model.IDSlugName{}, err @@ -66,11 +70,11 @@ func createInstrument(ctx context.Context, q *model.Queries, instrument model.In } func (s instrumentService) CreateInstrument(ctx context.Context, instrument model.Instrument) (model.IDSlugName, error) { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return model.IDSlugName{}, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -79,89 +83,183 @@ func (s instrumentService) CreateInstrument(ctx context.Context, instrument mode return model.IDSlugName{}, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return model.IDSlugName{}, err } return newInstrument, nil } -func (s instrumentService) CreateInstruments(ctx context.Context, instruments []model.Instrument) ([]model.IDSlugName, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s instrumentService) CreateInstruments(ctx context.Context, ii []model.Instrument) ([]db.CreateInstrumentsBatchRow, error) { + tx, err := s.db.Begin(ctx) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - ii := make([]model.IDSlugName, len(instruments)) - for idx, i := range instruments { - 
newInstrument, err := createInstrument(ctx, qtx, i) - if err != nil { - return nil, err + createInstrumentsArgs := make([]db.CreateInstrumentsBatchParams, len(ii)) + assignInstrumentsProjectsArgs := make([][]db.AssignInstrumentToProjectBatchParams, len(ii)) + instrumentStatusArgs := make([]db.CreateOrUpdateInstrumentStatusBatchParams, len(ii)) + instrumentAwareArgs := make([]db.CreateAwarePlatformBatchParams, 0) + + for idx, inst := range ii { + createInstrumentsArgs[idx] = db.CreateInstrumentsBatchParams{ + Name: inst.Name, + TypeID: inst.TypeID, + Geometry: inst.Geometry, + Station: inst.Station, + StationOffset: inst.StationOffset, + Creator: inst.CreatorID, + CreateDate: inst.CreateDate, + NidID: inst.NIDID, + UsgsID: inst.USGSID, + ShowCwmsTab: inst.ShowCwmsTab, + } + } + newInstruments := make([]db.CreateInstrumentsBatchRow, len(createInstrumentsArgs)) + qtx.CreateInstrumentsBatch(ctx, createInstrumentsArgs).QueryRow(func(idx int, r db.CreateInstrumentsBatchRow, e error) { + if e != nil { + err = e + return + } + assignInstrumentsProjectsArgs[idx] = make([]db.AssignInstrumentToProjectBatchParams, len(ii[idx].Projects)) + for j, p := range ii[idx].Projects { + assignInstrumentsProjectsArgs[idx][j] = db.AssignInstrumentToProjectBatchParams{ + InstrumentID: r.ID, + ProjectID: p.ID, + } + } + instrumentStatusArgs[idx] = db.CreateOrUpdateInstrumentStatusBatchParams{ + InstrumentID: r.ID, + StatusID: ii[idx].StatusID, + Time: ii[idx].StatusTime, + } + if ii[idx].AwareID != nil { + instrumentAwareArgs = append(instrumentAwareArgs, db.CreateAwarePlatformBatchParams{ + InstrumentID: &r.ID, + AwareID: *ii[idx].AwareID, + }) + } + newInstruments[idx] = r + }) + if err != nil { + return nil, err + } + qtx.AssignInstrumentToProjectBatch(ctx, slices.Concat(assignInstrumentsProjectsArgs...)).Exec(func(_ int, e error) { + if e != nil { + err = e + return } - ii[idx] = newInstrument + }) + if err != nil { + return nil, err } - if err := tx.Commit(); err != nil { + 
qtx.CreateOrUpdateInstrumentStatusBatch(ctx, instrumentStatusArgs).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { return nil, err } - return ii, nil + qtx.CreateAwarePlatformBatch(ctx, instrumentAwareArgs).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return nil, err + } + if err := handleOptsBatch(ctx, qtx, ii, create); err != nil { + return nil, err + } + + if err := tx.Commit(ctx); err != nil { + return nil, err + } + + return newInstruments, nil } // UpdateInstrument updates a single instrument -func (s instrumentService) UpdateInstrument(ctx context.Context, projectID uuid.UUID, i model.Instrument) (model.Instrument, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s instrumentService) UpdateInstrument(ctx context.Context, projectID uuid.UUID, inst model.Instrument) (db.VInstrument, error) { + var a db.VInstrument + tx, err := s.db.Begin(ctx) if err != nil { - return model.Instrument{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateInstrument(ctx, projectID, i); err != nil { - return model.Instrument{}, err + if err := qtx.UpdateInstrument(ctx, db.UpdateInstrumentParams{ + ProjectID: projectID, + ID: inst.ID, + Name: inst.Name, + TypeID: inst.TypeID, + Geometry: inst.Geometry, + Updater: inst.UpdaterID, + UpdateDate: inst.UpdateDate, + Station: inst.Station, + StationOffset: inst.StationOffset, + NidID: inst.NIDID, + UsgsID: inst.USGSID, + ShowCwmsTab: inst.ShowCwmsTab, + }); err != nil { + return a, err } - if err := qtx.CreateOrUpdateInstrumentStatus(ctx, i.ID, i.StatusID, i.StatusTime); err != nil { - return model.Instrument{}, err + if err := qtx.CreateOrUpdateInstrumentStatus(ctx, db.CreateOrUpdateInstrumentStatusParams{ + InstrumentID: inst.ID, + StatusID: inst.StatusID, + Time: inst.StatusTime, + }); err != nil { + return a, err } - - if err := handleOpts(ctx, qtx, i, update); err 
!= nil { - return model.Instrument{}, err + if err := handleOptsBatch(ctx, qtx, []model.Instrument{inst}, update); err != nil { + return a, err } - - aa, err := qtx.GetInstrument(ctx, i.ID) + a, err = qtx.GetInstrument(ctx, inst.ID) if err != nil { - return model.Instrument{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.Instrument{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return aa, nil + return a, nil } -func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p model.Profile) (model.Instrument, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return model.Instrument{}, err +func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, gj json.RawMessage, p model.Profile) (db.VInstrument, error) { + var a db.VInstrument + + var geometry geom.T + if err := geojson.Unmarshal(gj, &geometry); err != nil { + return a, err } - defer model.TxDo(tx.Rollback) + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateInstrumentGeometry(ctx, projectID, instrumentID, geom, p); err != nil { - return model.Instrument{}, err + if _, err := qtx.UpdateInstrumentGeometry(ctx, db.UpdateInstrumentGeometryParams{ + ProjectID: projectID, + ID: instrumentID, + Geometry: geometry, + Updater: &p.ID, + }); err != nil { + return a, err } - - aa, err := qtx.GetInstrument(ctx, instrumentID) + a, err = qtx.GetInstrument(ctx, instrumentID) if err != nil { - return model.Instrument{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.Instrument{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return aa, nil + return a, nil } diff --git a/api/internal/servicev2/instrument_assign.go b/api/internal/servicev2/instrument_assign.go index d169b02e..5bee91cc 100644 --- 
a/api/internal/servicev2/instrument_assign.go +++ b/api/internal/servicev2/instrument_assign.go @@ -4,6 +4,7 @@ import ( "context" "fmt" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -18,166 +19,326 @@ type InstrumentAssignService interface { } type instrumentAssignService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewInstrumentAssignService(db *model.Database, q *model.Queries) *instrumentAssignService { +func NewInstrumentAssignService(db *Database, q *db.Queries) *instrumentAssignService { return &instrumentAssignService{db, q} } -func validateAssignProjectsToInstrument(ctx context.Context, q *model.Queries, profileID uuid.UUID, instrument model.Instrument, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := q.ValidateProjectsAssignerAuthorized(ctx, profileID, instrument.ID, projectIDs) - if err != nil || !v.IsValid { +type ReasonCode int + +const ( + None ReasonCode = iota + Unauthorized + InvalidName + InvalidUnassign +) + +type InstrumentsValidation struct { + ReasonCode ReasonCode `json:"-"` + IsValid bool `json:"is_valid"` + Errors []string `json:"errors"` +} + +func (s instrumentAssignService) AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := assignProjectsToInstrument(ctx, qtx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - return q.ValidateProjectsInstrumentNameUnique(ctx, instrument.Name, projectIDs) + return v, tx.Commit(ctx) } -func validateAssignInstrumentsToProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) 
(model.InstrumentsValidation, error) { - iIDNames, err := q.ListInstrumentIDNamesByIDs(ctx, instrumentIDs) +func (s instrumentAssignService) UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - iIDs := make([]uuid.UUID, len(iIDNames)) - iNames := make([]string, len(iIDNames)) - for idx := range iIDNames { - iIDs[idx] = iIDNames[idx].ID - iNames[idx] = iIDNames[idx].Name - } - v, err := q.ValidateInstrumentsAssignerAuthorized(ctx, profileID, iIDs) - if err != nil || !v.IsValid { + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := unassignProjectsFromInstrument(ctx, qtx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - return q.ValidateInstrumentNamesProjectUnique(ctx, projectID, iNames) + return v, tx.Commit(ctx) } -func assignProjectsToInstrument(ctx context.Context, q *model.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - instrument, err := q.GetInstrument(ctx, instrumentID) +func (s instrumentAssignService) AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - v, err := validateAssignProjectsToInstrument(ctx, q, profileID, instrument, projectIDs) - if err != nil || !v.IsValid { + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := assignInstrumentsToProject(ctx, qtx, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - for _, pID := range projectIDs { - if err := q.AssignInstrumentToProject(ctx, pID, 
instrumentID); err != nil { - return model.InstrumentsValidation{}, err - } - } - return v, nil + return v, tx.Commit(ctx) } -func unassignProjectsFromInstrument(ctx context.Context, q *model.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := q.ValidateProjectsAssignerAuthorized(ctx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid { - return v, err +func (s instrumentAssignService) UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err } - for _, pID := range projectIDs { - if err := q.UnassignInstrumentFromProject(ctx, pID, instrumentID); err != nil { - return v, err - } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := unassignInstrumentsFromProject(ctx, qtx, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid || dryRun { + return v, err } - return v, nil + return v, tx.Commit(ctx) } -func assignInstrumentsToProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := validateAssignInstrumentsToProject(ctx, q, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid { - return v, err +func validateProjectsAssignerAuthorized(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ValidateProjectsAssignerAuthorized(ctx, db.ValidateProjectsAssignerAuthorizedParams{ + InstrumentID: instrumentID, + ProjectIds: projectIDs, + ProfileID: profileID, + }) + if err != nil { + return a, err } - for _, iID := range instrumentIDs { - if err := q.AssignInstrumentToProject(ctx, projectID, iID); err != nil { - return v, err + if len(nn) != 0 { + vErrors 
:= make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Cannot assign instrument to project '%s' because the user is not an ADMIN of this project", + nn[idx], + ) } + a.Errors = vErrors + a.ReasonCode = Unauthorized + } else { + a.IsValid = true + a.Errors = make([]string, 0) } - return v, nil + return a, err } -func unassignInstrumentsFromProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := q.ValidateInstrumentsAssignerAuthorized(ctx, profileID, instrumentIDs) - if err != nil || !v.IsValid { - return v, err +func validateInstrumentsAssignerAuthorized(ctx context.Context, q *db.Queries, profileID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ValidateInstrumentsAssignerAuthorized(ctx, db.ValidateInstrumentsAssignerAuthorizedParams{ + InstrumentIds: instrumentIDs, + ProfileID: profileID, + }) + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Cannot assign instrument '%s' because is assigned to another project '%s' which the user is not an ADMIN of", + nn[idx].InstrumentName, nn[idx].ProjectName, + ) + } + a.Errors = vErrors + a.ReasonCode = Unauthorized + } else { + a.IsValid = true + a.Errors = make([]string, 0) } - cc, err := q.GetProjectCountForInstruments(ctx, instrumentIDs) + return a, err +} + +func validateProjectsInstrumentNameUnique(ctx context.Context, q *db.Queries, instrumentName string, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ValidateProjectsInstrumentNameUnique(ctx, db.ValidateProjectsInstrumentNameUniqueParams{ + InstrumentName: instrumentName, + ProjectIds: projectIDs, + }) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - - for _, count := range cc { - if count.ProjectCount < 1 { - // invalid 
instrument, skipping - continue - } - if count.ProjectCount == 1 { - v.IsValid = false - v.ReasonCode = model.InvalidUnassign - v.Errors = append(v.Errors, fmt.Sprintf("cannot unassign instruments from project, all instruments must have at least one project assinment (%s is only assign to this project)", count.InstrumentName)) - } - if err := q.UnassignInstrumentFromProject(ctx, projectID, count.InstrumentID); err != nil { - return v, err + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Instrument name '%s' is already taken. Instrument names must be unique within associated projects", + nn[idx], + ) } + a.Errors = vErrors + a.ReasonCode = InvalidName + } else { + a.IsValid = true + a.Errors = make([]string, 0) } - return v, nil + return a, err } -func (s instrumentAssignService) AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func validateInstrumentNamesProjectUnique(ctx context.Context, q *db.Queries, projectID uuid.UUID, instrumentNames []string) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ValidateInstrumentNamesProjectUnique(ctx, db.ValidateInstrumentNamesProjectUniqueParams{ + ProjectID: projectID, + InstrumentNames: instrumentNames, + }) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Instrument name '%s' is already taken. 
Instrument names must be unique within associated projects", + nn[idx], + ) + } + a.Errors = vErrors + a.ReasonCode = InvalidName + } else { + a.IsValid = true + a.Errors = make([]string, 0) + } + return a, err +} - v, err := assignProjectsToInstrument(ctx, qtx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid || dryRun { +func validateAssignProjectsToInstrument(ctx context.Context, q *db.Queries, profileID uuid.UUID, instrument db.VInstrument, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + v, err := validateProjectsAssignerAuthorized(ctx, q, profileID, instrument.ID, projectIDs) + if err != nil || !v.IsValid { return v, err } - return v, tx.Commit() + return validateProjectsInstrumentNameUnique(ctx, q, instrument.Name, projectIDs) } -func (s instrumentAssignService) UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func validateAssignInstrumentsToProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + iIDNames, err := q.ListInstrumentIDNamesByIDs(ctx, instrumentIDs) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) + iIDs := make([]uuid.UUID, len(iIDNames)) + iNames := make([]string, len(iIDNames)) + for idx := range iIDNames { + iIDs[idx] = iIDNames[idx].ID + iNames[idx] = iIDNames[idx].Name + } + return validateInstrumentsAssignerAuthorized(ctx, q, profileID, iIDs) +} - v, err := unassignProjectsFromInstrument(ctx, qtx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid || dryRun { +func assignProjectsToInstrument(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + 
instrument, err := q.GetInstrument(ctx, instrumentID) + if err != nil { + return a, err + } + v, err := validateAssignProjectsToInstrument(ctx, q, profileID, instrument, projectIDs) + if err != nil || !v.IsValid { return v, err } - return v, tx.Commit() + for _, pID := range projectIDs { + if err := q.AssignInstrumentToProject(ctx, db.AssignInstrumentToProjectParams{ + ProjectID: pID, + InstrumentID: instrumentID, + }); err != nil { + return a, err + } + } + return v, nil } -func (s instrumentAssignService) AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func unassignProjectsFromInstrument(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateProjectsAssignerAuthorized(ctx, q, profileID, instrumentID, projectIDs) + if err != nil || !a.IsValid { + return a, err + } + args := make([]db.UnassignInstrumentFromProjectBatchParams, len(projectIDs)) + for idx := range projectIDs { + args[idx] = db.UnassignInstrumentFromProjectBatchParams{ + ProjectID: projectIDs[idx], + InstrumentID: instrumentID, + } + } + q.UnassignInstrumentFromProjectBatch(ctx, args).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) + return a, nil +} - v, err := assignInstrumentsToProject(ctx, qtx, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid || dryRun { - return v, err +func assignInstrumentsToProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateAssignInstrumentsToProject(ctx, q, profileID, 
projectID, instrumentIDs) + if err != nil || !a.IsValid { + return a, err + } + args := make([]db.AssignInstrumentToProjectBatchParams, len(instrumentIDs)) + for idx := range instrumentIDs { + args[idx] = db.AssignInstrumentToProjectBatchParams{ + ProjectID: projectID, + InstrumentID: instrumentIDs[idx], + } } - return v, tx.Commit() + q.AssignInstrumentToProjectBatch(ctx, args).Exec(func(_ int, e error) { + if err != nil { + err = e + return + } + }) + if err != nil { + return a, err + } + return a, nil } -func (s instrumentAssignService) UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func unassignInstrumentsFromProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateInstrumentsAssignerAuthorized(ctx, q, profileID, instrumentIDs) + if err != nil || !a.IsValid { + return a, err + } + cc, err := q.ListProjectCountForInstruments(ctx, instrumentIDs) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) - - v, err := unassignInstrumentsFromProject(ctx, qtx, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid || dryRun { - return v, err + args := make([]db.UnassignInstrumentFromProjectBatchParams, 0) + for _, count := range cc { + if count.ProjectCount < 1 { + // invalid instrument, skipping + continue + } + if count.ProjectCount == 1 { + a.IsValid = false + a.ReasonCode = InvalidUnassign + a.Errors = append(a.Errors, fmt.Sprintf("cannot unassign instruments from project, all instruments must have at least one project assinment (%s is only assign to this project)", count.InstrumentName)) + } + args = append(args, db.UnassignInstrumentFromProjectBatchParams{ + ProjectID: projectID, + 
InstrumentID: count.InstrumentID, + }) + } + q.UnassignInstrumentFromProjectBatch(ctx, args).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return a, err } - return v, tx.Commit() + return a, nil } diff --git a/api/internal/servicev2/instrument_constant.go b/api/internal/servicev2/instrument_constant.go index ddf8180e..94191995 100644 --- a/api/internal/servicev2/instrument_constant.go +++ b/api/internal/servicev2/instrument_constant.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -15,22 +16,22 @@ type InstrumentConstantService interface { } type instrumentConstantService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewInstrumentConstantService(db *model.Database, q *model.Queries) *instrumentConstantService { +func NewInstrumentConstantService(db *Database, q *db.Queries) *instrumentConstantService { return &instrumentConstantService{db, q} } // CreateInstrumentConstants creates many instrument constants from an array of instrument constants // An InstrumentConstant is structurally the same as a timeseries and saved in the same tables func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -46,7 +47,7 @@ func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context } uu[idx] = tsNew } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return nil, err } return uu, nil @@ -54,11 +55,11 @@ func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context // DeleteInstrumentConstant removes a timeseries as an 
Instrument Constant; Does not delete underlying timeseries func (s instrumentConstantService) DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -70,5 +71,5 @@ func (s instrumentConstantService) DeleteInstrumentConstant(ctx context.Context, return err } - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/instrument_group.go b/api/internal/servicev2/instrument_group.go index ea4ee34e..132d1aaa 100644 --- a/api/internal/servicev2/instrument_group.go +++ b/api/internal/servicev2/instrument_group.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -19,36 +20,37 @@ type InstrumentGroupService interface { } type instrumentGroupService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewInstrumentGroupService(db *model.Database, q *model.Queries) *instrumentGroupService { +func NewInstrumentGroupService(db *Database, q *db.Queries) *instrumentGroupService { return &instrumentGroupService{db, q} } // CreateInstrumentGroup creates many instruments from an array of instruments -func (s instrumentGroupService) CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]model.InstrumentGroup, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - q := s.WithTx(tx) - - gg := make([]model.InstrumentGroup, len(groups)) +func (s instrumentGroupService) CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]db.CreateInstrumentGroupsBatchRow, error) { + args := make([]db.CreateInstrumentGroupsBatchParams, len(groups)) for idx, g := range groups { - 
gNew, err := q.CreateInstrumentGroup(ctx, g) - if err != nil { - return nil, err + args[idx] = db.CreateInstrumentGroupsBatchParams{ + Name: g.Name, + Description: &g.Description, + Creator: g.CreatorID, + CreateDate: g.CreateDate, + ProjectID: g.ProjectID, } - gg[idx] = gNew } - - if err := tx.Commit(); err != nil { + var err error + gg := make([]db.CreateInstrumentGroupsBatchRow, len(groups)) + s.Queries.CreateInstrumentGroupsBatch(ctx, args).QueryRow(func(i int, r db.CreateInstrumentGroupsBatchRow, e error) { + if e != nil { + err = e + return + } + gg[i] = r + }) + if err != nil { return nil, err } - return gg, nil } diff --git a/api/internal/servicev2/instrument_ipi.go b/api/internal/servicev2/instrument_ipi.go index 7d2169ab..1c7e1234 100644 --- a/api/internal/servicev2/instrument_ipi.go +++ b/api/internal/servicev2/instrument_ipi.go @@ -4,6 +4,7 @@ import ( "context" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -16,33 +17,60 @@ type IpiInstrumentService interface { } type ipiInstrumentService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewIpiInstrumentService(db *model.Database, q *model.Queries) *ipiInstrumentService { +func NewIpiInstrumentService(db *Database, q *db.Queries) *ipiInstrumentService { return &ipiInstrumentService{db, q} } func (s ipiInstrumentService) UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for _, seg := range segs { - if err := qtx.UpdateIpiSegment(ctx, seg); err != nil { - return err + updateIpiArgs := make([]db.UpdateIpiSegmentsBatchParams, len(segs)) + createMmtArgs := make([]db.CreateTimeseriesMeasurementsBatchParams, 0) + + for idx, seg := range segs { + updateIpiArgs[idx] = 
db.UpdateIpiSegmentsBatchParams{ + ID: int32(seg.ID), + InstrumentID: seg.InstrumentID, + LengthTimeseriesID: &seg.LengthTimeseriesID, + TiltTimeseriesID: seg.TiltTimeseriesID, + IncDevTimeseriesID: seg.IncDevTimeseriesID, + TempTimeseriesID: seg.TempTimeseriesID, } if seg.Length == nil { continue } - if err := qtx.CreateTimeseriesMeasurement(ctx, seg.LengthTimeseriesID, time.Now(), *seg.Length); err != nil { - return err + createMmtArgs = append(createMmtArgs, db.CreateTimeseriesMeasurementsBatchParams{ + TimeseriesID: seg.LengthTimeseriesID, + Time: time.Now(), + Value: *seg.Length, + }) + } + qtx.UpdateIpiSegmentsBatch(ctx, updateIpiArgs).Exec(func(_ int, e error) { + if e != nil { + err = e + return } + }) + if err != nil { + return err + } + qtx.CreateTimeseriesMeasurementsBatch(ctx, createMmtArgs).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err } - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/instrument_note.go b/api/internal/servicev2/instrument_note.go index 5d607fda..3d95cd70 100644 --- a/api/internal/servicev2/instrument_note.go +++ b/api/internal/servicev2/instrument_note.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -17,34 +18,46 @@ type InstrumentNoteService interface { } type instrumentNoteService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewInstrumentNoteService(db *model.Database, q *model.Queries) *instrumentNoteService { +func NewInstrumentNoteService(db *Database, q *db.Queries) *instrumentNoteService { return &instrumentNoteService{db, q} } // CreateInstrumentNote creates many instrument notes from an array of instrument notes -func (s instrumentNoteService) CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]model.InstrumentNote, error) { 
- tx, err := s.db.BeginTxx(ctx, nil) +func (s instrumentNoteService) CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]db.InstrumentNote, error) { + tx, err := s.db.Begin(ctx) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - nn := make([]model.InstrumentNote, len(notes)) + args := make([]db.CreateInstrumentNoteBatchParams, len(notes)) for idx, n := range notes { - noteNew, err := qtx.CreateInstrumentNote(ctx, n) - if err != nil { - return nil, err + args[idx] = db.CreateInstrumentNoteBatchParams{ + InstrumentID: n.InstrumentID, + Title: n.Title, + Body: n.Body, + Time: n.Time, + Creator: n.CreatorID, + CreateDate: n.CreateDate, } - nn[idx] = noteNew } - - if err := tx.Commit(); err != nil { + nn := make([]db.InstrumentNote, len(args)) + qtx.CreateInstrumentNoteBatch(ctx, args).QueryRow(func(i int, n db.InstrumentNote, e error) { + if e != nil { + err = e + return + } + nn[i] = n + }) + if err != nil { + return nil, err + } + if err := tx.Commit(ctx); err != nil { return nil, err } diff --git a/api/internal/servicev2/instrument_opts.go b/api/internal/servicev2/instrument_opts.go index adca2fdf..1d9f4e18 100644 --- a/api/internal/servicev2/instrument_opts.go +++ b/api/internal/servicev2/instrument_opts.go @@ -2,129 +2,372 @@ package servicev2 import ( "context" + "errors" "fmt" + "slices" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" ) -func handleOpts(ctx context.Context, q *model.Queries, inst model.Instrument, rt requestType) error { - switch inst.TypeID { - case saaTypeID: +func handleOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument, rt requestType) error { + saa := make([]model.Instrument, 0) + ipi := make([]model.Instrument, 0) + for _, inst := range ii { + switch inst.TypeID { + case saaTypeID: + saa = append(saa, inst) + case ipiTypeID: + ipi = append(ipi, inst) + 
default: + } + } + if len(saa) != 0 { + var err error + switch rt { + case create: + err = createSaaOptsBatch(ctx, q, saa) + case update: + err = updateSaaOptsBatch(ctx, q, saa) + } + if err != nil { + return err + } + } + if len(ipi) != 0 { + var err error + switch rt { + case create: + err = createIpiOptsBatch(ctx, q, ipi) + case update: + err = updateIpiOptsBatch(ctx, q, ipi) + } + if err != nil { + return err + } + } + return nil +} + +func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) + createSaaSegmentBatchParams := make([][]db.CreateSaaSegmentBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) + createSaaOptsParams := make([]db.CreateSaaOptsBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + + for idx, inst := range ii { opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) if err != nil { return err } - if rt == create { - for i := 1; i <= opts.NumSegments; i++ { - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + fmt.Sprintf("segment-%d-length", i) - tsConstant.Name = inst.Slug + fmt.Sprintf("segment-%d-length", i) - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - if err := q.CreateSaaSegment(ctx, model.SaaSegment{ID: i, InstrumentID: inst.ID, LengthTimeseriesID: tsNew.ID}); err != nil { - return err - } - } + createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) + createSaaSegmentBatchParams[idx] = make([]db.CreateSaaSegmentBatchParams, opts.NumSegments) - tsConstant := model.Timeseries{ 
- InstrumentID: inst.ID, + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), ParameterID: model.SaaParameterID, UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, } - tsConstant.Slug = inst.Slug + "-bottom-elevation" - tsConstant.Name = inst.Slug + "-bottom-elevation" - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - opts.BottomElevationTimeseriesID = tsNew.ID - if err := q.CreateSaaOpts(ctx, inst.ID, opts); err != nil { - return err + createSaaSegmentBatchParams[idx][i] = db.CreateSaaSegmentBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, } } - if rt == update { - if err := q.UpdateSaaOpts(ctx, inst.ID, opts); err != nil { - return err - } + createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: model.SaaParameterID, + UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createSaaOptsParams[idx] = db.CreateSaaOptsBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } + + args := slices.Concat(createTimeseriesBatchParams...) + saaArgs := slices.Concat(createSaaSegmentBatchParams...) 
+ createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) + + var err error + q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, } - if err := q.CreateTimeseriesMeasurement(ctx, opts.BottomElevationTimeseriesID, time.Now(), opts.BottomElevation); err != nil { + saaArgs[i].LengthTimeseriesID = &r.ID + }) + if err != nil { + return err + } + q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateSaaSegmentBatch(ctx, saaArgs).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return + } + createSaaOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.CreateSaaOptsBatch(ctx, createSaaOptsParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + return err +} + +func updateSaaOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + updateSaaOptsParams := make([]db.UpdateSaaOptsBatchParams, len(ii)) + createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := 
model.MapToStruct[model.SaaOpts](inst.Opts) + if err != nil { return err } - case ipiTypeID: + updateSaaOptsParams[idx] = db.UpdateSaaOptsBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } + } + var err error + q.UpdateSaaOptsBatch(ctx, updateSaaOptsParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + return err +} + +func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) + createIpiSegmentBatchParams := make([][]db.CreateIpiSegmentBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) + createIpiOptsParams := make([]db.CreateIpiOptsBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + + for idx, inst := range ii { opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) if err != nil { return err } - if rt == create { - for i := 1; i <= opts.NumSegments; i++ { - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + fmt.Sprintf("segment-%d-length", i) - tsConstant.Name = inst.Slug + fmt.Sprintf("segment-%d-length", i) - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - if err := 
q.CreateIpiSegment(ctx, model.IpiSegment{ID: i, InstrumentID: inst.ID, LengthTimeseriesID: tsNew.ID}); err != nil { - return err - } - } + createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) + createIpiSegmentBatchParams[idx] = make([]db.CreateIpiSegmentBatchParams, opts.NumSegments) - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), ParameterID: model.IpiParameterID, UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, } - tsConstant.Slug = inst.Slug + "-bottom-elevation" - tsConstant.Name = inst.Slug + "-bottom-elevation" - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - opts.BottomElevationTimeseriesID = tsNew.ID - if err := q.CreateIpiOpts(ctx, inst.ID, opts); err != nil { - return err + createIpiSegmentBatchParams[idx][i] = db.CreateIpiSegmentBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, } } - if rt == update { - if err := q.UpdateIpiOpts(ctx, inst.ID, opts); err != nil { - return err - } + createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: model.IpiParameterID, + UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createIpiOptsParams[idx] = db.CreateIpiOptsBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + Time: time.Now(), + Value: 
opts.BottomElevation, + } + } + + args := slices.Concat(createTimeseriesBatchParams...) + ipiArgs := slices.Concat(createIpiSegmentBatchParams...) + createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) + + var err error + q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return } - if err := q.CreateTimeseriesMeasurement(ctx, opts.BottomElevationTimeseriesID, time.Now(), opts.BottomElevation); err != nil { + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + ipiArgs[i].LengthTimeseriesID = &r.ID + }) + if err != nil { + return err + } + q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateIpiSegmentBatch(ctx, ipiArgs).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return + } + createIpiOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.CreateIpiOptsBatch(ctx, createIpiOptsParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + return err +} + +func updateIpiOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + updateIpiOptsParams := make([]db.UpdateIpiOptsBatchParams, len(ii)) + 
createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) + if err != nil { return err } - default: + updateIpiOptsParams[idx] = db.UpdateIpiOptsBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } } - return nil + var err error + q.UpdateIpiOptsBatch(ctx, updateIpiOptsParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + return err } diff --git a/api/internal/servicev2/instrument_saa.go b/api/internal/servicev2/instrument_saa.go index 9983bf56..b14fbfda 100644 --- a/api/internal/servicev2/instrument_saa.go +++ b/api/internal/servicev2/instrument_saa.go @@ -4,6 +4,7 @@ import ( "context" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -16,20 +17,20 @@ type SaaInstrumentService interface { } type saaInstrumentService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewSaaInstrumentService(db *model.Database, q *model.Queries) *saaInstrumentService { +func NewSaaInstrumentService(db *Database, q *db.Queries) *saaInstrumentService { return &saaInstrumentService{db, q} } func (s saaInstrumentService) UpdateSaaSegments(ctx context.Context, segs []model.SaaSegment) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -44,5 +45,5 @@ func (s 
saaInstrumentService) UpdateSaaSegments(ctx context.Context, segs []mode return err } } - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/instrument_status.go b/api/internal/servicev2/instrument_status.go index d3396146..433eaeb8 100644 --- a/api/internal/servicev2/instrument_status.go +++ b/api/internal/servicev2/instrument_status.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -15,20 +16,20 @@ type InstrumentStatusService interface { } type instrumentStatusService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewInstrumentStatusService(db *model.Database, q *model.Queries) *instrumentStatusService { +func NewInstrumentStatusService(db *Database, q *db.Queries) *instrumentStatusService { return &instrumentStatusService{db, q} } func (s instrumentStatusService) CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -38,5 +39,5 @@ func (s instrumentStatusService) CreateOrUpdateInstrumentStatus(ctx context.Cont } } - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/measurement.go b/api/internal/servicev2/measurement.go index 05124fdc..642845cc 100644 --- a/api/internal/servicev2/measurement.go +++ b/api/internal/servicev2/measurement.go @@ -4,6 +4,7 @@ import ( "context" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -19,16 +20,16 @@ type MeasurementService interface { CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) 
([]model.MeasurementCollection, error) CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) - DeleteTimeseriesMeasurementsByRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error - DeleteTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error + DeleteTimeseriesMeasurementsRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error + DeleteTimeseriesNoteRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error } type measurementService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewMeasurementService(db *model.Database, q *model.Queries) *measurementService { +func NewMeasurementService(db *Database, q *db.Queries) *measurementService { return &measurementService{db, q} } @@ -52,74 +53,124 @@ func createMeasurements(ctx context.Context, mc []model.MeasurementCollection, m } // CreateTimeseriesMeasurements creates many timeseries from an array of timeseries -func (s measurementService) CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s measurementService) CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := createMeasurements(ctx, mc, qtx.CreateTimeseriesMeasurement, qtx.CreateTimeseriesNote); err != nil { - return nil, err + mmts := make([]db.CreateTimeseriesMeasruementsBatchParams, 0) + notes := make([]db.CreateTimeseriesNotesBatchParams, 0) + + for idx := range mc { + for _, m := range 
mc[idx].Items { + mmts = append(mmts, db.CreateTimeseriesMeasruementsBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Value: float64(m.Value), + }) + notes = append(notes, db.CreateTimeseriesNotesBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Masked: m.Masked, + Validated: m.Validated, + Annotation: m.Annotation, + }) + } } - - if err := tx.Commit(); err != nil { - return nil, err + qtx.CreateTimeseriesMeasruementsBatch(ctx, mmts).Exec(func(_ int, e error) { + if e != nil { + err = e + } + }) + if err != nil { + return err } - - return mc, nil + qtx.CreateTimeseriesNotesBatch(ctx, notes).Exec(func(_ int, e error) { + if e != nil { + err = e + } + }) + if err != nil { + return err + } + return tx.Commit(ctx) } // CreateOrUpdateTimeseriesMeasurements creates many timeseries from an array of timeseries // If a timeseries measurement already exists for a given timeseries_id and time, the value is updated -func (s measurementService) CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s measurementService) CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := createMeasurements(ctx, mc, qtx.CreateOrUpdateTimeseriesMeasurement, qtx.CreateOrUpdateTimeseriesNote); err != nil { - return nil, err + mmts := make([]db.CreateOrUpdateTimeseriesMeasurementsBatchParams, 0) + notes := make([]db.CreateOrUpdateTimeseriesNoteBatchParams, 0) + + for idx := range mc { + for _, m := range mc[idx].Items { + mmts = append(mmts, db.CreateOrUpdateTimeseriesMeasurementsBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Value: float64(m.Value), + }) + notes = append(notes, 
db.CreateOrUpdateTimeseriesNoteBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Masked: m.Masked, + Validated: m.Validated, + Annotation: m.Annotation, + }) + } } - - if err := tx.Commit(); err != nil { - return nil, err + qtx.CreateOrUpdateTimeseriesMeasurementsBatch(ctx, mmts).Exec(func(_ int, e error) { + if e != nil { + err = e + } + }) + if err != nil { + return err } - - return mc, nil + qtx.CreateOrUpdateTimeseriesNoteBatch(ctx, notes).Exec(func(_ int, e error) { + if e != nil { + err = e + } + }) + if err != nil { + return err + } + return tx.Commit(ctx) } // UpdateTimeseriesMeasurements updates many timeseries measurements, "overwriting" time and values to match paylaod -func (s measurementService) UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s measurementService) UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) for _, c := range mc { - if err := qtx.DeleteTimeseriesMeasurementsByRange(ctx, c.TimeseriesID, tw.After, tw.Before); err != nil { - return nil, err + if err := qtx.DeleteTimeseriesMeasurementsRange(ctx, db.DeleteTimeseriesMeasurementsRangeParams{ + TimeseriesID: c.TimeseriesID, + After: tw.After, + Before: tw.Before, + }); err != nil { + return err } - if err := qtx.DeleteTimeseriesNote(ctx, c.TimeseriesID, tw.After, tw.Before); err != nil { - return nil, err + if err := qtx.DeleteTimeseriesNoteRange(ctx, db.DeleteTimeseriesNoteRangeParams{ + TimeseriesID: c.TimeseriesID, + After: tw.After, + Before: tw.Before, + }); err != nil { + return err } } - - if err := createMeasurements(ctx, mc, qtx.CreateTimeseriesMeasurement, qtx.CreateTimeseriesNote); err != nil { - 
return nil, err - } - - if err := tx.Commit(); err != nil { - return nil, err - } - - return mc, nil + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/measurement_inclinometer.go b/api/internal/servicev2/measurement_inclinometer.go index ed3e139c..dec5e4fa 100644 --- a/api/internal/servicev2/measurement_inclinometer.go +++ b/api/internal/servicev2/measurement_inclinometer.go @@ -4,6 +4,7 @@ import ( "context" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -18,22 +19,22 @@ type InclinometerMeasurementService interface { } type inclinometerMeasurementService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewInclinometerMeasurementService(db *model.Database, q *model.Queries) *inclinometerMeasurementService { +func NewInclinometerMeasurementService(db *Database, q *db.Queries) *inclinometerMeasurementService { return &inclinometerMeasurementService{db, q} } // CreateInclinometerMeasurements creates many inclinometer from an array of inclinometer // If a inclinometer measurement already exists for a given timeseries_id and time, the values is updated func (s inclinometerMeasurementService) CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -47,7 +48,7 @@ func (s inclinometerMeasurementService) CreateOrUpdateInclinometerMeasurements(c } } } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return nil, err } @@ -56,11 +57,11 @@ func (s inclinometerMeasurementService) CreateOrUpdateInclinometerMeasurements(c // CreateTimeseriesConstant creates timeseries constant 
func (s inclinometerMeasurementService) CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) diff --git a/api/internal/servicev2/opendcs.go b/api/internal/servicev2/opendcs.go index 8361a2dc..704e9bdb 100644 --- a/api/internal/servicev2/opendcs.go +++ b/api/internal/servicev2/opendcs.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" ) @@ -11,10 +12,10 @@ type OpendcsService interface { } type opendcsService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewOpendcsService(db *model.Database, q *model.Queries) *opendcsService { +func NewOpendcsService(db *Database, q *db.Queries) *opendcsService { return &opendcsService{db, q} } diff --git a/api/internal/servicev2/plot_config.go b/api/internal/servicev2/plot_config.go index a244b539..a15eb10d 100644 --- a/api/internal/servicev2/plot_config.go +++ b/api/internal/servicev2/plot_config.go @@ -3,13 +3,14 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type PlotConfigService interface { - ListPlotConfigs(ctx context.Context, projectID uuid.UUID) ([]model.PlotConfig, error) - GetPlotConfig(ctx context.Context, plotconfigID uuid.UUID) (model.PlotConfig, error) + ListPlotConfigs(ctx context.Context, projectID uuid.UUID) ([]db.VPlotConfiguration, error) + GetPlotConfig(ctx context.Context, plotconfigID uuid.UUID) (db.VPlotConfiguration, error) DeletePlotConfig(ctx context.Context, projectID, plotConfigID uuid.UUID) error plotConfigBullseyePlotService plotConfigContourPlotService @@ 
-18,10 +19,57 @@ type PlotConfigService interface { } type plotConfigService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewPlotConfigService(db *model.Database, q *model.Queries) *plotConfigService { +func NewPlotConfigService(db *Database, q *db.Queries) *plotConfigService { return &plotConfigService{db, q} } + +func createPlotConfigCommon(ctx context.Context, q *db.Queries, pc model.PlotConfig) (uuid.UUID, error) { + pcID, err := q.CreatePlotConfig(ctx, db.CreatePlotConfigParams{ + Name: pc.Name, + ProjectID: pc.ProjectID, + Creator: pc.CreatorID, + CreateDate: pc.CreateDate, + PlotType: db.PlotType(pc.PlotType), + }) + if err != nil { + return pcID, err + } + err = q.CreatePlotConfigSettings(ctx, db.CreatePlotConfigSettingsParams{ + ID: pcID, + ShowMasked: pc.ShowMasked, + ShowNonvalidated: pc.ShowNonValidated, + ShowComments: pc.ShowComments, + AutoRange: pc.AutoRange, + DateRange: pc.DateRange, + Threshold: int32(pc.Threshold), + }) + return pcID, err +} + +func updatePlotConfigCommon(ctx context.Context, q *db.Queries, pc model.PlotConfig) error { + if err := q.UpdatePlotConfig(ctx, db.UpdatePlotConfigParams{ + ProjectID: pc.ProjectID, + ID: pc.ID, + Name: pc.Name, + Updater: pc.UpdaterID, + UpdateDate: pc.UpdateDate, + }); err != nil { + return err + } + if err := q.DeletePlotConfigSettings(ctx, pc.ID); err != nil { + return err + } + return q.CreatePlotConfigSettings(ctx, db.CreatePlotConfigSettingsParams{ + ID: pc.ID, + ShowMasked: pc.ShowMasked, + ShowNonvalidated: pc.ShowNonValidated, + ShowComments: pc.ShowComments, + AutoRange: pc.AutoRange, + DateRange: pc.DateRange, + Threshold: int32(pc.Threshold), + }) +} diff --git a/api/internal/servicev2/plot_config_bullseye.go b/api/internal/servicev2/plot_config_bullseye.go index 50fff7ce..ea22a406 100644 --- a/api/internal/servicev2/plot_config_bullseye.go +++ b/api/internal/servicev2/plot_config_bullseye.go @@ -3,79 +3,70 @@ package servicev2 import ( "context" + 
"github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type plotConfigBullseyePlotService interface { - CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) - UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) + CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) + UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]model.PlotConfigMeasurementBullseyePlot, error) } -func (s plotConfigService) CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s plotConfigService) CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) pc.PlotType = model.BullseyePlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotBullseyeConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := qtx.CreatePlotBullseyeConfig(ctx, db.CreatePlotBullseyeConfigParams{ + PlotConfigID: pcID, + XAxisTimeseriesID: &pc.Display.XAxisTimeseriesID, + YAxisTimeseriesID: 
&pc.Display.YAxisTimeseriesID, + }); err != nil { + return a, err } - - pcNew, err := qtx.GetPlotConfig(ctx, pcID) + a, err = qtx.GetPlotConfig(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() - - return pcNew, err + err = tx.Commit(ctx) + return a, err } -func (s plotConfigService) UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s plotConfigService) UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.UpdatePlotBullseyeConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err + if err := qtx.UpdatePlotBullseyeConfig(ctx, db.UpdatePlotBullseyeConfigParams{ + PlotConfigID: pc.ID, + XAxisTimeseriesID: &pc.Display.XAxisTimeseriesID, + YAxisTimeseriesID: &pc.Display.YAxisTimeseriesID, + }); err != nil { + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } diff --git a/api/internal/servicev2/plot_config_contour.go b/api/internal/servicev2/plot_config_contour.go index 30779136..9c0feb10 100644 --- a/api/internal/servicev2/plot_config_contour.go +++ 
b/api/internal/servicev2/plot_config_contour.go @@ -2,122 +2,134 @@ package servicev2 import ( "context" + "errors" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type plotConfigContourPlotService interface { - CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) - UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) + CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (db.VPlotConfiguration, error) + UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (db.VPlotConfiguration, error) ListPlotConfigTimesContourPlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]time.Time, error) GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) } -func (s plotConfigService) CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s plotConfigService) CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) pc.PlotType = model.ContourPlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) + if err := qtx.CreatePlotContourConfig(ctx, db.CreatePlotContourConfigParams{ + PlotConfigID: pcID, + Time: pc.Display.Time, + LocfBackfill: pc.Display.LocfBackfill, + GradientSmoothing: pc.Display.GradientSmoothing, + ContourSmoothing: pc.Display.ContourSmoothing, + ShowLabels: 
pc.Display.ShowLabels, + }); err != nil { + return a, err + } + if err := createPlotContourConfigTimeseriesBatch(ctx, qtx, pcID, pc.Display.TimeseriesIDs); err != nil { + return a, err + } + a, err = qtx.GetPlotConfig(ctx, pcID) if err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotContourConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - for _, tsID := range pc.Display.TimeseriesIDs { - if err := qtx.CreatePlotContourConfigTimeseries(ctx, pcID, tsID); err != nil { - return model.PlotConfig{}, err - } + return a, err } + err = tx.Commit(ctx) - pcNew, err := qtx.GetPlotConfig(ctx, pcID) - if err != nil { - return model.PlotConfig{}, err - } - - err = tx.Commit() - - return pcNew, err + return a, err } -func (s plotConfigService) UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s plotConfigService) UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.UpdatePlotContourConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if err := 
qtx.UpdatePlotContourConfig(ctx, db.UpdatePlotContourConfigParams{ + PlotConfigID: pc.ID, + Time: pc.Display.Time, + LocfBackfill: pc.Display.LocfBackfill, + GradientSmoothing: pc.Display.GradientSmoothing, + ContourSmoothing: pc.Display.ContourSmoothing, + ShowLabels: pc.Display.ShowLabels, + }); err != nil { + return a, err } if err := qtx.DeleteAllPlotContourConfigTimeseries(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err + return a, err } - - for _, tsID := range pc.Display.TimeseriesIDs { - if err := qtx.CreatePlotContourConfigTimeseries(ctx, pc.ID, tsID); err != nil { - return model.PlotConfig{}, err - } + if err := createPlotContourConfigTimeseriesBatch(ctx, qtx, pc.ID, pc.Display.TimeseriesIDs); err != nil { + return a, err } pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) { - q := s.db.Queries() - - mm, err := q.ListPlotConfigMeasurementsContourPlot(ctx, plotConfigID, t) +func (s plotConfigService) GetPlotConfigMeasurementsContourPlot(ctx context.Context, pcID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) { + mm, err := s.Queries.ListPlotConfigMeasurementsContourPlot(ctx, db.ListPlotConfigMeasurementsContourPlotParams{ + PlotConfigID: pcID, + Time: t, + }) if err != nil { return model.AggregatePlotConfigMeasurementsContourPlot{}, err } - am := model.AggregatePlotConfigMeasurementsContourPlot{ X: make([]float64, len(mm)), Y: make([]float64, len(mm)), Z: make([]*float64, len(mm)), } - for idx := range mm { + z, ok := mm[idx].Z.(*float64) + if !ok { + return model.AggregatePlotConfigMeasurementsContourPlot{}, errors.New("failed type assertion: interface to float64") + } am.X[idx] = mm[idx].X am.Y[idx] = 
mm[idx].Y - am.Z[idx] = mm[idx].Z + am.Z[idx] = z } - return am, nil } + +func createPlotContourConfigTimeseriesBatch(ctx context.Context, q *db.Queries, pcID uuid.UUID, tt []uuid.UUID) error { + args := make([]db.CreatePlotContourConfigTimeseriesBatchParams, len(tt)) + for idx, tsID := range tt { + args[idx] = db.CreatePlotContourConfigTimeseriesBatchParams{ + PlotContourConfigID: pcID, + TimeseriesID: tsID, + } + } + var err error + q.CreatePlotContourConfigTimeseriesBatch(ctx, args).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + return err +} diff --git a/api/internal/servicev2/plot_config_profile.go b/api/internal/servicev2/plot_config_profile.go index cd6adf44..d7a42e3b 100644 --- a/api/internal/servicev2/plot_config_profile.go +++ b/api/internal/servicev2/plot_config_profile.go @@ -3,78 +3,67 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" ) type plotConfigProfilePlotService interface { - CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) - UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) + CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (db.VPlotConfiguration, error) + UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (db.VPlotConfiguration, error) } -func (s plotConfigService) CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s plotConfigService) CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx 
:= s.WithTx(tx) pc.PlotType = model.ProfilePlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotProfileConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := qtx.CreatePlotProfileConfig(ctx, db.CreatePlotProfileConfigParams{ + PlotConfigID: pcID, + InstrumentID: pc.Display.InstrumentID, + }); err != nil { + return a, err } - pcNew, err := qtx.GetPlotConfig(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s plotConfigService) UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.UpdatePlotProfileConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := qtx.UpdatePlotProfileConfig(ctx, db.UpdatePlotProfileConfigParams{ + PlotConfigID: pc.ID, + InstrumentID: pc.Display.InstrumentID, + }); err != nil { + return a, err } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, 
pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + a, err = qtx.GetPlotConfig(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } + err = tx.Commit(ctx) - err = tx.Commit() - - return pcNew, err + return a, err } diff --git a/api/internal/servicev2/plot_config_scatter_line.go b/api/internal/servicev2/plot_config_scatter_line.go index ac983a72..7175d80a 100644 --- a/api/internal/servicev2/plot_config_scatter_line.go +++ b/api/internal/servicev2/plot_config_scatter_line.go @@ -3,115 +3,97 @@ package servicev2 import ( "context" "fmt" - "log" "strings" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type plotConfigScatterLinePlotService interface { - CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) - UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) + CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) + UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) } -func (s plotConfigService) CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s plotConfigService) CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) pc.PlotType = model.ScatterLinePlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) - if err != nil { - 
return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err := validateCreateTraces(ctx, qtx, pcID, pc.Display.Traces); err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotConfigScatterLineLayout(ctx, pcID, pc.Display.Layout); err != nil { - return model.PlotConfig{}, err + if err := qtx.CreatePlotConfigScatterLineLayout(ctx, db.CreatePlotConfigScatterLineLayoutParams{ + PlotConfigID: pcID, + YAxisTitle: pc.Display.Layout.YAxisTitle, + Y2AxisTitle: pc.Display.Layout.Y2AxisTitle, + }); err != nil { + return a, err } - if err := validateCreateCustomShapes(ctx, qtx, pcID, pc.Display.Layout.CustomShapes); err != nil { - return model.PlotConfig{}, err + return a, err } pcNew, err := qtx.GetPlotConfig(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s plotConfigService) UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - log.Printf("fails on delete %s", pc.ID) - return model.PlotConfig{}, err + if err := qtx.DeleteAllPlotConfigTimeseriesTraces(ctx, 
&pc.ID); err != nil { + return a, err } - - if err := qtx.DeleteAllPlotConfigTimeseriesTraces(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err + if err := qtx.DeleteAllPlotConfigCustomShapes(ctx, &pc.ID); err != nil { + return a, err } - - if err := qtx.DeleteAllPlotConfigCustomShapes(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - log.Printf("fails on create %s, %+v", pc.ID, pc.PlotConfigSettings) - return model.PlotConfig{}, err - } - if err := validateCreateTraces(ctx, qtx, pc.ID, pc.Display.Traces); err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.UpdatePlotConfigScatterLineLayout(ctx, pc.ID, pc.Display.Layout); err != nil { - return model.PlotConfig{}, err + if err := qtx.UpdatePlotConfigScatterLineLayout(ctx, db.UpdatePlotConfigScatterLineLayoutParams{ + PlotConfigID: pc.ID, + YAxisTitle: pc.Display.Layout.YAxisTitle, + Y2AxisTitle: pc.Display.Layout.Y2AxisTitle, + }); err != nil { + return a, err } - if err := validateCreateCustomShapes(ctx, qtx, pc.ID, pc.Display.Layout.CustomShapes); err != nil { - return model.PlotConfig{}, err + return a, err } pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func validateCreateTraces(ctx context.Context, q *model.Queries, pcID uuid.UUID, trs []model.PlotConfigScatterLineTimeseriesTrace) error { - for _, tr := range trs { +func validateCreateTraces(ctx context.Context, q *db.Queries, pcID uuid.UUID, trs []model.PlotConfigScatterLineTimeseriesTrace) error { + args := make([]db.CreatePlotConfigTimeseriesTracesBatchParams, len(trs)) + for idx, tr := range trs { tr.PlotConfigurationID = pcID - if err := validateColor(tr.Color); err != nil { return err } @@ -121,15 +103,31 @@ func validateCreateTraces(ctx context.Context, q *model.Queries, pcID 
uuid.UUID, if tr.YAxis == "" { tr.YAxis = "y1" } - - if err := q.CreatePlotConfigTimeseriesTrace(ctx, tr); err != nil { - return err + args[idx] = db.CreatePlotConfigTimeseriesTracesBatchParams{ + PlotConfigurationID: &tr.PlotConfigurationID, + TimeseriesID: &tr.TimeseriesID, + TraceOrder: int32(tr.TraceOrder), + Color: tr.Color, + LineStyle: db.LineStyle(tr.LineStyle), + Width: tr.Width, + ShowMarkers: tr.ShowMarkers, + YAxis: db.YAxis(tr.YAxis), } } + var err error + q.CreatePlotConfigTimeseriesTracesBatch(ctx, args).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } return nil } -func validateCreateCustomShapes(ctx context.Context, q *model.Queries, pcID uuid.UUID, css []model.PlotConfigScatterLineCustomShape) error { +func validateCreateCustomShapes(ctx context.Context, q *db.Queries, pcID uuid.UUID, css []model.PlotConfigScatterLineCustomShape) error { for _, cs := range css { cs.PlotConfigurationID = pcID @@ -137,7 +135,13 @@ func validateCreateCustomShapes(ctx context.Context, q *model.Queries, pcID uuid return err } - if err := q.CreatePlotConfigCustomShape(ctx, cs); err != nil { + if err := q.CreatePlotConfigCustomShape(ctx, db.CreatePlotConfigCustomShapeParams{ + PlotConfigurationID: &pcID, + Enabled: cs.Enabled, + Name: cs.Name, + DataPoint: cs.DataPoint, + Color: cs.Color, + }); err != nil { return err } } diff --git a/api/internal/servicev2/profile.go b/api/internal/servicev2/profile.go index 109a9b4b..963ecf2a 100644 --- a/api/internal/servicev2/profile.go +++ b/api/internal/servicev2/profile.go @@ -5,6 +5,7 @@ import ( "errors" "strings" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -23,79 +24,26 @@ type ProfileService interface { } type profileService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewProfileService(db *model.Database, q *model.Queries) 
*profileService { +func NewProfileService(db *Database, q *db.Queries) *profileService { return &profileService{db, q} } -func (s profileService) GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (model.Profile, error) { - var p model.Profile +func (s profileService) GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (db.VProfile, error) { + var a db.VProfile var err error if claims.CacUID != nil { - p, err = s.GetProfileWithTokensForEDIPI(ctx, *claims.CacUID) + a, err = s.Queries.GetProfileForEDIPI(ctx, int64(*claims.CacUID)) } else { - p, err = s.GetProfileWithTokensForEmail(ctx, claims.Email) + a, err = s.GetProfileForEmail(ctx, claims.Email) } if err != nil { - return model.Profile{}, err + return a, err } - return p, nil -} - -func (s profileService) GetProfileWithTokensForEDIPI(ctx context.Context, edipi int) (model.Profile, error) { - p, err := s.GetProfileForEDIPI(ctx, edipi) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -func (s profileService) GetProfileWithTokensForEmail(ctx context.Context, email string) (model.Profile, error) { - p, err := s.GetProfileForEmail(ctx, email) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -func (s profileService) GetProfileWithTokensForUsername(ctx context.Context, username string) (model.Profile, error) { - p, err := s.GetProfileForUsername(ctx, username) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -// GetProfileForTokenID returns a profile given a token ID -func (s profileService) GetProfileWithTokensForTokenID(ctx context.Context, 
tokenID string) (model.Profile, error) { - p, err := s.GetProfileForTokenID(ctx, tokenID) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil + return a, nil } // UpdateProfileForClaims syncs a database profile to the provided token claims @@ -108,12 +56,12 @@ func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Prof if claimsMatchProfile { return p, nil } - if claims.CacUID != nil && !claimsMatchProfile { - if err := s.UpdateProfileForEDIPI(ctx, *claims.CacUID, model.ProfileInfo{ + if err := s.Queries.UpdateProfileForEDIPI(ctx, db.UpdateProfileForEDIPIParams{ Username: claims.PreferredUsername, - DisplayName: claims.Name, Email: claims.Email, + DisplayName: claims.Name, + Edipi: int64(*claims.CacUID), }); err != nil { return p, err } @@ -123,11 +71,11 @@ func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Prof return p, nil } - if strings.ToLower(p.Email) == strings.ToLower(claims.Email) && !claimsMatchProfile { - if err := s.UpdateProfileForEmail(ctx, claims.Email, model.ProfileInfo{ + if err := s.Queries.UpdateProfileForEmail(ctx, db.UpdateProfileForEmailParams{ Username: claims.PreferredUsername, DisplayName: claims.Name, + Email: claims.Email, }); err != nil { return p, err } diff --git a/api/internal/servicev2/project.go b/api/internal/servicev2/project.go index 41e29e6e..324932c3 100644 --- a/api/internal/servicev2/project.go +++ b/api/internal/servicev2/project.go @@ -7,6 +7,7 @@ import ( "mime/multipart" "os" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/img" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" @@ -24,87 +25,91 @@ type ProjectService interface { GetProjectCount(ctx context.Context) (model.ProjectCount, error) GetProject(ctx context.Context, projectID uuid.UUID) 
(model.Project, error) CreateProject(ctx context.Context, p model.Project) (model.IDSlugName, error) - CreateProjectBulk(ctx context.Context, projects []model.Project) ([]model.IDSlugName, error) + CreateProjectsBatch(ctx context.Context, projects []model.Project) ([]db.CreateProjectsBatchRow, error) UpdateProject(ctx context.Context, p model.Project) (model.Project, error) UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error DeleteFlagProject(ctx context.Context, projectID uuid.UUID) error } type projectService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewProjectService(db *model.Database, q *model.Queries) *projectService { +func NewProjectService(db *Database, q *db.Queries) *projectService { return &projectService{db, q} } type uploader func(ctx context.Context, r io.Reader, rawPath, bucketName string) error // CreateProjectBulk creates one or more projects from an array of projects -func (s projectService) CreateProjectBulk(ctx context.Context, projects []model.Project) ([]model.IDSlugName, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - pp := make([]model.IDSlugName, len(projects)) +func (s projectService) CreateProjectBatch(ctx context.Context, projects []model.Project) ([]db.CreateProjectsBatchRow, error) { + args := make([]db.CreateProjectsBatchParams, len(projects)) for idx, p := range projects { - aa, err := qtx.CreateProject(ctx, p) - if err != nil { - return nil, err + args[idx] = db.CreateProjectsBatchParams{ + FederalID: p.FederalID, + Name: p.Name, + DistrictID: p.DistrictID, + Creator: p.CreatorID, + CreateDate: p.CreateDate, } - pp[idx] = aa } - if err := tx.Commit(); err != nil { + var err error + pp := make([]db.CreateProjectsBatchRow, len(args)) + s.Queries.CreateProjectsBatch(ctx, args).QueryRow(func(i int, r db.CreateProjectsBatchRow, e error) { + if e 
!= nil { + err = e + return + } + pp[i] = r + }) + if err != nil { return nil, err } return pp, nil } -// UpdateProject updates a project -func (s projectService) UpdateProject(ctx context.Context, p model.Project) (model.Project, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s projectService) UpdateProject(ctx context.Context, p model.Project) (db.VProject, error) { + var a db.VProject + tx, err := s.db.Begin(ctx) if err != nil { - return model.Project{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateProject(ctx, p); err != nil { - return model.Project{}, err + if _, err := qtx.UpdateProject(ctx, db.UpdateProjectParams{ + ID: p.ID, + Name: p.Name, + Updater: p.UpdaterID, + UpdateDate: p.UpdateDate, + DistrictID: p.DistrictID, + FederalID: p.FederalID, + }); err != nil { + return a, err } - updated, err := qtx.GetProject(ctx, p.ID) if err != nil { - return model.Project{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.Project{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return updated, nil } func (s projectService) UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) p, err := qtx.GetProject(ctx, projectID) if err != nil { return err } - src, err := file.Open() if err != nil { return err @@ -115,12 +120,13 @@ func (s projectService) UploadProjectImage(ctx context.Context, projectID uuid.U return err } defer dst.Close() - if err := img.Resize(src, dst, image.Rect(0, 0, 480, 480)); err != nil { return err } - - if err := qtx.UpdateProjectImage(ctx, file.Filename, projectID); err != nil { + if err := qtx.UpdateProjectImage(ctx, db.UpdateProjectImageParams{ + ID: projectID, + Image: 
&file.Filename, + }); err != nil { return err } @@ -128,5 +134,5 @@ func (s projectService) UploadProjectImage(ctx context.Context, projectID uuid.U return err } - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/project_role.go b/api/internal/servicev2/project_role.go index b29dba51..24e0114d 100644 --- a/api/internal/servicev2/project_role.go +++ b/api/internal/servicev2/project_role.go @@ -3,51 +3,53 @@ package servicev2 import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/google/uuid" ) type ProjectRoleService interface { - ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]model.ProjectMembership, error) - GetProjectMembership(ctx context.Context, roleID uuid.UUID) (model.ProjectMembership, error) - AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (model.ProjectMembership, error) + ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]db.ListProjectMembersRow, error) + GetProjectMembership(ctx context.Context, roleID uuid.UUID) (db.GetProjectMembershipRow, error) + AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (db.GetProjectMembershipRow, error) RemoveProjectMemberRole(ctx context.Context, projectID, profileID, roleID uuid.UUID) error IsProjectAdmin(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) IsProjectMember(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) } type projectRoleService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewProjectRoleService(db *model.Database, q *model.Queries) *projectRoleService { +func NewProjectRoleService(db *Database, q *db.Queries) *projectRoleService { return &projectRoleService{db, q} } // AddProjectMemberRole adds a role to a user for a specific project -func (s projectRoleService) AddProjectMemberRole(ctx 
context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (model.ProjectMembership, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s projectRoleService) AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (db.GetProjectMembershipRow, error) { + var a db.GetProjectMembershipRow + tx, err := s.db.Begin(ctx) if err != nil { - return model.ProjectMembership{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pprID, err := qtx.AddProjectMemberRole(ctx, projectID, profileID, roleID, grantedBy) + pprID, err := qtx.CreateProfileProjectRole(ctx, db.CreateProfileProjectRoleParams{ + ProjectID: projectID, + ProfileID: profileID, + RoleID: roleID, + GrantedBy: &grantedBy, + }) if err != nil { - return model.ProjectMembership{}, err + return a, err } - - pm, err := qtx.GetProjectMembership(ctx, pprID) + a, err = qtx.GetProjectMembership(ctx, pprID) if err != nil { - return model.ProjectMembership{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.ProjectMembership{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - - return pm, nil + return a, nil } diff --git a/api/internal/servicev2/report_config.go b/api/internal/servicev2/report_config.go index 86f42e4c..306d5219 100644 --- a/api/internal/servicev2/report_config.go +++ b/api/internal/servicev2/report_config.go @@ -5,73 +5,102 @@ import ( "encoding/json" "github.com/USACE/instrumentation-api/api/internal/cloud" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type ReportConfigService interface { - ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]model.ReportConfig, error) - CreateReportConfig(ctx context.Context, rc model.ReportConfig) (model.ReportConfig, error) + ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]db.VReportConfig, 
error) + CreateReportConfig(ctx context.Context, rc model.ReportConfig) (db.VReportConfig, error) UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error DeleteReportConfig(ctx context.Context, rcID uuid.UUID) error - GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (model.ReportConfigWithPlotConfigs, error) - CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (model.ReportDownloadJob, error) - GetReportDownloadJob(ctx context.Context, jobID, profileID uuid.UUID) (model.ReportDownloadJob, error) + GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (ReportConfigWithPlotConfigs, error) + CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (db.ReportDownloadJob, error) + GetReportDownloadJob(ctx context.Context, jobID, profileID uuid.UUID) (db.ReportDownloadJob, error) UpdateReportDownloadJob(ctx context.Context, j model.ReportDownloadJob) error } type reportConfigService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries pubsub cloud.Pubsub mockQueue bool } -func NewReportConfigService(db *model.Database, q *model.Queries, ps cloud.Pubsub, mockQueue bool) *reportConfigService { +func NewReportConfigService(db *Database, q *db.Queries, ps cloud.Pubsub, mockQueue bool) *reportConfigService { return &reportConfigService{db, q, ps, mockQueue} } -func (s reportConfigService) CreateReportConfig(ctx context.Context, rc model.ReportConfig) (model.ReportConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s reportConfigService) CreateReportConfig(ctx context.Context, rc model.ReportConfig) (db.VReportConfig, error) { + var a db.VReportConfig + tx, err := s.db.Begin(ctx) if err != nil { - return model.ReportConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - rcID, err := qtx.CreateReportConfig(ctx, rc) + rcID, err := qtx.CreateReportConfig(ctx, 
db.CreateReportConfigParams{ + Name: rc.Name, + ProjectID: rc.ProjectID, + Creator: rc.CreatorID, + Description: rc.Description, + DateRange: &rc.GlobalOverrides.DateRange.Value, + DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, + ShowMasked: &rc.GlobalOverrides.ShowMasked.Value, + ShowMaskedEnabled: &rc.GlobalOverrides.ShowMasked.Enabled, + ShowNonvalidated: &rc.GlobalOverrides.ShowNonvalidated.Value, + ShowNonvalidatedEnabled: &rc.GlobalOverrides.ShowNonvalidated.Enabled, + }) if err != nil { - return model.ReportConfig{}, err + return a, err } - - for _, pc := range rc.PlotConfigs { - if err := qtx.AssignReportConfigPlotConfig(ctx, rcID, pc.ID); err != nil { - return model.ReportConfig{}, err + args := make([]db.AssignReportConfigPlotConfigBatchParams, len(rc.PlotConfigs)) + for idx := range rc.PlotConfigs { + args[idx] = db.AssignReportConfigPlotConfigBatchParams{ + ReportConfigID: rcID, + PlotConfigID: rc.PlotConfigs[idx].ID, } } - - rcNew, err := qtx.GetReportConfigByID(ctx, rcID) + qtx.AssignReportConfigPlotConfigBatch(ctx, args).Exec(func(_ int, e error) { + if e != nil { + err = e + } + }) if err != nil { - return model.ReportConfig{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.ReportConfig{}, err + a, err = qtx.GetReportConfig(ctx, rcID) + if err != nil { + return a, err + } + if err := tx.Commit(ctx); err != nil { + return a, err } - return rcNew, nil + return a, nil } func (s reportConfigService) UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateReportConfig(ctx, rc); err != nil { + if err := qtx.UpdateReportConfig(ctx, db.UpdateReportConfigParams{ + ID: rc.ID, + Name: rc.Name, + Updater: rc.UpdaterID, + Description: rc.Description, + DateRange: &rc.GlobalOverrides.DateRange.Value, + 
DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, + ShowMasked: &rc.GlobalOverrides.ShowMasked.Value, + ShowMaskedEnabled: &rc.GlobalOverrides.ShowMasked.Enabled, + ShowNonvalidated: &rc.GlobalOverrides.ShowNonvalidated.Value, + ShowNonvalidatedEnabled: &rc.GlobalOverrides.ShowNonvalidated.Enabled, + }); err != nil { return err } @@ -79,65 +108,77 @@ func (s reportConfigService) UpdateReportConfig(ctx context.Context, rc model.Re return err } - for _, pc := range rc.PlotConfigs { - if err := qtx.AssignReportConfigPlotConfig(ctx, rc.ID, pc.ID); err != nil { - return err + args := make([]db.AssignReportConfigPlotConfigBatchParams, len(rc.PlotConfigs)) + for idx := range rc.PlotConfigs { + args[idx] = db.AssignReportConfigPlotConfigBatchParams{ + ReportConfigID: rc.ID, + PlotConfigID: rc.PlotConfigs[idx].ID, + } + } + qtx.AssignReportConfigPlotConfigBatch(ctx, args).Exec(func(_ int, e error) { + if e != nil { + err = e } + }) + if err != nil { + return err } - return tx.Commit() + return tx.Commit(ctx) } -func (s reportConfigService) GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (model.ReportConfigWithPlotConfigs, error) { - q := s.db.Queries() +type ReportConfigWithPlotConfigs struct { + db.VReportConfig + PlotConfigs []db.VPlotConfiguration `json:"plot_configs"` +} - rc, err := q.GetReportConfigByID(ctx, rcID) +func (s reportConfigService) GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (ReportConfigWithPlotConfigs, error) { + var a ReportConfigWithPlotConfigs + rc, err := s.Queries.GetReportConfig(ctx, rcID) if err != nil { - return model.ReportConfigWithPlotConfigs{}, err + return a, err } - pcs, err := q.ListReportConfigPlotConfigs(ctx, rcID) + pcs, err := s.Queries.ListReportConfigPlotConfigs(ctx, rcID) if err != nil { - return model.ReportConfigWithPlotConfigs{}, err + return a, err } - return model.ReportConfigWithPlotConfigs{ - ReportConfig: rc, - PlotConfigs: pcs, - }, nil + a.VReportConfig = rc + 
a.PlotConfigs = pcs + return a, nil } -func (s reportConfigService) CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (model.ReportDownloadJob, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s reportConfigService) CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (db.ReportDownloadJob, error) { + var a db.ReportDownloadJob + tx, err := s.db.Begin(ctx) if err != nil { - return model.ReportDownloadJob{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - j, err := qtx.CreateReportDownloadJob(ctx, rcID, profileID) + + a, err = qtx.CreateReportDownloadJob(ctx, db.CreateReportDownloadJobParams{ + ReportConfigID: &rcID, + Creator: profileID, + }) if err != nil { - return model.ReportDownloadJob{}, err + return a, err } - - msg := model.ReportConfigJobMessage{ReportConfigID: rcID, JobID: j.ID, IsLandscape: isLandscape} + msg := model.ReportConfigJobMessage{ReportConfigID: rcID, JobID: a.ID, IsLandscape: isLandscape} b, err := json.Marshal(msg) if err != nil { - return model.ReportDownloadJob{}, err + return a, err } - // NOTE: Depending on how long this takes, possibly invoke the lambdas directly if _, err := s.pubsub.PublishMessage(ctx, b); err != nil { - return model.ReportDownloadJob{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.ReportDownloadJob{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - if s.mockQueue { if _, err := s.pubsub.MockPublishMessage(ctx, b); err != nil { - return model.ReportDownloadJob{}, err + return a, err } } - - return j, nil + return a, nil } diff --git a/api/internal/servicev2/submittal.go b/api/internal/servicev2/submittal.go index 3b6b3c6a..b96d2ba0 100644 --- a/api/internal/servicev2/submittal.go +++ b/api/internal/servicev2/submittal.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" 
"github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -18,10 +19,10 @@ type SubmittalService interface { } type submittalService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewSubmittalService(db *model.Database, q *model.Queries) *submittalService { +func NewSubmittalService(db *Database, q *db.Queries) *submittalService { return &submittalService{db, q} } diff --git a/api/internal/servicev2/timeseries.go b/api/internal/servicev2/timeseries.go index 92f30bdd..83141827 100644 --- a/api/internal/servicev2/timeseries.go +++ b/api/internal/servicev2/timeseries.go @@ -2,8 +2,8 @@ package servicev2 import ( "context" - "errors" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -22,64 +22,39 @@ type TimeseriesService interface { } type timeseriesService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewTimeseriesService(db *model.Database, q *model.Queries) *timeseriesService { +func NewTimeseriesService(db *Database, q *db.Queries) *timeseriesService { return ×eriesService{db, q} } -func (s timeseriesService) CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - uu := make([]model.Timeseries, len(tt)) +func (s timeseriesService) CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) error { + uu := make([]db.CreateTimeseriesBatchParams, len(tt)) for idx, ts := range tt { - ts.Type = model.StandardTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, ts) - if err != nil { - return nil, err + if ts.ParameterID == uuid.Nil { + ts.ParameterID = model.UnknownParameterID } - uu[idx] = tsNew - } - - if err := tx.Commit(); err != nil { - return nil, err - } - - return uu, nil -} - -func 
(s timeseriesService) AssertTimeseriesLinkedToProject(ctx context.Context, projectID uuid.UUID, dd map[uuid.UUID]struct{}) error { - ddc := make(map[uuid.UUID]struct{}, len(dd)) - dds := make([]uuid.UUID, len(dd)) - idx := 0 - for k := range ddc { - ddc[k] = struct{}{} - dds[idx] = k - idx++ - } - - q := s.db.Queries() - - m, err := q.GetTimeseriesProjectMap(ctx, dds) - if err != nil { - return err - } - for tID := range ddc { - ppID, ok := m[tID] - if ok && ppID == projectID { - delete(ddc, tID) + if ts.UnitID == uuid.Nil { + ts.UnitID = model.UnknownUnitID + } + uu[idx] = db.CreateTimeseriesBatchParams{ + InstrumentID: &ts.InstrumentID, + Name: ts.Name, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeStandard, + }, } } - if len(ddc) != 0 { - return errors.New("instruments for all timeseries must be linked to project") - } - return nil + var err error + s.Queries.CreateTimeseriesBatch(ctx, uu).Exec(func(_ int, e error) { + if e != nil { + err = e + } + }) + return err } diff --git a/api/internal/servicev2/timeseries_calculated.go b/api/internal/servicev2/timeseries_calculated.go index e623e377..99633831 100644 --- a/api/internal/servicev2/timeseries_calculated.go +++ b/api/internal/servicev2/timeseries_calculated.go @@ -5,6 +5,7 @@ import ( "database/sql" "errors" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -17,20 +18,20 @@ type CalculatedTimeseriesService interface { } type calculatedTimeseriesService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewCalculatedTimeseriesService(db *model.Database, q *model.Queries) *calculatedTimeseriesService { +func NewCalculatedTimeseriesService(db *Database, q *db.Queries) *calculatedTimeseriesService { return &calculatedTimeseriesService{db, q} } func (s calculatedTimeseriesService) 
CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -43,7 +44,7 @@ func (s calculatedTimeseriesService) CreateCalculatedTimeseries(ctx context.Cont return err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return err } @@ -51,11 +52,11 @@ func (s calculatedTimeseriesService) CreateCalculatedTimeseries(ctx context.Cont } func (s calculatedTimeseriesService) UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -91,7 +92,7 @@ func (s calculatedTimeseriesService) UpdateCalculatedTimeseries(ctx context.Cont return err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return err } diff --git a/api/internal/servicev2/timeseries_cwms.go b/api/internal/servicev2/timeseries_cwms.go index f11185ac..7c31120f 100644 --- a/api/internal/servicev2/timeseries_cwms.go +++ b/api/internal/servicev2/timeseries_cwms.go @@ -3,70 +3,96 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) type TimeseriesCwmsService interface { - ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]model.TimeseriesCwms, error) - CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) ([]model.TimeseriesCwms, error) + ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]db.VTimeseriesCwm, error) + CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) error UpdateTimeseriesCwms(ctx 
context.Context, tsCwms model.TimeseriesCwms) error } type timeseriesCwmsService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewTimeseriesCwmsService(db *model.Database, q *model.Queries) *timeseriesCwmsService { +func NewTimeseriesCwmsService(db *Database, q *db.Queries) *timeseriesCwmsService { return ×eriesCwmsService{db, q} } -func (s timeseriesCwmsService) CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) ([]model.TimeseriesCwms, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s timeseriesCwmsService) CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) error { + tx, err := s.db.Begin(ctx) if err != nil { - return tcc, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for idx := range tcc { + for idx, tc := range tcc { + if tc.ParameterID == uuid.Nil { + tc.ParameterID = model.UnknownParameterID + } + if tc.UnitID == uuid.Nil { + tc.UnitID = model.UnknownUnitID + } tcc[idx].Type = model.CwmsTimeseriesType - tcc[idx].InstrumentID = instrumentID - tsNew, err := qtx.CreateTimeseries(ctx, tcc[idx].Timeseries) + tsNew, err := qtx.CreateTimeseries(ctx, db.CreateTimeseriesParams{ + InstrumentID: &instrumentID, + Name: tc.Name, + ParameterID: tc.ParameterID, + UnitID: tc.UnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeCwms, + }, + }) if err != nil { - return tcc, err + return err } - tcc[idx].Timeseries = tsNew - if err := qtx.CreateTimeseriesCwms(ctx, tcc[idx]); err != nil { - return tcc, err + if err := qtx.CreateTimeseriesCwms(ctx, db.CreateTimeseriesCwmsParams{ + TimeseriesID: tsNew.ID, + CwmsTimeseriesID: tc.CwmsTimeseriesID, + CwmsOfficeID: tc.CwmsOfficeID, + CwmsExtentEarliestTime: tc.CwmsExtentEarliestTime, + CwmsExtentLatestTime: tc.CwmsExtentLatestTime, + }); err != nil { + return err } } - - if err := tx.Commit(); err != nil 
{ - return tcc, err - } - - return tcc, nil + return tx.Commit(ctx) } -func (s timeseriesCwmsService) UpdateTimeseriesCwms(ctx context.Context, tsCwms model.TimeseriesCwms) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s timeseriesCwmsService) UpdateTimeseriesCwms(ctx context.Context, ts model.TimeseriesCwms) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if _, err := qtx.UpdateTimeseries(ctx, tsCwms.Timeseries); err != nil { + if _, err := qtx.UpdateTimeseries(ctx, db.UpdateTimeseriesParams{ + ID: ts.ID, + Name: ts.Name, + InstrumentID: &ts.InstrumentID, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + }); err != nil { return err } - if err := qtx.UpdateTimeseriesCwms(ctx, tsCwms); err != nil { + if err := qtx.UpdateTimeseriesCwms(ctx, db.UpdateTimeseriesCwmsParams{ + TimeseriesID: ts.ID, + CwmsTimeseriesID: ts.CwmsTimeseriesID, + CwmsOfficeID: ts.CwmsOfficeID, + CwmsExtentEarliestTime: ts.CwmsExtentEarliestTime, + CwmsExtentLatestTime: ts.CwmsExtentLatestTime, + }); err != nil { return err } - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/timeseries_process.go b/api/internal/servicev2/timeseries_process.go index 12a4e916..7711e95b 100644 --- a/api/internal/servicev2/timeseries_process.go +++ b/api/internal/servicev2/timeseries_process.go @@ -3,6 +3,7 @@ package servicev2 import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" ) @@ -12,10 +13,10 @@ type ProcessTimeseriesService interface { } type processTimeseriesService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewProcessTimeseriesService(db *model.Database, q *model.Queries) *processTimeseriesService { +func NewProcessTimeseriesService(db *Database, q *db.Queries) *processTimeseriesService { return &processTimeseriesService{db, q} } diff --git 
a/api/internal/servicev2/unit.go b/api/internal/servicev2/unit.go index 0a9def77..98f736a2 100644 --- a/api/internal/servicev2/unit.go +++ b/api/internal/servicev2/unit.go @@ -3,18 +3,18 @@ package servicev2 import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" ) type UnitService interface { - ListUnits(ctx context.Context) ([]model.Unit, error) + ListUnits(ctx context.Context) ([]db.VUnit, error) } type unitService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewUnitService(db *model.Database, q *model.Queries) *unitService { +func NewUnitService(db *Database, q *db.Queries) *unitService { return &unitService{db, q} } diff --git a/api/internal/servicev2/uploader.go b/api/internal/servicev2/uploader.go index 18d8432d..f89f6dd6 100644 --- a/api/internal/servicev2/uploader.go +++ b/api/internal/servicev2/uploader.go @@ -8,6 +8,7 @@ import ( "strconv" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) @@ -19,11 +20,11 @@ type UploaderService interface { } type uploaderService struct { - db *model.Database - *model.Queries + db *Database + *db.Queries } -func NewUploaderService(db *model.Database, q *model.Queries) *uploaderService { +func NewUploaderService(db *Database, q *db.Queries) *uploaderService { return &uploaderService{db, q} } @@ -41,11 +42,11 @@ func CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, m func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { // TODO Get mapper by id - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -139,5 +140,5 @@ func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx 
context.Co } } } - return nil + return tx.Commit(ctx) } diff --git a/api/migrations/repeat/0020__views_profiles.sql b/api/migrations/repeat/0020__views_profiles.sql index e6a06c32..83a2cd84 100644 --- a/api/migrations/repeat/0020__views_profiles.sql +++ b/api/migrations/repeat/0020__views_profiles.sql @@ -15,9 +15,14 @@ CREATE OR REPLACE VIEW v_profile AS ( p.display_name, p.email, p.is_admin, - COALESCE(r.roles,'{}') AS roles + COALESCE(r.roles,'{}')::text[] AS roles, + COALESCE(jsonb_agg(jsonb_build_object( + 'token_id', pt.token_id, + 'issued', pt.time + )), '[]'::jsonb) AS tokens FROM profile p LEFT JOIN roles_by_profile r ON r.profile_id = p.id + LEFT JOIN profile_token pt ON pt.profile_id = p.id ); CREATE OR REPLACE VIEW v_profile_project_roles AS ( diff --git a/api/migrations/repeat/0040__views_instruments.sql b/api/migrations/repeat/0040__views_instruments.sql index 8e4734c2..c8cce654 100644 --- a/api/migrations/repeat/0040__views_instruments.sql +++ b/api/migrations/repeat/0040__views_instruments.sql @@ -34,16 +34,16 @@ CREATE OR REPLACE VIEW v_instrument AS ( i.usgs_id, tel.telemetry, cwms.has_cwms, - COALESCE(op.parr, '[]'::json) projects, - COALESCE(c.constants, '{}') constants, - COALESCE(g.groups, '{}') groups, - COALESCE(a.alert_configs, '{}') alert_configs, - COALESCE(o.opts, '{}'::json) opts + COALESCE(op.parr, '[]'::jsonb) projects, + COALESCE(c.constants, '{}')::uuid[] constants, + COALESCE(g.groups, '{}')::uuid[] groups, + COALESCE(a.alert_configs, '{}')::uuid[] alert_configs, + COALESCE(o.opts, '{}'::jsonb) opts FROM instrument i INNER JOIN instrument_type t ON t.id = i.type_id LEFT JOIN LATERAL ( SELECT - json_agg(json_build_object( + jsonb_agg(jsonb_build_object( 'id', p.id, 'name', p.name, 'slug', p.slug @@ -75,14 +75,14 @@ CREATE OR REPLACE VIEW v_instrument AS ( ) c ON c.instrument_id = i.id LEFT JOIN ( SELECT - ARRAY_AGG(instrument_group_id) as groups, + array_agg(instrument_group_id) as groups, instrument_id FROM 
instrument_group_instruments GROUP BY instrument_id ) g ON g.instrument_id = i.id LEFT JOIN ( SELECT - ARRAY_AGG(alert_config_id) as alert_configs, + array_agg(alert_config_id) as alert_configs, instrument_id FROM alert_config_instrument GROUP BY instrument_id @@ -90,7 +90,7 @@ CREATE OR REPLACE VIEW v_instrument AS ( LEFT JOIN ( SELECT instrument_id, - JSON_AGG(JSON_BUILD_OBJECT( + jsonb_agg(jsonb_build_object( 'id', v.id, 'slug', v.telemetry_type_slug, 'name', v.telemetry_type_name @@ -108,7 +108,7 @@ CREATE OR REPLACE VIEW v_instrument AS ( LEFT JOIN ( -- optional properties that vary per -- instrument can be added here via union - SELECT o1.instrument_id, (ROW_TO_JSON(o1)::JSONB || ROW_TO_JSON(b1)::JSONB)::JSON AS opts + SELECT o1.instrument_id, (ROW_TO_JSON(o1)::jsonb || row_to_json(b1)::jsonb) AS opts FROM saa_opts o1 LEFT JOIN LATERAL ( SELECT value AS bottom_elevation FROM timeseries_measurement m @@ -117,7 +117,7 @@ CREATE OR REPLACE VIEW v_instrument AS ( LIMIT 1 ) b1 ON true UNION ALL - SELECT o2.instrument_id, (ROW_TO_JSON(o2)::JSONB || ROW_TO_JSON(b2)::JSONB)::JSON AS opts + SELECT o2.instrument_id, (ROW_TO_JSON(o2)::jsonb || row_to_json(b2)::jsonb) AS opts FROM ipi_opts o2 LEFT JOIN LATERAL ( SELECT value AS bottom_elevation FROM timeseries_measurement m diff --git a/api/migrations/repeat/0050__views_timeseries.sql b/api/migrations/repeat/0050__views_timeseries.sql index 9b5c7cff..d4ae5396 100644 --- a/api/migrations/repeat/0050__views_timeseries.sql +++ b/api/migrations/repeat/0050__views_timeseries.sql @@ -80,3 +80,46 @@ CREATE OR REPLACE VIEW v_timeseries_cwms AS ( FROM v_timeseries ts INNER JOIN timeseries_cwms tc ON ts.id = tc.timeseries_id ); + +CREATE OR REPLACE VIEW v_collection_group_details AS ( + SELECT + cg.*, + ts.timeseries + FROM collection_group cg + LEFT JOIN LATERAL ( + SELECT COALESCE(jsonb_agg(to_jsonb(t.*) || jsonb_build_object( + 'latest_time', tm.time, + 'latest_value', tm.value + )), '[]'::jsonb) AS timeseries + FROM 
collection_group_timeseries cgt + LEFT JOIN v_timeseries t on t.id = cgt.timeseries_id + LEFT JOIN LATERAL ( + SELECT tmm.time, tmm.value FROM timeseries_measurement tmm + WHERE tmm.timeseries_id = t.id + ORDER BY tmm.time DESC LIMIT 1 + ) tm ON true + WHERE cgt.collection_group_id = cg.id + ) ts ON true +); + + +select t.*, tm.time as latest_time, tm.value as latest_value +from collection_group_timeseries cgt +inner join collection_group cg on cg.id = cgt.collection_group_id +inner join v_timeseries t on t.id = cgt.timeseries_id +left join timeseries_measurement tm on tm.timeseries_id = t.id and tm.time = ( + select time from timeseries_measurement + where timeseries_id = t.id + order by time desc limit 1 +) +inner join project_instrument pi on t.instrument_id = pi.instrument_id +where pi.project_id = $1 +and cgt.collection_group_id = $2; + + +-- WHERE t.instrument_id = ANY( +-- SELECT instrument_id +-- FROM project_instrument +-- WHERE project_id = $1 +-- ) +-- AND cgt.collection_group_id = $2 diff --git a/api/migrations/repeat/0060__views_alerts.sql b/api/migrations/repeat/0060__views_alerts.sql index 214bd728..6edd34ce 100644 --- a/api/migrations/repeat/0060__views_alerts.sql +++ b/api/migrations/repeat/0060__views_alerts.sql @@ -7,12 +7,12 @@ CREATE OR REPLACE VIEW v_alert AS ( ac.name AS name, ac.body AS body, ( - SELECT COALESCE(JSON_AGG(JSON_BUILD_OBJECT( + SELECT COALESCE(jsonb_agg(jsonb_build_object( 'instrument_id', id, 'instrument_name', name - ))::text, '[]'::text) - FROM instrument - WHERE id = ANY( + )), '[]'::jsonb) + FROM instrument + WHERE id = ANY( SELECT iac.instrument_id FROM alert_config_instrument iac WHERE iac.alert_config_id = ac.id @@ -45,31 +45,32 @@ CREATE OR REPLACE VIEW v_alert_config AS ( ac.warning_interval::text AS warning_interval, ac.last_checked AS last_checked, ac.last_reminded AS last_reminded, + null::timestamptz create_next_submittal_from, ( - SELECT COALESCE(JSON_AGG(JSON_BUILD_OBJECT( - 'instrument_id', id, + SELECT 
COALESCE(jsonb_agg(jsonb_build_object( + 'instrument_id', id, 'instrument_name', name - ))::text, '[]'::text) - FROM instrument - WHERE id = ANY( + )), '[]'::jsonb) + FROM instrument + WHERE id = ANY( SELECT iac.instrument_id - FROM alert_config_instrument iac - WHERE iac.alert_config_id = ac.id + FROM alert_config_instrument iac + WHERE iac.alert_config_id = ac.id ) - ) AS instruments, + ) AS instruments, ( - SELECT COALESCE(JSON_AGG(JSON_BUILD_OBJECT( - 'id', id, + SELECT COALESCE(jsonb_agg(jsonb_build_object( + 'id', id, 'user_type', user_type, 'username', username, - 'email', email - ))::text, '[]'::text) + 'email', email + )), '[]'::jsonb) FROM ( SELECT id, - 'email' AS user_type, - null AS username, - email AS email + 'email' AS user_type, + null AS username, + email AS email FROM email WHERE id IN ( SELECT aes.email_id FROM alert_email_subscription aes @@ -78,21 +79,21 @@ CREATE OR REPLACE VIEW v_alert_config AS ( UNION SELECT id, - 'profile' AS user_type, - username AS username, - email AS email + 'profile' AS user_type, + username AS username, + email AS email FROM profile WHERE id IN ( SELECT aps.profile_id FROM alert_profile_subscription aps WHERE aps.alert_config_id = ac.id ) ) all_emails - ) AS alert_email_subscriptions + ) AS alert_email_subscriptions FROM alert_config ac - INNER JOIN project prj ON ac.project_id = prj.id - INNER JOIN alert_type atype ON ac.alert_type_id = atype.id - LEFT JOIN profile prf1 ON ac.creator = prf1.id - LEFT JOIN profile prf2 ON ac.updater = prf2.id + INNER JOIN project prj ON ac.project_id = prj.id + INNER JOIN alert_type atype ON ac.alert_type_id = atype.id + LEFT JOIN profile prf1 ON ac.creator = prf1.id + LEFT JOIN profile prf2 ON ac.updater = prf2.id WHERE NOT ac.deleted ); diff --git a/api/migrations/repeat/0090__views_plots.sql b/api/migrations/repeat/0090__views_plots.sql index 109df2aa..5f2eb0d2 100644 --- a/api/migrations/repeat/0090__views_plots.sql +++ b/api/migrations/repeat/0090__views_plots.sql @@ -8,13 
+8,13 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( pc.create_date, pc.updater, pc.update_date, - COALESCE(k.show_masked, 'true') AS show_masked, + COALESCE(k.show_masked, 'true') AS show_masked, COALESCE(k.show_nonvalidated, 'true') AS show_nonvalidated, - COALESCE(k.show_comments, 'true') AS show_comments, - COALESCE(k.auto_range, 'true') AS auto_range, - COALESCE(k.date_range, '1 year') AS date_range, - COALESCE(k.threshold, 3000) AS threshold, - COALESCE(rc.configs, '[]')::text AS report_configs, + COALESCE(k.show_comments, 'true') AS show_comments, + COALESCE(k.auto_range, 'true') AS auto_range, + COALESCE(k.date_range, '1 year') AS date_range, + COALESCE(k.threshold, 3000) AS threshold, + COALESCE(rc.configs, '[]'::jsonb) AS report_configs, pc.plot_type, CASE WHEN pc.plot_type = 'scatter-line' THEN json_build_object( @@ -24,11 +24,11 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( 'y2_axis_title', pcl.y2_axis_title, 'custom_shapes', COALESCE(cs.items, '[]') ) - )::text + ) WHEN pc.plot_type = 'profile' THEN json_build_object( 'instrument_id', ppc.instrument_id, 'instrument_type', it.name - )::text + ) WHEN pc.plot_type = 'contour' THEN json_build_object( 'timeseries_ids', COALESCE(pcct.timeseries_ids, '{}'), 'time', CASE @@ -39,11 +39,11 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( 'gradient_smoothing', pcc.gradient_smoothing, 'contour_smoothing', pcc.contour_smoothing, 'show_labels', pcc.show_labels - )::text + ) WHEN pc.plot_type = 'bullseye' THEN json_build_object( 'x_axis_timeseries_id', pbc.x_axis_timeseries_id, 'y_axis_timeseries_id', pbc.y_axis_timeseries_id - )::text + ) ELSE NULL END AS display FROM plot_configuration pc @@ -61,7 +61,7 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( ) k ON pc.id = k.id LEFT JOIN LATERAL ( SELECT - json_agg(json_build_object( + jsonb_agg(jsonb_build_object( 'id', id, 'slug', slug, 'name', name diff --git a/api/migrations/repeat/0100__views_datalogger.sql 
b/api/migrations/repeat/0100__views_datalogger.sql index 50f978e4..c31b4f82 100644 --- a/api/migrations/repeat/0100__views_datalogger.sql +++ b/api/migrations/repeat/0100__views_datalogger.sql @@ -13,8 +13,8 @@ CREATE OR REPLACE VIEW v_datalogger AS ( dl.slug AS slug, m.id AS model_id, m.model AS model, - COALESCE(e.errors, '{}'::TEXT[]) AS errors, - COALESCE(t.tables, '[]'::JSON)::TEXT AS tables + COALESCE(e.errors, '{}')::text[] AS errors, + COALESCE(t.tables, '[]'::jsonb) AS tables FROM datalogger dl INNER JOIN profile p1 ON dl.creator = p1.id INNER JOIN profile p2 ON dl.updater = p2.id @@ -22,7 +22,7 @@ CREATE OR REPLACE VIEW v_datalogger AS ( LEFT JOIN ( SELECT de.datalogger_id, - ARRAY_AGG(de.error_message) AS errors + array_agg(de.error_message)::text[] AS errors FROM datalogger_error de INNER JOIN datalogger_table dt ON dt.id = de.datalogger_table_id WHERE dt.table_name = 'preparse' @@ -31,7 +31,7 @@ LEFT JOIN ( SELECT dt.datalogger_id, - JSON_AGG(JSON_BUILD_OBJECT( + jsonb_agg(jsonb_build_object( 'id', dt.id, 'table_name', dt.table_name )) AS tables @@ -57,7 +57,7 @@ CREATE OR REPLACE VIEW v_datalogger_equivalency_table AS ( dt.datalogger_id AS datalogger_id, dt.id AS datalogger_table_id, dt.table_name AS datalogger_table_name, - COALESCE(JSON_AGG(ROW_TO_JSON(eq)) FILTER (WHERE eq.id IS NOT NULL), '[]'::JSON)::TEXT AS fields + COALESCE(jsonb_agg(to_jsonb(eq)) FILTER (WHERE eq.id IS NOT NULL), '[]'::jsonb) AS fields FROM datalogger_table dt INNER JOIN datalogger dl ON dt.datalogger_id = dl.id LEFT JOIN LATERAL ( @@ -72,11 +72,11 @@ CREATE OR REPLACE VIEW v_datalogger_hash AS ( SELECT dh.datalogger_id AS datalogger_id, - dh.hash AS "hash", - m.model AS model, - dl.sn AS sn + dh.hash AS "hash", + m.model AS model, + dl.sn AS sn FROM datalogger_hash dh - INNER JOIN datalogger dl ON dh.datalogger_id = dl.id + INNER JOIN datalogger dl ON dh.datalogger_id =
dl.id INNER JOIN datalogger_model m ON dl.model_id = m.id WHERE NOT dl.deleted ); diff --git a/api/migrations/repeat/0110__views_evaluations.sql b/api/migrations/repeat/0110__views_evaluations.sql index ebffad49..4cdedd85 100644 --- a/api/migrations/repeat/0110__views_evaluations.sql +++ b/api/migrations/repeat/0110__views_evaluations.sql @@ -1,33 +1,33 @@ CREATE OR REPLACE VIEW v_evaluation AS ( SELECT - ev.id AS id, - ev.name AS name, - ev.body AS body, - prf1.id AS creator, - COALESCE(prf1.username, 'midas') AS creator_username, - ev.create_date AS create_date, - prf2.id AS updater, - prf2.username AS updater_username, - ev.update_date AS update_date, - prj.id AS project_id, - prj.name AS project_name, - ac.id AS alert_config_id, - ac.name AS alert_config_name, - ev.submittal_id AS submittal_id, - ev.start_date AS start_date, - ev.end_date AS end_date, + ev.id, + ev.name, + ev.body, + prf1.id creator, + COALESCE(prf1.username, 'midas') creator_username, + ev.create_date, + prf2.id updater, + prf2.username updater_username, + ev.update_date, + prj.id project_id, + prj.name project_name, + ac.id alert_config_id, + ac.name alert_config_name, + ev.submittal_id, + ev.start_date, + ev.end_date, ( - SELECT COALESCE(JSON_AGG(JSON_BUILD_OBJECT( - 'instrument_id', id, + SELECT COALESCE(jsonb_agg(jsonb_build_object( + 'instrument_id', id, 'instrument_name', name - ))::text, '[]'::text) - FROM instrument - WHERE id = ANY( + )), '[]'::jsonb) + FROM instrument + WHERE id = ANY( SELECT evi.instrument_id - FROM evaluation_instrument evi - WHERE evi.evaluation_id = ev.id + FROM evaluation_instrument evi + WHERE evi.evaluation_id = ev.id ) - ) AS instruments + ) instruments FROM evaluation ev INNER JOIN project prj ON ev.project_id = prj.id LEFT JOIN profile prf1 ON ev.creator = prf1.id diff --git a/api/migrations/repeat/0120__views_alert_check.sql b/api/migrations/repeat/0120__views_alert_check.sql index b0362b25..2f7387fb 100644 --- 
a/api/migrations/repeat/0120__views_alert_check.sql +++ b/api/migrations/repeat/0120__views_alert_check.sql @@ -2,21 +2,22 @@ CREATE OR REPLACE VIEW v_alert_check_measurement_submittal AS ( SELECT ac.id AS alert_config_id, sub.id AS submittal_id, + null AS submittal, COALESCE( ac.warning_interval != INTERVAL '0' AND sub.completion_date IS NULL AND NOW() >= sub.due_date - ac.warning_interval AND NOW() < sub.due_date - AND true = ANY(SELECT UNNEST(ARRAY_AGG(lm.time)) IS NULL), + AND true = ANY(SELECT UNNEST(array_agg(lm.time)) IS NULL), true - ) AS should_warn, + )::boolean AS should_warn, COALESCE( sub.completion_date IS NULL AND NOT sub.marked_as_missing AND NOW() >= sub.due_date - AND true = ANY(SELECT UNNEST(ARRAY_AGG(lm.time)) IS NULL), + AND true = ANY(SELECT UNNEST(array_agg(lm.time)) IS NULL), true - ) AS should_alert, + )::boolean AS should_alert, COALESCE( ac.remind_interval != INTERVAL '0' AND ac.last_reminded IS NOT NULL @@ -26,8 +27,8 @@ CREATE OR REPLACE VIEW v_alert_check_measurement_submittal AS ( -- subtract 10 second constant to account for ticker accuracy/execution time AND NOW() >= ac.last_reminded + ac.remind_interval - INTERVAL '10 seconds', true - ) AS should_remind, - COALESCE(JSON_AGG(JSON_BUILD_OBJECT( + )::boolean AS should_remind, + COALESCE(json_agg(json_build_object( 'instrument_name', inst.name, 'timeseries_name', COALESCE(ts.name, 'No timeseries for instrument'), 'status', CASE @@ -61,19 +62,20 @@ CREATE OR REPLACE VIEW v_alert_check_evaluation_submittal AS ( SELECT ac.id AS alert_config_id, sub.id AS submittal_id, + null AS submittal, COALESCE( ac.warning_interval != INTERVAL '0' AND sub.completion_date IS NULL AND NOW() >= sub.due_date - ac.warning_interval AND NOW() < sub.due_date, true - ) AS should_warn, + )::boolean AS should_warn, COALESCE( sub.completion_date IS NULL AND NOW() >= sub.due_date AND NOT sub.marked_as_missing, true - ) AS should_alert, + )::boolean AS should_alert, COALESCE( ac.remind_interval != INTERVAL '0' AND 
ac.last_reminded IS NOT NULL @@ -83,7 +85,7 @@ CREATE OR REPLACE VIEW v_alert_check_evaluation_submittal AS ( AND NOW() >= ac.last_reminded + ac.remind_interval - INTERVAL '10 seconds' AND NOT sub.marked_as_missing, true - ) AS should_remind + )::boolean AS should_remind FROM submittal sub INNER JOIN alert_config ac ON sub.alert_config_id = ac.id WHERE ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::UUID diff --git a/api/migrations/repeat/0140__views_depth_based_instruments.sql b/api/migrations/repeat/0140__views_depth_based_instruments.sql index 69ec54bd..2e58d523 100644 --- a/api/migrations/repeat/0140__views_depth_based_instruments.sql +++ b/api/migrations/repeat/0140__views_depth_based_instruments.sql @@ -21,22 +21,22 @@ CREATE OR REPLACE VIEW v_saa_measurement AS ( SELECT r.instrument_id, r.time, - JSON_AGG(JSON_BUILD_OBJECT( - 'segment_id', r.segment_id, - 'x', r.x, - 'y', r.y, - 'z', r.z, - 'temp', r.t, - 'x_increment', r.x_increment, - 'y_increment', r.y_increment, - 'z_increment', r.z_increment, - 'temp_increment', r.temp_increment, - 'x_cum_dev', r.x_cum_dev, - 'y_cum_dev', r.y_cum_dev, - 'z_cum_dev', r.z_cum_dev, - 'temp_cum_dev', r.temp_cum_dev, - 'elevation', r.elevation - ) ORDER BY r.segment_id)::TEXT AS measurements + COALESCE(jsonb_agg(jsonb_build_object( + 'segment_id', r.segment_id, + 'x', r.x, + 'y', r.y, + 'z', r.z, + 'temp', r.t, + 'x_increment', r.x_increment, + 'y_increment', r.y_increment, + 'z_increment', r.z_increment, + 'temp_increment', r.temp_increment, + 'x_cum_dev', r.x_cum_dev, + 'y_cum_dev', r.y_cum_dev, + 'z_cum_dev', r.z_cum_dev, + 'temp_cum_dev', r.temp_cum_dev, + 'elevation', r.elevation + ) ORDER BY r.segment_id), '[]'::jsonb) AS measurements FROM (SELECT DISTINCT seg.instrument_id, seg.id AS segment_id, @@ -49,11 +49,11 @@ CREATE OR REPLACE VIEW v_saa_measurement AS ( q.initial_y - q.y y_increment, q.initial_z - q.z z_increment, q.initial_t - q.t temp_increment, - SUM(q.initial_x - q.x) FILTER (WHERE q.time >= 
q.initial_time) OVER (ORDER BY seg.id ASC) x_cum_dev, - SUM(q.initial_y - q.y) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) y_cum_dev, - SUM(q.initial_z - q.z) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) z_cum_dev, - SUM(q.initial_t - q.t) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) temp_cum_dev, - SUM(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation + sum(q.initial_x - q.x) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) x_cum_dev, + sum(q.initial_y - q.y) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) y_cum_dev, + sum(q.initial_z - q.z) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) z_cum_dev, + sum(q.initial_t - q.t) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) temp_cum_dev, + sum(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation FROM saa_segment seg INNER JOIN saa_opts opts ON opts.instrument_id = seg.instrument_id LEFT JOIN LATERAL ( @@ -112,14 +112,14 @@ CREATE OR REPLACE VIEW v_ipi_measurement AS ( SELECT r.instrument_id, r.time, - JSON_AGG(JSON_BUILD_OBJECT( - 'segment_id', r.segment_id, - 'tilt', r.tilt, - 'inc_dev', r.inc_dev, - 'cum_dev', r.cum_dev, - 'temp', r.temp, - 'elevation', r.elevation - ) ORDER BY r.segment_id)::TEXT AS measurements + COALESCE(jsonb_agg(jsonb_build_object( + 'segment_id', r.segment_id, + 'tilt', r.tilt, + 'inc_dev', r.inc_dev, + 'cum_dev', r.cum_dev, + 'temp', r.temp, + 'elevation', r.elevation + ) ORDER BY r.segment_id), '[]'::jsonb) AS measurements FROM (SELECT DISTINCT seg.instrument_id, seg.id AS segment_id, @@ -127,7 +127,7 @@ CREATE OR REPLACE VIEW v_ipi_measurement AS ( q.time, q.tilt, q.inc_dev, - COALESCE(q.cum_dev, SIN(q.tilt * PI() / 180) * q.seg_length) cum_dev, + COALESCE(q.cum_dev, sin(q.tilt * pi() / 180) * q.seg_length) cum_dev, q.temp, SUM(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation FROM ipi_segment seg @@ -137,7 +137,7 @@ 
CREATE OR REPLACE VIEW v_ipi_measurement AS ( a.time, t.value AS tilt, d.value AS inc_dev, - SUM(d.value) OVER (ORDER BY seg.id ASC) AS cum_dev, + sum(d.value) OVER (ORDER BY seg.id ASC) AS cum_dev, temp.value AS temp, locf(b.value) OVER (ORDER BY a.time ASC) AS bottom, locf(l.value) OVER (ORDER BY a.time ASC) AS seg_length diff --git a/api/migrations/repeat/0150__views_domain.sql b/api/migrations/repeat/0150__views_domain.sql index 807f5ca4..9fc1c889 100644 --- a/api/migrations/repeat/0150__views_domain.sql +++ b/api/migrations/repeat/0150__views_domain.sql @@ -60,11 +60,11 @@ CREATE OR REPLACE VIEW v_domain AS ( CREATE OR REPLACE VIEW v_domain_group AS ( SELECT "group", - json_agg(json_build_object( + jsonb_agg(jsonb_build_object( 'id', id, 'value', value, 'description', description - ))::text AS opts + )) AS opts FROM v_domain GROUP BY "group" ); diff --git a/api/migrations/repeat/0160__views_report_config.sql b/api/migrations/repeat/0160__views_report_config.sql index 95878191..4c5ff9c3 100644 --- a/api/migrations/repeat/0160__views_report_config.sql +++ b/api/migrations/repeat/0160__views_report_config.sql @@ -13,28 +13,28 @@ CREATE OR REPLACE VIEW v_report_config AS ( rc.updater, up.username AS updater_username, rc.update_date, - COALESCE(pc.configs, '[]')::text AS plot_configs, - json_build_object( - 'date_range', json_build_object( + COALESCE(pc.configs, '[]'::jsonb) AS plot_configs, + jsonb_build_object( + 'date_range', jsonb_build_object( 'enabled', rc.date_range_enabled, 'value', rc.date_range ), - 'show_masked', json_build_object( + 'show_masked', jsonb_build_object( 'enabled', rc.show_masked_enabled, 'value', rc.show_masked ), - 'show_nonvalidated', json_build_object( + 'show_nonvalidated', jsonb_build_object( 'enabled', rc.show_nonvalidated_enabled, 'value', rc.show_nonvalidated ) - )::text AS global_overrides + ) AS global_overrides FROM report_config rc INNER JOIN project p ON rc.project_id = p.id LEFT JOIN district dt ON p.district_id = dt.id INNER 
JOIN profile cp ON cp.id = rc.creator LEFT JOIN profile up ON up.id = rc.updater LEFT JOIN LATERAL ( - SELECT json_agg(json_build_object( + SELECT jsonb_agg(jsonb_build_object( 'id', pc.id, 'slug', pc.slug, 'name', pc.name diff --git a/api/migrations/schema/V1.14.00__uploader.sql b/api/migrations/schema/V1.14.00__uploader.sql index f4c01901..b640a6eb 100644 --- a/api/migrations/schema/V1.14.00__uploader.sql +++ b/api/migrations/schema/V1.14.00__uploader.sql @@ -1,3 +1,8 @@ +-- checked db and there are no null instrument geometries +ALTER TABLE instrument ALTER COLUMN "geometry" TYPE geometry(Point, 4326); +ALTER TABLE instrument ALTER COLUMN "geometry" SET NOT NULL; + + CREATE TYPE uploader_config_type AS ENUM ('csv', 'dux', 'toa5'); diff --git a/api/queries/alert_check.sql b/api/queries/alert_check.sql index 14400e44..69a39dde 100644 --- a/api/queries/alert_check.sql +++ b/api/queries/alert_check.sql @@ -25,7 +25,7 @@ where id = $1; insert into submittal (alert_config_id, create_date, due_date) select ac.id, - $2::timestamptz, - $2::timestamptz + ac.schedule_interval + sqlc.arg(date)::timestamptz, + sqlc.arg(date)::timestamptz + ac.schedule_interval from alert_config ac where ac.id = $1; diff --git a/api/queries/alert_config.sql b/api/queries/alert_config.sql index 71d5c837..e87e732c 100644 --- a/api/queries/alert_config.sql +++ b/api/queries/alert_config.sql @@ -5,7 +5,7 @@ where project_id = $1 order by name; --- name: ListAlertConfigsForProjectAndAlertType :many +-- name: ListAlertConfigsForProjectAlertType :many select * from v_alert_config where project_id = $1 @@ -21,7 +21,7 @@ where aci.instrument_id = $1 order by t.name; --- name: GetetAlertConfig :one +-- name: GetAlertConfig :one select * from v_alert_config where id = $1; diff --git a/api/queries/alert_subscription.sql b/api/queries/alert_subscription.sql index 054e0f4a..ef50c8ce 100644 --- a/api/queries/alert_subscription.sql +++ b/api/queries/alert_subscription.sql @@ -8,12 +8,12 @@ on conflict do 
nothing; delete from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; --- name: GetAlertSubscription :many -select * from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; +-- name: GetAlertSubscription :one +select * from alert_profile_subscription where id = $1; --- name: GetAlertSubscriptionByID :one -select * from alert_profile_subscription where id = $1; +-- name: GetAlertSubscriptionForAlertConfig :one +select * from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; -- name: ListMyAlertSubscriptions :many diff --git a/api/queries/aware.sql b/api/queries/aware.sql index 0881e406..98b5c0e0 100644 --- a/api/queries/aware.sql +++ b/api/queries/aware.sql @@ -10,3 +10,6 @@ order by aware_id, aware_parameter_key; -- name: CreateAwarePlatform :exec insert into aware_platform (instrument_id, aware_id) values ($1, $2); + +-- name: CreateAwarePlatformBatch :batchexec +insert into aware_platform (instrument_id, aware_id) values ($1, $2); diff --git a/api/queries/collection_group.sql b/api/queries/collection_group.sql index 91bca958..93dcdeb8 100644 --- a/api/queries/collection_group.sql +++ b/api/queries/collection_group.sql @@ -1,23 +1,11 @@ --- name: ListCollectionGroups :many +-- name: ListCollectionGroupsForProject :many select id, project_id, slug, name, creator, create_date, updater, update_date from collection_group -where project_id = sqlc.arg(project_id) -and (sqlc.narg(id) is null or sqlc.narg(id) = id); - - --- name: GetCollectionGroupDetailsTimeseries :one -select t.*, tm.time as latest_time, tm.value as latest_value -from collection_group_timeseries cgt -inner join collection_group cg on cg.id = cgt.collection_group_id -inner join v_timeseries t on t.id = cgt.timeseries_id -left join timeseries_measurement tm on tm.timeseries_id = t.id and tm.time = ( - select time from timeseries_measurement - where timeseries_id = t.id - order by time desc limit 1 -) -inner join project_instrument 
pi on t.instrument_id = pi.instrument_id -where pi.project_id = $1 -and cgt.collection_group_id = $2; +where project_id = $1; + + +-- name: GetCollectionGroupDetails :one +select * from v_collection_group_details where id = $1; -- name: CreateCollectionGroup :one diff --git a/api/queries/equivalency_table.sql b/api/queries/equivalency_table.sql index 0f9c6a2e..283f57f3 100644 --- a/api/queries/equivalency_table.sql +++ b/api/queries/equivalency_table.sql @@ -14,7 +14,7 @@ select not exists ( ); --- name: GetEquivalencyTable :many +-- name: GetEquivalencyTable :one select datalogger_id, datalogger_table_id, diff --git a/api/queries/evaluation.sql b/api/queries/evaluation.sql index 00c841a8..547c9224 100644 --- a/api/queries/evaluation.sql +++ b/api/queries/evaluation.sql @@ -24,7 +24,7 @@ where id = any( select * from v_evaluation where id = $1; --- name: CompleteEvaluationSubmittal :exec +-- name: CompleteEvaluationSubmittal :one update submittal sub1 set submittal_status_id = sq.submittal_status_id, completion_date = now() @@ -71,7 +71,11 @@ insert into evaluation ( returning id; --- name: CreateEvalationInstrument :exec +-- name: CreateEvaluationInstrument :exec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2); + + +-- name: CreateEvaluationInstrumentsBatch :batchexec insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2); diff --git a/api/queries/instrument.sql b/api/queries/instrument.sql index 2370b4a2..e7dd2bc0 100644 --- a/api/queries/instrument.sql +++ b/api/queries/instrument.sql @@ -1,32 +1,21 @@ -- name: ListInstruments :many -select id, - status_id, - status, - status_time, - slug, - name, - type_id, - type, - icon, - geometry, - station, - station_offset, - creator, - create_date, - updater, - update_date, - projects, - constants, - groups, - alert_configs, - nid_id, - usgs_id, - has_cwms, - show_cwms_tab, - opts +select * +from v_instrument +where not deleted; + + +-- name: 
ListInstrumentsForProject :many +select i.* +from v_instrument i +inner join project_instrument pi on pi.instrument_id = i.id +where pi.project_id = $1; + + +-- name: GetInstrument :one +select * from v_instrument where not deleted -and (sqlc.narg(id) is not null or sqlc.narg(id) = id); +and id=$1; -- name: GetInstrumentCount :one @@ -35,22 +24,21 @@ select count(*) from instrument where not deleted; -- name: CreateInstrument :one insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) -values (slugify($1, 'instrument'), $1, $2, st_setsrid(ST_GeomFromWKB($3), 4326), $4, $5, $6, $7, $8, $9, $10) +values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) returning id, slug; --- name: ListAdminProjects :many -select pr.project_id from profile_project_roles pr -inner join role ro on ro.id = pr.role_id -where pr.profile_id = $1 -and ro.name = 'ADMIN'; +-- name: CreateInstrumentsBatch :batchone +insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) +values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) +returning id, slug; -- name: ListInstrumentProjects :many select project_id from project_instrument where instrument_id = $1; --- name: GetProjectCountForInstrument :one +-- name: ListProjectCountForInstruments :many select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count from project_instrument pi inner join instrument i on pi.instrument_id = i.id @@ -63,7 +51,7 @@ order by i.name; update instrument set name = $3, type_id = $4, - geometry = ST_GeomFromWKB($5), + geometry = $5, updater = $6, update_date = $7, station = $8, @@ -81,7 +69,7 @@ and id in ( -- name: UpdateInstrumentGeometry :one update instrument set - geometry = st_geomfromwkb($3), + geometry = $3, updater = $4, update_date = now() where id = $2 diff --git 
a/api/queries/instrument_assign.sql b/api/queries/instrument_assign.sql index 8d320f8d..a1ffbfec 100644 --- a/api/queries/instrument_assign.sql +++ b/api/queries/instrument_assign.sql @@ -3,26 +3,35 @@ insert into project_instrument (project_id, instrument_id) values ($1, $2) on conflict on constraint project_instrument_project_id_instrument_id_key do nothing; +-- name: AssignInstrumentToProjectBatch :batchexec +insert into project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing; + + -- name: UnassignInstrumentFromProject :exec delete from project_instrument where project_id = $1 and instrument_id = $2; +-- name: UnassignInstrumentFromProjectBatch :batchexec +delete from project_instrument where project_id = $1 and instrument_id = $2; + + -- name: ValidateInstrumentNamesProjectUnique :many select i.name from project_instrument pi inner join instrument i on pi.instrument_id = i.id where pi.project_id = sqlc.arg(project_id) -and i.name in (sqlc.arg(instrument_name)::text[]) +and i.name in (sqlc.arg(instrument_names)::text[]) and not i.deleted; -- name: ValidateProjectsInstrumentNameUnique :many -select p.name, i.name +select i.name instrument_name from project_instrument pi inner join instrument i on pi.instrument_id = i.id inner join project p on pi.project_id = p.id -where i.name = sqlc.arg(name) -and pi.instrument_id in (sqlc.arg(instrument_id)::uuid[]) +where i.name = sqlc.arg(instrument_name) +and pi.project_id in (sqlc.arg(project_ids)::uuid[]) and not i.deleted order by pi.project_id; diff --git a/api/queries/instrument_constant.sql b/api/queries/instrument_constant.sql index 85b313eb..75654c4d 100644 --- a/api/queries/instrument_constant.sql +++ b/api/queries/instrument_constant.sql @@ -3,9 +3,14 @@ select t.* from v_timeseries t inner join instrument_constants ic on ic.timeseries_id = t.id where ic.instrument_id = $1; + -- name: CreateInstrumentConstant :exec insert 
into instrument_constants (instrument_id, timeseries_id) values ($1, $2); +-- name: CreateInstrumentConstantBatch :batchexec +insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2); + + -- name: DeleteInstrumentConstant :exec delete from instrument_constants where instrument_id = $1 and timeseries_id = $2; diff --git a/api/queries/instrument_group.sql b/api/queries/instrument_group.sql index fac9b5f6..c5385219 100644 --- a/api/queries/instrument_group.sql +++ b/api/queries/instrument_group.sql @@ -1,19 +1,20 @@ -- name: ListInstrumentGroups :many -select - id, - slug, - name, - description, - creator, - create_date, - updater, - update_date, - project_id, - instrument_count, - timeseries_count +select * +from v_instrument_group +where not deleted; + + +-- name: GetInstrumentGroup :one +select * from v_instrument_group where not deleted -and (sqlc.narg(id) is not null or sqlc.narg(id) = id); +and id=$1; + + +-- name: ListInstrumentGroupsForProject :many +select ig.* +from v_instrument_group ig +where ig.project_id = $1; -- name: CreateInstrumentGroup :one @@ -22,6 +23,12 @@ values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) returning id, slug, name, description, creator, create_date, updater, update_date, project_id; +-- name: CreateInstrumentGroupsBatch :batchone +insert into instrument_group (slug, name, description, creator, create_date, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, slug, name, description, creator, create_date, updater, update_date, project_id; + + -- name: UpdateInstrumentGroup :one update instrument_group set name = $2, diff --git a/api/queries/instrument_ipi.sql b/api/queries/instrument_ipi.sql index 33ef2dc6..1de093d5 100644 --- a/api/queries/instrument_ipi.sql +++ b/api/queries/instrument_ipi.sql @@ -3,6 +3,11 @@ insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_i values ($1, $2, $3, $4); +-- name: CreateIpiOptsBatch :batchexec
+insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + -- name: UpdateIpiOpts :exec update ipi_opts set bottom_elevation_timeseries_id = $2, @@ -10,6 +15,13 @@ update ipi_opts set where instrument_id = $1; +-- name: UpdateIpiOptsBatch :batchexec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + -- name: GetAllIpiSegmentsForInstrument :many select * from v_ipi_segment where instrument_id = $1; @@ -25,6 +37,17 @@ insert into ipi_segment ( ) values ($1, $2, $3, $4, $5, $6); +-- name: CreateIpiSegmentBatch :batchexec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6); + + -- name: UpdateIpiSegment :exec update ipi_segment set length_timeseries_id = $3, @@ -34,6 +57,15 @@ update ipi_segment set where id = $1 and instrument_id = $2; +-- name: UpdateIpiSegmentsBatch :batchexec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2; + + -- name: GetIpiMeasurementsForInstrument :many select m1.instrument_id, m1.time, m1.measurements from v_ipi_measurement m1 diff --git a/api/queries/instrument_note.sql b/api/queries/instrument_note.sql index 4dca597e..0ef3b8d6 100644 --- a/api/queries/instrument_note.sql +++ b/api/queries/instrument_note.sql @@ -20,6 +20,12 @@ values ($1, $2, $3, $4, $5, $6) returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; +-- name: CreateInstrumentNoteBatch :batchone +insert into instrument_note (instrument_id, title, body, time, creator, create_date) +values ($1, $2, $3, $4, $5, $6) +returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; + + -- name: UpdateInstrumentNote :one update instrument_note set 
title = $2, diff --git a/api/queries/instrument_saa.sql b/api/queries/instrument_saa.sql index cedac8bd..fd9bd817 100644 --- a/api/queries/instrument_saa.sql +++ b/api/queries/instrument_saa.sql @@ -3,6 +3,11 @@ insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_i values ($1, $2, $3, $4); +-- name: CreateSaaOptsBatch :batchexec +insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + -- name: UpdateSaaOpts :exec update saa_opts set bottom_elevation_timeseries_id = $2, @@ -10,6 +15,13 @@ update saa_opts set where instrument_id = $1; +-- name: UpdateSaaOptsBatch :batchexec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + -- name: GetAllSaaSegmentsForInstrument :many select * from v_saa_segment where instrument_id = $1; @@ -26,6 +38,18 @@ insert into saa_segment ( ) values ($1, $2, $3, $4, $5, $6, $7); +-- name: CreateSaaSegmentBatch :batchexec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7); + + -- name: UpdateSaaSegment :exec update saa_segment set length_timeseries_id = $3, diff --git a/api/queries/instrument_status.sql b/api/queries/instrument_status.sql index 86f14fb6..0eb3310f 100644 --- a/api/queries/instrument_status.sql +++ b/api/queries/instrument_status.sql @@ -16,5 +16,10 @@ insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $ on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id; +-- name: CreateOrUpdateInstrumentStatusBatch :batchexec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id; + + -- name: DeleteInstrumentStatus :exec delete from 
instrument_status where id = $1; diff --git a/api/queries/measurement.sql b/api/queries/measurement.sql index 5a153454..34392026 100644 --- a/api/queries/measurement.sql +++ b/api/queries/measurement.sql @@ -32,7 +32,12 @@ where t.instrument_id in ( and p.name = $2; --- name: CreateTimeseriesMeasruement :exec +-- name: CreateTimeseriesMeasurement :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do nothing; + + +-- name: CreateTimeseriesMeasurementsBatch :batchexec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) on conflict on constraint timeseries_unique_time do nothing; @@ -42,23 +47,38 @@ insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, on conflict on constraint timeseries_unique_time do update set value = excluded.value; +-- name: CreateOrUpdateTimeseriesMeasurementsBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do update set value = excluded.value; + + -- name: CreateTimeseriesNote :exec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do nothing; +-- name: CreateTimeseriesNotesBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing; + + -- name: CreateOrUpdateTimeseriesNote :exec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; +-- name: CreateOrUpdateTimeseriesNoteBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) 
values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; + + -- name: DeleteTimeseriesMeasurement :exec delete from timeseries_measurement where timeseries_id = $1 and time = $2; -- name: DeleteTimeseriesMeasurementsRange :exec -delete from timeseries_measurement where timeseries_id = $1 and time > $2 and time < $3; +delete from timeseries_measurement where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); --- name: DeleteTimeseriesNote :exec -delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3; +-- name: DeleteTimeseriesNoteRange :exec +delete from timeseries_notes where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); diff --git a/api/queries/plot_config.sql b/api/queries/plot_config.sql index 414d6fb4..51fe31c3 100644 --- a/api/queries/plot_config.sql +++ b/api/queries/plot_config.sql @@ -1,31 +1,20 @@ --- name: ListPlotConfigs :many -select - id, - slug, - name, - project_id, - report_configs, - creator, - create_date, - updater, - update_date, - show_masked, - show_nonvalidated, - show_comments, - auto_range, - date_range, - threshold, - plot_type, - display +-- name: ListPlotConfigsForProject :many +select * from v_plot_configuration -where (sqlc.narg(project_id) is null or sqlc.narg(project_id) = project_id) -and (sqlc.narg(id) is null or sqlc.narg(id) = id); +where project_id = $1; + + +-- name: GetPlotConfig :one +select * +from v_plot_configuration +where id = $1; -- name: CreatePlotConfig :one insert into plot_configuration (slug, name, project_id, creator, create_date, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) returning id; + -- name: CreatePlotConfigSettings :exec insert into plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, 
threshold) values ($1, $2, $3, $4, $5, $6, $7); diff --git a/api/queries/plot_config_contour.sql b/api/queries/plot_config_contour.sql index a04aa733..6f8993ca 100644 --- a/api/queries/plot_config_contour.sql +++ b/api/queries/plot_config_contour.sql @@ -17,6 +17,11 @@ insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_i on conflict (plot_contour_config_id, timeseries_id) do nothing; +-- name: CreatePlotContourConfigTimeseriesBatch :batchexec +insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing; + + -- name: DeleteAllPlotContourConfigTimeseries :exec delete from plot_contour_config_timeseries where plot_contour_config_id = $1; @@ -33,9 +38,9 @@ order by time asc; -- name: ListPlotConfigMeasurementsContourPlot :many select - oi.x, - oi.y, - locf(mm.value) as z + oi.x::double precision x, + oi.y::double precision y, + locf(mm.value) z from plot_contour_config pc left join plot_contour_config_timeseries pcts on pcts.plot_contour_config_id = pc.plot_config_id left join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id diff --git a/api/queries/plot_config_scatter_line.sql b/api/queries/plot_config_scatter_line.sql index 2b41b4bc..ae57e3e4 100644 --- a/api/queries/plot_config_scatter_line.sql +++ b/api/queries/plot_config_scatter_line.sql @@ -12,6 +12,12 @@ insert into plot_configuration_timeseries_trace ($1, $2, $3, $4, $5, $6, $7, $8); +-- name: CreatePlotConfigTimeseriesTracesBatch :batchexec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8); + + -- name: UpdatePlotConfigTimeseriesTrace :exec update plot_configuration_timeseries_trace set trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 @@ -27,6 +33,11 @@ insert into plot_configuration_custom_shape 
(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5); +-- name: CreatePlotConfigCustomShapesBatch :batchexec +insert into plot_configuration_custom_shape +(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5); + + -- name: UpdatePlotConfigCustomShape :exec update plot_configuration_custom_shape set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1; diff --git a/api/queries/profile.sql b/api/queries/profile.sql index 489daecc..3bbbafca 100644 --- a/api/queries/profile.sql +++ b/api/queries/profile.sql @@ -1,4 +1,4 @@ --- name: GetProfileForEDIPI :many +-- name: GetProfileForEDIPI :one select * from v_profile where edipi = $1; diff --git a/api/queries/project.sql b/api/queries/project.sql index 69fb81db..1e9a8e88 100644 --- a/api/queries/project.sql +++ b/api/queries/project.sql @@ -26,17 +26,11 @@ where pr.profile_id = $1 and r.name = $2; --- name: ListInstrumentsForProject :many -select i.* -from v_instrument i -inner join project_instrument pi on pi.instrument_id = i.id -where pi.project_id = $1; - - --- name: ListInstrumentGroupsForProject :many -select ig.* -from v_instrument_group ig -where ig.project_id = $1; +-- name: ListAdminProjects :many +select pr.project_id from profile_project_roles pr +inner join role ro on ro.id = pr.role_id +where pr.profile_id = $1 +and ro.name = 'ADMIN'; -- name: GetProjectCount :one @@ -47,7 +41,7 @@ select count(*) from project where not deleted; select * from v_project where id = $1; --- name: CreateProject :one +-- name: CreateProjectsBatch :batchone insert into project (federal_id, slug, name, district_id, creator, create_date) values ($1, slugify($2, 'project'), $2, $3, $4, $5) returning id, slug; diff --git a/api/queries/report_config.sql b/api/queries/report_config.sql index 34511968..9b7b2834 100644 --- a/api/queries/report_config.sql +++ b/api/queries/report_config.sql @@ -17,7 +17,7 @@ select * from v_plot_configuration where id = 
any( ); --- name: GetReportConfigByID :many +-- name: GetReportConfig :one select * from v_report_config where id = $1; @@ -35,10 +35,18 @@ delete from report_config where id=$1; insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2); +-- name: AssignReportConfigPlotConfigBatch :batchexec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2); + + -- name: UnassignReportConfigPlotConfig :exec delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2; +-- name: UnassignReportConfigPlotConfigBatch :batchexec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2; + + -- name: UnassignAllReportConfigPlotConfig :exec delete from report_config_plot_config where report_config_id=$1; diff --git a/api/queries/timeseries.sql b/api/queries/timeseries.sql index 74955da4..fcc2bce1 100644 --- a/api/queries/timeseries.sql +++ b/api/queries/timeseries.sql @@ -40,6 +40,12 @@ values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) returning id, instrument_id, slug, name, parameter_id, unit_id, type; +-- name: CreateTimeseriesBatch :batchone +insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) +values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) +returning id, instrument_id, slug, name, parameter_id, unit_id, type; + + -- name: UpdateTimeseries :one update timeseries set name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 where id = $1 diff --git a/api/queries/timeseries_cwms.sql b/api/queries/timeseries_cwms.sql index 019c2bfb..e28bb7f0 100644 --- a/api/queries/timeseries_cwms.sql +++ b/api/queries/timeseries_cwms.sql @@ -13,6 +13,11 @@ insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, ($1, $2, $3, $4, $5); +-- name: CreateTimeseriesCwmsBatch :batchexec +insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) 
values +($1, $2, $3, $4, $5); + + -- name: UpdateTimeseriesCwms :exec update timeseries_cwms set cwms_timeseries_id=$2, diff --git a/go.work.sum b/go.work.sum index 7afc1998..36f0dc97 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1,20 +1,41 @@ +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg= github.com/ClickHouse/clickhouse-go/v2 v2.27.1/go.mod h1:XvcaX7ai9T9si83rZ0cB3y2upq9AYMwdj16Trqm+sPg= +github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/Microsoft/hcsshim v0.12.0/go.mod h1:RZV12pcHCXQ42XnlQ3pz6FZfmrC1C+R4gaOHhRNML1g= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/alecthomas/assert/v2 v2.10.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= +github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs= +github.com/containerd/containerd v1.7.14/go.mod h1:YMC9Qt5yzNqXx/fO4j/5yYVIHXSRrlB3H7sxkUTvspg= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/cpuguy83/go-md2man/v2 
v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v26.1.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/elastic/go-sysinfo v1.11.2/go.mod h1:GKqR8bbMK/1ITnez9NIsIfXQr25aLhRJa7AfT8HpBFQ= github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw= github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= github.com/jackc/pgproto3 v1.1.0 
h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0= @@ -24,27 +45,52 @@ github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9 github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/lucasjones/reggen v0.0.0-20200904144131-37ba4fa293bb/go.mod h1:5ELEyG+X8f+meRWHuqUOewBOhvHkl7M76pdGEansxW4= +github.com/lufia/plan9stats v0.0.0-20240226150601-1dcf7310316a/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mfridman/xflag v0.0.0-20240825232106-efb77353e578/go.mod h1:/483ywM5ZO5SuMVjrIGquYNE5CzLrj5Ux/LxWWnjRaE= github.com/microsoft/go-mssqldb v1.7.2/go.mod h1:kOvZKUdrhhFQmxLZqbwUV0rHkNkZpthMITIb2Ko1IoA= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod 
h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs= +github.com/shirou/gopsutil/v3 v3.24.2/go.mod h1:tSg/594BcA+8UdQU2XcW803GWYgdtauFFPgJCJKZlVk= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/testcontainers/testcontainers-go v0.29.1/go.mod h1:SnKnKQav8UcgtKqjp/AD8bE1MqZm+3TDb/B8crE3XnI= +github.com/testcontainers/testcontainers-go/modules/postgres v0.29.1/go.mod h1:YsWyy+pHDgvGdi0axGOx6CGXWsE6eqSaApyd1FYYSSc= +github.com/tklauser/go-sysconf v0.3.13/go.mod h1:zwleP4Q4OehZHGn4CYZDipCgg9usW5IJePewFCGVEa0= +github.com/tklauser/numcpus v0.7.0/go.mod h1:bb6dMVcj8A42tSE7i32fsIUCbQNllK5iDguyOZRUzAY= github.com/tursodatabase/libsql-client-go v0.0.0-20240812094001-348a4e45b535/go.mod h1:l8xTsYB90uaVdMHXMCxKKLSgw5wLYBwBKKefNIUnm9s= +github.com/twpayne/go-kml/v3 v3.1.1/go.mod h1:7VT0jsr6fzn5CPZ5e4OB93vhgf3fZcwflK7ydbXFVos= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/vertica/vertica-sql-go v1.3.3/go.mod h1:jnn2GFuv+O2Jcjktb7zyc4Utlbu9YVqpHH/lx63+1M4= github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= github.com/ydb-platform/ydb-go-genproto v0.0.0-20240528144234-5d5a685e41f7/go.mod 
h1:Er+FePu1dNUieD+XTMDduGpQuCPssK5Q4BjF+IIXJ3I= github.com/ydb-platform/ydb-go-sdk/v3 v3.76.5/go.mod h1:IHwuXyolaAmGK2Dp7+dlhsnXphG1pwCoaP/OITT3+tU= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= go.opentelemetry.io/otel v1.26.0/go.mod h1:UmLkJHUAidDval2EICqBMbnAd0/m2vmpf/dAM+fvFs4= +go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= +go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= go.opentelemetry.io/otel/trace v1.26.0/go.mod h1:4iDxvGDQuUkHve82hJJ8UqrwswHYsZuWCBllGV2U2y0= golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8/go.mod h1:CQ1k9gNrJ50XIzaKCRR2hssIjF07kZFEiieALBM/ARQ= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= @@ -58,6 +104,7 @@ golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4= golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240311173647-c811ad7063a7/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= google.golang.org/grpc v1.62.1/go.mod 
h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= howett.net/plist v1.0.0/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g= diff --git a/sqlc.yml b/sqlc.yml index b2d428e9..0a76415b 100644 --- a/sqlc.yml +++ b/sqlc.yml @@ -16,39 +16,137 @@ sql: emit_empty_slices: true emit_pointers_for_null_types: true overrides: - - db_type: "uuid" - go_type: "github.com/google/uuid.UUID" - - db_type: "geometry" - go_type: "github.com/twpayne/go-geom.T" - - db_type: "timestamptz" - go_type: "time.Time" - - db_type: "json" - go_type: "github.com/USACE/instrumentation-api/api/internal/model.Opts" - - column: "v_instrument.projects" + # uuid + - db_type: uuid + go_type: github.com/google/uuid.UUID + - db_type: uuid + nullable: true go_type: - import: "github.com/USACE/instrumentation-api/api/internal/model" - package: "model" - type: "IDSlugName" + type: uuid.UUID + pointer: true + + # geometry + - db_type: geometry + go_type: + type: Geometry + + # timestamptz + - db_type: timestamptz + go_type: time.Time + - db_type: timestamptz + nullable: true + go_type: + type: time.Time + pointer: true + + # interval + - db_type: pg_catalog.interval + go_type: string + - db_type: pg_catalog.interval + nullable: true + go_type: + type: string + pointer: true + + # v_alert + - column: v_alert.instruments + go_type: + type: InstrumentIDName + slice: true + + # v_alert_check_measurement_submittal + - column: v_alert_check_measurement_submittal.affected_timeseries + go_type: + type: AlertCheckMeasurementSubmittalAffectedTimeseries + slice: true + - column: v_alert_check_evaluation_submittal.submittal + nullable: true + go_type: + type: VSubmittal + pointer: true + - column: v_alert_check_measurement_submittal.submittal + nullable: true + go_type: + type: VSubmittal + pointer: true + + # v_alert_config + - column: v_alert_config.instruments + go_type: + type: InstrumentIDName + slice: true + - column: 
v_alert_config.alert_email_subscriptions + go_type: + type: EmailAutocompleteResult + slice: true + + # v_collection_group_details + - column: v_collection_group_details.timeseries + go_type: + type: CollectionGroupDetailsTimeseries + slice: true + + # v_datalogger + - column: v_datalogger.tables + go_type: + type: DataloggerTableIDName + slice: true + + # v_datalogger_equivalency_table + - column: v_datalogger_equivalency_table.fields + go_type: + type: DataloggerEquivalencyTableField + slice: true + - column: v_domain_group.opts + go_type: + type: DomainGroupOpt + slice: true + + # v_evaluation + - column: v_evaluation.instruments + go_type: + type: InstrumentIDName + slice: true + + # v_instrument + - column: v_instrument.projects + go_type: + type: IDSlugName + slice: true + - column: v_instrument.telemetry + go_type: + type: IDSlugName + slice: true + + # v_ipi_measurement + - column: v_ipi_measurement.measurements + go_type: + type: IpiMeasurement + slice: true + + # v_plot_configuration + - column: v_plot_configuration.report_configs + go_type: + type: IDSlugName slice: true - - column: "v_instrument.constants" + + # v_profile + - column: v_profile.tokens go_type: - import: "github.com/google/uuid" - package: "uuid" - type: "UUID" + type: VProfileToken slice: true - - column: "v_instrument.groups" + + # v_saa_measurement + - column: v_saa_measurement.measurements go_type: - import: "github.com/google/uuid" - package: "uuid" - type: "UUID" + type: SaaMeasurement slice: true - - column: "v_instrument.alert_configs" + + # v_report_config + - column: v_report_config.plot_configs go_type: - import: "github.com/google/uuid" - package: "uuid" - type: "UUID" + type: IDSlugName slice: true - - column: "v_instrument.geometry" + - column: v_report_config.global_overrides go_type: - import: "github.com/twpayne/go-geom" - type: "T" + type: ReportConfigGlobalOverrides From 99ff6d38520f8ecf8ea1c55e39b9d115827d20d3 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 6 
Nov 2024 15:44:56 -0500 Subject: [PATCH 06/23] add timezone options list --- api/internal/handler/domain.go | 18 ++++++ api/internal/model/domains.go | 17 ++++++ api/internal/model/uploader.go | 5 -- api/internal/server/api.go | 1 + api/internal/server/docs/openapi.json | 58 ++++++++++++++------ api/internal/server/docs/openapi.yaml | 52 +++++++++++------- api/internal/service/domain.go | 1 + api/migrations/schema/V1.14.00__uploader.sql | 10 +++- report/generated.d.ts | 47 +++++++++------- 9 files changed, 147 insertions(+), 62 deletions(-) diff --git a/api/internal/handler/domain.go b/api/internal/handler/domain.go index d7fe4831..ad8cf96d 100644 --- a/api/internal/handler/domain.go +++ b/api/internal/handler/domain.go @@ -43,3 +43,21 @@ func (h *ApiHandler) GetDomainMap(c echo.Context) error { } return c.JSON(http.StatusOK, dm) } + +// ListTimezoneOptions godoc +// +// @Summary lists time zone options +// @Tags domain +// @Produce json +// @Success 200 {array} model.TimezoneOption +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /domains [get] +func (h *ApiHandler) ListTimezoneOptions(c echo.Context) error { + dd, err := h.DomainService.ListTimezoneOptions(c.Request().Context()) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, dd) +} diff --git a/api/internal/model/domains.go b/api/internal/model/domains.go index 2caeb54e..6ee923f2 100644 --- a/api/internal/model/domains.go +++ b/api/internal/model/domains.go @@ -58,3 +58,20 @@ func (q *Queries) GetDomainMap(ctx context.Context) (DomainMap, error) { } return m, nil } + +type TimezoneOption struct { + Name string `json:"name" db:"name"` + Abbrev string `json:"abbrev" db:"abbrev"` + UtcOffset string `json:"utc_offset" db:"utc_offset"` + IsDst bool `json:"is_dst" db:"is_dst"` +} + +const listTimezoneOptions = ` + SELECT * FROM pg_timezone_names +` + +func (q *Queries) 
ListTimezoneOptions(ctx context.Context) ([]TimezoneOption, error) { + dd := make([]TimezoneOption, 0) + err := q.db.SelectContext(ctx, &dd, listTimezoneOptions) + return dd, err +} diff --git a/api/internal/model/uploader.go b/api/internal/model/uploader.go index 953f7cf7..afa10035 100644 --- a/api/internal/model/uploader.go +++ b/api/internal/model/uploader.go @@ -1,7 +1,6 @@ package model import ( - "context" "time" "github.com/google/uuid" @@ -33,10 +32,6 @@ const createUploaderConfig = ` INSERT INTO uploader_config VALUES ($1, $2, $3, $4, $5, $6) ` -func (q Queries) CreateUploaderConfig(ctx context.Context, uc UploaderConfig) error { - -} - // CREATE TABLE uploader_config ( // id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), // project_id uuid NOT NULL REFERENCES project(id), diff --git a/api/internal/server/api.go b/api/internal/server/api.go index cea68cdb..f3c3bd43 100644 --- a/api/internal/server/api.go +++ b/api/internal/server/api.go @@ -142,6 +142,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // Domain r.public.GET("/domains", h.GetDomains) r.public.GET("/domains/map", h.GetDomainMap) + r.public.GET("/domains/timezones", h.ListTimezoneOptions) // EquivalencyTable r.private.GET("/datalogger/:datalogger_id/tables/:datalogger_table_id/equivalency_table", h.GetEquivalencyTable) diff --git a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index e62c791f..ac9db32d 100644 --- a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -1456,7 +1456,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Domain" + "$ref" : "#/components/schemas/TimezoneOption" }, "type" : "array" } @@ -1495,7 +1495,7 @@ "description" : "Internal Server Error" } }, - "summary" : "lists all domains", + "summary" : "lists time zone options", "tags" : [ "domain" ] } }, @@ -12200,25 +12200,19 @@ }, "/timeseries_measurements" : { "post" : { - "parameters" : [ { - "description" : 
"api key", - "in" : "query", - "name" : "key", - "required" : true, - "schema" : { - "type" : "string" - } - } ], "requestBody" : { "content" : { - "*/*" : { + "application/json" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesMeasurementCollectionCollection" + "$ref" : "#/components/schemas/_timeseries_measurements_post_request" + } + }, + "multipart/form-data" : { + "schema" : { + "$ref" : "#/components/schemas/_timeseries_measurements_post_request" } } - }, - "description" : "array of timeseries measurement collections", - "required" : true + } }, "responses" : { "200" : { @@ -12265,7 +12259,10 @@ "description" : "Internal Server Error" } }, - "summary" : "creates or updates one or more timeseries measurements", + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "creates one or more timeseries measurements", "tags" : [ "measurement" ], "x-codegen-request-body-name" : "timeseries_measurement_collections" } @@ -14584,6 +14581,23 @@ }, "type" : "object" }, + "TimezoneOption" : { + "properties" : { + "abbrev" : { + "type" : "string" + }, + "is_dst" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "utc_offset" : { + "type" : "string" + } + }, + "type" : "object" + }, "ToggleOption" : { "properties" : { "enabled" : { @@ -14723,6 +14737,16 @@ "enum" : [ 0, 1, 2 ], "type" : "integer", "x-enum-varnames" : [ "Undefined", "Null", "Present" ] + }, + "_timeseries_measurements_post_request" : { + "properties" : { + "timeseries_measurement_collections" : { + "description" : "TOA5 file of timeseries measurement collections", + "format" : "binary", + "type" : "string" + } + }, + "type" : "object" } }, "securitySchemes" : { diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml index e6bde2c5..a4d66746 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -974,7 +974,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Domain' + $ref: 
'#/components/schemas/TimezoneOption' type: array description: OK "400": @@ -995,7 +995,7 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: lists all domains + summary: lists time zone options tags: - domain /domains/map: @@ -8157,20 +8157,14 @@ paths: - timeseries /timeseries_measurements: post: - parameters: - - description: api key - in: query - name: key - required: true - schema: - type: string requestBody: content: - '*/*': + application/json: schema: - $ref: '#/components/schemas/TimeseriesMeasurementCollectionCollection' - description: array of timeseries measurement collections - required: true + $ref: '#/components/schemas/_timeseries_measurements_post_request' + multipart/form-data: + schema: + $ref: '#/components/schemas/_timeseries_measurements_post_request' responses: "200": content: @@ -8198,7 +8192,9 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: creates or updates one or more timeseries measurements + security: + - Bearer: [] + summary: creates one or more timeseries measurements tags: - measurement x-codegen-request-body-name: timeseries_measurement_collections @@ -8822,11 +8818,6 @@ components: type: integer type: object Domain: - example: - description: description - id: id - value: value - group: group properties: description: type: string @@ -11026,6 +11017,22 @@ components: $ref: '#/components/schemas/MeasurementCollection' type: array type: object + TimezoneOption: + example: + utc_offset: utc_offset + name: name + abbrev: abbrev + is_dst: true + properties: + abbrev: + type: string + is_dst: + type: boolean + name: + type: string + utc_offset: + type: string + type: object ToggleOption: example: value: true @@ -11177,6 +11184,13 @@ components: - Undefined - "Null" - Present + _timeseries_measurements_post_request: + properties: + timeseries_measurement_collections: + description: TOA5 file of timeseries measurement 
collections + format: binary + type: string + type: object securitySchemes: Bearer: description: Type "Bearer" followed by a space and access token. diff --git a/api/internal/service/domain.go b/api/internal/service/domain.go index 9f7fc6e5..94d4530c 100644 --- a/api/internal/service/domain.go +++ b/api/internal/service/domain.go @@ -9,6 +9,7 @@ import ( type DomainService interface { GetDomains(ctx context.Context) ([]model.Domain, error) GetDomainMap(ctx context.Context) (model.DomainMap, error) + ListTimezoneOptions(ctx context.Context) ([]model.TimezoneOption, error) } type domainService struct { diff --git a/api/migrations/schema/V1.14.00__uploader.sql b/api/migrations/schema/V1.14.00__uploader.sql index f4c01901..df5d5fc9 100644 --- a/api/migrations/schema/V1.14.00__uploader.sql +++ b/api/migrations/schema/V1.14.00__uploader.sql @@ -1,3 +1,8 @@ +-- checked db and there are no null instrument geometries +ALTER TABLE instrument ALTER COLUMN "geometry" TYPE geometry(Point, 4326); +ALTER TABLE instrument ALTER COLUMN "geometry" SET NOT NULL; + + CREATE TYPE uploader_config_type AS ENUM ('csv', 'dux', 'toa5'); @@ -8,7 +13,10 @@ CREATE TABLE uploader_config ( description text NOT NULL, create_date timestamptz NOT NULL DEFAULT now(), creator uuid NOT NULL REFERENCES profile(id), - type uploader_config_type NOT NULL + update_date timestamptz NOT NULL DEFAULT now(), + updater uuid NOT NULL REFERENCES profile(id), + type uploader_config_type NOT NULL, + tz_name text NOT NULL DEFAULT 'UTC' ); diff --git a/report/generated.d.ts b/report/generated.d.ts index 0f48113c..6efe8b19 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -834,13 +834,13 @@ export interface paths { }; }; "/domains": { - /** lists all domains */ + /** lists time zone options */ get: { responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Domain"][]; + "application/json": components["schemas"]["TimezoneOption"][]; }; }; /** @description Bad 
Request */ @@ -7030,18 +7030,12 @@ export interface paths { }; }; "/timeseries_measurements": { - /** creates or updates one or more timeseries measurements */ + /** creates one or more timeseries measurements */ post: { - parameters: { - query: { - /** @description api key */ - key: string; - }; - }; - /** @description array of timeseries measurement collections */ - requestBody: { + requestBody?: { content: { - "*/*": components["schemas"]["TimeseriesMeasurementCollectionCollection"]; + "application/json": components["schemas"]["_timeseries_measurements_post_request"]; + "multipart/form-data": components["schemas"]["_timeseries_measurements_post_request"]; }; }; responses: { @@ -7621,14 +7615,6 @@ export interface components { red_submittals?: number; yellow_submittals?: number; }; - /** - * @example { - * "description": "description", - * "id": "id", - * "value": "value", - * "group": "group" - * } - */ Domain: { description?: string; group?: string; @@ -9689,6 +9675,20 @@ export interface components { TimeseriesMeasurementCollectionCollection: { items?: components["schemas"]["MeasurementCollection"][]; }; + /** + * @example { + * "utc_offset": "utc_offset", + * "name": "name", + * "abbrev": "abbrev", + * "is_dst": true + * } + */ + TimezoneOption: { + abbrev?: string; + is_dst?: boolean; + name?: string; + utc_offset?: string; + }; /** * @example { * "value": true, @@ -9813,6 +9813,13 @@ export interface components { }; /** @enum {integer} */ "pgtype.Status": 0 | 1 | 2; + _timeseries_measurements_post_request: { + /** + * Format: binary + * @description TOA5 file of timeseries measurement collections + */ + timeseries_measurement_collections?: string; + }; }; responses: never; parameters: never; From 3b36dcf08407017d0632daf4b4e3502f028b95c8 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 6 Nov 2024 17:49:56 -0500 Subject: [PATCH 07/23] add collection group tests with sort order --- api/internal/handler/collection_groups.go | 2 +- 
.../handler/collection_groups_test.go | 45 +++++++++-- api/internal/model/collection_group.go | 23 +++--- api/internal/server/docs/openapi.json | 6 ++ api/internal/server/docs/openapi.yaml | 12 ++- api/migrations/schema/V1.14.00__uploader.sql | 2 + compose.sh | 77 +++++++++---------- docker-compose.yml | 7 -- report/generated.d.ts | 10 ++- 9 files changed, 111 insertions(+), 73 deletions(-) diff --git a/api/internal/handler/collection_groups.go b/api/internal/handler/collection_groups.go index adf100ea..7dafe28f 100644 --- a/api/internal/handler/collection_groups.go +++ b/api/internal/handler/collection_groups.go @@ -141,7 +141,7 @@ func (h *ApiHandler) UpdateCollectionGroup(c echo.Context) error { if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, cgUpdated) + return c.JSON(http.StatusOK, cgUpdated) } // DeleteCollectionGroup godoc diff --git a/api/internal/handler/collection_groups_test.go b/api/internal/handler/collection_groups_test.go index 4b15fe50..c5481fea 100644 --- a/api/internal/handler/collection_groups_test.go +++ b/api/internal/handler/collection_groups_test.go @@ -19,12 +19,15 @@ const collectionGroupSchema = `{ "creator_id": { "type": "string" }, "create_date": { "type": "string", "format": "date-time" }, "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" } + "update_date": { "type": ["string", "null"], "format": "date-time" }, + "sort_order": { "type": "integer" } }, - "required": ["id", "project_id", "name", "slug", "creator_id", "create_date", "updater_id", "update_date"], + "required": ["id", "project_id", "name", "slug", "creator_id", "create_date", "updater_id", "update_date", "sort_order"], "additionalProperties": false }` +var collectionGroupObjectLoader = gojsonschema.NewStringLoader(collectionGroupSchema) + var collectionGroupArrayLoader = gojsonschema.NewStringLoader(fmt.Sprintf(`{ "type": "array", "items": %s @@ -41,6 +44,7 @@ const 
collectionGroupDetailsSchema = `{ "create_date": { "type": "string", "format": "date-time" }, "updater_id": { "type": ["string", "null"] }, "update_date": { "type": ["string", "null"], "format": "date-time" }, + "sort_order": { "type": "integer" }, "timeseries": { "type": "array", "items": { @@ -67,7 +71,7 @@ const collectionGroupDetailsSchema = `{ } } }, - "required": ["id", "project_id", "name", "slug", "creator_id", "create_date", "updater_id", "update_date", "timeseries"], + "required": ["id", "project_id", "name", "slug", "creator_id", "create_date", "updater_id", "update_date", "timeseries", "sort_order"], "additionalProperties": false }` @@ -75,12 +79,27 @@ var collectionGroupDetailsObjectLoader = gojsonschema.NewStringLoader(collection const testCollectionGroupID = "30b32cb1-0936-42c4-95d1-63a7832a57db" +var createCollectionGroupBody = `{ + "name": "test new collection group", + "sort_order": 2 +}` + +const updateCollectionGroupBody = `{ + "name": "test update collection group", + "sort_order": 3 +}` + func TestCollectionGroups(t *testing.T) { - objSchema, err := gojsonschema.NewSchema(collectionGroupDetailsObjectLoader) + objSchema, err := gojsonschema.NewSchema(collectionGroupObjectLoader) assert.Nil(t, err) if err != nil { t.Log("invalid object schema") } + detailsObjSchema, err := gojsonschema.NewSchema(collectionGroupDetailsObjectLoader) + assert.Nil(t, err) + if err != nil { + t.Log("invalid details object schema") + } arrSchema, err := gojsonschema.NewSchema(collectionGroupArrayLoader) assert.Nil(t, err) if err != nil { @@ -93,7 +112,7 @@ func TestCollectionGroups(t *testing.T) { URL: fmt.Sprintf("/projects/%s/collection_groups/%s", testProjectID, testCollectionGroupID), Method: http.MethodGet, ExpectedStatus: http.StatusOK, - ExpectedSchema: objSchema, + ExpectedSchema: detailsObjSchema, }, { Name: "ListCollectionGroups", @@ -102,6 +121,22 @@ func TestCollectionGroups(t *testing.T) { ExpectedStatus: http.StatusOK, ExpectedSchema: arrSchema, }, + { + 
Name: "CreateCollectionGroup", + URL: fmt.Sprintf("/projects/%s/collection_groups", testProjectID), + Method: http.MethodPost, + ExpectedStatus: http.StatusCreated, + ExpectedSchema: arrSchema, + Body: createCollectionGroupBody, + }, + { + Name: "UpdateCollectionGroup", + URL: fmt.Sprintf("/projects/%s/collection_groups/%s", testProjectID, testCollectionGroupID), + Method: http.MethodPut, + ExpectedStatus: http.StatusOK, + ExpectedSchema: objSchema, + Body: updateCollectionGroupBody, + }, { Name: "DeleteCollectionGroup", URL: fmt.Sprintf("/projects/%s/collection_groups/%s", testProjectID, testCollectionGroupID), diff --git a/api/internal/model/collection_group.go b/api/internal/model/collection_group.go index 89ede791..b23a9ef5 100644 --- a/api/internal/model/collection_group.go +++ b/api/internal/model/collection_group.go @@ -13,6 +13,7 @@ type CollectionGroup struct { ProjectID uuid.UUID `json:"project_id" db:"project_id"` Slug string `json:"slug" db:"slug"` Name string `json:"name" db:"name"` + SortOrder int `json:"sort_order" db:"sort_order"` AuditInfo } @@ -32,9 +33,7 @@ type collectionGroupDetailsTimeseries struct { } const listCollectionGroups = ` - SELECT id, project_id, slug, name, creator, create_date, updater, update_date - FROM collection_group - WHERE project_id = $1 + SELECT * FROM collection_group WHERE project_id = $1 ORDER BY sort_order, name ` // ListCollectionGroups lists all collection groups for a project @@ -46,8 +45,8 @@ func (q *Queries) ListCollectionGroups(ctx context.Context, projectID uuid.UUID) return aa, nil } -const getCollectionGroupDetails = listCollectionGroups + ` - AND id = $2 +const getCollectionGroupDetails = ` + SELECT * FROM collection_group WHERE project_id = $1 AND id = $2 ` // GetCollectionGroupDetails returns details for a single CollectionGroup @@ -87,30 +86,30 @@ func (q *Queries) GetCollectionGroupDetailsTimeseries(ctx context.Context, proje } const createCollectionGroup = ` - INSERT INTO collection_group (project_id, 
name, slug, creator, create_date, updater, update_date) - VALUES ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5, $6) - RETURNING id, project_id, name, slug, creator, create_date, updater, update_date + INSERT INTO collection_group (project_id, name, slug, creator, create_date, updater, update_date, sort_order) + VALUES ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5, $6, $7) + RETURNING id, project_id, name, slug, creator, create_date, updater, update_date, sort_order ` // CreateCollectionGroup creates a new collection group func (q *Queries) CreateCollectionGroup(ctx context.Context, cg CollectionGroup) (CollectionGroup, error) { var cgNew CollectionGroup - if err := q.db.GetContext(ctx, &cgNew, createCollectionGroup, cg.ProjectID, cg.Name, cg.CreatorID, cg.CreateDate, cg.UpdaterID, cg.UpdateDate); err != nil { + if err := q.db.GetContext(ctx, &cgNew, createCollectionGroup, cg.ProjectID, cg.Name, cg.CreatorID, cg.CreateDate, cg.UpdaterID, cg.UpdateDate, cg.SortOrder); err != nil { return cgNew, err } return cgNew, nil } const updateCollectionGroup = ` - UPDATE collection_group SET name=$3, updater=$4, update_date=$5 + UPDATE collection_group SET name=$3, updater=$4, update_date=$5, sort_order=$6 WHERE project_id=$1 AND id=$2 - RETURNING id, project_id, name, slug, creator, create_date, updater, update_date + RETURNING id, project_id, name, slug, creator, create_date, updater, update_date, sort_order ` // UpdateCollectionGroup updates an existing collection group's metadata func (q *Queries) UpdateCollectionGroup(ctx context.Context, cg CollectionGroup) (CollectionGroup, error) { var cgUpdated CollectionGroup - if err := q.db.GetContext(ctx, &cgUpdated, updateCollectionGroup, cg.ProjectID, cg.ID, cg.Name, cg.UpdaterID, cg.UpdateDate); err != nil { + if err := q.db.GetContext(ctx, &cgUpdated, updateCollectionGroup, cg.ProjectID, cg.ID, cg.Name, cg.UpdaterID, cg.UpdateDate, cg.SortOrder); err != nil { return cgUpdated, 
err } return cgUpdated, nil diff --git a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index ac9db32d..554bef26 100644 --- a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -12575,6 +12575,9 @@ "slug" : { "type" : "string" }, + "sort_order" : { + "type" : "integer" + }, "update_date" : { "type" : "string" }, @@ -12610,6 +12613,9 @@ "slug" : { "type" : "string" }, + "sort_order" : { + "type" : "integer" + }, "timeseries" : { "items" : { "$ref" : "#/components/schemas/collectionGroupDetailsTimeseries" diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml index a4d66746..c7cf0201 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -8488,6 +8488,7 @@ components: updater_id: updater_id id: id create_date: create_date + sort_order: 0 slug: slug update_date: update_date properties: @@ -8505,6 +8506,8 @@ components: type: string slug: type: string + sort_order: + type: integer update_date: type: string updater_id: @@ -8536,7 +8539,7 @@ components: parameter: parameter name: name variable: variable - latest_value: 0.8008281904610115 + latest_value: 6.027456183070403 id: id instrument_slug: instrument_slug is_computed: true @@ -8564,7 +8567,7 @@ components: parameter: parameter name: name variable: variable - latest_value: 0.8008281904610115 + latest_value: 6.027456183070403 id: id instrument_slug: instrument_slug is_computed: true @@ -8579,6 +8582,7 @@ components: updater_id: updater_id id: id create_date: create_date + sort_order: 0 slug: slug update_date: update_date properties: @@ -8596,6 +8600,8 @@ components: type: string slug: type: string + sort_order: + type: integer timeseries: items: $ref: '#/components/schemas/collectionGroupDetailsTimeseries' @@ -11116,7 +11122,7 @@ components: parameter: parameter name: name variable: variable - latest_value: 0.8008281904610115 + latest_value: 6.027456183070403 id: id 
instrument_slug: instrument_slug is_computed: true diff --git a/api/migrations/schema/V1.14.00__uploader.sql b/api/migrations/schema/V1.14.00__uploader.sql index df5d5fc9..7b249f95 100644 --- a/api/migrations/schema/V1.14.00__uploader.sql +++ b/api/migrations/schema/V1.14.00__uploader.sql @@ -26,3 +26,5 @@ CREATE TABLE uploader_config_mapping ( timeseries_id uuid UNIQUE NOT NULL REFERENCES timeseries(id), CONSTRAINT uploader_config_mapping_uploader_config_id_field_name UNIQUE(uploader_config_id, field_name) ); + +ALTER TABLE collection_group ADD COLUMN sort_order integer NOT NULL DEFAULT 0; diff --git a/compose.sh b/compose.sh index 4ce826ef..a7c955a3 100755 --- a/compose.sh +++ b/compose.sh @@ -2,7 +2,10 @@ set -Eeo pipefail -parent_path=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) +parent_path=$( + cd "$(dirname "${BASH_SOURCE[0]}")" + pwd -P +) cd "$parent_path" COMPOSECMD="docker compose -f docker-compose.yml" @@ -10,28 +13,25 @@ COMPOSECMD="docker compose -f docker-compose.yml" mkdocs() { ( DOCKER_BUILDKIT=1 docker build --file api/Dockerfile.openapi --output api/internal/server/docs api - cd report && npm run generate >/dev/null; + cd report && npm run generate >/dev/null ) } - if [ "$1" = "watch" ]; then mkdocs -q if [ "$2" = "mock" ]; then - DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml --profile=mock watch + DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml --profile=mock up --watch else - DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml watch + DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml up --watch fi - elif [ "$1" = "up" ]; then mkdocs -q if [ "$2" = "mock" ]; then - DOCKER_BUILDKIT=1 $COMPOSECMD --profile=mock up -d --build + DOCKER_BUILDKIT=1 $COMPOSECMD --profile=mock up --remove-orphans -d --build else - DOCKER_BUILDKIT=1 $COMPOSECMD up -d --build + DOCKER_BUILDKIT=1 $COMPOSECMD up --remove-orphans -d --build fi - elif [ "$1" = "build" ]; then if [ "$2" = "local" ] || [ "$2" = "develop" ] || [ "$2" = "test" ] 
|| [ "$2" = "prod" ]; then @@ -46,20 +46,19 @@ elif [ "$1" = "build" ]; then AMD64_TARGET_PLATFORM=true fi - for BUILD_TARGET in midas-api midas-sql midas-telemetry midas-alert midas-dcs-loader - do - docker build \ - ${AMD64_TARGET_PLATFORM:+--platform=linux/amd64} \ - --build-arg="BASE_IMAGE=${SCRATCH_BASE_IMAGE}" \ - --build-arg="GO_VERSION=1.23" \ - --build-arg="BUILD_TAG=$2" \ - --build-arg="BUILD_TARGET=${BUILD_TARGET}" \ - -t $BUILD_TARGET:"$2" api + for BUILD_TARGET in midas-api midas-sql midas-telemetry midas-alert midas-dcs-loader; do + docker build \ + ${AMD64_TARGET_PLATFORM:+--platform=linux/amd64} \ + --build-arg="BASE_IMAGE=${SCRATCH_BASE_IMAGE}" \ + --build-arg="GO_VERSION=1.23" \ + --build-arg="BUILD_TAG=$2" \ + --build-arg="BUILD_TARGET=${BUILD_TARGET}" \ + -t $BUILD_TARGET:"$2" api done docker build \ - --build-arg="BASE_IMAGE=${ALPINE_BASE_IMAGE}" \ - -t midas-report:$2 report + --build-arg="BASE_IMAGE=${ALPINE_BASE_IMAGE}" \ + -t midas-report:$2 report else echo -e "usage:\n\t./compose.sh build [local,develop,test,prod]" exit 1 @@ -74,8 +73,7 @@ elif [ "$1" = "build" ]; then declare -a REGISTRIES=("midas-api" "midas-telemetry" "midas-alert" "midas-dcs-loader" "midas-sql") # tag - for IMAGE in "${REGISTRIES[@]}" - do + for IMAGE in "${REGISTRIES[@]}"; do docker tag $IMAGE:"$2" $4/$IMAGE:"$2" done if [ "$2" = "develop" ]; then @@ -83,8 +81,7 @@ elif [ "$1" = "build" ]; then fi # push - for IMAGE in "${REGISTRIES[@]}" - do + for IMAGE in "${REGISTRIES[@]}"; do docker push $4/$IMAGE:"$2" done if [ "$2" = "develop" ]; then @@ -92,22 +89,18 @@ elif [ "$1" = "build" ]; then fi fi - elif [ "$1" = "authdbdump" ]; then - $COMPOSECMD exec authdb pg_dump postgres > auth/initdb/init2.sql - + $COMPOSECMD exec authdb pg_dump postgres >auth/initdb/init2.sql elif [ "$1" = "down" ]; then mkdocs -q $COMPOSECMD -f docker-compose.dev.yml --profile=mock down - elif [ "$1" = "clean" ]; then $COMPOSECMD -f docker-compose.dev.yml --profile=mock down -v - elif [ "$1" = 
"test" ]; then - docker compose build + $COMPOSECMD up --remove-orphans -d --build db migrate elasticmq api shift TEARDOWN=false @@ -115,34 +108,34 @@ elif [ "$1" = "test" ]; then while [[ $# -gt 0 ]]; do case $1 in - -rm) - TEARDOWN=true - shift - ;; - *) - REST_ARGS+=("$1") - shift - ;; + -rm) + TEARDOWN=true + shift + ;; + *) + REST_ARGS+=("$1") + shift + ;; esac done GOCMD="go test ${REST_ARGS[@]} github.com/USACE/instrumentation-api/api/internal/handler" + set +e if [ "$REPORT" = true ]; then - docker compose run -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api > $(pwd)/test.log + docker compose run --remove-orphans -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api >$(pwd)/test.log else - docker compose run -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api + docker compose run --remove-orphans -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api fi + set -e if [ $TEARDOWN = true ]; then docker compose --profile=mock down -v fi - elif [ "$1" = "mkdocs" ]; then mkdocs - else echo -e "usage:\n\t./compose.sh watch\n\t./compose.sh up\n\t./compose.sh down\n\t./compose.sh clean\n\t./compose.sh test\n\t./compose.sh mkdocs" fi diff --git a/docker-compose.yml b/docker-compose.yml index e3465ea9..f2d9c487 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -87,13 +87,6 @@ services: required: true ports: - "${API_PORT:-8080}:80" - depends_on: - db: - condition: service_healthy - migrate: - condition: service_completed_successfully - elasticmq: - condition: service_started telemetry: build: diff --git a/report/generated.d.ts b/report/generated.d.ts index 6efe8b19..437098c7 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -7326,6 +7326,7 @@ export interface components { * "updater_id": "updater_id", * "id": "id", * "create_date": "create_date", + * "sort_order": 0, * "slug": "slug", * "update_date": "update_date" * } @@ -7338,6 +7339,7 @@ export interface components { name?: string; 
project_id?: string; slug?: string; + sort_order?: number; update_date?: string; updater_id?: string; updater_username?: string; @@ -7372,7 +7374,7 @@ export interface components { * "parameter": "parameter", * "name": "name", * "variable": "variable", - * "latest_value": 0.8008281904610115, + * "latest_value": 6.027456183070403, * "id": "id", * "instrument_slug": "instrument_slug", * "is_computed": true, @@ -7407,7 +7409,7 @@ export interface components { * "parameter": "parameter", * "name": "name", * "variable": "variable", - * "latest_value": 0.8008281904610115, + * "latest_value": 6.027456183070403, * "id": "id", * "instrument_slug": "instrument_slug", * "is_computed": true, @@ -7424,6 +7426,7 @@ export interface components { * "updater_id": "updater_id", * "id": "id", * "create_date": "create_date", + * "sort_order": 0, * "slug": "slug", * "update_date": "update_date" * } @@ -7436,6 +7439,7 @@ export interface components { name?: string; project_id?: string; slug?: string; + sort_order?: number; timeseries?: components["schemas"]["collectionGroupDetailsTimeseries"][]; update_date?: string; updater_id?: string; @@ -9770,7 +9774,7 @@ export interface components { * "parameter": "parameter", * "name": "name", * "variable": "variable", - * "latest_value": 0.8008281904610115, + * "latest_value": 6.027456183070403, * "id": "id", * "instrument_slug": "instrument_slug", * "is_computed": true, From fe2c6de430495667befcd7e1529d559ee2faaeef Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 6 Nov 2024 18:26:32 -0500 Subject: [PATCH 08/23] add sort order to collection group timesries --- api/internal/handler/collection_groups.go | 53 +++++++++++++++++++- api/internal/model/collection_group.go | 20 ++++++-- api/internal/model/uploader.go | 5 +- api/internal/server/api.go | 1 + api/internal/service/collection_group.go | 3 +- api/migrations/schema/V1.14.00__uploader.sql | 1 + 6 files changed, 76 insertions(+), 7 deletions(-) diff --git 
a/api/internal/handler/collection_groups.go b/api/internal/handler/collection_groups.go index 7dafe28f..e90fca0f 100644 --- a/api/internal/handler/collection_groups.go +++ b/api/internal/handler/collection_groups.go @@ -2,6 +2,7 @@ package handler import ( "net/http" + "strconv" "time" "github.com/USACE/instrumentation-api/api/internal/httperr" @@ -197,7 +198,57 @@ func (h *ApiHandler) AddTimeseriesToCollectionGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.CollectionGroupService.AddTimeseriesToCollectionGroup(c.Request().Context(), cgID, tsID); err != nil { + var sortOrder int + soParam := c.QueryParam("sort_order") + if soParam != "" { + so64, err := strconv.ParseInt(soParam, 10, 0) + if err != nil { + return httperr.BadRequest(err) + } + sortOrder = int(so64) + } + + if err := h.CollectionGroupService.AddTimeseriesToCollectionGroup(c.Request().Context(), cgID, tsID, sortOrder); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusCreated, make(map[string]interface{})) +} + +// UpdateTimeseriesCollectionGroupSortOrder godoc +// +// @Summary updates sort order for collection group timesries +// @Tags collection-groups +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param collection_group_id path string true "collection group uuid" Format(uuid) +// @Param timeseries_id path string true "timeseries uuid" Format(uuid) +// @Param key query string false "api key" +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/collection_groups/{collection_group_id}/timeseries/{timeseries_id} [put] +// @Security Bearer +func (h *ApiHandler) UpdateTimeseriesCollectionGroupSortOrder(c echo.Context) error { + cgID, err := uuid.Parse(c.Param("collection_group_id")) + if err != nil { + return 
httperr.MalformedID(err) + } + tsID, err := uuid.Parse(c.Param("timeseries_id")) + if err != nil { + return httperr.MalformedID(err) + } + var sortOrder int + soParam := c.QueryParam("sort_order") + if soParam != "" { + so64, err := strconv.ParseInt(soParam, 10, 0) + if err != nil { + return httperr.BadRequest(err) + } + sortOrder = int(so64) + } + + if err := h.CollectionGroupService.UpdateTimeseriesCollectionGroupSortOrder(c.Request().Context(), cgID, tsID, sortOrder); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/model/collection_group.go b/api/internal/model/collection_group.go index b23a9ef5..1d25da33 100644 --- a/api/internal/model/collection_group.go +++ b/api/internal/model/collection_group.go @@ -30,6 +30,7 @@ type collectionGroupDetailsTimeseries struct { Timeseries LatestTime *time.Time `json:"latest_time" db:"latest_time"` LatestValue *float32 `json:"latest_value" db:"latest_value"` + SortOrder int `json:"sort_order" db:"sort_order"` } const listCollectionGroups = ` @@ -59,7 +60,7 @@ func (q *Queries) GetCollectionGroupDetails(ctx context.Context, projectID, coll } const getCollectionGroupDetailsTimeseries = ` - SELECT t.*, tm.time as latest_time, tm.value as latest_value + SELECT t.*, tm.time as latest_time, tm.value as latest_value, cgt.sort_order FROM collection_group_timeseries cgt INNER JOIN collection_group cg on cg.id = cgt.collection_group_id INNER JOIN v_timeseries t on t.id = cgt.timeseries_id @@ -74,6 +75,7 @@ const getCollectionGroupDetailsTimeseries = ` WHERE project_id = $1 ) AND cgt.collection_group_id = $2 + ORDER BY sort_order ASC, t.name ASC ` // GetCollectionGroupDetails returns details for a single CollectionGroup @@ -127,13 +129,13 @@ func (q *Queries) DeleteCollectionGroup(ctx context.Context, projectID, collecti } const addTimeseriesToCollectionGroup = ` - INSERT INTO collection_group_timeseries (collection_group_id, timeseries_id) VALUES 
($1, $2) + INSERT INTO collection_group_timeseries (collection_group_id, timeseries_id, sort_order) VALUES ($1, $2, $3) ON CONFLICT ON CONSTRAINT collection_group_unique_timeseries DO NOTHING ` // AddTimeseriesToCollectionGroup adds a timeseries to a collection group -func (q *Queries) AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, addTimeseriesToCollectionGroup, collectionGroupID, timeseriesID) +func (q *Queries) AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID, sortOrder int) error { + _, err := q.db.ExecContext(ctx, addTimeseriesToCollectionGroup, collectionGroupID, timeseriesID, sortOrder) return err } @@ -146,3 +148,13 @@ func (q *Queries) RemoveTimeseriesFromCollectionGroup(ctx context.Context, colle _, err := q.db.ExecContext(ctx, removeTimeseriesFromCollectionGroup, collectionGroupID, timeseriesID) return err } + +const updateTimeseriesCollectionGroupSortOrder = ` + UPDATE collection_group_timeseries set sort_order=$3 + WHERE collection_group_id=$1 AND timeseries_id=$2 +` + +func (q *Queries) UpdateTimeseriesCollectionGroupSortOrder(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID, sortOrder int) error { + _, err := q.db.ExecContext(ctx, updateTimeseriesCollectionGroupSortOrder, collectionGroupID, timeseriesID, sortOrder) + return err +} diff --git a/api/internal/model/uploader.go b/api/internal/model/uploader.go index afa10035..5d0a1be6 100644 --- a/api/internal/model/uploader.go +++ b/api/internal/model/uploader.go @@ -39,7 +39,10 @@ const createUploaderConfig = ` // description text NOT NULL, // create_date timestamptz NOT NULL DEFAULT now(), // creator uuid NOT NULL REFERENCES profile(id), -// type uploader_config_type NOT NULL +// update_date timestamptz NOT NULL DEFAULT now(), +// updater uuid NOT NULL REFERENCES profile(id), +// type uploader_config_type NOT NULL, +// tz_name text NOT NULL DEFAULT 'UTC' 
// ); // // diff --git a/api/internal/server/api.go b/api/internal/server/api.go index f3c3bd43..f60762ab 100644 --- a/api/internal/server/api.go +++ b/api/internal/server/api.go @@ -123,6 +123,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.PUT("/projects/:project_id/collection_groups/:collection_group_id", h.UpdateCollectionGroup) r.private.DELETE("/projects/:project_id/collection_groups/:collection_group_id", h.DeleteCollectionGroup) r.private.POST("/projects/:project_id/collection_groups/:collection_group_id/timeseries/:timeseries_id", h.AddTimeseriesToCollectionGroup) + r.private.PUT("/projects/:project_id/collection_groups/:collection_group_id/timeseries/:timeseries_id", h.UpdateTimeseriesCollectionGroupSortOrder) r.private.DELETE("/projects/:project_id/collection_groups/:collection_group_id/timeseries/:timeseries_id", h.RemoveTimeseriesFromCollectionGroup) // Datalogger diff --git a/api/internal/service/collection_group.go b/api/internal/service/collection_group.go index 784adab9..405d412a 100644 --- a/api/internal/service/collection_group.go +++ b/api/internal/service/collection_group.go @@ -13,7 +13,8 @@ type CollectionGroupService interface { CreateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) UpdateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) DeleteCollectionGroup(ctx context.Context, projectID, collectionGroupID uuid.UUID) error - AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error + AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID, sortOrder int) error + UpdateTimeseriesCollectionGroupSortOrder(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID, sortOrder int) error RemoveTimeseriesFromCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error } diff --git 
a/api/migrations/schema/V1.14.00__uploader.sql b/api/migrations/schema/V1.14.00__uploader.sql index 7b249f95..5ea31cc5 100644 --- a/api/migrations/schema/V1.14.00__uploader.sql +++ b/api/migrations/schema/V1.14.00__uploader.sql @@ -28,3 +28,4 @@ CREATE TABLE uploader_config_mapping ( ); ALTER TABLE collection_group ADD COLUMN sort_order integer NOT NULL DEFAULT 0; +ALTER TABLE collection_group_timeseries ADD COLUMN sort_order integer NOT NULL DEFAULT 0; From bbcbec7288018f1b9af140fb8730f8e45f47cf52 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 6 Nov 2024 18:31:46 -0500 Subject: [PATCH 09/23] fix tests for cg sort order timeseries --- .../handler/collection_groups_test.go | 3 +- api/internal/server/docs/openapi.json | 88 +++++++++++++++++++ api/internal/server/docs/openapi.yaml | 64 ++++++++++++++ report/generated.d.ts | 49 +++++++++++ 4 files changed, 203 insertions(+), 1 deletion(-) diff --git a/api/internal/handler/collection_groups_test.go b/api/internal/handler/collection_groups_test.go index c5481fea..bd29efb3 100644 --- a/api/internal/handler/collection_groups_test.go +++ b/api/internal/handler/collection_groups_test.go @@ -64,9 +64,10 @@ const collectionGroupDetailsSchema = `{ "latest_time": {"type": "string", "format": "date-time" }, "latest_value": {"type": "number" }, "is_computed": { "type": "boolean" }, + "sort_order": { "type": "integer" }, "type": { "type": "string" } }, - "required": ["id", "slug", "name", "variable", "instrument_id", "instrument", "instrument_slug", "parameter_id", "parameter", "unit_id", "unit", "latest_time", "latest_value", "is_computed", "type"], + "required": ["id", "slug", "name", "variable", "instrument_id", "instrument", "instrument_slug", "parameter_id", "parameter", "unit_id", "unit", "latest_time", "latest_value", "is_computed", "type", "sort_order"], "additionalProperties": false } } diff --git a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index 554bef26..a142663b 100644 --- 
a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -6246,6 +6246,91 @@ } ], "summary" : "adds a timeseries to a collection group", "tags" : [ "collection-groups" ] + }, + "put" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "collection group uuid", + "in" : "path", + "name" : "collection_group_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "timeseries uuid", + "in" : "path", + "name" : "timeseries_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "api key", + "in" : "query", + "name" : "key", + "schema" : { + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : true, + "type" : "object" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "updates sort order for collection group timesries", + "tags" : [ "collection-groups" ] } }, "/projects/{project_id}/district_rollup/evaluation_submittals" : { @@ -14704,6 +14789,9 @@ "slug" : { "type" : "string" }, + "sort_order" : { + "type" : "integer" + }, "type" : { "type" : "string" }, diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml 
index c7cf0201..7d713b33 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -4145,6 +4145,65 @@ paths: summary: adds a timeseries to a collection group tags: - collection-groups + put: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: collection group uuid + in: path + name: collection_group_id + required: true + schema: + format: uuid + type: string + - description: timeseries uuid + in: path + name: timeseries_id + required: true + schema: + format: uuid + type: string + - description: api key + in: query + name: key + schema: + type: string + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + security: + - Bearer: [] + summary: updates sort order for collection group timesries + tags: + - collection-groups /projects/{project_id}/district_rollup/evaluation_submittals: get: parameters: @@ -8544,6 +8603,7 @@ components: instrument_slug: instrument_slug is_computed: true latest_time: latest_time + sort_order: 1 unit_id: unit_id slug: slug parameter_id: parameter_id @@ -8572,6 +8632,7 @@ components: instrument_slug: instrument_slug is_computed: true latest_time: latest_time + sort_order: 1 unit_id: unit_id slug: slug parameter_id: parameter_id @@ -11127,6 +11188,7 @@ components: instrument_slug: instrument_slug is_computed: true latest_time: latest_time + sort_order: 1 unit_id: unit_id slug: slug parameter_id: parameter_id @@ -11153,6 +11215,8 @@ components: type: string slug: type: 
string + sort_order: + type: integer type: type: string unit: diff --git a/report/generated.d.ts b/report/generated.d.ts index 437098c7..125ebbfc 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -3575,6 +3575,51 @@ export interface paths { }; }; "/projects/{project_id}/collection_groups/{collection_group_id}/timeseries/{timeseries_id}": { + /** updates sort order for collection group timesries */ + put: { + parameters: { + query?: { + /** @description api key */ + key?: string; + }; + path: { + /** @description project uuid */ + project_id: string; + /** @description collection group uuid */ + collection_group_id: string; + /** @description timeseries uuid */ + timeseries_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; /** adds a timeseries to a collection group */ post: { parameters: { @@ -7379,6 +7424,7 @@ export interface components { * "instrument_slug": "instrument_slug", * "is_computed": true, * "latest_time": "latest_time", + * "sort_order": 1, * "unit_id": "unit_id", * "slug": "slug", * "parameter_id": "parameter_id" @@ -7414,6 +7460,7 @@ export interface components { * "instrument_slug": "instrument_slug", * "is_computed": true, * "latest_time": "latest_time", + * "sort_order": 1, * "unit_id": "unit_id", * "slug": "slug", * "parameter_id": "parameter_id" @@ -9779,6 +9826,7 @@ export interface components { * "instrument_slug": "instrument_slug", * "is_computed": true, * "latest_time": "latest_time", + * "sort_order": 1, * "unit_id": "unit_id", 
* "slug": "slug", * "parameter_id": "parameter_id" @@ -9796,6 +9844,7 @@ export interface components { parameter?: string; parameter_id?: string; slug?: string; + sort_order?: number; type?: string; unit?: string; unit_id?: string; From 4242d2d19ceca28180872aa7d6f2651c275e223c Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 6 Nov 2024 21:31:14 -0500 Subject: [PATCH 10/23] add uploader config endpoints --- api/internal/handler/handler.go | 2 + api/internal/handler/uploader.go | 240 ++++++++++ api/internal/model/uploader.go | 121 +++-- api/internal/server/api.go | 10 + api/internal/server/docs/openapi.json | 471 +++++++++++++++++++ api/internal/server/docs/openapi.yaml | 351 ++++++++++++++ api/internal/service/uploader.go | 49 +- api/migrations/schema/V1.14.00__uploader.sql | 9 +- report/generated.d.ts | 280 +++++++++++ 9 files changed, 1495 insertions(+), 38 deletions(-) create mode 100644 api/internal/handler/uploader.go diff --git a/api/internal/handler/handler.go b/api/internal/handler/handler.go index 6eae2c25..c333df19 100644 --- a/api/internal/handler/handler.go +++ b/api/internal/handler/handler.go @@ -60,6 +60,7 @@ type ApiHandler struct { CalculatedTimeseriesService service.CalculatedTimeseriesService ProcessTimeseriesService service.ProcessTimeseriesService UnitService service.UnitService + UploaderService service.UploaderService } func NewApi(cfg *config.ApiConfig) *ApiHandler { @@ -111,6 +112,7 @@ func NewApi(cfg *config.ApiConfig) *ApiHandler { CalculatedTimeseriesService: service.NewCalculatedTimeseriesService(db, q), ProcessTimeseriesService: service.NewProcessTimeseriesService(db, q), UnitService: service.NewUnitService(db, q), + UploaderService: service.NewUploaderService(db, q), } } diff --git a/api/internal/handler/uploader.go b/api/internal/handler/uploader.go new file mode 100644 index 00000000..dbe6f8ec --- /dev/null +++ b/api/internal/handler/uploader.go @@ -0,0 +1,240 @@ +package handler + +import ( + "net/http" + "time" + + 
"github.com/USACE/instrumentation-api/api/internal/httperr" + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" + "github.com/labstack/echo/v4" +) + +// ListUploaderConfigsForProject godoc +// +// @Summary lists uploader configs for a project +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Success 200 {array} model.UploaderConfig +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs [get] +func (h *ApiHandler) ListUploaderConfigsForProject(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + uu, err := h.UploaderService.ListUploaderConfigsForProject(c.Request().Context(), projectID) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, uu) +} + +// ListUploaderConfigMappings godoc +// +// @Summary lists timeseries mappings for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Success 200 {array} model.UploaderConfigMapping +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [get] +func (h *ApiHandler) ListUploaderConfigMappings(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + mm, err := h.UploaderService.ListUploaderConfigMappings(c.Request().Context(), ucID) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, mm) +} + +// CreateUploaderConfig godoc +// +// @Summary creates an uploader config +// @Tags uploader +// @Produce json +// @Param project_id 
path string true "project uuid" Format(uuid) +// @Param uploader_config body model.UploaderConfig true "uploader config payload" +// @Success 201 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs [post] +func (h *ApiHandler) CreateUploaderConfig(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + var uc model.UploaderConfig + if err := c.Bind(&uc); err != nil { + return httperr.MalformedBody(err) + } + + profile := c.Get("profile").(model.Profile) + + uc.CreatorID = profile.ID + uc.CreateDate = time.Now() + uc.ProjectID = projectID + + newID, err := h.UploaderService.CreateUploaderConfig(c.Request().Context(), uc) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusCreated, map[string]interface{}{"id": newID}) +} + +// UpdateUploaderConfig godoc +// +// @Summary updates an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Param uploader_config body model.UploaderConfig true "uploader config payload" +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id} [put] +func (h *ApiHandler) UpdateUploaderConfig(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + var uc model.UploaderConfig + if err := c.Bind(&uc); err != nil { + return httperr.MalformedBody(err) + } + + profile := c.Get("profile").(model.Profile) + + t := time.Now() + uc.UpdaterID = &profile.ID + uc.UpdateDate = &t + uc.ProjectID = projectID + uc.ID = ucID + + 
if err := h.UploaderService.UpdateUploaderConfig(c.Request().Context(), uc); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) +} + +// DeleteUploaderConfig godoc +// +// @Summary deletes an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id} [delete] +func (h *ApiHandler) DeleteUploaderConfig(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + if err := h.UploaderService.DeleteUploaderConfig(c.Request().Context(), ucID); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) +} + +// CreateUploaderConfigMappings godoc +// +// @Summary creates mappings for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Param uploader_config_mappings body []model.UploaderConfigMapping true "uploader config mappings payload" +// @Success 201 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [post] +func (h *ApiHandler) CreateUploaderConfigMappings(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } 
+ mm := make([]model.UploaderConfigMapping, 0) + if err := c.Bind(&mm); err != nil { + return httperr.MalformedBody(err) + } + if err := h.UploaderService.CreateUploaderConfigMappings(c.Request().Context(), ucID, mm); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusCreated, map[string]interface{}{"id": ucID}) +} + +// UpdateUploaderConfigMappings godoc +// +// @Summary updates mappings for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Param uploader_config_mappings body []model.UploaderConfigMapping true "uploader config mappings payload" +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [put] +func (h *ApiHandler) UpdateUploaderConfigMappings(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + mm := make([]model.UploaderConfigMapping, 0) + if err := c.Bind(&mm); err != nil { + return httperr.MalformedBody(err) + } + if err := h.UploaderService.UpdateUploaderConfigMappings(c.Request().Context(), ucID, mm); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) +} + +// DeleteAllUploaderConfigMappingsForUploaderConfig godoc +// +// @Summary deletes all mappings for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router
/projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [delete] +func (h *ApiHandler) DeleteAllUploaderConfigMappingsForUploaderConfig(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + if err := h.UploaderService.DeleteAllUploaderConfigMappingsForUploaderConfig(c.Request().Context(), ucID); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) +} diff --git a/api/internal/model/uploader.go b/api/internal/model/uploader.go index 5d0a1be6..4c7170ba 100644 --- a/api/internal/model/uploader.go +++ b/api/internal/model/uploader.go @@ -1,7 +1,7 @@ package model import ( - "time" + "context" "github.com/google/uuid" ) @@ -13,42 +13,99 @@ const ( ) type UploaderConfig struct { - ID uuid.UUID - ProjectID uuid.UUID - Name string - Discription string - CreateDate time.Time - Creator uuid.UUID - Type UploaderConfigType + ID uuid.UUID `json:"id" db:"id"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Name string `json:"name" db:"name"` + Description string `json:"description" db:"description"` + Type UploaderConfigType `json:"type" db:"type"` + TzName string `json:"tz_name" db:"tz_name"` + AuditInfo } type UploaderConfigMapping struct { - UploaderConfigID uuid.UUID - FieldName string - TimeseriesID uuid.UUID + UploaderConfigID uuid.UUID `json:"-" db:"uploader_config_id"` + FieldName string `json:"field_name" db:"field_name"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` +} + +const listUploaderConfigsForProject = ` + SELECT * FROM uploader_config WHERE project_id=$1 +` + +func (q *Queries) ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) { + uu := make([]UploaderConfig, 0) + err := q.db.SelectContext(ctx, &uu, 
listUploaderConfigsForProject, projectID) + return uu, err } const createUploaderConfig = ` - INSERT INTO uploader_config VALUES ($1, $2, $3, $4, $5, $6) + INSERT INTO uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) + VALUES ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) ` -// CREATE TABLE uploader_config ( -// id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), -// project_id uuid NOT NULL REFERENCES project(id), -// name text NOT NULL, -// description text NOT NULL, -// create_date timestamptz NOT NULL DEFAULT now(), -// creator uuid NOT NULL REFERENCES profile(id), -// update_date timestamptz NOT NULL DEFAULT now(), -// updater uuid NOT NULL REFERENCES profile(id), -// type uploader_config_type NOT NULL, -// tz_name text NOT NULL DEFAULT 'UTC' -// ); -// -// -// CREATE TABLE uploader_config_mapping ( -// uploader_config_id uuid NOT NULL REFERENCES uploader_config(id), -// field_name text NOT NULL, -// timeseries_id uuid UNIQUE NOT NULL REFERENCES timeseries(id), -// CONSTRAINT uploader_config_mapping_uploader_config_id_field_name UNIQUE(uploader_config_id, field_name) -// ); +func (q *Queries) CreateUploaderConfig(ctx context.Context, uc UploaderConfig) (uuid.UUID, error) { + var newID uuid.UUID + err := q.db.GetContext( + ctx, &newID, createUploaderConfig, + uc.ProjectID, uc.Name, uc.Description, uc.CreateDate, uc.CreatorID, uc.Type, uc.TzName, + ) + return newID, err +} + +const updateUploaderConfig = ` + UPDATE uploader_config SET + name=$2, + description=$3, + update_date=$4, + updater=$5, + type=$6, + tz_name=$7 + WHERE id=$1 +` + +func (q *Queries) UpdateUploaderConfig(ctx context.Context, uc UploaderConfig) error { + _, err := q.db.ExecContext( + ctx, updateUploaderConfig, + uc.ID, uc.Name, uc.Description, uc.UpdateDate, uc.UpdaterID, uc.Type, uc.TzName, + ) + return err +} + +const deleteUploaderConfig = ` + DELETE FROM uploader_config WHERE id=$1 +` + +func (q *Queries) DeleteUploaderConfig(ctx 
context.Context, ucID uuid.UUID) error { + _, err := q.db.ExecContext(ctx, deleteUploaderConfig, ucID) + return err +} + +const listUploaderConfigMappings = ` + SELECT * FROM uploader_config_mapping WHERE uploader_config_id=$1 +` + +func (q *Queries) ListUploaderConfigMappings(ctx context.Context, ucID uuid.UUID) ([]UploaderConfigMapping, error) { + mm := make([]UploaderConfigMapping, 0) + err := q.db.SelectContext(ctx, &mm, listUploaderConfigMappings, ucID) + return mm, err +} + +const createUploaderConfigMapping = ` + INSERT INTO uploader_config_mapping (uploader_config_id, field_name, timeseries_id) VALUES ($1, $2, $3) + RETURNING id +` + +func (q *Queries) CreateUploaderConfigMapping(ctx context.Context, m UploaderConfigMapping) (uuid.UUID, error) { + var newID uuid.UUID + err := q.db.GetContext(ctx, &newID, createUploaderConfigMapping, m.UploaderConfigID, m.FieldName, m.TimeseriesID) + return newID, err +} + +const deleteAllUploaderConfigMappingsForUploaderConfig = ` + DELETE FROM uploader_config_mapping WHERE uploader_config_id=$1 +` + +func (q *Queries) DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, ucID uuid.UUID) error { + _, err := q.db.ExecContext(ctx, deleteAllUploaderConfigMappingsForUploaderConfig, ucID) + return err +} diff --git a/api/internal/server/api.go b/api/internal/server/api.go index f60762ab..4c3ce31c 100644 --- a/api/internal/server/api.go +++ b/api/internal/server/api.go @@ -354,4 +354,14 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // Unit r.public.GET("/units", h.ListUnits) + + // Uploader + r.private.GET("/projects/:project_id/uploader_configs", h.ListUploaderConfigsForProject) + r.private.GET("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.ListUploaderConfigMappings) + r.private.POST("/projects/:project_id/uploader_configs", h.CreateUploaderConfig) + r.private.PUT("/projects/:project_id/uploader_configs/:uploader_config_id", h.UpdateUploaderConfig) + 
r.private.DELETE("/projects/:project_id/uploader_configs/:uploader_config_id", h.DeleteUploaderConfig) + r.private.POST("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.CreateUploaderConfigMappings) + r.private.PUT("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.UpdateUploaderConfigMappings) + r.private.DELETE("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.DeleteAllUploaderConfigMappingsForUploaderConfig) } diff --git a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index a142663b..e699ac20 100644 --- a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -11353,6 +11353,420 @@ "x-codegen-request-body-name" : "timeseries_measurement_collections" } }, + "/projects/{project_id}/uploader_configs" : { + "get" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/UploaderConfig" + }, + "type" : "array" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + } + }, + "summary" : "lists uploader configs for a project", + "tags" : [ "uploader" ] + }, + "post" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "$ref" : "#/components/schemas/UploaderConfig" + } + } + }, + "description" : "uploader config payload", + "required" : true + }, + "responses" : { + "201" : { + "content" : { + "application/json" : { + "schema" : { + 
"additionalProperties" : true, + "type" : "object" + } + } + }, + "description" : "Created" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + } + }, + "summary" : "creates an uploader config", + "tags" : [ "uploader" ], + "x-codegen-request-body-name" : "uploader_config" + } + }, + "/projects/{project_id}/uploader_configs/{uploader_config_id}" : { + "delete" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : true, + "type" : "object" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + } + }, + "summary" : "deletes an uploader config", + "tags" : [ "uploader" ] + }, + "put" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "$ref" : "#/components/schemas/UploaderConfig" + } + } + }, + "description" : "uploader config payload", + "required" : true + }, + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : true, + "type" : "object" + } + } + }, 
+ "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + } + }, + "summary" : "updates an uploader config", + "tags" : [ "uploader" ], + "x-codegen-request-body-name" : "uploader_config" + } + }, + "/projects/{project_id}/uploader_configs/{uploader_config_id}/mappings" : { + "delete" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : true, + "type" : "object" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + } + }, + "summary" : "updates mappings for an uploader config", + "tags" : [ "uploader" ] + }, + "get" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/UploaderConfigMapping" + }, + "type" : "array" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + } + }, + 
"summary" : "lists timeseries mappings for an uploader config", + "tags" : [ "uploader" ] + }, + "post" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/UploaderConfigMapping" + }, + "type" : "array" + } + } + }, + "description" : "uploader config mappings payload", + "required" : true + }, + "responses" : { + "201" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : true, + "type" : "object" + } + } + }, + "description" : "Created" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + } + }, + "summary" : "creates mappings for an uploader config", + "tags" : [ "uploader" ], + "x-codegen-request-body-name" : "uploader_config_mappings" + }, + "put" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/UploaderConfigMapping" + }, + "type" : "array" + } + } + }, + "description" : "uploader config mappings payload", + "required" : true + }, + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : true, + "type" : "object" + } + 
} + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + } + }, + "summary" : "updates mappings for an uploader config", + "tags" : [ "uploader" ], + "x-codegen-request-body-name" : "uploader_config_mappings" + } + }, "/projects/{project_slug}/images/{uri_path}" : { "get" : { "parameters" : [ { @@ -14754,6 +15168,63 @@ }, "type" : "object" }, + "UploaderConfig" : { + "properties" : { + "create_date" : { + "type" : "string" + }, + "creator_id" : { + "type" : "string" + }, + "creator_username" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/UploaderConfigType" + }, + "tz_name" : { + "type" : "string" + }, + "update_date" : { + "type" : "string" + }, + "updater_id" : { + "type" : "string" + }, + "updater_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "UploaderConfigMapping" : { + "properties" : { + "field_name" : { + "type" : "string" + }, + "timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "UploaderConfigType" : { + "enum" : [ "csv", "dux", "toa5" ], + "type" : "string", + "x-enum-varnames" : [ "CSV", "DUX", "TOA5" ] + }, "collectionGroupDetailsTimeseries" : { "properties" : { "id" : { diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml index 7d713b33..c84bd1c8 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -7591,6 +7591,297 @@ paths: tags: - measurement x-codegen-request-body-name: timeseries_measurement_collections + /projects/{project_id}/uploader_configs: + get: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + 
responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/UploaderConfig' + type: array + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + summary: lists uploader configs for a project + tags: + - uploader + post: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + requestBody: + content: + '*/*': + schema: + $ref: '#/components/schemas/UploaderConfig' + description: uploader config payload + required: true + responses: + "201": + content: + application/json: + schema: + additionalProperties: true + type: object + description: Created + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + summary: creates an uploader config + tags: + - uploader + x-codegen-request-body-name: uploader_config + /projects/{project_id}/uploader_configs/{uploader_config_id}: + delete: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + summary: deletes an uploader config + tags: + - uploader + put: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + requestBody: + content: + '*/*': + schema: + $ref: '#/components/schemas/UploaderConfig' + 
description: uploader config payload + required: true + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + summary: updates an uploader config + tags: + - uploader + x-codegen-request-body-name: uploader_config + /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings: + delete: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + summary: updates mappings for an uploader config + tags: + - uploader + get: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/UploaderConfigMapping' + type: array + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + summary: lists timeseries mappings for an uploader config + tags: + - uploader + post: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + requestBody: + 
content: + '*/*': + schema: + items: + $ref: '#/components/schemas/UploaderConfigMapping' + type: array + description: uploader config mappings payload + required: true + responses: + "201": + content: + application/json: + schema: + additionalProperties: true + type: object + description: Created + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + summary: creates mappings for an uploader config + tags: + - uploader + x-codegen-request-body-name: uploader_config_mappings + put: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + requestBody: + content: + '*/*': + schema: + items: + $ref: '#/components/schemas/UploaderConfigMapping' + type: array + description: uploader config mappings payload + required: true + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + summary: updates mappings for an uploader config + tags: + - uploader + x-codegen-request-body-name: uploader_config_mappings /projects/{project_slug}/images/{uri_path}: get: parameters: @@ -11161,6 +11452,66 @@ components: unit_family_id: type: string type: object + UploaderConfig: + example: + updater_username: updater_username + tz_name: tz_name + project_id: project_id + creator_username: creator_username + creator_id: creator_id + name: name + updater_id: updater_id + description: description + id: id + create_date: create_date + type: csv + update_date: update_date + properties: + create_date: + type: string + creator_id: + type: string + creator_username: + type: string + description: + type: string + id: + type: string + 
name: + type: string + project_id: + type: string + type: + $ref: '#/components/schemas/UploaderConfigType' + tz_name: + type: string + update_date: + type: string + updater_id: + type: string + updater_username: + type: string + type: object + UploaderConfigMapping: + example: + timeseries_id: timeseries_id + field_name: field_name + properties: + field_name: + type: string + timeseries_id: + type: string + type: object + UploaderConfigType: + enum: + - csv + - dux + - toa5 + type: string + x-enum-varnames: + - CSV + - DUX + - TOA5 collectionGroupDetailsTimeseries: example: values: diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index 7067b794..a4574ee1 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -13,8 +13,17 @@ import ( ) type UploaderService interface { - CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader) error - CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error + ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]model.UploaderConfig, error) + ListUploaderConfigMappings(ctx context.Context, ucID uuid.UUID) ([]model.UploaderConfigMapping, error) + CreateUploaderConfig(ctx context.Context, uc model.UploaderConfig) (uuid.UUID, error) + UpdateUploaderConfig(ctx context.Context, uc model.UploaderConfig) error + DeleteUploaderConfig(ctx context.Context, ucID uuid.UUID) error + CreateUploaderConfigMapping(ctx context.Context, m model.UploaderConfigMapping) (uuid.UUID, error) + CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error + UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error + DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, ucID uuid.UUID) error + // CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader) error + // CreateTimeseriesMeasurementsFromTOA5File(ctx 
context.Context, r io.Reader) error } type uploaderService struct { @@ -26,6 +35,42 @@ func NewUploaderService(db *model.Database, q *model.Queries) *uploaderService { return &uploaderService{db, q} } +func (s uploaderService) CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + qtx := s.WithTx(tx) + + for _, m := range mm { + if _, err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { + return err + } + } + return tx.Commit() +} + +func (s uploaderService) UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + qtx := s.WithTx(tx) + + if err := qtx.DeleteAllUploaderConfigMappingsForUploaderConfig(ctx, ucID); err != nil { + return err + } + + for _, m := range mm { + if _, err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { + return err + } + } + return tx.Commit() +} + // TODO: transition away from datalogger equivalency table to different parser that's uploader specific func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error { tx, err := s.db.BeginTxx(ctx, nil) diff --git a/api/migrations/schema/V1.14.00__uploader.sql b/api/migrations/schema/V1.14.00__uploader.sql index 5ea31cc5..25aeafee 100644 --- a/api/migrations/schema/V1.14.00__uploader.sql +++ b/api/migrations/schema/V1.14.00__uploader.sql @@ -9,21 +9,22 @@ CREATE TYPE uploader_config_type AS ENUM ('csv', 'dux', 'toa5'); CREATE TABLE uploader_config ( id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), project_id uuid NOT NULL REFERENCES project(id), + slug text UNIQUE NOT NULL, name text NOT NULL, description text NOT NULL, create_date timestamptz NOT NULL DEFAULT now(), creator uuid NOT NULL REFERENCES profile(id), - update_date 
timestamptz NOT NULL DEFAULT now(), - updater uuid NOT NULL REFERENCES profile(id), + update_date timestamptz, + updater uuid REFERENCES profile(id), type uploader_config_type NOT NULL, tz_name text NOT NULL DEFAULT 'UTC' ); CREATE TABLE uploader_config_mapping ( - uploader_config_id uuid NOT NULL REFERENCES uploader_config(id), + uploader_config_id uuid NOT NULL REFERENCES uploader_config(id) ON DELETE CASCADE, field_name text NOT NULL, - timeseries_id uuid UNIQUE NOT NULL REFERENCES timeseries(id), + timeseries_id uuid REFERENCES timeseries(id) ON DELETE SET NULL, CONSTRAINT uploader_config_mapping_uploader_config_id_field_name UNIQUE(uploader_config_id, field_name) ); diff --git a/report/generated.d.ts b/report/generated.d.ts index 125ebbfc..6fd296d3 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -6518,6 +6518,244 @@ export interface paths { }; }; }; + "/projects/{project_id}/uploader_configs": { + /** lists uploader configs for a project */ + get: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["UploaderConfig"][]; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** creates an uploader config */ + post: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + }; + }; + /** @description uploader config payload */ + requestBody: { + content: { + "*/*": components["schemas"]["UploaderConfig"]; + }; + }; + responses: { + /** @description Created */ + 201: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + }; + "/projects/{project_id}/uploader_configs/{uploader_config_id}": { + /** updates an 
uploader config */ + put: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; + }; + }; + /** @description uploader config payload */ + requestBody: { + content: { + "*/*": components["schemas"]["UploaderConfig"]; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** deletes an uploader config */ + delete: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + }; + "/projects/{project_id}/uploader_configs/{uploader_config_id}/mappings": { + /** lists timeseries mappings for an uploader config */ + get: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["UploaderConfigMapping"][]; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** updates mappings for an uploader config */ + put: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; + }; + }; + /** @description uploader config mappings payload */ + requestBody: { + content: { + "*/*": 
components["schemas"]["UploaderConfigMapping"][]; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** creates mappings for an uploader config */ + post: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; + }; + }; + /** @description uploader config mappings payload */ + requestBody: { + content: { + "*/*": components["schemas"]["UploaderConfigMapping"][]; + }; + }; + responses: { + /** @description Created */ + 201: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** updates mappings for an uploader config */ + delete: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + }; "/projects/{project_slug}/images/{uri_path}": { /** serves media, files, etc for a given project */ get: { @@ -9794,6 +10032,48 @@ export interface components { unit_family?: string; unit_family_id?: string; }; + /** + * @example { + * "updater_username": "updater_username", + * "tz_name": "tz_name", + * "project_id": "project_id", + * "creator_username": "creator_username", + * "creator_id": "creator_id", + * "name": "name", + * "updater_id": "updater_id", + * "description": "description", + * "id": "id", + * 
"create_date": "create_date", + * "type": "csv", + * "update_date": "update_date" + * } + */ + UploaderConfig: { + create_date?: string; + creator_id?: string; + creator_username?: string; + description?: string; + id?: string; + name?: string; + project_id?: string; + type?: components["schemas"]["UploaderConfigType"]; + tz_name?: string; + update_date?: string; + updater_id?: string; + updater_username?: string; + }; + /** + * @example { + * "timeseries_id": "timeseries_id", + * "field_name": "field_name" + * } + */ + UploaderConfigMapping: { + field_name?: string; + timeseries_id?: string; + }; + /** @enum {string} */ + UploaderConfigType: "csv" | "dux" | "toa5"; /** * @example { * "values": [ From fd8d467c8622fe227957ad9e0bfbb806100f00ac Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 6 Nov 2024 23:55:12 -0500 Subject: [PATCH 11/23] resolve danginling issues from merge conflict with uploader feature branch --- api/internal/handler/handlerv2.go | 173 ------------------------------ api/internal/service/uploader.go | 53 ++++----- 2 files changed, 23 insertions(+), 203 deletions(-) delete mode 100644 api/internal/handler/handlerv2.go diff --git a/api/internal/handler/handlerv2.go b/api/internal/handler/handlerv2.go deleted file mode 100644 index 6eae2c25..00000000 --- a/api/internal/handler/handlerv2.go +++ /dev/null @@ -1,173 +0,0 @@ -package handler - -import ( - "net/http" - "strings" - "time" - - "github.com/USACE/instrumentation-api/api/internal/cloud" - "github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/middleware" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/USACE/instrumentation-api/api/internal/service" -) - -func newHttpClient() *http.Client { - return &http.Client{ - Timeout: time.Second * 60, - CheckRedirect: func(req *http.Request, via []*http.Request) error { - return nil - }, - } -} - -type ApiHandler struct { - Middleware middleware.Middleware - 
BlobService cloud.Blob - AlertService service.AlertService - AlertConfigService service.AlertConfigService - AlertSubscriptionService service.AlertSubscriptionService - EmailAutocompleteService service.EmailAutocompleteService - AwareParameterService service.AwareParameterService - CollectionGroupService service.CollectionGroupService - DataloggerService service.DataloggerService - DataloggerTelemetryService service.DataloggerTelemetryService - DistrictRollupService service.DistrictRollupService - DomainService service.DomainService - EquivalencyTableService service.EquivalencyTableService - EvaluationService service.EvaluationService - HeartbeatService service.HeartbeatService - HomeService service.HomeService - InstrumentService service.InstrumentService - InstrumentAssignService service.InstrumentAssignService - InstrumentConstantService service.InstrumentConstantService - InstrumentGroupService service.InstrumentGroupService - InstrumentNoteService service.InstrumentNoteService - InstrumentStatusService service.InstrumentStatusService - IpiInstrumentService service.IpiInstrumentService - MeasurementService service.MeasurementService - InclinometerMeasurementService service.InclinometerMeasurementService - OpendcsService service.OpendcsService - PlotConfigService service.PlotConfigService - ProfileService service.ProfileService - ProjectRoleService service.ProjectRoleService - ProjectService service.ProjectService - ReportConfigService service.ReportConfigService - SaaInstrumentService service.SaaInstrumentService - SubmittalService service.SubmittalService - TimeseriesService service.TimeseriesService - TimeseriesCwmsService service.TimeseriesCwmsService - CalculatedTimeseriesService service.CalculatedTimeseriesService - ProcessTimeseriesService service.ProcessTimeseriesService - UnitService service.UnitService -} - -func NewApi(cfg *config.ApiConfig) *ApiHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - ps := 
cloud.NewSQSPubsub(&cfg.AWSSQSConfig) - - profileService := service.NewProfileService(db, q) - projectRoleService := service.NewProjectRoleService(db, q) - dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) - mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) - - return &ApiHandler{ - Middleware: mw, - BlobService: cloud.NewS3Blob(&cfg.AWSS3Config, "/instrumentation", cfg.RoutePrefix), - AlertService: service.NewAlertService(db, q), - AlertConfigService: service.NewAlertConfigService(db, q), - AlertSubscriptionService: service.NewAlertSubscriptionService(db, q), - EmailAutocompleteService: service.NewEmailAutocompleteService(db, q), - AwareParameterService: service.NewAwareParameterService(db, q), - CollectionGroupService: service.NewCollectionGroupService(db, q), - DataloggerService: service.NewDataloggerService(db, q), - DataloggerTelemetryService: dataloggerTelemetryService, - DistrictRollupService: service.NewDistrictRollupService(db, q), - DomainService: service.NewDomainService(db, q), - EquivalencyTableService: service.NewEquivalencyTableService(db, q), - EvaluationService: service.NewEvaluationService(db, q), - HeartbeatService: service.NewHeartbeatService(db, q), - HomeService: service.NewHomeService(db, q), - InstrumentService: service.NewInstrumentService(db, q), - InstrumentAssignService: service.NewInstrumentAssignService(db, q), - InstrumentConstantService: service.NewInstrumentConstantService(db, q), - InstrumentGroupService: service.NewInstrumentGroupService(db, q), - InstrumentNoteService: service.NewInstrumentNoteService(db, q), - InstrumentStatusService: service.NewInstrumentStatusService(db, q), - IpiInstrumentService: service.NewIpiInstrumentService(db, q), - MeasurementService: service.NewMeasurementService(db, q), - InclinometerMeasurementService: service.NewInclinometerMeasurementService(db, q), - OpendcsService: service.NewOpendcsService(db, q), - 
PlotConfigService: service.NewPlotConfigService(db, q), - ProfileService: profileService, - ProjectRoleService: service.NewProjectRoleService(db, q), - ProjectService: service.NewProjectService(db, q), - ReportConfigService: service.NewReportConfigService(db, q, ps, cfg.AuthJWTMocked), - SaaInstrumentService: service.NewSaaInstrumentService(db, q), - SubmittalService: service.NewSubmittalService(db, q), - TimeseriesService: service.NewTimeseriesService(db, q), - TimeseriesCwmsService: service.NewTimeseriesCwmsService(db, q), - CalculatedTimeseriesService: service.NewCalculatedTimeseriesService(db, q), - ProcessTimeseriesService: service.NewProcessTimeseriesService(db, q), - UnitService: service.NewUnitService(db, q), - } -} - -type TelemetryHandler struct { - Middleware middleware.Middleware - DataloggerService service.DataloggerService - DataloggerTelemetryService service.DataloggerTelemetryService - EquivalencyTableService service.EquivalencyTableService - MeasurementService service.MeasurementService -} - -func NewTelemetry(cfg *config.TelemetryConfig) *TelemetryHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - - profileService := service.NewProfileService(db, q) - projectRoleService := service.NewProjectRoleService(db, q) - dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) - mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) - - return &TelemetryHandler{ - Middleware: mw, - DataloggerService: service.NewDataloggerService(db, q), - DataloggerTelemetryService: dataloggerTelemetryService, - EquivalencyTableService: service.NewEquivalencyTableService(db, q), - MeasurementService: service.NewMeasurementService(db, q), - } -} - -type AlertCheckHandler struct { - AlertCheckService service.AlertCheckService -} - -func NewAlertCheck(cfg *config.AlertCheckConfig) *AlertCheckHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - - return 
&AlertCheckHandler{ - AlertCheckService: service.NewAlertCheckService(db, q, cfg), - } -} - -type DcsLoaderHandler struct { - PubsubService cloud.Pubsub - DcsLoaderService service.DcsLoaderService -} - -func NewDcsLoader(cfg *config.DcsLoaderConfig) *DcsLoaderHandler { - if !strings.HasPrefix(cfg.AWSSQSEndpoint, "https://") || !strings.HasPrefix(cfg.AWSSQSEndpoint, "http://") { - cfg.AWSSQSEndpoint = "https://" + cfg.AWSSQSEndpoint - } - s3Blob := cloud.NewS3Blob(&cfg.AWSS3Config, "", "") - ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig).WithBlob(s3Blob) - apiClient := newHttpClient() - - return &DcsLoaderHandler{ - PubsubService: ps, - DcsLoaderService: service.NewDcsLoaderService(apiClient, cfg), - } -} diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index a4574ee1..cac8eeec 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -43,21 +43,14 @@ func (s uploaderService) CreateUploaderConfigMappings(ctx context.Context, ucID defer model.TxDo(tx.Rollback) qtx := s.WithTx(tx) - for _, m := range mm { - if _, err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { - return err - } - } - return tx.Commit() +func CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { + // TODO + return nil } -func (s uploaderService) UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) +// TODO transition away from datalogger equivalency table to different parser that's uploader specific +func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { + // TODO Get mapper by id if err := qtx.DeleteAllUploaderConfigMappingsForUploaderConfig(ctx, ucID); err != nil { return err @@ -101,23 +94,23 @@ func (s uploaderService) 
CreateTimeseriesMeasurementsFromTOA5File(ctx context.Co } meta := model.Environment{ - StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - OSVersion: envHeader[4], - ProgName: envHeader[5], - TableName: envHeader[6], - } - - dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) - if err != nil { - return err - } - - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) - if err != nil { - return err - } + // StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + // OSVersion: envHeader[4], + // ProgName: envHeader[5], + TableName: envHeader[6], + } + + // dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + // if err != nil { + // return err + // } + // + // tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + // if err != nil { + // return err + // } // first two columns are timestamp and record number // we only want to collect the measurement fields here From ad996224e36a44a80d5b067bbf8bb4d65b1cee02 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 6 Nov 2024 23:59:06 -0500 Subject: [PATCH 12/23] fix uploader service function definitions --- api/internal/service/uploader.go | 53 ++++++++++++++++++-------------- 1 file changed, 30 insertions(+), 23 deletions(-) diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index cac8eeec..a4574ee1 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -43,14 +43,21 @@ func (s uploaderService) CreateUploaderConfigMappings(ctx context.Context, ucID defer model.TxDo(tx.Rollback) qtx := s.WithTx(tx) -func CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { - // TODO - return nil + for _, m := range mm { + if _, err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { + return err + } + } + return tx.Commit() } -// TODO transition away from datalogger equivalency table to different 
parser that's uploader specific -func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { - // TODO Get mapper by id +func (s uploaderService) UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { + tx, err := s.db.BeginTxx(ctx, nil) + if err != nil { + return err + } + defer model.TxDo(tx.Rollback) + qtx := s.WithTx(tx) if err := qtx.DeleteAllUploaderConfigMappingsForUploaderConfig(ctx, ucID); err != nil { return err @@ -94,23 +101,23 @@ func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Co } meta := model.Environment{ - // StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - // OSVersion: envHeader[4], - // ProgName: envHeader[5], - TableName: envHeader[6], - } - - // dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) - // if err != nil { - // return err - // } - // - // tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) - // if err != nil { - // return err - // } + StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + OSVersion: envHeader[4], + ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + if err != nil { + return err + } + + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + if err != nil { + return err + } // first two columns are timestamp and record number // we only want to collect the measurement fields here From bb4f480205b47c6391df71d7a948470f9696b033 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Thu, 7 Nov 2024 00:13:57 -0500 Subject: [PATCH 13/23] fix uploader config mapping create sql statement --- api/internal/model/uploader.go | 8 +++----- api/internal/service/uploader.go | 6 +++--- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/api/internal/model/uploader.go 
b/api/internal/model/uploader.go index 4c7170ba..beb61469 100644 --- a/api/internal/model/uploader.go +++ b/api/internal/model/uploader.go @@ -92,13 +92,11 @@ func (q *Queries) ListUploaderConfigMappings(ctx context.Context, ucID uuid.UUID const createUploaderConfigMapping = ` INSERT INTO uploader_config_mapping (uploader_config_id, field_name, timeseries_id) VALUES ($1, $2, $3) - RETURNING id ` -func (q *Queries) CreateUploaderConfigMapping(ctx context.Context, m UploaderConfigMapping) (uuid.UUID, error) { - var newID uuid.UUID - err := q.db.GetContext(ctx, &newID, createUploaderConfigMapping, m.UploaderConfigID, m.FieldName, m.TimeseriesID) - return newID, err +func (q *Queries) CreateUploaderConfigMapping(ctx context.Context, m UploaderConfigMapping) error { + _, err := q.db.ExecContext(ctx, createUploaderConfigMapping, m.UploaderConfigID, m.FieldName, m.TimeseriesID) + return err } const deleteAllUploaderConfigMappingsForUploaderConfig = ` diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index a4574ee1..7a7bc5e4 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -18,7 +18,7 @@ type UploaderService interface { CreateUploaderConfig(ctx context.Context, uc model.UploaderConfig) (uuid.UUID, error) UpdateUploaderConfig(ctx context.Context, uc model.UploaderConfig) error DeleteUploaderConfig(ctx context.Context, ucID uuid.UUID) error - CreateUploaderConfigMapping(ctx context.Context, m model.UploaderConfigMapping) (uuid.UUID, error) + CreateUploaderConfigMapping(ctx context.Context, m model.UploaderConfigMapping) error CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, ucID uuid.UUID) error @@ -44,7 +44,7 @@ func (s uploaderService) CreateUploaderConfigMappings(ctx 
context.Context, ucID qtx := s.WithTx(tx) for _, m := range mm { - if _, err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { + if err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { return err } } @@ -64,7 +64,7 @@ func (s uploaderService) UpdateUploaderConfigMappings(ctx context.Context, ucID } for _, m := range mm { - if _, err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { + if err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { return err } } From 080f85c55f8a07152b25badeb2307924d2a10150 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Thu, 7 Nov 2024 00:19:45 -0500 Subject: [PATCH 14/23] fix create uploader config sql statement --- api/internal/model/uploader.go | 1 + 1 file changed, 1 insertion(+) diff --git a/api/internal/model/uploader.go b/api/internal/model/uploader.go index beb61469..c527df8d 100644 --- a/api/internal/model/uploader.go +++ b/api/internal/model/uploader.go @@ -41,6 +41,7 @@ func (q *Queries) ListUploaderConfigsForProject(ctx context.Context, projectID u const createUploaderConfig = ` INSERT INTO uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) VALUES ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) + RETURNING id ` func (q *Queries) CreateUploaderConfig(ctx context.Context, uc UploaderConfig) (uuid.UUID, error) { From fb8d46b8c97ad3c9339167c490b7d3473f17cec1 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 8 Nov 2024 11:11:36 -0500 Subject: [PATCH 15/23] wip; fix existing db queries; start incl v2 --- api/go.mod | 3 +- api/go.sum | 15 +- api/internal/db/batch.go | 356 +++++++++++++++++ api/internal/db/collection_group.sql_gen.go | 33 +- api/internal/db/instrument_incl.sql_gen.go | 191 +++++++++ api/internal/db/models.go | 67 +++- api/internal/db/querier.go | 28 +- api/internal/db/timeseries.sql_gen.go | 75 ++-- .../db/timeseries_calculated.sql_gen.go | 93 ++--- api/internal/db/uploader.sql_gen.go | 163 ++++++++ api/internal/handler/handler.go 
| 23 ++ api/internal/model/instrument.go | 4 +- api/internal/servicev2/alert.go | 7 +- api/internal/servicev2/alert_subscription.go | 2 +- api/internal/servicev2/db.go | 34 ++ api/internal/servicev2/evaluation.go | 14 +- api/internal/servicev2/instrument.go | 116 ++---- api/internal/servicev2/instrument_assign.go | 21 +- api/internal/servicev2/instrument_constant.go | 51 ++- api/internal/servicev2/instrument_group.go | 8 +- api/internal/servicev2/instrument_incl.go | 211 ++++++++++ api/internal/servicev2/instrument_ipi.go | 140 ++++++- api/internal/servicev2/instrument_note.go | 8 +- api/internal/servicev2/instrument_opts.go | 373 ------------------ api/internal/servicev2/instrument_saa.go | 163 +++++++- api/internal/servicev2/instrument_status.go | 22 +- api/internal/servicev2/measurement.go | 28 +- api/internal/servicev2/plot_config_contour.go | 7 +- .../servicev2/plot_config_scatter_line.go | 7 +- api/internal/servicev2/project.go | 8 +- api/internal/servicev2/report_config.go | 12 +- api/internal/servicev2/timeseries.go | 6 +- .../servicev2/timeseries_calculated.go | 73 ++-- api/internal/servicev2/uploader.go | 113 ++++-- .../0140__views_depth_based_instruments.sql | 125 +++++- .../repeat/0170__views_uploader.sql | 26 ++ api/migrations/schema/V1.15.00__incl_opts.sql | 17 + .../schema/V1.16.00__uploader_config.sql | 8 + api/queries/aware.sql | 1 + api/queries/instrument_incl.sql | 82 ++++ api/queries/instrument_saa.sql | 10 + api/queries/timeseries.sql | 21 +- api/queries/timeseries_calculated.sql | 26 +- api/queries/uploader.sql | 35 ++ 44 files changed, 1991 insertions(+), 835 deletions(-) create mode 100644 api/internal/db/instrument_incl.sql_gen.go create mode 100644 api/internal/db/uploader.sql_gen.go create mode 100644 api/internal/servicev2/instrument_incl.go delete mode 100644 api/internal/servicev2/instrument_opts.go create mode 100644 api/migrations/repeat/0170__views_uploader.sql create mode 100644 api/migrations/schema/V1.15.00__incl_opts.sql create 
mode 100644 api/migrations/schema/V1.16.00__uploader_config.sql create mode 100644 api/queries/instrument_incl.sql create mode 100644 api/queries/uploader.sql diff --git a/api/go.mod b/api/go.mod index 6fec5963..0935beec 100644 --- a/api/go.mod +++ b/api/go.mod @@ -28,6 +28,8 @@ require ( github.com/paulmach/orb v0.11.1 github.com/stretchr/testify v1.9.0 github.com/tidwall/btree v1.7.0 + github.com/twpayne/go-geom v1.5.7 + github.com/twpayne/pgx-geom v0.0.2 github.com/xeipuuv/gojsonschema v1.2.0 golang.org/x/crypto v0.27.0 golang.org/x/image v0.20.0 @@ -64,7 +66,6 @@ require ( github.com/mattn/go-isatty v0.0.20 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rogpeppe/go-internal v1.11.0 // indirect - github.com/twpayne/go-geom v1.5.7 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect diff --git a/api/go.sum b/api/go.sum index c46a7ff8..f9aaa63f 100644 --- a/api/go.sum +++ b/api/go.sum @@ -1,10 +1,16 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= +github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible h1:1G1pk05UrOh0NlF1oeaaix1x8XzrfjIDK47TY0Zehcw= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/alecthomas/assert/v2 
v2.10.0 h1:jjRCHsj6hBJhkmhznrCzoNpbA3zqy0fYiUcYZP/GkPY= +github.com/alecthomas/assert/v2 v2.10.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= +github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= +github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/aws/aws-lambda-go v1.47.0 h1:0H8s0vumYx/YKs4sE7YM0ktwL2eWse+kfopsRI1sXVI= github.com/aws/aws-lambda-go v1.47.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7RfgJv23DymV8A= github.com/aws/aws-sdk-go-v2 v1.30.5 h1:mWSRTwQAb0aLE17dSzztCVJWI9+cRMgqebndjwDyK0g= @@ -92,6 +98,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= @@ -142,16 +150,13 @@ github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgS github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= -github.com/jackc/pgx/v5 v5.7.0 h1:FG6VLIdzvAPhnYqP14sQ2xhFLkiUQHCs6ySqO91kF4g= -github.com/jackc/pgx/v5 v5.7.0/go.mod h1:awP1KNnjylvpxHuHP63gzjhnGkI1iw+PMoIwvoleN/8= github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs= 
github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA= github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= -github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= @@ -240,6 +245,8 @@ github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EU github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/twpayne/go-geom v1.5.7 h1:7fdceDUr03/MP7rAKOaTV6x9njMiQdxB/D0PDzMTCDc= github.com/twpayne/go-geom v1.5.7/go.mod h1:y4fTAQtLedXW8eG2Yo4tYrIGN1yIwwKkmA+K3iSHKBA= +github.com/twpayne/pgx-geom v0.0.2 h1:DZcp66JfCwyfQMH1JNBa0vfF+/hi4WQsfHMqBRXp8WI= +github.com/twpayne/pgx-geom v0.0.2/go.mod h1:rUjv/MgeOmPZqUbLY7Qgq56dAAHE28S7FZMFtXQMRoI= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go index 1f757ea4..550882dc 100644 --- a/api/internal/db/batch.go +++ b/api/internal/db/batch.go @@ 
-258,6 +258,127 @@ func (b *CreateEvaluationInstrumentsBatchBatchResults) Close() error { return b.br.Close() } +const createInclOptsBatch = `-- name: CreateInclOptsBatch :batchexec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type CreateInclOptsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateInclOptsBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) CreateInclOptsBatch(ctx context.Context, arg []CreateInclOptsBatchParams) *CreateInclOptsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.NumSegments, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(createInclOptsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateInclOptsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateInclOptsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateInclOptsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const createInclSegmentBatch = `-- name: CreateInclSegmentBatch :batchexec +insert into incl_segment ( + id, + instrument_id, + depth_timeseries_id, + a0_timeseries_id, + a180_timeseries_id, + b0_timeseries_id, + b180_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type CreateInclSegmentBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateInclSegmentBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) CreateInclSegmentBatch(ctx context.Context, arg []CreateInclSegmentBatchParams) *CreateInclSegmentBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.DepthTimeseriesID, + a.A0TimeseriesID, + a.A180TimeseriesID, + a.B0TimeseriesID, + a.B180TimeseriesID, + } + batch.Queue(createInclSegmentBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateInclSegmentBatchBatchResults{br, len(arg), false} +} + +func (b *CreateInclSegmentBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateInclSegmentBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const createInstrumentConstantBatch = `-- name: CreateInstrumentConstantBatch :batchexec insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2) ` @@ -1406,6 +1527,57 @@ func (b *CreateTimeseriesNotesBatchBatchResults) Close() error { return b.br.Close() } +const createUploaderConfigMappingsBatch = `-- name: CreateUploaderConfigMappingsBatch :batchexec +insert into uploader_config_mapping (uploader_config_id, field_name, timeseries_id) values ($1, $2, $3) +` + +type CreateUploaderConfigMappingsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type CreateUploaderConfigMappingsBatchParams struct { + UploaderConfigID uuid.UUID `json:"uploader_config_id"` + FieldName string `json:"field_name"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) CreateUploaderConfigMappingsBatch(ctx context.Context, arg []CreateUploaderConfigMappingsBatchParams) *CreateUploaderConfigMappingsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.UploaderConfigID, + a.FieldName, + a.TimeseriesID, + } + batch.Queue(createUploaderConfigMappingsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &CreateUploaderConfigMappingsBatchBatchResults{br, len(arg), false} +} + +func (b *CreateUploaderConfigMappingsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *CreateUploaderConfigMappingsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const unassignInstrumentFromProjectBatch = `-- name: UnassignInstrumentFromProjectBatch :batchexec delete from project_instrument where project_id = $1 and instrument_id = $2 ` @@ -1504,6 +1676,125 @@ func (b *UnassignReportConfigPlotConfigBatchBatchResults) Close() error { return b.br.Close() } +const updateInclOptsBatch = `-- name: UpdateInclOptsBatch :batchexec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type UpdateInclOptsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UpdateInclOptsBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) UpdateInclOptsBatch(ctx context.Context, arg []UpdateInclOptsBatchParams) *UpdateInclOptsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(updateInclOptsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UpdateInclOptsBatchBatchResults{br, len(arg), false} +} + +func (b *UpdateInclOptsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UpdateInclOptsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const updateInclSegmentsBatch = `-- name: UpdateInclSegmentsBatch :batchexec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2 +` + +type UpdateInclSegmentsBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UpdateInclSegmentsBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) UpdateInclSegmentsBatch(ctx context.Context, arg []UpdateInclSegmentsBatchParams) *UpdateInclSegmentsBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.DepthTimeseriesID, + a.A0TimeseriesID, + a.A180TimeseriesID, + a.B0TimeseriesID, + a.B180TimeseriesID, + } + batch.Queue(updateInclSegmentsBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UpdateInclSegmentsBatchBatchResults{br, len(arg), false} +} + +func (b *UpdateInclSegmentsBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UpdateInclSegmentsBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const updateIpiOptsBatch = `-- name: UpdateIpiOptsBatch :batchexec update ipi_opts set bottom_elevation_timeseries_id = $2, @@ -1673,3 +1964,68 @@ func (b *UpdateSaaOptsBatchBatchResults) Close() error { b.closed = true return b.br.Close() } + +const updateSaaSegmentBatch = `-- name: UpdateSaaSegmentBatch :batchexec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2 +` + +type UpdateSaaSegmentBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UpdateSaaSegmentBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) UpdateSaaSegmentBatch(ctx context.Context, arg []UpdateSaaSegmentBatchParams) *UpdateSaaSegmentBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.XTimeseriesID, + a.YTimeseriesID, + a.ZTimeseriesID, + a.TempTimeseriesID, + } + batch.Queue(updateSaaSegmentBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UpdateSaaSegmentBatchBatchResults{br, len(arg), false} +} + +func (b *UpdateSaaSegmentBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UpdateSaaSegmentBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} diff --git a/api/internal/db/collection_group.sql_gen.go b/api/internal/db/collection_group.sql_gen.go index 016fd79b..8ecef1d4 100644 --- a/api/internal/db/collection_group.sql_gen.go +++ b/api/internal/db/collection_group.sql_gen.go @@ -42,7 +42,18 @@ type CreateCollectionGroupParams struct { UpdateDate *time.Time `json:"update_date"` } -func (q *Queries) CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CollectionGroup, error) { +type CreateCollectionGroupRow struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` +} + +func (q *Queries) CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CreateCollectionGroupRow, error) { row := q.db.QueryRow(ctx, createCollectionGroup, arg.ProjectID, arg.Column2, @@ -51,7 +62,7 @@ func (q *Queries) CreateCollectionGroup(ctx context.Context, arg CreateCollectio arg.Updater, arg.UpdateDate, ) - var i CollectionGroup + var i CreateCollectionGroupRow err := row.Scan( &i.ID, &i.ProjectID, @@ -80,7 +91,7 @@ func (q *Queries) DeleteCollectionGroup(ctx context.Context, arg DeleteCollectio } const getCollectionGroupDetails = `-- name: GetCollectionGroupDetails :one -select id, project_id, name, slug, creator, create_date, updater, update_date, timeseries from 
v_collection_group_details where id = $1 +select id, project_id, name, slug, creator, create_date, updater, update_date, sort_order, timeseries from v_collection_group_details where id = $1 ` func (q *Queries) GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) { @@ -95,6 +106,7 @@ func (q *Queries) GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) ( &i.CreateDate, &i.Updater, &i.UpdateDate, + &i.SortOrder, &i.Timeseries, ) return i, err @@ -174,7 +186,18 @@ type UpdateCollectionGroupParams struct { UpdateDate *time.Time `json:"update_date"` } -func (q *Queries) UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (CollectionGroup, error) { +type UpdateCollectionGroupRow struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date"` + Updater *uuid.UUID `json:"updater"` + UpdateDate *time.Time `json:"update_date"` +} + +func (q *Queries) UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (UpdateCollectionGroupRow, error) { row := q.db.QueryRow(ctx, updateCollectionGroup, arg.ProjectID, arg.ID, @@ -182,7 +205,7 @@ func (q *Queries) UpdateCollectionGroup(ctx context.Context, arg UpdateCollectio arg.Updater, arg.UpdateDate, ) - var i CollectionGroup + var i UpdateCollectionGroupRow err := row.Scan( &i.ID, &i.ProjectID, diff --git a/api/internal/db/instrument_incl.sql_gen.go b/api/internal/db/instrument_incl.sql_gen.go new file mode 100644 index 00000000..e3ee5e91 --- /dev/null +++ b/api/internal/db/instrument_incl.sql_gen.go @@ -0,0 +1,191 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument_incl.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const createInclOpts = `-- name: CreateInclOpts :exec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type CreateInclOptsParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) CreateInclOpts(ctx context.Context, arg CreateInclOptsParams) error { + _, err := q.db.Exec(ctx, createInclOpts, + arg.InstrumentID, + arg.NumSegments, + arg.BottomElevationTimeseriesID, + arg.InitialTime, + ) + return err +} + +const createInclSegment = `-- name: CreateInclSegment :exec +insert into incl_segment ( + id, + instrument_id, + depth_timeseries_id, + a0_timeseries_id, + a180_timeseries_id, + b0_timeseries_id, + b180_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type CreateInclSegmentParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) CreateInclSegment(ctx context.Context, arg CreateInclSegmentParams) error { + _, err := q.db.Exec(ctx, createInclSegment, + arg.ID, + arg.InstrumentID, + arg.DepthTimeseriesID, + arg.A0TimeseriesID, + arg.A180TimeseriesID, + arg.B0TimeseriesID, + arg.B180TimeseriesID, + ) + return err +} + +const getAllInclSegmentsForInstrument = `-- name: GetAllInclSegmentsForInstrument :many +select id, instrument_id, depth_timeseries_id, a0_timeseries_id, a180_timeseries_id, b0_timeseries_id, 
b180_timeseries_id from v_incl_segment where instrument_id = $1 +` + +func (q *Queries) GetAllInclSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error) { + rows, err := q.db.Query(ctx, getAllInclSegmentsForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInclSegment{} + for rows.Next() { + var i VInclSegment + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.DepthTimeseriesID, + &i.A0TimeseriesID, + &i.A180TimeseriesID, + &i.B0TimeseriesID, + &i.B180TimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getInclMeasurementsForInstrument = `-- name: GetInclMeasurementsForInstrument :many +select m1.instrument_id, m1.time, m1.measurements +from v_incl_measurement m1 +where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_incl_measurement m2 +where m2.time in (select o.initial_time from incl_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc +` + +type GetInclMeasurementsForInstrumentParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Time time.Time `json:"time"` + Time_2 time.Time `json:"time_2"` +} + +func (q *Queries) GetInclMeasurementsForInstrument(ctx context.Context, arg GetInclMeasurementsForInstrumentParams) ([]VInclMeasurement, error) { + rows, err := q.db.Query(ctx, getInclMeasurementsForInstrument, arg.InstrumentID, arg.Time, arg.Time_2) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInclMeasurement{} + for rows.Next() { + var i VInclMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateInclOpts = `-- name: 
UpdateInclOpts :exec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type UpdateInclOptsParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) UpdateInclOpts(ctx context.Context, arg UpdateInclOptsParams) error { + _, err := q.db.Exec(ctx, updateInclOpts, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const updateInclSegment = `-- name: UpdateInclSegment :exec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2 +` + +type UpdateInclSegmentParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) UpdateInclSegment(ctx context.Context, arg UpdateInclSegmentParams) error { + _, err := q.db.Exec(ctx, updateInclSegment, + arg.ID, + arg.InstrumentID, + arg.DepthTimeseriesID, + arg.A0TimeseriesID, + arg.A180TimeseriesID, + arg.B0TimeseriesID, + arg.B180TimeseriesID, + ) + return err +} diff --git a/api/internal/db/models.go b/api/internal/db/models.go index dfa8c986..235a803a 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -413,11 +413,13 @@ type CollectionGroup struct { CreateDate time.Time `json:"create_date"` Updater *uuid.UUID `json:"updater"` UpdateDate *time.Time `json:"update_date"` + SortOrder int32 `json:"sort_order"` } type CollectionGroupTimeseries struct { CollectionGroupID uuid.UUID `json:"collection_group_id"` 
TimeseriesID uuid.UUID `json:"timeseries_id"` + SortOrder int32 `json:"sort_order"` } type Config struct { @@ -521,6 +523,23 @@ type Heartbeat struct { Time time.Time `json:"time"` } +type InclOpt struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +type InclSegment struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ID int32 `json:"id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + type InclinometerMeasurement struct { Time time.Time `json:"time"` Values []byte `json:"values"` @@ -892,19 +911,30 @@ type UnitFamily struct { } type UploaderConfig struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Description string `json:"description"` - CreateDate time.Time `json:"create_date"` - Creator uuid.UUID `json:"creator"` - Type UploaderConfigType `json:"type"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + CreateDate time.Time `json:"create_date"` + Creator uuid.UUID `json:"creator"` + UpdateDate *time.Time `json:"update_date"` + Updater *uuid.UUID `json:"updater"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string 
`json:"comment_field"` } type UploaderConfigMapping struct { - UploaderConfigID uuid.UUID `json:"uploader_config_id"` - FieldName string `json:"field_name"` - TimeseriesID uuid.UUID `json:"timeseries_id"` + UploaderConfigID uuid.UUID `json:"uploader_config_id"` + FieldName string `json:"field_name"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` } type VAlert struct { @@ -979,6 +1009,7 @@ type VCollectionGroupDetail struct { CreateDate time.Time `json:"create_date"` Updater *uuid.UUID `json:"updater"` UpdateDate *time.Time `json:"update_date"` + SortOrder int32 `json:"sort_order"` Timeseries []CollectionGroupDetailsTimeseries `json:"timeseries"` } @@ -1084,6 +1115,22 @@ type VEvaluation struct { Instruments []InstrumentIDName `json:"instruments"` } +type VInclMeasurement struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Time time.Time `json:"time"` + Measurements interface{} `json:"measurements"` +} + +type VInclSegment struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + type VInstrument struct { ID uuid.UUID `json:"id"` Deleted bool `json:"deleted"` diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index 66bfec93..c224ed06 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -30,7 +30,7 @@ type Querier interface { CreateAwarePlatformBatch(ctx context.Context, arg []CreateAwarePlatformBatchParams) *CreateAwarePlatformBatchBatchResults CreateCalculatedTimeseries(ctx context.Context, arg CreateCalculatedTimeseriesParams) (uuid.UUID, error) CreateCalculation(ctx context.Context, arg CreateCalculationParams) error - CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) 
(CollectionGroup, error) + CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CreateCollectionGroupRow, error) CreateDatalogger(ctx context.Context, arg CreateDataloggerParams) (uuid.UUID, error) CreateDataloggerError(ctx context.Context, arg CreateDataloggerErrorParams) error CreateDataloggerHash(ctx context.Context, arg CreateDataloggerHashParams) error @@ -39,6 +39,10 @@ type Querier interface { CreateEvaluationInstrument(ctx context.Context, arg CreateEvaluationInstrumentParams) error CreateEvaluationInstrumentsBatch(ctx context.Context, arg []CreateEvaluationInstrumentsBatchParams) *CreateEvaluationInstrumentsBatchBatchResults CreateHeartbeat(ctx context.Context, argTime time.Time) (time.Time, error) + CreateInclOpts(ctx context.Context, arg CreateInclOptsParams) error + CreateInclOptsBatch(ctx context.Context, arg []CreateInclOptsBatchParams) *CreateInclOptsBatchBatchResults + CreateInclSegment(ctx context.Context, arg CreateInclSegmentParams) error + CreateInclSegmentBatch(ctx context.Context, arg []CreateInclSegmentBatchParams) *CreateInclSegmentBatchBatchResults CreateInstrument(ctx context.Context, arg CreateInstrumentParams) (CreateInstrumentRow, error) CreateInstrumentConstant(ctx context.Context, arg CreateInstrumentConstantParams) error CreateInstrumentConstantBatch(ctx context.Context, arg []CreateInstrumentConstantBatchParams) *CreateInstrumentConstantBatchBatchResults @@ -94,6 +98,8 @@ type Querier interface { CreateTimeseriesMeasurementsBatch(ctx context.Context, arg []CreateTimeseriesMeasurementsBatchParams) *CreateTimeseriesMeasurementsBatchBatchResults CreateTimeseriesNote(ctx context.Context, arg CreateTimeseriesNoteParams) error CreateTimeseriesNotesBatch(ctx context.Context, arg []CreateTimeseriesNotesBatchParams) *CreateTimeseriesNotesBatchBatchResults + CreateUploaderConfig(ctx context.Context, arg CreateUploaderConfigParams) (uuid.UUID, error) + CreateUploaderConfigMappingsBatch(ctx context.Context, arg 
[]CreateUploaderConfigMappingsBatchParams) *CreateUploaderConfigMappingsBatchBatchResults DeleteAlertConfig(ctx context.Context, id uuid.UUID) error DeleteAlertEmailSubscription(ctx context.Context, arg DeleteAlertEmailSubscriptionParams) error DeleteAlertProfileSubscription(ctx context.Context, arg DeleteAlertProfileSubscriptionParams) error @@ -103,6 +109,7 @@ type Querier interface { DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID *uuid.UUID) error DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID *uuid.UUID) error DeleteAllPlotContourConfigTimeseries(ctx context.Context, plotContourConfigID uuid.UUID) error + DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error DeleteCalculatedTimeseries(ctx context.Context, id uuid.UUID) error DeleteCollectionGroup(ctx context.Context, arg DeleteCollectionGroupParams) error DeleteDatalogger(ctx context.Context, arg DeleteDataloggerParams) error @@ -130,12 +137,15 @@ type Querier interface { DeleteTimeseriesMeasurementsRange(ctx context.Context, arg DeleteTimeseriesMeasurementsRangeParams) error DeleteTimeseriesNoteRange(ctx context.Context, arg DeleteTimeseriesNoteRangeParams) error DeleteToken(ctx context.Context, arg DeleteTokenParams) error + DeleteUploaderConfig(ctx context.Context, id uuid.UUID) error GetAlert(ctx context.Context, arg GetAlertParams) (GetAlertRow, error) GetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) GetAlertSubscription(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) GetAlertSubscriptionForAlertConfig(ctx context.Context, arg GetAlertSubscriptionForAlertConfigParams) (AlertProfileSubscription, error) + GetAllInclSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error) GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) GetAllSaaSegmentsForInstrument(ctx context.Context, 
instrumentID uuid.UUID) ([]VSaaSegment, error) + GetCalculatedTimeseries(ctx context.Context, id uuid.UUID) (GetCalculatedTimeseriesRow, error) GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) GetDatalogger(ctx context.Context, id uuid.UUID) (VDatalogger, error) GetDataloggerByModelSN(ctx context.Context, arg GetDataloggerByModelSNParams) (VDatalogger, error) @@ -146,6 +156,7 @@ type Querier interface { GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) GetEvaluation(ctx context.Context, id uuid.UUID) (VEvaluation, error) GetHome(ctx context.Context) (GetHomeRow, error) + GetInclMeasurementsForInstrument(ctx context.Context, arg GetInclMeasurementsForInstrumentParams) ([]VInclMeasurement, error) GetInstrument(ctx context.Context, id uuid.UUID) (VInstrument, error) GetInstrumentCount(ctx context.Context) (int64, error) GetInstrumentGroup(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) @@ -167,6 +178,7 @@ type Querier interface { GetReportDownloadJob(ctx context.Context, arg GetReportDownloadJobParams) (ReportDownloadJob, error) GetSaaMeasurementsForInstrument(ctx context.Context, arg GetSaaMeasurementsForInstrumentParams) ([]VSaaMeasurement, error) GetStoredTimeseriesExists(ctx context.Context, id uuid.UUID) (bool, error) + GetTimeseries(ctx context.Context, id uuid.UUID) (VTimeseries, error) GetTimeseriesConstantMeasurement(ctx context.Context, arg GetTimeseriesConstantMeasurementParams) ([]GetTimeseriesConstantMeasurementRow, error) GetTimeseriesCwms(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) GetTimeseriesProjectMap(ctx context.Context, timeseriesIds []uuid.UUID) ([]VTimeseriesProjectMap, error) @@ -185,7 +197,6 @@ type Querier interface { ListAndCheckAlertConfigs(ctx context.Context) ([]VAlertConfig, error) ListAwareParameters(ctx context.Context) ([]ListAwareParametersRow, error) ListAwarePlatformParameterEnabled(ctx context.Context) 
([]VAwarePlatformParameterEnabled, error) - ListCalculatedTimeseries(ctx context.Context, arg ListCalculatedTimeseriesParams) ([]ListCalculatedTimeseriesRow, error) ListCollectionGroupsForProject(ctx context.Context, projectID uuid.UUID) ([]ListCollectionGroupsForProjectRow, error) ListDataloggersForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) ListDistricts(ctx context.Context) ([]VDistrict, error) @@ -223,16 +234,17 @@ type Querier interface { ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]ListProjectMembersRow, error) ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) ListProjectSubmittals(ctx context.Context, arg ListProjectSubmittalsParams) ([]VSubmittal, error) - ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) ListProjects(ctx context.Context) ([]VProject, error) ListProjectsForFederalID(ctx context.Context) ([]VProject, error) ListProjectsForProfileRole(ctx context.Context, arg ListProjectsForProfileRoleParams) ([]VProject, error) ListReportConfigPlotConfigs(ctx context.Context, reportConfigID uuid.UUID) ([]VPlotConfiguration, error) - ListTimeseries(ctx context.Context, id uuid.UUID) ([]VTimeseries, error) ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) + ListTimeseriesForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) ListTimeseriesMeasurements(ctx context.Context, arg ListTimeseriesMeasurementsParams) ([]ListTimeseriesMeasurementsRow, error) ListUnits(ctx context.Context) ([]VUnit, error) ListUnverifiedMissingSubmittals(ctx context.Context) ([]VSubmittal, error) + ListUploaderConfigMappings(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) + ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) RegisterEmail(ctx context.Context, email string) (uuid.UUID, error) 
RemoveTimeseriesFromCollectionGroup(ctx context.Context, arg RemoveTimeseriesFromCollectionGroupParams) error RenameEmptyDataloggerTableName(ctx context.Context, arg RenameEmptyDataloggerTableNameParams) error @@ -248,7 +260,7 @@ type Querier interface { UnregisterEmail(ctx context.Context, id uuid.UUID) error UpdateAlertConfig(ctx context.Context, arg UpdateAlertConfigParams) error UpdateAlertConfigLastReminded(ctx context.Context, arg UpdateAlertConfigLastRemindedParams) error - UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (CollectionGroup, error) + UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (UpdateCollectionGroupRow, error) UpdateDatalogger(ctx context.Context, arg UpdateDataloggerParams) error UpdateDataloggerHash(ctx context.Context, arg UpdateDataloggerHashParams) error UpdateDataloggerTablePreview(ctx context.Context, arg UpdateDataloggerTablePreviewParams) error @@ -256,6 +268,10 @@ type Querier interface { UpdateEquivalencyTableRow(ctx context.Context, arg UpdateEquivalencyTableRowParams) error UpdateEvaluation(ctx context.Context, arg UpdateEvaluationParams) error UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) + UpdateInclOpts(ctx context.Context, arg UpdateInclOptsParams) error + UpdateInclOptsBatch(ctx context.Context, arg []UpdateInclOptsBatchParams) *UpdateInclOptsBatchBatchResults + UpdateInclSegment(ctx context.Context, arg UpdateInclSegmentParams) error + UpdateInclSegmentsBatch(ctx context.Context, arg []UpdateInclSegmentsBatchParams) *UpdateInclSegmentsBatchBatchResults UpdateInstrument(ctx context.Context, arg UpdateInstrumentParams) error UpdateInstrumentGeometry(ctx context.Context, arg UpdateInstrumentGeometryParams) (uuid.UUID, error) UpdateInstrumentGroup(ctx context.Context, arg UpdateInstrumentGroupParams) (InstrumentGroup, error) @@ -282,10 +298,12 @@ type Querier interface { UpdateSaaOpts(ctx context.Context, arg 
UpdateSaaOptsParams) error UpdateSaaOptsBatch(ctx context.Context, arg []UpdateSaaOptsBatchParams) *UpdateSaaOptsBatchBatchResults UpdateSaaSegment(ctx context.Context, arg UpdateSaaSegmentParams) error + UpdateSaaSegmentBatch(ctx context.Context, arg []UpdateSaaSegmentBatchParams) *UpdateSaaSegmentBatchBatchResults UpdateSubmittal(ctx context.Context, arg UpdateSubmittalParams) error UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, arg UpdateSubmittalCompletionDateOrWarningSentParams) error UpdateTimeseries(ctx context.Context, arg UpdateTimeseriesParams) (uuid.UUID, error) UpdateTimeseriesCwms(ctx context.Context, arg UpdateTimeseriesCwmsParams) error + UpdateUploaderConfig(ctx context.Context, arg UpdateUploaderConfigParams) error ValidateInstrumentNamesProjectUnique(ctx context.Context, arg ValidateInstrumentNamesProjectUniqueParams) ([]string, error) ValidateInstrumentsAssignerAuthorized(ctx context.Context, arg ValidateInstrumentsAssignerAuthorizedParams) ([]ValidateInstrumentsAssignerAuthorizedRow, error) ValidateProjectsAssignerAuthorized(ctx context.Context, arg ValidateProjectsAssignerAuthorizedParams) ([]string, error) diff --git a/api/internal/db/timeseries.sql_gen.go b/api/internal/db/timeseries.sql_gen.go index cee569b6..2eb55631 100644 --- a/api/internal/db/timeseries.sql_gen.go +++ b/api/internal/db/timeseries.sql_gen.go @@ -76,6 +76,31 @@ func (q *Queries) GetStoredTimeseriesExists(ctx context.Context, id uuid.UUID) ( return exists, err } +const getTimeseries = `-- name: GetTimeseries :one +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries where id=$1 +` + +func (q *Queries) GetTimeseries(ctx context.Context, id uuid.UUID) (VTimeseries, error) { + row := q.db.QueryRow(ctx, getTimeseries, id) + var i VTimeseries + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + 
&i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ) + return i, err +} + const getTimeseriesProjectMap = `-- name: GetTimeseriesProjectMap :many select timeseries_id, project_id from v_timeseries_project_map @@ -182,9 +207,9 @@ func (q *Queries) ListInstrumentTimeseries(ctx context.Context, instrumentID uui } const listPlotConfigTimeseries = `-- name: ListPlotConfigTimeseries :many -SELECT t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit FROM v_timeseries t -INNER JOIN plot_configuration_timeseries_trace pct ON pct.timeseries_id = t.id -WHERE pct.plot_configuration_id = $1 +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join plot_configuration_timeseries_trace pct on pct.timeseries_id = t.id +where pct.plot_configuration_id = $1 ` func (q *Queries) ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) { @@ -221,52 +246,14 @@ func (q *Queries) ListPlotConfigTimeseries(ctx context.Context, plotConfiguratio return items, nil } -const listProjectTimeseries = `-- name: ListProjectTimeseries :many +const listTimeseriesForProject = `-- name: ListTimeseriesForProject :many select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t inner join project_instrument p on p.instrument_id = t.instrument_id where p.project_id = $1 ` -func (q *Queries) ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) { - rows, err := q.db.Query(ctx, listProjectTimeseries, projectID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VTimeseries{} - for rows.Next() { - var i VTimeseries - 
if err := rows.Scan( - &i.ID, - &i.Slug, - &i.Name, - &i.Type, - &i.IsComputed, - &i.Variable, - &i.InstrumentID, - &i.InstrumentSlug, - &i.Instrument, - &i.ParameterID, - &i.Parameter, - &i.UnitID, - &i.Unit, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listTimeseries = `-- name: ListTimeseries :many -SELECT id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit FROM v_timeseries WHERE id = $1 -` - -func (q *Queries) ListTimeseries(ctx context.Context, id uuid.UUID) ([]VTimeseries, error) { - rows, err := q.db.Query(ctx, listTimeseries, id) +func (q *Queries) ListTimeseriesForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, listTimeseriesForProject, projectID) if err != nil { return nil, err } diff --git a/api/internal/db/timeseries_calculated.sql_gen.go b/api/internal/db/timeseries_calculated.sql_gen.go index 8fc927e5..51de6365 100644 --- a/api/internal/db/timeseries_calculated.sql_gen.go +++ b/api/internal/db/timeseries_calculated.sql_gen.go @@ -57,6 +57,10 @@ func (q *Queries) CreateCalculation(ctx context.Context, arg CreateCalculationPa } const createOrUpdateCalculatedTimeseries = `-- name: CreateOrUpdateCalculatedTimeseries :exec +with p as ( + select id, slug, name, instrument_id, parameter_id, unit_id, type from timeseries + where id=$1 +) insert into timeseries ( id, instrument_id, @@ -67,24 +71,20 @@ insert into timeseries ( type ) values ($1, $2, $3, $4, slugify($5, 'timeseries'), $5, 'computed') on conflict (id) do update set - instrument_id = coalesce(excluded.instrument_id, $6), - parameter_id = coalesce(excluded.parameter_id, $7), - unit_id = coalesce(excluded.unit_id, $8), - slug = coalesce(excluded.slug, slugify($9, 'timeseries')), - name = coalesce(excluded.name, $9), - type = 'computed' + 
instrument_id=coalesce(excluded.instrument_id, p.instrument_id), + parameter_id=coalesce(excluded.parameter_id, p.parameter_id), + unit_id=coalesce(excluded.unit_id, p.unit_id), + slug=coalesce(excluded.slug, p.slug), + name=coalesce(excluded.name, p.name), + type='computed' ` type CreateOrUpdateCalculatedTimeseriesParams struct { - ID uuid.UUID `json:"id"` - InstrumentID *uuid.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Name string `json:"name"` - InstrumentID_2 *uuid.UUID `json:"instrument_id_2"` - ParameterID_2 uuid.UUID `json:"parameter_id_2"` - UnitID_2 uuid.UUID `json:"unit_id_2"` - Rawname string `json:"rawname"` + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Name string `json:"name"` } func (q *Queries) CreateOrUpdateCalculatedTimeseries(ctx context.Context, arg CreateOrUpdateCalculatedTimeseriesParams) error { @@ -94,27 +94,25 @@ func (q *Queries) CreateOrUpdateCalculatedTimeseries(ctx context.Context, arg Cr arg.ParameterID, arg.UnitID, arg.Name, - arg.InstrumentID_2, - arg.ParameterID_2, - arg.UnitID_2, - arg.Rawname, ) return err } const createOrUpdateCalculation = `-- name: CreateOrUpdateCalculation :exec +with p as ( + select contents from calculation where timeseries_id=$1 +) insert into calculation (timeseries_id, contents) values ($1, $2) -on conflict (timeseries_id) do update set contents = coalesce(excluded.contents, $3) +on conflict (timeseries_id) do update set contents=coalesce(excluded.contents, p.contents) ` type CreateOrUpdateCalculationParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Contents *string `json:"contents"` - Contents_2 *string `json:"contents_2"` } func (q *Queries) CreateOrUpdateCalculation(ctx context.Context, arg CreateOrUpdateCalculationParams) error { - _, err := q.db.Exec(ctx, createOrUpdateCalculation, arg.TimeseriesID, 
arg.Contents, arg.Contents_2) + _, err := q.db.Exec(ctx, createOrUpdateCalculation, arg.TimeseriesID, arg.Contents) return err } @@ -127,7 +125,7 @@ func (q *Queries) DeleteCalculatedTimeseries(ctx context.Context, id uuid.UUID) return err } -const listCalculatedTimeseries = `-- name: ListCalculatedTimeseries :many +const getCalculatedTimeseries = `-- name: GetCalculatedTimeseries :one select id, instrument_id, @@ -137,16 +135,10 @@ select name as formula_name, coalesce(contents, '') as formula from v_timeseries_computed -where ($1 is null or instrument_id = $1) -and ($2 is null or id = $2) +where id=$1 ` -type ListCalculatedTimeseriesParams struct { - InstrumentID interface{} `json:"instrument_id"` - ID interface{} `json:"id"` -} - -type ListCalculatedTimeseriesRow struct { +type GetCalculatedTimeseriesRow struct { ID uuid.UUID `json:"id"` InstrumentID *uuid.UUID `json:"instrument_id"` ParameterID uuid.UUID `json:"parameter_id"` @@ -156,30 +148,17 @@ type ListCalculatedTimeseriesRow struct { Formula string `json:"formula"` } -func (q *Queries) ListCalculatedTimeseries(ctx context.Context, arg ListCalculatedTimeseriesParams) ([]ListCalculatedTimeseriesRow, error) { - rows, err := q.db.Query(ctx, listCalculatedTimeseries, arg.InstrumentID, arg.ID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []ListCalculatedTimeseriesRow{} - for rows.Next() { - var i ListCalculatedTimeseriesRow - if err := rows.Scan( - &i.ID, - &i.InstrumentID, - &i.ParameterID, - &i.UnitID, - &i.Slug, - &i.FormulaName, - &i.Formula, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *Queries) GetCalculatedTimeseries(ctx context.Context, id uuid.UUID) (GetCalculatedTimeseriesRow, error) { + row := q.db.QueryRow(ctx, getCalculatedTimeseries, id) + var i GetCalculatedTimeseriesRow + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.ParameterID, + &i.UnitID, + &i.Slug, 
+ &i.FormulaName, + &i.Formula, + ) + return i, err } diff --git a/api/internal/db/uploader.sql_gen.go b/api/internal/db/uploader.sql_gen.go new file mode 100644 index 00000000..021e10d6 --- /dev/null +++ b/api/internal/db/uploader.sql_gen.go @@ -0,0 +1,163 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: uploader.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const createUploaderConfig = `-- name: CreateUploaderConfig :one +insert into uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) +values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) +returning id +` + +type CreateUploaderConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Description string `json:"description"` + CreateDate time.Time `json:"create_date"` + Creator uuid.UUID `json:"creator"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` +} + +func (q *Queries) CreateUploaderConfig(ctx context.Context, arg CreateUploaderConfigParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, createUploaderConfig, + arg.ProjectID, + arg.Name, + arg.Description, + arg.CreateDate, + arg.Creator, + arg.Type, + arg.TzName, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const deleteAllUploaderConfigMappingsForUploaderConfig = `-- name: DeleteAllUploaderConfigMappingsForUploaderConfig :exec +delete from uploader_config_mapping where uploader_config_id=$1 +` + +func (q *Queries) DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteAllUploaderConfigMappingsForUploaderConfig, uploaderConfigID) + return err +} + +const deleteUploaderConfig = `-- name: DeleteUploaderConfig :exec +delete from uploader_config where id=$1 +` + +func (q *Queries) DeleteUploaderConfig(ctx context.Context, id uuid.UUID) error { + _, err := 
q.db.Exec(ctx, deleteUploaderConfig, id) + return err +} + +const listUploaderConfigMappings = `-- name: ListUploaderConfigMappings :many +select uploader_config_id, field_name, timeseries_id from uploader_config_mapping where uploader_config_id=$1 +` + +func (q *Queries) ListUploaderConfigMappings(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) { + rows, err := q.db.Query(ctx, listUploaderConfigMappings, uploaderConfigID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []UploaderConfigMapping{} + for rows.Next() { + var i UploaderConfigMapping + if err := rows.Scan(&i.UploaderConfigID, &i.FieldName, &i.TimeseriesID); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listUploaderConfigsForProject = `-- name: ListUploaderConfigsForProject :many +select id, project_id, slug, name, description, create_date, creator, update_date, updater, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field from uploader_config where project_id=$1 +` + +func (q *Queries) ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) { + rows, err := q.db.Query(ctx, listUploaderConfigsForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []UploaderConfig{} + for rows.Next() { + var i UploaderConfig + if err := rows.Scan( + &i.ID, + &i.ProjectID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreateDate, + &i.Creator, + &i.UpdateDate, + &i.Updater, + &i.Type, + &i.TzName, + &i.TimeField, + &i.ValidatedFieldEnabled, + &i.ValidatedField, + &i.MaskedFieldEnabled, + &i.MaskedField, + &i.CommentFieldEnabled, + &i.CommentField, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, 
nil +} + +const updateUploaderConfig = `-- name: UpdateUploaderConfig :exec +update uploader_config set + name=$2, + description=$3, + update_date=$4, + updater=$5, + type=$6, + tz_name=$7 +where id=$1 +` + +type UpdateUploaderConfigParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + UpdateDate *time.Time `json:"update_date"` + Updater *uuid.UUID `json:"updater"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` +} + +func (q *Queries) UpdateUploaderConfig(ctx context.Context, arg UpdateUploaderConfigParams) error { + _, err := q.db.Exec(ctx, updateUploaderConfig, + arg.ID, + arg.Name, + arg.Description, + arg.UpdateDate, + arg.Updater, + arg.Type, + arg.TzName, + ) + return err +} diff --git a/api/internal/handler/handler.go b/api/internal/handler/handler.go index c333df19..b8e9a263 100644 --- a/api/internal/handler/handler.go +++ b/api/internal/handler/handler.go @@ -1,6 +1,8 @@ package handler import ( + "context" + "log" "net/http" "strings" "time" @@ -10,6 +12,9 @@ import ( "github.com/USACE/instrumentation-api/api/internal/middleware" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/USACE/instrumentation-api/api/internal/service" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" + pgxgeom "github.com/twpayne/pgx-geom" ) func newHttpClient() *http.Client { @@ -21,6 +26,24 @@ func newHttpClient() *http.Client { } } +func newDbConnPool(cfg config.DBConfig) *pgxpool.Pool { + config, err := pgxpool.ParseConfig(cfg.ConnStr()) + if err != nil { + log.Fatal(err) + } + + config.AfterConnect = func(ctx context.Context, conn *pgx.Conn) error { + return pgxgeom.Register(ctx, conn) + } + + pool, err := pgxpool.NewWithConfig(context.Background(), config) + if err != nil { + log.Fatal(err) + } + + return pool +} + type ApiHandler struct { Middleware middleware.Middleware BlobService cloud.Blob diff --git a/api/internal/model/instrument.go 
b/api/internal/model/instrument.go index 361a3bb7..57c09a4d 100644 --- a/api/internal/model/instrument.go +++ b/api/internal/model/instrument.go @@ -4,9 +4,9 @@ import ( "context" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/google/uuid" "github.com/twpayne/go-geom/encoding/geojson" - "github.com/twpayne/go-geom/encoding/wkb" ) // Instrument is an instrument @@ -234,7 +234,7 @@ const updateInstrumentGeometry = ` // UpdateInstrumentGeometry updates instrument geometry property func (q *Queries) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p Profile) error { - _, err := q.db.ExecContext(ctx, updateInstrumentGeometry, projectID, instrumentID, wkb.Value(geom.Geometry()), p.ID) + _, err := q.db.ExecContext(ctx, updateInstrumentGeometry, projectID, instrumentID, geom.Geometry(), p.ID) return err } diff --git a/api/internal/servicev2/alert.go b/api/internal/servicev2/alert.go index 355e3954..b113484d 100644 --- a/api/internal/servicev2/alert.go +++ b/api/internal/servicev2/alert.go @@ -29,12 +29,7 @@ func NewAlertService(db *Database, q *db.Queries) *alertService { // Create creates one or more new alerts func (s alertService) CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error { var err error - s.Queries.CreateAlerts(ctx, alertConfigIDs).Exec(func(_ int, e error) { - if err != nil { - err = e - return - } - }) + s.Queries.CreateAlerts(ctx, alertConfigIDs).Exec(batchExecErr(&err)) return err } diff --git a/api/internal/servicev2/alert_subscription.go b/api/internal/servicev2/alert_subscription.go index 441f0fd2..8c9dcc54 100644 --- a/api/internal/servicev2/alert_subscription.go +++ b/api/internal/servicev2/alert_subscription.go @@ -55,7 +55,7 @@ func (s alertSubscriptionService) SubscribeProfileToAlerts(ctx context.Context, return a, err } - updated, err := qtx.GetAlertSubscription(ctx, db.GetAlertSubscriptionParams{ + updated, err := 
qtx.GetAlertSubscriptionForAlertConfig(ctx, db.GetAlertSubscriptionForAlertConfigParams{ AlertConfigID: alertConfigID, ProfileID: profileID, }) diff --git a/api/internal/servicev2/db.go b/api/internal/servicev2/db.go index 1d9e6f2e..e3c4ffb2 100644 --- a/api/internal/servicev2/db.go +++ b/api/internal/servicev2/db.go @@ -4,6 +4,7 @@ import ( "context" "database/sql" "errors" + "fmt" "log" "github.com/jackc/pgx/v5/pgxpool" @@ -19,3 +20,36 @@ func txDo(ctx context.Context, rollback func(ctx context.Context) error) { log.Print(err.Error()) } } + +func batchExecErr(err *error) func(int, error) { + return func(_ int, e error) { + if e != nil { + *err = e + return + } + } +} + +func batchQueryRowErr[T any](err *error) func(int, T, error) { + return func(_ int, _ T, e error) { + if e != nil { + *err = e + return + } + } +} + +func batchQueryRowCollect[T any](rr []T, err *error) func(int, T, error) { + rrlen := len(rr) + return func(i int, r T, e error) { + if e != nil { + *err = e + return + } + if i == rrlen { + *err = fmt.Errorf("rr slice must be same length as QueryRow args") + return + } + rr[i] = r + } +} diff --git a/api/internal/servicev2/evaluation.go b/api/internal/servicev2/evaluation.go index f8579aca..7a585e57 100644 --- a/api/internal/servicev2/evaluation.go +++ b/api/internal/servicev2/evaluation.go @@ -90,12 +90,7 @@ func (s evaluationService) CreateEvaluation(ctx context.Context, ev model.Evalua InstrumentID: &aci.InstrumentID, } } - qtx.CreateEvaluationInstrumentsBatch(ctx, args).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + qtx.CreateEvaluationInstrumentsBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return a, err } @@ -141,12 +136,7 @@ func (s evaluationService) UpdateEvaluation(ctx context.Context, evaluationID uu InstrumentID: &aci.InstrumentID, } } - qtx.CreateEvaluationInstrumentsBatch(ctx, args).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + qtx.CreateEvaluationInstrumentsBatch(ctx, 
args).Exec(batchExecErr(&err)) if err != nil { return a, err } diff --git a/api/internal/servicev2/instrument.go b/api/internal/servicev2/instrument.go index 87548ba6..fc3d020d 100644 --- a/api/internal/servicev2/instrument.go +++ b/api/internal/servicev2/instrument.go @@ -2,13 +2,11 @@ package servicev2 import ( "context" - "encoding/json" "slices" "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" - "github.com/twpayne/go-geom" "github.com/twpayne/go-geom/encoding/geojson" ) @@ -44,51 +42,6 @@ const ( update ) -func createInstrument(ctx context.Context, q *db.Queries, instrument model.Instrument) (model.IDSlugName, error) { - newInstrument, err := q.CreateInstrument(ctx, instrument) - if err != nil { - return model.IDSlugName{}, err - } - for _, prj := range instrument.Projects { - if err := q.AssignInstrumentToProject(ctx, prj.ID, newInstrument.ID); err != nil { - return model.IDSlugName{}, err - } - } - if err := q.CreateOrUpdateInstrumentStatus(ctx, newInstrument.ID, instrument.StatusID, instrument.StatusTime); err != nil { - return model.IDSlugName{}, err - } - if instrument.AwareID != nil { - if err := q.CreateAwarePlatform(ctx, newInstrument.ID, *instrument.AwareID); err != nil { - return model.IDSlugName{}, err - } - } - instrument.ID = newInstrument.ID - if err := handleOpts(ctx, q, instrument, create); err != nil { - return model.IDSlugName{}, err - } - return newInstrument, nil -} - -func (s instrumentService) CreateInstrument(ctx context.Context, instrument model.Instrument) (model.IDSlugName, error) { - tx, err := s.db.Begin(ctx) - if err != nil { - return model.IDSlugName{}, err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - newInstrument, err := createInstrument(ctx, qtx, instrument) - if err != nil { - return model.IDSlugName{}, err - } - - if err := tx.Commit(ctx); err != nil { - return model.IDSlugName{}, err - } - return newInstrument, nil -} - 
func (s instrumentService) CreateInstruments(ctx context.Context, ii []model.Instrument) ([]db.CreateInstrumentsBatchRow, error) { tx, err := s.db.Begin(ctx) if err != nil { @@ -145,30 +98,15 @@ func (s instrumentService) CreateInstruments(ctx context.Context, ii []model.Ins if err != nil { return nil, err } - qtx.AssignInstrumentToProjectBatch(ctx, slices.Concat(assignInstrumentsProjectsArgs...)).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + qtx.AssignInstrumentToProjectBatch(ctx, slices.Concat(assignInstrumentsProjectsArgs...)).Exec(batchExecErr(&err)) if err != nil { return nil, err } - qtx.CreateOrUpdateInstrumentStatusBatch(ctx, instrumentStatusArgs).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + qtx.CreateOrUpdateInstrumentStatusBatch(ctx, instrumentStatusArgs).Exec(batchExecErr(&err)) if err != nil { return nil, err } - qtx.CreateAwarePlatformBatch(ctx, instrumentAwareArgs).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + qtx.CreateAwarePlatformBatch(ctx, instrumentAwareArgs).Exec(batchExecErr(&err)) if err != nil { return nil, err } @@ -230,14 +168,8 @@ func (s instrumentService) UpdateInstrument(ctx context.Context, projectID uuid. 
return a, nil } -func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, gj json.RawMessage, p model.Profile) (db.VInstrument, error) { +func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geometry db.Geometry, p model.Profile) (db.VInstrument, error) { var a db.VInstrument - - var geometry geom.T - if err := geojson.Unmarshal(gj, &geometry); err != nil { - return a, err - } - tx, err := s.db.Begin(ctx) if err != nil { return a, err @@ -260,6 +192,44 @@ func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, project if err := tx.Commit(ctx); err != nil { return a, err } - return a, nil } + +func handleOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument, rt requestType) error { + saa := make([]model.Instrument, 0) + ipi := make([]model.Instrument, 0) + for _, inst := range ii { + switch inst.TypeID { + case saaTypeID: + saa = append(saa, inst) + case ipiTypeID: + ipi = append(ipi, inst) + default: + } + } + if len(saa) != 0 { + var err error + switch rt { + case create: + err = createSaaOptsBatch(ctx, q, saa) + case update: + err = updateSaaOptsBatch(ctx, q, saa) + } + if err != nil { + return err + } + } + if len(ipi) != 0 { + var err error + switch rt { + case create: + err = createIpiOptsBatch(ctx, q, ipi) + case update: + err = updateIpiOptsBatch(ctx, q, ipi) + } + if err != nil { + return err + } + } + return nil +} diff --git a/api/internal/servicev2/instrument_assign.go b/api/internal/servicev2/instrument_assign.go index 5bee91cc..e9e6f424 100644 --- a/api/internal/servicev2/instrument_assign.go +++ b/api/internal/servicev2/instrument_assign.go @@ -266,12 +266,7 @@ func unassignProjectsFromInstrument(ctx context.Context, q *db.Queries, profileI InstrumentID: instrumentID, } } - q.UnassignInstrumentFromProjectBatch(ctx, args).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + 
q.UnassignInstrumentFromProjectBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return a, err } @@ -292,12 +287,7 @@ func assignInstrumentsToProject(ctx context.Context, q *db.Queries, profileID, p InstrumentID: instrumentIDs[idx], } } - q.AssignInstrumentToProjectBatch(ctx, args).Exec(func(_ int, e error) { - if err != nil { - err = e - return - } - }) + q.AssignInstrumentToProjectBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return a, err } @@ -331,12 +321,7 @@ func unassignInstrumentsFromProject(ctx context.Context, q *db.Queries, profileI InstrumentID: count.InstrumentID, }) } - q.UnassignInstrumentFromProjectBatch(ctx, args).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + q.UnassignInstrumentFromProjectBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return a, err } diff --git a/api/internal/servicev2/instrument_constant.go b/api/internal/servicev2/instrument_constant.go index 94191995..8b1994f5 100644 --- a/api/internal/servicev2/instrument_constant.go +++ b/api/internal/servicev2/instrument_constant.go @@ -2,6 +2,7 @@ package servicev2 import ( "context" + "errors" "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" @@ -26,26 +27,50 @@ func NewInstrumentConstantService(db *Database, q *db.Queries) *instrumentConsta // CreateInstrumentConstants creates many instrument constants from an array of instrument constants // An InstrumentConstant is structurally the same as a timeseries and saved in the same tables -func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { +func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]db.CreateTimeseriesBatchRow, error) { tx, err := s.db.Begin(ctx) if err != nil { return nil, err } defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - uu := make([]model.Timeseries, len(tt)) + 
createTimeseriesParams := make([]db.CreateTimeseriesBatchParams, len(tt)) for idx, t := range tt { t.Type = model.ConstantTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, t) - if err != nil { - return nil, err + createTimeseriesParams[idx] = db.CreateTimeseriesBatchParams{ + InstrumentID: &t.InstrumentID, + Name: t.Name, + ParameterID: t.ParameterID, + UnitID: t.UnitID, + Type: db.NullTimeseriesType{ + TimeseriesType: db.TimeseriesTypeConstant, + Valid: true, + }, + } + } + uu := make([]db.CreateTimeseriesBatchRow, len(createTimeseriesParams)) + createConstantsParams := make([]db.CreateInstrumentConstantBatchParams, len(createTimeseriesParams)) + qtx.CreateTimeseriesBatch(ctx, createTimeseriesParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return } - if err := qtx.CreateInstrumentConstant(ctx, tsNew.InstrumentID, tsNew.ID); err != nil { - return nil, err + if r.InstrumentID == nil { + err = errors.New("instrument id must not be nil") } - uu[idx] = tsNew + createConstantsParams[i] = db.CreateInstrumentConstantBatchParams{ + InstrumentID: *r.InstrumentID, + TimeseriesID: r.ID, + } + uu[i] = r + }) + if err != nil { + return nil, err + } + qtx.CreateInstrumentConstantBatch(ctx, createConstantsParams).Exec(batchExecErr(&err)) + if err != nil { + return nil, err } if err := tx.Commit(ctx); err != nil { return nil, err @@ -60,16 +85,16 @@ func (s instrumentConstantService) DeleteInstrumentConstant(ctx context.Context, return err } defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - if err := qtx.DeleteInstrumentConstant(ctx, instrumentID, timeseriesID); err != nil { + if err := qtx.DeleteInstrumentConstant(ctx, db.DeleteInstrumentConstantParams{ + InstrumentID: instrumentID, + TimeseriesID: timeseriesID, + }); err != nil { return err } - if err := qtx.DeleteTimeseries(ctx, timeseriesID); err != nil { return err } - return tx.Commit(ctx) } diff --git a/api/internal/servicev2/instrument_group.go 
b/api/internal/servicev2/instrument_group.go index 132d1aaa..7fb5d91d 100644 --- a/api/internal/servicev2/instrument_group.go +++ b/api/internal/servicev2/instrument_group.go @@ -42,13 +42,7 @@ func (s instrumentGroupService) CreateInstrumentGroup(ctx context.Context, group } var err error gg := make([]db.CreateInstrumentGroupsBatchRow, len(groups)) - s.Queries.CreateInstrumentGroupsBatch(ctx, args).QueryRow(func(i int, r db.CreateInstrumentGroupsBatchRow, e error) { - if e != nil { - err = e - return - } - gg[i] = r - }) + s.Queries.CreateInstrumentGroupsBatch(ctx, args).QueryRow(batchQueryRowCollect(gg, &err)) if err != nil { return nil, err } diff --git a/api/internal/servicev2/instrument_incl.go b/api/internal/servicev2/instrument_incl.go new file mode 100644 index 00000000..33d3de27 --- /dev/null +++ b/api/internal/servicev2/instrument_incl.go @@ -0,0 +1,211 @@ +package servicev2 + +import ( + "context" + "errors" + "fmt" + "slices" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" +) + +type InclInstrumentService interface { + GetAllInclSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.InclSegment, error) + UpdateInclSegment(ctx context.Context, seg model.InclSegment) error + UpdateInclSegments(ctx context.Context, segs []model.InclSegment) error + GetInclMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.InclMeasurements, error) +} + +type inclInstrumentService struct { + db *Database + *db.Queries +} + +func NewInclInstrumentService(db *Database, q *db.Queries) *inclInstrumentService { + return &inclInstrumentService{db, q} +} + +func (s inclInstrumentService) UpdateInclSegments(ctx context.Context, segs []model.InclSegment) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + updateInclArgs := 
make([]db.UpdateInclSegmentsBatchParams, len(segs)) + createMmtArgs := make([]db.CreateTimeseriesMeasurementsBatchParams, 0) + + for idx, seg := range segs { + updateInclArgs[idx] = db.UpdateInclSegmentsBatchParams{ + ID: int32(seg.ID), + InstrumentID: seg.InstrumentID, + DepthTimeseriesID: seg.DepthTimeseriesID, + A0TimeseriesID: seg.A0TimeseriesID, + A180TimeseriesID: seg.A180TimeseriesID, + B0TimeseriesID: seg.B0TimeseriesID, + B180TimeseriesID: seg.B180TimeseriesID, + } + if seg.Length == nil { + continue + } + createMmtArgs = append(createMmtArgs, db.CreateTimeseriesMeasurementsBatchParams{ + TimeseriesID: seg.LengthTimeseriesID, + Time: time.Now(), + Value: *seg.Length, + }) + } + qtx.UpdateInclSegmentsBatch(ctx, updateInclArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + qtx.CreateTimeseriesMeasurementsBatch(ctx, createMmtArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) +} + +func createInclOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) + createInclSegmentBatchParams := make([][]db.CreateInclSegmentBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) + createInclOptsParams := make([]db.CreateInclOptsBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := model.MapToStruct[model.InclOpts](inst.Opts) + if err != nil { + return err + } + createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) + createInclSegmentBatchParams[idx] = make([]db.CreateInclSegmentBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), + ParameterID: 
model.InclParameterID, + UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createInclSegmentBatchParams[idx][i] = db.CreateInclSegmentBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: model.InclParameterID, + UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createInclOptsParams[idx] = db.CreateInclOptsBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } + + args := slices.Concat(createTimeseriesBatchParams...) + inclArgs := slices.Concat(createInclSegmentBatchParams...) 
+ createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) + + var err error + q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + inclArgs[i].LengthTimeseriesID = &r.ID + }) + if err != nil { + return err + } + q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateInclSegmentBatch(ctx, inclArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return + } + createInclOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.CreateInclOptsBatch(ctx, createInclOptsParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + return err +} + +func updateInclOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + updateInclOptsParams := make([]db.UpdateInclOptsBatchParams, len(ii)) + createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := model.MapToStruct[model.InclOpts](inst.Opts) + if err != nil { + return err + } + updateInclOptsParams[idx] = db.UpdateInclOptsBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + 
InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } + } + var err error + q.UpdateInclOptsBatch(ctx, updateInclOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err +} diff --git a/api/internal/servicev2/instrument_ipi.go b/api/internal/servicev2/instrument_ipi.go index 1c7e1234..ed10ffa5 100644 --- a/api/internal/servicev2/instrument_ipi.go +++ b/api/internal/servicev2/instrument_ipi.go @@ -2,6 +2,9 @@ package servicev2 import ( "context" + "errors" + "fmt" + "slices" "time" "github.com/USACE/instrumentation-api/api/internal/db" @@ -54,23 +57,154 @@ func (s ipiInstrumentService) UpdateIpiSegments(ctx context.Context, segs []mode Value: *seg.Length, }) } - qtx.UpdateIpiSegmentsBatch(ctx, updateIpiArgs).Exec(func(_ int, e error) { + qtx.UpdateIpiSegmentsBatch(ctx, updateIpiArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + qtx.CreateTimeseriesMeasurementsBatch(ctx, createMmtArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) +} + +func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) + createIpiSegmentBatchParams := make([][]db.CreateIpiSegmentBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) + createIpiOptsParams := make([]db.CreateIpiOptsBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) + if err != nil { + return err + } + createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) 
+ createIpiSegmentBatchParams[idx] = make([]db.CreateIpiSegmentBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), + ParameterID: model.IpiParameterID, + UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createIpiSegmentBatchParams[idx][i] = db.CreateIpiSegmentBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: model.IpiParameterID, + UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createIpiOptsParams[idx] = db.CreateIpiOptsBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } + + args := slices.Concat(createTimeseriesBatchParams...) + ipiArgs := slices.Concat(createIpiSegmentBatchParams...) 
+ createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) + + var err error + q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { if e != nil { err = e return } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + ipiArgs[i].LengthTimeseriesID = &r.ID }) if err != nil { return err } - qtx.CreateTimeseriesMeasurementsBatch(ctx, createMmtArgs).Exec(func(_ int, e error) { + q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateIpiSegmentBatch(ctx, ipiArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { if e != nil { err = e return } + createIpiOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID }) if err != nil { return err } - return tx.Commit(ctx) + q.CreateIpiOptsBatch(ctx, createIpiOptsParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + return err +} + +func updateIpiOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + updateIpiOptsParams := make([]db.UpdateIpiOptsBatchParams, len(ii)) + createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) + if err != nil { + return err + } + updateIpiOptsParams[idx] = db.UpdateIpiOptsBatchParams{ + InstrumentID: inst.ID, + 
BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } + } + var err error + q.UpdateIpiOptsBatch(ctx, updateIpiOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/servicev2/instrument_note.go b/api/internal/servicev2/instrument_note.go index 3d95cd70..17f24044 100644 --- a/api/internal/servicev2/instrument_note.go +++ b/api/internal/servicev2/instrument_note.go @@ -47,13 +47,7 @@ func (s instrumentNoteService) CreateInstrumentNote(ctx context.Context, notes [ } } nn := make([]db.InstrumentNote, len(args)) - qtx.CreateInstrumentNoteBatch(ctx, args).QueryRow(func(i int, n db.InstrumentNote, e error) { - if e != nil { - err = e - return - } - nn[i] = n - }) + qtx.CreateInstrumentNoteBatch(ctx, args).QueryRow(batchQueryRowCollect(nn, &err)) if err != nil { return nil, err } diff --git a/api/internal/servicev2/instrument_opts.go b/api/internal/servicev2/instrument_opts.go deleted file mode 100644 index 1d9f4e18..00000000 --- a/api/internal/servicev2/instrument_opts.go +++ /dev/null @@ -1,373 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - "fmt" - "slices" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" -) - -func handleOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument, rt requestType) error { - saa := make([]model.Instrument, 0) - ipi := make([]model.Instrument, 0) - for _, inst := range ii { - switch inst.TypeID { - case saaTypeID: - saa = append(saa, inst) - case ipiTypeID: - ipi = append(ipi, inst) - default: - } - } - if len(saa) != 0 { - var err error - switch rt { - case create: - err = 
createSaaOptsBatch(ctx, q, saa) - case update: - err = updateSaaOptsBatch(ctx, q, saa) - } - if err != nil { - return err - } - } - if len(ipi) != 0 { - var err error - switch rt { - case create: - err = createIpiOptsBatch(ctx, q, ipi) - case update: - err = updateIpiOptsBatch(ctx, q, ipi) - } - if err != nil { - return err - } - } - return nil -} - -func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) - createSaaSegmentBatchParams := make([][]db.CreateSaaSegmentBatchParams, len(ii)) - - createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) - createSaaOptsParams := make([]db.CreateSaaOptsBatchParams, len(ii)) - createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - - for idx, inst := range ii { - opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) - if err != nil { - return err - } - createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) - createSaaSegmentBatchParams[idx] = make([]db.CreateSaaSegmentBatchParams, opts.NumSegments) - - for i := range opts.NumSegments { - createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createSaaSegmentBatchParams[idx][i] = db.CreateSaaSegmentBatchParams{ - ID: int32(i + 1), - InstrumentID: inst.ID, - } - } - createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + "-bottom-elevation", - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createSaaOptsParams[idx] = 
db.CreateSaaOptsBatchParams{ - InstrumentID: inst.ID, - NumSegments: int32(opts.NumSegments), - InitialTime: opts.InitialTime, - } - createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - Time: time.Now(), - Value: opts.BottomElevation, - } - } - - args := slices.Concat(createTimeseriesBatchParams...) - saaArgs := slices.Concat(createSaaSegmentBatchParams...) - createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) - - var err error - q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - if r.InstrumentID == nil { - err = errors.New("new timeseries must have instrument id") - return - } - createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ - TimeseriesID: r.ID, - InstrumentID: *r.InstrumentID, - } - saaArgs[i].LengthTimeseriesID = &r.ID - }) - if err != nil { - return err - } - q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateSaaSegmentBatch(ctx, saaArgs).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - createSaaOptsParams[i].BottomElevationTimeseriesID = &r.ID - createBottomElevationMmtParams[i].TimeseriesID = r.ID - }) - if err != nil { - return err - } - q.CreateSaaOptsBatch(ctx, createSaaOptsParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - return err -} - -func updateSaaOptsBatch(ctx context.Context, q 
*db.Queries, ii []model.Instrument) error { - updateSaaOptsParams := make([]db.UpdateSaaOptsBatchParams, len(ii)) - createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - for idx, inst := range ii { - opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) - if err != nil { - return err - } - updateSaaOptsParams[idx] = db.UpdateSaaOptsBatchParams{ - InstrumentID: inst.ID, - BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, - InitialTime: opts.InitialTime, - } - createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - TimeseriesID: opts.BottomElevationTimeseriesID, - Time: time.Now(), - Value: opts.BottomElevation, - } - } - var err error - q.UpdateSaaOptsBatch(ctx, updateSaaOptsParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - return err -} - -func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) - createIpiSegmentBatchParams := make([][]db.CreateIpiSegmentBatchParams, len(ii)) - - createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) - createIpiOptsParams := make([]db.CreateIpiOptsBatchParams, len(ii)) - createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - - for idx, inst := range ii { - opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) - if err != nil { - return err - } - createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) - createIpiSegmentBatchParams[idx] = make([]db.CreateIpiSegmentBatchParams, opts.NumSegments) - - for i := range opts.NumSegments { - createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + 
fmt.Sprintf("segment-%d-length", i+1), - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createIpiSegmentBatchParams[idx][i] = db.CreateIpiSegmentBatchParams{ - ID: int32(i + 1), - InstrumentID: inst.ID, - } - } - createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + "-bottom-elevation", - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createIpiOptsParams[idx] = db.CreateIpiOptsBatchParams{ - InstrumentID: inst.ID, - NumSegments: int32(opts.NumSegments), - InitialTime: opts.InitialTime, - } - createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - Time: time.Now(), - Value: opts.BottomElevation, - } - } - - args := slices.Concat(createTimeseriesBatchParams...) - ipiArgs := slices.Concat(createIpiSegmentBatchParams...) 
- createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) - - var err error - q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - if r.InstrumentID == nil { - err = errors.New("new timeseries must have instrument id") - return - } - createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ - TimeseriesID: r.ID, - InstrumentID: *r.InstrumentID, - } - ipiArgs[i].LengthTimeseriesID = &r.ID - }) - if err != nil { - return err - } - q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateIpiSegmentBatch(ctx, ipiArgs).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - createIpiOptsParams[i].BottomElevationTimeseriesID = &r.ID - createBottomElevationMmtParams[i].TimeseriesID = r.ID - }) - if err != nil { - return err - } - q.CreateIpiOptsBatch(ctx, createIpiOptsParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - return err -} - -func updateIpiOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - updateIpiOptsParams := make([]db.UpdateIpiOptsBatchParams, len(ii)) - createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - for idx, inst := range ii { - opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) - if err != nil { - return err - } - updateIpiOptsParams[idx] = db.UpdateIpiOptsBatchParams{ - InstrumentID: 
inst.ID, - BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, - InitialTime: opts.InitialTime, - } - createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - TimeseriesID: opts.BottomElevationTimeseriesID, - Time: time.Now(), - Value: opts.BottomElevation, - } - } - var err error - q.UpdateIpiOptsBatch(ctx, updateIpiOptsParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - return err -} diff --git a/api/internal/servicev2/instrument_saa.go b/api/internal/servicev2/instrument_saa.go index b14fbfda..3b1702ea 100644 --- a/api/internal/servicev2/instrument_saa.go +++ b/api/internal/servicev2/instrument_saa.go @@ -2,6 +2,9 @@ package servicev2 import ( "context" + "errors" + "fmt" + "slices" "time" "github.com/USACE/instrumentation-api/api/internal/db" @@ -31,19 +34,167 @@ func (s saaInstrumentService) UpdateSaaSegments(ctx context.Context, segs []mode return err } defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - for _, seg := range segs { - if err := qtx.UpdateSaaSegment(ctx, seg); err != nil { - return err + updateSaaSegParams := make([]db.UpdateSaaSegmentBatchParams, len(segs)) + createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, 0) + for idx, seg := range segs { + updateSaaSegParams[idx] = db.UpdateSaaSegmentBatchParams{ + ID: int32(seg.ID), + InstrumentID: seg.InstrumentID, + LengthTimeseriesID: &seg.LengthTimeseriesID, + XTimeseriesID: seg.XTimeseriesID, + YTimeseriesID: seg.YTimeseriesID, + ZTimeseriesID: seg.ZTimeseriesID, + TempTimeseriesID: seg.TempTimeseriesID, } if seg.Length == nil { continue } - if err := qtx.CreateTimeseriesMeasurement(ctx, seg.LengthTimeseriesID, time.Now(), *seg.Length); err != nil { + createMmtParams = append(createMmtParams, db.CreateTimeseriesMeasurementsBatchParams{ + TimeseriesID: 
seg.LengthTimeseriesID, + Time: time.Now(), + Value: *seg.Length, + }) + } + qtx.UpdateSaaSegmentBatch(ctx, updateSaaSegParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + qtx.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) +} + +func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) + createSaaSegmentBatchParams := make([][]db.CreateSaaSegmentBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) + createSaaOptsParams := make([]db.CreateSaaOptsBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) + if err != nil { return err } + createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) + createSaaSegmentBatchParams[idx] = make([]db.CreateSaaSegmentBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), + ParameterID: model.SaaParameterID, + UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createSaaSegmentBatchParams[idx][i] = db.CreateSaaSegmentBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: model.SaaParameterID, + UnitID: model.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createSaaOptsParams[idx] = 
db.CreateSaaOptsBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } } - return tx.Commit(ctx) + + args := slices.Concat(createTimeseriesBatchParams...) + saaArgs := slices.Concat(createSaaSegmentBatchParams...) + createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) + + var err error + q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + saaArgs[i].LengthTimeseriesID = &r.ID + }) + if err != nil { + return err + } + q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateSaaSegmentBatch(ctx, saaArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { + if e != nil { + err = e + return + } + createSaaOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.CreateSaaOptsBatch(ctx, createSaaOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(batchExecErr(&err)) + return err +} + +func updateSaaOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { + updateSaaOptsParams := make([]db.UpdateSaaOptsBatchParams, len(ii)) + createMmtParams := 
make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) + if err != nil { + return err + } + updateSaaOptsParams[idx] = db.UpdateSaaOptsBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } + } + var err error + q.UpdateSaaOptsBatch(ctx, updateSaaOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/servicev2/instrument_status.go b/api/internal/servicev2/instrument_status.go index 433eaeb8..153890cd 100644 --- a/api/internal/servicev2/instrument_status.go +++ b/api/internal/servicev2/instrument_status.go @@ -25,19 +25,15 @@ func NewInstrumentStatusService(db *Database, q *db.Queries) *instrumentStatusSe } func (s instrumentStatusService) CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - for _, updateStatus := range ss { - if err := qtx.CreateOrUpdateInstrumentStatus(ctx, instrumentID, updateStatus.StatusID, updateStatus.Time); err != nil { - return err + args := make([]db.CreateOrUpdateInstrumentStatusBatchParams, len(ss)) + for idx, st := range ss { + args[idx] = db.CreateOrUpdateInstrumentStatusBatchParams{ + InstrumentID: instrumentID, + StatusID: st.StatusID, + Time: st.Time, } } - - return tx.Commit(ctx) + var err error + s.Queries.CreateOrUpdateInstrumentStatusBatch(ctx, args).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/servicev2/measurement.go 
b/api/internal/servicev2/measurement.go index 642845cc..938cb550 100644 --- a/api/internal/servicev2/measurement.go +++ b/api/internal/servicev2/measurement.go @@ -61,12 +61,12 @@ func (s measurementService) CreateTimeseriesMeasurements(ctx context.Context, mc defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - mmts := make([]db.CreateTimeseriesMeasruementsBatchParams, 0) + mmts := make([]db.CreateTimeseriesMeasurementsBatchParams, 0) notes := make([]db.CreateTimeseriesNotesBatchParams, 0) for idx := range mc { for _, m := range mc[idx].Items { - mmts = append(mmts, db.CreateTimeseriesMeasruementsBatchParams{ + mmts = append(mmts, db.CreateTimeseriesMeasurementsBatchParams{ TimeseriesID: mc[idx].TimeseriesID, Time: m.Time, Value: float64(m.Value), @@ -80,19 +80,11 @@ func (s measurementService) CreateTimeseriesMeasurements(ctx context.Context, mc }) } } - qtx.CreateTimeseriesMeasruementsBatch(ctx, mmts).Exec(func(_ int, e error) { - if e != nil { - err = e - } - }) + qtx.CreateTimeseriesMeasurementsBatch(ctx, mmts).Exec(batchExecErr(&err)) if err != nil { return err } - qtx.CreateTimeseriesNotesBatch(ctx, notes).Exec(func(_ int, e error) { - if e != nil { - err = e - } - }) + qtx.CreateTimeseriesNotesBatch(ctx, notes).Exec(batchExecErr(&err)) if err != nil { return err } @@ -128,19 +120,11 @@ func (s measurementService) CreateOrUpdateTimeseriesMeasurements(ctx context.Con }) } } - qtx.CreateOrUpdateTimeseriesMeasurementsBatch(ctx, mmts).Exec(func(_ int, e error) { - if e != nil { - err = e - } - }) + qtx.CreateOrUpdateTimeseriesMeasurementsBatch(ctx, mmts).Exec(batchExecErr(&err)) if err != nil { return err } - qtx.CreateOrUpdateTimeseriesNoteBatch(ctx, notes).Exec(func(_ int, e error) { - if e != nil { - err = e - } - }) + qtx.CreateOrUpdateTimeseriesNoteBatch(ctx, notes).Exec(batchExecErr(&err)) if err != nil { return err } diff --git a/api/internal/servicev2/plot_config_contour.go b/api/internal/servicev2/plot_config_contour.go index 9c0feb10..24e7ca6d 100644 --- 
a/api/internal/servicev2/plot_config_contour.go +++ b/api/internal/servicev2/plot_config_contour.go @@ -125,11 +125,6 @@ func createPlotContourConfigTimeseriesBatch(ctx context.Context, q *db.Queries, } } var err error - q.CreatePlotContourConfigTimeseriesBatch(ctx, args).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + q.CreatePlotContourConfigTimeseriesBatch(ctx, args).Exec(batchExecErr(&err)) return err } diff --git a/api/internal/servicev2/plot_config_scatter_line.go b/api/internal/servicev2/plot_config_scatter_line.go index 7175d80a..85edf668 100644 --- a/api/internal/servicev2/plot_config_scatter_line.go +++ b/api/internal/servicev2/plot_config_scatter_line.go @@ -115,12 +115,7 @@ func validateCreateTraces(ctx context.Context, q *db.Queries, pcID uuid.UUID, tr } } var err error - q.CreatePlotConfigTimeseriesTracesBatch(ctx, args).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) + q.CreatePlotConfigTimeseriesTracesBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return err } diff --git a/api/internal/servicev2/project.go b/api/internal/servicev2/project.go index 324932c3..21eed8fc 100644 --- a/api/internal/servicev2/project.go +++ b/api/internal/servicev2/project.go @@ -56,13 +56,7 @@ func (s projectService) CreateProjectBatch(ctx context.Context, projects []model } var err error pp := make([]db.CreateProjectsBatchRow, len(args)) - s.Queries.CreateProjectsBatch(ctx, args).QueryRow(func(i int, r db.CreateProjectsBatchRow, e error) { - if e != nil { - err = e - return - } - pp[i] = r - }) + s.Queries.CreateProjectsBatch(ctx, args).QueryRow(batchQueryRowCollect(pp, &err)) if err != nil { return nil, err } diff --git a/api/internal/servicev2/report_config.go b/api/internal/servicev2/report_config.go index 306d5219..21b78c28 100644 --- a/api/internal/servicev2/report_config.go +++ b/api/internal/servicev2/report_config.go @@ -63,11 +63,7 @@ func (s reportConfigService) CreateReportConfig(ctx context.Context, 
rc model.Re PlotConfigID: rc.PlotConfigs[idx].ID, } } - qtx.AssignReportConfigPlotConfigBatch(ctx, args).Exec(func(_ int, e error) { - if e != nil { - err = e - } - }) + qtx.AssignReportConfigPlotConfigBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return a, err } @@ -115,11 +111,7 @@ func (s reportConfigService) UpdateReportConfig(ctx context.Context, rc model.Re PlotConfigID: rc.PlotConfigs[idx].ID, } } - qtx.AssignReportConfigPlotConfigBatch(ctx, args).Exec(func(_ int, e error) { - if e != nil { - err = e - } - }) + qtx.AssignReportConfigPlotConfigBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return err } diff --git a/api/internal/servicev2/timeseries.go b/api/internal/servicev2/timeseries.go index 83141827..6b26c649 100644 --- a/api/internal/servicev2/timeseries.go +++ b/api/internal/servicev2/timeseries.go @@ -51,10 +51,6 @@ func (s timeseriesService) CreateTimeseriesBatch(ctx context.Context, tt []model } } var err error - s.Queries.CreateTimeseriesBatch(ctx, uu).Exec(func(_ int, e error) { - if e != nil { - err = e - } - }) + s.Queries.CreateTimeseriesBatch(ctx, uu).QueryRow(batchQueryRowErr[db.CreateTimeseriesBatchRow](&err)) return err } diff --git a/api/internal/servicev2/timeseries_calculated.go b/api/internal/servicev2/timeseries_calculated.go index 99633831..59d370ca 100644 --- a/api/internal/servicev2/timeseries_calculated.go +++ b/api/internal/servicev2/timeseries_calculated.go @@ -2,8 +2,6 @@ package servicev2 import ( "context" - "database/sql" - "errors" "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/model" @@ -26,75 +24,54 @@ func NewCalculatedTimeseriesService(db *Database, q *db.Queries) *calculatedTime return &calculatedTimeseriesService{db, q} } -func (s calculatedTimeseriesService) CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error { +func (s calculatedTimeseriesService) CreateCalculatedTimeseries(ctx context.Context, ct 
model.CalculatedTimeseries) error { tx, err := s.db.Begin(ctx) if err != nil { return err } defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - tsID, err := qtx.CreateCalculatedTimeseries(ctx, cc) + tsID, err := qtx.CreateCalculatedTimeseries(ctx, db.CreateCalculatedTimeseriesParams{ + InstrumentID: &ct.InstrumentID, + ParameterID: ct.ParameterID, + UnitID: ct.UnitID, + Name: ct.FormulaName, + }) if err != nil { return err } - - if err := qtx.CreateCalculation(ctx, tsID, cc.Formula); err != nil { - return err - } - - if err := tx.Commit(ctx); err != nil { + if err := qtx.CreateCalculation(ctx, db.CreateCalculationParams{ + TimeseriesID: tsID, + Contents: &ct.Formula, + }); err != nil { return err } - - return nil + return tx.Commit(ctx) } -func (s calculatedTimeseriesService) UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error { +func (s calculatedTimeseriesService) UpdateCalculatedTimeseries(ctx context.Context, ct model.CalculatedTimeseries) error { tx, err := s.db.Begin(ctx) if err != nil { return err } defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - defaultCts, err := qtx.GetOneCalculation(ctx, &cts.ID) - if err != nil { + if err := qtx.CreateOrUpdateCalculatedTimeseries(ctx, db.CreateOrUpdateCalculatedTimeseriesParams{ + ID: ct.ID, + InstrumentID: &ct.InstrumentID, + ParameterID: ct.ParameterID, + UnitID: ct.UnitID, + Name: ct.FormulaName, + }); err != nil { return err } - - if cts.InstrumentID == uuid.Nil { - cts.InstrumentID = defaultCts.InstrumentID - } - if cts.ParameterID == uuid.Nil { - cts.ParameterID = defaultCts.ParameterID - } - if cts.UnitID == uuid.Nil { - cts.UnitID = defaultCts.UnitID - } - if cts.Slug == "" { - cts.Slug = defaultCts.Slug - } - if cts.FormulaName == "" { - cts.FormulaName = defaultCts.FormulaName - } - if cts.Formula == "" { - cts.Formula = defaultCts.Formula - } - - if err := qtx.CreateOrUpdateCalculatedTimeseries(ctx, cts, defaultCts); err != nil && !errors.Is(err, sql.ErrNoRows) { + if 
err := qtx.CreateOrUpdateCalculation(ctx, db.CreateOrUpdateCalculationParams{ + TimeseriesID: ct.ID, + Contents: &ct.Formula, + }); err != nil { return err } - - if err := qtx.CreateOrUpdateCalculation(ctx, cts.ID, cts.Formula, defaultCts.Formula); err != nil && !errors.Is(err, sql.ErrNoRows) { - return err - } - - if err := tx.Commit(ctx); err != nil { - return err - } - - return nil + return tx.Commit(ctx) } diff --git a/api/internal/servicev2/uploader.go b/api/internal/servicev2/uploader.go index f89f6dd6..5faf0c64 100644 --- a/api/internal/servicev2/uploader.go +++ b/api/internal/servicev2/uploader.go @@ -14,9 +14,17 @@ import ( ) type UploaderService interface { - CreateTimeseriesMeasurementsFromCSVFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error - CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error - CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) error + ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]model.UploaderConfig, error) + ListUploaderConfigMappings(ctx context.Context, ucID uuid.UUID) ([]model.UploaderConfigMapping, error) + CreateUploaderConfig(ctx context.Context, uc model.UploaderConfig) (uuid.UUID, error) + UpdateUploaderConfig(ctx context.Context, uc model.UploaderConfig) error + DeleteUploaderConfig(ctx context.Context, ucID uuid.UUID) error + CreateUploaderConfigMapping(ctx context.Context, m model.UploaderConfigMapping) error + CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error + UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error + DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, ucID uuid.UUID) error + // CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader) error + // CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error } type 
uploaderService struct { @@ -28,20 +36,48 @@ func NewUploaderService(db *Database, q *db.Queries) *uploaderService { return &uploaderService{db, q} } -func CreateTimeseriesMeasurementsFromCSVFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { - // TODO - return nil +func (s uploaderService) CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { + args := make([]db.CreateUploaderConfigMappingsBatchParams, len(mm)) + for idx, m := range mm { + args[idx] = db.CreateUploaderConfigMappingsBatchParams{ + UploaderConfigID: m.UploaderConfigID, + FieldName: m.FieldName, + TimeseriesID: m.TimeseriesID, + } + } + var err error + s.Queries.CreateUploaderConfigMappingsBatch(ctx, args).Exec(batchExecErr(&err)) + return err } -func CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { - // TODO - return nil -} +func (s uploaderService) UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) -// TODO transition away from datalogger equivalency table to different parser that's uploader specific -func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader, mapperID uuid.UUID) error { - // TODO Get mapper by id + if err := qtx.DeleteAllUploaderConfigMappingsForUploaderConfig(ctx, ucID); err != nil { + return err + } + args := make([]db.CreateUploaderConfigMappingsBatchParams, len(mm)) + for idx, m := range mm { + args[idx] = db.CreateUploaderConfigMappingsBatchParams{ + UploaderConfigID: m.UploaderConfigID, + FieldName: m.FieldName, + TimeseriesID: m.TimeseriesID, + } + } + qtx.CreateUploaderConfigMappingsBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) +} + +func (s uploaderService) 
CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error { tx, err := s.db.Begin(ctx) if err != nil { return err @@ -70,23 +106,28 @@ func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Co } meta := model.Environment{ - // StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - // OSVersion: envHeader[4], - // ProgName: envHeader[5], - TableName: envHeader[6], - } - - // dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) - // if err != nil { - // return err - // } - // - // tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) - // if err != nil { - // return err - // } + StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + OSVersion: envHeader[4], + ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.GetDataloggerByModelSN(ctx, db.GetDataloggerByModelSNParams{ + Model: &meta.Model, + Sn: meta.SerialNo, + }) + if err != nil { + return err + } + tableID, err := qtx.GetOrCreateDataloggerTable(ctx, db.GetOrCreateDataloggerTableParams{ + DataloggerID: dl.ID, + TableName: meta.TableName, + }) + if err != nil { + return err + } // first two columns are timestamp and record number // we only want to collect the measurement fields here @@ -105,7 +146,7 @@ func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Co } fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, eqtRow := range eqt.Rows { + for _, eqtRow := range eqt.Fields { fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID } @@ -129,16 +170,18 @@ func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Co if !ok { continue } - v, err := strconv.ParseFloat(cell, 64) if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { continue } - - if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, tsID, t, v); err != nil { + if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, 
db.CreateOrUpdateTimeseriesMeasurementParams{ + TimeseriesID: tsID, + Time: t, + Value: v, + }); err != nil { return err } } } - return tx.Commit(ctx) + return nil } diff --git a/api/migrations/repeat/0140__views_depth_based_instruments.sql b/api/migrations/repeat/0140__views_depth_based_instruments.sql index 2e58d523..01e7a6ec 100644 --- a/api/migrations/repeat/0140__views_depth_based_instruments.sql +++ b/api/migrations/repeat/0140__views_depth_based_instruments.sql @@ -1,3 +1,9 @@ +-- TODO: some of this logic in the *_measurement views progressively got more complex, it might be better to +-- move these aggregate calculations to the business logic where less data would go over the wire +-- and would (hopefully) be less of a burden to maintain, +-- For later, look into using channels to aggregate multiple instruments simutaniously: +-- https://echo.labstack.com/docs/context#concurrency + CREATE OR REPLACE VIEW v_saa_segment AS ( SELECT seg.id, @@ -17,6 +23,7 @@ CREATE OR REPLACE VIEW v_saa_segment AS ( ) sub ON true ); + CREATE OR REPLACE VIEW v_saa_measurement AS ( SELECT r.instrument_id, @@ -77,20 +84,21 @@ CREATE OR REPLACE VIEW v_saa_measurement AS ( SELECT time FROM timeseries_measurement WHERE time = opts.initial_time ) a LEFT JOIN LATERAL (SELECT time FROM timeseries_measurement WHERE time = opts.initial_time) ia ON true - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.x_timeseries_id) x ON x.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.y_timeseries_id) y ON y.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.z_timeseries_id) z ON z.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.temp_timeseries_id) t ON t.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.x_timeseries_id) ix ON ix.time = ia.time - LEFT JOIN (SELECT 
time, value FROM timeseries_measurement WHERE timeseries_id = seg.y_timeseries_id) iy ON iy.time = ia.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.z_timeseries_id) iz ON iz.time = ia.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.temp_timeseries_id) it ON it.time = ia.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = opts.bottom_elevation_timeseries_id) b ON b.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.length_timeseries_id) l ON l.time = a.time + LEFT JOIN timeseries_measurement x ON x.timeseries_id = seg.x_timeseries_id AND x.time = a.time + LEFT JOIN timeseries_measurement y ON y.timeseries_id = seg.y_timeseries_id AND y.time = a.time + LEFT JOIN timeseries_measurement z ON z.timeseries_id = seg.z_timeseries_id AND z.time = a.time + LEFT JOIN timeseries_measurement t ON t.timeseries_id = seg.temp_timeseries_id AND t.time = a.time + LEFT JOIN timeseries_measurement ix ON ix.timeseries_id = seg.x_timeseries_id AND ix.time = ia.time + LEFT JOIN timeseries_measurement iy ON iy.timeseries_id = seg.y_timeseries_id AND iy.time = ia.time + LEFT JOIN timeseries_measurement iz ON iz.timeseries_id = seg.z_timeseries_id AND iz.time = ia.time + LEFT JOIN timeseries_measurement it ON it.timeseries_id = seg.temp_timeseries_id AND it.time = ia.time + LEFT JOIN timeseries_measurement b ON b.timeseries_id = opts.bottom_elevation_timeseries_id AND b.time = a.time + LEFT JOIN timeseries_measurement l ON l.timeseries_id = seg.length_timeseries_id AND l.time = a.time ) q ON true) r GROUP BY r.instrument_id, r.time ); + CREATE OR REPLACE VIEW v_ipi_segment AS ( SELECT seg.id, @@ -108,6 +116,7 @@ CREATE OR REPLACE VIEW v_ipi_segment AS ( ) sub ON true ); + CREATE OR REPLACE VIEW v_ipi_measurement AS ( SELECT r.instrument_id, @@ -129,7 +138,7 @@ CREATE OR REPLACE VIEW v_ipi_measurement AS ( q.inc_dev, 
COALESCE(q.cum_dev, sin(q.tilt * pi() / 180) * q.seg_length) cum_dev, q.temp, - SUM(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation + sum(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation FROM ipi_segment seg INNER JOIN ipi_opts opts ON opts.instrument_id = seg.instrument_id LEFT JOIN LATERAL ( @@ -147,11 +156,95 @@ CREATE OR REPLACE VIEW v_ipi_measurement AS ( UNION SELECT time FROM timeseries_measurement WHERE time = opts.initial_time ) a - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.tilt_timeseries_id) t ON t.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.inc_dev_timeseries_id) d ON d.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.temp_timeseries_id) temp ON temp.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = opts.bottom_elevation_timeseries_id) b ON b.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.length_timeseries_id) l ON l.time = a.time + LEFT JOIN timeseries_measurement t ON t.timeseries_id = seg.tilt_timeseries_id AND t.time = a.time + LEFT JOIN timeseries_measurement d ON d.timeseries_id = seg.inc_dev_timeseries_id AND d.time = a.time + LEFT JOIN timeseries_measurement temp ON temp.timeseries_id = seg.temp_timeseries_id AND temp.time = a.time + LEFT JOIN timeseries_measurement b ON b.timeseries_id = opts.bottom_elevation_timeseries_id AND b.time = a.time + LEFT JOIN timeseries_measurement l ON l.timeseries_id = seg.length_timeseries_id AND l.time = a.time + ) q ON true) r + GROUP BY r.instrument_id, r.time +); + + +CREATE OR REPLACE VIEW v_incl_segment AS ( + SELECT + seg.id, + seg.instrument_id, + seg.depth_timeseries_id, + seg.a0_timeseries_id, + seg.a180_timeseries_id, + seg.b0_timeseries_id, + seg.b180_timeseries_id + FROM incl_segment seg +); + + +CREATE OR REPLACE VIEW 
v_incl_measurement AS ( + SELECT + r.instrument_id, + r.time, + COALESCE(jsonb_agg(jsonb_build_object( + 'depth', r.depth, + 'a0', r.a0, + 'a180', r.a180, + 'b0', r.b0, + 'b180', r.b180, + 'a_checksum', r.a_checksum, + 'a_comb', r.a_comb, + 'a_increment', r.a_inc, + 'a_cum_dev', r.a_cum_dev, + 'b_checksum', r.b_checksum, + 'b_comb', r.b_comb, + 'b_increment', r.b_inc, + 'b_cum_dev', r.b_cum_dev + ) ORDER BY r.segment_id), '[]'::jsonb) AS measurements + FROM (SELECT DISTINCT + seg.instrument_id, + seg.id AS segment_id, + q.time, + q.depth, + q.a0, + q.a180, + q.b0, + q.b180, + q.a_checksum, + q.a_comb, + COALESCE(q.a_inc, 0) a_inc, + COALESCE(q.a_cum_dev, 0) a_cum_dev, + q.b_checksum, + q.b_comb, + COALESCE(q.b_inc, 0) b_inc, + COALESCE(q.b_cum_dev, 0) b_cum_dev + FROM incl_segment seg + INNER JOIN incl_opts opts ON opts.instrument_id = seg.instrument_id + LEFT JOIN LATERAL ( + SELECT + a.time, + d.value depth, + a0.value a0, + a180.value a180, + b0.value b0, + b180.value b180, + (a0.value + a180.value) a_checksum, + (a0.value - a180.value) / 2 a_comb, + (a0.value - a180.value) / 2 / NULLIF(a.bottom, 0) * 24 a_inc, + sum((a0.value - a180.value) / 2 / NULLIF(a.bottom, 0) * 24) OVER (ORDER BY d.value desc) a_cum_dev, + (b0.value + b180.value) b_checksum, + (b0.value - b180.value) / 2 b_comb, + (b0.value - b180.value) / 2 / NULLIF(a.bottom, 0) * 24 b_inc, + sum((items.b0 - items.b180) / 2 / NULLIF(a.bottom, 0) * 24) OVER (ORDER BY d.value desc) b_cum_dev + FROM ( + SELECT DISTINCT time FROM timeseries_measurement + WHERE timeseries_id IN (SELECT id FROM timeseries WHERE instrument_id = seg.instrument_id) + UNION + SELECT time FROM timeseries_measurement WHERE time = opts.initial_time + ) a + LEFT JOIN timeseries_measurement d ON d.timeseries_id = opts.depth_timeseries_id AND d.time = a.time + LEFT JOIN timeseries_measurement a0 ON a0.timeseries_id = opts.a0_timeseries_id AND a0.time = a.time + LEFT JOIN timeseries_measurement a180 ON a180.timeseries_id = 
opts.a180_timeseries_id AND a180.time = a.time + LEFT JOIN timeseries_measurement b0 ON b0.timeseries_id = opts.b0_timeseries_id AND b0.time = a.time + LEFT JOIN timeseries_measurement b180 ON b180.timeseries_id = opts.b180_timeseries_id AND b180.time = a.time + LEFT JOIN timeseries_measurement b ON b.timeseries_id = opts.bottom_elevation_timeseries_id AND b.time = a.time ) q ON true) r GROUP BY r.instrument_id, r.time ); diff --git a/api/migrations/repeat/0170__views_uploader.sql b/api/migrations/repeat/0170__views_uploader.sql new file mode 100644 index 00000000..32253bf5 --- /dev/null +++ b/api/migrations/repeat/0170__views_uploader.sql @@ -0,0 +1,26 @@ +CREATE VIEW v_uploader_config AS ( + SELECT + u.id, + u.project_id, + u.slug, + u.name, + u.description, + u.create_date, + u.creator, + pc.username creator_username, + u.update_date, + u.updater, + pu.username updater_username, + u.type, + u.tz_name, + u.time_field, + u.validated_field_enabled, + u.validated_field, + u.masked_field_enabled, + u.masked_field, + u.comment_field_enabled, + u.comment_field + FROM uploader_config u + INNER JOIN profile pc ON u.creator = pc.id + LEFT JOIN profile pu ON u.updater = pu.id +); diff --git a/api/migrations/schema/V1.15.00__incl_opts.sql b/api/migrations/schema/V1.15.00__incl_opts.sql new file mode 100644 index 00000000..e5669d77 --- /dev/null +++ b/api/migrations/schema/V1.15.00__incl_opts.sql @@ -0,0 +1,17 @@ +CREATE TABLE incl_opts ( + instrument_id uuid NOT NULL REFERENCES instrument(id) ON DELETE CASCADE, + num_segments int NOT NULL, + bottom_elevation_timeseries_id uuid REFERENCES timeseries(id), + initial_time timestamptz +); + +CREATE TABLE incl_segment ( + instrument_id uuid NOT NULL REFERENCES instrument(id) ON DELETE CASCADE, + id int NOT NULL, + depth_timeseries_id uuid REFERENCES timeseries(id), + a0_timeseries_id uuid REFERENCES timeseries(id), + a180_timeseries_id uuid REFERENCES timeseries(id), + b0_timeseries_id uuid REFERENCES timeseries(id), + 
b180_timeseries_id uuid REFERENCES timeseries(id), + PRIMARY KEY (instrument_id, id) +); diff --git a/api/migrations/schema/V1.16.00__uploader_config.sql b/api/migrations/schema/V1.16.00__uploader_config.sql new file mode 100644 index 00000000..f9da6073 --- /dev/null +++ b/api/migrations/schema/V1.16.00__uploader_config.sql @@ -0,0 +1,8 @@ +ALTER TABLE uploader_config +ADD COLUMN time_field text NOT NULL, +ADD COLUMN validated_field_enabled boolean NOT NULL DEFAULT false, +ADD COLUMN validated_field text, +ADD COLUMN masked_field_enabled boolean NOT NULL DEFAULT false, +ADD COLUMN masked_field text, +ADD COLUMN comment_field_enabled boolean NOT NULL DEFAULT false, +ADD COLUMN comment_field text; diff --git a/api/queries/aware.sql b/api/queries/aware.sql index 98b5c0e0..c0424375 100644 --- a/api/queries/aware.sql +++ b/api/queries/aware.sql @@ -11,5 +11,6 @@ order by aware_id, aware_parameter_key; -- name: CreateAwarePlatform :exec insert into aware_platform (instrument_id, aware_id) values ($1, $2); + -- name: CreateAwarePlatformBatch :batchexec insert into aware_platform (instrument_id, aware_id) values ($1, $2); diff --git a/api/queries/instrument_incl.sql b/api/queries/instrument_incl.sql new file mode 100644 index 00000000..63d932ac --- /dev/null +++ b/api/queries/instrument_incl.sql @@ -0,0 +1,82 @@ +-- name: CreateInclOpts :exec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: CreateInclOptsBatch :batchexec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: UpdateInclOpts :exec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: UpdateInclOptsBatch :batchexec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: GetAllInclSegmentsForInstrument :many 
+select * from v_incl_segment where instrument_id = $1; + + +-- name: CreateInclSegment :exec +insert into incl_segment ( + id, + instrument_id, + depth_timeseries_id, + a0_timeseries_id, + a180_timeseries_id, + b0_timeseries_id, + b180_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: CreateInclSegmentBatch :batchexec +insert into incl_segment ( + id, + instrument_id, + depth_timeseries_id, + a0_timeseries_id, + a180_timeseries_id, + b0_timeseries_id, + b180_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: UpdateInclSegment :exec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2; + + +-- name: UpdateInclSegmentsBatch :batchexec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2; + + +-- name: GetInclMeasurementsForInstrument :many +select m1.instrument_id, m1.time, m1.measurements +from v_incl_measurement m1 +where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_incl_measurement m2 +where m2.time in (select o.initial_time from incl_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc; diff --git a/api/queries/instrument_saa.sql b/api/queries/instrument_saa.sql index fd9bd817..9575c7df 100644 --- a/api/queries/instrument_saa.sql +++ b/api/queries/instrument_saa.sql @@ -60,6 +60,16 @@ update saa_segment set where id = $1 and instrument_id = $2; +-- name: UpdateSaaSegmentBatch :batchexec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2; + + -- name: GetSaaMeasurementsForInstrument :many select m1.instrument_id, m1.time, 
m1.measurements from v_saa_measurement m1 diff --git a/api/queries/timeseries.sql b/api/queries/timeseries.sql index fcc2bce1..bce8558b 100644 --- a/api/queries/timeseries.sql +++ b/api/queries/timeseries.sql @@ -2,16 +2,15 @@ select exists (select id from v_timeseries_stored where id = $1); +-- name: GetTimeseries :one +select * from v_timeseries where id=$1; + + -- name: GetTimeseriesProjectMap :many select timeseries_id, project_id from v_timeseries_project_map where timeseries_id in (sqlc.arg(timeseries_ids)::uuid[]); --- name: ListProjectTimeseries :many -select t.* from v_timeseries t -inner join project_instrument p on p.instrument_id = t.instrument_id -where p.project_id = $1; - -- name: ListInstrumentTimeseries :many select * from v_timeseries @@ -19,9 +18,9 @@ where instrument_id = $1; -- name: ListPlotConfigTimeseries :many -SELECT t.* FROM v_timeseries t -INNER JOIN plot_configuration_timeseries_trace pct ON pct.timeseries_id = t.id -WHERE pct.plot_configuration_id = $1; +select t.* from v_timeseries t +inner join plot_configuration_timeseries_trace pct on pct.timeseries_id = t.id +where pct.plot_configuration_id = $1; -- name: ListInstrumentGroupTimeseries :many @@ -30,8 +29,10 @@ inner join instrument_group_instruments gi on gi.instrument_id = t.instrument_id where gi.instrument_group_id = $1; --- name: ListTimeseries :many -SELECT * FROM v_timeseries WHERE id = $1; +-- name: ListTimeseriesForProject :many +select t.* from v_timeseries t +inner join project_instrument p on p.instrument_id = t.instrument_id +where p.project_id = $1; -- name: CreateTimeseries :one diff --git a/api/queries/timeseries_calculated.sql b/api/queries/timeseries_calculated.sql index 509c7ecd..f84f8a67 100644 --- a/api/queries/timeseries_calculated.sql +++ b/api/queries/timeseries_calculated.sql @@ -1,4 +1,4 @@ --- name: ListCalculatedTimeseries :many +-- name: GetCalculatedTimeseries :one select id, instrument_id, @@ -8,8 +8,7 @@ select name as formula_name, coalesce(contents, 
'') as formula from v_timeseries_computed -where (sqlc.narg(instrument_id) is null or instrument_id = sqlc.narg(instrument_id)) -and (sqlc.narg(id) is null or id = sqlc.narg(id)); +where id=$1; -- name: CreateCalculatedTimeseries :one @@ -29,8 +28,11 @@ insert into calculation (timeseries_id, contents) values ($1,$2); -- name: CreateOrUpdateCalculation :exec +with p as ( + select contents from calculation where timeseries_id=$1 +) insert into calculation (timeseries_id, contents) values ($1, $2) -on conflict (timeseries_id) do update set contents = coalesce(excluded.contents, $3); +on conflict (timeseries_id) do update set contents=coalesce(excluded.contents, p.contents); -- name: DeleteCalculatedTimeseries :exec @@ -38,6 +40,10 @@ delete from timeseries where id = $1 and id in (select timeseries_id from calcul -- name: CreateOrUpdateCalculatedTimeseries :exec +with p as ( + select * from timeseries + where id=$1 +) insert into timeseries ( id, instrument_id, @@ -48,9 +54,9 @@ insert into timeseries ( type ) values ($1, $2, $3, $4, slugify($5, 'timeseries'), $5, 'computed') on conflict (id) do update set - instrument_id = coalesce(excluded.instrument_id, $6), - parameter_id = coalesce(excluded.parameter_id, $7), - unit_id = coalesce(excluded.unit_id, $8), - slug = coalesce(excluded.slug, slugify($9, 'timeseries')), - name = coalesce(excluded.name, $9), - type = 'computed'; + instrument_id=coalesce(excluded.instrument_id, p.instrument_id), + parameter_id=coalesce(excluded.parameter_id, p.parameter_id), + unit_id=coalesce(excluded.unit_id, p.unit_id), + slug=coalesce(excluded.slug, p.slug), + name=coalesce(excluded.name, p.name), + type='computed'; diff --git a/api/queries/uploader.sql b/api/queries/uploader.sql new file mode 100644 index 00000000..86042b7b --- /dev/null +++ b/api/queries/uploader.sql @@ -0,0 +1,35 @@ +-- name: ListUploaderConfigsForProject :many +select * from uploader_config where project_id=$1; + + +-- name: CreateUploaderConfig :one +insert into 
uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) +values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) +returning id; + + +-- name: UpdateUploaderConfig :exec +update uploader_config set + name=$2, + description=$3, + update_date=$4, + updater=$5, + type=$6, + tz_name=$7 +where id=$1; + + +-- name: DeleteUploaderConfig :exec +delete from uploader_config where id=$1; + + +-- name: ListUploaderConfigMappings :many +select * from uploader_config_mapping where uploader_config_id=$1; + + +-- name: CreateUploaderConfigMappingsBatch :batchexec +insert into uploader_config_mapping (uploader_config_id, field_name, timeseries_id) values ($1, $2, $3); + + +-- name: DeleteAllUploaderConfigMappingsForUploaderConfig :exec +delete from uploader_config_mapping where uploader_config_id=$1; From 238dd4561c8926a962812e9c837a9efa31b5f7c5 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 8 Nov 2024 11:14:41 -0500 Subject: [PATCH 16/23] fix uploader config missing slug --- api/internal/model/uploader.go | 1 + 1 file changed, 1 insertion(+) diff --git a/api/internal/model/uploader.go b/api/internal/model/uploader.go index c527df8d..d461c08d 100644 --- a/api/internal/model/uploader.go +++ b/api/internal/model/uploader.go @@ -16,6 +16,7 @@ type UploaderConfig struct { ID uuid.UUID `json:"id" db:"id"` ProjectID uuid.UUID `json:"project_id" db:"project_id"` Name string `json:"name" db:"name"` + Slug string `json:"slug" db:"slug"` Description string `json:"description" db:"description"` Type UploaderConfigType `json:"type" db:"type"` TzName string `json:"tz_name" db:"tz_name"` From e0beba5a5dc568a2ae0f5025ded5b987662cd256 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Tue, 12 Nov 2024 18:15:57 -0500 Subject: [PATCH 17/23] complete basis for sqlc migration and large refactor; fix uploader batching issue using chunking --- api/Dockerfile.openapi | 2 +- api/go.mod | 6 - api/go.sum | 49 +- api/internal/db/alert.sql_gen.go | 100 +- 
api/internal/db/alert_check.sql_gen.go | 62 +- api/internal/db/alert_config.sql_gen.go | 124 +- .../db/alert_measurement_check.sql_gen.go | 12 +- api/internal/db/alert_subscription.sql_gen.go | 164 +- api/internal/db/autocomplete.sql_gen.go | 18 +- api/internal/db/aware.sql_gen.go | 46 +- api/internal/db/batch.go | 1521 ++++++++++------- api/internal/db/collection_group.sql_gen.go | 169 +- api/internal/db/datalogger.sql_gen.go | 246 +-- .../db/datalogger_telemetry.sql_gen.go | 70 +- api/internal/db/district_rollup.sql_gen.go | 32 +- api/internal/db/domain.sql_gen.go | 99 ++ api/internal/db/domains.sql_gen.go | 63 - api/internal/db/equivalency_table.sql_gen.go | 68 +- api/internal/db/evaluation.sql_gen.go | 214 +-- api/internal/db/heartbeat.sql_gen.go | 28 +- api/internal/db/home.sql_gen.go | 10 +- api/internal/db/instrument.sql_gen.go | 245 ++- api/internal/db/instrument_assign.sql_gen.go | 136 +- .../db/instrument_constant.sql_gen.go | 22 +- api/internal/db/instrument_group.sql_gen.go | 142 +- api/internal/db/instrument_incl.sql_gen.go | 142 +- api/internal/db/instrument_ipi.sql_gen.go | 142 +- api/internal/db/instrument_note.sql_gen.go | 77 +- api/internal/db/instrument_saa.sql_gen.go | 142 +- api/internal/db/instrument_status.sql_gen.go | 62 +- api/internal/db/manual.go | 8 + api/internal/db/measurement.manual.go | 99 ++ api/internal/db/measurement.sql_gen.go | 276 ++- api/internal/db/models.go | 40 + api/internal/db/plot_config.sql_gen.go | 110 +- .../db/plot_config_bullseye.sql_gen.go | 62 +- .../db/plot_config_contour.sql_gen.go | 152 +- .../db/plot_config_profile.sql_gen.go | 16 +- .../db/plot_config_scatter_line.sql_gen.go | 134 +- api/internal/db/profile.sql_gen.go | 206 +-- api/internal/db/project.sql_gen.go | 208 +-- api/internal/db/project_role.sql_gen.go | 54 +- api/internal/db/querier.go | 608 +++---- api/internal/db/report_config.sql_gen.go | 196 +-- api/internal/db/submittal.sql_gen.go | 72 +- api/internal/db/timeseries.sql_gen.go | 136 +- 
.../db/timeseries_calculated.sql_gen.go | 155 +- api/internal/db/timeseries_cwms.sql_gen.go | 28 +- api/internal/db/timeseries_process.manual.go | 450 +++++ api/internal/db/unit.sql_gen.go | 6 +- api/internal/db/uploader.sql_gen.go | 94 +- api/internal/dto/alert.go | 19 + api/internal/dto/alert_check.go | 53 + api/internal/dto/alert_config.go | 41 + .../{model => dto}/alert_evaluation_check.go | 21 +- .../{model => dto}/alert_measurement_check.go | 21 +- api/internal/dto/alert_subscription.go | 54 + api/internal/dto/autocomplete.go | 12 + api/internal/dto/aware.go | 25 + api/internal/dto/collection_group.go | 28 + api/internal/{model => dto}/common.go | 8 +- api/internal/dto/datalogger.go | 51 + .../{model => dto}/datalogger_parser.go | 2 +- api/internal/dto/db.go | 41 + api/internal/dto/district_rollup.go | 21 + api/internal/dto/domain.go | 27 + api/internal/dto/equivalency_table.go | 20 + api/internal/{model => dto}/error.go | 2 +- api/internal/dto/evaluation.go | 27 + api/internal/dto/heartbeat.go | 9 + api/internal/dto/home.go | 9 + api/internal/dto/instrument.go | 59 + api/internal/dto/instrument_assign.go | 28 + api/internal/dto/instrument_group.go | 51 + api/internal/dto/instrument_incl.go | 29 + api/internal/dto/instrument_ipi.go | 29 + api/internal/dto/instrument_note.go | 40 + api/internal/dto/instrument_saa.go | 30 + api/internal/dto/instrument_status.go | 18 + api/internal/{model => dto}/job.go | 2 +- api/internal/dto/measurement.go | 74 + api/internal/dto/measurement_inclinometer.go | 84 + api/internal/dto/plot_config.go | 73 + api/internal/dto/plot_config_bullseye.go | 33 + api/internal/dto/plot_config_contour.go | 37 + api/internal/dto/plot_config_profile.go | 26 + api/internal/dto/plot_config_scatter_line.go | 55 + api/internal/dto/profile.go | 51 + api/internal/dto/project.go | 35 + api/internal/dto/project_role.go | 14 + api/internal/dto/report_config.go | 53 + api/internal/{model => dto}/search.go | 3 +- api/internal/dto/submittal.go | 23 + 
api/internal/dto/timeseries.go | 65 + api/internal/dto/timeseries_calculated.go | 15 + api/internal/dto/timeseries_cwms.go | 13 + api/internal/{model => dto}/unit.go | 20 +- api/internal/dto/uploader.go | 28 + api/internal/email/email.go | 2 +- api/internal/handler/alert.go | 31 +- api/internal/handler/alert_check.go | 2 +- api/internal/handler/alert_config.go | 50 +- api/internal/handler/alert_subscription.go | 34 +- api/internal/handler/autocomplete.go | 14 +- api/internal/handler/aware.go | 10 +- api/internal/handler/collection_groups.go | 70 +- api/internal/handler/datalogger.go | 89 +- api/internal/handler/datalogger_telemetry.go | 54 +- api/internal/handler/district_rollup.go | 27 +- api/internal/handler/domain.go | 17 +- api/internal/handler/equivalency_table.go | 75 +- api/internal/handler/evaluation.go | 46 +- api/internal/handler/handler.go | 151 +- api/internal/handler/heartbeat.go | 26 +- api/internal/handler/home.go | 6 +- api/internal/handler/instrument.go | 67 +- api/internal/handler/instrument_assign.go | 38 +- api/internal/handler/instrument_constant.go | 30 +- api/internal/handler/instrument_group.go | 51 +- api/internal/handler/instrument_incl.go | 95 + api/internal/handler/instrument_ipi.go | 30 +- api/internal/handler/instrument_ipi_test.go | 2 +- api/internal/handler/instrument_note.go | 50 +- api/internal/handler/instrument_saa.go | 30 +- api/internal/handler/instrument_saa_test.go | 2 +- api/internal/handler/instrument_status.go | 18 +- api/internal/handler/measurement.go | 74 +- .../handler/measurement_inclinometer.go | 158 -- api/internal/handler/media.go | 1 - api/internal/handler/opendcs.go | 6 +- api/internal/handler/plot_config.go | 15 +- api/internal/handler/plot_config_bullseye.go | 34 +- api/internal/handler/plot_config_contour.go | 36 +- api/internal/handler/plot_config_profile.go | 22 +- .../handler/plot_config_scatter_line.go | 22 +- api/internal/handler/profile.go | 40 +- api/internal/handler/project.go | 68 +- 
api/internal/handler/project_role.go | 28 +- api/internal/handler/project_test.go | 2 +- api/internal/handler/report_config.go | 65 +- api/internal/handler/search.go | 37 +- api/internal/handler/submittal.go | 65 +- api/internal/handler/timeseries.go | 37 +- api/internal/handler/timeseries_calculated.go | 18 +- api/internal/handler/timeseries_cwms.go | 26 +- api/internal/handler/timeseries_process.go | 80 +- api/internal/handler/unit.go | 5 +- api/internal/handler/uploader.go | 42 +- api/internal/middleware/audit.go | 45 +- api/internal/middleware/key.go | 8 +- api/internal/middleware/middleware.go | 10 +- api/internal/model/alert.go | 115 -- api/internal/model/alert_check.go | 118 -- api/internal/model/alert_config.go | 239 --- api/internal/model/alert_subscription.go | 209 --- api/internal/model/autocomplete.go | 31 - api/internal/model/aware.go | 67 - api/internal/model/collection_group.go | 160 -- api/internal/model/datalogger.go | 271 --- api/internal/model/datalogger_telemetry.go | 82 - api/internal/model/db.go | 143 -- api/internal/model/district_rollup.go | 56 - api/internal/model/domains.go | 77 - api/internal/model/equivalency_table.go | 161 -- api/internal/model/evaluation.go | 224 --- api/internal/model/heartbeat.go | 50 - api/internal/model/home.go | 30 - api/internal/model/instrument.go | 272 --- api/internal/model/instrument_assign.go | 220 --- api/internal/model/instrument_constant.go | 40 - api/internal/model/instrument_group.go | 182 -- api/internal/model/instrument_ipi.go | 141 -- api/internal/model/instrument_note.go | 134 -- api/internal/model/instrument_saa.go | 154 -- api/internal/model/instrument_status.go | 100 -- api/internal/model/measurement.go | 344 ---- .../model/measurement_inclinometer.go | 213 --- api/internal/model/opendcs.go | 48 - api/internal/model/plot_config.go | 167 -- api/internal/model/plot_config_bullseye.go | 89 - api/internal/model/plot_config_contour.go | 136 -- api/internal/model/plot_config_profile.go | 45 - 
.../model/plot_config_scatter_line.go | 144 -- api/internal/model/profile.go | 188 -- api/internal/model/project.go | 228 --- api/internal/model/project_role.go | 101 -- api/internal/model/report_config.go | 190 -- api/internal/model/submittal.go | 149 -- api/internal/model/timeseries.go | 220 --- api/internal/model/timeseries_calculated.go | 140 -- api/internal/model/timeseries_cwms.go | 66 - api/internal/model/timeseries_process.go | 564 ------ api/internal/model/uploader.go | 111 -- api/internal/server/api.go | 18 +- api/internal/service/alert.go | 96 +- api/internal/service/alert_check.go | 297 +++- api/internal/service/alert_config.go | 113 +- api/internal/service/alert_subscription.go | 186 +- api/internal/service/autocomplete.go | 20 - api/internal/service/aware.go | 30 +- api/internal/service/collection_group.go | 65 +- api/internal/service/datalogger.go | 149 +- api/internal/service/datalogger_telemetry.go | 184 +- api/internal/service/db.go | 92 + api/internal/service/dcsloader.go | 22 +- api/internal/service/district_rollup.go | 22 - api/internal/service/domain.go | 25 +- api/internal/service/equivalency_table.go | 93 +- api/internal/service/evaluation.go | 143 +- api/internal/service/heartbeat.go | 22 - api/internal/service/home.go | 20 - api/internal/service/instrument.go | 267 +-- api/internal/service/instrument_assign.go | 366 ++-- api/internal/service/instrument_constant.go | 91 +- api/internal/service/instrument_group.go | 65 +- api/internal/service/instrument_incl.go | 161 ++ api/internal/service/instrument_ipi.go | 197 ++- api/internal/service/instrument_note.go | 62 +- api/internal/service/instrument_opts.go | 130 -- api/internal/service/instrument_saa.go | 187 +- api/internal/service/instrument_status.go | 43 +- api/internal/service/measurement.go | 273 ++- .../service/measurement_inclinometer.go | 120 -- api/internal/service/opendcs.go | 35 +- api/internal/service/plot_config.go | 61 +- api/internal/service/plot_config_bullseye.go | 88 +- 
api/internal/service/plot_config_contour.go | 157 +- api/internal/service/plot_config_profile.go | 84 +- .../service/plot_config_scatter_line.go | 143 +- api/internal/service/profile.go | 122 +- api/internal/service/project.go | 127 +- api/internal/service/project_role.go | 53 - api/internal/service/report_config.go | 198 ++- api/internal/service/submittal.go | 27 - api/internal/service/timeseries.go | 114 +- api/internal/service/timeseries_calculated.go | 101 +- api/internal/service/timeseries_cwms.go | 93 +- api/internal/service/timeseries_process.go | 21 - api/internal/service/unit.go | 20 - api/internal/service/uploader.go | 179 +- api/internal/servicev2/alert.go | 90 - api/internal/servicev2/alert_check.go | 551 ------ api/internal/servicev2/alert_config.go | 162 -- api/internal/servicev2/alert_subscription.go | 241 --- api/internal/servicev2/autocomplete.go | 11 - api/internal/servicev2/aware.go | 49 - api/internal/servicev2/collection_group.go | 19 - api/internal/servicev2/datalogger.go | 198 --- .../servicev2/datalogger_telemetry.go | 245 --- api/internal/servicev2/db.go | 55 - api/internal/servicev2/dcsloader.go | 125 -- api/internal/servicev2/district_rollup.go | 23 - api/internal/servicev2/domain.go | 22 - api/internal/servicev2/equivalency_table.go | 113 -- api/internal/servicev2/evaluation.go | 171 -- api/internal/servicev2/heartbeat.go | 23 - api/internal/servicev2/home.go | 21 - api/internal/servicev2/instrument.go | 235 --- api/internal/servicev2/instrument_assign.go | 329 ---- api/internal/servicev2/instrument_constant.go | 100 -- api/internal/servicev2/instrument_group.go | 50 - api/internal/servicev2/instrument_incl.go | 211 --- api/internal/servicev2/instrument_ipi.go | 210 --- api/internal/servicev2/instrument_note.go | 59 - api/internal/servicev2/instrument_saa.go | 200 --- api/internal/servicev2/instrument_status.go | 39 - api/internal/servicev2/measurement.go | 160 -- .../servicev2/measurement_inclinometer.go | 121 -- 
api/internal/servicev2/opendcs.go | 21 - api/internal/servicev2/plot_config.go | 75 - .../servicev2/plot_config_bullseye.go | 72 - api/internal/servicev2/plot_config_contour.go | 130 -- api/internal/servicev2/plot_config_profile.go | 69 - .../servicev2/plot_config_scatter_line.go | 161 -- api/internal/servicev2/profile.go | 89 - api/internal/servicev2/project.go | 132 -- api/internal/servicev2/project_role.go | 55 - api/internal/servicev2/report_config.go | 176 -- api/internal/servicev2/submittal.go | 28 - api/internal/servicev2/timeseries.go | 56 - .../servicev2/timeseries_calculated.go | 77 - api/internal/servicev2/timeseries_cwms.go | 98 -- api/internal/servicev2/timeseries_process.go | 22 - api/internal/servicev2/unit.go | 20 - api/internal/servicev2/uploader.go | 187 -- api/internal/{model => util}/timewindow.go | 2 +- .../repeat/0040__views_instruments.sql | 11 + .../repeat/0050__views_timeseries.sql | 33 +- .../V1.17.00__evaluation_del_cascade.sql | 8 + api/queries/alert.sql | 16 +- api/queries/alert_check.sql | 8 +- api/queries/alert_config.sql | 22 +- api/queries/alert_measurement_check.sql | 4 +- api/queries/alert_subscription.sql | 26 +- api/queries/autocomplete.sql | 6 +- api/queries/aware.sql | 8 +- api/queries/collection_group.sql | 35 +- api/queries/datalogger.sql | 72 +- api/queries/datalogger_telemetry.sql | 21 +- api/queries/district_rollup.sql | 16 +- api/queries/domain.sql | 10 + api/queries/domains.sql | 6 - api/queries/equivalency_table.sql | 18 +- api/queries/evaluation.sql | 42 +- api/queries/heartbeat.sql | 12 +- api/queries/home.sql | 2 +- api/queries/instrument.sql | 61 +- api/queries/instrument_assign.sql | 16 +- api/queries/instrument_constant.sql | 8 +- api/queries/instrument_group.sql | 38 +- api/queries/instrument_incl.sql | 26 +- api/queries/instrument_ipi.sql | 26 +- api/queries/instrument_note.sql | 42 +- api/queries/instrument_saa.sql | 26 +- api/queries/instrument_status.sql | 24 +- api/queries/measurement.sql | 93 +- 
api/queries/plot_config.sql | 14 +- api/queries/plot_config_bullseye.sql | 14 +- api/queries/plot_config_contour.sql | 22 +- api/queries/plot_config_profile.sql | 4 +- api/queries/plot_config_scatter_line.sql | 20 +- api/queries/profile.sql | 24 +- api/queries/project.sql | 26 +- api/queries/project_role.sql | 12 +- api/queries/report_config.sql | 28 +- api/queries/submittal.sql | 22 +- api/queries/timeseries.sql | 38 +- api/queries/timeseries_calculated.sql | 33 +- api/queries/timeseries_cwms.sql | 10 +- api/queries/unit.sql | 2 +- api/queries/uploader.sql | 14 +- go.work.sum | 9 +- 332 files changed, 9874 insertions(+), 18921 deletions(-) create mode 100644 api/internal/db/domain.sql_gen.go delete mode 100644 api/internal/db/domains.sql_gen.go create mode 100644 api/internal/db/manual.go create mode 100644 api/internal/db/measurement.manual.go create mode 100644 api/internal/db/timeseries_process.manual.go create mode 100644 api/internal/dto/alert.go create mode 100644 api/internal/dto/alert_check.go create mode 100644 api/internal/dto/alert_config.go rename api/internal/{model => dto}/alert_evaluation_check.go (77%) rename api/internal/{model => dto}/alert_measurement_check.go (79%) create mode 100644 api/internal/dto/alert_subscription.go create mode 100644 api/internal/dto/autocomplete.go create mode 100644 api/internal/dto/aware.go create mode 100644 api/internal/dto/collection_group.go rename api/internal/{model => dto}/common.go (94%) create mode 100644 api/internal/dto/datalogger.go rename api/internal/{model => dto}/datalogger_parser.go (99%) create mode 100644 api/internal/dto/db.go create mode 100644 api/internal/dto/district_rollup.go create mode 100644 api/internal/dto/domain.go create mode 100644 api/internal/dto/equivalency_table.go rename api/internal/{model => dto}/error.go (98%) create mode 100644 api/internal/dto/evaluation.go create mode 100644 api/internal/dto/heartbeat.go create mode 100644 api/internal/dto/home.go create mode 100644 
api/internal/dto/instrument.go create mode 100644 api/internal/dto/instrument_assign.go create mode 100644 api/internal/dto/instrument_group.go create mode 100644 api/internal/dto/instrument_incl.go create mode 100644 api/internal/dto/instrument_ipi.go create mode 100644 api/internal/dto/instrument_note.go create mode 100644 api/internal/dto/instrument_saa.go create mode 100644 api/internal/dto/instrument_status.go rename api/internal/{model => dto}/job.go (93%) create mode 100644 api/internal/dto/measurement.go create mode 100644 api/internal/dto/measurement_inclinometer.go create mode 100644 api/internal/dto/plot_config.go create mode 100644 api/internal/dto/plot_config_bullseye.go create mode 100644 api/internal/dto/plot_config_contour.go create mode 100644 api/internal/dto/plot_config_profile.go create mode 100644 api/internal/dto/plot_config_scatter_line.go create mode 100644 api/internal/dto/profile.go create mode 100644 api/internal/dto/project.go create mode 100644 api/internal/dto/project_role.go create mode 100644 api/internal/dto/report_config.go rename api/internal/{model => dto}/search.go (66%) create mode 100644 api/internal/dto/submittal.go create mode 100644 api/internal/dto/timeseries.go create mode 100644 api/internal/dto/timeseries_calculated.go create mode 100644 api/internal/dto/timeseries_cwms.go rename api/internal/{model => dto}/unit.go (56%) create mode 100644 api/internal/dto/uploader.go create mode 100644 api/internal/handler/instrument_incl.go delete mode 100644 api/internal/handler/measurement_inclinometer.go delete mode 100644 api/internal/model/alert.go delete mode 100644 api/internal/model/alert_check.go delete mode 100644 api/internal/model/alert_config.go delete mode 100644 api/internal/model/alert_subscription.go delete mode 100644 api/internal/model/autocomplete.go delete mode 100644 api/internal/model/aware.go delete mode 100644 api/internal/model/collection_group.go delete mode 100644 api/internal/model/datalogger.go delete 
mode 100644 api/internal/model/datalogger_telemetry.go delete mode 100644 api/internal/model/db.go delete mode 100644 api/internal/model/district_rollup.go delete mode 100644 api/internal/model/domains.go delete mode 100644 api/internal/model/equivalency_table.go delete mode 100644 api/internal/model/evaluation.go delete mode 100644 api/internal/model/heartbeat.go delete mode 100644 api/internal/model/home.go delete mode 100644 api/internal/model/instrument.go delete mode 100644 api/internal/model/instrument_assign.go delete mode 100644 api/internal/model/instrument_constant.go delete mode 100644 api/internal/model/instrument_group.go delete mode 100644 api/internal/model/instrument_ipi.go delete mode 100644 api/internal/model/instrument_note.go delete mode 100644 api/internal/model/instrument_saa.go delete mode 100644 api/internal/model/instrument_status.go delete mode 100644 api/internal/model/measurement.go delete mode 100644 api/internal/model/measurement_inclinometer.go delete mode 100644 api/internal/model/opendcs.go delete mode 100644 api/internal/model/plot_config.go delete mode 100644 api/internal/model/plot_config_bullseye.go delete mode 100644 api/internal/model/plot_config_contour.go delete mode 100644 api/internal/model/plot_config_profile.go delete mode 100644 api/internal/model/plot_config_scatter_line.go delete mode 100644 api/internal/model/profile.go delete mode 100644 api/internal/model/project.go delete mode 100644 api/internal/model/project_role.go delete mode 100644 api/internal/model/report_config.go delete mode 100644 api/internal/model/submittal.go delete mode 100644 api/internal/model/timeseries.go delete mode 100644 api/internal/model/timeseries_calculated.go delete mode 100644 api/internal/model/timeseries_cwms.go delete mode 100644 api/internal/model/timeseries_process.go delete mode 100644 api/internal/model/uploader.go delete mode 100644 api/internal/service/autocomplete.go create mode 100644 api/internal/service/db.go delete mode 
100644 api/internal/service/district_rollup.go delete mode 100644 api/internal/service/heartbeat.go delete mode 100644 api/internal/service/home.go create mode 100644 api/internal/service/instrument_incl.go delete mode 100644 api/internal/service/instrument_opts.go delete mode 100644 api/internal/service/measurement_inclinometer.go delete mode 100644 api/internal/service/project_role.go delete mode 100644 api/internal/service/submittal.go delete mode 100644 api/internal/service/timeseries_process.go delete mode 100644 api/internal/service/unit.go delete mode 100644 api/internal/servicev2/alert.go delete mode 100644 api/internal/servicev2/alert_check.go delete mode 100644 api/internal/servicev2/alert_config.go delete mode 100644 api/internal/servicev2/alert_subscription.go delete mode 100644 api/internal/servicev2/autocomplete.go delete mode 100644 api/internal/servicev2/aware.go delete mode 100644 api/internal/servicev2/collection_group.go delete mode 100644 api/internal/servicev2/datalogger.go delete mode 100644 api/internal/servicev2/datalogger_telemetry.go delete mode 100644 api/internal/servicev2/db.go delete mode 100644 api/internal/servicev2/dcsloader.go delete mode 100644 api/internal/servicev2/district_rollup.go delete mode 100644 api/internal/servicev2/domain.go delete mode 100644 api/internal/servicev2/equivalency_table.go delete mode 100644 api/internal/servicev2/evaluation.go delete mode 100644 api/internal/servicev2/heartbeat.go delete mode 100644 api/internal/servicev2/home.go delete mode 100644 api/internal/servicev2/instrument.go delete mode 100644 api/internal/servicev2/instrument_assign.go delete mode 100644 api/internal/servicev2/instrument_constant.go delete mode 100644 api/internal/servicev2/instrument_group.go delete mode 100644 api/internal/servicev2/instrument_incl.go delete mode 100644 api/internal/servicev2/instrument_ipi.go delete mode 100644 api/internal/servicev2/instrument_note.go delete mode 100644 
api/internal/servicev2/instrument_saa.go delete mode 100644 api/internal/servicev2/instrument_status.go delete mode 100644 api/internal/servicev2/measurement.go delete mode 100644 api/internal/servicev2/measurement_inclinometer.go delete mode 100644 api/internal/servicev2/opendcs.go delete mode 100644 api/internal/servicev2/plot_config.go delete mode 100644 api/internal/servicev2/plot_config_bullseye.go delete mode 100644 api/internal/servicev2/plot_config_contour.go delete mode 100644 api/internal/servicev2/plot_config_profile.go delete mode 100644 api/internal/servicev2/plot_config_scatter_line.go delete mode 100644 api/internal/servicev2/profile.go delete mode 100644 api/internal/servicev2/project.go delete mode 100644 api/internal/servicev2/project_role.go delete mode 100644 api/internal/servicev2/report_config.go delete mode 100644 api/internal/servicev2/submittal.go delete mode 100644 api/internal/servicev2/timeseries.go delete mode 100644 api/internal/servicev2/timeseries_calculated.go delete mode 100644 api/internal/servicev2/timeseries_cwms.go delete mode 100644 api/internal/servicev2/timeseries_process.go delete mode 100644 api/internal/servicev2/unit.go delete mode 100644 api/internal/servicev2/uploader.go rename api/internal/{model => util}/timewindow.go (98%) create mode 100644 api/migrations/schema/V1.17.00__evaluation_del_cascade.sql create mode 100644 api/queries/domain.sql delete mode 100644 api/queries/domains.sql diff --git a/api/Dockerfile.openapi b/api/Dockerfile.openapi index a53a03a1..224c921f 100644 --- a/api/Dockerfile.openapi +++ b/api/Dockerfile.openapi @@ -9,7 +9,7 @@ RUN go install github.com/swaggo/swag/cmd/swag@latest COPY . . 
RUN swag init --ot json --pd -g cmd/midas-api/main.go --parseInternal true --dir internal -RUN find ./docs -type f -exec sed -i '' -e 's/github_com_USACE_instrumentation-api_api_internal_model.//g' {} \; +RUN find ./docs -type f -exec sed -i '' -e 's/github_com_USACE_instrumentation-api_api_internal_dto.//g' {} \; FROM openapitools/openapi-generator-cli:latest-release AS docgen WORKDIR /work diff --git a/api/go.mod b/api/go.mod index 0935beec..dfe595d1 100644 --- a/api/go.mod +++ b/api/go.mod @@ -17,15 +17,12 @@ require ( github.com/golang-jwt/jwt/v5 v5.2.1 github.com/google/uuid v1.6.0 github.com/hashicorp/go-version v1.7.0 - github.com/jackc/pgconn v1.14.3 - github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 github.com/jackc/pgtype v1.14.3 github.com/jackc/pgx/v5 v5.7.1 github.com/jmoiron/sqlx v1.4.0 github.com/labstack/echo-jwt/v4 v4.2.0 github.com/labstack/echo/v4 v4.12.0 github.com/lib/pq v1.10.9 - github.com/paulmach/orb v0.11.1 github.com/stretchr/testify v1.9.0 github.com/tidwall/btree v1.7.0 github.com/twpayne/go-geom v1.5.7 @@ -53,10 +50,8 @@ require ( github.com/aws/smithy-go v1.20.4 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect - github.com/jackc/chunkreader/v2 v2.0.1 // indirect github.com/jackc/pgio v1.0.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect - github.com/jackc/pgproto3/v2 v2.3.3 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect github.com/jackc/pgx/v4 v4.18.3 // indirect github.com/jackc/puddle/v2 v2.2.2 // indirect @@ -70,7 +65,6 @@ require ( github.com/valyala/fasttemplate v1.2.2 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect - go.mongodb.org/mongo-driver v1.16.1 // indirect golang.org/x/net v0.29.0 // indirect golang.org/x/sync v0.8.0 // indirect golang.org/x/sys v0.25.0 // indirect 
diff --git a/api/go.sum b/api/go.sum index f9aaa63f..3a54a230 100644 --- a/api/go.sum +++ b/api/go.sum @@ -1,4 +1,3 @@ -filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= @@ -77,22 +76,14 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/go-cmp v0.5.2/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -101,6 +92,7 @@ github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09 github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= @@ -113,16 +105,14 @@ github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8 github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= -github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 h1:Dj0L5fhJ9F82ZJyVOmBx6msDp/kfd1t9GRfny/mfJA0= -github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds= github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= github.com/jackc/pgio 
v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= -github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= @@ -162,10 +152,8 @@ github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= 
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -198,18 +186,12 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU= -github.com/paulmach/orb v0.11.1/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU= -github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -233,7 +215,6 @@ github.com/stretchr/testify v1.2.2/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= @@ -242,7 +223,6 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI= github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= -github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/twpayne/go-geom v1.5.7 h1:7fdceDUr03/MP7rAKOaTV6x9njMiQdxB/D0PDzMTCDc= github.com/twpayne/go-geom v1.5.7/go.mod h1:y4fTAQtLedXW8eG2Yo4tYrIGN1yIwwKkmA+K3iSHKBA= github.com/twpayne/pgx-geom v0.0.2 h1:DZcp66JfCwyfQMH1JNBa0vfF+/hi4WQsfHMqBRXp8WI= @@ -251,23 +231,14 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= -github.com/xdg-go/stringprep v1.0.3/go.mod 
h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= -go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g= -go.mongodb.org/mongo-driver v1.16.1 h1:rIVLL3q0IHM39dvE+z2ulZLp9ENZKThVfuvN/IiN4l8= -go.mongodb.org/mongo-driver v1.16.1/go.mod h1:oB6AhJQvFQL4LEHyXi6aJzQJtBiTQHiAd83l0GdFaiw= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -291,7 +262,6 @@ golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWP golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod 
h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= @@ -301,8 +271,6 @@ golang.org/x/image v0.20.0/go.mod h1:0a88To4CYVBAHp5FXJm8o7QbUl37Vd85ply1vyD8auM golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -310,10 +278,7 @@ golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod 
h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= @@ -322,9 +287,6 @@ golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo= golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= @@ -340,9 +302,7 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -381,18 +341,13 @@ golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= diff --git a/api/internal/db/alert.sql_gen.go b/api/internal/db/alert.sql_gen.go index ff95ac9c..742218a4 100644 --- a/api/internal/db/alert.sql_gen.go +++ b/api/internal/db/alert.sql_gen.go @@ -12,45 +12,16 @@ import ( "github.com/google/uuid" ) -const createAlert = `-- name: CreateAlert :exec +const alertCreate = `-- name: AlertCreate :exec insert into alert (alert_config_id) values ($1) ` -func (q *Queries) CreateAlert(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, createAlert, alertConfigID) +func (q *Queries) AlertCreate(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, alertCreate, alertConfigID) return err } -const createAlertRead = `-- name: CreateAlertRead :exec -insert into alert_read (profile_id, alert_id) values ($1, $2) -on conflict do nothing -` - -type CreateAlertReadParams struct { - ProfileID uuid.UUID `json:"profile_id"` - AlertID uuid.UUID `json:"alert_id"` -} - -func (q *Queries) CreateAlertRead(ctx context.Context, arg CreateAlertReadParams) error { - _, err := q.db.Exec(ctx, createAlertRead, arg.ProfileID, arg.AlertID) - return err -} - -const deleteAlertRead = `-- name: DeleteAlertRead :exec -delete from alert_read where profile_id = $1 and alert_id = $2 -` - -type DeleteAlertReadParams struct { - ProfileID uuid.UUID `json:"profile_id"` - AlertID uuid.UUID `json:"alert_id"` -} - -func (q 
*Queries) DeleteAlertRead(ctx context.Context, arg DeleteAlertReadParams) error { - _, err := q.db.Exec(ctx, deleteAlertRead, arg.ProfileID, arg.AlertID) - return err -} - -const getAlert = `-- name: GetAlert :one +const alertGet = `-- name: AlertGet :one select a.id, a.alert_config_id, a.create_date, a.project_id, a.project_name, a.name, a.body, a.instruments, case when r.alert_id is not null then true else false end as read @@ -61,12 +32,12 @@ where aps.profile_id = $1 and a.id = $2 ` -type GetAlertParams struct { +type AlertGetParams struct { ProfileID uuid.UUID `json:"profile_id"` ID uuid.UUID `json:"id"` } -type GetAlertRow struct { +type AlertGetRow struct { ID uuid.UUID `json:"id"` AlertConfigID uuid.UUID `json:"alert_config_id"` CreateDate time.Time `json:"create_date"` @@ -78,9 +49,9 @@ type GetAlertRow struct { Read bool `json:"read"` } -func (q *Queries) GetAlert(ctx context.Context, arg GetAlertParams) (GetAlertRow, error) { - row := q.db.QueryRow(ctx, getAlert, arg.ProfileID, arg.ID) - var i GetAlertRow +func (q *Queries) AlertGet(ctx context.Context, arg AlertGetParams) (AlertGetRow, error) { + row := q.db.QueryRow(ctx, alertGet, arg.ProfileID, arg.ID) + var i AlertGetRow err := row.Scan( &i.ID, &i.AlertConfigID, @@ -95,7 +66,7 @@ func (q *Queries) GetAlert(ctx context.Context, arg GetAlertParams) (GetAlertRow return i, err } -const listAlertsForInstrument = `-- name: ListAlertsForInstrument :many +const alertListForInstrument = `-- name: AlertListForInstrument :many select id, alert_config_id, create_date, project_id, project_name, name, body, instruments from v_alert where alert_config_id = any( select id from alert_config_instrument @@ -103,8 +74,8 @@ where alert_config_id = any( ) ` -func (q *Queries) ListAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlert, error) { - rows, err := q.db.Query(ctx, listAlertsForInstrument, instrumentID) +func (q *Queries) AlertListForInstrument(ctx context.Context, instrumentID uuid.UUID) 
([]VAlert, error) { + rows, err := q.db.Query(ctx, alertListForInstrument, instrumentID) if err != nil { return nil, err } @@ -132,7 +103,7 @@ func (q *Queries) ListAlertsForInstrument(ctx context.Context, instrumentID uuid return items, nil } -const listAlertsForProfile = `-- name: ListAlertsForProfile :many +const alertListForProfile = `-- name: AlertListForProfile :many select a.id, a.alert_config_id, a.create_date, a.project_id, a.project_name, a.name, a.body, a.instruments, case when r.alert_id is not null then true else false end as read @@ -142,7 +113,7 @@ inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_confi where aps.profile_id = $1 ` -type ListAlertsForProfileRow struct { +type AlertListForProfileRow struct { ID uuid.UUID `json:"id"` AlertConfigID uuid.UUID `json:"alert_config_id"` CreateDate time.Time `json:"create_date"` @@ -154,15 +125,15 @@ type ListAlertsForProfileRow struct { Read bool `json:"read"` } -func (q *Queries) ListAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]ListAlertsForProfileRow, error) { - rows, err := q.db.Query(ctx, listAlertsForProfile, profileID) +func (q *Queries) AlertListForProfile(ctx context.Context, profileID uuid.UUID) ([]AlertListForProfileRow, error) { + rows, err := q.db.Query(ctx, alertListForProfile, profileID) if err != nil { return nil, err } defer rows.Close() - items := []ListAlertsForProfileRow{} + items := []AlertListForProfileRow{} for rows.Next() { - var i ListAlertsForProfileRow + var i AlertListForProfileRow if err := rows.Scan( &i.ID, &i.AlertConfigID, @@ -184,12 +155,12 @@ func (q *Queries) ListAlertsForProfile(ctx context.Context, profileID uuid.UUID) return items, nil } -const listAlertsForProject = `-- name: ListAlertsForProject :many +const alertListForProject = `-- name: AlertListForProject :many select id, alert_config_id, create_date, project_id, project_name, name, body, instruments from v_alert where project_id = $1 ` -func (q *Queries) 
ListAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]VAlert, error) { - rows, err := q.db.Query(ctx, listAlertsForProject, projectID) +func (q *Queries) AlertListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlert, error) { + rows, err := q.db.Query(ctx, alertListForProject, projectID) if err != nil { return nil, err } @@ -216,3 +187,32 @@ func (q *Queries) ListAlertsForProject(ctx context.Context, projectID uuid.UUID) } return items, nil } + +const alertReadCreate = `-- name: AlertReadCreate :exec +insert into alert_read (profile_id, alert_id) values ($1, $2) +on conflict do nothing +` + +type AlertReadCreateParams struct { + ProfileID uuid.UUID `json:"profile_id"` + AlertID uuid.UUID `json:"alert_id"` +} + +func (q *Queries) AlertReadCreate(ctx context.Context, arg AlertReadCreateParams) error { + _, err := q.db.Exec(ctx, alertReadCreate, arg.ProfileID, arg.AlertID) + return err +} + +const alertReadDelete = `-- name: AlertReadDelete :exec +delete from alert_read where profile_id = $1 and alert_id = $2 +` + +type AlertReadDeleteParams struct { + ProfileID uuid.UUID `json:"profile_id"` + AlertID uuid.UUID `json:"alert_id"` +} + +func (q *Queries) AlertReadDelete(ctx context.Context, arg AlertReadDeleteParams) error { + _, err := q.db.Exec(ctx, alertReadDelete, arg.ProfileID, arg.AlertID) + return err +} diff --git a/api/internal/db/alert_check.sql_gen.go b/api/internal/db/alert_check.sql_gen.go index 8ec1c556..e02f9bce 100644 --- a/api/internal/db/alert_check.sql_gen.go +++ b/api/internal/db/alert_check.sql_gen.go @@ -12,27 +12,7 @@ import ( "github.com/google/uuid" ) -const createNextSubmittalFromNewAlertConfigDate = `-- name: CreateNextSubmittalFromNewAlertConfigDate :exec -insert into submittal (alert_config_id, create_date, due_date) -select - ac.id, - $2::timestamptz, - $2::timestamptz + ac.schedule_interval -from alert_config ac -where ac.id = $1 -` - -type CreateNextSubmittalFromNewAlertConfigDateParams struct { - ID uuid.UUID 
`json:"id"` - Date time.Time `json:"date"` -} - -func (q *Queries) CreateNextSubmittalFromNewAlertConfigDate(ctx context.Context, arg CreateNextSubmittalFromNewAlertConfigDateParams) error { - _, err := q.db.Exec(ctx, createNextSubmittalFromNewAlertConfigDate, arg.ID, arg.Date) - return err -} - -const listAndCheckAlertConfigs = `-- name: ListAndCheckAlertConfigs :many +const alertConfigListUpdateLastChecked = `-- name: AlertConfigListUpdateLastChecked :many update alert_config ac1 set last_checked = now() from ( @@ -43,8 +23,8 @@ where ac1.id = ac2.id returning ac2.id, ac2.name, ac2.body, ac2.creator, ac2.creator_username, ac2.create_date, ac2.updater, ac2.updater_username, ac2.update_date, ac2.project_id, ac2.project_name, ac2.alert_type_id, ac2.alert_type, ac2.start_date, ac2.schedule_interval, ac2.mute_consecutive_alerts, ac2.remind_interval, ac2.warning_interval, ac2.last_checked, ac2.last_reminded, ac2.create_next_submittal_from, ac2.instruments, ac2.alert_email_subscriptions ` -func (q *Queries) ListAndCheckAlertConfigs(ctx context.Context) ([]VAlertConfig, error) { - rows, err := q.db.Query(ctx, listAndCheckAlertConfigs) +func (q *Queries) AlertConfigListUpdateLastChecked(ctx context.Context) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListUpdateLastChecked) if err != nil { return nil, err } @@ -87,21 +67,41 @@ func (q *Queries) ListAndCheckAlertConfigs(ctx context.Context) ([]VAlertConfig, return items, nil } -const updateAlertConfigLastReminded = `-- name: UpdateAlertConfigLastReminded :exec +const alertConfigUpdateLastReminded = `-- name: AlertConfigUpdateLastReminded :exec update alert_config set last_reminded = $2 where id = $1 ` -type UpdateAlertConfigLastRemindedParams struct { +type AlertConfigUpdateLastRemindedParams struct { ID uuid.UUID `json:"id"` LastReminded *time.Time `json:"last_reminded"` } -func (q *Queries) UpdateAlertConfigLastReminded(ctx context.Context, arg UpdateAlertConfigLastRemindedParams) error { - _, err 
:= q.db.Exec(ctx, updateAlertConfigLastReminded, arg.ID, arg.LastReminded) +func (q *Queries) AlertConfigUpdateLastReminded(ctx context.Context, arg AlertConfigUpdateLastRemindedParams) error { + _, err := q.db.Exec(ctx, alertConfigUpdateLastReminded, arg.ID, arg.LastReminded) + return err +} + +const submittalCreateNextFromNewAlertConfigDate = `-- name: SubmittalCreateNextFromNewAlertConfigDate :exec +insert into submittal (alert_config_id, create_date, due_date) +select + ac.id, + $2::timestamptz, + $2::timestamptz + ac.schedule_interval +from alert_config ac +where ac.id = $1 +` + +type SubmittalCreateNextFromNewAlertConfigDateParams struct { + ID uuid.UUID `json:"id"` + Date time.Time `json:"date"` +} + +func (q *Queries) SubmittalCreateNextFromNewAlertConfigDate(ctx context.Context, arg SubmittalCreateNextFromNewAlertConfigDateParams) error { + _, err := q.db.Exec(ctx, submittalCreateNextFromNewAlertConfigDate, arg.ID, arg.Date) return err } -const updateSubmittalCompletionDateOrWarningSent = `-- name: UpdateSubmittalCompletionDateOrWarningSent :exec +const submittalUpdateCompletionDateOrWarningSent = `-- name: SubmittalUpdateCompletionDateOrWarningSent :exec update submittal set submittal_status_id = $2, completion_date = $3, @@ -109,15 +109,15 @@ update submittal set where id = $1 ` -type UpdateSubmittalCompletionDateOrWarningSentParams struct { +type SubmittalUpdateCompletionDateOrWarningSentParams struct { ID uuid.UUID `json:"id"` SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` CompletionDate *time.Time `json:"completion_date"` WarningSent bool `json:"warning_sent"` } -func (q *Queries) UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, arg UpdateSubmittalCompletionDateOrWarningSentParams) error { - _, err := q.db.Exec(ctx, updateSubmittalCompletionDateOrWarningSent, +func (q *Queries) SubmittalUpdateCompletionDateOrWarningSent(ctx context.Context, arg SubmittalUpdateCompletionDateOrWarningSentParams) error { + _, err := 
q.db.Exec(ctx, submittalUpdateCompletionDateOrWarningSent, arg.ID, arg.SubmittalStatusID, arg.CompletionDate, diff --git a/api/internal/db/alert_config.sql_gen.go b/api/internal/db/alert_config.sql_gen.go index 1dabfc9a..d6ecc891 100644 --- a/api/internal/db/alert_config.sql_gen.go +++ b/api/internal/db/alert_config.sql_gen.go @@ -12,21 +12,7 @@ import ( "github.com/google/uuid" ) -const assignInstrumentToAlertConfig = `-- name: AssignInstrumentToAlertConfig :exec -insert into alert_config_instrument (alert_config_id, instrument_id) values ($1, $2) -` - -type AssignInstrumentToAlertConfigParams struct { - AlertConfigID uuid.UUID `json:"alert_config_id"` - InstrumentID uuid.UUID `json:"instrument_id"` -} - -func (q *Queries) AssignInstrumentToAlertConfig(ctx context.Context, arg AssignInstrumentToAlertConfigParams) error { - _, err := q.db.Exec(ctx, assignInstrumentToAlertConfig, arg.AlertConfigID, arg.InstrumentID) - return err -} - -const createAlertConfig = `-- name: CreateAlertConfig :one +const alertConfigCreate = `-- name: AlertConfigCreate :one insert into alert_config ( project_id, name, @@ -43,7 +29,7 @@ insert into alert_config ( returning id ` -type CreateAlertConfigParams struct { +type AlertConfigCreateParams struct { ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Body string `json:"body"` @@ -57,8 +43,8 @@ type CreateAlertConfigParams struct { CreateDate time.Time `json:"create_date"` } -func (q *Queries) CreateAlertConfig(ctx context.Context, arg CreateAlertConfigParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, createAlertConfig, +func (q *Queries) AlertConfigCreate(ctx context.Context, arg AlertConfigCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, alertConfigCreate, arg.ProjectID, arg.Name, arg.Body, @@ -76,33 +62,21 @@ func (q *Queries) CreateAlertConfig(ctx context.Context, arg CreateAlertConfigPa return id, err } -const createNextSubmittalFromExistingAlertConfigDate = `-- name: 
CreateNextSubmittalFromExistingAlertConfigDate :exec -insert into submittal (alert_config_id, due_date) -select ac.id, ac.create_date + ac.schedule_interval -from alert_config ac -where ac.id = $1 -` - -func (q *Queries) CreateNextSubmittalFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, createNextSubmittalFromExistingAlertConfigDate, id) - return err -} - -const deleteAlertConfig = `-- name: DeleteAlertConfig :exec +const alertConfigDelete = `-- name: AlertConfigDelete :exec update alert_config set deleted=true where id = $1 ` -func (q *Queries) DeleteAlertConfig(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteAlertConfig, id) +func (q *Queries) AlertConfigDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, alertConfigDelete, id) return err } -const getAlertConfig = `-- name: GetAlertConfig :one +const alertConfigGet = `-- name: AlertConfigGet :one select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where id = $1 ` -func (q *Queries) GetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) { - row := q.db.QueryRow(ctx, getAlertConfig, id) +func (q *Queries) AlertConfigGet(ctx context.Context, id uuid.UUID) (VAlertConfig, error) { + row := q.db.QueryRow(ctx, alertConfigGet, id) var i VAlertConfig err := row.Scan( &i.ID, @@ -132,7 +106,30 @@ func (q *Queries) GetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfi return i, err } -const listAlertConfigsForInstrument = `-- name: ListAlertConfigsForInstrument :many +const alertConfigInstrumentCreateAssignment = `-- name: AlertConfigInstrumentCreateAssignment :exec +insert into 
alert_config_instrument (alert_config_id, instrument_id) values ($1, $2) +` + +type AlertConfigInstrumentCreateAssignmentParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) AlertConfigInstrumentCreateAssignment(ctx context.Context, arg AlertConfigInstrumentCreateAssignmentParams) error { + _, err := q.db.Exec(ctx, alertConfigInstrumentCreateAssignment, arg.AlertConfigID, arg.InstrumentID) + return err +} + +const alertConfigInstrumentDeleteAssignmentsForAlertConfig = `-- name: AlertConfigInstrumentDeleteAssignmentsForAlertConfig :exec +delete from alert_config_instrument where alert_config_id = $1 +` + +func (q *Queries) AlertConfigInstrumentDeleteAssignmentsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, alertConfigInstrumentDeleteAssignmentsForAlertConfig, alertConfigID) + return err +} + +const alertConfigListForInstrument = `-- name: AlertConfigListForInstrument :many select t.id, t.name, t.body, t.creator, t.creator_username, t.create_date, t.updater, t.updater_username, t.update_date, t.project_id, t.project_name, t.alert_type_id, t.alert_type, t.start_date, t.schedule_interval, t.mute_consecutive_alerts, t.remind_interval, t.warning_interval, t.last_checked, t.last_reminded, t.create_next_submittal_from, t.instruments, t.alert_email_subscriptions from v_alert_config t inner join alert_config_instrument aci on t.id = aci.alert_config_id @@ -140,8 +137,8 @@ where aci.instrument_id = $1 order by t.name ` -func (q *Queries) ListAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) { - rows, err := q.db.Query(ctx, listAlertConfigsForInstrument, instrumentID) +func (q *Queries) AlertConfigListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListForInstrument, instrumentID) if err != nil { return nil, err } @@ 
-184,15 +181,15 @@ func (q *Queries) ListAlertConfigsForInstrument(ctx context.Context, instrumentI return items, nil } -const listAlertConfigsForProject = `-- name: ListAlertConfigsForProject :many +const alertConfigListForProject = `-- name: AlertConfigListForProject :many select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where project_id = $1 order by name ` -func (q *Queries) ListAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) { - rows, err := q.db.Query(ctx, listAlertConfigsForProject, projectID) +func (q *Queries) AlertConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListForProject, projectID) if err != nil { return nil, err } @@ -235,7 +232,7 @@ func (q *Queries) ListAlertConfigsForProject(ctx context.Context, projectID uuid return items, nil } -const listAlertConfigsForProjectAlertType = `-- name: ListAlertConfigsForProjectAlertType :many +const alertConfigListForProjectAlertType = `-- name: AlertConfigListForProjectAlertType :many select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where project_id = $1 @@ -243,13 +240,13 @@ and alert_type_id = $2 order by name ` -type ListAlertConfigsForProjectAlertTypeParams struct { +type AlertConfigListForProjectAlertTypeParams struct { ProjectID uuid.UUID `json:"project_id"` 
AlertTypeID uuid.UUID `json:"alert_type_id"` } -func (q *Queries) ListAlertConfigsForProjectAlertType(ctx context.Context, arg ListAlertConfigsForProjectAlertTypeParams) ([]VAlertConfig, error) { - rows, err := q.db.Query(ctx, listAlertConfigsForProjectAlertType, arg.ProjectID, arg.AlertTypeID) +func (q *Queries) AlertConfigListForProjectAlertType(ctx context.Context, arg AlertConfigListForProjectAlertTypeParams) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListForProjectAlertType, arg.ProjectID, arg.AlertTypeID) if err != nil { return nil, err } @@ -292,16 +289,7 @@ func (q *Queries) ListAlertConfigsForProjectAlertType(ctx context.Context, arg L return items, nil } -const unassignAllInstrumentsFromAlertConfig = `-- name: UnassignAllInstrumentsFromAlertConfig :exec -delete from alert_config_instrument where alert_config_id = $1 -` - -func (q *Queries) UnassignAllInstrumentsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, unassignAllInstrumentsFromAlertConfig, alertConfigID) - return err -} - -const updateAlertConfig = `-- name: UpdateAlertConfig :exec +const alertConfigUpdate = `-- name: AlertConfigUpdate :exec update alert_config set name = $3, body = $4, @@ -315,7 +303,7 @@ update alert_config set where id = $1 and project_id = $2 ` -type UpdateAlertConfigParams struct { +type AlertConfigUpdateParams struct { ID uuid.UUID `json:"id"` ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` @@ -329,8 +317,8 @@ type UpdateAlertConfigParams struct { UpdateDate *time.Time `json:"update_date"` } -func (q *Queries) UpdateAlertConfig(ctx context.Context, arg UpdateAlertConfigParams) error { - _, err := q.db.Exec(ctx, updateAlertConfig, +func (q *Queries) AlertConfigUpdate(ctx context.Context, arg AlertConfigUpdateParams) error { + _, err := q.db.Exec(ctx, alertConfigUpdate, arg.ID, arg.ProjectID, arg.Name, @@ -346,7 +334,19 @@ func (q *Queries) UpdateAlertConfig(ctx context.Context, arg 
UpdateAlertConfigPa return err } -const updateFutureSubmittalForAlertConfig = `-- name: UpdateFutureSubmittalForAlertConfig :one +const submittalCreateNextFromExistingAlertConfigDate = `-- name: SubmittalCreateNextFromExistingAlertConfigDate :exec +insert into submittal (alert_config_id, due_date) +select ac.id, ac.create_date + ac.schedule_interval +from alert_config ac +where ac.id = $1 +` + +func (q *Queries) SubmittalCreateNextFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, submittalCreateNextFromExistingAlertConfigDate, id) + return err +} + +const submittalUpdateNextForAlertConfig = `-- name: SubmittalUpdateNextForAlertConfig :one update submittal set due_date = sq.new_due_date from ( @@ -365,8 +365,8 @@ and sq.new_due_date > now() returning id ` -func (q *Queries) UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, updateFutureSubmittalForAlertConfig, alertConfigID) +func (q *Queries) SubmittalUpdateNextForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, submittalUpdateNextForAlertConfig, alertConfigID) var id uuid.UUID err := row.Scan(&id) return id, err diff --git a/api/internal/db/alert_measurement_check.sql_gen.go b/api/internal/db/alert_measurement_check.sql_gen.go index cee589ba..d2fe6ebe 100644 --- a/api/internal/db/alert_measurement_check.sql_gen.go +++ b/api/internal/db/alert_measurement_check.sql_gen.go @@ -9,7 +9,7 @@ import ( "context" ) -const listIncompleteEvaluationSubmittals = `-- name: ListIncompleteEvaluationSubmittals :many +const submittalListIncompleteEvaluation = `-- name: SubmittalListIncompleteEvaluation :many select alert_config_id, submittal_id, submittal, should_warn, should_alert, should_remind from v_alert_check_evaluation_submittal where submittal_id = any( select id from submittal @@ -17,8 +17,8 @@ where submittal_id = any( ) ` -func (q 
*Queries) ListIncompleteEvaluationSubmittals(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) { - rows, err := q.db.Query(ctx, listIncompleteEvaluationSubmittals) +func (q *Queries) SubmittalListIncompleteEvaluation(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListIncompleteEvaluation) if err != nil { return nil, err } @@ -44,7 +44,7 @@ func (q *Queries) ListIncompleteEvaluationSubmittals(ctx context.Context) ([]VAl return items, nil } -const listIncompleteMeasurementSubmittals = `-- name: ListIncompleteMeasurementSubmittals :many +const submittalListIncompleteMeasurement = `-- name: SubmittalListIncompleteMeasurement :many select alert_config_id, submittal_id, submittal, should_warn, should_alert, should_remind, affected_timeseries from v_alert_check_measurement_submittal where submittal_id = any( select id from submittal @@ -52,8 +52,8 @@ where submittal_id = any( ) ` -func (q *Queries) ListIncompleteMeasurementSubmittals(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) { - rows, err := q.db.Query(ctx, listIncompleteMeasurementSubmittals) +func (q *Queries) SubmittalListIncompleteMeasurement(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListIncompleteMeasurement) if err != nil { return nil, err } diff --git a/api/internal/db/alert_subscription.sql_gen.go b/api/internal/db/alert_subscription.sql_gen.go index b45fe8df..81499c0f 100644 --- a/api/internal/db/alert_subscription.sql_gen.go +++ b/api/internal/db/alert_subscription.sql_gen.go @@ -11,104 +11,104 @@ import ( "github.com/google/uuid" ) -const createAlertEmailSubscription = `-- name: CreateAlertEmailSubscription :exec +const alertEmailSubscriptionCreate = `-- name: AlertEmailSubscriptionCreate :exec insert into alert_email_subscription (alert_config_id, email_id) values ($1,$2) on conflict on constraint email_unique_alert_config do nothing ` -type 
CreateAlertEmailSubscriptionParams struct { +type AlertEmailSubscriptionCreateParams struct { AlertConfigID uuid.UUID `json:"alert_config_id"` EmailID uuid.UUID `json:"email_id"` } -func (q *Queries) CreateAlertEmailSubscription(ctx context.Context, arg CreateAlertEmailSubscriptionParams) error { - _, err := q.db.Exec(ctx, createAlertEmailSubscription, arg.AlertConfigID, arg.EmailID) +func (q *Queries) AlertEmailSubscriptionCreate(ctx context.Context, arg AlertEmailSubscriptionCreateParams) error { + _, err := q.db.Exec(ctx, alertEmailSubscriptionCreate, arg.AlertConfigID, arg.EmailID) return err } -const createAlertProfileSubscription = `-- name: CreateAlertProfileSubscription :exec -insert into alert_profile_subscription (alert_config_id, profile_id) values ($1,$2) -on conflict on constraint profile_unique_alert_config do nothing +const alertEmailSubscriptionDelete = `-- name: AlertEmailSubscriptionDelete :exec +delete from alert_email_subscription where alert_config_id = $1 and email_id = $2 ` -type CreateAlertProfileSubscriptionParams struct { +type AlertEmailSubscriptionDeleteParams struct { AlertConfigID uuid.UUID `json:"alert_config_id"` - ProfileID uuid.UUID `json:"profile_id"` + EmailID uuid.UUID `json:"email_id"` } -func (q *Queries) CreateAlertProfileSubscription(ctx context.Context, arg CreateAlertProfileSubscriptionParams) error { - _, err := q.db.Exec(ctx, createAlertProfileSubscription, arg.AlertConfigID, arg.ProfileID) +func (q *Queries) AlertEmailSubscriptionDelete(ctx context.Context, arg AlertEmailSubscriptionDeleteParams) error { + _, err := q.db.Exec(ctx, alertEmailSubscriptionDelete, arg.AlertConfigID, arg.EmailID) return err } -const createAlertProfileSubscriptionOnAnyConflictDoNothing = `-- name: CreateAlertProfileSubscriptionOnAnyConflictDoNothing :exec -insert into alert_profile_subscription (alert_config_id, profile_id) -values ($1, $2) -on conflict do nothing +const alertEmailSubscritpionDeleteForAlertConfig = `-- name: 
AlertEmailSubscritpionDeleteForAlertConfig :exec +delete from alert_email_subscription where alert_config_id = $1 ` -type CreateAlertProfileSubscriptionOnAnyConflictDoNothingParams struct { - AlertConfigID uuid.UUID `json:"alert_config_id"` - ProfileID uuid.UUID `json:"profile_id"` -} - -func (q *Queries) CreateAlertProfileSubscriptionOnAnyConflictDoNothing(ctx context.Context, arg CreateAlertProfileSubscriptionOnAnyConflictDoNothingParams) error { - _, err := q.db.Exec(ctx, createAlertProfileSubscriptionOnAnyConflictDoNothing, arg.AlertConfigID, arg.ProfileID) +func (q *Queries) AlertEmailSubscritpionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, alertEmailSubscritpionDeleteForAlertConfig, alertConfigID) return err } -const deleteAlertEmailSubscription = `-- name: DeleteAlertEmailSubscription :exec -delete from alert_email_subscription where alert_config_id = $1 and email_id = $2 +const alertProfileSubscriptionCreate = `-- name: AlertProfileSubscriptionCreate :exec +insert into alert_profile_subscription (alert_config_id, profile_id) values ($1,$2) +on conflict on constraint profile_unique_alert_config do nothing ` -type DeleteAlertEmailSubscriptionParams struct { +type AlertProfileSubscriptionCreateParams struct { AlertConfigID uuid.UUID `json:"alert_config_id"` - EmailID uuid.UUID `json:"email_id"` + ProfileID uuid.UUID `json:"profile_id"` } -func (q *Queries) DeleteAlertEmailSubscription(ctx context.Context, arg DeleteAlertEmailSubscriptionParams) error { - _, err := q.db.Exec(ctx, deleteAlertEmailSubscription, arg.AlertConfigID, arg.EmailID) +func (q *Queries) AlertProfileSubscriptionCreate(ctx context.Context, arg AlertProfileSubscriptionCreateParams) error { + _, err := q.db.Exec(ctx, alertProfileSubscriptionCreate, arg.AlertConfigID, arg.ProfileID) return err } -const deleteAlertProfileSubscription = `-- name: DeleteAlertProfileSubscription :exec -delete from alert_profile_subscription where 
alert_config_id = $1 and profile_id = $2 +const alertProfileSubscriptionCreateOnAnyConflictDoNothing = `-- name: AlertProfileSubscriptionCreateOnAnyConflictDoNothing :exec +insert into alert_profile_subscription (alert_config_id, profile_id) +values ($1, $2) +on conflict do nothing ` -type DeleteAlertProfileSubscriptionParams struct { +type AlertProfileSubscriptionCreateOnAnyConflictDoNothingParams struct { AlertConfigID uuid.UUID `json:"alert_config_id"` ProfileID uuid.UUID `json:"profile_id"` } -func (q *Queries) DeleteAlertProfileSubscription(ctx context.Context, arg DeleteAlertProfileSubscriptionParams) error { - _, err := q.db.Exec(ctx, deleteAlertProfileSubscription, arg.AlertConfigID, arg.ProfileID) +func (q *Queries) AlertProfileSubscriptionCreateOnAnyConflictDoNothing(ctx context.Context, arg AlertProfileSubscriptionCreateOnAnyConflictDoNothingParams) error { + _, err := q.db.Exec(ctx, alertProfileSubscriptionCreateOnAnyConflictDoNothing, arg.AlertConfigID, arg.ProfileID) return err } -const deleteAllAlertEmailSubscritpionsForAlertConfig = `-- name: DeleteAllAlertEmailSubscritpionsForAlertConfig :exec -delete from alert_email_subscription where alert_config_id = $1 +const alertProfileSubscriptionDelete = `-- name: AlertProfileSubscriptionDelete :exec +delete from alert_profile_subscription where alert_config_id = $1 and profile_id = $2 ` -func (q *Queries) DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteAllAlertEmailSubscritpionsForAlertConfig, alertConfigID) +type AlertProfileSubscriptionDeleteParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) AlertProfileSubscriptionDelete(ctx context.Context, arg AlertProfileSubscriptionDeleteParams) error { + _, err := q.db.Exec(ctx, alertProfileSubscriptionDelete, arg.AlertConfigID, arg.ProfileID) return err } -const 
deleteAllAlertProfileSubscritpionsForAlertConfig = `-- name: DeleteAllAlertProfileSubscritpionsForAlertConfig :exec +const alertProfileSubscritpionDeleteForAlertConfig = `-- name: AlertProfileSubscritpionDeleteForAlertConfig :exec delete from alert_profile_subscription where alert_config_id = $1 ` -func (q *Queries) DeleteAllAlertProfileSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteAllAlertProfileSubscritpionsForAlertConfig, alertConfigID) +func (q *Queries) AlertProfileSubscritpionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, alertProfileSubscritpionDeleteForAlertConfig, alertConfigID) return err } -const getAlertSubscription = `-- name: GetAlertSubscription :one +const alertSubscriptionGet = `-- name: AlertSubscriptionGet :one select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where id = $1 ` -func (q *Queries) GetAlertSubscription(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) { - row := q.db.QueryRow(ctx, getAlertSubscription, id) +func (q *Queries) AlertSubscriptionGet(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) { + row := q.db.QueryRow(ctx, alertSubscriptionGet, id) var i AlertProfileSubscription err := row.Scan( &i.ID, @@ -120,17 +120,17 @@ func (q *Queries) GetAlertSubscription(ctx context.Context, id uuid.UUID) (Alert return i, err } -const getAlertSubscriptionForAlertConfig = `-- name: GetAlertSubscriptionForAlertConfig :one +const alertSubscriptionGetForAlertConfigProfile = `-- name: AlertSubscriptionGetForAlertConfigProfile :one select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where alert_config_id = $1 and profile_id = $2 ` -type GetAlertSubscriptionForAlertConfigParams struct { +type AlertSubscriptionGetForAlertConfigProfileParams struct { AlertConfigID uuid.UUID `json:"alert_config_id"` ProfileID 
uuid.UUID `json:"profile_id"` } -func (q *Queries) GetAlertSubscriptionForAlertConfig(ctx context.Context, arg GetAlertSubscriptionForAlertConfigParams) (AlertProfileSubscription, error) { - row := q.db.QueryRow(ctx, getAlertSubscriptionForAlertConfig, arg.AlertConfigID, arg.ProfileID) +func (q *Queries) AlertSubscriptionGetForAlertConfigProfile(ctx context.Context, arg AlertSubscriptionGetForAlertConfigProfileParams) (AlertProfileSubscription, error) { + row := q.db.QueryRow(ctx, alertSubscriptionGetForAlertConfigProfile, arg.AlertConfigID, arg.ProfileID) var i AlertProfileSubscription err := row.Scan( &i.ID, @@ -142,12 +142,12 @@ func (q *Queries) GetAlertSubscriptionForAlertConfig(ctx context.Context, arg Ge return i, err } -const listMyAlertSubscriptions = `-- name: ListMyAlertSubscriptions :many +const alertSubscriptionListForProfile = `-- name: AlertSubscriptionListForProfile :many select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where profile_id = $1 ` -func (q *Queries) ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) { - rows, err := q.db.Query(ctx, listMyAlertSubscriptions, profileID) +func (q *Queries) AlertSubscriptionListForProfile(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) { + rows, err := q.db.Query(ctx, alertSubscriptionListForProfile, profileID) if err != nil { return nil, err } @@ -172,47 +172,19 @@ func (q *Queries) ListMyAlertSubscriptions(ctx context.Context, profileID uuid.U return items, nil } -const registerEmail = `-- name: RegisterEmail :one -with e as ( - insert into email (email) values ($1) - on conflict on constraint unique_email do nothing - returning id -) -select id from e -union -select id from email where email = $1 -limit 1 -` - -func (q *Queries) RegisterEmail(ctx context.Context, email string) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, registerEmail, email) - var id uuid.UUID - err := 
row.Scan(&id) - return id, err -} - -const unregisterEmail = `-- name: UnregisterEmail :exec -delete from email where id = $1 -` - -func (q *Queries) UnregisterEmail(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, unregisterEmail, id) - return err -} - -const updateMyAlertSubscription = `-- name: UpdateMyAlertSubscription :exec +const alertSubscriptionUpdateForProfile = `-- name: AlertSubscriptionUpdateForProfile :exec update alert_profile_subscription set mute_ui=$1, mute_notify=$2 where alert_config_id=$3 and profile_id=$4 ` -type UpdateMyAlertSubscriptionParams struct { +type AlertSubscriptionUpdateForProfileParams struct { MuteUi bool `json:"mute_ui"` MuteNotify bool `json:"mute_notify"` AlertConfigID uuid.UUID `json:"alert_config_id"` ProfileID uuid.UUID `json:"profile_id"` } -func (q *Queries) UpdateMyAlertSubscription(ctx context.Context, arg UpdateMyAlertSubscriptionParams) error { - _, err := q.db.Exec(ctx, updateMyAlertSubscription, +func (q *Queries) AlertSubscriptionUpdateForProfile(ctx context.Context, arg AlertSubscriptionUpdateForProfileParams) error { + _, err := q.db.Exec(ctx, alertSubscriptionUpdateForProfile, arg.MuteUi, arg.MuteNotify, arg.AlertConfigID, @@ -220,3 +192,31 @@ func (q *Queries) UpdateMyAlertSubscription(ctx context.Context, arg UpdateMyAle ) return err } + +const emailDelete = `-- name: EmailDelete :exec +delete from email where id = $1 +` + +func (q *Queries) EmailDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, emailDelete, id) + return err +} + +const emailGetOrCreate = `-- name: EmailGetOrCreate :one +with e as ( + insert into email (email) values ($1) + on conflict on constraint unique_email do nothing + returning id +) +select id from e +union +select id from email where email = $1 +limit 1 +` + +func (q *Queries) EmailGetOrCreate(ctx context.Context, email string) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, emailGetOrCreate, email) + var id uuid.UUID + err := 
row.Scan(&id) + return id, err +} diff --git a/api/internal/db/autocomplete.sql_gen.go b/api/internal/db/autocomplete.sql_gen.go index e45d04ce..82d99c90 100644 --- a/api/internal/db/autocomplete.sql_gen.go +++ b/api/internal/db/autocomplete.sql_gen.go @@ -11,34 +11,34 @@ import ( "github.com/google/uuid" ) -const listEmailAutocomplete = `-- name: ListEmailAutocomplete :many +const emailAutocompleteList = `-- name: EmailAutocompleteList :many select id, user_type, username, email from v_email_autocomplete where username_email ilike '%'||$1||'%' limit $2 ` -type ListEmailAutocompleteParams struct { - Column1 *string `json:"column_1"` - Limit int32 `json:"limit"` +type EmailAutocompleteListParams struct { + SearchKeyword *string `json:"search_keyword"` + ResultLimit int32 `json:"result_limit"` } -type ListEmailAutocompleteRow struct { +type EmailAutocompleteListRow struct { ID uuid.UUID `json:"id"` UserType string `json:"user_type"` Username interface{} `json:"username"` Email string `json:"email"` } -func (q *Queries) ListEmailAutocomplete(ctx context.Context, arg ListEmailAutocompleteParams) ([]ListEmailAutocompleteRow, error) { - rows, err := q.db.Query(ctx, listEmailAutocomplete, arg.Column1, arg.Limit) +func (q *Queries) EmailAutocompleteList(ctx context.Context, arg EmailAutocompleteListParams) ([]EmailAutocompleteListRow, error) { + rows, err := q.db.Query(ctx, emailAutocompleteList, arg.SearchKeyword, arg.ResultLimit) if err != nil { return nil, err } defer rows.Close() - items := []ListEmailAutocompleteRow{} + items := []EmailAutocompleteListRow{} for rows.Next() { - var i ListEmailAutocompleteRow + var i EmailAutocompleteListRow if err := rows.Scan( &i.ID, &i.UserType, diff --git a/api/internal/db/aware.sql_gen.go b/api/internal/db/aware.sql_gen.go index 5ec50a8e..9b468c0a 100644 --- a/api/internal/db/aware.sql_gen.go +++ b/api/internal/db/aware.sql_gen.go @@ -11,40 +11,26 @@ import ( "github.com/google/uuid" ) -const createAwarePlatform = `-- name: 
CreateAwarePlatform :exec -insert into aware_platform (instrument_id, aware_id) values ($1, $2) -` - -type CreateAwarePlatformParams struct { - InstrumentID *uuid.UUID `json:"instrument_id"` - AwareID uuid.UUID `json:"aware_id"` -} - -func (q *Queries) CreateAwarePlatform(ctx context.Context, arg CreateAwarePlatformParams) error { - _, err := q.db.Exec(ctx, createAwarePlatform, arg.InstrumentID, arg.AwareID) - return err -} - -const listAwareParameters = `-- name: ListAwareParameters :many +const awareParameterList = `-- name: AwareParameterList :many select id, key, parameter_id, unit_id from aware_parameter ` -type ListAwareParametersRow struct { +type AwareParameterListRow struct { ID uuid.UUID `json:"id"` Key string `json:"key"` ParameterID uuid.UUID `json:"parameter_id"` UnitID uuid.UUID `json:"unit_id"` } -func (q *Queries) ListAwareParameters(ctx context.Context) ([]ListAwareParametersRow, error) { - rows, err := q.db.Query(ctx, listAwareParameters) +func (q *Queries) AwareParameterList(ctx context.Context) ([]AwareParameterListRow, error) { + rows, err := q.db.Query(ctx, awareParameterList) if err != nil { return nil, err } defer rows.Close() - items := []ListAwareParametersRow{} + items := []AwareParameterListRow{} for rows.Next() { - var i ListAwareParametersRow + var i AwareParameterListRow if err := rows.Scan( &i.ID, &i.Key, @@ -61,14 +47,28 @@ func (q *Queries) ListAwareParameters(ctx context.Context) ([]ListAwareParameter return items, nil } -const listAwarePlatformParameterEnabled = `-- name: ListAwarePlatformParameterEnabled :many +const awarePlatformCreate = `-- name: AwarePlatformCreate :exec +insert into aware_platform (instrument_id, aware_id) values ($1, $2) +` + +type AwarePlatformCreateParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` +} + +func (q *Queries) AwarePlatformCreate(ctx context.Context, arg AwarePlatformCreateParams) error { + _, err := q.db.Exec(ctx, awarePlatformCreate, 
arg.InstrumentID, arg.AwareID) + return err +} + +const awarePlatformParameterListEnabled = `-- name: AwarePlatformParameterListEnabled :many select instrument_id, aware_id, aware_parameter_key, timeseries_id from v_aware_platform_parameter_enabled order by aware_id, aware_parameter_key ` -func (q *Queries) ListAwarePlatformParameterEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) { - rows, err := q.db.Query(ctx, listAwarePlatformParameterEnabled) +func (q *Queries) AwarePlatformParameterListEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) { + rows, err := q.db.Query(ctx, awarePlatformParameterListEnabled) if err != nil { return nil, err } diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go index 550882dc..f2c5eef9 100644 --- a/api/internal/db/batch.go +++ b/api/internal/db/batch.go @@ -18,36 +18,29 @@ var ( ErrBatchAlreadyClosed = errors.New("batch already closed") ) -const assignInstrumentToProjectBatch = `-- name: AssignInstrumentToProjectBatch :batchexec -insert into project_instrument (project_id, instrument_id) values ($1, $2) -on conflict on constraint project_instrument_project_id_instrument_id_key do nothing +const alertCreateBatch = `-- name: AlertCreateBatch :batchexec +insert into alert (alert_config_id) values ($1) ` -type AssignInstrumentToProjectBatchBatchResults struct { +type AlertCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type AssignInstrumentToProjectBatchParams struct { - ProjectID uuid.UUID `json:"project_id"` - InstrumentID uuid.UUID `json:"instrument_id"` -} - -func (q *Queries) AssignInstrumentToProjectBatch(ctx context.Context, arg []AssignInstrumentToProjectBatchParams) *AssignInstrumentToProjectBatchBatchResults { +func (q *Queries) AlertCreateBatch(ctx context.Context, alertConfigID []uuid.UUID) *AlertCreateBatchBatchResults { batch := &pgx.Batch{} - for _, a := range arg { + for _, a := range alertConfigID { vals := []interface{}{ - a.ProjectID, - 
a.InstrumentID, + a, } - batch.Queue(assignInstrumentToProjectBatch, vals...) + batch.Queue(alertCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &AssignInstrumentToProjectBatchBatchResults{br, len(arg), false} + return &AlertCreateBatchBatchResults{br, len(alertConfigID), false} } -func (b *AssignInstrumentToProjectBatchBatchResults) Exec(f func(int, error)) { +func (b *AlertCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -63,40 +56,40 @@ func (b *AssignInstrumentToProjectBatchBatchResults) Exec(f func(int, error)) { } } -func (b *AssignInstrumentToProjectBatchBatchResults) Close() error { +func (b *AlertCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const assignReportConfigPlotConfigBatch = `-- name: AssignReportConfigPlotConfigBatch :batchexec -insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2) +const awarePlatformCreateBatch = `-- name: AwarePlatformCreateBatch :batchexec +insert into aware_platform (instrument_id, aware_id) values ($1, $2) ` -type AssignReportConfigPlotConfigBatchBatchResults struct { +type AwarePlatformCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type AssignReportConfigPlotConfigBatchParams struct { - ReportConfigID uuid.UUID `json:"report_config_id"` - PlotConfigID uuid.UUID `json:"plot_config_id"` +type AwarePlatformCreateBatchParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` } -func (q *Queries) AssignReportConfigPlotConfigBatch(ctx context.Context, arg []AssignReportConfigPlotConfigBatchParams) *AssignReportConfigPlotConfigBatchBatchResults { +func (q *Queries) AwarePlatformCreateBatch(ctx context.Context, arg []AwarePlatformCreateBatchParams) *AwarePlatformCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.ReportConfigID, - a.PlotConfigID, + 
a.InstrumentID, + a.AwareID, } - batch.Queue(assignReportConfigPlotConfigBatch, vals...) + batch.Queue(awarePlatformCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &AssignReportConfigPlotConfigBatchBatchResults{br, len(arg), false} + return &AwarePlatformCreateBatchBatchResults{br, len(arg), false} } -func (b *AssignReportConfigPlotConfigBatchBatchResults) Exec(f func(int, error)) { +func (b *AwarePlatformCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -112,34 +105,47 @@ func (b *AssignReportConfigPlotConfigBatchBatchResults) Exec(f func(int, error)) } } -func (b *AssignReportConfigPlotConfigBatchBatchResults) Close() error { +func (b *AwarePlatformCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createAlerts = `-- name: CreateAlerts :batchexec -insert into alert (alert_config_id) values ($1) +const dataloggerErrorCreateBatch = `-- name: DataloggerErrorCreateBatch :batchexec +insert into datalogger_error (datalogger_table_id, error_message) +select dt.id, $3 from datalogger_table dt +where dt.datalogger_id = $1 and dt.table_name = $2 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +) ` -type CreateAlertsBatchResults struct { +type DataloggerErrorCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -func (q *Queries) CreateAlerts(ctx context.Context, alertConfigID []uuid.UUID) *CreateAlertsBatchResults { +type DataloggerErrorCreateBatchParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` + ErrorMessage *string `json:"error_message"` +} + +func (q *Queries) DataloggerErrorCreateBatch(ctx context.Context, arg []DataloggerErrorCreateBatchParams) *DataloggerErrorCreateBatchBatchResults { batch := &pgx.Batch{} - for _, a := range alertConfigID { + for _, a := range arg { vals := []interface{}{ - a, + a.DataloggerID, + 
a.TableName, + a.ErrorMessage, } - batch.Queue(createAlerts, vals...) + batch.Queue(dataloggerErrorCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateAlertsBatchResults{br, len(alertConfigID), false} + return &DataloggerErrorCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateAlertsBatchResults) Exec(f func(int, error)) { +func (b *DataloggerErrorCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -155,40 +161,40 @@ func (b *CreateAlertsBatchResults) Exec(f func(int, error)) { } } -func (b *CreateAlertsBatchResults) Close() error { +func (b *DataloggerErrorCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createAwarePlatformBatch = `-- name: CreateAwarePlatformBatch :batchexec -insert into aware_platform (instrument_id, aware_id) values ($1, $2) +const evaluationInstrumentCreateBatch = `-- name: EvaluationInstrumentCreateBatch :batchexec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) ` -type CreateAwarePlatformBatchBatchResults struct { +type EvaluationInstrumentCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateAwarePlatformBatchParams struct { +type EvaluationInstrumentCreateBatchParams struct { + EvaluationID *uuid.UUID `json:"evaluation_id"` InstrumentID *uuid.UUID `json:"instrument_id"` - AwareID uuid.UUID `json:"aware_id"` } -func (q *Queries) CreateAwarePlatformBatch(ctx context.Context, arg []CreateAwarePlatformBatchParams) *CreateAwarePlatformBatchBatchResults { +func (q *Queries) EvaluationInstrumentCreateBatch(ctx context.Context, arg []EvaluationInstrumentCreateBatchParams) *EvaluationInstrumentCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ + a.EvaluationID, a.InstrumentID, - a.AwareID, } - batch.Queue(createAwarePlatformBatch, vals...) + batch.Queue(evaluationInstrumentCreateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreateAwarePlatformBatchBatchResults{br, len(arg), false} + return &EvaluationInstrumentCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateAwarePlatformBatchBatchResults) Exec(f func(int, error)) { +func (b *EvaluationInstrumentCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -204,40 +210,45 @@ func (b *CreateAwarePlatformBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateAwarePlatformBatchBatchResults) Close() error { +func (b *EvaluationInstrumentCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createEvaluationInstrumentsBatch = `-- name: CreateEvaluationInstrumentsBatch :batchexec -insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) +const inclOptsCreateBatch = `-- name: InclOptsCreateBatch :batchexec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) ` -type CreateEvaluationInstrumentsBatchBatchResults struct { +type InclOptsCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateEvaluationInstrumentsBatchParams struct { - EvaluationID *uuid.UUID `json:"evaluation_id"` - InstrumentID *uuid.UUID `json:"instrument_id"` +type InclOptsCreateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` } -func (q *Queries) CreateEvaluationInstrumentsBatch(ctx context.Context, arg []CreateEvaluationInstrumentsBatchParams) *CreateEvaluationInstrumentsBatchBatchResults { +func (q *Queries) InclOptsCreateBatch(ctx context.Context, arg []InclOptsCreateBatchParams) *InclOptsCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.EvaluationID, 
a.InstrumentID, + a.NumSegments, + a.BottomElevationTimeseriesID, + a.InitialTime, } - batch.Queue(createEvaluationInstrumentsBatch, vals...) + batch.Queue(inclOptsCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateEvaluationInstrumentsBatchBatchResults{br, len(arg), false} + return &InclOptsCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateEvaluationInstrumentsBatchBatchResults) Exec(f func(int, error)) { +func (b *InclOptsCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -253,45 +264,45 @@ func (b *CreateEvaluationInstrumentsBatchBatchResults) Exec(f func(int, error)) } } -func (b *CreateEvaluationInstrumentsBatchBatchResults) Close() error { +func (b *InclOptsCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createInclOptsBatch = `-- name: CreateInclOptsBatch :batchexec -insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) -values ($1, $2, $3, $4) +const inclOptsUpdateBatch = `-- name: InclOptsUpdateBatch :batchexec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 ` -type CreateInclOptsBatchBatchResults struct { +type InclOptsUpdateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateInclOptsBatchParams struct { +type InclOptsUpdateBatchParams struct { InstrumentID uuid.UUID `json:"instrument_id"` - NumSegments int32 `json:"num_segments"` BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` InitialTime *time.Time `json:"initial_time"` } -func (q *Queries) CreateInclOptsBatch(ctx context.Context, arg []CreateInclOptsBatchParams) *CreateInclOptsBatchBatchResults { +func (q *Queries) InclOptsUpdateBatch(ctx context.Context, arg []InclOptsUpdateBatchParams) *InclOptsUpdateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ a.InstrumentID, - 
a.NumSegments, a.BottomElevationTimeseriesID, a.InitialTime, } - batch.Queue(createInclOptsBatch, vals...) + batch.Queue(inclOptsUpdateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateInclOptsBatchBatchResults{br, len(arg), false} + return &InclOptsUpdateBatchBatchResults{br, len(arg), false} } -func (b *CreateInclOptsBatchBatchResults) Exec(f func(int, error)) { +func (b *InclOptsUpdateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -307,12 +318,12 @@ func (b *CreateInclOptsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateInclOptsBatchBatchResults) Close() error { +func (b *InclOptsUpdateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createInclSegmentBatch = `-- name: CreateInclSegmentBatch :batchexec +const inclSegmentCreateBatch = `-- name: InclSegmentCreateBatch :batchexec insert into incl_segment ( id, instrument_id, @@ -324,13 +335,78 @@ insert into incl_segment ( ) values ($1, $2, $3, $4, $5, $6, $7) ` -type CreateInclSegmentBatchBatchResults struct { +type InclSegmentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InclSegmentCreateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) InclSegmentCreateBatch(ctx context.Context, arg []InclSegmentCreateBatchParams) *InclSegmentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.DepthTimeseriesID, + a.A0TimeseriesID, + a.A180TimeseriesID, + a.B0TimeseriesID, + a.B180TimeseriesID, + } + batch.Queue(inclSegmentCreateBatch, 
vals...) + } + br := q.db.SendBatch(ctx, batch) + return &InclSegmentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *InclSegmentCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *InclSegmentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const inclSegmentUpdateBatch = `-- name: InclSegmentUpdateBatch :batchexec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2 +` + +type InclSegmentUpdateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateInclSegmentBatchParams struct { +type InclSegmentUpdateBatchParams struct { ID int32 `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` @@ -340,7 +416,7 @@ type CreateInclSegmentBatchParams struct { B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` } -func (q *Queries) CreateInclSegmentBatch(ctx context.Context, arg []CreateInclSegmentBatchParams) *CreateInclSegmentBatchBatchResults { +func (q *Queries) InclSegmentUpdateBatch(ctx context.Context, arg []InclSegmentUpdateBatchParams) *InclSegmentUpdateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -352,13 +428,13 @@ func (q *Queries) CreateInclSegmentBatch(ctx context.Context, arg []CreateInclSe a.B0TimeseriesID, a.B180TimeseriesID, } - batch.Queue(createInclSegmentBatch, vals...) + batch.Queue(inclSegmentUpdateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreateInclSegmentBatchBatchResults{br, len(arg), false} + return &InclSegmentUpdateBatchBatchResults{br, len(arg), false} } -func (b *CreateInclSegmentBatchBatchResults) Exec(f func(int, error)) { +func (b *InclSegmentUpdateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -374,40 +450,40 @@ func (b *CreateInclSegmentBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateInclSegmentBatchBatchResults) Close() error { +func (b *InclSegmentUpdateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createInstrumentConstantBatch = `-- name: CreateInstrumentConstantBatch :batchexec +const instrumentConstantCreateBatch = `-- name: InstrumentConstantCreateBatch :batchexec insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2) ` -type CreateInstrumentConstantBatchBatchResults struct { +type InstrumentConstantCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateInstrumentConstantBatchParams struct { +type InstrumentConstantCreateBatchParams struct { InstrumentID uuid.UUID `json:"instrument_id"` TimeseriesID uuid.UUID `json:"timeseries_id"` } -func (q *Queries) CreateInstrumentConstantBatch(ctx context.Context, arg []CreateInstrumentConstantBatchParams) *CreateInstrumentConstantBatchBatchResults { +func (q *Queries) InstrumentConstantCreateBatch(ctx context.Context, arg []InstrumentConstantCreateBatchParams) *InstrumentConstantCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ a.InstrumentID, a.TimeseriesID, } - batch.Queue(createInstrumentConstantBatch, vals...) + batch.Queue(instrumentConstantCreateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreateInstrumentConstantBatchBatchResults{br, len(arg), false} + return &InstrumentConstantCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateInstrumentConstantBatchBatchResults) Exec(f func(int, error)) { +func (b *InstrumentConstantCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -423,44 +499,106 @@ func (b *CreateInstrumentConstantBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateInstrumentConstantBatchBatchResults) Close() error { +func (b *InstrumentConstantCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createInstrumentGroupsBatch = `-- name: CreateInstrumentGroupsBatch :batchone -insert into instrument_group (slug, name, description, creator, create_date, project_id) -values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) -returning id, slug, name, description, creator, create_date, updater, update_date, project_id +const instrumentCreateBatch = `-- name: InstrumentCreateBatch :batchone +insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) +values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) +returning id, slug ` -type CreateInstrumentGroupsBatchBatchResults struct { +type InstrumentCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateInstrumentGroupsBatchParams struct { - Name string `json:"name"` - Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - ProjectID *uuid.UUID `json:"project_id"` +type InstrumentCreateBatchParams struct { + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + Geometry Geometry `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + Creator uuid.UUID `json:"creator"` + CreateDate 
time.Time `json:"create_date"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` +} + +type InstrumentCreateBatchRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +func (q *Queries) InstrumentCreateBatch(ctx context.Context, arg []InstrumentCreateBatchParams) *InstrumentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.Name, + a.TypeID, + a.Geometry, + a.Station, + a.StationOffset, + a.Creator, + a.CreateDate, + a.NidID, + a.UsgsID, + a.ShowCwmsTab, + } + batch.Queue(instrumentCreateBatch, vals...) + } + br := q.db.SendBatch(ctx, batch) + return &InstrumentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *InstrumentCreateBatchBatchResults) QueryRow(f func(int, InstrumentCreateBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i InstrumentCreateBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan(&i.ID, &i.Slug) + if f != nil { + f(t, i, err) + } + } +} + +func (b *InstrumentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const instrumentGroupCreateBatch = `-- name: InstrumentGroupCreateBatch :batchone +insert into instrument_group (slug, name, description, creator, create_date, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, deleted, slug, name, description, creator, create_date, updater, update_date, project_id +` + +type InstrumentGroupCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool } -type CreateInstrumentGroupsBatchRow struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` +type InstrumentGroupCreateBatchParams struct { Name string `json:"name"` Description *string `json:"description"` Creator uuid.UUID `json:"creator"` CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID 
`json:"updater"` - UpdateDate *time.Time `json:"update_date"` ProjectID *uuid.UUID `json:"project_id"` } -func (q *Queries) CreateInstrumentGroupsBatch(ctx context.Context, arg []CreateInstrumentGroupsBatchParams) *CreateInstrumentGroupsBatchBatchResults { +func (q *Queries) InstrumentGroupCreateBatch(ctx context.Context, arg []InstrumentGroupCreateBatchParams) *InstrumentGroupCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -470,16 +608,16 @@ func (q *Queries) CreateInstrumentGroupsBatch(ctx context.Context, arg []CreateI a.CreateDate, a.ProjectID, } - batch.Queue(createInstrumentGroupsBatch, vals...) + batch.Queue(instrumentGroupCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateInstrumentGroupsBatchBatchResults{br, len(arg), false} + return &InstrumentGroupCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateInstrumentGroupsBatchBatchResults) QueryRow(f func(int, CreateInstrumentGroupsBatchRow, error)) { +func (b *InstrumentGroupCreateBatchBatchResults) QueryRow(f func(int, InstrumentGroup, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { - var i CreateInstrumentGroupsBatchRow + var i InstrumentGroup if b.closed { if f != nil { f(t, i, ErrBatchAlreadyClosed) @@ -489,6 +627,7 @@ func (b *CreateInstrumentGroupsBatchBatchResults) QueryRow(f func(int, CreateIns row := b.br.QueryRow() err := row.Scan( &i.ID, + &i.Deleted, &i.Slug, &i.Name, &i.Description, @@ -504,24 +643,24 @@ func (b *CreateInstrumentGroupsBatchBatchResults) QueryRow(f func(int, CreateIns } } -func (b *CreateInstrumentGroupsBatchBatchResults) Close() error { +func (b *InstrumentGroupCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createInstrumentNoteBatch = `-- name: CreateInstrumentNoteBatch :batchone +const instrumentNoteCreateBatch = `-- name: InstrumentNoteCreateBatch :batchone insert into instrument_note (instrument_id, title, body, time, creator, create_date) values 
($1, $2, $3, $4, $5, $6) returning id, instrument_id, title, body, time, creator, create_date, updater, update_date ` -type CreateInstrumentNoteBatchBatchResults struct { +type InstrumentNoteCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateInstrumentNoteBatchParams struct { +type InstrumentNoteCreateBatchParams struct { InstrumentID uuid.UUID `json:"instrument_id"` Title string `json:"title"` Body string `json:"body"` @@ -530,7 +669,7 @@ type CreateInstrumentNoteBatchParams struct { CreateDate time.Time `json:"create_date"` } -func (q *Queries) CreateInstrumentNoteBatch(ctx context.Context, arg []CreateInstrumentNoteBatchParams) *CreateInstrumentNoteBatchBatchResults { +func (q *Queries) InstrumentNoteCreateBatch(ctx context.Context, arg []InstrumentNoteCreateBatchParams) *InstrumentNoteCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -541,13 +680,13 @@ func (q *Queries) CreateInstrumentNoteBatch(ctx context.Context, arg []CreateIns a.Creator, a.CreateDate, } - batch.Queue(createInstrumentNoteBatch, vals...) + batch.Queue(instrumentNoteCreateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreateInstrumentNoteBatchBatchResults{br, len(arg), false} + return &InstrumentNoteCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateInstrumentNoteBatchBatchResults) QueryRow(f func(int, InstrumentNote, error)) { +func (b *InstrumentNoteCreateBatchBatchResults) QueryRow(f func(int, InstrumentNote, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { var i InstrumentNote @@ -575,104 +714,82 @@ func (b *CreateInstrumentNoteBatchBatchResults) QueryRow(f func(int, InstrumentN } } -func (b *CreateInstrumentNoteBatchBatchResults) Close() error { +func (b *InstrumentNoteCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createInstrumentsBatch = `-- name: CreateInstrumentsBatch :batchone -insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) -values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) -returning id, slug +const instrumentStatusCreateOrUpdateBatch = `-- name: InstrumentStatusCreateOrUpdateBatch :batchexec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id ` -type CreateInstrumentsBatchBatchResults struct { +type InstrumentStatusCreateOrUpdateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateInstrumentsBatchParams struct { - Name string `json:"name"` - TypeID uuid.UUID `json:"type_id"` - Geometry Geometry `json:"geometry"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - NidID *string `json:"nid_id"` - UsgsID *string `json:"usgs_id"` - ShowCwmsTab bool `json:"show_cwms_tab"` -} - -type CreateInstrumentsBatchRow struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` +type 
InstrumentStatusCreateOrUpdateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StatusID uuid.UUID `json:"status_id"` + Time time.Time `json:"time"` } -func (q *Queries) CreateInstrumentsBatch(ctx context.Context, arg []CreateInstrumentsBatchParams) *CreateInstrumentsBatchBatchResults { +func (q *Queries) InstrumentStatusCreateOrUpdateBatch(ctx context.Context, arg []InstrumentStatusCreateOrUpdateBatchParams) *InstrumentStatusCreateOrUpdateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.Name, - a.TypeID, - a.Geometry, - a.Station, - a.StationOffset, - a.Creator, - a.CreateDate, - a.NidID, - a.UsgsID, - a.ShowCwmsTab, + a.InstrumentID, + a.StatusID, + a.Time, } - batch.Queue(createInstrumentsBatch, vals...) + batch.Queue(instrumentStatusCreateOrUpdateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateInstrumentsBatchBatchResults{br, len(arg), false} + return &InstrumentStatusCreateOrUpdateBatchBatchResults{br, len(arg), false} } -func (b *CreateInstrumentsBatchBatchResults) QueryRow(f func(int, CreateInstrumentsBatchRow, error)) { +func (b *InstrumentStatusCreateOrUpdateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { - var i CreateInstrumentsBatchRow if b.closed { if f != nil { - f(t, i, ErrBatchAlreadyClosed) + f(t, ErrBatchAlreadyClosed) } continue } - row := b.br.QueryRow() - err := row.Scan(&i.ID, &i.Slug) + _, err := b.br.Exec() if f != nil { - f(t, i, err) + f(t, err) } } } -func (b *CreateInstrumentsBatchBatchResults) Close() error { +func (b *InstrumentStatusCreateOrUpdateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createIpiOptsBatch = `-- name: CreateIpiOptsBatch :batchexec +const ipiOptsCreateBatch = `-- name: IpiOptsCreateBatch :batchexec insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4) ` -type 
CreateIpiOptsBatchBatchResults struct { +type IpiOptsCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateIpiOptsBatchParams struct { +type IpiOptsCreateBatchParams struct { InstrumentID uuid.UUID `json:"instrument_id"` NumSegments int32 `json:"num_segments"` BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` InitialTime *time.Time `json:"initial_time"` } -func (q *Queries) CreateIpiOptsBatch(ctx context.Context, arg []CreateIpiOptsBatchParams) *CreateIpiOptsBatchBatchResults { +func (q *Queries) IpiOptsCreateBatch(ctx context.Context, arg []IpiOptsCreateBatchParams) *IpiOptsCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -681,13 +798,13 @@ func (q *Queries) CreateIpiOptsBatch(ctx context.Context, arg []CreateIpiOptsBat a.BottomElevationTimeseriesID, a.InitialTime, } - batch.Queue(createIpiOptsBatch, vals...) + batch.Queue(ipiOptsCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateIpiOptsBatchBatchResults{br, len(arg), false} + return &IpiOptsCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateIpiOptsBatchBatchResults) Exec(f func(int, error)) { +func (b *IpiOptsCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -703,55 +820,45 @@ func (b *CreateIpiOptsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateIpiOptsBatchBatchResults) Close() error { +func (b *IpiOptsCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createIpiSegmentBatch = `-- name: CreateIpiSegmentBatch :batchexec -insert into ipi_segment ( - id, - instrument_id, - length_timeseries_id, - tilt_timeseries_id, - inc_dev_timeseries_id, - temp_timeseries_id -) values ($1, $2, $3, $4, $5, $6) +const ipiOptsUpdateBatch = `-- name: IpiOptsUpdateBatch :batchexec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where 
instrument_id = $1 ` -type CreateIpiSegmentBatchBatchResults struct { +type IpiOptsUpdateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateIpiSegmentBatchParams struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` - TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` - IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` - TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +type IpiOptsUpdateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` } -func (q *Queries) CreateIpiSegmentBatch(ctx context.Context, arg []CreateIpiSegmentBatchParams) *CreateIpiSegmentBatchBatchResults { +func (q *Queries) IpiOptsUpdateBatch(ctx context.Context, arg []IpiOptsUpdateBatchParams) *IpiOptsUpdateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.ID, a.InstrumentID, - a.LengthTimeseriesID, - a.TiltTimeseriesID, - a.IncDevTimeseriesID, - a.TempTimeseriesID, + a.BottomElevationTimeseriesID, + a.InitialTime, } - batch.Queue(createIpiSegmentBatch, vals...) + batch.Queue(ipiOptsUpdateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreateIpiSegmentBatchBatchResults{br, len(arg), false} + return &IpiOptsUpdateBatchBatchResults{br, len(arg), false} } -func (b *CreateIpiSegmentBatchBatchResults) Exec(f func(int, error)) { +func (b *IpiOptsUpdateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -767,95 +874,55 @@ func (b *CreateIpiSegmentBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateIpiSegmentBatchBatchResults) Close() error { +func (b *IpiOptsUpdateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createOrUpdateInstrumentStatusBatch = `-- name: CreateOrUpdateInstrumentStatusBatch :batchexec -insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) -on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id +const ipiSegmentCreateBatch = `-- name: IpiSegmentCreateBatch :batchexec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6) ` -type CreateOrUpdateInstrumentStatusBatchBatchResults struct { +type IpiSegmentCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateOrUpdateInstrumentStatusBatchParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - StatusID uuid.UUID `json:"status_id"` - Time time.Time `json:"time"` +type IpiSegmentCreateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } -func (q *Queries) CreateOrUpdateInstrumentStatusBatch(ctx context.Context, arg []CreateOrUpdateInstrumentStatusBatchParams) 
*CreateOrUpdateInstrumentStatusBatchBatchResults { +func (q *Queries) IpiSegmentCreateBatch(ctx context.Context, arg []IpiSegmentCreateBatchParams) *IpiSegmentCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ + a.ID, a.InstrumentID, - a.StatusID, - a.Time, - } - batch.Queue(createOrUpdateInstrumentStatusBatch, vals...) - } - br := q.db.SendBatch(ctx, batch) - return &CreateOrUpdateInstrumentStatusBatchBatchResults{br, len(arg), false} -} - -func (b *CreateOrUpdateInstrumentStatusBatchBatchResults) Exec(f func(int, error)) { - defer b.br.Close() - for t := 0; t < b.tot; t++ { - if b.closed { - if f != nil { - f(t, ErrBatchAlreadyClosed) - } - continue - } - _, err := b.br.Exec() - if f != nil { - f(t, err) - } - } -} - -func (b *CreateOrUpdateInstrumentStatusBatchBatchResults) Close() error { - b.closed = true - return b.br.Close() -} - -const createOrUpdateTimeseriesMeasurementsBatch = `-- name: CreateOrUpdateTimeseriesMeasurementsBatch :batchexec -insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) -on conflict on constraint timeseries_unique_time do update set value = excluded.value -` - -type CreateOrUpdateTimeseriesMeasurementsBatchBatchResults struct { - br pgx.BatchResults - tot int - closed bool -} - -type CreateOrUpdateTimeseriesMeasurementsBatchParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - Time time.Time `json:"time"` - Value float64 `json:"value"` -} - -func (q *Queries) CreateOrUpdateTimeseriesMeasurementsBatch(ctx context.Context, arg []CreateOrUpdateTimeseriesMeasurementsBatchParams) *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults { - batch := &pgx.Batch{} - for _, a := range arg { - vals := []interface{}{ - a.TimeseriesID, - a.Time, - a.Value, + a.LengthTimeseriesID, + a.TiltTimeseriesID, + a.IncDevTimeseriesID, + a.TempTimeseriesID, } - batch.Queue(createOrUpdateTimeseriesMeasurementsBatch, vals...) + batch.Queue(ipiSegmentCreateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreateOrUpdateTimeseriesMeasurementsBatchBatchResults{br, len(arg), false} + return &IpiSegmentCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults) Exec(f func(int, error)) { +func (b *IpiSegmentCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -871,47 +938,53 @@ func (b *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults) Exec(f func(int, } } -func (b *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults) Close() error { +func (b *IpiSegmentCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createOrUpdateTimeseriesNoteBatch = `-- name: CreateOrUpdateTimeseriesNoteBatch :batchexec -insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) -on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation +const ipiSegmentUpdateBatch = `-- name: IpiSegmentUpdateBatch :batchexec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2 ` -type CreateOrUpdateTimeseriesNoteBatchBatchResults struct { +type IpiSegmentUpdateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateOrUpdateTimeseriesNoteBatchParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - Time time.Time `json:"time"` - Masked *bool `json:"masked"` - Validated *bool `json:"validated"` - Annotation *string `json:"annotation"` +type IpiSegmentUpdateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID 
`json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } -func (q *Queries) CreateOrUpdateTimeseriesNoteBatch(ctx context.Context, arg []CreateOrUpdateTimeseriesNoteBatchParams) *CreateOrUpdateTimeseriesNoteBatchBatchResults { +func (q *Queries) IpiSegmentUpdateBatch(ctx context.Context, arg []IpiSegmentUpdateBatchParams) *IpiSegmentUpdateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.TimeseriesID, - a.Time, - a.Masked, - a.Validated, - a.Annotation, + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.TiltTimeseriesID, + a.IncDevTimeseriesID, + a.TempTimeseriesID, } - batch.Queue(createOrUpdateTimeseriesNoteBatch, vals...) + batch.Queue(ipiSegmentUpdateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateOrUpdateTimeseriesNoteBatchBatchResults{br, len(arg), false} + return &IpiSegmentUpdateBatchBatchResults{br, len(arg), false} } -func (b *CreateOrUpdateTimeseriesNoteBatchBatchResults) Exec(f func(int, error)) { +func (b *IpiSegmentUpdateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -927,23 +1000,23 @@ func (b *CreateOrUpdateTimeseriesNoteBatchBatchResults) Exec(f func(int, error)) } } -func (b *CreateOrUpdateTimeseriesNoteBatchBatchResults) Close() error { +func (b *IpiSegmentUpdateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createPlotConfigCustomShapesBatch = `-- name: CreatePlotConfigCustomShapesBatch :batchexec +const plotConfigCustomShapeCreateBatch = `-- name: PlotConfigCustomShapeCreateBatch :batchexec insert into plot_configuration_custom_shape (plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5) ` -type CreatePlotConfigCustomShapesBatchBatchResults struct { +type PlotConfigCustomShapeCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreatePlotConfigCustomShapesBatchParams struct { +type 
PlotConfigCustomShapeCreateBatchParams struct { PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` Enabled bool `json:"enabled"` Name string `json:"name"` @@ -951,7 +1024,7 @@ type CreatePlotConfigCustomShapesBatchParams struct { Color string `json:"color"` } -func (q *Queries) CreatePlotConfigCustomShapesBatch(ctx context.Context, arg []CreatePlotConfigCustomShapesBatchParams) *CreatePlotConfigCustomShapesBatchBatchResults { +func (q *Queries) PlotConfigCustomShapeCreateBatch(ctx context.Context, arg []PlotConfigCustomShapeCreateBatchParams) *PlotConfigCustomShapeCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -961,13 +1034,13 @@ func (q *Queries) CreatePlotConfigCustomShapesBatch(ctx context.Context, arg []C a.DataPoint, a.Color, } - batch.Queue(createPlotConfigCustomShapesBatch, vals...) + batch.Queue(plotConfigCustomShapeCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreatePlotConfigCustomShapesBatchBatchResults{br, len(arg), false} + return &PlotConfigCustomShapeCreateBatchBatchResults{br, len(arg), false} } -func (b *CreatePlotConfigCustomShapesBatchBatchResults) Exec(f func(int, error)) { +func (b *PlotConfigCustomShapeCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -983,24 +1056,24 @@ func (b *CreatePlotConfigCustomShapesBatchBatchResults) Exec(f func(int, error)) } } -func (b *CreatePlotConfigCustomShapesBatchBatchResults) Close() error { +func (b *PlotConfigCustomShapeCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createPlotConfigTimeseriesTracesBatch = `-- name: CreatePlotConfigTimeseriesTracesBatch :batchexec +const plotConfigTimeseriesTracesCreateBatch = `-- name: PlotConfigTimeseriesTracesCreateBatch :batchexec insert into plot_configuration_timeseries_trace (plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values 
($1, $2, $3, $4, $5, $6, $7, $8) ` -type CreatePlotConfigTimeseriesTracesBatchBatchResults struct { +type PlotConfigTimeseriesTracesCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreatePlotConfigTimeseriesTracesBatchParams struct { +type PlotConfigTimeseriesTracesCreateBatchParams struct { PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` TimeseriesID *uuid.UUID `json:"timeseries_id"` TraceOrder int32 `json:"trace_order"` @@ -1011,7 +1084,7 @@ type CreatePlotConfigTimeseriesTracesBatchParams struct { YAxis YAxis `json:"y_axis"` } -func (q *Queries) CreatePlotConfigTimeseriesTracesBatch(ctx context.Context, arg []CreatePlotConfigTimeseriesTracesBatchParams) *CreatePlotConfigTimeseriesTracesBatchBatchResults { +func (q *Queries) PlotConfigTimeseriesTracesCreateBatch(ctx context.Context, arg []PlotConfigTimeseriesTracesCreateBatchParams) *PlotConfigTimeseriesTracesCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -1024,13 +1097,13 @@ func (q *Queries) CreatePlotConfigTimeseriesTracesBatch(ctx context.Context, arg a.ShowMarkers, a.YAxis, } - batch.Queue(createPlotConfigTimeseriesTracesBatch, vals...) + batch.Queue(plotConfigTimeseriesTracesCreateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreatePlotConfigTimeseriesTracesBatchBatchResults{br, len(arg), false} + return &PlotConfigTimeseriesTracesCreateBatchBatchResults{br, len(arg), false} } -func (b *CreatePlotConfigTimeseriesTracesBatchBatchResults) Exec(f func(int, error)) { +func (b *PlotConfigTimeseriesTracesCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1046,41 +1119,41 @@ func (b *CreatePlotConfigTimeseriesTracesBatchBatchResults) Exec(f func(int, err } } -func (b *CreatePlotConfigTimeseriesTracesBatchBatchResults) Close() error { +func (b *PlotConfigTimeseriesTracesCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createPlotContourConfigTimeseriesBatch = `-- name: CreatePlotContourConfigTimeseriesBatch :batchexec +const plotContourConfigTimeseriesCreateBatch = `-- name: PlotContourConfigTimeseriesCreateBatch :batchexec insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) on conflict (plot_contour_config_id, timeseries_id) do nothing ` -type CreatePlotContourConfigTimeseriesBatchBatchResults struct { +type PlotContourConfigTimeseriesCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreatePlotContourConfigTimeseriesBatchParams struct { +type PlotContourConfigTimeseriesCreateBatchParams struct { PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` TimeseriesID uuid.UUID `json:"timeseries_id"` } -func (q *Queries) CreatePlotContourConfigTimeseriesBatch(ctx context.Context, arg []CreatePlotContourConfigTimeseriesBatchParams) *CreatePlotContourConfigTimeseriesBatchBatchResults { +func (q *Queries) PlotContourConfigTimeseriesCreateBatch(ctx context.Context, arg []PlotContourConfigTimeseriesCreateBatchParams) *PlotContourConfigTimeseriesCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ a.PlotContourConfigID, a.TimeseriesID, 
} - batch.Queue(createPlotContourConfigTimeseriesBatch, vals...) + batch.Queue(plotContourConfigTimeseriesCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreatePlotContourConfigTimeseriesBatchBatchResults{br, len(arg), false} + return &PlotContourConfigTimeseriesCreateBatchBatchResults{br, len(arg), false} } -func (b *CreatePlotContourConfigTimeseriesBatchBatchResults) Exec(f func(int, error)) { +func (b *PlotContourConfigTimeseriesCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1096,24 +1169,24 @@ func (b *CreatePlotContourConfigTimeseriesBatchBatchResults) Exec(f func(int, er } } -func (b *CreatePlotContourConfigTimeseriesBatchBatchResults) Close() error { +func (b *PlotContourConfigTimeseriesCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createProjectsBatch = `-- name: CreateProjectsBatch :batchone +const projectCreateBatch = `-- name: ProjectCreateBatch :batchone insert into project (federal_id, slug, name, district_id, creator, create_date) values ($1, slugify($2, 'project'), $2, $3, $4, $5) returning id, slug ` -type CreateProjectsBatchBatchResults struct { +type ProjectCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateProjectsBatchParams struct { +type ProjectCreateBatchParams struct { FederalID *string `json:"federal_id"` Name string `json:"name"` DistrictID *uuid.UUID `json:"district_id"` @@ -1121,12 +1194,12 @@ type CreateProjectsBatchParams struct { CreateDate time.Time `json:"create_date"` } -type CreateProjectsBatchRow struct { +type ProjectCreateBatchRow struct { ID uuid.UUID `json:"id"` Slug string `json:"slug"` } -func (q *Queries) CreateProjectsBatch(ctx context.Context, arg []CreateProjectsBatchParams) *CreateProjectsBatchBatchResults { +func (q *Queries) ProjectCreateBatch(ctx context.Context, arg []ProjectCreateBatchParams) *ProjectCreateBatchBatchResults { batch := &pgx.Batch{} for 
_, a := range arg { vals := []interface{}{ @@ -1136,16 +1209,16 @@ func (q *Queries) CreateProjectsBatch(ctx context.Context, arg []CreateProjectsB a.Creator, a.CreateDate, } - batch.Queue(createProjectsBatch, vals...) + batch.Queue(projectCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateProjectsBatchBatchResults{br, len(arg), false} + return &ProjectCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateProjectsBatchBatchResults) QueryRow(f func(int, CreateProjectsBatchRow, error)) { +func (b *ProjectCreateBatchBatchResults) QueryRow(f func(int, ProjectCreateBatchRow, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { - var i CreateProjectsBatchRow + var i ProjectCreateBatchRow if b.closed { if f != nil { f(t, i, ErrBatchAlreadyClosed) @@ -1160,45 +1233,363 @@ func (b *CreateProjectsBatchBatchResults) QueryRow(f func(int, CreateProjectsBat } } -func (b *CreateProjectsBatchBatchResults) Close() error { +func (b *ProjectCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const projectInstrumentCreateBatch = `-- name: ProjectInstrumentCreateBatch :batchexec +insert into project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing +` + +type ProjectInstrumentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ProjectInstrumentCreateBatchParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) ProjectInstrumentCreateBatch(ctx context.Context, arg []ProjectInstrumentCreateBatchParams) *ProjectInstrumentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ProjectID, + a.InstrumentID, + } + batch.Queue(projectInstrumentCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ProjectInstrumentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *ProjectInstrumentCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *ProjectInstrumentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const projectInstrumentDeleteBatch = `-- name: ProjectInstrumentDeleteBatch :batchexec +delete from project_instrument where project_id = $1 and instrument_id = $2 +` + +type ProjectInstrumentDeleteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ProjectInstrumentDeleteBatchParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) ProjectInstrumentDeleteBatch(ctx context.Context, arg []ProjectInstrumentDeleteBatchParams) *ProjectInstrumentDeleteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ProjectID, + a.InstrumentID, + } + batch.Queue(projectInstrumentDeleteBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ProjectInstrumentDeleteBatchBatchResults{br, len(arg), false} +} + +func (b *ProjectInstrumentDeleteBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *ProjectInstrumentDeleteBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const reportConfigPlotConfigCreateBatch = `-- name: ReportConfigPlotConfigCreateBatch :batchexec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2) +` + +type ReportConfigPlotConfigCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ReportConfigPlotConfigCreateBatchParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) ReportConfigPlotConfigCreateBatch(ctx context.Context, arg []ReportConfigPlotConfigCreateBatchParams) *ReportConfigPlotConfigCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ReportConfigID, + a.PlotConfigID, + } + batch.Queue(reportConfigPlotConfigCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ReportConfigPlotConfigCreateBatchBatchResults{br, len(arg), false} +} + +func (b *ReportConfigPlotConfigCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *ReportConfigPlotConfigCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const reportConfigPlotConfigDeleteBatch = `-- name: ReportConfigPlotConfigDeleteBatch :batchexec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2 +` + +type ReportConfigPlotConfigDeleteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ReportConfigPlotConfigDeleteBatchParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) ReportConfigPlotConfigDeleteBatch(ctx context.Context, arg []ReportConfigPlotConfigDeleteBatchParams) *ReportConfigPlotConfigDeleteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ReportConfigID, + a.PlotConfigID, + } + batch.Queue(reportConfigPlotConfigDeleteBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ReportConfigPlotConfigDeleteBatchBatchResults{br, len(arg), false} +} + +func (b *ReportConfigPlotConfigDeleteBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *ReportConfigPlotConfigDeleteBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createSaaOptsBatch = `-- name: CreateSaaOptsBatch :batchexec +const saaOptsCreateBatch = `-- name: SaaOptsCreateBatch :batchexec insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4) ` -type CreateSaaOptsBatchBatchResults struct { +type SaaOptsCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateSaaOptsBatchParams struct { +type SaaOptsCreateBatchParams struct { InstrumentID uuid.UUID `json:"instrument_id"` NumSegments int32 `json:"num_segments"` BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` InitialTime *time.Time `json:"initial_time"` } -func (q *Queries) CreateSaaOptsBatch(ctx context.Context, arg []CreateSaaOptsBatchParams) *CreateSaaOptsBatchBatchResults { +func (q *Queries) SaaOptsCreateBatch(ctx context.Context, arg []SaaOptsCreateBatchParams) *SaaOptsCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ a.InstrumentID, - a.NumSegments, - a.BottomElevationTimeseriesID, - a.InitialTime, + a.NumSegments, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(saaOptsCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &SaaOptsCreateBatchBatchResults{br, len(arg), false} +} + +func (b *SaaOptsCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *SaaOptsCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const saaOptsUpdateBatch = `-- name: SaaOptsUpdateBatch :batchexec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type SaaOptsUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type SaaOptsUpdateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) SaaOptsUpdateBatch(ctx context.Context, arg []SaaOptsUpdateBatchParams) *SaaOptsUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(saaOptsUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &SaaOptsUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *SaaOptsUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *SaaOptsUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const saaSegmentCreateBatch = `-- name: SaaSegmentCreateBatch :batchexec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type SaaSegmentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type SaaSegmentCreateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) SaaSegmentCreateBatch(ctx context.Context, arg []SaaSegmentCreateBatchParams) *SaaSegmentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.XTimeseriesID, + a.YTimeseriesID, + a.ZTimeseriesID, + a.TempTimeseriesID, } - batch.Queue(createSaaOptsBatch, vals...) + batch.Queue(saaSegmentCreateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreateSaaOptsBatchBatchResults{br, len(arg), false} + return &SaaSegmentCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateSaaOptsBatchBatchResults) Exec(f func(int, error)) { +func (b *SaaSegmentCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1214,30 +1605,28 @@ func (b *CreateSaaOptsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateSaaOptsBatchBatchResults) Close() error { +func (b *SaaSegmentCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createSaaSegmentBatch = `-- name: CreateSaaSegmentBatch :batchexec -insert into saa_segment ( - id, - instrument_id, - length_timeseries_id, - x_timeseries_id, - y_timeseries_id, - z_timeseries_id, - temp_timeseries_id -) values ($1, $2, $3, $4, $5, $6, $7) +const saaSegmentUpdateBatch = `-- name: SaaSegmentUpdateBatch :batchexec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2 ` -type CreateSaaSegmentBatchBatchResults struct { +type SaaSegmentUpdateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateSaaSegmentBatchParams struct { +type SaaSegmentUpdateBatchParams struct { ID int32 `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` @@ -1247,7 +1636,7 @@ type CreateSaaSegmentBatchParams struct { TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } -func (q *Queries) CreateSaaSegmentBatch(ctx context.Context, arg []CreateSaaSegmentBatchParams) *CreateSaaSegmentBatchBatchResults { +func (q *Queries) SaaSegmentUpdateBatch(ctx context.Context, arg []SaaSegmentUpdateBatchParams) *SaaSegmentUpdateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -1259,13 +1648,13 @@ func (q 
*Queries) CreateSaaSegmentBatch(ctx context.Context, arg []CreateSaaSegm a.ZTimeseriesID, a.TempTimeseriesID, } - batch.Queue(createSaaSegmentBatch, vals...) + batch.Queue(saaSegmentUpdateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateSaaSegmentBatchBatchResults{br, len(arg), false} + return &SaaSegmentUpdateBatchBatchResults{br, len(arg), false} } -func (b *CreateSaaSegmentBatchBatchResults) Exec(f func(int, error)) { +func (b *SaaSegmentUpdateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1281,24 +1670,24 @@ func (b *CreateSaaSegmentBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateSaaSegmentBatchBatchResults) Close() error { +func (b *SaaSegmentUpdateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createTimeseriesBatch = `-- name: CreateTimeseriesBatch :batchone +const timeseriesCreateBatch = `-- name: TimeseriesCreateBatch :batchone insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) returning id, instrument_id, slug, name, parameter_id, unit_id, type ` -type CreateTimeseriesBatchBatchResults struct { +type TimeseriesCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateTimeseriesBatchParams struct { +type TimeseriesCreateBatchParams struct { InstrumentID *uuid.UUID `json:"instrument_id"` Name string `json:"name"` ParameterID uuid.UUID `json:"parameter_id"` @@ -1306,7 +1695,7 @@ type CreateTimeseriesBatchParams struct { Type NullTimeseriesType `json:"type"` } -type CreateTimeseriesBatchRow struct { +type TimeseriesCreateBatchRow struct { ID uuid.UUID `json:"id"` InstrumentID *uuid.UUID `json:"instrument_id"` Slug string `json:"slug"` @@ -1316,7 +1705,7 @@ type CreateTimeseriesBatchRow struct { Type NullTimeseriesType `json:"type"` } -func (q *Queries) CreateTimeseriesBatch(ctx context.Context, arg 
[]CreateTimeseriesBatchParams) *CreateTimeseriesBatchBatchResults { +func (q *Queries) TimeseriesCreateBatch(ctx context.Context, arg []TimeseriesCreateBatchParams) *TimeseriesCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -1326,16 +1715,16 @@ func (q *Queries) CreateTimeseriesBatch(ctx context.Context, arg []CreateTimeser a.UnitID, a.Type, } - batch.Queue(createTimeseriesBatch, vals...) + batch.Queue(timeseriesCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateTimeseriesBatchBatchResults{br, len(arg), false} + return &TimeseriesCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateTimeseriesBatchBatchResults) QueryRow(f func(int, CreateTimeseriesBatchRow, error)) { +func (b *TimeseriesCreateBatchBatchResults) QueryRow(f func(int, TimeseriesCreateBatchRow, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { - var i CreateTimeseriesBatchRow + var i TimeseriesCreateBatchRow if b.closed { if f != nil { f(t, i, ErrBatchAlreadyClosed) @@ -1358,23 +1747,23 @@ func (b *CreateTimeseriesBatchBatchResults) QueryRow(f func(int, CreateTimeserie } } -func (b *CreateTimeseriesBatchBatchResults) Close() error { +func (b *TimeseriesCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createTimeseriesCwmsBatch = `-- name: CreateTimeseriesCwmsBatch :batchexec +const timeseriesCwmsCreateBatch = `-- name: TimeseriesCwmsCreateBatch :batchexec insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values ($1, $2, $3, $4, $5) ` -type CreateTimeseriesCwmsBatchBatchResults struct { +type TimeseriesCwmsCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateTimeseriesCwmsBatchParams struct { +type TimeseriesCwmsCreateBatchParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` CwmsTimeseriesID string `json:"cwms_timeseries_id"` CwmsOfficeID string 
`json:"cwms_office_id"` @@ -1382,7 +1771,7 @@ type CreateTimeseriesCwmsBatchParams struct { CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` } -func (q *Queries) CreateTimeseriesCwmsBatch(ctx context.Context, arg []CreateTimeseriesCwmsBatchParams) *CreateTimeseriesCwmsBatchBatchResults { +func (q *Queries) TimeseriesCwmsCreateBatch(ctx context.Context, arg []TimeseriesCwmsCreateBatchParams) *TimeseriesCwmsCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -1392,13 +1781,13 @@ func (q *Queries) CreateTimeseriesCwmsBatch(ctx context.Context, arg []CreateTim a.CwmsExtentEarliestTime, a.CwmsExtentLatestTime, } - batch.Queue(createTimeseriesCwmsBatch, vals...) + batch.Queue(timeseriesCwmsCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateTimeseriesCwmsBatchBatchResults{br, len(arg), false} + return &TimeseriesCwmsCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateTimeseriesCwmsBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesCwmsCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1414,29 +1803,29 @@ func (b *CreateTimeseriesCwmsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateTimeseriesCwmsBatchBatchResults) Close() error { +func (b *TimeseriesCwmsCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createTimeseriesMeasurementsBatch = `-- name: CreateTimeseriesMeasurementsBatch :batchexec +const timeseriesMeasurementCreateBatch = `-- name: TimeseriesMeasurementCreateBatch :batchexec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) on conflict on constraint timeseries_unique_time do nothing ` -type CreateTimeseriesMeasurementsBatchBatchResults struct { +type TimeseriesMeasurementCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateTimeseriesMeasurementsBatchParams struct { 
+type TimeseriesMeasurementCreateBatchParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Time time.Time `json:"time"` Value float64 `json:"value"` } -func (q *Queries) CreateTimeseriesMeasurementsBatch(ctx context.Context, arg []CreateTimeseriesMeasurementsBatchParams) *CreateTimeseriesMeasurementsBatchBatchResults { +func (q *Queries) TimeseriesMeasurementCreateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateBatchParams) *TimeseriesMeasurementCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ @@ -1444,13 +1833,13 @@ func (q *Queries) CreateTimeseriesMeasurementsBatch(ctx context.Context, arg []C a.Time, a.Value, } - batch.Queue(createTimeseriesMeasurementsBatch, vals...) + batch.Queue(timeseriesMeasurementCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &CreateTimeseriesMeasurementsBatchBatchResults{br, len(arg), false} + return &TimeseriesMeasurementCreateBatchBatchResults{br, len(arg), false} } -func (b *CreateTimeseriesMeasurementsBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesMeasurementCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1466,47 +1855,43 @@ func (b *CreateTimeseriesMeasurementsBatchBatchResults) Exec(f func(int, error)) } } -func (b *CreateTimeseriesMeasurementsBatchBatchResults) Close() error { +func (b *TimeseriesMeasurementCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createTimeseriesNotesBatch = `-- name: CreateTimeseriesNotesBatch :batchexec -insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) -on conflict on constraint notes_unique_time do nothing +const timeseriesMeasurementCreateOrUpdateBatch = `-- name: TimeseriesMeasurementCreateOrUpdateBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint 
timeseries_unique_time do update set value = excluded.value ` -type CreateTimeseriesNotesBatchBatchResults struct { +type TimeseriesMeasurementCreateOrUpdateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateTimeseriesNotesBatchParams struct { +type TimeseriesMeasurementCreateOrUpdateBatchParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Time time.Time `json:"time"` - Masked *bool `json:"masked"` - Validated *bool `json:"validated"` - Annotation *string `json:"annotation"` + Value float64 `json:"value"` } -func (q *Queries) CreateTimeseriesNotesBatch(ctx context.Context, arg []CreateTimeseriesNotesBatchParams) *CreateTimeseriesNotesBatchBatchResults { +func (q *Queries) TimeseriesMeasurementCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateBatchParams) *TimeseriesMeasurementCreateOrUpdateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ a.TimeseriesID, a.Time, - a.Masked, - a.Validated, - a.Annotation, + a.Value, } - batch.Queue(createTimeseriesNotesBatch, vals...) + batch.Queue(timeseriesMeasurementCreateOrUpdateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &CreateTimeseriesNotesBatchBatchResults{br, len(arg), false} + return &TimeseriesMeasurementCreateOrUpdateBatchBatchResults{br, len(arg), false} } -func (b *CreateTimeseriesNotesBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesMeasurementCreateOrUpdateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1522,140 +1907,40 @@ func (b *CreateTimeseriesNotesBatchBatchResults) Exec(f func(int, error)) { } } -func (b *CreateTimeseriesNotesBatchBatchResults) Close() error { +func (b *TimeseriesMeasurementCreateOrUpdateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const createUploaderConfigMappingsBatch = `-- name: CreateUploaderConfigMappingsBatch :batchexec -insert into uploader_config_mapping (uploader_config_id, field_name, timeseries_id) values ($1, $2, $3) +const timeseriesMeasurementDeleteBatch = `-- name: TimeseriesMeasurementDeleteBatch :batchexec +delete from timeseries_measurement where timeseries_id=$1 and time=$2 ` -type CreateUploaderConfigMappingsBatchBatchResults struct { +type TimeseriesMeasurementDeleteBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type CreateUploaderConfigMappingsBatchParams struct { - UploaderConfigID uuid.UUID `json:"uploader_config_id"` - FieldName string `json:"field_name"` - TimeseriesID *uuid.UUID `json:"timeseries_id"` +type TimeseriesMeasurementDeleteBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` } -func (q *Queries) CreateUploaderConfigMappingsBatch(ctx context.Context, arg []CreateUploaderConfigMappingsBatchParams) *CreateUploaderConfigMappingsBatchBatchResults { +func (q *Queries) TimeseriesMeasurementDeleteBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteBatchParams) *TimeseriesMeasurementDeleteBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - 
a.UploaderConfigID, - a.FieldName, a.TimeseriesID, + a.Time, } - batch.Queue(createUploaderConfigMappingsBatch, vals...) - } - br := q.db.SendBatch(ctx, batch) - return &CreateUploaderConfigMappingsBatchBatchResults{br, len(arg), false} -} - -func (b *CreateUploaderConfigMappingsBatchBatchResults) Exec(f func(int, error)) { - defer b.br.Close() - for t := 0; t < b.tot; t++ { - if b.closed { - if f != nil { - f(t, ErrBatchAlreadyClosed) - } - continue - } - _, err := b.br.Exec() - if f != nil { - f(t, err) - } - } -} - -func (b *CreateUploaderConfigMappingsBatchBatchResults) Close() error { - b.closed = true - return b.br.Close() -} - -const unassignInstrumentFromProjectBatch = `-- name: UnassignInstrumentFromProjectBatch :batchexec -delete from project_instrument where project_id = $1 and instrument_id = $2 -` - -type UnassignInstrumentFromProjectBatchBatchResults struct { - br pgx.BatchResults - tot int - closed bool -} - -type UnassignInstrumentFromProjectBatchParams struct { - ProjectID uuid.UUID `json:"project_id"` - InstrumentID uuid.UUID `json:"instrument_id"` -} - -func (q *Queries) UnassignInstrumentFromProjectBatch(ctx context.Context, arg []UnassignInstrumentFromProjectBatchParams) *UnassignInstrumentFromProjectBatchBatchResults { - batch := &pgx.Batch{} - for _, a := range arg { - vals := []interface{}{ - a.ProjectID, - a.InstrumentID, - } - batch.Queue(unassignInstrumentFromProjectBatch, vals...) 
- } - br := q.db.SendBatch(ctx, batch) - return &UnassignInstrumentFromProjectBatchBatchResults{br, len(arg), false} -} - -func (b *UnassignInstrumentFromProjectBatchBatchResults) Exec(f func(int, error)) { - defer b.br.Close() - for t := 0; t < b.tot; t++ { - if b.closed { - if f != nil { - f(t, ErrBatchAlreadyClosed) - } - continue - } - _, err := b.br.Exec() - if f != nil { - f(t, err) - } - } -} - -func (b *UnassignInstrumentFromProjectBatchBatchResults) Close() error { - b.closed = true - return b.br.Close() -} - -const unassignReportConfigPlotConfigBatch = `-- name: UnassignReportConfigPlotConfigBatch :batchexec -delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2 -` - -type UnassignReportConfigPlotConfigBatchBatchResults struct { - br pgx.BatchResults - tot int - closed bool -} - -type UnassignReportConfigPlotConfigBatchParams struct { - ReportConfigID uuid.UUID `json:"report_config_id"` - PlotConfigID uuid.UUID `json:"plot_config_id"` -} - -func (q *Queries) UnassignReportConfigPlotConfigBatch(ctx context.Context, arg []UnassignReportConfigPlotConfigBatchParams) *UnassignReportConfigPlotConfigBatchBatchResults { - batch := &pgx.Batch{} - for _, a := range arg { - vals := []interface{}{ - a.ReportConfigID, - a.PlotConfigID, - } - batch.Queue(unassignReportConfigPlotConfigBatch, vals...) + batch.Queue(timeseriesMeasurementDeleteBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &UnassignReportConfigPlotConfigBatchBatchResults{br, len(arg), false} + return &TimeseriesMeasurementDeleteBatchBatchResults{br, len(arg), false} } -func (b *UnassignReportConfigPlotConfigBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesMeasurementDeleteBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1671,45 +1956,42 @@ func (b *UnassignReportConfigPlotConfigBatchBatchResults) Exec(f func(int, error } } -func (b *UnassignReportConfigPlotConfigBatchBatchResults) Close() error { +func (b *TimeseriesMeasurementDeleteBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const updateInclOptsBatch = `-- name: UpdateInclOptsBatch :batchexec -update incl_opts set - bottom_elevation_timeseries_id = $2, - initial_time = $3 -where instrument_id = $1 +const timeseriesMeasurementDeleteRangeBatch = `-- name: TimeseriesMeasurementDeleteRangeBatch :batchexec +delete from timeseries_measurement where timeseries_id = $1 and time > $2 and time < $3 ` -type UpdateInclOptsBatchBatchResults struct { +type TimeseriesMeasurementDeleteRangeBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type UpdateInclOptsBatchParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime *time.Time `json:"initial_time"` +type TimeseriesMeasurementDeleteRangeBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` } -func (q *Queries) UpdateInclOptsBatch(ctx context.Context, arg []UpdateInclOptsBatchParams) *UpdateInclOptsBatchBatchResults { +func (q *Queries) TimeseriesMeasurementDeleteRangeBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteRangeBatchParams) *TimeseriesMeasurementDeleteRangeBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals 
:= []interface{}{ - a.InstrumentID, - a.BottomElevationTimeseriesID, - a.InitialTime, + a.TimeseriesID, + a.After, + a.Before, } - batch.Queue(updateInclOptsBatch, vals...) + batch.Queue(timeseriesMeasurementDeleteRangeBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &UpdateInclOptsBatchBatchResults{br, len(arg), false} + return &TimeseriesMeasurementDeleteRangeBatchBatchResults{br, len(arg), false} } -func (b *UpdateInclOptsBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesMeasurementDeleteRangeBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1725,56 +2007,47 @@ func (b *UpdateInclOptsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *UpdateInclOptsBatchBatchResults) Close() error { +func (b *TimeseriesMeasurementDeleteRangeBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const updateInclSegmentsBatch = `-- name: UpdateInclSegmentsBatch :batchexec -update incl_segment set - depth_timeseries_id=$3, - a0_timeseries_id=$4, - a180_timeseries_id=$5, - b0_timeseries_id=$6, - b180_timeseries_id=$7 -where id = $1 and instrument_id = $2 +const timeseriesNoteCreateBatch = `-- name: TimeseriesNoteCreateBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing ` -type UpdateInclSegmentsBatchBatchResults struct { +type TimeseriesNoteCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type UpdateInclSegmentsBatchParams struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` - A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` - A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` - B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` - B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +type 
TimeseriesNoteCreateBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` } -func (q *Queries) UpdateInclSegmentsBatch(ctx context.Context, arg []UpdateInclSegmentsBatchParams) *UpdateInclSegmentsBatchBatchResults { +func (q *Queries) TimeseriesNoteCreateBatch(ctx context.Context, arg []TimeseriesNoteCreateBatchParams) *TimeseriesNoteCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.ID, - a.InstrumentID, - a.DepthTimeseriesID, - a.A0TimeseriesID, - a.A180TimeseriesID, - a.B0TimeseriesID, - a.B180TimeseriesID, + a.TimeseriesID, + a.Time, + a.Masked, + a.Validated, + a.Annotation, } - batch.Queue(updateInclSegmentsBatch, vals...) + batch.Queue(timeseriesNoteCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &UpdateInclSegmentsBatchBatchResults{br, len(arg), false} + return &TimeseriesNoteCreateBatchBatchResults{br, len(arg), false} } -func (b *UpdateInclSegmentsBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesNoteCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1790,45 +2063,47 @@ func (b *UpdateInclSegmentsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *UpdateInclSegmentsBatchBatchResults) Close() error { +func (b *TimeseriesNoteCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const updateIpiOptsBatch = `-- name: UpdateIpiOptsBatch :batchexec -update ipi_opts set - bottom_elevation_timeseries_id = $2, - initial_time = $3 -where instrument_id = $1 +const timeseriesNoteCreateOrUpdateBatch = `-- name: TimeseriesNoteCreateOrUpdateBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = 
excluded.masked, validated = excluded.validated, annotation = excluded.annotation ` -type UpdateIpiOptsBatchBatchResults struct { +type TimeseriesNoteCreateOrUpdateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type UpdateIpiOptsBatchParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime *time.Time `json:"initial_time"` +type TimeseriesNoteCreateOrUpdateBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` } -func (q *Queries) UpdateIpiOptsBatch(ctx context.Context, arg []UpdateIpiOptsBatchParams) *UpdateIpiOptsBatchBatchResults { +func (q *Queries) TimeseriesNoteCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateBatchParams) *TimeseriesNoteCreateOrUpdateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.InstrumentID, - a.BottomElevationTimeseriesID, - a.InitialTime, + a.TimeseriesID, + a.Time, + a.Masked, + a.Validated, + a.Annotation, } - batch.Queue(updateIpiOptsBatch, vals...) + batch.Queue(timeseriesNoteCreateOrUpdateBatch, vals...) 
} br := q.db.SendBatch(ctx, batch) - return &UpdateIpiOptsBatchBatchResults{br, len(arg), false} + return &TimeseriesNoteCreateOrUpdateBatchBatchResults{br, len(arg), false} } -func (b *UpdateIpiOptsBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesNoteCreateOrUpdateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1844,53 +2119,40 @@ func (b *UpdateIpiOptsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *UpdateIpiOptsBatchBatchResults) Close() error { +func (b *TimeseriesNoteCreateOrUpdateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const updateIpiSegmentsBatch = `-- name: UpdateIpiSegmentsBatch :batchexec -update ipi_segment set - length_timeseries_id = $3, - tilt_timeseries_id = $4, - inc_dev_timeseries_id = $5, - temp_timeseries_id = $6 -where id = $1 and instrument_id = $2 +const timeseriesNoteDeleteBatch = `-- name: TimeseriesNoteDeleteBatch :batchexec +delete from timeseries_notes where timeseries_id=$1 and time=$2 ` -type UpdateIpiSegmentsBatchBatchResults struct { +type TimeseriesNoteDeleteBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type UpdateIpiSegmentsBatchParams struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` - TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` - IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` - TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +type TimeseriesNoteDeleteBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` } -func (q *Queries) UpdateIpiSegmentsBatch(ctx context.Context, arg []UpdateIpiSegmentsBatchParams) *UpdateIpiSegmentsBatchBatchResults { +func (q *Queries) TimeseriesNoteDeleteBatch(ctx context.Context, arg []TimeseriesNoteDeleteBatchParams) *TimeseriesNoteDeleteBatchBatchResults { batch := &pgx.Batch{} for _, a 
:= range arg { vals := []interface{}{ - a.ID, - a.InstrumentID, - a.LengthTimeseriesID, - a.TiltTimeseriesID, - a.IncDevTimeseriesID, - a.TempTimeseriesID, + a.TimeseriesID, + a.Time, } - batch.Queue(updateIpiSegmentsBatch, vals...) + batch.Queue(timeseriesNoteDeleteBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &UpdateIpiSegmentsBatchBatchResults{br, len(arg), false} + return &TimeseriesNoteDeleteBatchBatchResults{br, len(arg), false} } -func (b *UpdateIpiSegmentsBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesNoteDeleteBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1906,45 +2168,42 @@ func (b *UpdateIpiSegmentsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *UpdateIpiSegmentsBatchBatchResults) Close() error { +func (b *TimeseriesNoteDeleteBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const updateSaaOptsBatch = `-- name: UpdateSaaOptsBatch :batchexec -update saa_opts set - bottom_elevation_timeseries_id = $2, - initial_time = $3 -where instrument_id = $1 +const timeseriesNoteDeleteRangeBatch = `-- name: TimeseriesNoteDeleteRangeBatch :batchexec +delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3 ` -type UpdateSaaOptsBatchBatchResults struct { +type TimeseriesNoteDeleteRangeBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type UpdateSaaOptsBatchParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime *time.Time `json:"initial_time"` +type TimeseriesNoteDeleteRangeBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` } -func (q *Queries) UpdateSaaOptsBatch(ctx context.Context, arg []UpdateSaaOptsBatchParams) *UpdateSaaOptsBatchBatchResults { +func (q *Queries) TimeseriesNoteDeleteRangeBatch(ctx 
context.Context, arg []TimeseriesNoteDeleteRangeBatchParams) *TimeseriesNoteDeleteRangeBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.InstrumentID, - a.BottomElevationTimeseriesID, - a.InitialTime, + a.TimeseriesID, + a.After, + a.Before, } - batch.Queue(updateSaaOptsBatch, vals...) + batch.Queue(timeseriesNoteDeleteRangeBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &UpdateSaaOptsBatchBatchResults{br, len(arg), false} + return &TimeseriesNoteDeleteRangeBatchBatchResults{br, len(arg), false} } -func (b *UpdateSaaOptsBatchBatchResults) Exec(f func(int, error)) { +func (b *TimeseriesNoteDeleteRangeBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -1960,56 +2219,42 @@ func (b *UpdateSaaOptsBatchBatchResults) Exec(f func(int, error)) { } } -func (b *UpdateSaaOptsBatchBatchResults) Close() error { +func (b *TimeseriesNoteDeleteRangeBatchBatchResults) Close() error { b.closed = true return b.br.Close() } -const updateSaaSegmentBatch = `-- name: UpdateSaaSegmentBatch :batchexec -update saa_segment set - length_timeseries_id = $3, - x_timeseries_id = $4, - y_timeseries_id = $5, - z_timeseries_id = $6, - temp_timeseries_id = $7 -where id = $1 and instrument_id = $2 +const uploaderConfigMappingCreateBatch = `-- name: UploaderConfigMappingCreateBatch :batchexec +insert into uploader_config_mapping (uploader_config_id, field_name, timeseries_id) values ($1, $2, $3) ` -type UpdateSaaSegmentBatchBatchResults struct { +type UploaderConfigMappingCreateBatchBatchResults struct { br pgx.BatchResults tot int closed bool } -type UpdateSaaSegmentBatchParams struct { - ID int32 `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id"` - LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` - XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` - YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` - ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` - TempTimeseriesID 
*uuid.UUID `json:"temp_timeseries_id"` +type UploaderConfigMappingCreateBatchParams struct { + UploaderConfigID uuid.UUID `json:"uploader_config_id"` + FieldName string `json:"field_name"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` } -func (q *Queries) UpdateSaaSegmentBatch(ctx context.Context, arg []UpdateSaaSegmentBatchParams) *UpdateSaaSegmentBatchBatchResults { +func (q *Queries) UploaderConfigMappingCreateBatch(ctx context.Context, arg []UploaderConfigMappingCreateBatchParams) *UploaderConfigMappingCreateBatchBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.ID, - a.InstrumentID, - a.LengthTimeseriesID, - a.XTimeseriesID, - a.YTimeseriesID, - a.ZTimeseriesID, - a.TempTimeseriesID, + a.UploaderConfigID, + a.FieldName, + a.TimeseriesID, } - batch.Queue(updateSaaSegmentBatch, vals...) + batch.Queue(uploaderConfigMappingCreateBatch, vals...) } br := q.db.SendBatch(ctx, batch) - return &UpdateSaaSegmentBatchBatchResults{br, len(arg), false} + return &UploaderConfigMappingCreateBatchBatchResults{br, len(arg), false} } -func (b *UpdateSaaSegmentBatchBatchResults) Exec(f func(int, error)) { +func (b *UploaderConfigMappingCreateBatchBatchResults) Exec(f func(int, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { if b.closed { @@ -2025,7 +2270,7 @@ func (b *UpdateSaaSegmentBatchBatchResults) Exec(f func(int, error)) { } } -func (b *UpdateSaaSegmentBatchBatchResults) Close() error { +func (b *UploaderConfigMappingCreateBatchBatchResults) Close() error { b.closed = true return b.br.Close() } diff --git a/api/internal/db/collection_group.sql_gen.go b/api/internal/db/collection_group.sql_gen.go index 8ecef1d4..72182660 100644 --- a/api/internal/db/collection_group.sql_gen.go +++ b/api/internal/db/collection_group.sql_gen.go @@ -12,57 +12,29 @@ import ( "github.com/google/uuid" ) -const addTimeseriesToCollectionGroup = `-- name: AddTimeseriesToCollectionGroup :exec -insert into collection_group_timeseries 
(collection_group_id, timeseries_id) values ($1, $2) -on conflict on constraint collection_group_unique_timeseries do nothing +const collectionGroupCreate = `-- name: CollectionGroupCreate :one +insert into collection_group (project_id, name, slug, creator, create_date, sort_order) +values ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5) +returning id, project_id, name, slug, creator, create_date, updater, update_date, sort_order ` -type AddTimeseriesToCollectionGroupParams struct { - CollectionGroupID uuid.UUID `json:"collection_group_id"` - TimeseriesID uuid.UUID `json:"timeseries_id"` -} - -func (q *Queries) AddTimeseriesToCollectionGroup(ctx context.Context, arg AddTimeseriesToCollectionGroupParams) error { - _, err := q.db.Exec(ctx, addTimeseriesToCollectionGroup, arg.CollectionGroupID, arg.TimeseriesID) - return err -} - -const createCollectionGroup = `-- name: CreateCollectionGroup :one -insert into collection_group (project_id, name, slug, creator, create_date, updater, update_date) -values ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5, $6) -returning id, project_id, name, slug, creator, create_date, updater, update_date -` - -type CreateCollectionGroupParams struct { - ProjectID uuid.UUID `json:"project_id"` - Column2 string `json:"column_2"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` -} - -type CreateCollectionGroupRow struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Slug string `json:"slug"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` +type CollectionGroupCreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time 
`json:"create_date"` + SortOrder int32 `json:"sort_order"` } -func (q *Queries) CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CreateCollectionGroupRow, error) { - row := q.db.QueryRow(ctx, createCollectionGroup, +func (q *Queries) CollectionGroupCreate(ctx context.Context, arg CollectionGroupCreateParams) (CollectionGroup, error) { + row := q.db.QueryRow(ctx, collectionGroupCreate, arg.ProjectID, - arg.Column2, + arg.Name, arg.Creator, arg.CreateDate, - arg.Updater, - arg.UpdateDate, + arg.SortOrder, ) - var i CreateCollectionGroupRow + var i CollectionGroup err := row.Scan( &i.ID, &i.ProjectID, @@ -72,30 +44,31 @@ func (q *Queries) CreateCollectionGroup(ctx context.Context, arg CreateCollectio &i.CreateDate, &i.Updater, &i.UpdateDate, + &i.SortOrder, ) return i, err } -const deleteCollectionGroup = `-- name: DeleteCollectionGroup :exec +const collectionGroupDelete = `-- name: CollectionGroupDelete :exec delete from collection_group where project_id=$1 and id=$2 ` -type DeleteCollectionGroupParams struct { +type CollectionGroupDeleteParams struct { ProjectID uuid.UUID `json:"project_id"` ID uuid.UUID `json:"id"` } -func (q *Queries) DeleteCollectionGroup(ctx context.Context, arg DeleteCollectionGroupParams) error { - _, err := q.db.Exec(ctx, deleteCollectionGroup, arg.ProjectID, arg.ID) +func (q *Queries) CollectionGroupDelete(ctx context.Context, arg CollectionGroupDeleteParams) error { + _, err := q.db.Exec(ctx, collectionGroupDelete, arg.ProjectID, arg.ID) return err } -const getCollectionGroupDetails = `-- name: GetCollectionGroupDetails :one +const collectionGroupDetailsGet = `-- name: CollectionGroupDetailsGet :one select id, project_id, name, slug, creator, create_date, updater, update_date, sort_order, timeseries from v_collection_group_details where id = $1 ` -func (q *Queries) GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) { - row := q.db.QueryRow(ctx, getCollectionGroupDetails, 
id) +func (q *Queries) CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) { + row := q.db.QueryRow(ctx, collectionGroupDetailsGet, id) var i VCollectionGroupDetail err := row.Scan( &i.ID, @@ -112,41 +85,29 @@ func (q *Queries) GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) ( return i, err } -const listCollectionGroupsForProject = `-- name: ListCollectionGroupsForProject :many -select id, project_id, slug, name, creator, create_date, updater, update_date -from collection_group -where project_id = $1 +const collectionGroupListForProject = `-- name: CollectionGroupListForProject :many +select id, project_id, name, slug, creator, create_date, updater, update_date, sort_order from collection_group where project_id = $1 ` -type ListCollectionGroupsForProjectRow struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Slug string `json:"slug"` - Name string `json:"name"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` -} - -func (q *Queries) ListCollectionGroupsForProject(ctx context.Context, projectID uuid.UUID) ([]ListCollectionGroupsForProjectRow, error) { - rows, err := q.db.Query(ctx, listCollectionGroupsForProject, projectID) +func (q *Queries) CollectionGroupListForProject(ctx context.Context, projectID uuid.UUID) ([]CollectionGroup, error) { + rows, err := q.db.Query(ctx, collectionGroupListForProject, projectID) if err != nil { return nil, err } defer rows.Close() - items := []ListCollectionGroupsForProjectRow{} + items := []CollectionGroup{} for rows.Next() { - var i ListCollectionGroupsForProjectRow + var i CollectionGroup if err := rows.Scan( &i.ID, &i.ProjectID, - &i.Slug, &i.Name, + &i.Slug, &i.Creator, &i.CreateDate, &i.Updater, &i.UpdateDate, + &i.SortOrder, ); err != nil { return nil, err } @@ -158,54 +119,77 @@ func (q *Queries) ListCollectionGroupsForProject(ctx 
context.Context, projectID return items, nil } -const removeTimeseriesFromCollectionGroup = `-- name: RemoveTimeseriesFromCollectionGroup :exec +const collectionGroupTimeseriesCreate = `-- name: CollectionGroupTimeseriesCreate :exec +insert into collection_group_timeseries (collection_group_id, timeseries_id, sort_order) values ($1, $2, $3) +on conflict on constraint collection_group_unique_timeseries do nothing +` + +type CollectionGroupTimeseriesCreateParams struct { + CollectionGroupID uuid.UUID `json:"collection_group_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + SortOrder int32 `json:"sort_order"` +} + +func (q *Queries) CollectionGroupTimeseriesCreate(ctx context.Context, arg CollectionGroupTimeseriesCreateParams) error { + _, err := q.db.Exec(ctx, collectionGroupTimeseriesCreate, arg.CollectionGroupID, arg.TimeseriesID, arg.SortOrder) + return err +} + +const collectionGroupTimeseriesDelete = `-- name: CollectionGroupTimeseriesDelete :exec delete from collection_group_timeseries where collection_group_id=$1 and timeseries_id = $2 ` -type RemoveTimeseriesFromCollectionGroupParams struct { +type CollectionGroupTimeseriesDeleteParams struct { CollectionGroupID uuid.UUID `json:"collection_group_id"` TimeseriesID uuid.UUID `json:"timeseries_id"` } -func (q *Queries) RemoveTimeseriesFromCollectionGroup(ctx context.Context, arg RemoveTimeseriesFromCollectionGroupParams) error { - _, err := q.db.Exec(ctx, removeTimeseriesFromCollectionGroup, arg.CollectionGroupID, arg.TimeseriesID) +func (q *Queries) CollectionGroupTimeseriesDelete(ctx context.Context, arg CollectionGroupTimeseriesDeleteParams) error { + _, err := q.db.Exec(ctx, collectionGroupTimeseriesDelete, arg.CollectionGroupID, arg.TimeseriesID) return err } -const updateCollectionGroup = `-- name: UpdateCollectionGroup :one -update collection_group set name=$3, updater=$4, update_date=$5 -where project_id=$1 and id=$2 -returning id, project_id, name, slug, creator, create_date, updater, update_date 
+const collectionGroupTimeseriesUpdateSortOrder = `-- name: CollectionGroupTimeseriesUpdateSortOrder :exec +update collection_group_timeseries set sort_order=$3 +where collection_group_id=$1 and timeseries_id=$2 ` -type UpdateCollectionGroupParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` +type CollectionGroupTimeseriesUpdateSortOrderParams struct { + CollectionGroupID uuid.UUID `json:"collection_group_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + SortOrder int32 `json:"sort_order"` } -type UpdateCollectionGroupRow struct { - ID uuid.UUID `json:"id"` +func (q *Queries) CollectionGroupTimeseriesUpdateSortOrder(ctx context.Context, arg CollectionGroupTimeseriesUpdateSortOrderParams) error { + _, err := q.db.Exec(ctx, collectionGroupTimeseriesUpdateSortOrder, arg.CollectionGroupID, arg.TimeseriesID, arg.SortOrder) + return err +} + +const collectionGroupUpdate = `-- name: CollectionGroupUpdate :one +update collection_group set name=$3, updater=$4, update_date=$5, sort_order=$6 +where project_id=$1 and id=$2 +returning id, project_id, name, slug, creator, create_date, updater, update_date, sort_order +` + +type CollectionGroupUpdateParams struct { ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` Name string `json:"name"` - Slug string `json:"slug"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` Updater *uuid.UUID `json:"updater"` UpdateDate *time.Time `json:"update_date"` + SortOrder int32 `json:"sort_order"` } -func (q *Queries) UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (UpdateCollectionGroupRow, error) { - row := q.db.QueryRow(ctx, updateCollectionGroup, +func (q *Queries) CollectionGroupUpdate(ctx context.Context, arg CollectionGroupUpdateParams) (CollectionGroup, error) { + row := q.db.QueryRow(ctx, collectionGroupUpdate, 
arg.ProjectID, arg.ID, arg.Name, arg.Updater, arg.UpdateDate, + arg.SortOrder, ) - var i UpdateCollectionGroupRow + var i CollectionGroup err := row.Scan( &i.ID, &i.ProjectID, @@ -215,6 +199,7 @@ func (q *Queries) UpdateCollectionGroup(ctx context.Context, arg UpdateCollectio &i.CreateDate, &i.Updater, &i.UpdateDate, + &i.SortOrder, ) return i, err } diff --git a/api/internal/db/datalogger.sql_gen.go b/api/internal/db/datalogger.sql_gen.go index cb84cd77..940ad75d 100644 --- a/api/internal/db/datalogger.sql_gen.go +++ b/api/internal/db/datalogger.sql_gen.go @@ -12,13 +12,13 @@ import ( "github.com/google/uuid" ) -const createDatalogger = `-- name: CreateDatalogger :one +const dataloggerCreate = `-- name: DataloggerCreate :one insert into datalogger (name, sn, project_id, creator, updater, slug, model_id) values ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) returning id ` -type CreateDataloggerParams struct { +type DataloggerCreateParams struct { Name string `json:"name"` Sn string `json:"sn"` ProjectID uuid.UUID `json:"project_id"` @@ -26,8 +26,8 @@ type CreateDataloggerParams struct { ModelID uuid.UUID `json:"model_id"` } -func (q *Queries) CreateDatalogger(ctx context.Context, arg CreateDataloggerParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, createDatalogger, +func (q *Queries) DataloggerCreate(ctx context.Context, arg DataloggerCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, dataloggerCreate, arg.Name, arg.Sn, arg.ProjectID, @@ -39,50 +39,27 @@ func (q *Queries) CreateDatalogger(ctx context.Context, arg CreateDataloggerPara return id, err } -const createDataloggerHash = `-- name: CreateDataloggerHash :exec -insert into datalogger_hash (datalogger_id, "hash") values ($1, $2) +const dataloggerDelete = `-- name: DataloggerDelete :exec +update datalogger set deleted=true, updater=$2, update_date=$3 where id=$1 ` -type CreateDataloggerHashParams struct { - DataloggerID uuid.UUID `json:"datalogger_id"` - Hash string `json:"hash"` -} - 
-func (q *Queries) CreateDataloggerHash(ctx context.Context, arg CreateDataloggerHashParams) error { - _, err := q.db.Exec(ctx, createDataloggerHash, arg.DataloggerID, arg.Hash) - return err -} - -const deleteDatalogger = `-- name: DeleteDatalogger :exec -update datalogger set deleted = true, updater = $2, update_date = $3 where id = $1 -` - -type DeleteDataloggerParams struct { +type DataloggerDeleteParams struct { ID uuid.UUID `json:"id"` Updater uuid.UUID `json:"updater"` UpdateDate time.Time `json:"update_date"` } -func (q *Queries) DeleteDatalogger(ctx context.Context, arg DeleteDataloggerParams) error { - _, err := q.db.Exec(ctx, deleteDatalogger, arg.ID, arg.Updater, arg.UpdateDate) +func (q *Queries) DataloggerDelete(ctx context.Context, arg DataloggerDeleteParams) error { + _, err := q.db.Exec(ctx, dataloggerDelete, arg.ID, arg.Updater, arg.UpdateDate) return err } -const deleteDataloggerTable = `-- name: DeleteDataloggerTable :exec -delete from datalogger_table where id = $1 +const dataloggerGet = `-- name: DataloggerGet :one +select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where id=$1 ` -func (q *Queries) DeleteDataloggerTable(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteDataloggerTable, id) - return err -} - -const getDatalogger = `-- name: GetDatalogger :one -select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where id = $1 -` - -func (q *Queries) GetDatalogger(ctx context.Context, id uuid.UUID) (VDatalogger, error) { - row := q.db.QueryRow(ctx, getDatalogger, id) +func (q *Queries) DataloggerGet(ctx context.Context, id uuid.UUID) (VDatalogger, error) { + row := q.db.QueryRow(ctx, dataloggerGet, id) var i VDatalogger err := row.Scan( &i.ID, @@ -104,73 +81,78 @@ func (q *Queries) 
GetDatalogger(ctx context.Context, id uuid.UUID) (VDatalogger, return i, err } -const getDataloggerIsActive = `-- name: GetDataloggerIsActive :one -select exists (select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where model = $1 and sn = $2)::int +const dataloggerGetActive = `-- name: DataloggerGetActive :one +select exists (select true from v_datalogger where model=$1 and sn=$2) ` -type GetDataloggerIsActiveParams struct { +type DataloggerGetActiveParams struct { Model *string `json:"model"` Sn string `json:"sn"` } -func (q *Queries) GetDataloggerIsActive(ctx context.Context, arg GetDataloggerIsActiveParams) (int32, error) { - row := q.db.QueryRow(ctx, getDataloggerIsActive, arg.Model, arg.Sn) - var column_1 int32 +func (q *Queries) DataloggerGetActive(ctx context.Context, arg DataloggerGetActiveParams) (bool, error) { + row := q.db.QueryRow(ctx, dataloggerGetActive, arg.Model, arg.Sn) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const dataloggerGetExists = `-- name: DataloggerGetExists :one +select true from v_datalogger where id=$1 +` + +func (q *Queries) DataloggerGetExists(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, dataloggerGetExists, id) + var column_1 bool err := row.Scan(&column_1) return column_1, err } -const getDataloggerModelName = `-- name: GetDataloggerModelName :one -select model from datalogger_model where id = $1 +const dataloggerGetModelName = `-- name: DataloggerGetModelName :one +select model from datalogger_model where id=$1 ` -func (q *Queries) GetDataloggerModelName(ctx context.Context, id uuid.UUID) (*string, error) { - row := q.db.QueryRow(ctx, getDataloggerModelName, id) +func (q *Queries) DataloggerGetModelName(ctx context.Context, id uuid.UUID) (*string, error) { + row := q.db.QueryRow(ctx, dataloggerGetModelName, id) var model *string err := 
row.Scan(&model) return model, err } -const getDataloggerTablePreview = `-- name: GetDataloggerTablePreview :one -select datalogger_table_id, preview, update_date from v_datalogger_preview where datalogger_table_id = $1 limit 1 +const dataloggerHashCreate = `-- name: DataloggerHashCreate :exec +insert into datalogger_hash (datalogger_id, "hash") values ($1, $2) ` -func (q *Queries) GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) { - row := q.db.QueryRow(ctx, getDataloggerTablePreview, dataloggerTableID) - var i VDataloggerPreview - err := row.Scan(&i.DataloggerTableID, &i.Preview, &i.UpdateDate) - return i, err +type DataloggerHashCreateParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + Hash string `json:"hash"` } -const getOrCreateDataloggerTable = `-- name: GetOrCreateDataloggerTable :one -with new_datalogger_table as ( - insert into datalogger_table (datalogger_id, table_name) values ($1, $2) - on conflict on constraint datalogger_table_datalogger_id_table_name_key do nothing - returning id -) -select ndt.id from new_datalogger_table ndt -union -select sdt.id from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +func (q *Queries) DataloggerHashCreate(ctx context.Context, arg DataloggerHashCreateParams) error { + _, err := q.db.Exec(ctx, dataloggerHashCreate, arg.DataloggerID, arg.Hash) + return err +} + +const dataloggerHashUpdate = `-- name: DataloggerHashUpdate :exec +update datalogger_hash set "hash"=$2 where datalogger_id=$1 ` -type GetOrCreateDataloggerTableParams struct { +type DataloggerHashUpdateParams struct { DataloggerID uuid.UUID `json:"datalogger_id"` - TableName string `json:"table_name"` + Hash string `json:"hash"` } -func (q *Queries) GetOrCreateDataloggerTable(ctx context.Context, arg GetOrCreateDataloggerTableParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, getOrCreateDataloggerTable, arg.DataloggerID, arg.TableName) - var id uuid.UUID - err 
:= row.Scan(&id) - return id, err +func (q *Queries) DataloggerHashUpdate(ctx context.Context, arg DataloggerHashUpdateParams) error { + _, err := q.db.Exec(ctx, dataloggerHashUpdate, arg.DataloggerID, arg.Hash) + return err } -const listAllDataloggers = `-- name: ListAllDataloggers :many +const dataloggerList = `-- name: DataloggerList :many select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger ` -func (q *Queries) ListAllDataloggers(ctx context.Context) ([]VDatalogger, error) { - rows, err := q.db.Query(ctx, listAllDataloggers) +func (q *Queries) DataloggerList(ctx context.Context) ([]VDatalogger, error) { + rows, err := q.db.Query(ctx, dataloggerList) if err != nil { return nil, err } @@ -205,12 +187,12 @@ func (q *Queries) ListAllDataloggers(ctx context.Context) ([]VDatalogger, error) return items, nil } -const listDataloggersForProject = `-- name: ListDataloggersForProject :many -select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where project_id = $1 +const dataloggerListForProject = `-- name: DataloggerListForProject :many +select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where project_id=$1 ` -func (q *Queries) ListDataloggersForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) { - rows, err := q.db.Query(ctx, listDataloggersForProject, projectID) +func (q *Queries) DataloggerListForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) { + rows, err := q.db.Query(ctx, dataloggerListForProject, projectID) if err != nil { return nil, err } @@ -245,51 +227,85 @@ func (q *Queries) ListDataloggersForProject(ctx context.Context, projectID uuid. 
return items, nil } -const renameEmptyDataloggerTableName = `-- name: RenameEmptyDataloggerTableName :exec -update datalogger_table dt -set table_name = $2 -where dt.table_name = '' and dt.datalogger_id = $1 -and not exists ( - select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +const dataloggerTableDelete = `-- name: DataloggerTableDelete :exec +delete from datalogger_table where id=$1 +` + +func (q *Queries) DataloggerTableDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, dataloggerTableDelete, id) + return err +} + +const dataloggerTableGetOrCreate = `-- name: DataloggerTableGetOrCreate :one +with new_datalogger_table as ( + insert into datalogger_table (datalogger_id, table_name) values ($1, $2) + on conflict on constraint datalogger_table_datalogger_id_table_name_key do nothing + returning id ) +select ndt.id from new_datalogger_table ndt +union +select sdt.id from datalogger_table sdt where sdt.datalogger_id=$1 and sdt.table_name=$2 ` -type RenameEmptyDataloggerTableNameParams struct { +type DataloggerTableGetOrCreateParams struct { DataloggerID uuid.UUID `json:"datalogger_id"` TableName string `json:"table_name"` } -func (q *Queries) RenameEmptyDataloggerTableName(ctx context.Context, arg RenameEmptyDataloggerTableNameParams) error { - _, err := q.db.Exec(ctx, renameEmptyDataloggerTableName, arg.DataloggerID, arg.TableName) - return err +func (q *Queries) DataloggerTableGetOrCreate(ctx context.Context, arg DataloggerTableGetOrCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, dataloggerTableGetOrCreate, arg.DataloggerID, arg.TableName) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const dataloggerTablePreviewGet = `-- name: DataloggerTablePreviewGet :one +select datalogger_table_id, preview, update_date from v_datalogger_preview where datalogger_table_id=$1 limit 1 +` + +func (q *Queries) DataloggerTablePreviewGet(ctx context.Context, dataloggerTableID uuid.UUID) 
(VDataloggerPreview, error) { + row := q.db.QueryRow(ctx, dataloggerTablePreviewGet, dataloggerTableID) + var i VDataloggerPreview + err := row.Scan(&i.DataloggerTableID, &i.Preview, &i.UpdateDate) + return i, err } -const resetDataloggerTableName = `-- name: ResetDataloggerTableName :exec -update datalogger_table set table_name = '' where id = $1 +const dataloggerTableUpdateNameIfEmpty = `-- name: DataloggerTableUpdateNameIfEmpty :exec +update datalogger_table dt +set table_name=$2 +where dt.table_name='' and dt.datalogger_id=$1 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id=$1 and sdt.table_name=$2 +) ` -func (q *Queries) ResetDataloggerTableName(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, resetDataloggerTableName, id) +type DataloggerTableUpdateNameIfEmptyParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` +} + +func (q *Queries) DataloggerTableUpdateNameIfEmpty(ctx context.Context, arg DataloggerTableUpdateNameIfEmptyParams) error { + _, err := q.db.Exec(ctx, dataloggerTableUpdateNameIfEmpty, arg.DataloggerID, arg.TableName) return err } -const updateDatalogger = `-- name: UpdateDatalogger :exec +const dataloggerUpdate = `-- name: DataloggerUpdate :exec update datalogger set - name = $2, - updater = $3, - update_date = $4 -where id = $1 + name=$2, + updater=$3, + update_date=$4 +where id=$1 ` -type UpdateDataloggerParams struct { +type DataloggerUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Updater uuid.UUID `json:"updater"` UpdateDate time.Time `json:"update_date"` } -func (q *Queries) UpdateDatalogger(ctx context.Context, arg UpdateDataloggerParams) error { - _, err := q.db.Exec(ctx, updateDatalogger, +func (q *Queries) DataloggerUpdate(ctx context.Context, arg DataloggerUpdateParams) error { + _, err := q.db.Exec(ctx, dataloggerUpdate, arg.ID, arg.Name, arg.Updater, @@ -298,42 +314,26 @@ func (q *Queries) 
UpdateDatalogger(ctx context.Context, arg UpdateDataloggerPara return err } -const updateDataloggerHash = `-- name: UpdateDataloggerHash :exec -update datalogger_hash set "hash" = $2 where datalogger_id = $1 +const dataloggerUpdateTableNameBlank = `-- name: DataloggerUpdateTableNameBlank :exec +update datalogger_table set table_name='' where id=$1 ` -type UpdateDataloggerHashParams struct { - DataloggerID uuid.UUID `json:"datalogger_id"` - Hash string `json:"hash"` -} - -func (q *Queries) UpdateDataloggerHash(ctx context.Context, arg UpdateDataloggerHashParams) error { - _, err := q.db.Exec(ctx, updateDataloggerHash, arg.DataloggerID, arg.Hash) +func (q *Queries) DataloggerUpdateTableNameBlank(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, dataloggerUpdateTableNameBlank, id) return err } -const updateDataloggerUpdater = `-- name: UpdateDataloggerUpdater :exec -update datalogger set updater = $2, update_date = $3 where id = $1 +const dataloggerUpdateUpdater = `-- name: DataloggerUpdateUpdater :exec +update datalogger set updater=$2, update_date=$3 where id=$1 ` -type UpdateDataloggerUpdaterParams struct { +type DataloggerUpdateUpdaterParams struct { ID uuid.UUID `json:"id"` Updater uuid.UUID `json:"updater"` UpdateDate time.Time `json:"update_date"` } -func (q *Queries) UpdateDataloggerUpdater(ctx context.Context, arg UpdateDataloggerUpdaterParams) error { - _, err := q.db.Exec(ctx, updateDataloggerUpdater, arg.ID, arg.Updater, arg.UpdateDate) +func (q *Queries) DataloggerUpdateUpdater(ctx context.Context, arg DataloggerUpdateUpdaterParams) error { + _, err := q.db.Exec(ctx, dataloggerUpdateUpdater, arg.ID, arg.Updater, arg.UpdateDate) return err } - -const verifyDataloggerExists = `-- name: VerifyDataloggerExists :one -select true from v_datalogger where id = $1 -` - -func (q *Queries) VerifyDataloggerExists(ctx context.Context, id uuid.UUID) (bool, error) { - row := q.db.QueryRow(ctx, verifyDataloggerExists, id) - var column_1 bool - err := 
row.Scan(&column_1) - return column_1, err -} diff --git a/api/internal/db/datalogger_telemetry.sql_gen.go b/api/internal/db/datalogger_telemetry.sql_gen.go index 5a06791c..8426456e 100644 --- a/api/internal/db/datalogger_telemetry.sql_gen.go +++ b/api/internal/db/datalogger_telemetry.sql_gen.go @@ -12,7 +12,7 @@ import ( "github.com/google/uuid" ) -const createDataloggerError = `-- name: CreateDataloggerError :exec +const dataloggerErrorCreate = `-- name: DataloggerErrorCreate :exec insert into datalogger_error (datalogger_table_id, error_message) select dt.id, $3 from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2 @@ -21,60 +21,45 @@ and not exists ( ) ` -type CreateDataloggerErrorParams struct { +type DataloggerErrorCreateParams struct { DataloggerID uuid.UUID `json:"datalogger_id"` TableName string `json:"table_name"` ErrorMessage *string `json:"error_message"` } -func (q *Queries) CreateDataloggerError(ctx context.Context, arg CreateDataloggerErrorParams) error { - _, err := q.db.Exec(ctx, createDataloggerError, arg.DataloggerID, arg.TableName, arg.ErrorMessage) +func (q *Queries) DataloggerErrorCreate(ctx context.Context, arg DataloggerErrorCreateParams) error { + _, err := q.db.Exec(ctx, dataloggerErrorCreate, arg.DataloggerID, arg.TableName, arg.ErrorMessage) return err } -const createDataloggerTablePreview = `-- name: CreateDataloggerTablePreview :exec -insert into datalogger_preview (datalogger_table_id, preview, update_date) values ($1, $2, $3) -` - -type CreateDataloggerTablePreviewParams struct { - DataloggerTableID uuid.UUID `json:"datalogger_table_id"` - Preview []byte `json:"preview"` - UpdateDate time.Time `json:"update_date"` -} - -func (q *Queries) CreateDataloggerTablePreview(ctx context.Context, arg CreateDataloggerTablePreviewParams) error { - _, err := q.db.Exec(ctx, createDataloggerTablePreview, arg.DataloggerTableID, arg.Preview, arg.UpdateDate) - return err -} - -const deleteDataloggerTableError = `-- name: 
DeleteDataloggerTableError :exec +const dataloggerErrorDelete = `-- name: DataloggerErrorDelete :exec delete from datalogger_error where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) ` -type DeleteDataloggerTableErrorParams struct { +type DataloggerErrorDeleteParams struct { DataloggerID uuid.UUID `json:"datalogger_id"` TableName string `json:"table_name"` } -func (q *Queries) DeleteDataloggerTableError(ctx context.Context, arg DeleteDataloggerTableErrorParams) error { - _, err := q.db.Exec(ctx, deleteDataloggerTableError, arg.DataloggerID, arg.TableName) +func (q *Queries) DataloggerErrorDelete(ctx context.Context, arg DataloggerErrorDeleteParams) error { + _, err := q.db.Exec(ctx, dataloggerErrorDelete, arg.DataloggerID, arg.TableName) return err } -const getDataloggerByModelSN = `-- name: GetDataloggerByModelSN :one +const dataloggerGetForModelSn = `-- name: DataloggerGetForModelSn :one select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where model = $1 and sn = $2 limit 1 ` -type GetDataloggerByModelSNParams struct { +type DataloggerGetForModelSnParams struct { Model *string `json:"model"` Sn string `json:"sn"` } -func (q *Queries) GetDataloggerByModelSN(ctx context.Context, arg GetDataloggerByModelSNParams) (VDatalogger, error) { - row := q.db.QueryRow(ctx, getDataloggerByModelSN, arg.Model, arg.Sn) +func (q *Queries) DataloggerGetForModelSn(ctx context.Context, arg DataloggerGetForModelSnParams) (VDatalogger, error) { + row := q.db.QueryRow(ctx, dataloggerGetForModelSn, arg.Model, arg.Sn) var i VDatalogger err := row.Scan( &i.ID, @@ -96,38 +81,53 @@ func (q *Queries) GetDataloggerByModelSN(ctx context.Context, arg GetDataloggerB return i, err } -const getDataloggerHashByModelSN = `-- name: GetDataloggerHashByModelSN :one +const dataloggerHashGetForModelSn = `-- name: 
DataloggerHashGetForModelSn :one select "hash" from v_datalogger_hash where model = $1 and sn = $2 limit 1 ` -type GetDataloggerHashByModelSNParams struct { +type DataloggerHashGetForModelSnParams struct { Model *string `json:"model"` Sn string `json:"sn"` } -func (q *Queries) GetDataloggerHashByModelSN(ctx context.Context, arg GetDataloggerHashByModelSNParams) (string, error) { - row := q.db.QueryRow(ctx, getDataloggerHashByModelSN, arg.Model, arg.Sn) +func (q *Queries) DataloggerHashGetForModelSn(ctx context.Context, arg DataloggerHashGetForModelSnParams) (string, error) { + row := q.db.QueryRow(ctx, dataloggerHashGetForModelSn, arg.Model, arg.Sn) var hash string err := row.Scan(&hash) return hash, err } -const updateDataloggerTablePreview = `-- name: UpdateDataloggerTablePreview :exec +const dataloggerTablePreviewCreate = `-- name: DataloggerTablePreviewCreate :exec +insert into datalogger_preview (datalogger_table_id, preview, update_date) values ($1, $2, $3) +` + +type DataloggerTablePreviewCreateParams struct { + DataloggerTableID uuid.UUID `json:"datalogger_table_id"` + Preview []byte `json:"preview"` + UpdateDate time.Time `json:"update_date"` +} + +func (q *Queries) DataloggerTablePreviewCreate(ctx context.Context, arg DataloggerTablePreviewCreateParams) error { + _, err := q.db.Exec(ctx, dataloggerTablePreviewCreate, arg.DataloggerTableID, arg.Preview, arg.UpdateDate) + return err +} + +const dataloggerTablePreviewUpdate = `-- name: DataloggerTablePreviewUpdate :exec update datalogger_preview set preview = $3, update_date = $4 where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) ` -type UpdateDataloggerTablePreviewParams struct { +type DataloggerTablePreviewUpdateParams struct { DataloggerID uuid.UUID `json:"datalogger_id"` TableName string `json:"table_name"` Preview []byte `json:"preview"` UpdateDate time.Time `json:"update_date"` } -func (q *Queries) UpdateDataloggerTablePreview(ctx 
context.Context, arg UpdateDataloggerTablePreviewParams) error { - _, err := q.db.Exec(ctx, updateDataloggerTablePreview, +func (q *Queries) DataloggerTablePreviewUpdate(ctx context.Context, arg DataloggerTablePreviewUpdateParams) error { + _, err := q.db.Exec(ctx, dataloggerTablePreviewUpdate, arg.DataloggerID, arg.TableName, arg.Preview, diff --git a/api/internal/db/district_rollup.sql_gen.go b/api/internal/db/district_rollup.sql_gen.go index c45d3c4d..aab2a70c 100644 --- a/api/internal/db/district_rollup.sql_gen.go +++ b/api/internal/db/district_rollup.sql_gen.go @@ -12,22 +12,22 @@ import ( "github.com/google/uuid" ) -const listEvaluationDistrictRollupsForProjectAlertConfig = `-- name: ListEvaluationDistrictRollupsForProjectAlertConfig :many +const districtRollupListEvaluationForProjectAlertConfig = `-- name: DistrictRollupListEvaluationForProjectAlertConfig :many select alert_type_id, office_id, district_initials, project_name, project_id, the_month, expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup where alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid -and project_id = $1 +and project_id=$1 and the_month >= date_trunc('month', $2::timestamptz) and the_month <= date_trunc('month', $3::timestamptz) ` -type ListEvaluationDistrictRollupsForProjectAlertConfigParams struct { - ProjectID uuid.UUID `json:"project_id"` - Column2 time.Time `json:"column_2"` - Column3 time.Time `json:"column_3"` +type DistrictRollupListEvaluationForProjectAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + StartMonthTime time.Time `json:"start_month_time"` + EndMonthTime time.Time `json:"end_month_time"` } -func (q *Queries) ListEvaluationDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListEvaluationDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) { - rows, err := q.db.Query(ctx, listEvaluationDistrictRollupsForProjectAlertConfig, 
arg.ProjectID, arg.Column2, arg.Column3) +func (q *Queries) DistrictRollupListEvaluationForProjectAlertConfig(ctx context.Context, arg DistrictRollupListEvaluationForProjectAlertConfigParams) ([]VDistrictRollup, error) { + rows, err := q.db.Query(ctx, districtRollupListEvaluationForProjectAlertConfig, arg.ProjectID, arg.StartMonthTime, arg.EndMonthTime) if err != nil { return nil, err } @@ -58,22 +58,22 @@ func (q *Queries) ListEvaluationDistrictRollupsForProjectAlertConfig(ctx context return items, nil } -const listMeasurementDistrictRollupsForProjectAlertConfig = `-- name: ListMeasurementDistrictRollupsForProjectAlertConfig :many +const districtRollupListMeasurementForProjectAlertConfig = `-- name: DistrictRollupListMeasurementForProjectAlertConfig :many select alert_type_id, office_id, district_initials, project_name, project_id, the_month, expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup where alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::uuid -and project_id = $1 +and project_id=$1 and the_month >= date_trunc('month', $2::timestamptz) and the_month <= date_trunc('month', $3::timestamptz) ` -type ListMeasurementDistrictRollupsForProjectAlertConfigParams struct { - ProjectID uuid.UUID `json:"project_id"` - Column2 time.Time `json:"column_2"` - Column3 time.Time `json:"column_3"` +type DistrictRollupListMeasurementForProjectAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + StartMonthTime time.Time `json:"start_month_time"` + EndMonthTime time.Time `json:"end_month_time"` } -func (q *Queries) ListMeasurementDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListMeasurementDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) { - rows, err := q.db.Query(ctx, listMeasurementDistrictRollupsForProjectAlertConfig, arg.ProjectID, arg.Column2, arg.Column3) +func (q *Queries) DistrictRollupListMeasurementForProjectAlertConfig(ctx 
context.Context, arg DistrictRollupListMeasurementForProjectAlertConfigParams) ([]VDistrictRollup, error) { + rows, err := q.db.Query(ctx, districtRollupListMeasurementForProjectAlertConfig, arg.ProjectID, arg.StartMonthTime, arg.EndMonthTime) if err != nil { return nil, err } diff --git a/api/internal/db/domain.sql_gen.go b/api/internal/db/domain.sql_gen.go new file mode 100644 index 00000000..382dce78 --- /dev/null +++ b/api/internal/db/domain.sql_gen.go @@ -0,0 +1,99 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: domain.sql + +package db + +import ( + "context" +) + +const domainGroupList = `-- name: DomainGroupList :many +select "group", opts from v_domain_group +` + +func (q *Queries) DomainGroupList(ctx context.Context) ([]VDomainGroup, error) { + rows, err := q.db.Query(ctx, domainGroupList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDomainGroup{} + for rows.Next() { + var i VDomainGroup + if err := rows.Scan(&i.Group, &i.Opts); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const domainList = `-- name: DomainList :many +select id, "group", value, description from v_domain +` + +func (q *Queries) DomainList(ctx context.Context) ([]VDomain, error) { + rows, err := q.db.Query(ctx, domainList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDomain{} + for rows.Next() { + var i VDomain + if err := rows.Scan( + &i.ID, + &i.Group, + &i.Value, + &i.Description, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const pgTimezoneNamesList = `-- name: PgTimezoneNamesList :many +select name, abbrev, utc_offset::text, is_dst from pg_catalog.pg_timezone_names +` + +type PgTimezoneNamesListRow struct { + Name *string `json:"name"` + Abbrev *string `json:"abbrev"` + 
UtcOffset string `json:"utc_offset"` + IsDst *bool `json:"is_dst"` +} + +func (q *Queries) PgTimezoneNamesList(ctx context.Context) ([]PgTimezoneNamesListRow, error) { + rows, err := q.db.Query(ctx, pgTimezoneNamesList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []PgTimezoneNamesListRow{} + for rows.Next() { + var i PgTimezoneNamesListRow + if err := rows.Scan( + &i.Name, + &i.Abbrev, + &i.UtcOffset, + &i.IsDst, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/domains.sql_gen.go b/api/internal/db/domains.sql_gen.go deleted file mode 100644 index b84d2c99..00000000 --- a/api/internal/db/domains.sql_gen.go +++ /dev/null @@ -1,63 +0,0 @@ -// Code generated by sqlc. DO NOT EDIT. -// versions: -// sqlc v1.27.0 -// source: domains.sql - -package db - -import ( - "context" -) - -const listDomainGroups = `-- name: ListDomainGroups :many -select "group", opts from v_domain_group -` - -func (q *Queries) ListDomainGroups(ctx context.Context) ([]VDomainGroup, error) { - rows, err := q.db.Query(ctx, listDomainGroups) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VDomainGroup{} - for rows.Next() { - var i VDomainGroup - if err := rows.Scan(&i.Group, &i.Opts); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listDomains = `-- name: ListDomains :many -select id, "group", value, description from v_domain -` - -func (q *Queries) ListDomains(ctx context.Context) ([]VDomain, error) { - rows, err := q.db.Query(ctx, listDomains) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VDomain{} - for rows.Next() { - var i VDomain - if err := rows.Scan( - &i.ID, - &i.Group, - &i.Value, - &i.Description, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if 
err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} diff --git a/api/internal/db/equivalency_table.sql_gen.go b/api/internal/db/equivalency_table.sql_gen.go index e477f735..7f74b979 100644 --- a/api/internal/db/equivalency_table.sql_gen.go +++ b/api/internal/db/equivalency_table.sql_gen.go @@ -11,7 +11,18 @@ import ( "github.com/google/uuid" ) -const createOrUpdateEquivalencyTableRow = `-- name: CreateOrUpdateEquivalencyTableRow :exec +const dataloggerTableGetIsValid = `-- name: DataloggerTableGetIsValid :one +select not exists (select id, datalogger_id, table_name from datalogger_table where id = $1 and table_name = 'preparse') +` + +func (q *Queries) DataloggerTableGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, dataloggerTableGetIsValid, id) + var not_exists bool + err := row.Scan(¬_exists) + return not_exists, err +} + +const equivalencyTableCreateOrUpdate = `-- name: EquivalencyTableCreateOrUpdate :exec insert into datalogger_equivalency_table (datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) values ($1, $2, $3, $4, $5, $6) @@ -19,7 +30,7 @@ on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id ` -type CreateOrUpdateEquivalencyTableRowParams struct { +type EquivalencyTableCreateOrUpdateParams struct { DataloggerID uuid.UUID `json:"datalogger_id"` DataloggerTableID *uuid.UUID `json:"datalogger_table_id"` FieldName string `json:"field_name"` @@ -28,8 +39,8 @@ type CreateOrUpdateEquivalencyTableRowParams struct { TimeseriesID *uuid.UUID `json:"timeseries_id"` } -func (q *Queries) CreateOrUpdateEquivalencyTableRow(ctx context.Context, arg CreateOrUpdateEquivalencyTableRowParams) error { - _, err := q.db.Exec(ctx, createOrUpdateEquivalencyTableRow, +func (q *Queries) EquivalencyTableCreateOrUpdate(ctx 
context.Context, arg EquivalencyTableCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, equivalencyTableCreateOrUpdate, arg.DataloggerID, arg.DataloggerTableID, arg.FieldName, @@ -40,25 +51,25 @@ func (q *Queries) CreateOrUpdateEquivalencyTableRow(ctx context.Context, arg Cre return err } -const deleteEquivalencyTable = `-- name: DeleteEquivalencyTable :exec -delete from datalogger_equivalency_table where datalogger_table_id = $1 +const equivalencyTableDelete = `-- name: EquivalencyTableDelete :exec +delete from datalogger_equivalency_table where id = $1 ` -func (q *Queries) DeleteEquivalencyTable(ctx context.Context, dataloggerTableID *uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteEquivalencyTable, dataloggerTableID) +func (q *Queries) EquivalencyTableDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, equivalencyTableDelete, id) return err } -const deleteEquivalencyTableRow = `-- name: DeleteEquivalencyTableRow :exec -delete from datalogger_equivalency_table where id = $1 +const equivalencyTableDeleteForDataloggerTable = `-- name: EquivalencyTableDeleteForDataloggerTable :exec +delete from datalogger_equivalency_table where datalogger_table_id = $1 ` -func (q *Queries) DeleteEquivalencyTableRow(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteEquivalencyTableRow, id) +func (q *Queries) EquivalencyTableDeleteForDataloggerTable(ctx context.Context, dataloggerTableID *uuid.UUID) error { + _, err := q.db.Exec(ctx, equivalencyTableDeleteForDataloggerTable, dataloggerTableID) return err } -const getEquivalencyTable = `-- name: GetEquivalencyTable :one +const equivalencyTableGet = `-- name: EquivalencyTableGet :one select datalogger_id, datalogger_table_id, @@ -68,8 +79,8 @@ from v_datalogger_equivalency_table where datalogger_table_id = $1 ` -func (q *Queries) GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) { - row := q.db.QueryRow(ctx, 
getEquivalencyTable, dataloggerTableID) +func (q *Queries) EquivalencyTableGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) { + row := q.db.QueryRow(ctx, equivalencyTableGet, dataloggerTableID) var i VDataloggerEquivalencyTable err := row.Scan( &i.DataloggerID, @@ -80,20 +91,7 @@ func (q *Queries) GetEquivalencyTable(ctx context.Context, dataloggerTableID uui return i, err } -const getIsValidDataloggerTable = `-- name: GetIsValidDataloggerTable :one -select not exists ( - select id, datalogger_id, table_name from datalogger_table where id = $1 and table_name = 'preparse' -) -` - -func (q *Queries) GetIsValidDataloggerTable(ctx context.Context, id uuid.UUID) (bool, error) { - row := q.db.QueryRow(ctx, getIsValidDataloggerTable, id) - var not_exists bool - err := row.Scan(¬_exists) - return not_exists, err -} - -const getIsValidEquivalencyTableTimeseries = `-- name: GetIsValidEquivalencyTableTimeseries :one +const equivalencyTableTimeseriesGetIsValid = `-- name: EquivalencyTableTimeseriesGetIsValid :one select not exists ( select id from v_timeseries_computed where id = $1 @@ -103,14 +101,14 @@ select not exists ( ) ` -func (q *Queries) GetIsValidEquivalencyTableTimeseries(ctx context.Context, id uuid.UUID) (bool, error) { - row := q.db.QueryRow(ctx, getIsValidEquivalencyTableTimeseries, id) +func (q *Queries) EquivalencyTableTimeseriesGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, equivalencyTableTimeseriesGetIsValid, id) var not_exists bool err := row.Scan(¬_exists) return not_exists, err } -const updateEquivalencyTableRow = `-- name: UpdateEquivalencyTableRow :exec +const equivalencyTableUpdate = `-- name: EquivalencyTableUpdate :exec update datalogger_equivalency_table set field_name = $2, display_name = $3, @@ -119,7 +117,7 @@ update datalogger_equivalency_table set where id = $1 ` -type UpdateEquivalencyTableRowParams struct { +type EquivalencyTableUpdateParams struct { ID 
uuid.UUID `json:"id"` FieldName string `json:"field_name"` DisplayName *string `json:"display_name"` @@ -127,8 +125,8 @@ type UpdateEquivalencyTableRowParams struct { TimeseriesID *uuid.UUID `json:"timeseries_id"` } -func (q *Queries) UpdateEquivalencyTableRow(ctx context.Context, arg UpdateEquivalencyTableRowParams) error { - _, err := q.db.Exec(ctx, updateEquivalencyTableRow, +func (q *Queries) EquivalencyTableUpdate(ctx context.Context, arg EquivalencyTableUpdateParams) error { + _, err := q.db.Exec(ctx, equivalencyTableUpdate, arg.ID, arg.FieldName, arg.DisplayName, diff --git a/api/internal/db/evaluation.sql_gen.go b/api/internal/db/evaluation.sql_gen.go index c204d78f..0e469eb2 100644 --- a/api/internal/db/evaluation.sql_gen.go +++ b/api/internal/db/evaluation.sql_gen.go @@ -12,47 +12,7 @@ import ( "github.com/google/uuid" ) -const completeEvaluationSubmittal = `-- name: CompleteEvaluationSubmittal :one -update submittal sub1 set - submittal_status_id = sq.submittal_status_id, - completion_date = now() -from ( - select - sub2.id as submittal_id, - case - -- if completed before due date, mark submittal as green id - when now() <= sub2.due_date then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid - -- if completed after due date, mark as yellow - else 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::uuid - end as submittal_status_id - from submittal sub2 - inner join alert_config ac on sub2.alert_config_id = ac.id - where sub2.id = $1 - and sub2.completion_date is null - and not sub2.marked_as_missing - and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid -) sq -where sub1.id = sq.submittal_id -returning sub1.id, sub1.alert_config_id, sub1.submittal_status_id, sub1.completion_date, sub1.create_date, sub1.due_date, sub1.marked_as_missing, sub1.warning_sent -` - -func (q *Queries) CompleteEvaluationSubmittal(ctx context.Context, id uuid.UUID) (Submittal, error) { - row := q.db.QueryRow(ctx, completeEvaluationSubmittal, id) - var i Submittal - err := row.Scan( 
- &i.ID, - &i.AlertConfigID, - &i.SubmittalStatusID, - &i.CompletionDate, - &i.CreateDate, - &i.DueDate, - &i.MarkedAsMissing, - &i.WarningSent, - ) - return i, err -} - -const createEvaluation = `-- name: CreateEvaluation :one +const evaluationCreate = `-- name: EvaluationCreate :one insert into evaluation ( project_id, submittal_id, @@ -66,7 +26,7 @@ insert into evaluation ( returning id ` -type CreateEvaluationParams struct { +type EvaluationCreateParams struct { ProjectID uuid.UUID `json:"project_id"` SubmittalID *uuid.UUID `json:"submittal_id"` Name string `json:"name"` @@ -77,8 +37,8 @@ type CreateEvaluationParams struct { CreateDate time.Time `json:"create_date"` } -func (q *Queries) CreateEvaluation(ctx context.Context, arg CreateEvaluationParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, createEvaluation, +func (q *Queries) EvaluationCreate(ctx context.Context, arg EvaluationCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, evaluationCreate, arg.ProjectID, arg.SubmittalID, arg.Name, @@ -93,49 +53,21 @@ func (q *Queries) CreateEvaluation(ctx context.Context, arg CreateEvaluationPara return id, err } -const createEvaluationInstrument = `-- name: CreateEvaluationInstrument :exec -insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) -` - -type CreateEvaluationInstrumentParams struct { - EvaluationID *uuid.UUID `json:"evaluation_id"` - InstrumentID *uuid.UUID `json:"instrument_id"` -} - -func (q *Queries) CreateEvaluationInstrument(ctx context.Context, arg CreateEvaluationInstrumentParams) error { - _, err := q.db.Exec(ctx, createEvaluationInstrument, arg.EvaluationID, arg.InstrumentID) - return err -} - -const createNextEvaluationSubmittal = `-- name: CreateNextEvaluationSubmittal :exec -insert into submittal (alert_config_id, due_date) -select - ac.id, - now() + ac.schedule_interval -from alert_config ac -where ac.id in (select sub.alert_config_id from submittal sub where sub.id = $1) +const evaluationDelete = 
`-- name: EvaluationDelete :exec +delete from evaluation where id=$1 ` -func (q *Queries) CreateNextEvaluationSubmittal(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, createNextEvaluationSubmittal, id) +func (q *Queries) EvaluationDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, evaluationDelete, id) return err } -const deleteEvaluation = `-- name: DeleteEvaluation :exec -delete from evaluation where id = $1 +const evaluationGet = `-- name: EvaluationGet :one +select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation where id=$1 ` -func (q *Queries) DeleteEvaluation(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteEvaluation, id) - return err -} - -const getEvaluation = `-- name: GetEvaluation :one -select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation where id = $1 -` - -func (q *Queries) GetEvaluation(ctx context.Context, id uuid.UUID) (VEvaluation, error) { - row := q.db.QueryRow(ctx, getEvaluation, id) +func (q *Queries) EvaluationGet(ctx context.Context, id uuid.UUID) (VEvaluation, error) { + row := q.db.QueryRow(ctx, evaluationGet, id) var i VEvaluation err := row.Scan( &i.ID, @@ -159,17 +91,40 @@ func (q *Queries) GetEvaluation(ctx context.Context, id uuid.UUID) (VEvaluation, return i, err } -const listInstrumentEvaluations = `-- name: ListInstrumentEvaluations :many +const evaluationInstrumentCreate = `-- name: EvaluationInstrumentCreate :exec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) +` + +type EvaluationInstrumentCreateParams struct { + EvaluationID *uuid.UUID `json:"evaluation_id"` + InstrumentID 
*uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) EvaluationInstrumentCreate(ctx context.Context, arg EvaluationInstrumentCreateParams) error { + _, err := q.db.Exec(ctx, evaluationInstrumentCreate, arg.EvaluationID, arg.InstrumentID) + return err +} + +const evaluationInstrumentDeleteForEvaluation = `-- name: EvaluationInstrumentDeleteForEvaluation :exec +delete from evaluation_instrument where evaluation_id=$1 +` + +func (q *Queries) EvaluationInstrumentDeleteForEvaluation(ctx context.Context, evaluationID *uuid.UUID) error { + _, err := q.db.Exec(ctx, evaluationInstrumentDeleteForEvaluation, evaluationID) + return err +} + +const evaluationListForInstrument = `-- name: EvaluationListForInstrument :many select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation where id = any( select evaluation_id from evaluation_instrument - where instrument_id = $1 + where instrument_id=$1 ) ` -func (q *Queries) ListInstrumentEvaluations(ctx context.Context, instrumentID *uuid.UUID) ([]VEvaluation, error) { - rows, err := q.db.Query(ctx, listInstrumentEvaluations, instrumentID) +func (q *Queries) EvaluationListForInstrument(ctx context.Context, instrumentID *uuid.UUID) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, evaluationListForInstrument, instrumentID) if err != nil { return nil, err } @@ -206,14 +161,14 @@ func (q *Queries) ListInstrumentEvaluations(ctx context.Context, instrumentID *u return items, nil } -const listProjectEvaluations = `-- name: ListProjectEvaluations :many +const evaluationListForProject = `-- name: EvaluationListForProject :many select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation -where 
project_id = $1 +where project_id=$1 ` -func (q *Queries) ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) { - rows, err := q.db.Query(ctx, listProjectEvaluations, projectID) +func (q *Queries) EvaluationListForProject(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, evaluationListForProject, projectID) if err != nil { return nil, err } @@ -250,20 +205,20 @@ func (q *Queries) ListProjectEvaluations(ctx context.Context, projectID uuid.UUI return items, nil } -const listProjectEvaluationsByAlertConfig = `-- name: ListProjectEvaluationsByAlertConfig :many +const evaluationListForProjectAlertConfig = `-- name: EvaluationListForProjectAlertConfig :many select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation -where project_id = $1 +where project_id=$1 and alert_config_id is not null -and alert_config_id = $2 +and alert_config_id=$2 ` -type ListProjectEvaluationsByAlertConfigParams struct { +type EvaluationListForProjectAlertConfigParams struct { ProjectID uuid.UUID `json:"project_id"` AlertConfigID *uuid.UUID `json:"alert_config_id"` } -func (q *Queries) ListProjectEvaluationsByAlertConfig(ctx context.Context, arg ListProjectEvaluationsByAlertConfigParams) ([]VEvaluation, error) { - rows, err := q.db.Query(ctx, listProjectEvaluationsByAlertConfig, arg.ProjectID, arg.AlertConfigID) +func (q *Queries) EvaluationListForProjectAlertConfig(ctx context.Context, arg EvaluationListForProjectAlertConfigParams) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, evaluationListForProjectAlertConfig, arg.ProjectID, arg.AlertConfigID) if err != nil { return nil, err } @@ -300,16 +255,7 @@ func (q *Queries) ListProjectEvaluationsByAlertConfig(ctx context.Context, arg L return items, nil } -const 
unassignAllInstrumentsFromEvaluation = `-- name: UnassignAllInstrumentsFromEvaluation :exec -delete from evaluation_instrument where evaluation_id = $1 -` - -func (q *Queries) UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID *uuid.UUID) error { - _, err := q.db.Exec(ctx, unassignAllInstrumentsFromEvaluation, evaluationID) - return err -} - -const updateEvaluation = `-- name: UpdateEvaluation :exec +const evaluationUpdate = `-- name: EvaluationUpdate :exec update evaluation set name=$3, body=$4, @@ -320,7 +266,7 @@ update evaluation set where id=$1 and project_id=$2 ` -type UpdateEvaluationParams struct { +type EvaluationUpdateParams struct { ID uuid.UUID `json:"id"` ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` @@ -331,8 +277,8 @@ type UpdateEvaluationParams struct { UpdateDate *time.Time `json:"update_date"` } -func (q *Queries) UpdateEvaluation(ctx context.Context, arg UpdateEvaluationParams) error { - _, err := q.db.Exec(ctx, updateEvaluation, +func (q *Queries) EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error { + _, err := q.db.Exec(ctx, evaluationUpdate, arg.ID, arg.ProjectID, arg.Name, @@ -344,3 +290,57 @@ func (q *Queries) UpdateEvaluation(ctx context.Context, arg UpdateEvaluationPara ) return err } + +const submittalCreateNextEvaluation = `-- name: SubmittalCreateNextEvaluation :exec +insert into submittal (alert_config_id, due_date) +select + ac.id, + now() + ac.schedule_interval +from alert_config ac +where ac.id in (select sub.alert_config_id from submittal sub where sub.id=$1) +` + +func (q *Queries) SubmittalCreateNextEvaluation(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, submittalCreateNextEvaluation, id) + return err +} + +const submittalUpdateCompleteEvaluation = `-- name: SubmittalUpdateCompleteEvaluation :one +update submittal sub1 set + submittal_status_id = sq.submittal_status_id, + completion_date = now() +from ( + select + sub2.id as submittal_id, + case 
+ -- if completed before due date, mark submittal as green id + when now() <= sub2.due_date then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid + -- if completed after due date, mark as yellow + else 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::uuid + end as submittal_status_id + from submittal sub2 + inner join alert_config ac on sub2.alert_config_id = ac.id + where sub2.id=$1 + and sub2.completion_date is null + and not sub2.marked_as_missing + and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +) sq +where sub1.id = sq.submittal_id +returning sub1.id, sub1.alert_config_id, sub1.submittal_status_id, sub1.completion_date, sub1.create_date, sub1.due_date, sub1.marked_as_missing, sub1.warning_sent +` + +func (q *Queries) SubmittalUpdateCompleteEvaluation(ctx context.Context, id uuid.UUID) (Submittal, error) { + row := q.db.QueryRow(ctx, submittalUpdateCompleteEvaluation, id) + var i Submittal + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.SubmittalStatusID, + &i.CompletionDate, + &i.CreateDate, + &i.DueDate, + &i.MarkedAsMissing, + &i.WarningSent, + ) + return i, err +} diff --git a/api/internal/db/heartbeat.sql_gen.go b/api/internal/db/heartbeat.sql_gen.go index 882f56b1..6a731eb0 100644 --- a/api/internal/db/heartbeat.sql_gen.go +++ b/api/internal/db/heartbeat.sql_gen.go @@ -10,34 +10,36 @@ import ( "time" ) -const createHeartbeat = `-- name: CreateHeartbeat :one +const heartbeatCreate = `-- name: HeartbeatCreate :one insert into heartbeat (time) values ($1) returning time ` -func (q *Queries) CreateHeartbeat(ctx context.Context, argTime time.Time) (time.Time, error) { - row := q.db.QueryRow(ctx, createHeartbeat, argTime) +func (q *Queries) HeartbeatCreate(ctx context.Context, argTime time.Time) (time.Time, error) { + row := q.db.QueryRow(ctx, heartbeatCreate, argTime) var time time.Time err := row.Scan(&time) return time, err } -const getLatestHeartbeat = `-- name: GetLatestHeartbeat :one -select max(time) as time from heartbeat +const 
heartbeatGetLatest = `-- name: HeartbeatGetLatest :one +select max(time)::timestamptz from heartbeat ` -func (q *Queries) GetLatestHeartbeat(ctx context.Context) (interface{}, error) { - row := q.db.QueryRow(ctx, getLatestHeartbeat) - var time interface{} - err := row.Scan(&time) - return time, err +func (q *Queries) HeartbeatGetLatest(ctx context.Context) (time.Time, error) { + row := q.db.QueryRow(ctx, heartbeatGetLatest) + var column_1 time.Time + err := row.Scan(&column_1) + return column_1, err } -const listHeartbeats = `-- name: ListHeartbeats :many +const heartbeatList = `-- name: HeartbeatList :many select time from heartbeat +order by time desc +limit $1 ` -func (q *Queries) ListHeartbeats(ctx context.Context) ([]time.Time, error) { - rows, err := q.db.Query(ctx, listHeartbeats) +func (q *Queries) HeartbeatList(ctx context.Context, resultLimit int32) ([]time.Time, error) { + rows, err := q.db.Query(ctx, heartbeatList, resultLimit) if err != nil { return nil, err } diff --git a/api/internal/db/home.sql_gen.go b/api/internal/db/home.sql_gen.go index 628dabeb..5a592fbd 100644 --- a/api/internal/db/home.sql_gen.go +++ b/api/internal/db/home.sql_gen.go @@ -9,7 +9,7 @@ import ( "context" ) -const getHome = `-- name: GetHome :one +const homeGet = `-- name: HomeGet :one select (select count(*) from instrument where not deleted) as instrument_count, (select count(*) from project where not deleted) as project_count, @@ -18,7 +18,7 @@ select (select count(*) from timeseries_measurement where time > now() - '2 hours'::interval) as new_measurements_2h ` -type GetHomeRow struct { +type HomeGetRow struct { InstrumentCount int64 `json:"instrument_count"` ProjectCount int64 `json:"project_count"` InstrumentGroupCount int64 `json:"instrument_group_count"` @@ -26,9 +26,9 @@ type GetHomeRow struct { NewMeasurements2h int64 `json:"new_measurements_2h"` } -func (q *Queries) GetHome(ctx context.Context) (GetHomeRow, error) { - row := q.db.QueryRow(ctx, getHome) - var i 
GetHomeRow +func (q *Queries) HomeGet(ctx context.Context) (HomeGetRow, error) { + row := q.db.QueryRow(ctx, homeGet) + var i HomeGetRow err := row.Scan( &i.InstrumentCount, &i.ProjectCount, diff --git a/api/internal/db/instrument.sql_gen.go b/api/internal/db/instrument.sql_gen.go index 9a03b833..9add2526 100644 --- a/api/internal/db/instrument.sql_gen.go +++ b/api/internal/db/instrument.sql_gen.go @@ -12,13 +12,13 @@ import ( "github.com/google/uuid" ) -const createInstrument = `-- name: CreateInstrument :one +const instrumentCreate = `-- name: InstrumentCreate :one insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) returning id, slug ` -type CreateInstrumentParams struct { +type InstrumentCreateParams struct { Name string `json:"name"` TypeID uuid.UUID `json:"type_id"` Geometry Geometry `json:"geometry"` @@ -31,13 +31,13 @@ type CreateInstrumentParams struct { ShowCwmsTab bool `json:"show_cwms_tab"` } -type CreateInstrumentRow struct { +type InstrumentCreateRow struct { ID uuid.UUID `json:"id"` Slug string `json:"slug"` } -func (q *Queries) CreateInstrument(ctx context.Context, arg CreateInstrumentParams) (CreateInstrumentRow, error) { - row := q.db.QueryRow(ctx, createInstrument, +func (q *Queries) InstrumentCreate(ctx context.Context, arg InstrumentCreateParams) (InstrumentCreateRow, error) { + row := q.db.QueryRow(ctx, instrumentCreate, arg.Name, arg.TypeID, arg.Geometry, @@ -49,13 +49,13 @@ func (q *Queries) CreateInstrument(ctx context.Context, arg CreateInstrumentPara arg.UsgsID, arg.ShowCwmsTab, ) - var i CreateInstrumentRow + var i InstrumentCreateRow err := row.Scan(&i.ID, &i.Slug) return i, err } -const deleteFlagInstrument = `-- name: DeleteFlagInstrument :exec -update instrument set deleted = true +const instrumentDeleteFlag = `-- name: InstrumentDeleteFlag :exec +update instrument set 
deleted=true where id = any( select instrument_id from project_instrument @@ -64,25 +64,25 @@ where id = any( and id = $2 ` -type DeleteFlagInstrumentParams struct { +type InstrumentDeleteFlagParams struct { ProjectID uuid.UUID `json:"project_id"` ID uuid.UUID `json:"id"` } -func (q *Queries) DeleteFlagInstrument(ctx context.Context, arg DeleteFlagInstrumentParams) error { - _, err := q.db.Exec(ctx, deleteFlagInstrument, arg.ProjectID, arg.ID) +func (q *Queries) InstrumentDeleteFlag(ctx context.Context, arg InstrumentDeleteFlagParams) error { + _, err := q.db.Exec(ctx, instrumentDeleteFlag, arg.ProjectID, arg.ID) return err } -const getInstrument = `-- name: GetInstrument :one +const instrumentGet = `-- name: InstrumentGet :one select id, deleted, status_id, status, status_time, slug, name, type_id, show_cwms_tab, type, icon, geometry, station, station_offset, creator, create_date, updater, update_date, nid_id, usgs_id, telemetry, has_cwms, projects, constants, groups, alert_configs, opts from v_instrument where not deleted -and id=$1 +and id = $1 ` -func (q *Queries) GetInstrument(ctx context.Context, id uuid.UUID) (VInstrument, error) { - row := q.db.QueryRow(ctx, getInstrument, id) +func (q *Queries) InstrumentGet(ctx context.Context, id uuid.UUID) (VInstrument, error) { + row := q.db.QueryRow(ctx, instrumentGet, id) var i VInstrument err := row.Scan( &i.ID, @@ -116,38 +116,38 @@ func (q *Queries) GetInstrument(ctx context.Context, id uuid.UUID) (VInstrument, return i, err } -const getInstrumentCount = `-- name: GetInstrumentCount :one +const instrumentGetCount = `-- name: InstrumentGetCount :one select count(*) from instrument where not deleted ` -func (q *Queries) GetInstrumentCount(ctx context.Context) (int64, error) { - row := q.db.QueryRow(ctx, getInstrumentCount) +func (q *Queries) InstrumentGetCount(ctx context.Context) (int64, error) { + row := q.db.QueryRow(ctx, instrumentGetCount) var count int64 err := row.Scan(&count) return count, err } -const 
listInstrumentIDNamesByIDs = `-- name: ListInstrumentIDNamesByIDs :many +const instrumentIDNameListByIDs = `-- name: InstrumentIDNameListByIDs :many select id, name from instrument where id in ($1::uuid[]) and not deleted ` -type ListInstrumentIDNamesByIDsRow struct { +type InstrumentIDNameListByIDsRow struct { ID uuid.UUID `json:"id"` Name string `json:"name"` } -func (q *Queries) ListInstrumentIDNamesByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]ListInstrumentIDNamesByIDsRow, error) { - rows, err := q.db.Query(ctx, listInstrumentIDNamesByIDs, instrumentIds) +func (q *Queries) InstrumentIDNameListByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]InstrumentIDNameListByIDsRow, error) { + rows, err := q.db.Query(ctx, instrumentIDNameListByIDs, instrumentIds) if err != nil { return nil, err } defer rows.Close() - items := []ListInstrumentIDNamesByIDsRow{} + items := []InstrumentIDNameListByIDsRow{} for rows.Next() { - var i ListInstrumentIDNamesByIDsRow + var i InstrumentIDNameListByIDsRow if err := rows.Scan(&i.ID, &i.Name); err != nil { return nil, err } @@ -159,38 +159,14 @@ func (q *Queries) ListInstrumentIDNamesByIDs(ctx context.Context, instrumentIds return items, nil } -const listInstrumentProjects = `-- name: ListInstrumentProjects :many -select project_id from project_instrument where instrument_id = $1 -` - -func (q *Queries) ListInstrumentProjects(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) { - rows, err := q.db.Query(ctx, listInstrumentProjects, instrumentID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []uuid.UUID{} - for rows.Next() { - var project_id uuid.UUID - if err := rows.Scan(&project_id); err != nil { - return nil, err - } - items = append(items, project_id) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listInstruments = `-- name: ListInstruments :many +const instrumentList = `-- name: InstrumentList :many select id, deleted, 
status_id, status, status_time, slug, name, type_id, show_cwms_tab, type, icon, geometry, station, station_offset, creator, create_date, updater, update_date, nid_id, usgs_id, telemetry, has_cwms, projects, constants, groups, alert_configs, opts from v_instrument where not deleted ` -func (q *Queries) ListInstruments(ctx context.Context) ([]VInstrument, error) { - rows, err := q.db.Query(ctx, listInstruments) +func (q *Queries) InstrumentList(ctx context.Context) ([]VInstrument, error) { + rows, err := q.db.Query(ctx, instrumentList) if err != nil { return nil, err } @@ -237,15 +213,15 @@ func (q *Queries) ListInstruments(ctx context.Context) ([]VInstrument, error) { return items, nil } -const listInstrumentsForProject = `-- name: ListInstrumentsForProject :many +const instrumentListForInstrumentGroup = `-- name: InstrumentListForInstrumentGroup :many select i.id, i.deleted, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i.station_offset, i.creator, i.create_date, i.updater, i.update_date, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts from v_instrument i -inner join project_instrument pi on pi.instrument_id = i.id -where pi.project_id = $1 +inner join instrument_group_instruments igi on igi.instrument_id = i.id +where instrument_group_id = $1 ` -func (q *Queries) ListInstrumentsForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) { - rows, err := q.db.Query(ctx, listInstrumentsForProject, projectID) +func (q *Queries) InstrumentListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VInstrument, error) { + rows, err := q.db.Query(ctx, instrumentListForInstrumentGroup, instrumentGroupID) if err != nil { return nil, err } @@ -292,31 +268,51 @@ func (q *Queries) ListInstrumentsForProject(ctx context.Context, projectID uuid. 
return items, nil } -const listProjectCountForInstruments = `-- name: ListProjectCountForInstruments :many -select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count -from project_instrument pi -inner join instrument i on pi.instrument_id = i.id -where pi.instrument_id in ($1::uuid[]) -group by pi.instrument_id, i.name -order by i.name +const instrumentListForProject = `-- name: InstrumentListForProject :many +select i.id, i.deleted, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i.station_offset, i.creator, i.create_date, i.updater, i.update_date, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts +from v_instrument i +inner join project_instrument pi on pi.instrument_id = i.id +where pi.project_id = $1 ` -type ListProjectCountForInstrumentsRow struct { - InstrumentID uuid.UUID `json:"instrument_id"` - InstrumentName string `json:"instrument_name"` - ProjectCount int64 `json:"project_count"` -} - -func (q *Queries) ListProjectCountForInstruments(ctx context.Context, instrumentIds []uuid.UUID) ([]ListProjectCountForInstrumentsRow, error) { - rows, err := q.db.Query(ctx, listProjectCountForInstruments, instrumentIds) +func (q *Queries) InstrumentListForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) { + rows, err := q.db.Query(ctx, instrumentListForProject, projectID) if err != nil { return nil, err } defer rows.Close() - items := []ListProjectCountForInstrumentsRow{} + items := []VInstrument{} for rows.Next() { - var i ListProjectCountForInstrumentsRow - if err := rows.Scan(&i.InstrumentID, &i.InstrumentName, &i.ProjectCount); err != nil { + var i VInstrument + if err := rows.Scan( + &i.ID, + &i.Deleted, + &i.StatusID, + &i.Status, + &i.StatusTime, + &i.Slug, + &i.Name, + &i.TypeID, + &i.ShowCwmsTab, + &i.Type, + &i.Icon, + &i.Geometry, + &i.Station, + &i.StationOffset, + &i.Creator, + 
&i.CreateDate, + &i.Updater, + &i.UpdateDate, + &i.NidID, + &i.UsgsID, + &i.Telemetry, + &i.HasCwms, + &i.Projects, + &i.Constants, + &i.Groups, + &i.AlertConfigs, + &i.Opts, + ); err != nil { return nil, err } items = append(items, i) @@ -327,18 +323,18 @@ func (q *Queries) ListProjectCountForInstruments(ctx context.Context, instrument return items, nil } -const updateInstrument = `-- name: UpdateInstrument :exec +const instrumentUpdate = `-- name: InstrumentUpdate :exec update instrument set - name = $3, - type_id = $4, - geometry = $5, - updater = $6, - update_date = $7, - station = $8, - station_offset = $9, - nid_id = $10, - usgs_id = $11, - show_cwms_tab = $12 + name=$3, + type_id=$4, + geometry=$5, + updater=$6, + update_date=$7, + station=$8, + station_offset=$9, + nid_id=$10, + usgs_id=$11, + show_cwms_tab=$12 where id = $2 and id in ( select instrument_id @@ -347,7 +343,7 @@ and id in ( ) ` -type UpdateInstrumentParams struct { +type InstrumentUpdateParams struct { ProjectID uuid.UUID `json:"project_id"` ID uuid.UUID `json:"id"` Name string `json:"name"` @@ -362,8 +358,8 @@ type UpdateInstrumentParams struct { ShowCwmsTab bool `json:"show_cwms_tab"` } -func (q *Queries) UpdateInstrument(ctx context.Context, arg UpdateInstrumentParams) error { - _, err := q.db.Exec(ctx, updateInstrument, +func (q *Queries) InstrumentUpdate(ctx context.Context, arg InstrumentUpdateParams) error { + _, err := q.db.Exec(ctx, instrumentUpdate, arg.ProjectID, arg.ID, arg.Name, @@ -380,11 +376,11 @@ func (q *Queries) UpdateInstrument(ctx context.Context, arg UpdateInstrumentPara return err } -const updateInstrumentGeometry = `-- name: UpdateInstrumentGeometry :one +const instrumentUpdateGeometry = `-- name: InstrumentUpdateGeometry :one update instrument set - geometry = $3, - updater = $4, - update_date = now() + geometry=$3, + updater=$4, + update_date=now() where id = $2 and id in ( select instrument_id @@ -394,15 +390,15 @@ and id in ( returning id ` -type 
UpdateInstrumentGeometryParams struct { +type InstrumentUpdateGeometryParams struct { ProjectID uuid.UUID `json:"project_id"` ID uuid.UUID `json:"id"` Geometry Geometry `json:"geometry"` Updater *uuid.UUID `json:"updater"` } -func (q *Queries) UpdateInstrumentGeometry(ctx context.Context, arg UpdateInstrumentGeometryParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, updateInstrumentGeometry, +func (q *Queries) InstrumentUpdateGeometry(ctx context.Context, arg InstrumentUpdateGeometryParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, instrumentUpdateGeometry, arg.ProjectID, arg.ID, arg.Geometry, @@ -412,3 +408,62 @@ func (q *Queries) UpdateInstrumentGeometry(ctx context.Context, arg UpdateInstru err := row.Scan(&id) return id, err } + +const projectInstrumentListCountByInstrument = `-- name: ProjectInstrumentListCountByInstrument :many +select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id in ($1::uuid[]) +group by pi.instrument_id, i.name +order by i.name +` + +type ProjectInstrumentListCountByInstrumentRow struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` + ProjectCount int64 `json:"project_count"` +} + +func (q *Queries) ProjectInstrumentListCountByInstrument(ctx context.Context, instrumentIds []uuid.UUID) ([]ProjectInstrumentListCountByInstrumentRow, error) { + rows, err := q.db.Query(ctx, projectInstrumentListCountByInstrument, instrumentIds) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ProjectInstrumentListCountByInstrumentRow{} + for rows.Next() { + var i ProjectInstrumentListCountByInstrumentRow + if err := rows.Scan(&i.InstrumentID, &i.InstrumentName, &i.ProjectCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const 
projectInstrumentListProjectIDForInstrument = `-- name: ProjectInstrumentListProjectIDForInstrument :many +select project_id from project_instrument where instrument_id = $1 +` + +func (q *Queries) ProjectInstrumentListProjectIDForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) { + rows, err := q.db.Query(ctx, projectInstrumentListProjectIDForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []uuid.UUID{} + for rows.Next() { + var project_id uuid.UUID + if err := rows.Scan(&project_id); err != nil { + return nil, err + } + items = append(items, project_id) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/instrument_assign.sql_gen.go b/api/internal/db/instrument_assign.sql_gen.go index 35d989a8..9eb2b3a5 100644 --- a/api/internal/db/instrument_assign.sql_gen.go +++ b/api/internal/db/instrument_assign.sql_gen.go @@ -11,62 +11,64 @@ import ( "github.com/google/uuid" ) -const assignInstrumentToProject = `-- name: AssignInstrumentToProject :exec +const projectInstrumentCreate = `-- name: ProjectInstrumentCreate :exec insert into project_instrument (project_id, instrument_id) values ($1, $2) on conflict on constraint project_instrument_project_id_instrument_id_key do nothing ` -type AssignInstrumentToProjectParams struct { +type ProjectInstrumentCreateParams struct { ProjectID uuid.UUID `json:"project_id"` InstrumentID uuid.UUID `json:"instrument_id"` } -func (q *Queries) AssignInstrumentToProject(ctx context.Context, arg AssignInstrumentToProjectParams) error { - _, err := q.db.Exec(ctx, assignInstrumentToProject, arg.ProjectID, arg.InstrumentID) +func (q *Queries) ProjectInstrumentCreate(ctx context.Context, arg ProjectInstrumentCreateParams) error { + _, err := q.db.Exec(ctx, projectInstrumentCreate, arg.ProjectID, arg.InstrumentID) return err } -const unassignInstrumentFromProject = `-- name: UnassignInstrumentFromProject 
:exec +const projectInstrumentDelete = `-- name: ProjectInstrumentDelete :exec delete from project_instrument where project_id = $1 and instrument_id = $2 ` -type UnassignInstrumentFromProjectParams struct { +type ProjectInstrumentDeleteParams struct { ProjectID uuid.UUID `json:"project_id"` InstrumentID uuid.UUID `json:"instrument_id"` } -func (q *Queries) UnassignInstrumentFromProject(ctx context.Context, arg UnassignInstrumentFromProjectParams) error { - _, err := q.db.Exec(ctx, unassignInstrumentFromProject, arg.ProjectID, arg.InstrumentID) +func (q *Queries) ProjectInstrumentDelete(ctx context.Context, arg ProjectInstrumentDeleteParams) error { + _, err := q.db.Exec(ctx, projectInstrumentDelete, arg.ProjectID, arg.InstrumentID) return err } -const validateInstrumentNamesProjectUnique = `-- name: ValidateInstrumentNamesProjectUnique :many -select i.name +const projectInstrumentListForInstrumentNameProjects = `-- name: ProjectInstrumentListForInstrumentNameProjects :many +select i.name instrument_name from project_instrument pi inner join instrument i on pi.instrument_id = i.id -where pi.project_id = $1 -and i.name in ($2::text[]) +inner join project p on pi.project_id = p.id +where i.name = $1 +and pi.project_id in ($2::uuid[]) and not i.deleted +order by pi.project_id ` -type ValidateInstrumentNamesProjectUniqueParams struct { - ProjectID uuid.UUID `json:"project_id"` - InstrumentNames []string `json:"instrument_names"` +type ProjectInstrumentListForInstrumentNameProjectsParams struct { + InstrumentName string `json:"instrument_name"` + ProjectIds []uuid.UUID `json:"project_ids"` } -func (q *Queries) ValidateInstrumentNamesProjectUnique(ctx context.Context, arg ValidateInstrumentNamesProjectUniqueParams) ([]string, error) { - rows, err := q.db.Query(ctx, validateInstrumentNamesProjectUnique, arg.ProjectID, arg.InstrumentNames) +func (q *Queries) ProjectInstrumentListForInstrumentNameProjects(ctx context.Context, arg 
ProjectInstrumentListForInstrumentNameProjectsParams) ([]string, error) { + rows, err := q.db.Query(ctx, projectInstrumentListForInstrumentNameProjects, arg.InstrumentName, arg.ProjectIds) if err != nil { return nil, err } defer rows.Close() items := []string{} for rows.Next() { - var name string - if err := rows.Scan(&name); err != nil { + var instrument_name string + if err := rows.Scan(&instrument_name); err != nil { return nil, err } - items = append(items, name) + items = append(items, instrument_name) } if err := rows.Err(); err != nil { return nil, err @@ -74,43 +76,41 @@ func (q *Queries) ValidateInstrumentNamesProjectUnique(ctx context.Context, arg return items, nil } -const validateInstrumentsAssignerAuthorized = `-- name: ValidateInstrumentsAssignerAuthorized :many -select p.name as project_name, i.name as instrument_name +const projectInstrumentListForInstrumentProjectsProfileAdmin = `-- name: ProjectInstrumentListForInstrumentProjectsProfileAdmin :many +select p.name from project_instrument pi inner join project p on pi.project_id = p.id inner join instrument i on pi.instrument_id = i.id -where pi.instrument_id in ($1::uuid[]) +where pi.instrument_id = $1 +and pi.project_id in ($2::uuid[]) and not exists ( select 1 from v_profile_project_roles ppr - where ppr.profile_id = $2 + where profile_id = $3 and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) ) and not i.deleted +order by p.name ` -type ValidateInstrumentsAssignerAuthorizedParams struct { - InstrumentIds []uuid.UUID `json:"instrument_ids"` - ProfileID uuid.UUID `json:"profile_id"` -} - -type ValidateInstrumentsAssignerAuthorizedRow struct { - ProjectName string `json:"project_name"` - InstrumentName string `json:"instrument_name"` +type ProjectInstrumentListForInstrumentProjectsProfileAdminParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ProjectIds []uuid.UUID `json:"project_ids"` + ProfileID uuid.UUID `json:"profile_id"` } -func (q *Queries) 
ValidateInstrumentsAssignerAuthorized(ctx context.Context, arg ValidateInstrumentsAssignerAuthorizedParams) ([]ValidateInstrumentsAssignerAuthorizedRow, error) { - rows, err := q.db.Query(ctx, validateInstrumentsAssignerAuthorized, arg.InstrumentIds, arg.ProfileID) +func (q *Queries) ProjectInstrumentListForInstrumentProjectsProfileAdmin(ctx context.Context, arg ProjectInstrumentListForInstrumentProjectsProfileAdminParams) ([]string, error) { + rows, err := q.db.Query(ctx, projectInstrumentListForInstrumentProjectsProfileAdmin, arg.InstrumentID, arg.ProjectIds, arg.ProfileID) if err != nil { return nil, err } defer rows.Close() - items := []ValidateInstrumentsAssignerAuthorizedRow{} + items := []string{} for rows.Next() { - var i ValidateInstrumentsAssignerAuthorizedRow - if err := rows.Scan(&i.ProjectName, &i.InstrumentName); err != nil { + var name string + if err := rows.Scan(&name); err != nil { return nil, err } - items = append(items, i) + items = append(items, name) } if err := rows.Err(); err != nil { return nil, err @@ -118,41 +118,43 @@ func (q *Queries) ValidateInstrumentsAssignerAuthorized(ctx context.Context, arg return items, nil } -const validateProjectsAssignerAuthorized = `-- name: ValidateProjectsAssignerAuthorized :many -select p.name +const projectInstrumentListForInstrumentsProfileAdmin = `-- name: ProjectInstrumentListForInstrumentsProfileAdmin :many +select p.name as project_name, i.name as instrument_name from project_instrument pi inner join project p on pi.project_id = p.id inner join instrument i on pi.instrument_id = i.id -where pi.instrument_id = $1 -and pi.project_id in ($2::uuid[]) +where pi.instrument_id in ($1::uuid[]) and not exists ( select 1 from v_profile_project_roles ppr - where profile_id = $3 + where ppr.profile_id = $2 and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) ) and not i.deleted -order by p.name ` -type ValidateProjectsAssignerAuthorizedParams struct { - InstrumentID uuid.UUID 
`json:"instrument_id"` - ProjectIds []uuid.UUID `json:"project_ids"` - ProfileID uuid.UUID `json:"profile_id"` +type ProjectInstrumentListForInstrumentsProfileAdminParams struct { + InstrumentIds []uuid.UUID `json:"instrument_ids"` + ProfileID uuid.UUID `json:"profile_id"` +} + +type ProjectInstrumentListForInstrumentsProfileAdminRow struct { + ProjectName string `json:"project_name"` + InstrumentName string `json:"instrument_name"` } -func (q *Queries) ValidateProjectsAssignerAuthorized(ctx context.Context, arg ValidateProjectsAssignerAuthorizedParams) ([]string, error) { - rows, err := q.db.Query(ctx, validateProjectsAssignerAuthorized, arg.InstrumentID, arg.ProjectIds, arg.ProfileID) +func (q *Queries) ProjectInstrumentListForInstrumentsProfileAdmin(ctx context.Context, arg ProjectInstrumentListForInstrumentsProfileAdminParams) ([]ProjectInstrumentListForInstrumentsProfileAdminRow, error) { + rows, err := q.db.Query(ctx, projectInstrumentListForInstrumentsProfileAdmin, arg.InstrumentIds, arg.ProfileID) if err != nil { return nil, err } defer rows.Close() - items := []string{} + items := []ProjectInstrumentListForInstrumentsProfileAdminRow{} for rows.Next() { - var name string - if err := rows.Scan(&name); err != nil { + var i ProjectInstrumentListForInstrumentsProfileAdminRow + if err := rows.Scan(&i.ProjectName, &i.InstrumentName); err != nil { return nil, err } - items = append(items, name) + items = append(items, i) } if err := rows.Err(); err != nil { return nil, err @@ -160,35 +162,33 @@ func (q *Queries) ValidateProjectsAssignerAuthorized(ctx context.Context, arg Va return items, nil } -const validateProjectsInstrumentNameUnique = `-- name: ValidateProjectsInstrumentNameUnique :many -select i.name instrument_name +const projectInstrumentListForProjectInstrumentNames = `-- name: ProjectInstrumentListForProjectInstrumentNames :many +select i.name from project_instrument pi inner join instrument i on pi.instrument_id = i.id -inner join project p on 
pi.project_id = p.id -where i.name = $1 -and pi.project_id in ($2::uuid[]) +where pi.project_id = $1 +and i.name in ($2::text[]) and not i.deleted -order by pi.project_id ` -type ValidateProjectsInstrumentNameUniqueParams struct { - InstrumentName string `json:"instrument_name"` - ProjectIds []uuid.UUID `json:"project_ids"` +type ProjectInstrumentListForProjectInstrumentNamesParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentNames []string `json:"instrument_names"` } -func (q *Queries) ValidateProjectsInstrumentNameUnique(ctx context.Context, arg ValidateProjectsInstrumentNameUniqueParams) ([]string, error) { - rows, err := q.db.Query(ctx, validateProjectsInstrumentNameUnique, arg.InstrumentName, arg.ProjectIds) +func (q *Queries) ProjectInstrumentListForProjectInstrumentNames(ctx context.Context, arg ProjectInstrumentListForProjectInstrumentNamesParams) ([]string, error) { + rows, err := q.db.Query(ctx, projectInstrumentListForProjectInstrumentNames, arg.ProjectID, arg.InstrumentNames) if err != nil { return nil, err } defer rows.Close() items := []string{} for rows.Next() { - var instrument_name string - if err := rows.Scan(&instrument_name); err != nil { + var name string + if err := rows.Scan(&name); err != nil { return nil, err } - items = append(items, instrument_name) + items = append(items, name) } if err := rows.Err(); err != nil { return nil, err diff --git a/api/internal/db/instrument_constant.sql_gen.go b/api/internal/db/instrument_constant.sql_gen.go index 9e66e1f2..7cf37ec8 100644 --- a/api/internal/db/instrument_constant.sql_gen.go +++ b/api/internal/db/instrument_constant.sql_gen.go @@ -11,42 +11,42 @@ import ( "github.com/google/uuid" ) -const createInstrumentConstant = `-- name: CreateInstrumentConstant :exec +const instrumentConstantCreate = `-- name: InstrumentConstantCreate :exec insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2) ` -type CreateInstrumentConstantParams struct { +type 
InstrumentConstantCreateParams struct { InstrumentID uuid.UUID `json:"instrument_id"` TimeseriesID uuid.UUID `json:"timeseries_id"` } -func (q *Queries) CreateInstrumentConstant(ctx context.Context, arg CreateInstrumentConstantParams) error { - _, err := q.db.Exec(ctx, createInstrumentConstant, arg.InstrumentID, arg.TimeseriesID) +func (q *Queries) InstrumentConstantCreate(ctx context.Context, arg InstrumentConstantCreateParams) error { + _, err := q.db.Exec(ctx, instrumentConstantCreate, arg.InstrumentID, arg.TimeseriesID) return err } -const deleteInstrumentConstant = `-- name: DeleteInstrumentConstant :exec +const instrumentConstantDelete = `-- name: InstrumentConstantDelete :exec delete from instrument_constants where instrument_id = $1 and timeseries_id = $2 ` -type DeleteInstrumentConstantParams struct { +type InstrumentConstantDeleteParams struct { InstrumentID uuid.UUID `json:"instrument_id"` TimeseriesID uuid.UUID `json:"timeseries_id"` } -func (q *Queries) DeleteInstrumentConstant(ctx context.Context, arg DeleteInstrumentConstantParams) error { - _, err := q.db.Exec(ctx, deleteInstrumentConstant, arg.InstrumentID, arg.TimeseriesID) +func (q *Queries) InstrumentConstantDelete(ctx context.Context, arg InstrumentConstantDeleteParams) error { + _, err := q.db.Exec(ctx, instrumentConstantDelete, arg.InstrumentID, arg.TimeseriesID) return err } -const listInstrumentConstants = `-- name: ListInstrumentConstants :many +const instrumentConstantList = `-- name: InstrumentConstantList :many select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t inner join instrument_constants ic on ic.timeseries_id = t.id where ic.instrument_id = $1 ` -func (q *Queries) ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) { - rows, err := q.db.Query(ctx, listInstrumentConstants, instrumentID) +func (q *Queries) 
InstrumentConstantList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, instrumentConstantList, instrumentID) if err != nil { return nil, err } diff --git a/api/internal/db/instrument_group.sql_gen.go b/api/internal/db/instrument_group.sql_gen.go index fd319db8..db84436e 100644 --- a/api/internal/db/instrument_group.sql_gen.go +++ b/api/internal/db/instrument_group.sql_gen.go @@ -12,13 +12,13 @@ import ( "github.com/google/uuid" ) -const createInstrumentGroup = `-- name: CreateInstrumentGroup :one +const instrumentGroupCreate = `-- name: InstrumentGroupCreate :one insert into instrument_group (slug, name, description, creator, create_date, project_id) values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) -returning id, slug, name, description, creator, create_date, updater, update_date, project_id +returning id, deleted, slug, name, description, creator, create_date, updater, update_date, project_id ` -type CreateInstrumentGroupParams struct { +type InstrumentGroupCreateParams struct { Name string `json:"name"` Description *string `json:"description"` Creator uuid.UUID `json:"creator"` @@ -26,29 +26,18 @@ type CreateInstrumentGroupParams struct { ProjectID *uuid.UUID `json:"project_id"` } -type CreateInstrumentGroupRow struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` - ProjectID *uuid.UUID `json:"project_id"` -} - -func (q *Queries) CreateInstrumentGroup(ctx context.Context, arg CreateInstrumentGroupParams) (CreateInstrumentGroupRow, error) { - row := q.db.QueryRow(ctx, createInstrumentGroup, +func (q *Queries) InstrumentGroupCreate(ctx context.Context, arg InstrumentGroupCreateParams) (InstrumentGroup, error) { + row := q.db.QueryRow(ctx, instrumentGroupCreate, 
arg.Name, arg.Description, arg.Creator, arg.CreateDate, arg.ProjectID, ) - var i CreateInstrumentGroupRow + var i InstrumentGroup err := row.Scan( &i.ID, + &i.Deleted, &i.Slug, &i.Name, &i.Description, @@ -61,52 +50,24 @@ func (q *Queries) CreateInstrumentGroup(ctx context.Context, arg CreateInstrumen return i, err } -const createInstrumentGroupInstruments = `-- name: CreateInstrumentGroupInstruments :exec -insert into instrument_group_instruments (instrument_group_id, instrument_id) values ($1, $2) -` - -type CreateInstrumentGroupInstrumentsParams struct { - InstrumentGroupID uuid.UUID `json:"instrument_group_id"` - InstrumentID uuid.UUID `json:"instrument_id"` -} - -func (q *Queries) CreateInstrumentGroupInstruments(ctx context.Context, arg CreateInstrumentGroupInstrumentsParams) error { - _, err := q.db.Exec(ctx, createInstrumentGroupInstruments, arg.InstrumentGroupID, arg.InstrumentID) - return err -} - -const deleteFlagInstrumentGroup = `-- name: DeleteFlagInstrumentGroup :exec +const instrumentGroupDeleteFlag = `-- name: InstrumentGroupDeleteFlag :exec update instrument_group set deleted = true where id = $1 ` -func (q *Queries) DeleteFlagInstrumentGroup(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteFlagInstrumentGroup, id) +func (q *Queries) InstrumentGroupDeleteFlag(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, instrumentGroupDeleteFlag, id) return err } -const deleteInstrumentGroupInstruments = `-- name: DeleteInstrumentGroupInstruments :exec -delete from instrument_group_instruments where instrument_group_id = $1 and instrument_id = $2 -` - -type DeleteInstrumentGroupInstrumentsParams struct { - InstrumentGroupID uuid.UUID `json:"instrument_group_id"` - InstrumentID uuid.UUID `json:"instrument_id"` -} - -func (q *Queries) DeleteInstrumentGroupInstruments(ctx context.Context, arg DeleteInstrumentGroupInstrumentsParams) error { - _, err := q.db.Exec(ctx, deleteInstrumentGroupInstruments, 
arg.InstrumentGroupID, arg.InstrumentID) - return err -} - -const getInstrumentGroup = `-- name: GetInstrumentGroup :many +const instrumentGroupGet = `-- name: InstrumentGroupGet :many select id, slug, name, description, creator, create_date, updater, update_date, project_id, deleted, instrument_count, timeseries_count from v_instrument_group where not deleted and id=$1 ` -func (q *Queries) GetInstrumentGroup(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) { - rows, err := q.db.Query(ctx, getInstrumentGroup, id) +func (q *Queries) InstrumentGroupGet(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, instrumentGroupGet, id) if err != nil { return nil, err } @@ -138,44 +99,42 @@ func (q *Queries) GetInstrumentGroup(ctx context.Context, id uuid.UUID) ([]VInst return items, nil } -const listInstrumentGroupInstruments = `-- name: ListInstrumentGroupInstruments :many -select -from instrument_group_instruments igi -inner join v_instrument_group on igi.instrument_id = inst.id -where igi.instrument_group_id = $1 and inst.deleted = false +const instrumentGroupInstrumentCreate = `-- name: InstrumentGroupInstrumentCreate :exec +insert into instrument_group_instruments (instrument_group_id, instrument_id) values ($1, $2) ` -type ListInstrumentGroupInstrumentsRow struct { +type InstrumentGroupInstrumentCreateParams struct { + InstrumentGroupID uuid.UUID `json:"instrument_group_id"` + InstrumentID uuid.UUID `json:"instrument_id"` } -func (q *Queries) ListInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID) ([]ListInstrumentGroupInstrumentsRow, error) { - rows, err := q.db.Query(ctx, listInstrumentGroupInstruments, instrumentGroupID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []ListInstrumentGroupInstrumentsRow{} - for rows.Next() { - var i ListInstrumentGroupInstrumentsRow - if err := rows.Scan(); err != nil { - return nil, err - } - items = append(items, i) - } - if 
err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *Queries) InstrumentGroupInstrumentCreate(ctx context.Context, arg InstrumentGroupInstrumentCreateParams) error { + _, err := q.db.Exec(ctx, instrumentGroupInstrumentCreate, arg.InstrumentGroupID, arg.InstrumentID) + return err +} + +const instrumentGroupInstrumentDelete = `-- name: InstrumentGroupInstrumentDelete :exec +delete from instrument_group_instruments where instrument_group_id = $1 and instrument_id = $2 +` + +type InstrumentGroupInstrumentDeleteParams struct { + InstrumentGroupID uuid.UUID `json:"instrument_group_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) InstrumentGroupInstrumentDelete(ctx context.Context, arg InstrumentGroupInstrumentDeleteParams) error { + _, err := q.db.Exec(ctx, instrumentGroupInstrumentDelete, arg.InstrumentGroupID, arg.InstrumentID) + return err } -const listInstrumentGroups = `-- name: ListInstrumentGroups :many +const instrumentGroupList = `-- name: InstrumentGroupList :many select id, slug, name, description, creator, create_date, updater, update_date, project_id, deleted, instrument_count, timeseries_count from v_instrument_group where not deleted ` -func (q *Queries) ListInstrumentGroups(ctx context.Context) ([]VInstrumentGroup, error) { - rows, err := q.db.Query(ctx, listInstrumentGroups) +func (q *Queries) InstrumentGroupList(ctx context.Context) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, instrumentGroupList) if err != nil { return nil, err } @@ -207,14 +166,14 @@ func (q *Queries) ListInstrumentGroups(ctx context.Context) ([]VInstrumentGroup, return items, nil } -const listInstrumentGroupsForProject = `-- name: ListInstrumentGroupsForProject :many +const instrumentGroupListForProject = `-- name: InstrumentGroupListForProject :many select ig.id, ig.slug, ig.name, ig.description, ig.creator, ig.create_date, ig.updater, ig.update_date, ig.project_id, ig.deleted, ig.instrument_count, 
ig.timeseries_count from v_instrument_group ig where ig.project_id = $1 ` -func (q *Queries) ListInstrumentGroupsForProject(ctx context.Context, projectID *uuid.UUID) ([]VInstrumentGroup, error) { - rows, err := q.db.Query(ctx, listInstrumentGroupsForProject, projectID) +func (q *Queries) InstrumentGroupListForProject(ctx context.Context, projectID *uuid.UUID) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, instrumentGroupListForProject, projectID) if err != nil { return nil, err } @@ -246,33 +205,30 @@ func (q *Queries) ListInstrumentGroupsForProject(ctx context.Context, projectID return items, nil } -const updateInstrumentGroup = `-- name: UpdateInstrumentGroup :one +const instrumentGroupUpdate = `-- name: InstrumentGroupUpdate :one update instrument_group set name = $2, - deleted = $3, - description = $4, - updater = $5, - update_date = $6, - project_id = $7 + description = $3, + updater = $4, + update_date = $5, + project_id = $6 where id = $1 returning id, deleted, slug, name, description, creator, create_date, updater, update_date, project_id ` -type UpdateInstrumentGroupParams struct { +type InstrumentGroupUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` - Deleted bool `json:"deleted"` Description *string `json:"description"` Updater *uuid.UUID `json:"updater"` UpdateDate *time.Time `json:"update_date"` ProjectID *uuid.UUID `json:"project_id"` } -func (q *Queries) UpdateInstrumentGroup(ctx context.Context, arg UpdateInstrumentGroupParams) (InstrumentGroup, error) { - row := q.db.QueryRow(ctx, updateInstrumentGroup, +func (q *Queries) InstrumentGroupUpdate(ctx context.Context, arg InstrumentGroupUpdateParams) (InstrumentGroup, error) { + row := q.db.QueryRow(ctx, instrumentGroupUpdate, arg.ID, arg.Name, - arg.Deleted, arg.Description, arg.Updater, arg.UpdateDate, diff --git a/api/internal/db/instrument_incl.sql_gen.go b/api/internal/db/instrument_incl.sql_gen.go index e3ee5e91..7966917f 100644 --- 
a/api/internal/db/instrument_incl.sql_gen.go +++ b/api/internal/db/instrument_incl.sql_gen.go @@ -12,20 +12,58 @@ import ( "github.com/google/uuid" ) -const createInclOpts = `-- name: CreateInclOpts :exec +const inclMeasurementListForInstrumentRange = `-- name: InclMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_incl_measurement m1 +where m1.instrument_id=$1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_incl_measurement m2 +where m2.time in (select o.initial_time from incl_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc +` + +type InclMeasurementListForInstrumentRangeParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StartTime time.Time `json:"start_time"` + EndTime time.Time `json:"end_time"` +} + +func (q *Queries) InclMeasurementListForInstrumentRange(ctx context.Context, arg InclMeasurementListForInstrumentRangeParams) ([]VInclMeasurement, error) { + rows, err := q.db.Query(ctx, inclMeasurementListForInstrumentRange, arg.InstrumentID, arg.StartTime, arg.EndTime) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInclMeasurement{} + for rows.Next() { + var i VInclMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const inclOptsCreate = `-- name: InclOptsCreate :exec insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4) ` -type CreateInclOptsParams struct { +type InclOptsCreateParams struct { InstrumentID uuid.UUID `json:"instrument_id"` NumSegments int32 `json:"num_segments"` BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` InitialTime *time.Time `json:"initial_time"` } -func (q *Queries) CreateInclOpts(ctx 
context.Context, arg CreateInclOptsParams) error { - _, err := q.db.Exec(ctx, createInclOpts, +func (q *Queries) InclOptsCreate(ctx context.Context, arg InclOptsCreateParams) error { + _, err := q.db.Exec(ctx, inclOptsCreate, arg.InstrumentID, arg.NumSegments, arg.BottomElevationTimeseriesID, @@ -34,7 +72,25 @@ func (q *Queries) CreateInclOpts(ctx context.Context, arg CreateInclOptsParams) return err } -const createInclSegment = `-- name: CreateInclSegment :exec +const inclOptsUpdate = `-- name: InclOptsUpdate :exec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type InclOptsUpdateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) InclOptsUpdate(ctx context.Context, arg InclOptsUpdateParams) error { + _, err := q.db.Exec(ctx, inclOptsUpdate, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const inclSegmentCreate = `-- name: InclSegmentCreate :exec insert into incl_segment ( id, instrument_id, @@ -46,7 +102,7 @@ insert into incl_segment ( ) values ($1, $2, $3, $4, $5, $6, $7) ` -type CreateInclSegmentParams struct { +type InclSegmentCreateParams struct { ID int32 `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` @@ -56,8 +112,8 @@ type CreateInclSegmentParams struct { B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` } -func (q *Queries) CreateInclSegment(ctx context.Context, arg CreateInclSegmentParams) error { - _, err := q.db.Exec(ctx, createInclSegment, +func (q *Queries) InclSegmentCreate(ctx context.Context, arg InclSegmentCreateParams) error { + _, err := q.db.Exec(ctx, inclSegmentCreate, arg.ID, arg.InstrumentID, arg.DepthTimeseriesID, @@ -69,12 +125,12 @@ func (q *Queries) CreateInclSegment(ctx context.Context, arg 
CreateInclSegmentPa return err } -const getAllInclSegmentsForInstrument = `-- name: GetAllInclSegmentsForInstrument :many +const inclSegmentListForInstrument = `-- name: InclSegmentListForInstrument :many select id, instrument_id, depth_timeseries_id, a0_timeseries_id, a180_timeseries_id, b0_timeseries_id, b180_timeseries_id from v_incl_segment where instrument_id = $1 ` -func (q *Queries) GetAllInclSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error) { - rows, err := q.db.Query(ctx, getAllInclSegmentsForInstrument, instrumentID) +func (q *Queries) InclSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error) { + rows, err := q.db.Query(ctx, inclSegmentListForInstrument, instrumentID) if err != nil { return nil, err } @@ -101,63 +157,7 @@ func (q *Queries) GetAllInclSegmentsForInstrument(ctx context.Context, instrumen return items, nil } -const getInclMeasurementsForInstrument = `-- name: GetInclMeasurementsForInstrument :many -select m1.instrument_id, m1.time, m1.measurements -from v_incl_measurement m1 -where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 -union -select m2.instrument_id, m2.time, m2.measurements -from v_incl_measurement m2 -where m2.time in (select o.initial_time from incl_opts o where o.instrument_id = $1) -and m2.instrument_id = $1 -order by time asc -` - -type GetInclMeasurementsForInstrumentParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - Time time.Time `json:"time"` - Time_2 time.Time `json:"time_2"` -} - -func (q *Queries) GetInclMeasurementsForInstrument(ctx context.Context, arg GetInclMeasurementsForInstrumentParams) ([]VInclMeasurement, error) { - rows, err := q.db.Query(ctx, getInclMeasurementsForInstrument, arg.InstrumentID, arg.Time, arg.Time_2) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VInclMeasurement{} - for rows.Next() { - var i VInclMeasurement - if err := rows.Scan(&i.InstrumentID, &i.Time, 
&i.Measurements); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const updateInclOpts = `-- name: UpdateInclOpts :exec -update incl_opts set - bottom_elevation_timeseries_id = $2, - initial_time = $3 -where instrument_id = $1 -` - -type UpdateInclOptsParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime *time.Time `json:"initial_time"` -} - -func (q *Queries) UpdateInclOpts(ctx context.Context, arg UpdateInclOptsParams) error { - _, err := q.db.Exec(ctx, updateInclOpts, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) - return err -} - -const updateInclSegment = `-- name: UpdateInclSegment :exec +const inclSegmentUpdate = `-- name: InclSegmentUpdate :exec update incl_segment set depth_timeseries_id=$3, a0_timeseries_id=$4, @@ -167,7 +167,7 @@ update incl_segment set where id = $1 and instrument_id = $2 ` -type UpdateInclSegmentParams struct { +type InclSegmentUpdateParams struct { ID int32 `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` @@ -177,8 +177,8 @@ type UpdateInclSegmentParams struct { B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` } -func (q *Queries) UpdateInclSegment(ctx context.Context, arg UpdateInclSegmentParams) error { - _, err := q.db.Exec(ctx, updateInclSegment, +func (q *Queries) InclSegmentUpdate(ctx context.Context, arg InclSegmentUpdateParams) error { + _, err := q.db.Exec(ctx, inclSegmentUpdate, arg.ID, arg.InstrumentID, arg.DepthTimeseriesID, diff --git a/api/internal/db/instrument_ipi.sql_gen.go b/api/internal/db/instrument_ipi.sql_gen.go index bfc859ea..54a03882 100644 --- a/api/internal/db/instrument_ipi.sql_gen.go +++ b/api/internal/db/instrument_ipi.sql_gen.go @@ -12,20 +12,58 @@ import ( "github.com/google/uuid" ) -const createIpiOpts = 
`-- name: CreateIpiOpts :exec +const ipiMeasurementListForInstrumentRange = `-- name: IpiMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_ipi_measurement m1 +where m1.instrument_id=$1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_ipi_measurement m2 +where m2.time in (select o.initial_time from ipi_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc +` + +type IpiMeasurementListForInstrumentRangeParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StartTime time.Time `json:"start_time"` + EndTime time.Time `json:"end_time"` +} + +func (q *Queries) IpiMeasurementListForInstrumentRange(ctx context.Context, arg IpiMeasurementListForInstrumentRangeParams) ([]VIpiMeasurement, error) { + rows, err := q.db.Query(ctx, ipiMeasurementListForInstrumentRange, arg.InstrumentID, arg.StartTime, arg.EndTime) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VIpiMeasurement{} + for rows.Next() { + var i VIpiMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const ipiOptsCreate = `-- name: IpiOptsCreate :exec insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4) ` -type CreateIpiOptsParams struct { +type IpiOptsCreateParams struct { InstrumentID uuid.UUID `json:"instrument_id"` NumSegments int32 `json:"num_segments"` BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` InitialTime *time.Time `json:"initial_time"` } -func (q *Queries) CreateIpiOpts(ctx context.Context, arg CreateIpiOptsParams) error { - _, err := q.db.Exec(ctx, createIpiOpts, +func (q *Queries) IpiOptsCreate(ctx context.Context, arg IpiOptsCreateParams) error { + _, err := 
q.db.Exec(ctx, ipiOptsCreate, arg.InstrumentID, arg.NumSegments, arg.BottomElevationTimeseriesID, @@ -34,7 +72,25 @@ func (q *Queries) CreateIpiOpts(ctx context.Context, arg CreateIpiOptsParams) er return err } -const createIpiSegment = `-- name: CreateIpiSegment :exec +const ipiOptsUpdate = `-- name: IpiOptsUpdate :exec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type IpiOptsUpdateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) IpiOptsUpdate(ctx context.Context, arg IpiOptsUpdateParams) error { + _, err := q.db.Exec(ctx, ipiOptsUpdate, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const ipiSegmentCreate = `-- name: IpiSegmentCreate :exec insert into ipi_segment ( id, instrument_id, @@ -45,7 +101,7 @@ insert into ipi_segment ( ) values ($1, $2, $3, $4, $5, $6) ` -type CreateIpiSegmentParams struct { +type IpiSegmentCreateParams struct { ID int32 `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` @@ -54,8 +110,8 @@ type CreateIpiSegmentParams struct { TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } -func (q *Queries) CreateIpiSegment(ctx context.Context, arg CreateIpiSegmentParams) error { - _, err := q.db.Exec(ctx, createIpiSegment, +func (q *Queries) IpiSegmentCreate(ctx context.Context, arg IpiSegmentCreateParams) error { + _, err := q.db.Exec(ctx, ipiSegmentCreate, arg.ID, arg.InstrumentID, arg.LengthTimeseriesID, @@ -66,12 +122,12 @@ func (q *Queries) CreateIpiSegment(ctx context.Context, arg CreateIpiSegmentPara return err } -const getAllIpiSegmentsForInstrument = `-- name: GetAllIpiSegmentsForInstrument :many +const ipiSegmentListForInstrument = `-- name: IpiSegmentListForInstrument :many select id, 
instrument_id, length_timeseries_id, length, tilt_timeseries_id, inc_dev_timeseries_id from v_ipi_segment where instrument_id = $1 ` -func (q *Queries) GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) { - rows, err := q.db.Query(ctx, getAllIpiSegmentsForInstrument, instrumentID) +func (q *Queries) IpiSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) { + rows, err := q.db.Query(ctx, ipiSegmentListForInstrument, instrumentID) if err != nil { return nil, err } @@ -97,63 +153,7 @@ func (q *Queries) GetAllIpiSegmentsForInstrument(ctx context.Context, instrument return items, nil } -const getIpiMeasurementsForInstrument = `-- name: GetIpiMeasurementsForInstrument :many -select m1.instrument_id, m1.time, m1.measurements -from v_ipi_measurement m1 -where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 -union -select m2.instrument_id, m2.time, m2.measurements -from v_ipi_measurement m2 -where m2.time in (select o.initial_time from ipi_opts o where o.instrument_id = $1) -and m2.instrument_id = $1 -order by time asc -` - -type GetIpiMeasurementsForInstrumentParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - Time time.Time `json:"time"` - Time_2 time.Time `json:"time_2"` -} - -func (q *Queries) GetIpiMeasurementsForInstrument(ctx context.Context, arg GetIpiMeasurementsForInstrumentParams) ([]VIpiMeasurement, error) { - rows, err := q.db.Query(ctx, getIpiMeasurementsForInstrument, arg.InstrumentID, arg.Time, arg.Time_2) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VIpiMeasurement{} - for rows.Next() { - var i VIpiMeasurement - if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const updateIpiOpts = `-- name: UpdateIpiOpts :exec -update ipi_opts set - 
bottom_elevation_timeseries_id = $2, - initial_time = $3 -where instrument_id = $1 -` - -type UpdateIpiOptsParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime *time.Time `json:"initial_time"` -} - -func (q *Queries) UpdateIpiOpts(ctx context.Context, arg UpdateIpiOptsParams) error { - _, err := q.db.Exec(ctx, updateIpiOpts, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) - return err -} - -const updateIpiSegment = `-- name: UpdateIpiSegment :exec +const ipiSegmentUpdate = `-- name: IpiSegmentUpdate :exec update ipi_segment set length_timeseries_id = $3, tilt_timeseries_id = $4, @@ -162,7 +162,7 @@ update ipi_segment set where id = $1 and instrument_id = $2 ` -type UpdateIpiSegmentParams struct { +type IpiSegmentUpdateParams struct { ID int32 `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` @@ -171,8 +171,8 @@ type UpdateIpiSegmentParams struct { TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } -func (q *Queries) UpdateIpiSegment(ctx context.Context, arg UpdateIpiSegmentParams) error { - _, err := q.db.Exec(ctx, updateIpiSegment, +func (q *Queries) IpiSegmentUpdate(ctx context.Context, arg IpiSegmentUpdateParams) error { + _, err := q.db.Exec(ctx, ipiSegmentUpdate, arg.ID, arg.InstrumentID, arg.LengthTimeseriesID, diff --git a/api/internal/db/instrument_note.sql_gen.go b/api/internal/db/instrument_note.sql_gen.go index 94eb74e8..c257c93b 100644 --- a/api/internal/db/instrument_note.sql_gen.go +++ b/api/internal/db/instrument_note.sql_gen.go @@ -12,13 +12,13 @@ import ( "github.com/google/uuid" ) -const createInstrumentNote = `-- name: CreateInstrumentNote :one +const instrumentNoteCreate = `-- name: InstrumentNoteCreate :one insert into instrument_note (instrument_id, title, body, time, creator, create_date) values ($1, $2, $3, $4, $5, $6) returning id, 
instrument_id, title, body, time, creator, create_date, updater, update_date ` -type CreateInstrumentNoteParams struct { +type InstrumentNoteCreateParams struct { InstrumentID uuid.UUID `json:"instrument_id"` Title string `json:"title"` Body string `json:"body"` @@ -27,8 +27,8 @@ type CreateInstrumentNoteParams struct { CreateDate time.Time `json:"create_date"` } -func (q *Queries) CreateInstrumentNote(ctx context.Context, arg CreateInstrumentNoteParams) (InstrumentNote, error) { - row := q.db.QueryRow(ctx, createInstrumentNote, +func (q *Queries) InstrumentNoteCreate(ctx context.Context, arg InstrumentNoteCreateParams) (InstrumentNote, error) { + row := q.db.QueryRow(ctx, instrumentNoteCreate, arg.InstrumentID, arg.Title, arg.Body, @@ -51,39 +51,46 @@ func (q *Queries) CreateInstrumentNote(ctx context.Context, arg CreateInstrument return i, err } -const deleteInstrumentNote = `-- name: DeleteInstrumentNote :exec +const instrumentNoteDelete = `-- name: InstrumentNoteDelete :exec delete from instrument_note where id = $1 ` -func (q *Queries) DeleteInstrumentNote(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteInstrumentNote, id) +func (q *Queries) InstrumentNoteDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, instrumentNoteDelete, id) return err } -const listInstrumentNotes = `-- name: ListInstrumentNotes :many -select - id, - instrument_id, - title, - body, - time, - creator, - create_date, - updater, - update_date +const instrumentNoteGet = `-- name: InstrumentNoteGet :one +select id, instrument_id, title, body, time, creator, create_date, updater, update_date from instrument_note -where ($2 is null or $2 = $1) -and ($3 is null or $3 = $1) +where id = $1 ` -type ListInstrumentNotesParams struct { - Column1 interface{} `json:"column_1"` - InstrumentID interface{} `json:"instrument_id"` - ID interface{} `json:"id"` +func (q *Queries) InstrumentNoteGet(ctx context.Context, id uuid.UUID) (InstrumentNote, error) { 
+ row := q.db.QueryRow(ctx, instrumentNoteGet, id) + var i InstrumentNote + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.Creator, + &i.CreateDate, + &i.Updater, + &i.UpdateDate, + ) + return i, err } -func (q *Queries) ListInstrumentNotes(ctx context.Context, arg ListInstrumentNotesParams) ([]InstrumentNote, error) { - rows, err := q.db.Query(ctx, listInstrumentNotes, arg.Column1, arg.InstrumentID, arg.ID) +const instrumentNoteListForInstrument = `-- name: InstrumentNoteListForInstrument :many +select id, instrument_id, title, body, time, creator, create_date, updater, update_date +from instrument_note +where instrument_id = $1 +` + +func (q *Queries) InstrumentNoteListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]InstrumentNote, error) { + rows, err := q.db.Query(ctx, instrumentNoteListForInstrument, instrumentID) if err != nil { return nil, err } @@ -112,18 +119,18 @@ func (q *Queries) ListInstrumentNotes(ctx context.Context, arg ListInstrumentNot return items, nil } -const updateInstrumentNote = `-- name: UpdateInstrumentNote :one +const instrumentNoteUpdate = `-- name: InstrumentNoteUpdate :one update instrument_note set - title = $2, - body = $3, - time = $4, - updater = $5, - update_date = $6 + title=$2, + body=$3, + time=$4, + updater=$5, + update_date=$6 where id = $1 returning id, instrument_id, title, body, time, creator, create_date, updater, update_date ` -type UpdateInstrumentNoteParams struct { +type InstrumentNoteUpdateParams struct { ID uuid.UUID `json:"id"` Title string `json:"title"` Body string `json:"body"` @@ -132,8 +139,8 @@ type UpdateInstrumentNoteParams struct { UpdateDate *time.Time `json:"update_date"` } -func (q *Queries) UpdateInstrumentNote(ctx context.Context, arg UpdateInstrumentNoteParams) (InstrumentNote, error) { - row := q.db.QueryRow(ctx, updateInstrumentNote, +func (q *Queries) InstrumentNoteUpdate(ctx context.Context, arg InstrumentNoteUpdateParams) (InstrumentNote, error) 
{ + row := q.db.QueryRow(ctx, instrumentNoteUpdate, arg.ID, arg.Title, arg.Body, diff --git a/api/internal/db/instrument_saa.sql_gen.go b/api/internal/db/instrument_saa.sql_gen.go index 7d141014..f5a44f33 100644 --- a/api/internal/db/instrument_saa.sql_gen.go +++ b/api/internal/db/instrument_saa.sql_gen.go @@ -12,20 +12,58 @@ import ( "github.com/google/uuid" ) -const createSaaOpts = `-- name: CreateSaaOpts :exec +const saaMeasurementListForInstrumentRange = `-- name: SaaMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_saa_measurement m1 +where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_saa_measurement m2 +where m2.time in (select o.initial_time from saa_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc +` + +type SaaMeasurementListForInstrumentRangeParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StartTime time.Time `json:"start_time"` + EndTime time.Time `json:"end_time"` +} + +func (q *Queries) SaaMeasurementListForInstrumentRange(ctx context.Context, arg SaaMeasurementListForInstrumentRangeParams) ([]VSaaMeasurement, error) { + rows, err := q.db.Query(ctx, saaMeasurementListForInstrumentRange, arg.InstrumentID, arg.StartTime, arg.EndTime) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSaaMeasurement{} + for rows.Next() { + var i VSaaMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const saaOptsCreate = `-- name: SaaOptsCreate :exec insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4) ` -type CreateSaaOptsParams struct { +type SaaOptsCreateParams struct { InstrumentID uuid.UUID `json:"instrument_id"` NumSegments 
int32 `json:"num_segments"` BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` InitialTime *time.Time `json:"initial_time"` } -func (q *Queries) CreateSaaOpts(ctx context.Context, arg CreateSaaOptsParams) error { - _, err := q.db.Exec(ctx, createSaaOpts, +func (q *Queries) SaaOptsCreate(ctx context.Context, arg SaaOptsCreateParams) error { + _, err := q.db.Exec(ctx, saaOptsCreate, arg.InstrumentID, arg.NumSegments, arg.BottomElevationTimeseriesID, @@ -34,7 +72,25 @@ func (q *Queries) CreateSaaOpts(ctx context.Context, arg CreateSaaOptsParams) er return err } -const createSaaSegment = `-- name: CreateSaaSegment :exec +const saaOptsUpdate = `-- name: SaaOptsUpdate :exec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type SaaOptsUpdateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) SaaOptsUpdate(ctx context.Context, arg SaaOptsUpdateParams) error { + _, err := q.db.Exec(ctx, saaOptsUpdate, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const saaSegmentCreate = `-- name: SaaSegmentCreate :exec insert into saa_segment ( id, instrument_id, @@ -46,7 +102,7 @@ insert into saa_segment ( ) values ($1, $2, $3, $4, $5, $6, $7) ` -type CreateSaaSegmentParams struct { +type SaaSegmentCreateParams struct { ID int32 `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` @@ -56,8 +112,8 @@ type CreateSaaSegmentParams struct { TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } -func (q *Queries) CreateSaaSegment(ctx context.Context, arg CreateSaaSegmentParams) error { - _, err := q.db.Exec(ctx, createSaaSegment, +func (q *Queries) SaaSegmentCreate(ctx context.Context, arg SaaSegmentCreateParams) error { + _, err := 
q.db.Exec(ctx, saaSegmentCreate, arg.ID, arg.InstrumentID, arg.LengthTimeseriesID, @@ -69,12 +125,12 @@ func (q *Queries) CreateSaaSegment(ctx context.Context, arg CreateSaaSegmentPara return err } -const getAllSaaSegmentsForInstrument = `-- name: GetAllSaaSegmentsForInstrument :many +const saaSegmentListForInstrument = `-- name: SaaSegmentListForInstrument :many select id, instrument_id, length_timeseries_id, length, x_timeseries_id, y_timeseries_id, z_timeseries_id, temp_timeseries_id from v_saa_segment where instrument_id = $1 ` -func (q *Queries) GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VSaaSegment, error) { - rows, err := q.db.Query(ctx, getAllSaaSegmentsForInstrument, instrumentID) +func (q *Queries) SaaSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VSaaSegment, error) { + rows, err := q.db.Query(ctx, saaSegmentListForInstrument, instrumentID) if err != nil { return nil, err } @@ -102,63 +158,7 @@ func (q *Queries) GetAllSaaSegmentsForInstrument(ctx context.Context, instrument return items, nil } -const getSaaMeasurementsForInstrument = `-- name: GetSaaMeasurementsForInstrument :many -select m1.instrument_id, m1.time, m1.measurements -from v_saa_measurement m1 -where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 -union -select m2.instrument_id, m2.time, m2.measurements -from v_saa_measurement m2 -where m2.time in (select o.initial_time from saa_opts o where o.instrument_id = $1) -and m2.instrument_id = $1 -order by time asc -` - -type GetSaaMeasurementsForInstrumentParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - Time time.Time `json:"time"` - Time_2 time.Time `json:"time_2"` -} - -func (q *Queries) GetSaaMeasurementsForInstrument(ctx context.Context, arg GetSaaMeasurementsForInstrumentParams) ([]VSaaMeasurement, error) { - rows, err := q.db.Query(ctx, getSaaMeasurementsForInstrument, arg.InstrumentID, arg.Time, arg.Time_2) - if err != nil { - return nil, err - } - 
defer rows.Close() - items := []VSaaMeasurement{} - for rows.Next() { - var i VSaaMeasurement - if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const updateSaaOpts = `-- name: UpdateSaaOpts :exec -update saa_opts set - bottom_elevation_timeseries_id = $2, - initial_time = $3 -where instrument_id = $1 -` - -type UpdateSaaOptsParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` - InitialTime *time.Time `json:"initial_time"` -} - -func (q *Queries) UpdateSaaOpts(ctx context.Context, arg UpdateSaaOptsParams) error { - _, err := q.db.Exec(ctx, updateSaaOpts, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) - return err -} - -const updateSaaSegment = `-- name: UpdateSaaSegment :exec +const saaSegmentUpdate = `-- name: SaaSegmentUpdate :exec update saa_segment set length_timeseries_id = $3, x_timeseries_id = $4, @@ -168,7 +168,7 @@ update saa_segment set where id = $1 and instrument_id = $2 ` -type UpdateSaaSegmentParams struct { +type SaaSegmentUpdateParams struct { ID int32 `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` @@ -178,8 +178,8 @@ type UpdateSaaSegmentParams struct { TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` } -func (q *Queries) UpdateSaaSegment(ctx context.Context, arg UpdateSaaSegmentParams) error { - _, err := q.db.Exec(ctx, updateSaaSegment, +func (q *Queries) SaaSegmentUpdate(ctx context.Context, arg SaaSegmentUpdateParams) error { + _, err := q.db.Exec(ctx, saaSegmentUpdate, arg.ID, arg.InstrumentID, arg.LengthTimeseriesID, diff --git a/api/internal/db/instrument_status.sql_gen.go b/api/internal/db/instrument_status.sql_gen.go index a982de3e..235ecbef 100644 --- 
a/api/internal/db/instrument_status.sql_gen.go +++ b/api/internal/db/instrument_status.sql_gen.go @@ -12,67 +12,67 @@ import ( "github.com/google/uuid" ) -const createOrUpdateInstrumentStatus = `-- name: CreateOrUpdateInstrumentStatus :exec +const instrumentStatusCreateOrUpdate = `-- name: InstrumentStatusCreateOrUpdate :exec insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id ` -type CreateOrUpdateInstrumentStatusParams struct { +type InstrumentStatusCreateOrUpdateParams struct { InstrumentID uuid.UUID `json:"instrument_id"` StatusID uuid.UUID `json:"status_id"` Time time.Time `json:"time"` } -func (q *Queries) CreateOrUpdateInstrumentStatus(ctx context.Context, arg CreateOrUpdateInstrumentStatusParams) error { - _, err := q.db.Exec(ctx, createOrUpdateInstrumentStatus, arg.InstrumentID, arg.StatusID, arg.Time) +func (q *Queries) InstrumentStatusCreateOrUpdate(ctx context.Context, arg InstrumentStatusCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, instrumentStatusCreateOrUpdate, arg.InstrumentID, arg.StatusID, arg.Time) return err } -const deleteInstrumentStatus = `-- name: DeleteInstrumentStatus :exec +const instrumentStatusDelete = `-- name: InstrumentStatusDelete :exec delete from instrument_status where id = $1 ` -func (q *Queries) DeleteInstrumentStatus(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteInstrumentStatus, id) +func (q *Queries) InstrumentStatusDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, instrumentStatusDelete, id) return err } -const listInstrumentStatuses = `-- name: ListInstrumentStatuses :many -select - s.id, - s.status_id, - d.name status, - s.time -from instrument_status s -inner join status d on d.id = s.status_id -where ($1 is null or $1 = s.instrument_id) -and ($2 is null or $2 = s.id) -order by time desc +const instrumentStatusGet = `-- 
name: InstrumentStatusGet :one +select id, instrument_id, status_id, status, time from v_instrument_status +where id=$1 ` -type ListInstrumentStatusesParams struct { - InstrumentID interface{} `json:"instrument_id"` - ID interface{} `json:"id"` +func (q *Queries) InstrumentStatusGet(ctx context.Context, id uuid.UUID) (VInstrumentStatus, error) { + row := q.db.QueryRow(ctx, instrumentStatusGet, id) + var i VInstrumentStatus + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.StatusID, + &i.Status, + &i.Time, + ) + return i, err } -type ListInstrumentStatusesRow struct { - ID uuid.UUID `json:"id"` - StatusID uuid.UUID `json:"status_id"` - Status string `json:"status"` - Time time.Time `json:"time"` -} +const instrumentStatusListForInstrument = `-- name: InstrumentStatusListForInstrument :many +select id, instrument_id, status_id, status, time from v_instrument_status +where instrument_id=$1 +order by time desc +` -func (q *Queries) ListInstrumentStatuses(ctx context.Context, arg ListInstrumentStatusesParams) ([]ListInstrumentStatusesRow, error) { - rows, err := q.db.Query(ctx, listInstrumentStatuses, arg.InstrumentID, arg.ID) +func (q *Queries) InstrumentStatusListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInstrumentStatus, error) { + rows, err := q.db.Query(ctx, instrumentStatusListForInstrument, instrumentID) if err != nil { return nil, err } defer rows.Close() - items := []ListInstrumentStatusesRow{} + items := []VInstrumentStatus{} for rows.Next() { - var i ListInstrumentStatusesRow + var i VInstrumentStatus if err := rows.Scan( &i.ID, + &i.InstrumentID, &i.StatusID, &i.Status, &i.Time, diff --git a/api/internal/db/manual.go b/api/internal/db/manual.go new file mode 100644 index 00000000..acfa554b --- /dev/null +++ b/api/internal/db/manual.go @@ -0,0 +1,8 @@ +package db + +import "github.com/jackc/pgx/v5" + +func CollectRows[T any](rows pgx.Rows) ([]T, error) { + ss, err := pgx.CollectRows(rows, pgx.RowToStructByName[T]) + return ss, err +} 
diff --git a/api/internal/db/measurement.manual.go b/api/internal/db/measurement.manual.go new file mode 100644 index 00000000..e45ee041 --- /dev/null +++ b/api/internal/db/measurement.manual.go @@ -0,0 +1,99 @@ +package db + +import ( + "math" + "time" +) + +type MeasurementGetter interface { + getTime() time.Time + getValue() float64 +} + +func (m VTimeseriesMeasurement) getTime() time.Time { + return m.Time +} + +func (m VTimeseriesMeasurement) getValue() float64 { + return float64(m.Value) +} + +// A slightly modified LTTB (Largest-Triange-Three-Buckets) algorithm for downsampling timeseries measurements +// https://godoc.org/github.com/dgryski/go-lttb +func LTTB[T MeasurementGetter](data []T, threshold int) []T { + if threshold == 0 || threshold >= len(data) { + return data // Nothing to do + } + + if threshold < 3 { + threshold = 3 + } + + sampled := make([]T, 0, threshold) + + // Bucket size. Leave room for start and end data points + every := float64(len(data)-2) / float64(threshold-2) + + sampled = append(sampled, data[0]) // Always add the first point + + bucketStart := 1 + bucketCenter := int(math.Floor(every)) + 1 + + var a int + + for i := 0; i < threshold-2; i++ { + + bucketEnd := int(math.Floor(float64(i+2)*every)) + 1 + + // Calculate point average for next bucket (containing c) + avgRangeStart := bucketCenter + avgRangeEnd := bucketEnd + + if avgRangeEnd >= len(data) { + avgRangeEnd = len(data) + } + + avgRangeLength := float64(avgRangeEnd - avgRangeStart) + + var avgX, avgY float64 + for ; avgRangeStart < avgRangeEnd; avgRangeStart++ { + avgX += time.Duration(data[avgRangeStart].getTime().Unix()).Seconds() + avgY += data[avgRangeStart].getValue() + } + avgX /= avgRangeLength + avgY /= avgRangeLength + + // Get the range for this bucket + rangeOffs := bucketStart + rangeTo := bucketCenter + + // Point a + pointAX := time.Duration(data[a].getTime().UnixNano()).Seconds() + pointAY := data[a].getValue() + + maxArea := float64(-1.0) + + var nextA int + 
for ; rangeOffs < rangeTo; rangeOffs++ { + // Calculate triangle area over three buckets + area := (pointAX-avgX)*(data[rangeOffs].getValue()-pointAY) - (pointAX-time.Duration(data[rangeOffs].getTime().Unix()).Seconds())*(avgY-pointAY) + // We only care about the relative area here. + // Calling math.Abs() is slower than squaring + area *= area + if area > maxArea { + maxArea = area + nextA = rangeOffs // Next a is this b + } + } + + sampled = append(sampled, data[nextA]) // Pick this point from the bucket + a = nextA // This a is the next a (chosen b) + + bucketStart = bucketCenter + bucketCenter = bucketEnd + } + + sampled = append(sampled, data[len(data)-1]) // Always add last + + return sampled +} diff --git a/api/internal/db/measurement.sql_gen.go b/api/internal/db/measurement.sql_gen.go index f195fba5..c59264be 100644 --- a/api/internal/db/measurement.sql_gen.go +++ b/api/internal/db/measurement.sql_gen.go @@ -12,181 +12,112 @@ import ( "github.com/google/uuid" ) -const createOrUpdateTimeseriesMeasurement = `-- name: CreateOrUpdateTimeseriesMeasurement :exec +const timeseriesMeasurementCreate = `-- name: TimeseriesMeasurementCreate :exec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) -on conflict on constraint timeseries_unique_time do update set value = excluded.value +on conflict on constraint timeseries_unique_time do nothing ` -type CreateOrUpdateTimeseriesMeasurementParams struct { +type TimeseriesMeasurementCreateParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Time time.Time `json:"time"` Value float64 `json:"value"` } -func (q *Queries) CreateOrUpdateTimeseriesMeasurement(ctx context.Context, arg CreateOrUpdateTimeseriesMeasurementParams) error { - _, err := q.db.Exec(ctx, createOrUpdateTimeseriesMeasurement, arg.TimeseriesID, arg.Time, arg.Value) - return err -} - -const createOrUpdateTimeseriesNote = `-- name: CreateOrUpdateTimeseriesNote :exec -insert into timeseries_notes (timeseries_id, time, 
masked, validated, annotation) values ($1, $2, $3, $4, $5) -on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation -` - -type CreateOrUpdateTimeseriesNoteParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - Time time.Time `json:"time"` - Masked *bool `json:"masked"` - Validated *bool `json:"validated"` - Annotation *string `json:"annotation"` -} - -func (q *Queries) CreateOrUpdateTimeseriesNote(ctx context.Context, arg CreateOrUpdateTimeseriesNoteParams) error { - _, err := q.db.Exec(ctx, createOrUpdateTimeseriesNote, - arg.TimeseriesID, - arg.Time, - arg.Masked, - arg.Validated, - arg.Annotation, - ) +func (q *Queries) TimeseriesMeasurementCreate(ctx context.Context, arg TimeseriesMeasurementCreateParams) error { + _, err := q.db.Exec(ctx, timeseriesMeasurementCreate, arg.TimeseriesID, arg.Time, arg.Value) return err } -const createTimeseriesMeasurement = `-- name: CreateTimeseriesMeasurement :exec +const timeseriesMeasurementCreateOrUpdate = `-- name: TimeseriesMeasurementCreateOrUpdate :exec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) -on conflict on constraint timeseries_unique_time do nothing +on conflict on constraint timeseries_unique_time do update set value = excluded.value ` -type CreateTimeseriesMeasurementParams struct { +type TimeseriesMeasurementCreateOrUpdateParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Time time.Time `json:"time"` Value float64 `json:"value"` } -func (q *Queries) CreateTimeseriesMeasurement(ctx context.Context, arg CreateTimeseriesMeasurementParams) error { - _, err := q.db.Exec(ctx, createTimeseriesMeasurement, arg.TimeseriesID, arg.Time, arg.Value) +func (q *Queries) TimeseriesMeasurementCreateOrUpdate(ctx context.Context, arg TimeseriesMeasurementCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesMeasurementCreateOrUpdate, arg.TimeseriesID, arg.Time, 
arg.Value) return err } -const createTimeseriesNote = `-- name: CreateTimeseriesNote :exec -insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) -on conflict on constraint notes_unique_time do nothing +const timeseriesMeasurementDelete = `-- name: TimeseriesMeasurementDelete :exec +delete from timeseries_measurement where timeseries_id=$1 and time=$2 ` -type CreateTimeseriesNoteParams struct { +type TimeseriesMeasurementDeleteParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Time time.Time `json:"time"` - Masked *bool `json:"masked"` - Validated *bool `json:"validated"` - Annotation *string `json:"annotation"` } -func (q *Queries) CreateTimeseriesNote(ctx context.Context, arg CreateTimeseriesNoteParams) error { - _, err := q.db.Exec(ctx, createTimeseriesNote, - arg.TimeseriesID, - arg.Time, - arg.Masked, - arg.Validated, - arg.Annotation, - ) +func (q *Queries) TimeseriesMeasurementDelete(ctx context.Context, arg TimeseriesMeasurementDeleteParams) error { + _, err := q.db.Exec(ctx, timeseriesMeasurementDelete, arg.TimeseriesID, arg.Time) return err } -const deleteTimeseriesMeasurement = `-- name: DeleteTimeseriesMeasurement :exec -delete from timeseries_measurement where timeseries_id = $1 and time = $2 -` - -type DeleteTimeseriesMeasurementParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - Time time.Time `json:"time"` -} - -func (q *Queries) DeleteTimeseriesMeasurement(ctx context.Context, arg DeleteTimeseriesMeasurementParams) error { - _, err := q.db.Exec(ctx, deleteTimeseriesMeasurement, arg.TimeseriesID, arg.Time) - return err -} - -const deleteTimeseriesMeasurements = `-- name: DeleteTimeseriesMeasurements :exec -delete from timeseries_measurement where timeseries_id = $1 and time = $2 -` - -type DeleteTimeseriesMeasurementsParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - Time time.Time `json:"time"` -} - -func (q *Queries) DeleteTimeseriesMeasurements(ctx 
context.Context, arg DeleteTimeseriesMeasurementsParams) error { - _, err := q.db.Exec(ctx, deleteTimeseriesMeasurements, arg.TimeseriesID, arg.Time) - return err -} - -const deleteTimeseriesMeasurementsRange = `-- name: DeleteTimeseriesMeasurementsRange :exec +const timeseriesMeasurementDeleteRange = `-- name: TimeseriesMeasurementDeleteRange :exec delete from timeseries_measurement where timeseries_id = $1 and time > $2 and time < $3 ` -type DeleteTimeseriesMeasurementsRangeParams struct { +type TimeseriesMeasurementDeleteRangeParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` After time.Time `json:"after"` Before time.Time `json:"before"` } -func (q *Queries) DeleteTimeseriesMeasurementsRange(ctx context.Context, arg DeleteTimeseriesMeasurementsRangeParams) error { - _, err := q.db.Exec(ctx, deleteTimeseriesMeasurementsRange, arg.TimeseriesID, arg.After, arg.Before) +func (q *Queries) TimeseriesMeasurementDeleteRange(ctx context.Context, arg TimeseriesMeasurementDeleteRangeParams) error { + _, err := q.db.Exec(ctx, timeseriesMeasurementDeleteRange, arg.TimeseriesID, arg.After, arg.Before) return err } -const deleteTimeseriesNoteRange = `-- name: DeleteTimeseriesNoteRange :exec -delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3 +const timeseriesMeasurementGetMostRecent = `-- name: TimeseriesMeasurementGetMostRecent :one +select time, value, timeseries_id +from timeseries_measurement +where timeseries_id = $1 +order by time desc +limit 1 ` -type DeleteTimeseriesNoteRangeParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - After time.Time `json:"after"` - Before time.Time `json:"before"` +func (q *Queries) TimeseriesMeasurementGetMostRecent(ctx context.Context, timeseriesID uuid.UUID) (TimeseriesMeasurement, error) { + row := q.db.QueryRow(ctx, timeseriesMeasurementGetMostRecent, timeseriesID) + var i TimeseriesMeasurement + err := row.Scan(&i.Time, &i.Value, &i.TimeseriesID) + return i, err } -func (q 
*Queries) DeleteTimeseriesNoteRange(ctx context.Context, arg DeleteTimeseriesNoteRangeParams) error { - _, err := q.db.Exec(ctx, deleteTimeseriesNoteRange, arg.TimeseriesID, arg.After, arg.Before) - return err -} - -const getTimeseriesConstantMeasurement = `-- name: GetTimeseriesConstantMeasurement :many -select - m.timeseries_id, - m.time, - m.value -from timeseries_measurement m -inner join v_timeseries_stored t on t.id = m.timeseries_id -inner join parameter p on p.id = t.parameter_id -where t.instrument_id in ( - select instrument_id - from v_timeseries_stored t - where t.id= $1 -) -and p.name = $2 +const timeseriesMeasurementListRange = `-- name: TimeseriesMeasurementListRange :many +select timeseries_id, time, value, masked, validated, annotation from v_timeseries_measurement +where timeseries_id=$1 +and time > $2 +and time < $3 ` -type GetTimeseriesConstantMeasurementParams struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` -} - -type GetTimeseriesConstantMeasurementRow struct { +type TimeseriesMeasurementListRangeParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` - Time time.Time `json:"time"` - Value float64 `json:"value"` + AfterTime time.Time `json:"after_time"` + BeforeTime time.Time `json:"before_time"` } -func (q *Queries) GetTimeseriesConstantMeasurement(ctx context.Context, arg GetTimeseriesConstantMeasurementParams) ([]GetTimeseriesConstantMeasurementRow, error) { - rows, err := q.db.Query(ctx, getTimeseriesConstantMeasurement, arg.ID, arg.Name) +func (q *Queries) TimeseriesMeasurementListRange(ctx context.Context, arg TimeseriesMeasurementListRangeParams) ([]VTimeseriesMeasurement, error) { + rows, err := q.db.Query(ctx, timeseriesMeasurementListRange, arg.TimeseriesID, arg.AfterTime, arg.BeforeTime) if err != nil { return nil, err } defer rows.Close() - items := []GetTimeseriesConstantMeasurementRow{} + items := []VTimeseriesMeasurement{} for rows.Next() { - var i GetTimeseriesConstantMeasurementRow - if err := 
rows.Scan(&i.TimeseriesID, &i.Time, &i.Value); err != nil { + var i VTimeseriesMeasurement + if err := rows.Scan( + &i.TimeseriesID, + &i.Time, + &i.Value, + &i.Masked, + &i.Validated, + &i.Annotation, + ); err != nil { return nil, err } items = append(items, i) @@ -197,58 +128,79 @@ func (q *Queries) GetTimeseriesConstantMeasurement(ctx context.Context, arg GetT return items, nil } -const listTimeseriesMeasurements = `-- name: ListTimeseriesMeasurements :many -select - m.timeseries_id, - m.time, - m.value, - n.masked, - n.validated, - n.annotation -from timeseries_measurement m -left join timeseries_notes n on m.timeseries_id = n.timeseries_id and m.time = n.time -inner join timeseries t on t.id = m.timeseries_id -where t.id = $1 and m.time > $2 and m.time < $3 order by m.time asc +const timeseriesNoteCreate = `-- name: TimeseriesNoteCreate :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing ` -type ListTimeseriesMeasurementsParams struct { - ID uuid.UUID `json:"id"` - Time time.Time `json:"time"` - Time_2 time.Time `json:"time_2"` +type TimeseriesNoteCreateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) TimeseriesNoteCreate(ctx context.Context, arg TimeseriesNoteCreateParams) error { + _, err := q.db.Exec(ctx, timeseriesNoteCreate, + arg.TimeseriesID, + arg.Time, + arg.Masked, + arg.Validated, + arg.Annotation, + ) + return err } -type ListTimeseriesMeasurementsRow struct { +const timeseriesNoteCreateOrUpdate = `-- name: TimeseriesNoteCreateOrUpdate :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = 
excluded.validated, annotation = excluded.annotation +` + +type TimeseriesNoteCreateOrUpdateParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Time time.Time `json:"time"` - Value float64 `json:"value"` Masked *bool `json:"masked"` Validated *bool `json:"validated"` Annotation *string `json:"annotation"` } -func (q *Queries) ListTimeseriesMeasurements(ctx context.Context, arg ListTimeseriesMeasurementsParams) ([]ListTimeseriesMeasurementsRow, error) { - rows, err := q.db.Query(ctx, listTimeseriesMeasurements, arg.ID, arg.Time, arg.Time_2) - if err != nil { - return nil, err - } - defer rows.Close() - items := []ListTimeseriesMeasurementsRow{} - for rows.Next() { - var i ListTimeseriesMeasurementsRow - if err := rows.Scan( - &i.TimeseriesID, - &i.Time, - &i.Value, - &i.Masked, - &i.Validated, - &i.Annotation, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *Queries) TimeseriesNoteCreateOrUpdate(ctx context.Context, arg TimeseriesNoteCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesNoteCreateOrUpdate, + arg.TimeseriesID, + arg.Time, + arg.Masked, + arg.Validated, + arg.Annotation, + ) + return err +} + +const timeseriesNoteDelete = `-- name: TimeseriesNoteDelete :exec +delete from timeseries_notes where timeseries_id=$1 and time=$2 +` + +type TimeseriesNoteDeleteParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) TimeseriesNoteDelete(ctx context.Context, arg TimeseriesNoteDeleteParams) error { + _, err := q.db.Exec(ctx, timeseriesNoteDelete, arg.TimeseriesID, arg.Time) + return err +} + +const timeseriesNoteDeleteRange = `-- name: TimeseriesNoteDeleteRange :exec +delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3 +` + +type TimeseriesNoteDeleteRangeParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time 
`json:"after"` + Before time.Time `json:"before"` +} + +func (q *Queries) TimeseriesNoteDeleteRange(ctx context.Context, arg TimeseriesNoteDeleteRangeParams) error { + _, err := q.db.Exec(ctx, timeseriesNoteDeleteRange, arg.TimeseriesID, arg.After, arg.Before) + return err } diff --git a/api/internal/db/models.go b/api/internal/db/models.go index 235a803a..842cc8a2 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -1176,6 +1176,14 @@ type VInstrumentGroup struct { TimeseriesCount interface{} `json:"timeseries_count"` } +type VInstrumentStatus struct { + ID uuid.UUID `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + StatusID uuid.UUID `json:"status_id"` + Status string `json:"status"` + Time time.Time `json:"time"` +} + type VInstrumentTelemetry struct { ID uuid.UUID `json:"id"` InstrumentID uuid.UUID `json:"instrument_id"` @@ -1368,6 +1376,15 @@ type VTimeseriesDependency struct { DependencyTimeseriesID *uuid.UUID `json:"dependency_timeseries_id"` } +type VTimeseriesMeasurement struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + type VTimeseriesProjectMap struct { TimeseriesID uuid.UUID `json:"timeseries_id"` ProjectID *uuid.UUID `json:"project_id"` @@ -1392,3 +1409,26 @@ type VUnit struct { MeasureID *uuid.UUID `json:"measure_id"` Measure string `json:"measure"` } + +type VUploaderConfig struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + CreateDate time.Time `json:"create_date"` + Creator uuid.UUID `json:"creator"` + CreatorUsername string `json:"creator_username"` + UpdateDate *time.Time `json:"update_date"` + Updater *uuid.UUID `json:"updater"` + UpdaterUsername *string `json:"updater_username"` + Type UploaderConfigType 
`json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` +} diff --git a/api/internal/db/plot_config.sql_gen.go b/api/internal/db/plot_config.sql_gen.go index 110d2a61..cfcef6ec 100644 --- a/api/internal/db/plot_config.sql_gen.go +++ b/api/internal/db/plot_config.sql_gen.go @@ -12,12 +12,12 @@ import ( "github.com/google/uuid" ) -const createPlotConfig = `-- name: CreatePlotConfig :one +const plotConfigCreate = `-- name: PlotConfigCreate :one insert into plot_configuration (slug, name, project_id, creator, create_date, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) returning id ` -type CreatePlotConfigParams struct { +type PlotConfigCreateParams struct { Name string `json:"name"` ProjectID uuid.UUID `json:"project_id"` Creator uuid.UUID `json:"creator"` @@ -25,8 +25,8 @@ type CreatePlotConfigParams struct { PlotType PlotType `json:"plot_type"` } -func (q *Queries) CreatePlotConfig(ctx context.Context, arg CreatePlotConfigParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, createPlotConfig, +func (q *Queries) PlotConfigCreate(ctx context.Context, arg PlotConfigCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, plotConfigCreate, arg.Name, arg.ProjectID, arg.Creator, @@ -38,65 +38,28 @@ func (q *Queries) CreatePlotConfig(ctx context.Context, arg CreatePlotConfigPara return id, err } -const createPlotConfigSettings = `-- name: CreatePlotConfigSettings :exec -insert into plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold) -values ($1, $2, $3, $4, $5, $6, $7) -` - -type CreatePlotConfigSettingsParams struct { - ID uuid.UUID 
`json:"id"` - ShowMasked bool `json:"show_masked"` - ShowNonvalidated bool `json:"show_nonvalidated"` - ShowComments bool `json:"show_comments"` - AutoRange bool `json:"auto_range"` - DateRange string `json:"date_range"` - Threshold int32 `json:"threshold"` -} - -func (q *Queries) CreatePlotConfigSettings(ctx context.Context, arg CreatePlotConfigSettingsParams) error { - _, err := q.db.Exec(ctx, createPlotConfigSettings, - arg.ID, - arg.ShowMasked, - arg.ShowNonvalidated, - arg.ShowComments, - arg.AutoRange, - arg.DateRange, - arg.Threshold, - ) - return err -} - -const deletePlotConfig = `-- name: DeletePlotConfig :exec +const plotConfigDelete = `-- name: PlotConfigDelete :exec delete from plot_configuration where project_id = $1 and id = $2 ` -type DeletePlotConfigParams struct { +type PlotConfigDeleteParams struct { ProjectID uuid.UUID `json:"project_id"` ID uuid.UUID `json:"id"` } -func (q *Queries) DeletePlotConfig(ctx context.Context, arg DeletePlotConfigParams) error { - _, err := q.db.Exec(ctx, deletePlotConfig, arg.ProjectID, arg.ID) - return err -} - -const deletePlotConfigSettings = `-- name: DeletePlotConfigSettings :exec -delete from plot_configuration_settings where id = $1 -` - -func (q *Queries) DeletePlotConfigSettings(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deletePlotConfigSettings, id) +func (q *Queries) PlotConfigDelete(ctx context.Context, arg PlotConfigDeleteParams) error { + _, err := q.db.Exec(ctx, plotConfigDelete, arg.ProjectID, arg.ID) return err } -const getPlotConfig = `-- name: GetPlotConfig :one +const plotConfigGet = `-- name: PlotConfigGet :one select id, slug, name, project_id, creator, create_date, updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where id = $1 ` -func (q *Queries) GetPlotConfig(ctx context.Context, id uuid.UUID) (VPlotConfiguration, error) { - row := q.db.QueryRow(ctx, 
getPlotConfig, id) +func (q *Queries) PlotConfigGet(ctx context.Context, id uuid.UUID) (VPlotConfiguration, error) { + row := q.db.QueryRow(ctx, plotConfigGet, id) var i VPlotConfiguration err := row.Scan( &i.ID, @@ -120,14 +83,14 @@ func (q *Queries) GetPlotConfig(ctx context.Context, id uuid.UUID) (VPlotConfigu return i, err } -const listPlotConfigsForProject = `-- name: ListPlotConfigsForProject :many +const plotConfigListForProject = `-- name: PlotConfigListForProject :many select id, slug, name, project_id, creator, create_date, updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where project_id = $1 ` -func (q *Queries) ListPlotConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VPlotConfiguration, error) { - rows, err := q.db.Query(ctx, listPlotConfigsForProject, projectID) +func (q *Queries) PlotConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VPlotConfiguration, error) { + rows, err := q.db.Query(ctx, plotConfigListForProject, projectID) if err != nil { return nil, err } @@ -164,11 +127,48 @@ func (q *Queries) ListPlotConfigsForProject(ctx context.Context, projectID uuid. 
return items, nil } -const updatePlotConfig = `-- name: UpdatePlotConfig :exec +const plotConfigSettingsCreate = `-- name: PlotConfigSettingsCreate :exec +insert into plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold) +values ($1, $2, $3, $4, $5, $6, $7) +` + +type PlotConfigSettingsCreateParams struct { + ID uuid.UUID `json:"id"` + ShowMasked bool `json:"show_masked"` + ShowNonvalidated bool `json:"show_nonvalidated"` + ShowComments bool `json:"show_comments"` + AutoRange bool `json:"auto_range"` + DateRange string `json:"date_range"` + Threshold int32 `json:"threshold"` +} + +func (q *Queries) PlotConfigSettingsCreate(ctx context.Context, arg PlotConfigSettingsCreateParams) error { + _, err := q.db.Exec(ctx, plotConfigSettingsCreate, + arg.ID, + arg.ShowMasked, + arg.ShowNonvalidated, + arg.ShowComments, + arg.AutoRange, + arg.DateRange, + arg.Threshold, + ) + return err +} + +const plotConfigSettingsDelete = `-- name: PlotConfigSettingsDelete :exec +delete from plot_configuration_settings where id = $1 +` + +func (q *Queries) PlotConfigSettingsDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, plotConfigSettingsDelete, id) + return err +} + +const plotConfigUpdate = `-- name: PlotConfigUpdate :exec update plot_configuration set name = $3, updater = $4, update_date = $5 where project_id = $1 and id = $2 ` -type UpdatePlotConfigParams struct { +type PlotConfigUpdateParams struct { ProjectID uuid.UUID `json:"project_id"` ID uuid.UUID `json:"id"` Name string `json:"name"` @@ -176,8 +176,8 @@ type UpdatePlotConfigParams struct { UpdateDate *time.Time `json:"update_date"` } -func (q *Queries) UpdatePlotConfig(ctx context.Context, arg UpdatePlotConfigParams) error { - _, err := q.db.Exec(ctx, updatePlotConfig, +func (q *Queries) PlotConfigUpdate(ctx context.Context, arg PlotConfigUpdateParams) error { + _, err := q.db.Exec(ctx, plotConfigUpdate, arg.ProjectID, arg.ID, arg.Name, 
diff --git a/api/internal/db/plot_config_bullseye.sql_gen.go b/api/internal/db/plot_config_bullseye.sql_gen.go index 86653b42..df10849e 100644 --- a/api/internal/db/plot_config_bullseye.sql_gen.go +++ b/api/internal/db/plot_config_bullseye.sql_gen.go @@ -12,31 +12,46 @@ import ( "github.com/google/uuid" ) -const createPlotBullseyeConfig = `-- name: CreatePlotBullseyeConfig :exec +const plotBullseyeConfigCreate = `-- name: PlotBullseyeConfigCreate :exec insert into plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_timeseries_id) values ($1, $2, $3) ` -type CreatePlotBullseyeConfigParams struct { +type PlotBullseyeConfigCreateParams struct { PlotConfigID uuid.UUID `json:"plot_config_id"` XAxisTimeseriesID *uuid.UUID `json:"x_axis_timeseries_id"` YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"` } -func (q *Queries) CreatePlotBullseyeConfig(ctx context.Context, arg CreatePlotBullseyeConfigParams) error { - _, err := q.db.Exec(ctx, createPlotBullseyeConfig, arg.PlotConfigID, arg.XAxisTimeseriesID, arg.YAxisTimeseriesID) +func (q *Queries) PlotBullseyeConfigCreate(ctx context.Context, arg PlotBullseyeConfigCreateParams) error { + _, err := q.db.Exec(ctx, plotBullseyeConfigCreate, arg.PlotConfigID, arg.XAxisTimeseriesID, arg.YAxisTimeseriesID) return err } -const deletePlotBullseyeConfig = `-- name: DeletePlotBullseyeConfig :exec +const plotBullseyeConfigDelete = `-- name: PlotBullseyeConfigDelete :exec delete from plot_bullseye_config where plot_config_id = $1 ` -func (q *Queries) DeletePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, deletePlotBullseyeConfig, plotConfigID) +func (q *Queries) PlotBullseyeConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, plotBullseyeConfigDelete, plotConfigID) return err } -const listPlotConfigMeasurementsBullseyePlot = `-- name: ListPlotConfigMeasurementsBullseyePlot :many +const plotBullseyeConfigUpdate = `-- name: 
PlotBullseyeConfigUpdate :exec +UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1 +` + +type PlotBullseyeConfigUpdateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID *uuid.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"` +} + +func (q *Queries) PlotBullseyeConfigUpdate(ctx context.Context, arg PlotBullseyeConfigUpdateParams) error { + _, err := q.db.Exec(ctx, plotBullseyeConfigUpdate, arg.PlotConfigID, arg.XAxisTimeseriesID, arg.YAxisTimeseriesID) + return err +} + +const plotConfigMeasurementListBullseye = `-- name: PlotConfigMeasurementListBullseye :many select t.time, locf(xm.value) as x, @@ -58,27 +73,27 @@ group by t.time order by t.time asc ` -type ListPlotConfigMeasurementsBullseyePlotParams struct { +type PlotConfigMeasurementListBullseyeParams struct { PlotConfigID uuid.UUID `json:"plot_config_id"` - Time time.Time `json:"time"` - Time_2 time.Time `json:"time_2"` + After time.Time `json:"after"` + Before time.Time `json:"before"` } -type ListPlotConfigMeasurementsBullseyePlotRow struct { +type PlotConfigMeasurementListBullseyeRow struct { Time time.Time `json:"time"` X interface{} `json:"x"` Y interface{} `json:"y"` } -func (q *Queries) ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, arg ListPlotConfigMeasurementsBullseyePlotParams) ([]ListPlotConfigMeasurementsBullseyePlotRow, error) { - rows, err := q.db.Query(ctx, listPlotConfigMeasurementsBullseyePlot, arg.PlotConfigID, arg.Time, arg.Time_2) +func (q *Queries) PlotConfigMeasurementListBullseye(ctx context.Context, arg PlotConfigMeasurementListBullseyeParams) ([]PlotConfigMeasurementListBullseyeRow, error) { + rows, err := q.db.Query(ctx, plotConfigMeasurementListBullseye, arg.PlotConfigID, arg.After, arg.Before) if err != nil { return nil, err } defer rows.Close() - items := []ListPlotConfigMeasurementsBullseyePlotRow{} + items := 
[]PlotConfigMeasurementListBullseyeRow{} for rows.Next() { - var i ListPlotConfigMeasurementsBullseyePlotRow + var i PlotConfigMeasurementListBullseyeRow if err := rows.Scan(&i.Time, &i.X, &i.Y); err != nil { return nil, err } @@ -89,18 +104,3 @@ func (q *Queries) ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, ar } return items, nil } - -const updatePlotBullseyeConfig = `-- name: UpdatePlotBullseyeConfig :exec -UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1 -` - -type UpdatePlotBullseyeConfigParams struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - XAxisTimeseriesID *uuid.UUID `json:"x_axis_timeseries_id"` - YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"` -} - -func (q *Queries) UpdatePlotBullseyeConfig(ctx context.Context, arg UpdatePlotBullseyeConfigParams) error { - _, err := q.db.Exec(ctx, updatePlotBullseyeConfig, arg.PlotConfigID, arg.XAxisTimeseriesID, arg.YAxisTimeseriesID) - return err -} diff --git a/api/internal/db/plot_config_contour.sql_gen.go b/api/internal/db/plot_config_contour.sql_gen.go index 9e8b1f44..f5180365 100644 --- a/api/internal/db/plot_config_contour.sql_gen.go +++ b/api/internal/db/plot_config_contour.sql_gen.go @@ -12,66 +12,7 @@ import ( "github.com/google/uuid" ) -const createPlotContourConfig = `-- name: CreatePlotContourConfig :exec -insert into plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) -values ($1, $2, $3, $4, $5, $6) -` - -type CreatePlotContourConfigParams struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - Time *time.Time `json:"time"` - LocfBackfill string `json:"locf_backfill"` - GradientSmoothing bool `json:"gradient_smoothing"` - ContourSmoothing bool `json:"contour_smoothing"` - ShowLabels bool `json:"show_labels"` -} - -func (q *Queries) CreatePlotContourConfig(ctx context.Context, arg CreatePlotContourConfigParams) error { - _, err := q.db.Exec(ctx, 
createPlotContourConfig, - arg.PlotConfigID, - arg.Time, - arg.LocfBackfill, - arg.GradientSmoothing, - arg.ContourSmoothing, - arg.ShowLabels, - ) - return err -} - -const createPlotContourConfigTimeseries = `-- name: CreatePlotContourConfigTimeseries :exec -insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) -on conflict (plot_contour_config_id, timeseries_id) do nothing -` - -type CreatePlotContourConfigTimeseriesParams struct { - PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` - TimeseriesID uuid.UUID `json:"timeseries_id"` -} - -func (q *Queries) CreatePlotContourConfigTimeseries(ctx context.Context, arg CreatePlotContourConfigTimeseriesParams) error { - _, err := q.db.Exec(ctx, createPlotContourConfigTimeseries, arg.PlotContourConfigID, arg.TimeseriesID) - return err -} - -const deleteAllPlotContourConfigTimeseries = `-- name: DeleteAllPlotContourConfigTimeseries :exec -delete from plot_contour_config_timeseries where plot_contour_config_id = $1 -` - -func (q *Queries) DeleteAllPlotContourConfigTimeseries(ctx context.Context, plotContourConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteAllPlotContourConfigTimeseries, plotContourConfigID) - return err -} - -const deletePlotContourConfig = `-- name: DeletePlotContourConfig :exec -delete from plot_contour_config where plot_config_id = $1 -` - -func (q *Queries) DeletePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, deletePlotContourConfig, plotConfigID) - return err -} - -const listPlotConfigMeasurementsContourPlot = `-- name: ListPlotConfigMeasurementsContourPlot :many +const plotConfigMeasurementListContour = `-- name: PlotConfigMeasurementListContour :many select oi.x::double precision x, oi.y::double precision y, @@ -92,26 +33,26 @@ and mm.time = $2 group by pc.plot_config_id, pcts.timeseries_id, oi.x, oi.y ` -type ListPlotConfigMeasurementsContourPlotParams struct { +type 
PlotConfigMeasurementListContourParams struct { PlotConfigID uuid.UUID `json:"plot_config_id"` Time time.Time `json:"time"` } -type ListPlotConfigMeasurementsContourPlotRow struct { +type PlotConfigMeasurementListContourRow struct { X float64 `json:"x"` Y float64 `json:"y"` Z interface{} `json:"z"` } -func (q *Queries) ListPlotConfigMeasurementsContourPlot(ctx context.Context, arg ListPlotConfigMeasurementsContourPlotParams) ([]ListPlotConfigMeasurementsContourPlotRow, error) { - rows, err := q.db.Query(ctx, listPlotConfigMeasurementsContourPlot, arg.PlotConfigID, arg.Time) +func (q *Queries) PlotConfigMeasurementListContour(ctx context.Context, arg PlotConfigMeasurementListContourParams) ([]PlotConfigMeasurementListContourRow, error) { + rows, err := q.db.Query(ctx, plotConfigMeasurementListContour, arg.PlotConfigID, arg.Time) if err != nil { return nil, err } defer rows.Close() - items := []ListPlotConfigMeasurementsContourPlotRow{} + items := []PlotConfigMeasurementListContourRow{} for rows.Next() { - var i ListPlotConfigMeasurementsContourPlotRow + var i PlotConfigMeasurementListContourRow if err := rows.Scan(&i.X, &i.Y, &i.Z); err != nil { return nil, err } @@ -123,7 +64,42 @@ func (q *Queries) ListPlotConfigMeasurementsContourPlot(ctx context.Context, arg return items, nil } -const listPlotContourConfigTimes = `-- name: ListPlotContourConfigTimes :many +const plotContourConfigCreate = `-- name: PlotContourConfigCreate :exec +insert into plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) +values ($1, $2, $3, $4, $5, $6) +` + +type PlotContourConfigCreateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time *time.Time `json:"time"` + LocfBackfill string `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` +} + +func (q *Queries) PlotContourConfigCreate(ctx context.Context, 
arg PlotContourConfigCreateParams) error { + _, err := q.db.Exec(ctx, plotContourConfigCreate, + arg.PlotConfigID, + arg.Time, + arg.LocfBackfill, + arg.GradientSmoothing, + arg.ContourSmoothing, + arg.ShowLabels, + ) + return err +} + +const plotContourConfigDelete = `-- name: PlotContourConfigDelete :exec +delete from plot_contour_config where plot_config_id = $1 +` + +func (q *Queries) PlotContourConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, plotContourConfigDelete, plotConfigID) + return err +} + +const plotContourConfigListTimeRange = `-- name: PlotContourConfigListTimeRange :many select distinct mm.time from plot_contour_config_timeseries pcts inner join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id @@ -133,14 +109,14 @@ and mm.time < $3 order by time asc ` -type ListPlotContourConfigTimesParams struct { +type PlotContourConfigListTimeRangeParams struct { PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` - Time time.Time `json:"time"` - Time_2 time.Time `json:"time_2"` + After time.Time `json:"after"` + Before time.Time `json:"before"` } -func (q *Queries) ListPlotContourConfigTimes(ctx context.Context, arg ListPlotContourConfigTimesParams) ([]time.Time, error) { - rows, err := q.db.Query(ctx, listPlotContourConfigTimes, arg.PlotContourConfigID, arg.Time, arg.Time_2) +func (q *Queries) PlotContourConfigListTimeRange(ctx context.Context, arg PlotContourConfigListTimeRangeParams) ([]time.Time, error) { + rows, err := q.db.Query(ctx, plotContourConfigListTimeRange, arg.PlotContourConfigID, arg.After, arg.Before) if err != nil { return nil, err } @@ -159,12 +135,36 @@ func (q *Queries) ListPlotContourConfigTimes(ctx context.Context, arg ListPlotCo return items, nil } -const updatePlotContourConfig = `-- name: UpdatePlotContourConfig :exec +const plotContourConfigTimeseriesCreate = `-- name: PlotContourConfigTimeseriesCreate :exec +insert into plot_contour_config_timeseries 
(plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing +` + +type PlotContourConfigTimeseriesCreateParams struct { + PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) PlotContourConfigTimeseriesCreate(ctx context.Context, arg PlotContourConfigTimeseriesCreateParams) error { + _, err := q.db.Exec(ctx, plotContourConfigTimeseriesCreate, arg.PlotContourConfigID, arg.TimeseriesID) + return err +} + +const plotContourConfigTimeseriesDeleteForPlotContourConfig = `-- name: PlotContourConfigTimeseriesDeleteForPlotContourConfig :exec +delete from plot_contour_config_timeseries where plot_contour_config_id = $1 +` + +func (q *Queries) PlotContourConfigTimeseriesDeleteForPlotContourConfig(ctx context.Context, plotContourConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, plotContourConfigTimeseriesDeleteForPlotContourConfig, plotContourConfigID) + return err +} + +const plotContourConfigUpdate = `-- name: PlotContourConfigUpdate :exec update plot_contour_config set "time"=$2, locf_backfill=$3, gradient_smoothing=$4, contour_smoothing=$5, show_labels=$6 where plot_config_id=$1 ` -type UpdatePlotContourConfigParams struct { +type PlotContourConfigUpdateParams struct { PlotConfigID uuid.UUID `json:"plot_config_id"` Time *time.Time `json:"time"` LocfBackfill string `json:"locf_backfill"` @@ -173,8 +173,8 @@ type UpdatePlotContourConfigParams struct { ShowLabels bool `json:"show_labels"` } -func (q *Queries) UpdatePlotContourConfig(ctx context.Context, arg UpdatePlotContourConfigParams) error { - _, err := q.db.Exec(ctx, updatePlotContourConfig, +func (q *Queries) PlotContourConfigUpdate(ctx context.Context, arg PlotContourConfigUpdateParams) error { + _, err := q.db.Exec(ctx, plotContourConfigUpdate, arg.PlotConfigID, arg.Time, arg.LocfBackfill, diff --git a/api/internal/db/plot_config_profile.sql_gen.go 
b/api/internal/db/plot_config_profile.sql_gen.go index b930f047..39b9708c 100644 --- a/api/internal/db/plot_config_profile.sql_gen.go +++ b/api/internal/db/plot_config_profile.sql_gen.go @@ -11,30 +11,30 @@ import ( "github.com/google/uuid" ) -const createPlotProfileConfig = `-- name: CreatePlotProfileConfig :exec +const plotProfileConfigCreate = `-- name: PlotProfileConfigCreate :exec insert into plot_profile_config (plot_config_id, instrument_id) values ($1, $2) ` -type CreatePlotProfileConfigParams struct { +type PlotProfileConfigCreateParams struct { PlotConfigID uuid.UUID `json:"plot_config_id"` InstrumentID uuid.UUID `json:"instrument_id"` } -func (q *Queries) CreatePlotProfileConfig(ctx context.Context, arg CreatePlotProfileConfigParams) error { - _, err := q.db.Exec(ctx, createPlotProfileConfig, arg.PlotConfigID, arg.InstrumentID) +func (q *Queries) PlotProfileConfigCreate(ctx context.Context, arg PlotProfileConfigCreateParams) error { + _, err := q.db.Exec(ctx, plotProfileConfigCreate, arg.PlotConfigID, arg.InstrumentID) return err } -const updatePlotProfileConfig = `-- name: UpdatePlotProfileConfig :exec +const plotProfileConfigUpdate = `-- name: PlotProfileConfigUpdate :exec update plot_profile_config set instrument_id=$2 where plot_config_id=$1 ` -type UpdatePlotProfileConfigParams struct { +type PlotProfileConfigUpdateParams struct { PlotConfigID uuid.UUID `json:"plot_config_id"` InstrumentID uuid.UUID `json:"instrument_id"` } -func (q *Queries) UpdatePlotProfileConfig(ctx context.Context, arg UpdatePlotProfileConfigParams) error { - _, err := q.db.Exec(ctx, updatePlotProfileConfig, arg.PlotConfigID, arg.InstrumentID) +func (q *Queries) PlotProfileConfigUpdate(ctx context.Context, arg PlotProfileConfigUpdateParams) error { + _, err := q.db.Exec(ctx, plotProfileConfigUpdate, arg.PlotConfigID, arg.InstrumentID) return err } diff --git a/api/internal/db/plot_config_scatter_line.sql_gen.go b/api/internal/db/plot_config_scatter_line.sql_gen.go index 
2260cf93..0398539a 100644 --- a/api/internal/db/plot_config_scatter_line.sql_gen.go +++ b/api/internal/db/plot_config_scatter_line.sql_gen.go @@ -11,12 +11,12 @@ import ( "github.com/google/uuid" ) -const createPlotConfigCustomShape = `-- name: CreatePlotConfigCustomShape :exec +const plotConfigCustomShapeCreate = `-- name: PlotConfigCustomShapeCreate :exec insert into plot_configuration_custom_shape (plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5) ` -type CreatePlotConfigCustomShapeParams struct { +type PlotConfigCustomShapeCreateParams struct { PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` Enabled bool `json:"enabled"` Name string `json:"name"` @@ -24,8 +24,8 @@ type CreatePlotConfigCustomShapeParams struct { Color string `json:"color"` } -func (q *Queries) CreatePlotConfigCustomShape(ctx context.Context, arg CreatePlotConfigCustomShapeParams) error { - _, err := q.db.Exec(ctx, createPlotConfigCustomShape, +func (q *Queries) PlotConfigCustomShapeCreate(ctx context.Context, arg PlotConfigCustomShapeCreateParams) error { + _, err := q.db.Exec(ctx, plotConfigCustomShapeCreate, arg.PlotConfigurationID, arg.Enabled, arg.Name, @@ -35,116 +35,116 @@ func (q *Queries) CreatePlotConfigCustomShape(ctx context.Context, arg CreatePlo return err } -const createPlotConfigScatterLineLayout = `-- name: CreatePlotConfigScatterLineLayout :exec -insert into plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) values ($1, $2, $3) +const plotConfigCustomShapeDeleteForPlotConfig = `-- name: PlotConfigCustomShapeDeleteForPlotConfig :exec +delete from plot_configuration_custom_shape where plot_configuration_id=$1 ` -type CreatePlotConfigScatterLineLayoutParams struct { - PlotConfigID uuid.UUID `json:"plot_config_id"` - YAxisTitle *string `json:"y_axis_title"` - Y2AxisTitle *string `json:"y2_axis_title"` -} - -func (q *Queries) CreatePlotConfigScatterLineLayout(ctx context.Context, arg 
CreatePlotConfigScatterLineLayoutParams) error { - _, err := q.db.Exec(ctx, createPlotConfigScatterLineLayout, arg.PlotConfigID, arg.YAxisTitle, arg.Y2AxisTitle) +func (q *Queries) PlotConfigCustomShapeDeleteForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) error { + _, err := q.db.Exec(ctx, plotConfigCustomShapeDeleteForPlotConfig, plotConfigurationID) return err } -const createPlotConfigTimeseriesTrace = `-- name: CreatePlotConfigTimeseriesTrace :exec -insert into plot_configuration_timeseries_trace -(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values -($1, $2, $3, $4, $5, $6, $7, $8) +const plotConfigCustomShapeUpdate = `-- name: PlotConfigCustomShapeUpdate :exec +update plot_configuration_custom_shape +set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1 ` -type CreatePlotConfigTimeseriesTraceParams struct { +type PlotConfigCustomShapeUpdateParams struct { PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` - TimeseriesID *uuid.UUID `json:"timeseries_id"` - TraceOrder int32 `json:"trace_order"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` Color string `json:"color"` - LineStyle LineStyle `json:"line_style"` - Width float32 `json:"width"` - ShowMarkers bool `json:"show_markers"` - YAxis YAxis `json:"y_axis"` } -func (q *Queries) CreatePlotConfigTimeseriesTrace(ctx context.Context, arg CreatePlotConfigTimeseriesTraceParams) error { - _, err := q.db.Exec(ctx, createPlotConfigTimeseriesTrace, +func (q *Queries) PlotConfigCustomShapeUpdate(ctx context.Context, arg PlotConfigCustomShapeUpdateParams) error { + _, err := q.db.Exec(ctx, plotConfigCustomShapeUpdate, arg.PlotConfigurationID, - arg.TimeseriesID, - arg.TraceOrder, + arg.Enabled, + arg.Name, + arg.DataPoint, arg.Color, - arg.LineStyle, - arg.Width, - arg.ShowMarkers, - arg.YAxis, ) return err } -const deleteAllPlotConfigCustomShapes = `-- name: 
DeleteAllPlotConfigCustomShapes :exec -delete from plot_configuration_custom_shape where plot_configuration_id=$1 +const plotConfigScatterLineLayoutCreate = `-- name: PlotConfigScatterLineLayoutCreate :exec +insert into plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) values ($1, $2, $3) ` -func (q *Queries) DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID *uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteAllPlotConfigCustomShapes, plotConfigurationID) +type PlotConfigScatterLineLayoutCreateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +func (q *Queries) PlotConfigScatterLineLayoutCreate(ctx context.Context, arg PlotConfigScatterLineLayoutCreateParams) error { + _, err := q.db.Exec(ctx, plotConfigScatterLineLayoutCreate, arg.PlotConfigID, arg.YAxisTitle, arg.Y2AxisTitle) return err } -const deleteAllPlotConfigTimeseriesTraces = `-- name: DeleteAllPlotConfigTimeseriesTraces :exec -delete from plot_configuration_timeseries_trace where plot_configuration_id=$1 +const plotConfigScatterLineLayoutUpdate = `-- name: PlotConfigScatterLineLayoutUpdate :exec +update plot_scatter_line_config set y_axis_title=$2, y2_axis_title=$3 where plot_config_id=$1 ` -func (q *Queries) DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID *uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteAllPlotConfigTimeseriesTraces, plotConfigurationID) +type PlotConfigScatterLineLayoutUpdateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +func (q *Queries) PlotConfigScatterLineLayoutUpdate(ctx context.Context, arg PlotConfigScatterLineLayoutUpdateParams) error { + _, err := q.db.Exec(ctx, plotConfigScatterLineLayoutUpdate, arg.PlotConfigID, arg.YAxisTitle, arg.Y2AxisTitle) return err } -const 
updatePlotConfigCustomShape = `-- name: UpdatePlotConfigCustomShape :exec -update plot_configuration_custom_shape -set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1 +const plotConfigTimeseriesTraceCreate = `-- name: PlotConfigTimeseriesTraceCreate :exec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8) ` -type UpdatePlotConfigCustomShapeParams struct { +type PlotConfigTimeseriesTraceCreateParams struct { PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` - Enabled bool `json:"enabled"` - Name string `json:"name"` - DataPoint float32 `json:"data_point"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` } -func (q *Queries) UpdatePlotConfigCustomShape(ctx context.Context, arg UpdatePlotConfigCustomShapeParams) error { - _, err := q.db.Exec(ctx, updatePlotConfigCustomShape, +func (q *Queries) PlotConfigTimeseriesTraceCreate(ctx context.Context, arg PlotConfigTimeseriesTraceCreateParams) error { + _, err := q.db.Exec(ctx, plotConfigTimeseriesTraceCreate, arg.PlotConfigurationID, - arg.Enabled, - arg.Name, - arg.DataPoint, + arg.TimeseriesID, + arg.TraceOrder, arg.Color, + arg.LineStyle, + arg.Width, + arg.ShowMarkers, + arg.YAxis, ) return err } -const updatePlotConfigScatterLineLayout = `-- name: UpdatePlotConfigScatterLineLayout :exec -update plot_scatter_line_config set y_axis_title=$2, y2_axis_title=$3 where plot_config_id=$1 +const plotConfigTimeseriesTraceDeleteForPlotConfig = `-- name: PlotConfigTimeseriesTraceDeleteForPlotConfig :exec +delete from plot_configuration_timeseries_trace where plot_configuration_id=$1 ` -type UpdatePlotConfigScatterLineLayoutParams struct { - 
PlotConfigID uuid.UUID `json:"plot_config_id"` - YAxisTitle *string `json:"y_axis_title"` - Y2AxisTitle *string `json:"y2_axis_title"` -} - -func (q *Queries) UpdatePlotConfigScatterLineLayout(ctx context.Context, arg UpdatePlotConfigScatterLineLayoutParams) error { - _, err := q.db.Exec(ctx, updatePlotConfigScatterLineLayout, arg.PlotConfigID, arg.YAxisTitle, arg.Y2AxisTitle) +func (q *Queries) PlotConfigTimeseriesTraceDeleteForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) error { + _, err := q.db.Exec(ctx, plotConfigTimeseriesTraceDeleteForPlotConfig, plotConfigurationID) return err } -const updatePlotConfigTimeseriesTrace = `-- name: UpdatePlotConfigTimeseriesTrace :exec +const plotConfigTimeseriesTraceUpdate = `-- name: PlotConfigTimeseriesTraceUpdate :exec update plot_configuration_timeseries_trace set trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 where plot_configuration_id=$1 and timeseries_id=$2 ` -type UpdatePlotConfigTimeseriesTraceParams struct { +type PlotConfigTimeseriesTraceUpdateParams struct { PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` TimeseriesID *uuid.UUID `json:"timeseries_id"` TraceOrder int32 `json:"trace_order"` @@ -155,8 +155,8 @@ type UpdatePlotConfigTimeseriesTraceParams struct { YAxis YAxis `json:"y_axis"` } -func (q *Queries) UpdatePlotConfigTimeseriesTrace(ctx context.Context, arg UpdatePlotConfigTimeseriesTraceParams) error { - _, err := q.db.Exec(ctx, updatePlotConfigTimeseriesTrace, +func (q *Queries) PlotConfigTimeseriesTraceUpdate(ctx context.Context, arg PlotConfigTimeseriesTraceUpdateParams) error { + _, err := q.db.Exec(ctx, plotConfigTimeseriesTraceUpdate, arg.PlotConfigurationID, arg.TimeseriesID, arg.TraceOrder, diff --git a/api/internal/db/profile.sql_gen.go b/api/internal/db/profile.sql_gen.go index c7df60b5..ef522030 100644 --- a/api/internal/db/profile.sql_gen.go +++ b/api/internal/db/profile.sql_gen.go @@ -12,99 +12,47 @@ import ( 
"github.com/google/uuid" ) -const createProfile = `-- name: CreateProfile :exec +const profileCreate = `-- name: ProfileCreate :one insert into profile (edipi, username, email, display_name) values ($1, $2, $3, $4) returning id, username, email, display_name ` -type CreateProfileParams struct { +type ProfileCreateParams struct { Edipi int64 `json:"edipi"` Username string `json:"username"` Email string `json:"email"` DisplayName string `json:"display_name"` } -func (q *Queries) CreateProfile(ctx context.Context, arg CreateProfileParams) error { - _, err := q.db.Exec(ctx, createProfile, +type ProfileCreateRow struct { + ID uuid.UUID `json:"id"` + Username string `json:"username"` + Email string `json:"email"` + DisplayName string `json:"display_name"` +} + +func (q *Queries) ProfileCreate(ctx context.Context, arg ProfileCreateParams) (ProfileCreateRow, error) { + row := q.db.QueryRow(ctx, profileCreate, arg.Edipi, arg.Username, arg.Email, arg.DisplayName, ) - return err -} - -const createProfileToken = `-- name: CreateProfileToken :one -insert into profile_token (token_id, profile_id, hash) values ($1,$2,$3) returning id, token_id, profile_id, issued, hash -` - -type CreateProfileTokenParams struct { - TokenID string `json:"token_id"` - ProfileID uuid.UUID `json:"profile_id"` - Hash string `json:"hash"` -} - -func (q *Queries) CreateProfileToken(ctx context.Context, arg CreateProfileTokenParams) (ProfileToken, error) { - row := q.db.QueryRow(ctx, createProfileToken, arg.TokenID, arg.ProfileID, arg.Hash) - var i ProfileToken + var i ProfileCreateRow err := row.Scan( &i.ID, - &i.TokenID, - &i.ProfileID, - &i.Issued, - &i.Hash, + &i.Username, + &i.Email, + &i.DisplayName, ) return i, err } -const deleteToken = `-- name: DeleteToken :exec -delete from profile_token where profile_id=$1 and token_id=$2 -` - -type DeleteTokenParams struct { - ProfileID uuid.UUID `json:"profile_id"` - TokenID string `json:"token_id"` -} - -func (q *Queries) DeleteToken(ctx context.Context, 
arg DeleteTokenParams) error { - _, err := q.db.Exec(ctx, deleteToken, arg.ProfileID, arg.TokenID) - return err -} - -const getIssuedTokens = `-- name: GetIssuedTokens :many -select token_id, issued from profile_token where profile_id = $1 -` - -type GetIssuedTokensRow struct { - TokenID string `json:"token_id"` - Issued time.Time `json:"issued"` -} - -func (q *Queries) GetIssuedTokens(ctx context.Context, profileID uuid.UUID) ([]GetIssuedTokensRow, error) { - rows, err := q.db.Query(ctx, getIssuedTokens, profileID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []GetIssuedTokensRow{} - for rows.Next() { - var i GetIssuedTokensRow - if err := rows.Scan(&i.TokenID, &i.Issued); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const getProfileForEDIPI = `-- name: GetProfileForEDIPI :one +const profileGetForEDIPI = `-- name: ProfileGetForEDIPI :one select id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where edipi = $1 ` -func (q *Queries) GetProfileForEDIPI(ctx context.Context, edipi int64) (VProfile, error) { - row := q.db.QueryRow(ctx, getProfileForEDIPI, edipi) +func (q *Queries) ProfileGetForEDIPI(ctx context.Context, edipi int64) (VProfile, error) { + row := q.db.QueryRow(ctx, profileGetForEDIPI, edipi) var i VProfile err := row.Scan( &i.ID, @@ -119,13 +67,13 @@ func (q *Queries) GetProfileForEDIPI(ctx context.Context, edipi int64) (VProfile return i, err } -const getProfileForEmail = `-- name: GetProfileForEmail :one +const profileGetForEmail = `-- name: ProfileGetForEmail :one select id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where email ilike $1 limit 1 ` -func (q *Queries) GetProfileForEmail(ctx context.Context, email string) (VProfile, error) { - row := q.db.QueryRow(ctx, getProfileForEmail, email) +func (q *Queries) ProfileGetForEmail(ctx context.Context, email 
string) (VProfile, error) { + row := q.db.QueryRow(ctx, profileGetForEmail, email) var i VProfile err := row.Scan( &i.ID, @@ -140,7 +88,7 @@ func (q *Queries) GetProfileForEmail(ctx context.Context, email string) (VProfil return i, err } -const getProfileForTokenID = `-- name: GetProfileForTokenID :one +const profileGetForToken = `-- name: ProfileGetForToken :one select p.id, p.edipi, p.username, p.email, p.is_admin from profile_token t left join v_profile p on p.id = t.profile_id @@ -148,7 +96,7 @@ where t.token_id = $1 limit 1 ` -type GetProfileForTokenIDRow struct { +type ProfileGetForTokenRow struct { ID *uuid.UUID `json:"id"` Edipi *int64 `json:"edipi"` Username *string `json:"username"` @@ -156,9 +104,9 @@ type GetProfileForTokenIDRow struct { IsAdmin *bool `json:"is_admin"` } -func (q *Queries) GetProfileForTokenID(ctx context.Context, tokenID string) (GetProfileForTokenIDRow, error) { - row := q.db.QueryRow(ctx, getProfileForTokenID, tokenID) - var i GetProfileForTokenIDRow +func (q *Queries) ProfileGetForToken(ctx context.Context, tokenID string) (ProfileGetForTokenRow, error) { + row := q.db.QueryRow(ctx, profileGetForToken, tokenID) + var i ProfileGetForTokenRow err := row.Scan( &i.ID, &i.Edipi, @@ -169,13 +117,13 @@ func (q *Queries) GetProfileForTokenID(ctx context.Context, tokenID string) (Get return i, err } -const getProfileForUsername = `-- name: GetProfileForUsername :one +const profileGetForUsername = `-- name: ProfileGetForUsername :one select id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where username = $1 limit 1 ` -func (q *Queries) GetProfileForUsername(ctx context.Context, username string) (VProfile, error) { - row := q.db.QueryRow(ctx, getProfileForUsername, username) +func (q *Queries) ProfileGetForUsername(ctx context.Context, username string) (VProfile, error) { + row := q.db.QueryRow(ctx, profileGetForUsername, username) var i VProfile err := row.Scan( &i.ID, @@ -190,12 +138,49 @@ func (q *Queries) 
GetProfileForUsername(ctx context.Context, username string) (V return i, err } -const getTokenInfo = `-- name: GetTokenInfo :one +const profileTokenCreate = `-- name: ProfileTokenCreate :one +insert into profile_token (token_id, profile_id, hash) values ($1,$2,$3) returning id, token_id, profile_id, issued, hash +` + +type ProfileTokenCreateParams struct { + TokenID string `json:"token_id"` + ProfileID uuid.UUID `json:"profile_id"` + Hash string `json:"hash"` +} + +func (q *Queries) ProfileTokenCreate(ctx context.Context, arg ProfileTokenCreateParams) (ProfileToken, error) { + row := q.db.QueryRow(ctx, profileTokenCreate, arg.TokenID, arg.ProfileID, arg.Hash) + var i ProfileToken + err := row.Scan( + &i.ID, + &i.TokenID, + &i.ProfileID, + &i.Issued, + &i.Hash, + ) + return i, err +} + +const profileTokenDelete = `-- name: ProfileTokenDelete :exec +delete from profile_token where profile_id=$1 and token_id=$2 +` + +type ProfileTokenDeleteParams struct { + ProfileID uuid.UUID `json:"profile_id"` + TokenID string `json:"token_id"` +} + +func (q *Queries) ProfileTokenDelete(ctx context.Context, arg ProfileTokenDeleteParams) error { + _, err := q.db.Exec(ctx, profileTokenDelete, arg.ProfileID, arg.TokenID) + return err +} + +const profileTokenGet = `-- name: ProfileTokenGet :one select id, token_id, profile_id, issued, hash from profile_token where token_id=$1 limit 1 ` -func (q *Queries) GetTokenInfo(ctx context.Context, tokenID string) (ProfileToken, error) { - row := q.db.QueryRow(ctx, getTokenInfo, tokenID) +func (q *Queries) ProfileTokenGet(ctx context.Context, tokenID string) (ProfileToken, error) { + row := q.db.QueryRow(ctx, profileTokenGet, tokenID) var i ProfileToken err := row.Scan( &i.ID, @@ -207,19 +192,48 @@ func (q *Queries) GetTokenInfo(ctx context.Context, tokenID string) (ProfileToke return i, err } -const updateProfileForEDIPI = `-- name: UpdateProfileForEDIPI :exec +const profileTokenList = `-- name: ProfileTokenList :many +select token_id, issued 
from profile_token where profile_id = $1 +` + +type ProfileTokenListRow struct { + TokenID string `json:"token_id"` + Issued time.Time `json:"issued"` +} + +func (q *Queries) ProfileTokenList(ctx context.Context, profileID uuid.UUID) ([]ProfileTokenListRow, error) { + rows, err := q.db.Query(ctx, profileTokenList, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ProfileTokenListRow{} + for rows.Next() { + var i ProfileTokenListRow + if err := rows.Scan(&i.TokenID, &i.Issued); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const profileUpdateForEDIPI = `-- name: ProfileUpdateForEDIPI :exec UPDATE profile SET username=$1, email=$2, display_name=$3 WHERE edipi=$4 ` -type UpdateProfileForEDIPIParams struct { +type ProfileUpdateForEDIPIParams struct { Username string `json:"username"` Email string `json:"email"` DisplayName string `json:"display_name"` Edipi int64 `json:"edipi"` } -func (q *Queries) UpdateProfileForEDIPI(ctx context.Context, arg UpdateProfileForEDIPIParams) error { - _, err := q.db.Exec(ctx, updateProfileForEDIPI, +func (q *Queries) ProfileUpdateForEDIPI(ctx context.Context, arg ProfileUpdateForEDIPIParams) error { + _, err := q.db.Exec(ctx, profileUpdateForEDIPI, arg.Username, arg.Email, arg.DisplayName, @@ -228,32 +242,32 @@ func (q *Queries) UpdateProfileForEDIPI(ctx context.Context, arg UpdateProfileFo return err } -const updateProfileForEmail = `-- name: UpdateProfileForEmail :exec +const profileUpdateForEmail = `-- name: ProfileUpdateForEmail :exec update profile set username=$1, display_name=$2 where email ilike $3 ` -type UpdateProfileForEmailParams struct { +type ProfileUpdateForEmailParams struct { Username string `json:"username"` DisplayName string `json:"display_name"` Email string `json:"email"` } -func (q *Queries) UpdateProfileForEmail(ctx context.Context, arg UpdateProfileForEmailParams) error { - 
_, err := q.db.Exec(ctx, updateProfileForEmail, arg.Username, arg.DisplayName, arg.Email) +func (q *Queries) ProfileUpdateForEmail(ctx context.Context, arg ProfileUpdateForEmailParams) error { + _, err := q.db.Exec(ctx, profileUpdateForEmail, arg.Username, arg.DisplayName, arg.Email) return err } -const updateProfileForUsername = `-- name: UpdateProfileForUsername :exec +const profileUpdateForUsername = `-- name: ProfileUpdateForUsername :exec update profile set email=$1, display_name=$2 where username=$3 ` -type UpdateProfileForUsernameParams struct { +type ProfileUpdateForUsernameParams struct { Email string `json:"email"` DisplayName string `json:"display_name"` Username string `json:"username"` } -func (q *Queries) UpdateProfileForUsername(ctx context.Context, arg UpdateProfileForUsernameParams) error { - _, err := q.db.Exec(ctx, updateProfileForUsername, arg.Email, arg.DisplayName, arg.Username) +func (q *Queries) ProfileUpdateForUsername(ctx context.Context, arg ProfileUpdateForUsernameParams) error { + _, err := q.db.Exec(ctx, profileUpdateForUsername, arg.Email, arg.DisplayName, arg.Username) return err } diff --git a/api/internal/db/project.sql_gen.go b/api/internal/db/project.sql_gen.go index 6d5a11b1..5afef6f0 100644 --- a/api/internal/db/project.sql_gen.go +++ b/api/internal/db/project.sql_gen.go @@ -12,21 +12,53 @@ import ( "github.com/google/uuid" ) -const deleteFlagProject = `-- name: DeleteFlagProject :exec +const districtList = `-- name: DistrictList :many +select agency, id, name, initials, division_name, division_initials, office_id from v_district +` + +func (q *Queries) DistrictList(ctx context.Context) ([]VDistrict, error) { + rows, err := q.db.Query(ctx, districtList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDistrict{} + for rows.Next() { + var i VDistrict + if err := rows.Scan( + &i.Agency, + &i.ID, + &i.Name, + &i.Initials, + &i.DivisionName, + &i.DivisionInitials, + &i.OfficeID, + ); err != nil { + return 
nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectDeleteFlag = `-- name: ProjectDeleteFlag :exec update project set deleted=true where id = $1 ` -func (q *Queries) DeleteFlagProject(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteFlagProject, id) +func (q *Queries) ProjectDeleteFlag(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, projectDeleteFlag, id) return err } -const getProject = `-- name: GetProject :one +const projectGet = `-- name: ProjectGet :one select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project where id = $1 ` -func (q *Queries) GetProject(ctx context.Context, id uuid.UUID) (VProject, error) { - row := q.db.QueryRow(ctx, getProject, id) +func (q *Queries) ProjectGet(ctx context.Context, id uuid.UUID) (VProject, error) { + row := q.db.QueryRow(ctx, projectGet, id) var i VProject err := row.Scan( &i.ID, @@ -49,82 +81,23 @@ func (q *Queries) GetProject(ctx context.Context, id uuid.UUID) (VProject, error return i, err } -const getProjectCount = `-- name: GetProjectCount :one +const projectGetCount = `-- name: ProjectGetCount :one select count(*) from project where not deleted ` -func (q *Queries) GetProjectCount(ctx context.Context) (int64, error) { - row := q.db.QueryRow(ctx, getProjectCount) +func (q *Queries) ProjectGetCount(ctx context.Context) (int64, error) { + row := q.db.QueryRow(ctx, projectGetCount) var count int64 err := row.Scan(&count) return count, err } -const listAdminProjects = `-- name: ListAdminProjects :many -select pr.project_id from profile_project_roles pr -inner join role ro on ro.id = pr.role_id -where pr.profile_id = $1 -and ro.name = 'ADMIN' -` - -func (q *Queries) ListAdminProjects(ctx context.Context, profileID uuid.UUID) 
([]uuid.UUID, error) { - rows, err := q.db.Query(ctx, listAdminProjects, profileID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []uuid.UUID{} - for rows.Next() { - var project_id uuid.UUID - if err := rows.Scan(&project_id); err != nil { - return nil, err - } - items = append(items, project_id) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listDistricts = `-- name: ListDistricts :many -select agency, id, name, initials, division_name, division_initials, office_id from v_district -` - -func (q *Queries) ListDistricts(ctx context.Context) ([]VDistrict, error) { - rows, err := q.db.Query(ctx, listDistricts) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VDistrict{} - for rows.Next() { - var i VDistrict - if err := rows.Scan( - &i.Agency, - &i.ID, - &i.Name, - &i.Initials, - &i.DivisionName, - &i.DivisionInitials, - &i.OfficeID, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listProjects = `-- name: ListProjects :many +const projectList = `-- name: ProjectList :many select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project ` -func (q *Queries) ListProjects(ctx context.Context) ([]VProject, error) { - rows, err := q.db.Query(ctx, listProjects) +func (q *Queries) ProjectList(ctx context.Context) ([]VProject, error) { + rows, err := q.db.Query(ctx, projectList) if err != nil { return nil, err } @@ -160,13 +133,13 @@ func (q *Queries) ListProjects(ctx context.Context) ([]VProject, error) { return items, nil } -const listProjectsForFederalID = `-- name: ListProjectsForFederalID :many +const projectListForFederalID = `-- name: ProjectListForFederalID :many select id, federal_id, image, district_id, office_id, 
deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project -where federal_id = sqc.arg(federal_id) +where federal_id = $1 ` -func (q *Queries) ListProjectsForFederalID(ctx context.Context) ([]VProject, error) { - rows, err := q.db.Query(ctx, listProjectsForFederalID) +func (q *Queries) ProjectListForFederalID(ctx context.Context, federalID *string) ([]VProject, error) { + rows, err := q.db.Query(ctx, projectListForFederalID, federalID) if err != nil { return nil, err } @@ -202,22 +175,19 @@ func (q *Queries) ListProjectsForFederalID(ctx context.Context) ([]VProject, err return items, nil } -const listProjectsForProfileRole = `-- name: ListProjectsForProfileRole :many -select p.id, p.federal_id, p.image, p.district_id, p.office_id, p.deleted, p.slug, p.name, p.creator, p.creator_username, p.create_date, p.updater, p.updater_username, p.update_date, p.instrument_count, p.instrument_group_count -from v_project p -inner join profile_project_roles pr on pr.project_id = p.id -inner join role r on r.id = pr.role_id -where pr.profile_id = $1 -and r.name = $2 +const projectListForNameSearch = `-- name: ProjectListForNameSearch :many +select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project +where name ilike '%'||$1||'%' +limit $2 ` -type ListProjectsForProfileRoleParams struct { - ProfileID uuid.UUID `json:"profile_id"` - Name string `json:"name"` +type ProjectListForNameSearchParams struct { + Name *string `json:"name"` + ResultLimit int32 `json:"result_limit"` } -func (q *Queries) ListProjectsForProfileRole(ctx context.Context, arg ListProjectsForProfileRoleParams) ([]VProject, error) { - rows, err := q.db.Query(ctx, listProjectsForProfileRole, arg.ProfileID, arg.Name) +func (q *Queries) ProjectListForNameSearch(ctx 
context.Context, arg ProjectListForNameSearchParams) ([]VProject, error) { + rows, err := q.db.Query(ctx, projectListForNameSearch, arg.Name, arg.ResultLimit) if err != nil { return nil, err } @@ -253,19 +223,49 @@ func (q *Queries) ListProjectsForProfileRole(ctx context.Context, arg ListProjec return items, nil } -const searchProjects = `-- name: SearchProjects :many -select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project -where name ilike '%'||$1||'%' -limit $2 +const projectListForProfileAdmin = `-- name: ProjectListForProfileAdmin :many +select pr.project_id from profile_project_roles pr +inner join role ro on ro.id = pr.role_id +where pr.profile_id = $1 +and ro.name = 'ADMIN' ` -type SearchProjectsParams struct { - Name *string `json:"name"` - ResultLimit int32 `json:"result_limit"` +func (q *Queries) ProjectListForProfileAdmin(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) { + rows, err := q.db.Query(ctx, projectListForProfileAdmin, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []uuid.UUID{} + for rows.Next() { + var project_id uuid.UUID + if err := rows.Scan(&project_id); err != nil { + return nil, err + } + items = append(items, project_id) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectListForProfileRole = `-- name: ProjectListForProfileRole :many +select p.id, p.federal_id, p.image, p.district_id, p.office_id, p.deleted, p.slug, p.name, p.creator, p.creator_username, p.create_date, p.updater, p.updater_username, p.update_date, p.instrument_count, p.instrument_group_count +from v_project p +inner join profile_project_roles pr on pr.project_id = p.id +inner join role r on r.id = pr.role_id +where pr.profile_id = $1 +and r.name = $2 +` + +type ProjectListForProfileRoleParams struct { + ProfileID 
uuid.UUID `json:"profile_id"` + Name string `json:"name"` } -func (q *Queries) SearchProjects(ctx context.Context, arg SearchProjectsParams) ([]VProject, error) { - rows, err := q.db.Query(ctx, searchProjects, arg.Name, arg.ResultLimit) +func (q *Queries) ProjectListForProfileRole(ctx context.Context, arg ProjectListForProfileRoleParams) ([]VProject, error) { + rows, err := q.db.Query(ctx, projectListForProfileRole, arg.ProfileID, arg.Name) if err != nil { return nil, err } @@ -301,11 +301,11 @@ func (q *Queries) SearchProjects(ctx context.Context, arg SearchProjectsParams) return items, nil } -const updateProject = `-- name: UpdateProject :one +const projectUpdate = `-- name: ProjectUpdate :one update project set name=$2, updater=$3, update_date=$4, district_id=$5, federal_id=$6 where id=$1 returning id ` -type UpdateProjectParams struct { +type ProjectUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Updater *uuid.UUID `json:"updater"` @@ -314,8 +314,8 @@ type UpdateProjectParams struct { FederalID *string `json:"federal_id"` } -func (q *Queries) UpdateProject(ctx context.Context, arg UpdateProjectParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, updateProject, +func (q *Queries) ProjectUpdate(ctx context.Context, arg ProjectUpdateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, projectUpdate, arg.ID, arg.Name, arg.Updater, @@ -328,16 +328,16 @@ func (q *Queries) UpdateProject(ctx context.Context, arg UpdateProjectParams) (u return id, err } -const updateProjectImage = `-- name: UpdateProjectImage :exec +const projectUpdateImage = `-- name: ProjectUpdateImage :exec update project set image = $1 where id = $2 ` -type UpdateProjectImageParams struct { +type ProjectUpdateImageParams struct { Image *string `json:"image"` ID uuid.UUID `json:"id"` } -func (q *Queries) UpdateProjectImage(ctx context.Context, arg UpdateProjectImageParams) error { - _, err := q.db.Exec(ctx, updateProjectImage, arg.Image, arg.ID) +func (q *Queries) 
ProjectUpdateImage(ctx context.Context, arg ProjectUpdateImageParams) error { + _, err := q.db.Exec(ctx, projectUpdateImage, arg.Image, arg.ID) return err } diff --git a/api/internal/db/project_role.sql_gen.go b/api/internal/db/project_role.sql_gen.go index a89b31a7..64f0989f 100644 --- a/api/internal/db/project_role.sql_gen.go +++ b/api/internal/db/project_role.sql_gen.go @@ -11,22 +11,22 @@ import ( "github.com/google/uuid" ) -const createProfileProjectRole = `-- name: CreateProfileProjectRole :one +const profileProjectRoleCreate = `-- name: ProfileProjectRoleCreate :one insert into profile_project_roles (project_id, profile_id, role_id, granted_by) values ($1, $2, $3, $4) on conflict on constraint unique_profile_project_role do update set project_id = excluded.project_id returning id ` -type CreateProfileProjectRoleParams struct { +type ProfileProjectRoleCreateParams struct { ProjectID uuid.UUID `json:"project_id"` ProfileID uuid.UUID `json:"profile_id"` RoleID uuid.UUID `json:"role_id"` GrantedBy *uuid.UUID `json:"granted_by"` } -func (q *Queries) CreateProfileProjectRole(ctx context.Context, arg CreateProfileProjectRoleParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, createProfileProjectRole, +func (q *Queries) ProfileProjectRoleCreate(ctx context.Context, arg ProfileProjectRoleCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, profileProjectRoleCreate, arg.ProjectID, arg.ProfileID, arg.RoleID, @@ -37,28 +37,28 @@ func (q *Queries) CreateProfileProjectRole(ctx context.Context, arg CreateProfil return id, err } -const deleteProfileProjectRole = `-- name: DeleteProfileProjectRole :exec +const profileProjectRoleDelete = `-- name: ProfileProjectRoleDelete :exec delete from profile_project_roles where project_id = $1 and profile_id = $2 and role_id = $3 ` -type DeleteProfileProjectRoleParams struct { +type ProfileProjectRoleDeleteParams struct { ProjectID uuid.UUID `json:"project_id"` ProfileID uuid.UUID `json:"profile_id"` RoleID uuid.UUID 
`json:"role_id"` } -func (q *Queries) DeleteProfileProjectRole(ctx context.Context, arg DeleteProfileProjectRoleParams) error { - _, err := q.db.Exec(ctx, deleteProfileProjectRole, arg.ProjectID, arg.ProfileID, arg.RoleID) +func (q *Queries) ProfileProjectRoleDelete(ctx context.Context, arg ProfileProjectRoleDeleteParams) error { + _, err := q.db.Exec(ctx, profileProjectRoleDelete, arg.ProjectID, arg.ProfileID, arg.RoleID) return err } -const getProjectMembership = `-- name: GetProjectMembership :one +const profileProjectRoleGet = `-- name: ProfileProjectRoleGet :one select id, profile_id, username, email, role_id, role from v_profile_project_roles where id = $1 ` -type GetProjectMembershipRow struct { +type ProfileProjectRoleGetRow struct { ID uuid.UUID `json:"id"` ProfileID uuid.UUID `json:"profile_id"` Username string `json:"username"` @@ -67,9 +67,9 @@ type GetProjectMembershipRow struct { Role string `json:"role"` } -func (q *Queries) GetProjectMembership(ctx context.Context, id uuid.UUID) (GetProjectMembershipRow, error) { - row := q.db.QueryRow(ctx, getProjectMembership, id) - var i GetProjectMembershipRow +func (q *Queries) ProfileProjectRoleGet(ctx context.Context, id uuid.UUID) (ProfileProjectRoleGetRow, error) { + row := q.db.QueryRow(ctx, profileProjectRoleGet, id) + var i ProfileProjectRoleGetRow err := row.Scan( &i.ID, &i.ProfileID, @@ -81,7 +81,7 @@ func (q *Queries) GetProjectMembership(ctx context.Context, id uuid.UUID) (GetPr return i, err } -const isProjectAdmin = `-- name: IsProjectAdmin :one +const profileProjectRoleGetIsAdmin = `-- name: ProfileProjectRoleGetIsAdmin :one select exists ( select 1 from profile_project_roles pr inner join role r on r.id = pr.role_id @@ -91,19 +91,19 @@ select exists ( ) ` -type IsProjectAdminParams struct { +type ProfileProjectRoleGetIsAdminParams struct { ProfileID uuid.UUID `json:"profile_id"` ProjectID uuid.UUID `json:"project_id"` } -func (q *Queries) IsProjectAdmin(ctx context.Context, arg 
IsProjectAdminParams) (bool, error) { - row := q.db.QueryRow(ctx, isProjectAdmin, arg.ProfileID, arg.ProjectID) +func (q *Queries) ProfileProjectRoleGetIsAdmin(ctx context.Context, arg ProfileProjectRoleGetIsAdminParams) (bool, error) { + row := q.db.QueryRow(ctx, profileProjectRoleGetIsAdmin, arg.ProfileID, arg.ProjectID) var exists bool err := row.Scan(&exists) return exists, err } -const isProjectMember = `-- name: IsProjectMember :one +const profileProjectRoleGetIsMemberOrAdmin = `-- name: ProfileProjectRoleGetIsMemberOrAdmin :one select exists ( select 1 from profile_project_roles pr inner join role r on r.id = pr.role_id @@ -113,26 +113,26 @@ select exists ( ) ` -type IsProjectMemberParams struct { +type ProfileProjectRoleGetIsMemberOrAdminParams struct { ProfileID uuid.UUID `json:"profile_id"` ProjectID uuid.UUID `json:"project_id"` } -func (q *Queries) IsProjectMember(ctx context.Context, arg IsProjectMemberParams) (bool, error) { - row := q.db.QueryRow(ctx, isProjectMember, arg.ProfileID, arg.ProjectID) +func (q *Queries) ProfileProjectRoleGetIsMemberOrAdmin(ctx context.Context, arg ProfileProjectRoleGetIsMemberOrAdminParams) (bool, error) { + row := q.db.QueryRow(ctx, profileProjectRoleGetIsMemberOrAdmin, arg.ProfileID, arg.ProjectID) var exists bool err := row.Scan(&exists) return exists, err } -const listProjectMembers = `-- name: ListProjectMembers :many +const profileProjectRoleListForProject = `-- name: ProfileProjectRoleListForProject :many select id, profile_id, username, email, role_id, role from v_profile_project_roles where project_id = $1 order by email ` -type ListProjectMembersRow struct { +type ProfileProjectRoleListForProjectRow struct { ID uuid.UUID `json:"id"` ProfileID uuid.UUID `json:"profile_id"` Username string `json:"username"` @@ -141,15 +141,15 @@ type ListProjectMembersRow struct { Role string `json:"role"` } -func (q *Queries) ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]ListProjectMembersRow, error) { - rows, 
err := q.db.Query(ctx, listProjectMembers, projectID) +func (q *Queries) ProfileProjectRoleListForProject(ctx context.Context, projectID uuid.UUID) ([]ProfileProjectRoleListForProjectRow, error) { + rows, err := q.db.Query(ctx, profileProjectRoleListForProject, projectID) if err != nil { return nil, err } defer rows.Close() - items := []ListProjectMembersRow{} + items := []ProfileProjectRoleListForProjectRow{} for rows.Next() { - var i ListProjectMembersRow + var i ProfileProjectRoleListForProjectRow if err := rows.Scan( &i.ID, &i.ProfileID, diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index c224ed06..c50f19ae 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -12,305 +12,315 @@ import ( ) type Querier interface { - AddTimeseriesToCollectionGroup(ctx context.Context, arg AddTimeseriesToCollectionGroupParams) error - AssignInstrumentToAlertConfig(ctx context.Context, arg AssignInstrumentToAlertConfigParams) error - AssignInstrumentToProject(ctx context.Context, arg AssignInstrumentToProjectParams) error - AssignInstrumentToProjectBatch(ctx context.Context, arg []AssignInstrumentToProjectBatchParams) *AssignInstrumentToProjectBatchBatchResults - AssignReportConfigPlotConfig(ctx context.Context, arg AssignReportConfigPlotConfigParams) error - AssignReportConfigPlotConfigBatch(ctx context.Context, arg []AssignReportConfigPlotConfigBatchParams) *AssignReportConfigPlotConfigBatchBatchResults - CompleteEvaluationSubmittal(ctx context.Context, id uuid.UUID) (Submittal, error) - CreateAlert(ctx context.Context, alertConfigID uuid.UUID) error - CreateAlertConfig(ctx context.Context, arg CreateAlertConfigParams) (uuid.UUID, error) - CreateAlertEmailSubscription(ctx context.Context, arg CreateAlertEmailSubscriptionParams) error - CreateAlertProfileSubscription(ctx context.Context, arg CreateAlertProfileSubscriptionParams) error - CreateAlertProfileSubscriptionOnAnyConflictDoNothing(ctx context.Context, arg 
CreateAlertProfileSubscriptionOnAnyConflictDoNothingParams) error - CreateAlertRead(ctx context.Context, arg CreateAlertReadParams) error - CreateAlerts(ctx context.Context, alertConfigID []uuid.UUID) *CreateAlertsBatchResults - CreateAwarePlatform(ctx context.Context, arg CreateAwarePlatformParams) error - CreateAwarePlatformBatch(ctx context.Context, arg []CreateAwarePlatformBatchParams) *CreateAwarePlatformBatchBatchResults - CreateCalculatedTimeseries(ctx context.Context, arg CreateCalculatedTimeseriesParams) (uuid.UUID, error) - CreateCalculation(ctx context.Context, arg CreateCalculationParams) error - CreateCollectionGroup(ctx context.Context, arg CreateCollectionGroupParams) (CreateCollectionGroupRow, error) - CreateDatalogger(ctx context.Context, arg CreateDataloggerParams) (uuid.UUID, error) - CreateDataloggerError(ctx context.Context, arg CreateDataloggerErrorParams) error - CreateDataloggerHash(ctx context.Context, arg CreateDataloggerHashParams) error - CreateDataloggerTablePreview(ctx context.Context, arg CreateDataloggerTablePreviewParams) error - CreateEvaluation(ctx context.Context, arg CreateEvaluationParams) (uuid.UUID, error) - CreateEvaluationInstrument(ctx context.Context, arg CreateEvaluationInstrumentParams) error - CreateEvaluationInstrumentsBatch(ctx context.Context, arg []CreateEvaluationInstrumentsBatchParams) *CreateEvaluationInstrumentsBatchBatchResults - CreateHeartbeat(ctx context.Context, argTime time.Time) (time.Time, error) - CreateInclOpts(ctx context.Context, arg CreateInclOptsParams) error - CreateInclOptsBatch(ctx context.Context, arg []CreateInclOptsBatchParams) *CreateInclOptsBatchBatchResults - CreateInclSegment(ctx context.Context, arg CreateInclSegmentParams) error - CreateInclSegmentBatch(ctx context.Context, arg []CreateInclSegmentBatchParams) *CreateInclSegmentBatchBatchResults - CreateInstrument(ctx context.Context, arg CreateInstrumentParams) (CreateInstrumentRow, error) - CreateInstrumentConstant(ctx 
context.Context, arg CreateInstrumentConstantParams) error - CreateInstrumentConstantBatch(ctx context.Context, arg []CreateInstrumentConstantBatchParams) *CreateInstrumentConstantBatchBatchResults - CreateInstrumentGroup(ctx context.Context, arg CreateInstrumentGroupParams) (CreateInstrumentGroupRow, error) - CreateInstrumentGroupInstruments(ctx context.Context, arg CreateInstrumentGroupInstrumentsParams) error - CreateInstrumentGroupsBatch(ctx context.Context, arg []CreateInstrumentGroupsBatchParams) *CreateInstrumentGroupsBatchBatchResults - CreateInstrumentNote(ctx context.Context, arg CreateInstrumentNoteParams) (InstrumentNote, error) - CreateInstrumentNoteBatch(ctx context.Context, arg []CreateInstrumentNoteBatchParams) *CreateInstrumentNoteBatchBatchResults - CreateInstrumentsBatch(ctx context.Context, arg []CreateInstrumentsBatchParams) *CreateInstrumentsBatchBatchResults - CreateIpiOpts(ctx context.Context, arg CreateIpiOptsParams) error - CreateIpiOptsBatch(ctx context.Context, arg []CreateIpiOptsBatchParams) *CreateIpiOptsBatchBatchResults - CreateIpiSegment(ctx context.Context, arg CreateIpiSegmentParams) error - CreateIpiSegmentBatch(ctx context.Context, arg []CreateIpiSegmentBatchParams) *CreateIpiSegmentBatchBatchResults - CreateNextEvaluationSubmittal(ctx context.Context, id uuid.UUID) error - CreateNextSubmittalFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error - CreateNextSubmittalFromNewAlertConfigDate(ctx context.Context, arg CreateNextSubmittalFromNewAlertConfigDateParams) error - CreateOrUpdateCalculatedTimeseries(ctx context.Context, arg CreateOrUpdateCalculatedTimeseriesParams) error - CreateOrUpdateCalculation(ctx context.Context, arg CreateOrUpdateCalculationParams) error - CreateOrUpdateEquivalencyTableRow(ctx context.Context, arg CreateOrUpdateEquivalencyTableRowParams) error - CreateOrUpdateInstrumentStatus(ctx context.Context, arg CreateOrUpdateInstrumentStatusParams) error - CreateOrUpdateInstrumentStatusBatch(ctx 
context.Context, arg []CreateOrUpdateInstrumentStatusBatchParams) *CreateOrUpdateInstrumentStatusBatchBatchResults - CreateOrUpdateTimeseriesMeasurement(ctx context.Context, arg CreateOrUpdateTimeseriesMeasurementParams) error - CreateOrUpdateTimeseriesMeasurementsBatch(ctx context.Context, arg []CreateOrUpdateTimeseriesMeasurementsBatchParams) *CreateOrUpdateTimeseriesMeasurementsBatchBatchResults - CreateOrUpdateTimeseriesNote(ctx context.Context, arg CreateOrUpdateTimeseriesNoteParams) error - CreateOrUpdateTimeseriesNoteBatch(ctx context.Context, arg []CreateOrUpdateTimeseriesNoteBatchParams) *CreateOrUpdateTimeseriesNoteBatchBatchResults - CreatePlotBullseyeConfig(ctx context.Context, arg CreatePlotBullseyeConfigParams) error - CreatePlotConfig(ctx context.Context, arg CreatePlotConfigParams) (uuid.UUID, error) - CreatePlotConfigCustomShape(ctx context.Context, arg CreatePlotConfigCustomShapeParams) error - CreatePlotConfigCustomShapesBatch(ctx context.Context, arg []CreatePlotConfigCustomShapesBatchParams) *CreatePlotConfigCustomShapesBatchBatchResults - CreatePlotConfigScatterLineLayout(ctx context.Context, arg CreatePlotConfigScatterLineLayoutParams) error - CreatePlotConfigSettings(ctx context.Context, arg CreatePlotConfigSettingsParams) error - CreatePlotConfigTimeseriesTrace(ctx context.Context, arg CreatePlotConfigTimeseriesTraceParams) error - CreatePlotConfigTimeseriesTracesBatch(ctx context.Context, arg []CreatePlotConfigTimeseriesTracesBatchParams) *CreatePlotConfigTimeseriesTracesBatchBatchResults - CreatePlotContourConfig(ctx context.Context, arg CreatePlotContourConfigParams) error - CreatePlotContourConfigTimeseries(ctx context.Context, arg CreatePlotContourConfigTimeseriesParams) error - CreatePlotContourConfigTimeseriesBatch(ctx context.Context, arg []CreatePlotContourConfigTimeseriesBatchParams) *CreatePlotContourConfigTimeseriesBatchBatchResults - CreatePlotProfileConfig(ctx context.Context, arg CreatePlotProfileConfigParams) error - 
CreateProfile(ctx context.Context, arg CreateProfileParams) error - CreateProfileProjectRole(ctx context.Context, arg CreateProfileProjectRoleParams) (uuid.UUID, error) - CreateProfileToken(ctx context.Context, arg CreateProfileTokenParams) (ProfileToken, error) - CreateProjectsBatch(ctx context.Context, arg []CreateProjectsBatchParams) *CreateProjectsBatchBatchResults - CreateReportConfig(ctx context.Context, arg CreateReportConfigParams) (uuid.UUID, error) - CreateReportDownloadJob(ctx context.Context, arg CreateReportDownloadJobParams) (ReportDownloadJob, error) - CreateSaaOpts(ctx context.Context, arg CreateSaaOptsParams) error - CreateSaaOptsBatch(ctx context.Context, arg []CreateSaaOptsBatchParams) *CreateSaaOptsBatchBatchResults - CreateSaaSegment(ctx context.Context, arg CreateSaaSegmentParams) error - CreateSaaSegmentBatch(ctx context.Context, arg []CreateSaaSegmentBatchParams) *CreateSaaSegmentBatchBatchResults - CreateTimeseries(ctx context.Context, arg CreateTimeseriesParams) (CreateTimeseriesRow, error) - CreateTimeseriesBatch(ctx context.Context, arg []CreateTimeseriesBatchParams) *CreateTimeseriesBatchBatchResults - CreateTimeseriesCwms(ctx context.Context, arg CreateTimeseriesCwmsParams) error - CreateTimeseriesCwmsBatch(ctx context.Context, arg []CreateTimeseriesCwmsBatchParams) *CreateTimeseriesCwmsBatchBatchResults - CreateTimeseriesMeasurement(ctx context.Context, arg CreateTimeseriesMeasurementParams) error - CreateTimeseriesMeasurementsBatch(ctx context.Context, arg []CreateTimeseriesMeasurementsBatchParams) *CreateTimeseriesMeasurementsBatchBatchResults - CreateTimeseriesNote(ctx context.Context, arg CreateTimeseriesNoteParams) error - CreateTimeseriesNotesBatch(ctx context.Context, arg []CreateTimeseriesNotesBatchParams) *CreateTimeseriesNotesBatchBatchResults - CreateUploaderConfig(ctx context.Context, arg CreateUploaderConfigParams) (uuid.UUID, error) - CreateUploaderConfigMappingsBatch(ctx context.Context, arg 
[]CreateUploaderConfigMappingsBatchParams) *CreateUploaderConfigMappingsBatchBatchResults - DeleteAlertConfig(ctx context.Context, id uuid.UUID) error - DeleteAlertEmailSubscription(ctx context.Context, arg DeleteAlertEmailSubscriptionParams) error - DeleteAlertProfileSubscription(ctx context.Context, arg DeleteAlertProfileSubscriptionParams) error - DeleteAlertRead(ctx context.Context, arg DeleteAlertReadParams) error - DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error - DeleteAllAlertProfileSubscritpionsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error - DeleteAllPlotConfigCustomShapes(ctx context.Context, plotConfigurationID *uuid.UUID) error - DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, plotConfigurationID *uuid.UUID) error - DeleteAllPlotContourConfigTimeseries(ctx context.Context, plotContourConfigID uuid.UUID) error - DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error - DeleteCalculatedTimeseries(ctx context.Context, id uuid.UUID) error - DeleteCollectionGroup(ctx context.Context, arg DeleteCollectionGroupParams) error - DeleteDatalogger(ctx context.Context, arg DeleteDataloggerParams) error - DeleteDataloggerTable(ctx context.Context, id uuid.UUID) error - DeleteDataloggerTableError(ctx context.Context, arg DeleteDataloggerTableErrorParams) error - DeleteEquivalencyTable(ctx context.Context, dataloggerTableID *uuid.UUID) error - DeleteEquivalencyTableRow(ctx context.Context, id uuid.UUID) error - DeleteEvaluation(ctx context.Context, id uuid.UUID) error - DeleteFlagInstrument(ctx context.Context, arg DeleteFlagInstrumentParams) error - DeleteFlagInstrumentGroup(ctx context.Context, id uuid.UUID) error - DeleteFlagProject(ctx context.Context, id uuid.UUID) error - DeleteInstrumentConstant(ctx context.Context, arg DeleteInstrumentConstantParams) error - DeleteInstrumentGroupInstruments(ctx context.Context, arg 
DeleteInstrumentGroupInstrumentsParams) error - DeleteInstrumentNote(ctx context.Context, id uuid.UUID) error - DeleteInstrumentStatus(ctx context.Context, id uuid.UUID) error - DeletePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID) error - DeletePlotConfig(ctx context.Context, arg DeletePlotConfigParams) error - DeletePlotConfigSettings(ctx context.Context, id uuid.UUID) error - DeletePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID) error - DeleteProfileProjectRole(ctx context.Context, arg DeleteProfileProjectRoleParams) error - DeleteReportConfig(ctx context.Context, id uuid.UUID) error - DeleteTimeseries(ctx context.Context, id uuid.UUID) error - DeleteTimeseriesMeasurement(ctx context.Context, arg DeleteTimeseriesMeasurementParams) error - DeleteTimeseriesMeasurements(ctx context.Context, arg DeleteTimeseriesMeasurementsParams) error - DeleteTimeseriesMeasurementsRange(ctx context.Context, arg DeleteTimeseriesMeasurementsRangeParams) error - DeleteTimeseriesNoteRange(ctx context.Context, arg DeleteTimeseriesNoteRangeParams) error - DeleteToken(ctx context.Context, arg DeleteTokenParams) error - DeleteUploaderConfig(ctx context.Context, id uuid.UUID) error - GetAlert(ctx context.Context, arg GetAlertParams) (GetAlertRow, error) - GetAlertConfig(ctx context.Context, id uuid.UUID) (VAlertConfig, error) - GetAlertSubscription(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) - GetAlertSubscriptionForAlertConfig(ctx context.Context, arg GetAlertSubscriptionForAlertConfigParams) (AlertProfileSubscription, error) - GetAllInclSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error) - GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) - GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VSaaSegment, error) - GetCalculatedTimeseries(ctx context.Context, id uuid.UUID) (GetCalculatedTimeseriesRow, error) - 
GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) - GetDatalogger(ctx context.Context, id uuid.UUID) (VDatalogger, error) - GetDataloggerByModelSN(ctx context.Context, arg GetDataloggerByModelSNParams) (VDatalogger, error) - GetDataloggerHashByModelSN(ctx context.Context, arg GetDataloggerHashByModelSNParams) (string, error) - GetDataloggerIsActive(ctx context.Context, arg GetDataloggerIsActiveParams) (int32, error) - GetDataloggerModelName(ctx context.Context, id uuid.UUID) (*string, error) - GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) - GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) - GetEvaluation(ctx context.Context, id uuid.UUID) (VEvaluation, error) - GetHome(ctx context.Context) (GetHomeRow, error) - GetInclMeasurementsForInstrument(ctx context.Context, arg GetInclMeasurementsForInstrumentParams) ([]VInclMeasurement, error) - GetInstrument(ctx context.Context, id uuid.UUID) (VInstrument, error) - GetInstrumentCount(ctx context.Context) (int64, error) - GetInstrumentGroup(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) - GetIpiMeasurementsForInstrument(ctx context.Context, arg GetIpiMeasurementsForInstrumentParams) ([]VIpiMeasurement, error) - GetIsValidDataloggerTable(ctx context.Context, id uuid.UUID) (bool, error) - GetIsValidEquivalencyTableTimeseries(ctx context.Context, id uuid.UUID) (bool, error) - GetIssuedTokens(ctx context.Context, profileID uuid.UUID) ([]GetIssuedTokensRow, error) - GetLatestHeartbeat(ctx context.Context) (interface{}, error) - GetOrCreateDataloggerTable(ctx context.Context, arg GetOrCreateDataloggerTableParams) (uuid.UUID, error) - GetPlotConfig(ctx context.Context, id uuid.UUID) (VPlotConfiguration, error) - GetProfileForEDIPI(ctx context.Context, edipi int64) (VProfile, error) - GetProfileForEmail(ctx context.Context, email string) (VProfile, error) - 
GetProfileForTokenID(ctx context.Context, tokenID string) (GetProfileForTokenIDRow, error) - GetProfileForUsername(ctx context.Context, username string) (VProfile, error) - GetProject(ctx context.Context, id uuid.UUID) (VProject, error) - GetProjectCount(ctx context.Context) (int64, error) - GetProjectMembership(ctx context.Context, id uuid.UUID) (GetProjectMembershipRow, error) - GetReportConfig(ctx context.Context, id uuid.UUID) (VReportConfig, error) - GetReportDownloadJob(ctx context.Context, arg GetReportDownloadJobParams) (ReportDownloadJob, error) - GetSaaMeasurementsForInstrument(ctx context.Context, arg GetSaaMeasurementsForInstrumentParams) ([]VSaaMeasurement, error) - GetStoredTimeseriesExists(ctx context.Context, id uuid.UUID) (bool, error) - GetTimeseries(ctx context.Context, id uuid.UUID) (VTimeseries, error) - GetTimeseriesConstantMeasurement(ctx context.Context, arg GetTimeseriesConstantMeasurementParams) ([]GetTimeseriesConstantMeasurementRow, error) - GetTimeseriesCwms(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) - GetTimeseriesProjectMap(ctx context.Context, timeseriesIds []uuid.UUID) ([]VTimeseriesProjectMap, error) - GetTokenInfo(ctx context.Context, tokenID string) (ProfileToken, error) - IsProjectAdmin(ctx context.Context, arg IsProjectAdminParams) (bool, error) - IsProjectMember(ctx context.Context, arg IsProjectMemberParams) (bool, error) - ListAdminProjects(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) - ListAlertConfigSubmittals(ctx context.Context, arg ListAlertConfigSubmittalsParams) ([]VSubmittal, error) - ListAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) - ListAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) - ListAlertConfigsForProjectAlertType(ctx context.Context, arg ListAlertConfigsForProjectAlertTypeParams) ([]VAlertConfig, error) - ListAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlert, 
error) - ListAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]ListAlertsForProfileRow, error) - ListAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]VAlert, error) - ListAllDataloggers(ctx context.Context) ([]VDatalogger, error) - ListAndCheckAlertConfigs(ctx context.Context) ([]VAlertConfig, error) - ListAwareParameters(ctx context.Context) ([]ListAwareParametersRow, error) - ListAwarePlatformParameterEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) - ListCollectionGroupsForProject(ctx context.Context, projectID uuid.UUID) ([]ListCollectionGroupsForProjectRow, error) - ListDataloggersForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) - ListDistricts(ctx context.Context) ([]VDistrict, error) - ListDomainGroups(ctx context.Context) ([]VDomainGroup, error) - ListDomains(ctx context.Context) ([]VDomain, error) - ListEmailAutocomplete(ctx context.Context, arg ListEmailAutocompleteParams) ([]ListEmailAutocompleteRow, error) - ListEvaluationDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListEvaluationDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) - ListHeartbeats(ctx context.Context) ([]time.Time, error) - ListIncompleteEvaluationSubmittals(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) - ListIncompleteMeasurementSubmittals(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) - ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) - ListInstrumentEvaluations(ctx context.Context, instrumentID *uuid.UUID) ([]VEvaluation, error) - ListInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID) ([]ListInstrumentGroupInstrumentsRow, error) - ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) - ListInstrumentGroups(ctx context.Context) ([]VInstrumentGroup, error) - ListInstrumentGroupsForProject(ctx context.Context, projectID 
*uuid.UUID) ([]VInstrumentGroup, error) - ListInstrumentIDNamesByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]ListInstrumentIDNamesByIDsRow, error) - ListInstrumentNotes(ctx context.Context, arg ListInstrumentNotesParams) ([]InstrumentNote, error) - ListInstrumentProjects(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) - ListInstrumentStatuses(ctx context.Context, arg ListInstrumentStatusesParams) ([]ListInstrumentStatusesRow, error) - ListInstrumentSubmittals(ctx context.Context, arg ListInstrumentSubmittalsParams) ([]VSubmittal, error) - ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) - ListInstruments(ctx context.Context) ([]VInstrument, error) - ListInstrumentsForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) - ListMeasurementDistrictRollupsForProjectAlertConfig(ctx context.Context, arg ListMeasurementDistrictRollupsForProjectAlertConfigParams) ([]VDistrictRollup, error) - ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) - ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, arg ListPlotConfigMeasurementsBullseyePlotParams) ([]ListPlotConfigMeasurementsBullseyePlotRow, error) - ListPlotConfigMeasurementsContourPlot(ctx context.Context, arg ListPlotConfigMeasurementsContourPlotParams) ([]ListPlotConfigMeasurementsContourPlotRow, error) - ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) - ListPlotConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]VPlotConfiguration, error) - ListPlotContourConfigTimes(ctx context.Context, arg ListPlotContourConfigTimesParams) ([]time.Time, error) - ListProjectCountForInstruments(ctx context.Context, instrumentIds []uuid.UUID) ([]ListProjectCountForInstrumentsRow, error) - ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) - ListProjectEvaluationsByAlertConfig(ctx 
context.Context, arg ListProjectEvaluationsByAlertConfigParams) ([]VEvaluation, error) - ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]ListProjectMembersRow, error) - ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) - ListProjectSubmittals(ctx context.Context, arg ListProjectSubmittalsParams) ([]VSubmittal, error) - ListProjects(ctx context.Context) ([]VProject, error) - ListProjectsForFederalID(ctx context.Context) ([]VProject, error) - ListProjectsForProfileRole(ctx context.Context, arg ListProjectsForProfileRoleParams) ([]VProject, error) - ListReportConfigPlotConfigs(ctx context.Context, reportConfigID uuid.UUID) ([]VPlotConfiguration, error) - ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) - ListTimeseriesForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) - ListTimeseriesMeasurements(ctx context.Context, arg ListTimeseriesMeasurementsParams) ([]ListTimeseriesMeasurementsRow, error) - ListUnits(ctx context.Context) ([]VUnit, error) - ListUnverifiedMissingSubmittals(ctx context.Context) ([]VSubmittal, error) - ListUploaderConfigMappings(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) - ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) - RegisterEmail(ctx context.Context, email string) (uuid.UUID, error) - RemoveTimeseriesFromCollectionGroup(ctx context.Context, arg RemoveTimeseriesFromCollectionGroupParams) error - RenameEmptyDataloggerTableName(ctx context.Context, arg RenameEmptyDataloggerTableNameParams) error - ResetDataloggerTableName(ctx context.Context, id uuid.UUID) error - SearchProjects(ctx context.Context, arg SearchProjectsParams) ([]VProject, error) - UnassignAllInstrumentsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error - UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID *uuid.UUID) error - 
UnassignAllReportConfigPlotConfig(ctx context.Context, reportConfigID uuid.UUID) error - UnassignInstrumentFromProject(ctx context.Context, arg UnassignInstrumentFromProjectParams) error - UnassignInstrumentFromProjectBatch(ctx context.Context, arg []UnassignInstrumentFromProjectBatchParams) *UnassignInstrumentFromProjectBatchBatchResults - UnassignReportConfigPlotConfig(ctx context.Context, arg UnassignReportConfigPlotConfigParams) error - UnassignReportConfigPlotConfigBatch(ctx context.Context, arg []UnassignReportConfigPlotConfigBatchParams) *UnassignReportConfigPlotConfigBatchBatchResults - UnregisterEmail(ctx context.Context, id uuid.UUID) error - UpdateAlertConfig(ctx context.Context, arg UpdateAlertConfigParams) error - UpdateAlertConfigLastReminded(ctx context.Context, arg UpdateAlertConfigLastRemindedParams) error - UpdateCollectionGroup(ctx context.Context, arg UpdateCollectionGroupParams) (UpdateCollectionGroupRow, error) - UpdateDatalogger(ctx context.Context, arg UpdateDataloggerParams) error - UpdateDataloggerHash(ctx context.Context, arg UpdateDataloggerHashParams) error - UpdateDataloggerTablePreview(ctx context.Context, arg UpdateDataloggerTablePreviewParams) error - UpdateDataloggerUpdater(ctx context.Context, arg UpdateDataloggerUpdaterParams) error - UpdateEquivalencyTableRow(ctx context.Context, arg UpdateEquivalencyTableRowParams) error - UpdateEvaluation(ctx context.Context, arg UpdateEvaluationParams) error - UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) - UpdateInclOpts(ctx context.Context, arg UpdateInclOptsParams) error - UpdateInclOptsBatch(ctx context.Context, arg []UpdateInclOptsBatchParams) *UpdateInclOptsBatchBatchResults - UpdateInclSegment(ctx context.Context, arg UpdateInclSegmentParams) error - UpdateInclSegmentsBatch(ctx context.Context, arg []UpdateInclSegmentsBatchParams) *UpdateInclSegmentsBatchBatchResults - UpdateInstrument(ctx context.Context, arg 
UpdateInstrumentParams) error - UpdateInstrumentGeometry(ctx context.Context, arg UpdateInstrumentGeometryParams) (uuid.UUID, error) - UpdateInstrumentGroup(ctx context.Context, arg UpdateInstrumentGroupParams) (InstrumentGroup, error) - UpdateInstrumentNote(ctx context.Context, arg UpdateInstrumentNoteParams) (InstrumentNote, error) - UpdateIpiOpts(ctx context.Context, arg UpdateIpiOptsParams) error - UpdateIpiOptsBatch(ctx context.Context, arg []UpdateIpiOptsBatchParams) *UpdateIpiOptsBatchBatchResults - UpdateIpiSegment(ctx context.Context, arg UpdateIpiSegmentParams) error - UpdateIpiSegmentsBatch(ctx context.Context, arg []UpdateIpiSegmentsBatchParams) *UpdateIpiSegmentsBatchBatchResults - UpdateMyAlertSubscription(ctx context.Context, arg UpdateMyAlertSubscriptionParams) error - UpdatePlotBullseyeConfig(ctx context.Context, arg UpdatePlotBullseyeConfigParams) error - UpdatePlotConfig(ctx context.Context, arg UpdatePlotConfigParams) error - UpdatePlotConfigCustomShape(ctx context.Context, arg UpdatePlotConfigCustomShapeParams) error - UpdatePlotConfigScatterLineLayout(ctx context.Context, arg UpdatePlotConfigScatterLineLayoutParams) error - UpdatePlotConfigTimeseriesTrace(ctx context.Context, arg UpdatePlotConfigTimeseriesTraceParams) error - UpdatePlotContourConfig(ctx context.Context, arg UpdatePlotContourConfigParams) error - UpdatePlotProfileConfig(ctx context.Context, arg UpdatePlotProfileConfigParams) error - UpdateProfileForEDIPI(ctx context.Context, arg UpdateProfileForEDIPIParams) error - UpdateProfileForEmail(ctx context.Context, arg UpdateProfileForEmailParams) error - UpdateProfileForUsername(ctx context.Context, arg UpdateProfileForUsernameParams) error - UpdateProject(ctx context.Context, arg UpdateProjectParams) (uuid.UUID, error) - UpdateProjectImage(ctx context.Context, arg UpdateProjectImageParams) error - UpdateReportConfig(ctx context.Context, arg UpdateReportConfigParams) error - UpdateReportDownloadJob(ctx context.Context, arg 
UpdateReportDownloadJobParams) error - UpdateSaaOpts(ctx context.Context, arg UpdateSaaOptsParams) error - UpdateSaaOptsBatch(ctx context.Context, arg []UpdateSaaOptsBatchParams) *UpdateSaaOptsBatchBatchResults - UpdateSaaSegment(ctx context.Context, arg UpdateSaaSegmentParams) error - UpdateSaaSegmentBatch(ctx context.Context, arg []UpdateSaaSegmentBatchParams) *UpdateSaaSegmentBatchBatchResults - UpdateSubmittal(ctx context.Context, arg UpdateSubmittalParams) error - UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, arg UpdateSubmittalCompletionDateOrWarningSentParams) error - UpdateTimeseries(ctx context.Context, arg UpdateTimeseriesParams) (uuid.UUID, error) - UpdateTimeseriesCwms(ctx context.Context, arg UpdateTimeseriesCwmsParams) error - UpdateUploaderConfig(ctx context.Context, arg UpdateUploaderConfigParams) error - ValidateInstrumentNamesProjectUnique(ctx context.Context, arg ValidateInstrumentNamesProjectUniqueParams) ([]string, error) - ValidateInstrumentsAssignerAuthorized(ctx context.Context, arg ValidateInstrumentsAssignerAuthorizedParams) ([]ValidateInstrumentsAssignerAuthorizedRow, error) - ValidateProjectsAssignerAuthorized(ctx context.Context, arg ValidateProjectsAssignerAuthorizedParams) ([]string, error) - ValidateProjectsInstrumentNameUnique(ctx context.Context, arg ValidateProjectsInstrumentNameUniqueParams) ([]string, error) - VerifyDataloggerExists(ctx context.Context, id uuid.UUID) (bool, error) - VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID *uuid.UUID) error - VerifyMissingSubmittal(ctx context.Context, id uuid.UUID) error + AlertConfigCreate(ctx context.Context, arg AlertConfigCreateParams) (uuid.UUID, error) + AlertConfigDelete(ctx context.Context, id uuid.UUID) error + AlertConfigGet(ctx context.Context, id uuid.UUID) (VAlertConfig, error) + AlertConfigInstrumentCreateAssignment(ctx context.Context, arg AlertConfigInstrumentCreateAssignmentParams) error + 
AlertConfigInstrumentDeleteAssignmentsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + AlertConfigListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) + AlertConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) + AlertConfigListForProjectAlertType(ctx context.Context, arg AlertConfigListForProjectAlertTypeParams) ([]VAlertConfig, error) + AlertConfigListUpdateLastChecked(ctx context.Context) ([]VAlertConfig, error) + AlertConfigUpdate(ctx context.Context, arg AlertConfigUpdateParams) error + AlertConfigUpdateLastReminded(ctx context.Context, arg AlertConfigUpdateLastRemindedParams) error + AlertCreate(ctx context.Context, alertConfigID uuid.UUID) error + AlertCreateBatch(ctx context.Context, alertConfigID []uuid.UUID) *AlertCreateBatchBatchResults + AlertEmailSubscriptionCreate(ctx context.Context, arg AlertEmailSubscriptionCreateParams) error + AlertEmailSubscriptionDelete(ctx context.Context, arg AlertEmailSubscriptionDeleteParams) error + AlertEmailSubscritpionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + AlertGet(ctx context.Context, arg AlertGetParams) (AlertGetRow, error) + AlertListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlert, error) + AlertListForProfile(ctx context.Context, profileID uuid.UUID) ([]AlertListForProfileRow, error) + AlertListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlert, error) + AlertProfileSubscriptionCreate(ctx context.Context, arg AlertProfileSubscriptionCreateParams) error + AlertProfileSubscriptionCreateOnAnyConflictDoNothing(ctx context.Context, arg AlertProfileSubscriptionCreateOnAnyConflictDoNothingParams) error + AlertProfileSubscriptionDelete(ctx context.Context, arg AlertProfileSubscriptionDeleteParams) error + AlertProfileSubscritpionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + AlertReadCreate(ctx context.Context, arg AlertReadCreateParams) 
error + AlertReadDelete(ctx context.Context, arg AlertReadDeleteParams) error + AlertSubscriptionGet(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) + AlertSubscriptionGetForAlertConfigProfile(ctx context.Context, arg AlertSubscriptionGetForAlertConfigProfileParams) (AlertProfileSubscription, error) + AlertSubscriptionListForProfile(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) + AlertSubscriptionUpdateForProfile(ctx context.Context, arg AlertSubscriptionUpdateForProfileParams) error + AwareParameterList(ctx context.Context) ([]AwareParameterListRow, error) + AwarePlatformCreate(ctx context.Context, arg AwarePlatformCreateParams) error + AwarePlatformCreateBatch(ctx context.Context, arg []AwarePlatformCreateBatchParams) *AwarePlatformCreateBatchBatchResults + AwarePlatformParameterListEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) + CalculationCreate(ctx context.Context, arg CalculationCreateParams) error + CalculationCreateOrUpdate(ctx context.Context, arg CalculationCreateOrUpdateParams) error + CollectionGroupCreate(ctx context.Context, arg CollectionGroupCreateParams) (CollectionGroup, error) + CollectionGroupDelete(ctx context.Context, arg CollectionGroupDeleteParams) error + CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) + CollectionGroupListForProject(ctx context.Context, projectID uuid.UUID) ([]CollectionGroup, error) + CollectionGroupTimeseriesCreate(ctx context.Context, arg CollectionGroupTimeseriesCreateParams) error + CollectionGroupTimeseriesDelete(ctx context.Context, arg CollectionGroupTimeseriesDeleteParams) error + CollectionGroupTimeseriesUpdateSortOrder(ctx context.Context, arg CollectionGroupTimeseriesUpdateSortOrderParams) error + CollectionGroupUpdate(ctx context.Context, arg CollectionGroupUpdateParams) (CollectionGroup, error) + DataloggerCreate(ctx context.Context, arg DataloggerCreateParams) (uuid.UUID, error) + 
DataloggerDelete(ctx context.Context, arg DataloggerDeleteParams) error + DataloggerErrorCreate(ctx context.Context, arg DataloggerErrorCreateParams) error + DataloggerErrorCreateBatch(ctx context.Context, arg []DataloggerErrorCreateBatchParams) *DataloggerErrorCreateBatchBatchResults + DataloggerErrorDelete(ctx context.Context, arg DataloggerErrorDeleteParams) error + DataloggerGet(ctx context.Context, id uuid.UUID) (VDatalogger, error) + DataloggerGetActive(ctx context.Context, arg DataloggerGetActiveParams) (bool, error) + DataloggerGetExists(ctx context.Context, id uuid.UUID) (bool, error) + DataloggerGetForModelSn(ctx context.Context, arg DataloggerGetForModelSnParams) (VDatalogger, error) + DataloggerGetModelName(ctx context.Context, id uuid.UUID) (*string, error) + DataloggerHashCreate(ctx context.Context, arg DataloggerHashCreateParams) error + DataloggerHashGetForModelSn(ctx context.Context, arg DataloggerHashGetForModelSnParams) (string, error) + DataloggerHashUpdate(ctx context.Context, arg DataloggerHashUpdateParams) error + DataloggerList(ctx context.Context) ([]VDatalogger, error) + DataloggerListForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) + DataloggerTableDelete(ctx context.Context, id uuid.UUID) error + DataloggerTableGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) + DataloggerTableGetOrCreate(ctx context.Context, arg DataloggerTableGetOrCreateParams) (uuid.UUID, error) + DataloggerTablePreviewCreate(ctx context.Context, arg DataloggerTablePreviewCreateParams) error + DataloggerTablePreviewGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) + DataloggerTablePreviewUpdate(ctx context.Context, arg DataloggerTablePreviewUpdateParams) error + DataloggerTableUpdateNameIfEmpty(ctx context.Context, arg DataloggerTableUpdateNameIfEmptyParams) error + DataloggerUpdate(ctx context.Context, arg DataloggerUpdateParams) error + DataloggerUpdateTableNameBlank(ctx context.Context, id 
uuid.UUID) error + DataloggerUpdateUpdater(ctx context.Context, arg DataloggerUpdateUpdaterParams) error + DistrictList(ctx context.Context) ([]VDistrict, error) + DistrictRollupListEvaluationForProjectAlertConfig(ctx context.Context, arg DistrictRollupListEvaluationForProjectAlertConfigParams) ([]VDistrictRollup, error) + DistrictRollupListMeasurementForProjectAlertConfig(ctx context.Context, arg DistrictRollupListMeasurementForProjectAlertConfigParams) ([]VDistrictRollup, error) + DomainGroupList(ctx context.Context) ([]VDomainGroup, error) + DomainList(ctx context.Context) ([]VDomain, error) + EmailAutocompleteList(ctx context.Context, arg EmailAutocompleteListParams) ([]EmailAutocompleteListRow, error) + EmailDelete(ctx context.Context, id uuid.UUID) error + EmailGetOrCreate(ctx context.Context, email string) (uuid.UUID, error) + EquivalencyTableCreateOrUpdate(ctx context.Context, arg EquivalencyTableCreateOrUpdateParams) error + EquivalencyTableDelete(ctx context.Context, id uuid.UUID) error + EquivalencyTableDeleteForDataloggerTable(ctx context.Context, dataloggerTableID *uuid.UUID) error + EquivalencyTableGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) + EquivalencyTableTimeseriesGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) + EquivalencyTableUpdate(ctx context.Context, arg EquivalencyTableUpdateParams) error + EvaluationCreate(ctx context.Context, arg EvaluationCreateParams) (uuid.UUID, error) + EvaluationDelete(ctx context.Context, id uuid.UUID) error + EvaluationGet(ctx context.Context, id uuid.UUID) (VEvaluation, error) + EvaluationInstrumentCreate(ctx context.Context, arg EvaluationInstrumentCreateParams) error + EvaluationInstrumentCreateBatch(ctx context.Context, arg []EvaluationInstrumentCreateBatchParams) *EvaluationInstrumentCreateBatchBatchResults + EvaluationInstrumentDeleteForEvaluation(ctx context.Context, evaluationID *uuid.UUID) error + EvaluationListForInstrument(ctx context.Context, 
instrumentID *uuid.UUID) ([]VEvaluation, error) + EvaluationListForProject(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) + EvaluationListForProjectAlertConfig(ctx context.Context, arg EvaluationListForProjectAlertConfigParams) ([]VEvaluation, error) + EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error + HeartbeatCreate(ctx context.Context, argTime time.Time) (time.Time, error) + HeartbeatGetLatest(ctx context.Context) (time.Time, error) + HeartbeatList(ctx context.Context, resultLimit int32) ([]time.Time, error) + HomeGet(ctx context.Context) (HomeGetRow, error) + InclMeasurementListForInstrumentRange(ctx context.Context, arg InclMeasurementListForInstrumentRangeParams) ([]VInclMeasurement, error) + InclOptsCreate(ctx context.Context, arg InclOptsCreateParams) error + InclOptsCreateBatch(ctx context.Context, arg []InclOptsCreateBatchParams) *InclOptsCreateBatchBatchResults + InclOptsUpdate(ctx context.Context, arg InclOptsUpdateParams) error + InclOptsUpdateBatch(ctx context.Context, arg []InclOptsUpdateBatchParams) *InclOptsUpdateBatchBatchResults + InclSegmentCreate(ctx context.Context, arg InclSegmentCreateParams) error + InclSegmentCreateBatch(ctx context.Context, arg []InclSegmentCreateBatchParams) *InclSegmentCreateBatchBatchResults + InclSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error) + InclSegmentUpdate(ctx context.Context, arg InclSegmentUpdateParams) error + InclSegmentUpdateBatch(ctx context.Context, arg []InclSegmentUpdateBatchParams) *InclSegmentUpdateBatchBatchResults + InstrumentConstantCreate(ctx context.Context, arg InstrumentConstantCreateParams) error + InstrumentConstantCreateBatch(ctx context.Context, arg []InstrumentConstantCreateBatchParams) *InstrumentConstantCreateBatchBatchResults + InstrumentConstantDelete(ctx context.Context, arg InstrumentConstantDeleteParams) error + InstrumentConstantList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, 
error) + InstrumentCreate(ctx context.Context, arg InstrumentCreateParams) (InstrumentCreateRow, error) + InstrumentCreateBatch(ctx context.Context, arg []InstrumentCreateBatchParams) *InstrumentCreateBatchBatchResults + InstrumentDeleteFlag(ctx context.Context, arg InstrumentDeleteFlagParams) error + InstrumentGet(ctx context.Context, id uuid.UUID) (VInstrument, error) + InstrumentGetCount(ctx context.Context) (int64, error) + InstrumentGroupCreate(ctx context.Context, arg InstrumentGroupCreateParams) (InstrumentGroup, error) + InstrumentGroupCreateBatch(ctx context.Context, arg []InstrumentGroupCreateBatchParams) *InstrumentGroupCreateBatchBatchResults + InstrumentGroupDeleteFlag(ctx context.Context, id uuid.UUID) error + InstrumentGroupGet(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) + InstrumentGroupInstrumentCreate(ctx context.Context, arg InstrumentGroupInstrumentCreateParams) error + InstrumentGroupInstrumentDelete(ctx context.Context, arg InstrumentGroupInstrumentDeleteParams) error + InstrumentGroupList(ctx context.Context) ([]VInstrumentGroup, error) + InstrumentGroupListForProject(ctx context.Context, projectID *uuid.UUID) ([]VInstrumentGroup, error) + InstrumentGroupUpdate(ctx context.Context, arg InstrumentGroupUpdateParams) (InstrumentGroup, error) + InstrumentIDNameListByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]InstrumentIDNameListByIDsRow, error) + InstrumentList(ctx context.Context) ([]VInstrument, error) + InstrumentListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VInstrument, error) + InstrumentListForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) + InstrumentNoteCreate(ctx context.Context, arg InstrumentNoteCreateParams) (InstrumentNote, error) + InstrumentNoteCreateBatch(ctx context.Context, arg []InstrumentNoteCreateBatchParams) *InstrumentNoteCreateBatchBatchResults + InstrumentNoteDelete(ctx context.Context, id uuid.UUID) error + InstrumentNoteGet(ctx 
context.Context, id uuid.UUID) (InstrumentNote, error) + InstrumentNoteListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]InstrumentNote, error) + InstrumentNoteUpdate(ctx context.Context, arg InstrumentNoteUpdateParams) (InstrumentNote, error) + InstrumentStatusCreateOrUpdate(ctx context.Context, arg InstrumentStatusCreateOrUpdateParams) error + InstrumentStatusCreateOrUpdateBatch(ctx context.Context, arg []InstrumentStatusCreateOrUpdateBatchParams) *InstrumentStatusCreateOrUpdateBatchBatchResults + InstrumentStatusDelete(ctx context.Context, id uuid.UUID) error + InstrumentStatusGet(ctx context.Context, id uuid.UUID) (VInstrumentStatus, error) + InstrumentStatusListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInstrumentStatus, error) + InstrumentUpdate(ctx context.Context, arg InstrumentUpdateParams) error + InstrumentUpdateGeometry(ctx context.Context, arg InstrumentUpdateGeometryParams) (uuid.UUID, error) + IpiMeasurementListForInstrumentRange(ctx context.Context, arg IpiMeasurementListForInstrumentRangeParams) ([]VIpiMeasurement, error) + IpiOptsCreate(ctx context.Context, arg IpiOptsCreateParams) error + IpiOptsCreateBatch(ctx context.Context, arg []IpiOptsCreateBatchParams) *IpiOptsCreateBatchBatchResults + IpiOptsUpdate(ctx context.Context, arg IpiOptsUpdateParams) error + IpiOptsUpdateBatch(ctx context.Context, arg []IpiOptsUpdateBatchParams) *IpiOptsUpdateBatchBatchResults + IpiSegmentCreate(ctx context.Context, arg IpiSegmentCreateParams) error + IpiSegmentCreateBatch(ctx context.Context, arg []IpiSegmentCreateBatchParams) *IpiSegmentCreateBatchBatchResults + IpiSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) + IpiSegmentUpdate(ctx context.Context, arg IpiSegmentUpdateParams) error + IpiSegmentUpdateBatch(ctx context.Context, arg []IpiSegmentUpdateBatchParams) *IpiSegmentUpdateBatchBatchResults + PgTimezoneNamesList(ctx context.Context) ([]PgTimezoneNamesListRow, error) + 
PlotBullseyeConfigCreate(ctx context.Context, arg PlotBullseyeConfigCreateParams) error + PlotBullseyeConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error + PlotBullseyeConfigUpdate(ctx context.Context, arg PlotBullseyeConfigUpdateParams) error + PlotConfigCreate(ctx context.Context, arg PlotConfigCreateParams) (uuid.UUID, error) + PlotConfigCustomShapeCreate(ctx context.Context, arg PlotConfigCustomShapeCreateParams) error + PlotConfigCustomShapeCreateBatch(ctx context.Context, arg []PlotConfigCustomShapeCreateBatchParams) *PlotConfigCustomShapeCreateBatchBatchResults + PlotConfigCustomShapeDeleteForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) error + PlotConfigCustomShapeUpdate(ctx context.Context, arg PlotConfigCustomShapeUpdateParams) error + PlotConfigDelete(ctx context.Context, arg PlotConfigDeleteParams) error + PlotConfigGet(ctx context.Context, id uuid.UUID) (VPlotConfiguration, error) + PlotConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VPlotConfiguration, error) + PlotConfigMeasurementListBullseye(ctx context.Context, arg PlotConfigMeasurementListBullseyeParams) ([]PlotConfigMeasurementListBullseyeRow, error) + PlotConfigMeasurementListContour(ctx context.Context, arg PlotConfigMeasurementListContourParams) ([]PlotConfigMeasurementListContourRow, error) + PlotConfigScatterLineLayoutCreate(ctx context.Context, arg PlotConfigScatterLineLayoutCreateParams) error + PlotConfigScatterLineLayoutUpdate(ctx context.Context, arg PlotConfigScatterLineLayoutUpdateParams) error + PlotConfigSettingsCreate(ctx context.Context, arg PlotConfigSettingsCreateParams) error + PlotConfigSettingsDelete(ctx context.Context, id uuid.UUID) error + PlotConfigTimeseriesTraceCreate(ctx context.Context, arg PlotConfigTimeseriesTraceCreateParams) error + PlotConfigTimeseriesTraceDeleteForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) error + PlotConfigTimeseriesTraceUpdate(ctx context.Context, arg 
PlotConfigTimeseriesTraceUpdateParams) error + PlotConfigTimeseriesTracesCreateBatch(ctx context.Context, arg []PlotConfigTimeseriesTracesCreateBatchParams) *PlotConfigTimeseriesTracesCreateBatchBatchResults + PlotConfigUpdate(ctx context.Context, arg PlotConfigUpdateParams) error + PlotContourConfigCreate(ctx context.Context, arg PlotContourConfigCreateParams) error + PlotContourConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error + PlotContourConfigListTimeRange(ctx context.Context, arg PlotContourConfigListTimeRangeParams) ([]time.Time, error) + PlotContourConfigTimeseriesCreate(ctx context.Context, arg PlotContourConfigTimeseriesCreateParams) error + PlotContourConfigTimeseriesCreateBatch(ctx context.Context, arg []PlotContourConfigTimeseriesCreateBatchParams) *PlotContourConfigTimeseriesCreateBatchBatchResults + PlotContourConfigTimeseriesDeleteForPlotContourConfig(ctx context.Context, plotContourConfigID uuid.UUID) error + PlotContourConfigUpdate(ctx context.Context, arg PlotContourConfigUpdateParams) error + PlotProfileConfigCreate(ctx context.Context, arg PlotProfileConfigCreateParams) error + PlotProfileConfigUpdate(ctx context.Context, arg PlotProfileConfigUpdateParams) error + ProfileCreate(ctx context.Context, arg ProfileCreateParams) (ProfileCreateRow, error) + ProfileGetForEDIPI(ctx context.Context, edipi int64) (VProfile, error) + ProfileGetForEmail(ctx context.Context, email string) (VProfile, error) + ProfileGetForToken(ctx context.Context, tokenID string) (ProfileGetForTokenRow, error) + ProfileGetForUsername(ctx context.Context, username string) (VProfile, error) + ProfileProjectRoleCreate(ctx context.Context, arg ProfileProjectRoleCreateParams) (uuid.UUID, error) + ProfileProjectRoleDelete(ctx context.Context, arg ProfileProjectRoleDeleteParams) error + ProfileProjectRoleGet(ctx context.Context, id uuid.UUID) (ProfileProjectRoleGetRow, error) + ProfileProjectRoleGetIsAdmin(ctx context.Context, arg ProfileProjectRoleGetIsAdminParams) 
(bool, error) + ProfileProjectRoleGetIsMemberOrAdmin(ctx context.Context, arg ProfileProjectRoleGetIsMemberOrAdminParams) (bool, error) + ProfileProjectRoleListForProject(ctx context.Context, projectID uuid.UUID) ([]ProfileProjectRoleListForProjectRow, error) + ProfileTokenCreate(ctx context.Context, arg ProfileTokenCreateParams) (ProfileToken, error) + ProfileTokenDelete(ctx context.Context, arg ProfileTokenDeleteParams) error + ProfileTokenGet(ctx context.Context, tokenID string) (ProfileToken, error) + ProfileTokenList(ctx context.Context, profileID uuid.UUID) ([]ProfileTokenListRow, error) + ProfileUpdateForEDIPI(ctx context.Context, arg ProfileUpdateForEDIPIParams) error + ProfileUpdateForEmail(ctx context.Context, arg ProfileUpdateForEmailParams) error + ProfileUpdateForUsername(ctx context.Context, arg ProfileUpdateForUsernameParams) error + ProjectCreateBatch(ctx context.Context, arg []ProjectCreateBatchParams) *ProjectCreateBatchBatchResults + ProjectDeleteFlag(ctx context.Context, id uuid.UUID) error + ProjectGet(ctx context.Context, id uuid.UUID) (VProject, error) + ProjectGetCount(ctx context.Context) (int64, error) + ProjectInstrumentCreate(ctx context.Context, arg ProjectInstrumentCreateParams) error + ProjectInstrumentCreateBatch(ctx context.Context, arg []ProjectInstrumentCreateBatchParams) *ProjectInstrumentCreateBatchBatchResults + ProjectInstrumentDelete(ctx context.Context, arg ProjectInstrumentDeleteParams) error + ProjectInstrumentDeleteBatch(ctx context.Context, arg []ProjectInstrumentDeleteBatchParams) *ProjectInstrumentDeleteBatchBatchResults + ProjectInstrumentListCountByInstrument(ctx context.Context, instrumentIds []uuid.UUID) ([]ProjectInstrumentListCountByInstrumentRow, error) + ProjectInstrumentListForInstrumentNameProjects(ctx context.Context, arg ProjectInstrumentListForInstrumentNameProjectsParams) ([]string, error) + ProjectInstrumentListForInstrumentProjectsProfileAdmin(ctx context.Context, arg 
ProjectInstrumentListForInstrumentProjectsProfileAdminParams) ([]string, error) + ProjectInstrumentListForInstrumentsProfileAdmin(ctx context.Context, arg ProjectInstrumentListForInstrumentsProfileAdminParams) ([]ProjectInstrumentListForInstrumentsProfileAdminRow, error) + ProjectInstrumentListForProjectInstrumentNames(ctx context.Context, arg ProjectInstrumentListForProjectInstrumentNamesParams) ([]string, error) + ProjectInstrumentListProjectIDForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) + ProjectList(ctx context.Context) ([]VProject, error) + ProjectListForFederalID(ctx context.Context, federalID *string) ([]VProject, error) + ProjectListForNameSearch(ctx context.Context, arg ProjectListForNameSearchParams) ([]VProject, error) + ProjectListForProfileAdmin(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) + ProjectListForProfileRole(ctx context.Context, arg ProjectListForProfileRoleParams) ([]VProject, error) + ProjectUpdate(ctx context.Context, arg ProjectUpdateParams) (uuid.UUID, error) + ProjectUpdateImage(ctx context.Context, arg ProjectUpdateImageParams) error + ReportConfigCreate(ctx context.Context, arg ReportConfigCreateParams) (uuid.UUID, error) + ReportConfigDelete(ctx context.Context, id uuid.UUID) error + ReportConfigGet(ctx context.Context, id uuid.UUID) (VReportConfig, error) + ReportConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) + ReportConfigListForReportConfigWithPlotConfig(ctx context.Context, reportConfigID uuid.UUID) ([]VPlotConfiguration, error) + ReportConfigPlotConfigCreate(ctx context.Context, arg ReportConfigPlotConfigCreateParams) error + ReportConfigPlotConfigCreateBatch(ctx context.Context, arg []ReportConfigPlotConfigCreateBatchParams) *ReportConfigPlotConfigCreateBatchBatchResults + ReportConfigPlotConfigDelete(ctx context.Context, arg ReportConfigPlotConfigDeleteParams) error + ReportConfigPlotConfigDeleteBatch(ctx context.Context, arg 
[]ReportConfigPlotConfigDeleteBatchParams) *ReportConfigPlotConfigDeleteBatchBatchResults + ReportConfigPlotConfigDeleteForReportConfig(ctx context.Context, reportConfigID uuid.UUID) error + ReportConfigUpdate(ctx context.Context, arg ReportConfigUpdateParams) error + ReportDownloadJobCreate(ctx context.Context, arg ReportDownloadJobCreateParams) (ReportDownloadJob, error) + ReportDownloadJobGet(ctx context.Context, arg ReportDownloadJobGetParams) (ReportDownloadJob, error) + ReportDownloadJobUpdate(ctx context.Context, arg ReportDownloadJobUpdateParams) error + SaaMeasurementListForInstrumentRange(ctx context.Context, arg SaaMeasurementListForInstrumentRangeParams) ([]VSaaMeasurement, error) + SaaOptsCreate(ctx context.Context, arg SaaOptsCreateParams) error + SaaOptsCreateBatch(ctx context.Context, arg []SaaOptsCreateBatchParams) *SaaOptsCreateBatchBatchResults + SaaOptsUpdate(ctx context.Context, arg SaaOptsUpdateParams) error + SaaOptsUpdateBatch(ctx context.Context, arg []SaaOptsUpdateBatchParams) *SaaOptsUpdateBatchBatchResults + SaaSegmentCreate(ctx context.Context, arg SaaSegmentCreateParams) error + SaaSegmentCreateBatch(ctx context.Context, arg []SaaSegmentCreateBatchParams) *SaaSegmentCreateBatchBatchResults + SaaSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VSaaSegment, error) + SaaSegmentUpdate(ctx context.Context, arg SaaSegmentUpdateParams) error + SaaSegmentUpdateBatch(ctx context.Context, arg []SaaSegmentUpdateBatchParams) *SaaSegmentUpdateBatchBatchResults + SubmittalCreateNextEvaluation(ctx context.Context, id uuid.UUID) error + SubmittalCreateNextFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error + SubmittalCreateNextFromNewAlertConfigDate(ctx context.Context, arg SubmittalCreateNextFromNewAlertConfigDateParams) error + SubmittalListForAlertConfig(ctx context.Context, arg SubmittalListForAlertConfigParams) ([]VSubmittal, error) + SubmittalListForInstrument(ctx context.Context, arg 
SubmittalListForInstrumentParams) ([]VSubmittal, error) + SubmittalListForProject(ctx context.Context, arg SubmittalListForProjectParams) ([]VSubmittal, error) + SubmittalListIncompleteEvaluation(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) + SubmittalListIncompleteMeasurement(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) + SubmittalListUnverifiedMissing(ctx context.Context) ([]VSubmittal, error) + SubmittalUpdate(ctx context.Context, arg SubmittalUpdateParams) error + SubmittalUpdateCompleteEvaluation(ctx context.Context, id uuid.UUID) (Submittal, error) + SubmittalUpdateCompletionDateOrWarningSent(ctx context.Context, arg SubmittalUpdateCompletionDateOrWarningSentParams) error + SubmittalUpdateNextForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) + SubmittalUpdateVerifyMissing(ctx context.Context, id uuid.UUID) error + SubmittalUpdateVerifyMissingForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) error + TimeseriesComputedCreate(ctx context.Context, arg TimeseriesComputedCreateParams) (uuid.UUID, error) + TimeseriesComputedCreateOrUpdate(ctx context.Context, arg TimeseriesComputedCreateOrUpdateParams) error + TimeseriesComputedDelete(ctx context.Context, id uuid.UUID) error + TimeseriesComputedGet(ctx context.Context, id uuid.UUID) (TimeseriesComputedGetRow, error) + TimeseriesComputedListForInstrument(ctx context.Context, instrumentID *uuid.UUID) ([]TimeseriesComputedListForInstrumentRow, error) + TimeseriesCreate(ctx context.Context, arg TimeseriesCreateParams) (TimeseriesCreateRow, error) + TimeseriesCreateBatch(ctx context.Context, arg []TimeseriesCreateBatchParams) *TimeseriesCreateBatchBatchResults + TimeseriesCwmsCreate(ctx context.Context, arg TimeseriesCwmsCreateParams) error + TimeseriesCwmsCreateBatch(ctx context.Context, arg []TimeseriesCwmsCreateBatchParams) *TimeseriesCwmsCreateBatchBatchResults + TimeseriesCwmsGet(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) 
+ TimeseriesCwmsList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) + TimeseriesCwmsUpdate(ctx context.Context, arg TimeseriesCwmsUpdateParams) error + TimeseriesDelete(ctx context.Context, id uuid.UUID) error + TimeseriesGet(ctx context.Context, id uuid.UUID) (VTimeseries, error) + TimeseriesGetAllBelongToProject(ctx context.Context, arg TimeseriesGetAllBelongToProjectParams) (bool, error) + TimeseriesGetExistsStored(ctx context.Context, id uuid.UUID) (bool, error) + TimeseriesListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) + TimeseriesListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) + TimeseriesListForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) + TimeseriesListForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) + TimeseriesMeasurementCreate(ctx context.Context, arg TimeseriesMeasurementCreateParams) error + TimeseriesMeasurementCreateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateBatchParams) *TimeseriesMeasurementCreateBatchBatchResults + TimeseriesMeasurementCreateOrUpdate(ctx context.Context, arg TimeseriesMeasurementCreateOrUpdateParams) error + TimeseriesMeasurementCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateBatchParams) *TimeseriesMeasurementCreateOrUpdateBatchBatchResults + TimeseriesMeasurementDelete(ctx context.Context, arg TimeseriesMeasurementDeleteParams) error + TimeseriesMeasurementDeleteBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteBatchParams) *TimeseriesMeasurementDeleteBatchBatchResults + TimeseriesMeasurementDeleteRange(ctx context.Context, arg TimeseriesMeasurementDeleteRangeParams) error + TimeseriesMeasurementDeleteRangeBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteRangeBatchParams) *TimeseriesMeasurementDeleteRangeBatchBatchResults + TimeseriesMeasurementGetMostRecent(ctx context.Context, 
timeseriesID uuid.UUID) (TimeseriesMeasurement, error) + TimeseriesMeasurementListRange(ctx context.Context, arg TimeseriesMeasurementListRangeParams) ([]VTimeseriesMeasurement, error) + TimeseriesNoteCreate(ctx context.Context, arg TimeseriesNoteCreateParams) error + TimeseriesNoteCreateBatch(ctx context.Context, arg []TimeseriesNoteCreateBatchParams) *TimeseriesNoteCreateBatchBatchResults + TimeseriesNoteCreateOrUpdate(ctx context.Context, arg TimeseriesNoteCreateOrUpdateParams) error + TimeseriesNoteCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateBatchParams) *TimeseriesNoteCreateOrUpdateBatchBatchResults + TimeseriesNoteDelete(ctx context.Context, arg TimeseriesNoteDeleteParams) error + TimeseriesNoteDeleteBatch(ctx context.Context, arg []TimeseriesNoteDeleteBatchParams) *TimeseriesNoteDeleteBatchBatchResults + TimeseriesNoteDeleteRange(ctx context.Context, arg TimeseriesNoteDeleteRangeParams) error + TimeseriesNoteDeleteRangeBatch(ctx context.Context, arg []TimeseriesNoteDeleteRangeBatchParams) *TimeseriesNoteDeleteRangeBatchBatchResults + TimeseriesUpdate(ctx context.Context, arg TimeseriesUpdateParams) error + UnitsList(ctx context.Context) ([]VUnit, error) + UploaderConfigCreate(ctx context.Context, arg UploaderConfigCreateParams) (uuid.UUID, error) + UploaderConfigDelete(ctx context.Context, id uuid.UUID) error + UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) + UploaderConfigMappingCreateBatch(ctx context.Context, arg []UploaderConfigMappingCreateBatchParams) *UploaderConfigMappingCreateBatchBatchResults + UploaderConfigMappingDeleteForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error + UploaderConfigMappingList(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) + UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUpdateParams) error } var _ Querier = (*Queries)(nil) diff --git a/api/internal/db/report_config.sql_gen.go 
b/api/internal/db/report_config.sql_gen.go index acf75cf2..a3490fa8 100644 --- a/api/internal/db/report_config.sql_gen.go +++ b/api/internal/db/report_config.sql_gen.go @@ -12,21 +12,7 @@ import ( "github.com/google/uuid" ) -const assignReportConfigPlotConfig = `-- name: AssignReportConfigPlotConfig :exec -insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2) -` - -type AssignReportConfigPlotConfigParams struct { - ReportConfigID uuid.UUID `json:"report_config_id"` - PlotConfigID uuid.UUID `json:"plot_config_id"` -} - -func (q *Queries) AssignReportConfigPlotConfig(ctx context.Context, arg AssignReportConfigPlotConfigParams) error { - _, err := q.db.Exec(ctx, assignReportConfigPlotConfig, arg.ReportConfigID, arg.PlotConfigID) - return err -} - -const createReportConfig = `-- name: CreateReportConfig :one +const reportConfigCreate = `-- name: ReportConfigCreate :one insert into report_config ( name, slug, project_id, creator, description, date_range, date_range_enabled, show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled @@ -35,7 +21,7 @@ values ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) returning id ` -type CreateReportConfigParams struct { +type ReportConfigCreateParams struct { Name string `json:"name"` ProjectID uuid.UUID `json:"project_id"` Creator uuid.UUID `json:"creator"` @@ -48,8 +34,8 @@ type CreateReportConfigParams struct { ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` } -func (q *Queries) CreateReportConfig(ctx context.Context, arg CreateReportConfigParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, createReportConfig, +func (q *Queries) ReportConfigCreate(ctx context.Context, arg ReportConfigCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, reportConfigCreate, arg.Name, arg.ProjectID, arg.Creator, @@ -66,47 +52,21 @@ func (q *Queries) CreateReportConfig(ctx context.Context, arg CreateReportConfig return id, err } -const 
createReportDownloadJob = `-- name: CreateReportDownloadJob :one -insert into report_download_job (report_config_id, creator) values ($1, $2) returning id, report_config_id, creator, create_date, status, file_key, file_expiry, progress, progress_update_date -` - -type CreateReportDownloadJobParams struct { - ReportConfigID *uuid.UUID `json:"report_config_id"` - Creator uuid.UUID `json:"creator"` -} - -func (q *Queries) CreateReportDownloadJob(ctx context.Context, arg CreateReportDownloadJobParams) (ReportDownloadJob, error) { - row := q.db.QueryRow(ctx, createReportDownloadJob, arg.ReportConfigID, arg.Creator) - var i ReportDownloadJob - err := row.Scan( - &i.ID, - &i.ReportConfigID, - &i.Creator, - &i.CreateDate, - &i.Status, - &i.FileKey, - &i.FileExpiry, - &i.Progress, - &i.ProgressUpdateDate, - ) - return i, err -} - -const deleteReportConfig = `-- name: DeleteReportConfig :exec +const reportConfigDelete = `-- name: ReportConfigDelete :exec delete from report_config where id=$1 ` -func (q *Queries) DeleteReportConfig(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteReportConfig, id) +func (q *Queries) ReportConfigDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, reportConfigDelete, id) return err } -const getReportConfig = `-- name: GetReportConfig :one +const reportConfigGet = `-- name: ReportConfigGet :one select id, slug, name, description, project_id, project_name, district_name, creator, creator_username, create_date, updater, updater_username, update_date, plot_configs, global_overrides from v_report_config where id = $1 ` -func (q *Queries) GetReportConfig(ctx context.Context, id uuid.UUID) (VReportConfig, error) { - row := q.db.QueryRow(ctx, getReportConfig, id) +func (q *Queries) ReportConfigGet(ctx context.Context, id uuid.UUID) (VReportConfig, error) { + row := q.db.QueryRow(ctx, reportConfigGet, id) var i VReportConfig err := row.Scan( &i.ID, @@ -128,38 +88,12 @@ func (q *Queries) 
GetReportConfig(ctx context.Context, id uuid.UUID) (VReportCon return i, err } -const getReportDownloadJob = `-- name: GetReportDownloadJob :one -select id, report_config_id, creator, create_date, status, file_key, file_expiry, progress, progress_update_date from report_download_job where id=$1 and creator=$2 -` - -type GetReportDownloadJobParams struct { - ID uuid.UUID `json:"id"` - Creator uuid.UUID `json:"creator"` -} - -func (q *Queries) GetReportDownloadJob(ctx context.Context, arg GetReportDownloadJobParams) (ReportDownloadJob, error) { - row := q.db.QueryRow(ctx, getReportDownloadJob, arg.ID, arg.Creator) - var i ReportDownloadJob - err := row.Scan( - &i.ID, - &i.ReportConfigID, - &i.Creator, - &i.CreateDate, - &i.Status, - &i.FileKey, - &i.FileExpiry, - &i.Progress, - &i.ProgressUpdateDate, - ) - return i, err -} - -const listProjectReportConfigs = `-- name: ListProjectReportConfigs :many +const reportConfigListForProject = `-- name: ReportConfigListForProject :many select id, slug, name, description, project_id, project_name, district_name, creator, creator_username, create_date, updater, updater_username, update_date, plot_configs, global_overrides from v_report_config where project_id = $1 ` -func (q *Queries) ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) { - rows, err := q.db.Query(ctx, listProjectReportConfigs, projectID) +func (q *Queries) ReportConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) { + rows, err := q.db.Query(ctx, reportConfigListForProject, projectID) if err != nil { return nil, err } @@ -194,14 +128,14 @@ func (q *Queries) ListProjectReportConfigs(ctx context.Context, projectID uuid.U return items, nil } -const listReportConfigPlotConfigs = `-- name: ListReportConfigPlotConfigs :many +const reportConfigListForReportConfigWithPlotConfig = `-- name: ReportConfigListForReportConfigWithPlotConfig :many select id, slug, name, project_id, creator, create_date, 
updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where id = any( select plot_config_id from report_config_plot_config where report_config_id = $1 ) ` -func (q *Queries) ListReportConfigPlotConfigs(ctx context.Context, reportConfigID uuid.UUID) ([]VPlotConfiguration, error) { - rows, err := q.db.Query(ctx, listReportConfigPlotConfigs, reportConfigID) +func (q *Queries) ReportConfigListForReportConfigWithPlotConfig(ctx context.Context, reportConfigID uuid.UUID) ([]VPlotConfiguration, error) { + rows, err := q.db.Query(ctx, reportConfigListForReportConfigWithPlotConfig, reportConfigID) if err != nil { return nil, err } @@ -238,36 +172,50 @@ func (q *Queries) ListReportConfigPlotConfigs(ctx context.Context, reportConfigI return items, nil } -const unassignAllReportConfigPlotConfig = `-- name: UnassignAllReportConfigPlotConfig :exec -delete from report_config_plot_config where report_config_id=$1 +const reportConfigPlotConfigCreate = `-- name: ReportConfigPlotConfigCreate :exec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2) ` -func (q *Queries) UnassignAllReportConfigPlotConfig(ctx context.Context, reportConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, unassignAllReportConfigPlotConfig, reportConfigID) +type ReportConfigPlotConfigCreateParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) ReportConfigPlotConfigCreate(ctx context.Context, arg ReportConfigPlotConfigCreateParams) error { + _, err := q.db.Exec(ctx, reportConfigPlotConfigCreate, arg.ReportConfigID, arg.PlotConfigID) return err } -const unassignReportConfigPlotConfig = `-- name: UnassignReportConfigPlotConfig :exec +const reportConfigPlotConfigDelete = `-- name: ReportConfigPlotConfigDelete :exec delete from report_config_plot_config where report_config_id=$1 
and plot_config_id=$2 ` -type UnassignReportConfigPlotConfigParams struct { +type ReportConfigPlotConfigDeleteParams struct { ReportConfigID uuid.UUID `json:"report_config_id"` PlotConfigID uuid.UUID `json:"plot_config_id"` } -func (q *Queries) UnassignReportConfigPlotConfig(ctx context.Context, arg UnassignReportConfigPlotConfigParams) error { - _, err := q.db.Exec(ctx, unassignReportConfigPlotConfig, arg.ReportConfigID, arg.PlotConfigID) +func (q *Queries) ReportConfigPlotConfigDelete(ctx context.Context, arg ReportConfigPlotConfigDeleteParams) error { + _, err := q.db.Exec(ctx, reportConfigPlotConfigDelete, arg.ReportConfigID, arg.PlotConfigID) + return err +} + +const reportConfigPlotConfigDeleteForReportConfig = `-- name: ReportConfigPlotConfigDeleteForReportConfig :exec +delete from report_config_plot_config where report_config_id=$1 +` + +func (q *Queries) ReportConfigPlotConfigDeleteForReportConfig(ctx context.Context, reportConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, reportConfigPlotConfigDeleteForReportConfig, reportConfigID) return err } -const updateReportConfig = `-- name: UpdateReportConfig :exec +const reportConfigUpdate = `-- name: ReportConfigUpdate :exec update report_config set name=$2, updater=$3, update_date=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 where id=$1 ` -type UpdateReportConfigParams struct { +type ReportConfigUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Updater *uuid.UUID `json:"updater"` @@ -281,8 +229,8 @@ type UpdateReportConfigParams struct { ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` } -func (q *Queries) UpdateReportConfig(ctx context.Context, arg UpdateReportConfigParams) error { - _, err := q.db.Exec(ctx, updateReportConfig, +func (q *Queries) ReportConfigUpdate(ctx context.Context, arg ReportConfigUpdateParams) error { + _, err := q.db.Exec(ctx, 
reportConfigUpdate, arg.ID, arg.Name, arg.Updater, @@ -298,11 +246,63 @@ func (q *Queries) UpdateReportConfig(ctx context.Context, arg UpdateReportConfig return err } -const updateReportDownloadJob = `-- name: UpdateReportDownloadJob :exec +const reportDownloadJobCreate = `-- name: ReportDownloadJobCreate :one +insert into report_download_job (report_config_id, creator) values ($1, $2) returning id, report_config_id, creator, create_date, status, file_key, file_expiry, progress, progress_update_date +` + +type ReportDownloadJobCreateParams struct { + ReportConfigID *uuid.UUID `json:"report_config_id"` + Creator uuid.UUID `json:"creator"` +} + +func (q *Queries) ReportDownloadJobCreate(ctx context.Context, arg ReportDownloadJobCreateParams) (ReportDownloadJob, error) { + row := q.db.QueryRow(ctx, reportDownloadJobCreate, arg.ReportConfigID, arg.Creator) + var i ReportDownloadJob + err := row.Scan( + &i.ID, + &i.ReportConfigID, + &i.Creator, + &i.CreateDate, + &i.Status, + &i.FileKey, + &i.FileExpiry, + &i.Progress, + &i.ProgressUpdateDate, + ) + return i, err +} + +const reportDownloadJobGet = `-- name: ReportDownloadJobGet :one +select id, report_config_id, creator, create_date, status, file_key, file_expiry, progress, progress_update_date from report_download_job where id=$1 and creator=$2 +` + +type ReportDownloadJobGetParams struct { + ID uuid.UUID `json:"id"` + Creator uuid.UUID `json:"creator"` +} + +func (q *Queries) ReportDownloadJobGet(ctx context.Context, arg ReportDownloadJobGetParams) (ReportDownloadJob, error) { + row := q.db.QueryRow(ctx, reportDownloadJobGet, arg.ID, arg.Creator) + var i ReportDownloadJob + err := row.Scan( + &i.ID, + &i.ReportConfigID, + &i.Creator, + &i.CreateDate, + &i.Status, + &i.FileKey, + &i.FileExpiry, + &i.Progress, + &i.ProgressUpdateDate, + ) + return i, err +} + +const reportDownloadJobUpdate = `-- name: ReportDownloadJobUpdate :exec update report_download_job set status=$2, progress=$3, progress_update_date=$4, 
file_key=$5, file_expiry=$6 where id=$1 ` -type UpdateReportDownloadJobParams struct { +type ReportDownloadJobUpdateParams struct { ID uuid.UUID `json:"id"` Status JobStatus `json:"status"` Progress int32 `json:"progress"` @@ -311,8 +311,8 @@ type UpdateReportDownloadJobParams struct { FileExpiry *time.Time `json:"file_expiry"` } -func (q *Queries) UpdateReportDownloadJob(ctx context.Context, arg UpdateReportDownloadJobParams) error { - _, err := q.db.Exec(ctx, updateReportDownloadJob, +func (q *Queries) ReportDownloadJobUpdate(ctx context.Context, arg ReportDownloadJobUpdateParams) error { + _, err := q.db.Exec(ctx, reportDownloadJobUpdate, arg.ID, arg.Status, arg.Progress, diff --git a/api/internal/db/submittal.sql_gen.go b/api/internal/db/submittal.sql_gen.go index eff482e2..7ad72208 100644 --- a/api/internal/db/submittal.sql_gen.go +++ b/api/internal/db/submittal.sql_gen.go @@ -12,21 +12,21 @@ import ( "github.com/google/uuid" ) -const listAlertConfigSubmittals = `-- name: ListAlertConfigSubmittals :many +const submittalListForAlertConfig = `-- name: SubmittalListForAlertConfig :many select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, create_date, due_date, marked_as_missing, warning_sent from v_submittal where alert_config_id = $1 -and ($2 = false or (completion_date is null and not marked_as_missing)) +and ($2::boolean = false or (completion_date is null and not marked_as_missing)) order by due_date desc ` -type ListAlertConfigSubmittalsParams struct { - AlertConfigID uuid.UUID `json:"alert_config_id"` - ShowIncompleteMissing interface{} `json:"show_incomplete_missing"` +type SubmittalListForAlertConfigParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ShowIncompleteMissing bool `json:"show_incomplete_missing"` } -func (q *Queries) ListAlertConfigSubmittals(ctx context.Context, arg ListAlertConfigSubmittalsParams) ([]VSubmittal, error) { - 
rows, err := q.db.Query(ctx, listAlertConfigSubmittals, arg.AlertConfigID, arg.ShowIncompleteMissing) +func (q *Queries) SubmittalListForAlertConfig(ctx context.Context, arg SubmittalListForAlertConfigParams) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListForAlertConfig, arg.AlertConfigID, arg.ShowIncompleteMissing) if err != nil { return nil, err } @@ -59,22 +59,22 @@ func (q *Queries) ListAlertConfigSubmittals(ctx context.Context, arg ListAlertCo return items, nil } -const listInstrumentSubmittals = `-- name: ListInstrumentSubmittals :many +const submittalListForInstrument = `-- name: SubmittalListForInstrument :many select sub.id, sub.alert_config_id, sub.alert_config_name, sub.alert_type_id, sub.alert_type_name, sub.project_id, sub.submittal_status_id, sub.submittal_status_name, sub.completion_date, sub.create_date, sub.due_date, sub.marked_as_missing, sub.warning_sent from v_submittal sub inner join alert_config_instrument aci on aci.alert_config_id = sub.alert_config_id where aci.instrument_id = $1 -and ($2 = false or (completion_date is null and not marked_as_missing)) +and ($2::boolean = false or (completion_date is null and not marked_as_missing)) order by sub.due_date desc ` -type ListInstrumentSubmittalsParams struct { - InstrumentID uuid.UUID `json:"instrument_id"` - ShowIncompleteMissing interface{} `json:"show_incomplete_missing"` +type SubmittalListForInstrumentParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ShowIncompleteMissing bool `json:"show_incomplete_missing"` } -func (q *Queries) ListInstrumentSubmittals(ctx context.Context, arg ListInstrumentSubmittalsParams) ([]VSubmittal, error) { - rows, err := q.db.Query(ctx, listInstrumentSubmittals, arg.InstrumentID, arg.ShowIncompleteMissing) +func (q *Queries) SubmittalListForInstrument(ctx context.Context, arg SubmittalListForInstrumentParams) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListForInstrument, arg.InstrumentID, 
arg.ShowIncompleteMissing) if err != nil { return nil, err } @@ -107,21 +107,21 @@ func (q *Queries) ListInstrumentSubmittals(ctx context.Context, arg ListInstrume return items, nil } -const listProjectSubmittals = `-- name: ListProjectSubmittals :many +const submittalListForProject = `-- name: SubmittalListForProject :many select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, create_date, due_date, marked_as_missing, warning_sent from v_submittal where project_id = $1 -and ($2 = false or (completion_date is null and not marked_as_missing)) +and ($2::boolean = false or (completion_date is null and not marked_as_missing)) order by due_date desc, alert_type_name asc ` -type ListProjectSubmittalsParams struct { - ProjectID uuid.UUID `json:"project_id"` - ShowIncompleteMissing interface{} `json:"show_incomplete_missing"` +type SubmittalListForProjectParams struct { + ProjectID uuid.UUID `json:"project_id"` + ShowIncompleteMissing bool `json:"show_incomplete_missing"` } -func (q *Queries) ListProjectSubmittals(ctx context.Context, arg ListProjectSubmittalsParams) ([]VSubmittal, error) { - rows, err := q.db.Query(ctx, listProjectSubmittals, arg.ProjectID, arg.ShowIncompleteMissing) +func (q *Queries) SubmittalListForProject(ctx context.Context, arg SubmittalListForProjectParams) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListForProject, arg.ProjectID, arg.ShowIncompleteMissing) if err != nil { return nil, err } @@ -154,7 +154,7 @@ func (q *Queries) ListProjectSubmittals(ctx context.Context, arg ListProjectSubm return items, nil } -const listUnverifiedMissingSubmittals = `-- name: ListUnverifiedMissingSubmittals :many +const submittalListUnverifiedMissing = `-- name: SubmittalListUnverifiedMissing :many select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, 
create_date, due_date, marked_as_missing, warning_sent from v_submittal where completion_date is null @@ -162,8 +162,8 @@ and not marked_as_missing order by due_date desc ` -func (q *Queries) ListUnverifiedMissingSubmittals(ctx context.Context) ([]VSubmittal, error) { - rows, err := q.db.Query(ctx, listUnverifiedMissingSubmittals) +func (q *Queries) SubmittalListUnverifiedMissing(ctx context.Context) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListUnverifiedMissing) if err != nil { return nil, err } @@ -196,7 +196,7 @@ func (q *Queries) ListUnverifiedMissingSubmittals(ctx context.Context) ([]VSubmi return items, nil } -const updateSubmittal = `-- name: UpdateSubmittal :exec +const submittalUpdate = `-- name: SubmittalUpdate :exec update submittal set submittal_status_id = $2, completion_date = $3, @@ -204,15 +204,15 @@ update submittal set where id = $1 ` -type UpdateSubmittalParams struct { +type SubmittalUpdateParams struct { ID uuid.UUID `json:"id"` SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` CompletionDate *time.Time `json:"completion_date"` WarningSent bool `json:"warning_sent"` } -func (q *Queries) UpdateSubmittal(ctx context.Context, arg UpdateSubmittalParams) error { - _, err := q.db.Exec(ctx, updateSubmittal, +func (q *Queries) SubmittalUpdate(ctx context.Context, arg SubmittalUpdateParams) error { + _, err := q.db.Exec(ctx, submittalUpdate, arg.ID, arg.SubmittalStatusID, arg.CompletionDate, @@ -221,30 +221,30 @@ func (q *Queries) UpdateSubmittal(ctx context.Context, arg UpdateSubmittalParams return err } -const verifyMissingAlertConfigSubmittals = `-- name: VerifyMissingAlertConfigSubmittals :exec +const submittalUpdateVerifyMissing = `-- name: SubmittalUpdateVerifyMissing :exec update submittal set submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, marked_as_missing = true -where alert_config_id = $1 +where id = $1 and completion_date is null and now() > due_date ` -func (q *Queries) 
VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID *uuid.UUID) error { - _, err := q.db.Exec(ctx, verifyMissingAlertConfigSubmittals, alertConfigID) +func (q *Queries) SubmittalUpdateVerifyMissing(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, submittalUpdateVerifyMissing, id) return err } -const verifyMissingSubmittal = `-- name: VerifyMissingSubmittal :exec +const submittalUpdateVerifyMissingForAlertConfig = `-- name: SubmittalUpdateVerifyMissingForAlertConfig :exec update submittal set submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, marked_as_missing = true -where id = $1 +where alert_config_id = $1 and completion_date is null and now() > due_date ` -func (q *Queries) VerifyMissingSubmittal(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, verifyMissingSubmittal, id) +func (q *Queries) SubmittalUpdateVerifyMissingForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) error { + _, err := q.db.Exec(ctx, submittalUpdateVerifyMissingForAlertConfig, alertConfigID) return err } diff --git a/api/internal/db/timeseries.sql_gen.go b/api/internal/db/timeseries.sql_gen.go index 2eb55631..8779ba05 100644 --- a/api/internal/db/timeseries.sql_gen.go +++ b/api/internal/db/timeseries.sql_gen.go @@ -11,13 +11,13 @@ import ( "github.com/google/uuid" ) -const createTimeseries = `-- name: CreateTimeseries :one +const timeseriesCreate = `-- name: TimeseriesCreate :one insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) returning id, instrument_id, slug, name, parameter_id, unit_id, type ` -type CreateTimeseriesParams struct { +type TimeseriesCreateParams struct { InstrumentID *uuid.UUID `json:"instrument_id"` Name string `json:"name"` ParameterID uuid.UUID `json:"parameter_id"` @@ -25,7 +25,7 @@ type CreateTimeseriesParams struct { Type NullTimeseriesType `json:"type"` } -type CreateTimeseriesRow struct { +type 
TimeseriesCreateRow struct { ID uuid.UUID `json:"id"` InstrumentID *uuid.UUID `json:"instrument_id"` Slug string `json:"slug"` @@ -35,15 +35,15 @@ type CreateTimeseriesRow struct { Type NullTimeseriesType `json:"type"` } -func (q *Queries) CreateTimeseries(ctx context.Context, arg CreateTimeseriesParams) (CreateTimeseriesRow, error) { - row := q.db.QueryRow(ctx, createTimeseries, +func (q *Queries) TimeseriesCreate(ctx context.Context, arg TimeseriesCreateParams) (TimeseriesCreateRow, error) { + row := q.db.QueryRow(ctx, timeseriesCreate, arg.InstrumentID, arg.Name, arg.ParameterID, arg.UnitID, arg.Type, ) - var i CreateTimeseriesRow + var i TimeseriesCreateRow err := row.Scan( &i.ID, &i.InstrumentID, @@ -56,32 +56,21 @@ func (q *Queries) CreateTimeseries(ctx context.Context, arg CreateTimeseriesPara return i, err } -const deleteTimeseries = `-- name: DeleteTimeseries :exec +const timeseriesDelete = `-- name: TimeseriesDelete :exec delete from timeseries where id = $1 ` -func (q *Queries) DeleteTimeseries(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteTimeseries, id) +func (q *Queries) TimeseriesDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, timeseriesDelete, id) return err } -const getStoredTimeseriesExists = `-- name: GetStoredTimeseriesExists :one -select exists (select id from v_timeseries_stored where id = $1) -` - -func (q *Queries) GetStoredTimeseriesExists(ctx context.Context, id uuid.UUID) (bool, error) { - row := q.db.QueryRow(ctx, getStoredTimeseriesExists, id) - var exists bool - err := row.Scan(&exists) - return exists, err -} - -const getTimeseries = `-- name: GetTimeseries :one +const timeseriesGet = `-- name: TimeseriesGet :one select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries where id=$1 ` -func (q *Queries) GetTimeseries(ctx context.Context, id uuid.UUID) (VTimeseries, error) { - row := 
q.db.QueryRow(ctx, getTimeseries, id) +func (q *Queries) TimeseriesGet(ctx context.Context, id uuid.UUID) (VTimeseries, error) { + row := q.db.QueryRow(ctx, timeseriesGet, id) var i VTimeseries err := row.Scan( &i.ID, @@ -101,40 +90,49 @@ func (q *Queries) GetTimeseries(ctx context.Context, id uuid.UUID) (VTimeseries, return i, err } -const getTimeseriesProjectMap = `-- name: GetTimeseriesProjectMap :many -select timeseries_id, project_id -from v_timeseries_project_map -where timeseries_id in ($1::uuid[]) +const timeseriesGetAllBelongToProject = `-- name: TimeseriesGetAllBelongToProject :one +select not exists ( + select true + from timeseries ts + where not ts.instrument_id = any ( + select p.instrument_id + from project_instrument p + where p.project_id = $1 + ) + and ts.id = any($2::uuid[]) +) ` -func (q *Queries) GetTimeseriesProjectMap(ctx context.Context, timeseriesIds []uuid.UUID) ([]VTimeseriesProjectMap, error) { - rows, err := q.db.Query(ctx, getTimeseriesProjectMap, timeseriesIds) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VTimeseriesProjectMap{} - for rows.Next() { - var i VTimeseriesProjectMap - if err := rows.Scan(&i.TimeseriesID, &i.ProjectID); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +type TimeseriesGetAllBelongToProjectParams struct { + ProjectID uuid.UUID `json:"project_id"` + TimeseriesIds []uuid.UUID `json:"timeseries_ids"` } -const listInstrumentGroupTimeseries = `-- name: ListInstrumentGroupTimeseries :many -select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t -inner join instrument_group_instruments gi on gi.instrument_id = t.instrument_id -where gi.instrument_group_id = $1 +func (q *Queries) TimeseriesGetAllBelongToProject(ctx context.Context, arg 
TimeseriesGetAllBelongToProjectParams) (bool, error) { + row := q.db.QueryRow(ctx, timeseriesGetAllBelongToProject, arg.ProjectID, arg.TimeseriesIds) + var not_exists bool + err := row.Scan(¬_exists) + return not_exists, err +} + +const timeseriesGetExistsStored = `-- name: TimeseriesGetExistsStored :one +select exists (select id from v_timeseries_stored where id = $1) +` + +func (q *Queries) TimeseriesGetExistsStored(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, timeseriesGetExistsStored, id) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const timeseriesListForInstrument = `-- name: TimeseriesListForInstrument :many +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries +where instrument_id = $1 ` -func (q *Queries) ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) { - rows, err := q.db.Query(ctx, listInstrumentGroupTimeseries, instrumentGroupID) +func (q *Queries) TimeseriesListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, timeseriesListForInstrument, instrumentID) if err != nil { return nil, err } @@ -167,13 +165,14 @@ func (q *Queries) ListInstrumentGroupTimeseries(ctx context.Context, instrumentG return items, nil } -const listInstrumentTimeseries = `-- name: ListInstrumentTimeseries :many -select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries -where instrument_id = $1 +const timeseriesListForInstrumentGroup = `-- name: TimeseriesListForInstrumentGroup :many +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join instrument_group_instruments gi on 
gi.instrument_id = t.instrument_id +where gi.instrument_group_id = $1 ` -func (q *Queries) ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) { - rows, err := q.db.Query(ctx, listInstrumentTimeseries, instrumentID) +func (q *Queries) TimeseriesListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, timeseriesListForInstrumentGroup, instrumentGroupID) if err != nil { return nil, err } @@ -206,14 +205,14 @@ func (q *Queries) ListInstrumentTimeseries(ctx context.Context, instrumentID uui return items, nil } -const listPlotConfigTimeseries = `-- name: ListPlotConfigTimeseries :many +const timeseriesListForPlotConfig = `-- name: TimeseriesListForPlotConfig :many select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t inner join plot_configuration_timeseries_trace pct on pct.timeseries_id = t.id where pct.plot_configuration_id = $1 ` -func (q *Queries) ListPlotConfigTimeseries(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) { - rows, err := q.db.Query(ctx, listPlotConfigTimeseries, plotConfigurationID) +func (q *Queries) TimeseriesListForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, timeseriesListForPlotConfig, plotConfigurationID) if err != nil { return nil, err } @@ -246,14 +245,14 @@ func (q *Queries) ListPlotConfigTimeseries(ctx context.Context, plotConfiguratio return items, nil } -const listTimeseriesForProject = `-- name: ListTimeseriesForProject :many +const timeseriesListForProject = `-- name: TimeseriesListForProject :many select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t inner join 
project_instrument p on p.instrument_id = t.instrument_id where p.project_id = $1 ` -func (q *Queries) ListTimeseriesForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) { - rows, err := q.db.Query(ctx, listTimeseriesForProject, projectID) +func (q *Queries) TimeseriesListForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, timeseriesListForProject, projectID) if err != nil { return nil, err } @@ -286,13 +285,12 @@ func (q *Queries) ListTimeseriesForProject(ctx context.Context, projectID uuid.U return items, nil } -const updateTimeseries = `-- name: UpdateTimeseries :one +const timeseriesUpdate = `-- name: TimeseriesUpdate :exec update timeseries set name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 where id = $1 -returning id ` -type UpdateTimeseriesParams struct { +type TimeseriesUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` InstrumentID *uuid.UUID `json:"instrument_id"` @@ -300,15 +298,13 @@ type UpdateTimeseriesParams struct { UnitID uuid.UUID `json:"unit_id"` } -func (q *Queries) UpdateTimeseries(ctx context.Context, arg UpdateTimeseriesParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, updateTimeseries, +func (q *Queries) TimeseriesUpdate(ctx context.Context, arg TimeseriesUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesUpdate, arg.ID, arg.Name, arg.InstrumentID, arg.ParameterID, arg.UnitID, ) - var id uuid.UUID - err := row.Scan(&id) - return id, err + return err } diff --git a/api/internal/db/timeseries_calculated.sql_gen.go b/api/internal/db/timeseries_calculated.sql_gen.go index 51de6365..62311be3 100644 --- a/api/internal/db/timeseries_calculated.sql_gen.go +++ b/api/internal/db/timeseries_calculated.sql_gen.go @@ -11,27 +11,59 @@ import ( "github.com/google/uuid" ) -const createCalculatedTimeseries = `-- name: CreateCalculatedTimeseries :one -INSERT INTO timeseries ( +const calculationCreate = `-- name: 
CalculationCreate :exec +insert into calculation (timeseries_id, contents) values ($1,$2) +` + +type CalculationCreateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Contents *string `json:"contents"` +} + +func (q *Queries) CalculationCreate(ctx context.Context, arg CalculationCreateParams) error { + _, err := q.db.Exec(ctx, calculationCreate, arg.TimeseriesID, arg.Contents) + return err +} + +const calculationCreateOrUpdate = `-- name: CalculationCreateOrUpdate :exec +with p as ( + select contents from calculation where timeseries_id=$1 +) +insert into calculation (timeseries_id, contents) values ($1, $2) +on conflict (timeseries_id) do update set contents=coalesce(excluded.contents, p.contents) +` + +type CalculationCreateOrUpdateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Contents *string `json:"contents"` +} + +func (q *Queries) CalculationCreateOrUpdate(ctx context.Context, arg CalculationCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, calculationCreateOrUpdate, arg.TimeseriesID, arg.Contents) + return err +} + +const timeseriesComputedCreate = `-- name: TimeseriesComputedCreate :one +insert into timeseries ( instrument_id, parameter_id, unit_id, slug, name, type -) VALUES ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') -RETURNING id +) values ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') +returning id ` -type CreateCalculatedTimeseriesParams struct { +type TimeseriesComputedCreateParams struct { InstrumentID *uuid.UUID `json:"instrument_id"` ParameterID uuid.UUID `json:"parameter_id"` UnitID uuid.UUID `json:"unit_id"` Name string `json:"name"` } -func (q *Queries) CreateCalculatedTimeseries(ctx context.Context, arg CreateCalculatedTimeseriesParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, createCalculatedTimeseries, +func (q *Queries) TimeseriesComputedCreate(ctx context.Context, arg TimeseriesComputedCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, 
timeseriesComputedCreate, arg.InstrumentID, arg.ParameterID, arg.UnitID, @@ -42,21 +74,7 @@ func (q *Queries) CreateCalculatedTimeseries(ctx context.Context, arg CreateCalc return id, err } -const createCalculation = `-- name: CreateCalculation :exec -insert into calculation (timeseries_id, contents) values ($1,$2) -` - -type CreateCalculationParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - Contents *string `json:"contents"` -} - -func (q *Queries) CreateCalculation(ctx context.Context, arg CreateCalculationParams) error { - _, err := q.db.Exec(ctx, createCalculation, arg.TimeseriesID, arg.Contents) - return err -} - -const createOrUpdateCalculatedTimeseries = `-- name: CreateOrUpdateCalculatedTimeseries :exec +const timeseriesComputedCreateOrUpdate = `-- name: TimeseriesComputedCreateOrUpdate :exec with p as ( select id, slug, name, instrument_id, parameter_id, unit_id, type from timeseries where id=$1 @@ -79,7 +97,7 @@ on conflict (id) do update set type='computed' ` -type CreateOrUpdateCalculatedTimeseriesParams struct { +type TimeseriesComputedCreateOrUpdateParams struct { ID uuid.UUID `json:"id"` InstrumentID *uuid.UUID `json:"instrument_id"` ParameterID uuid.UUID `json:"parameter_id"` @@ -87,8 +105,8 @@ type CreateOrUpdateCalculatedTimeseriesParams struct { Name string `json:"name"` } -func (q *Queries) CreateOrUpdateCalculatedTimeseries(ctx context.Context, arg CreateOrUpdateCalculatedTimeseriesParams) error { - _, err := q.db.Exec(ctx, createOrUpdateCalculatedTimeseries, +func (q *Queries) TimeseriesComputedCreateOrUpdate(ctx context.Context, arg TimeseriesComputedCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesComputedCreateOrUpdate, arg.ID, arg.InstrumentID, arg.ParameterID, @@ -98,34 +116,16 @@ func (q *Queries) CreateOrUpdateCalculatedTimeseries(ctx context.Context, arg Cr return err } -const createOrUpdateCalculation = `-- name: CreateOrUpdateCalculation :exec -with p as ( - select contents from calculation where 
timeseries_id=$1 -) -insert into calculation (timeseries_id, contents) values ($1, $2) -on conflict (timeseries_id) do update set contents=coalesce(excluded.contents, p.contents) -` - -type CreateOrUpdateCalculationParams struct { - TimeseriesID uuid.UUID `json:"timeseries_id"` - Contents *string `json:"contents"` -} - -func (q *Queries) CreateOrUpdateCalculation(ctx context.Context, arg CreateOrUpdateCalculationParams) error { - _, err := q.db.Exec(ctx, createOrUpdateCalculation, arg.TimeseriesID, arg.Contents) - return err -} - -const deleteCalculatedTimeseries = `-- name: DeleteCalculatedTimeseries :exec +const timeseriesComputedDelete = `-- name: TimeseriesComputedDelete :exec delete from timeseries where id = $1 and id in (select timeseries_id from calculation) ` -func (q *Queries) DeleteCalculatedTimeseries(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteCalculatedTimeseries, id) +func (q *Queries) TimeseriesComputedDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, timeseriesComputedDelete, id) return err } -const getCalculatedTimeseries = `-- name: GetCalculatedTimeseries :one +const timeseriesComputedGet = `-- name: TimeseriesComputedGet :one select id, instrument_id, @@ -135,10 +135,10 @@ select name as formula_name, coalesce(contents, '') as formula from v_timeseries_computed -where id=$1 +where id = $1 ` -type GetCalculatedTimeseriesRow struct { +type TimeseriesComputedGetRow struct { ID uuid.UUID `json:"id"` InstrumentID *uuid.UUID `json:"instrument_id"` ParameterID uuid.UUID `json:"parameter_id"` @@ -148,9 +148,9 @@ type GetCalculatedTimeseriesRow struct { Formula string `json:"formula"` } -func (q *Queries) GetCalculatedTimeseries(ctx context.Context, id uuid.UUID) (GetCalculatedTimeseriesRow, error) { - row := q.db.QueryRow(ctx, getCalculatedTimeseries, id) - var i GetCalculatedTimeseriesRow +func (q *Queries) TimeseriesComputedGet(ctx context.Context, id uuid.UUID) (TimeseriesComputedGetRow, 
error) { + row := q.db.QueryRow(ctx, timeseriesComputedGet, id) + var i TimeseriesComputedGetRow err := row.Scan( &i.ID, &i.InstrumentID, @@ -162,3 +162,54 @@ func (q *Queries) GetCalculatedTimeseries(ctx context.Context, id uuid.UUID) (Ge ) return i, err } + +const timeseriesComputedListForInstrument = `-- name: TimeseriesComputedListForInstrument :many +select + id, + instrument_id, + parameter_id, + unit_id, + slug, + name as formula_name, + coalesce(contents, '') as formula +from v_timeseries_computed +where instrument_id = $1 +` + +type TimeseriesComputedListForInstrumentRow struct { + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Slug string `json:"slug"` + FormulaName string `json:"formula_name"` + Formula string `json:"formula"` +} + +func (q *Queries) TimeseriesComputedListForInstrument(ctx context.Context, instrumentID *uuid.UUID) ([]TimeseriesComputedListForInstrumentRow, error) { + rows, err := q.db.Query(ctx, timeseriesComputedListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []TimeseriesComputedListForInstrumentRow{} + for rows.Next() { + var i TimeseriesComputedListForInstrumentRow + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.ParameterID, + &i.UnitID, + &i.Slug, + &i.FormulaName, + &i.Formula, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/timeseries_cwms.sql_gen.go b/api/internal/db/timeseries_cwms.sql_gen.go index 468caa47..b5729e03 100644 --- a/api/internal/db/timeseries_cwms.sql_gen.go +++ b/api/internal/db/timeseries_cwms.sql_gen.go @@ -12,12 +12,12 @@ import ( "github.com/google/uuid" ) -const createTimeseriesCwms = `-- name: CreateTimeseriesCwms :exec +const timeseriesCwmsCreate = `-- name: TimeseriesCwmsCreate :exec insert into 
timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values ($1, $2, $3, $4, $5) ` -type CreateTimeseriesCwmsParams struct { +type TimeseriesCwmsCreateParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` CwmsTimeseriesID string `json:"cwms_timeseries_id"` CwmsOfficeID string `json:"cwms_office_id"` @@ -25,8 +25,8 @@ type CreateTimeseriesCwmsParams struct { CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` } -func (q *Queries) CreateTimeseriesCwms(ctx context.Context, arg CreateTimeseriesCwmsParams) error { - _, err := q.db.Exec(ctx, createTimeseriesCwms, +func (q *Queries) TimeseriesCwmsCreate(ctx context.Context, arg TimeseriesCwmsCreateParams) error { + _, err := q.db.Exec(ctx, timeseriesCwmsCreate, arg.TimeseriesID, arg.CwmsTimeseriesID, arg.CwmsOfficeID, @@ -36,13 +36,13 @@ func (q *Queries) CreateTimeseriesCwms(ctx context.Context, arg CreateTimeseries return err } -const getTimeseriesCwms = `-- name: GetTimeseriesCwms :one +const timeseriesCwmsGet = `-- name: TimeseriesCwmsGet :one select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time from v_timeseries_cwms where id = $1 ` -func (q *Queries) GetTimeseriesCwms(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) { - row := q.db.QueryRow(ctx, getTimeseriesCwms, id) +func (q *Queries) TimeseriesCwmsGet(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) { + row := q.db.QueryRow(ctx, timeseriesCwmsGet, id) var i VTimeseriesCwm err := row.Scan( &i.ID, @@ -66,13 +66,13 @@ func (q *Queries) GetTimeseriesCwms(ctx context.Context, id uuid.UUID) (VTimeser return i, err } -const listTimeseriesCwms = `-- name: ListTimeseriesCwms :many +const timeseriesCwmsList = `-- name: TimeseriesCwmsList :many select id, slug, name, type, is_computed, variable, 
instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time from v_timeseries_cwms where instrument_id = $1 ` -func (q *Queries) ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) { - rows, err := q.db.Query(ctx, listTimeseriesCwms, instrumentID) +func (q *Queries) TimeseriesCwmsList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) { + rows, err := q.db.Query(ctx, timeseriesCwmsList, instrumentID) if err != nil { return nil, err } @@ -109,7 +109,7 @@ func (q *Queries) ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID return items, nil } -const updateTimeseriesCwms = `-- name: UpdateTimeseriesCwms :exec +const timeseriesCwmsUpdate = `-- name: TimeseriesCwmsUpdate :exec update timeseries_cwms set cwms_timeseries_id=$2, cwms_office_id=$3, @@ -118,7 +118,7 @@ update timeseries_cwms set where timeseries_id=$1 ` -type UpdateTimeseriesCwmsParams struct { +type TimeseriesCwmsUpdateParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` CwmsTimeseriesID string `json:"cwms_timeseries_id"` CwmsOfficeID string `json:"cwms_office_id"` @@ -126,8 +126,8 @@ type UpdateTimeseriesCwmsParams struct { CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` } -func (q *Queries) UpdateTimeseriesCwms(ctx context.Context, arg UpdateTimeseriesCwmsParams) error { - _, err := q.db.Exec(ctx, updateTimeseriesCwms, +func (q *Queries) TimeseriesCwmsUpdate(ctx context.Context, arg TimeseriesCwmsUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesCwmsUpdate, arg.TimeseriesID, arg.CwmsTimeseriesID, arg.CwmsOfficeID, diff --git a/api/internal/db/timeseries_process.manual.go b/api/internal/db/timeseries_process.manual.go new file mode 100644 index 00000000..02bc3898 --- /dev/null +++ b/api/internal/db/timeseries_process.manual.go @@ -0,0 +1,450 @@ +package db + +import ( + "context" + 
"encoding/json" + "fmt" + "math" + "strconv" + "strings" + "time" + + "github.com/Knetic/govaluate" + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/tidwall/btree" +) + +type ProcessTimeseriesInfo struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + Variable string `json:"variable" db:"variable"` + IsComputed bool `json:"is_computed" db:"is_computed"` + Formula *string `json:"formula" db:"formula"` +} + +type DBProcessTimeseries struct { + ProcessTimeseriesInfo + Measurements string `json:"measurements" db:"measurements"` + NextMeasurementLow *string `json:"next_measurement_low" db:"next_measurement_low"` + NextMeasurementHigh *string `json:"next_measurement_high" db:"next_measurement_high"` +} + +type ProcessTimeseries struct { + ProcessTimeseriesInfo + Measurements []ProcessMeasurement `json:"measurements" db:"measurements"` + NextMeasurementLow *ProcessMeasurement `json:"next_measurement_low" db:"next_measurement_low"` + NextMeasurementHigh *ProcessMeasurement `json:"next_measurement_high" db:"next_measurement_high"` + TimeWindow util.TimeWindow `json:"time_window" db:"-"` +} + +type ProcessMeasurementCollection struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Items []ProcessMeasurement `json:"items"` +} + +type ProcessTimeseriesResponseCollection []ProcessTimeseries + +type ProcessMeasurement struct { + Time time.Time `json:"time" db:"time"` + Value float64 `json:"value" db:"value"` + Error string `json:"error,omitempty" db:"-"` +} + +func (m ProcessMeasurement) Lean() map[time.Time]float64 { + return map[time.Time]float64{m.Time: m.Value} +} + +// ProcessMeasurementFilter for conveniently passsing SQL query paramters to functions +type ProcessMeasurementFilter struct { + TimeseriesID *uuid.UUID `db:"timeseries_id"` + InstrumentID *uuid.UUID `db:"instrument_id"` + 
InstrumentGroupID *uuid.UUID `db:"instrument_group_id"` + InstrumentIDs []uuid.UUID `db:"instrument_ids"` + TimeseriesIDs []uuid.UUID `db:"timeseries_ids"` + After time.Time `db:"after"` + Before time.Time `db:"before"` +} + +type TimeseriesMeasurementCollectionCollection struct { + Items []MeasurementCollection +} + +func (cc *TimeseriesMeasurementCollectionCollection) TimeseriesIDs() map[uuid.UUID]struct{} { + dd := make(map[uuid.UUID]struct{}) + for _, item := range cc.Items { + dd[item.TimeseriesID] = struct{}{} + } + return dd +} + +type Measurement struct { + VTimeseriesMeasurement + Error string `json:"error,omitempty"` +} + +type FloatNanInf float64 + +func (j FloatNanInf) MarshalJSON() ([]byte, error) { + if math.IsNaN(float64(j)) || math.IsInf(float64(j), 0) { + return []byte("null"), nil + } + + return []byte(fmt.Sprintf("%f", float64(j))), nil +} + +func (j *FloatNanInf) UnmarshalJSON(v []byte) error { + switch strings.ToLower(string(v)) { + case `"nan"`, "nan", "", "null", "undefined": + *j = FloatNanInf(math.NaN()) + case `"inf"`, "inf": + *j = FloatNanInf(math.Inf(1)) + default: + var fv float64 + if err := json.Unmarshal(v, &fv); err != nil { + *j = FloatNanInf(math.NaN()) + return nil + } + *j = FloatNanInf(fv) + } + return nil +} + +type MeasurementCollection struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Items []Measurement `json:"items"` +} + +// BTreeNode represents node for btree used for computing timeseries +type BTreeNode struct { + Key time.Time + Value map[string]interface{} +} + +type MeasurementCollectionLean struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Items []MeasurementLean `json:"items"` +} + +type MeasurementLean map[time.Time]float64 + +func (ml MeasurementLean) getTime() time.Time { + var t time.Time + for k := range ml { + t = k + } + return t +} + +func (ml MeasurementLean) getValue() float64 { + var m float64 + for _, v := range ml { + m = v + } + return m +} + 
+func (mrc *ProcessTimeseriesResponseCollection) GroupByInstrument(threshold int) (map[uuid.UUID][]MeasurementCollectionLean, error) { + if len(*mrc) == 0 { + return make(map[uuid.UUID][]MeasurementCollectionLean), nil + } + + tmp := make(map[uuid.UUID]map[uuid.UUID][]MeasurementLean) + + for _, t := range *mrc { + if _, hasInstrument := tmp[t.InstrumentID]; !hasInstrument { + tmp[t.InstrumentID] = make(map[uuid.UUID][]MeasurementLean, 0) + } + if _, hasTimeseries := tmp[t.InstrumentID][t.TimeseriesID]; !hasTimeseries { + tmp[t.InstrumentID][t.TimeseriesID] = make([]MeasurementLean, 0) + } + for _, m := range t.Measurements { + tmp[t.InstrumentID][t.TimeseriesID] = append(tmp[t.InstrumentID][t.TimeseriesID], MeasurementLean{m.Time: m.Value}) + } + } + + res := make(map[uuid.UUID][]MeasurementCollectionLean) + + for instrumentID := range tmp { + res[instrumentID] = make([]MeasurementCollectionLean, 0) + + for tsID := range tmp[instrumentID] { + res[instrumentID] = append(res[instrumentID], + MeasurementCollectionLean{ + TimeseriesID: tsID, + Items: LTTB(tmp[instrumentID][tsID], threshold), + }, + ) + } + } + + return res, nil +} + +func (mrc *ProcessTimeseriesResponseCollection) CollectSingleTimeseries(threshold int, tsID uuid.UUID) (MeasurementCollection, error) { + if len(*mrc) == 0 { + return MeasurementCollection{ + TimeseriesID: tsID, + Items: make([]Measurement, 0), + }, nil + } + + for _, t := range *mrc { + if t.TimeseriesID == tsID { + mmts := make([]Measurement, len(t.Measurements)) + for i, m := range t.Measurements { + mmts[i] = Measurement{ + VTimeseriesMeasurement: VTimeseriesMeasurement{ + TimeseriesID: t.TimeseriesID, + Time: m.Time, + Value: m.Value, + }, + Error: m.Error, + } + } + return MeasurementCollection{TimeseriesID: t.TimeseriesID, Items: LTTB(mmts, threshold)}, nil + } + } + + return MeasurementCollection{}, fmt.Errorf("requested timeseries does not match any in the result") +} + +// SelectMeasurements returns measurements for the 
timeseries specified in the filter +func (q *Queries) ProcessMeasurementListDynamic(ctx context.Context, f ProcessMeasurementFilter) (ProcessTimeseriesResponseCollection, error) { + tss, err := queryTimeseriesMeasurements(ctx, q, f) + if err != nil { + return tss, err + } + tss, err = processLOCF(tss) + if err != nil { + return tss, err + } + return tss, nil +} + +// collectAggregate creates a btree of all sorted times (key) and measurements (value; as variable map) from an array of Timeseries +func collectAggregate(tss *ProcessTimeseriesResponseCollection) *btree.BTreeG[BTreeNode] { + // Get unique set of all measurement times of timeseries dependencies for non-regularized values + btm := btree.NewBTreeG(func(a, b BTreeNode) bool { return a.Key.Before(b.Key) }) + for _, ts := range *tss { + if ts.NextMeasurementLow != nil { + if item, exists := btm.Get(BTreeNode{Key: ts.NextMeasurementLow.Time}); !exists { + btm.Set(BTreeNode{Key: ts.NextMeasurementLow.Time, Value: map[string]interface{}{ts.Variable: ts.NextMeasurementLow.Value}}) + } else { + item.Value[ts.Variable] = ts.NextMeasurementLow.Value + btm.Set(item) + } + } + for _, m := range ts.Measurements { + if item, exists := btm.Get(BTreeNode{Key: m.Time}); !exists { + btm.Set(BTreeNode{Key: m.Time, Value: map[string]interface{}{ts.Variable: m.Value}}) + } else { + item.Value[ts.Variable] = m.Value + btm.Set(item) + } + } + if ts.NextMeasurementHigh != nil { + if item, exists := btm.Get(BTreeNode{Key: ts.NextMeasurementHigh.Time}); !exists { + btm.Set(BTreeNode{Key: ts.NextMeasurementHigh.Time, Value: map[string]interface{}{ts.Variable: ts.NextMeasurementHigh.Value}}) + } else { + item.Value[ts.Variable] = ts.NextMeasurementHigh.Value + btm.Set(item) + } + } + } + return btm +} + +// processLOCF calculates computed timeseries using "Last-Observation-Carried-Forward" algorithm +func processLOCF(tss ProcessTimeseriesResponseCollection) (ProcessTimeseriesResponseCollection, error) { + tssFinal := 
make(ProcessTimeseriesResponseCollection, 0) + var variableMap *btree.BTreeG[BTreeNode] + // Check if any computed timeseries present, collect aggregates used for calculations if so + for _, ts := range tss { + if ts.IsComputed { + variableMap = collectAggregate(&tss) + break + } + } + // Add any stored timeseries to the result + // Do calculations for computed timeseries and add to result + for _, ts := range tss { + // Array of existing measurements + a1 := make([]ProcessMeasurement, 0) + if ts.NextMeasurementLow != nil { + a1 = append(a1, *ts.NextMeasurementLow) + } + a1 = append(a1, ts.Measurements...) + if ts.NextMeasurementHigh != nil { + a1 = append(a1, *ts.NextMeasurementHigh) + } + + // Could do some additional checks before adding, like if the + // timeseries was actual requested or if it was just in the result as a + // dependency of the computed timeseries, just returning them all for now + if !ts.IsComputed { + tssFinal = append(tssFinal, ProcessTimeseries{ + ProcessTimeseriesInfo: ts.ProcessTimeseriesInfo, + Measurements: a1, + TimeWindow: ts.TimeWindow, + }) + continue + } + + // By now, all of the stored timeseries have been processed; + // the query is ordered in a way that priortizes stored timeseries + expr, err := govaluate.NewEvaluableExpression(*ts.Formula) + if err != nil { + continue + } + + // Do calculations + remember := make(map[string]interface{}) + a2 := make([]ProcessMeasurement, 0) + + it := variableMap.Iter() + for it.Next() { + item := it.Item() + + // fill in any missing gaps of data + for k, v := range remember { + if _, exists := item.Value[k]; !exists { + item.Value[k] = v + } + } + // Add/Update the most recent values + for k, v := range item.Value { + remember[k] = v + } + + val, err := expr.Evaluate(item.Value) + if err != nil { + continue + } + val64, err := strconv.ParseFloat(fmt.Sprint(val), 64) + if err != nil { + continue + } + + a2 = append(a2, ProcessMeasurement{Time: item.Key, Value: val64}) + } + it.Release() + + 
tssFinal = append(tssFinal, ProcessTimeseries{ + ProcessTimeseriesInfo: ts.ProcessTimeseriesInfo, + Measurements: a2, + TimeWindow: ts.TimeWindow, + }) + } + + return tssFinal, nil +} + +// queryTimeseriesMeasurements selects stored measurements and dependencies for computed measurements +func queryTimeseriesMeasurements(ctx context.Context, q *Queries, f ProcessMeasurementFilter) (ProcessTimeseriesResponseCollection, error) { + var filterSQL string + var filterArg interface{} + // short circuiting before executing SQL query greatly improves query perfomance, + // rather than adding all parameters to the query with logical OR + switch { + case f.TimeseriesID != nil: + filterSQL = `id=$1` + filterArg = f.TimeseriesID + case f.InstrumentID != nil: + filterSQL = `instrument_id=$1` + filterArg = f.InstrumentID + case f.InstrumentGroupID != nil: + filterSQL = ` + instrument_id = any( + SELECT instrument_id + FROM instrument_group_instruments + WHERE instrument_group_id=$1 + )` + filterArg = f.InstrumentGroupID + case len(f.InstrumentIDs) > 0: + filterSQL = `instrument_id = any($1)` + filterArg = f.InstrumentIDs + case len(f.TimeseriesIDs) > 0: + filterSQL = `id = any($1)` + filterArg = f.TimeseriesIDs + default: + return nil, fmt.Errorf("must supply valid filter for timeseries_measurement query") + } + + listTimeseriesMeasurments := ` + with required_timeseries as ( + ( + select id + from v_timeseries_stored + where ` + filterSQL + ` + ) + union all + ( + select dependency_timeseries_id as id + from v_timeseries_dependency + where ` + filterSQL + ` + ) + ), next_low as ( + select nlm.timeseries_id as timeseries_id, nlm.time, m1.value + from ( + select timeseries_id, max(time) as time + from timeseries_measurement + where timeseries_id in (select id from required_timeseries) and time < $2 + group by timeseries_id + ) nlm + inner join timeseries_measurement m1 on m1.time = nlm.time and m1.timeseries_id = nlm.timeseries_id + ), next_high as ( + select nhm.timeseries_id as 
timeseries_id, nhm.time, m2.value + from ( + select timeseries_id, min(time) as time + from timeseries_measurement + where timeseries_id in (select id from required_timeseries) and time > $3 + group by timeseries_id + ) nhm + inner join timeseries_measurement m2 on m2.time = nhm.time and m2.timeseries_id = nhm.timeseries_id + ) + ( + select + rt.id timeseries_id, + ts.instrument_id, + i.slug || '.' || ts.slug variable, + false is_computed, + null formula, + coalesce(( + select array_agg(time, value order by time asc) + from timeseries_measurement + where timeseries_id = rt.id and time >= $2 and time <= $3 + ), '{}') measurements, + (select nl.time, nl.value) next_measurement_low, + (select nh.time, nh.value) next_measurement_high + from required_timeseries rt + inner join timeseries ts on ts.id = rt.id + inner join instrument i on i.id = ts.instrument_id + left join next_low nl on nl.timeseries_id = rt.id + left join next_high nh on nh.timeseries_id = rt.id + ) + union all + ( + select + id timeseries_id, + instrument_id, + slug variable, + true is_computed, + contents formula, + array[] measurements, + null next_measurement_low, + null next_measurement_high + from v_timeseries_computed + where ` + filterSQL + ` + and contents is not null + ) + order by is_computed + ` + + rows, err := q.db.Query(ctx, listTimeseriesMeasurments, filterArg, f.After, f.Before) + if err != nil { + return make(ProcessTimeseriesResponseCollection, 0), err + } + return pgx.CollectRows[ProcessTimeseries](rows, pgx.RowToStructByNameLax) +} diff --git a/api/internal/db/unit.sql_gen.go b/api/internal/db/unit.sql_gen.go index 9a1010c9..930c2f45 100644 --- a/api/internal/db/unit.sql_gen.go +++ b/api/internal/db/unit.sql_gen.go @@ -9,14 +9,14 @@ import ( "context" ) -const listUnits = `-- name: ListUnits :many +const unitsList = `-- name: UnitsList :many select id, name, abbreviation, unit_family_id, unit_family, measure_id, measure from v_unit order by name ` -func (q *Queries) ListUnits(ctx 
context.Context) ([]VUnit, error) { - rows, err := q.db.Query(ctx, listUnits) +func (q *Queries) UnitsList(ctx context.Context) ([]VUnit, error) { + rows, err := q.db.Query(ctx, unitsList) if err != nil { return nil, err } diff --git a/api/internal/db/uploader.sql_gen.go b/api/internal/db/uploader.sql_gen.go index 021e10d6..153c0caf 100644 --- a/api/internal/db/uploader.sql_gen.go +++ b/api/internal/db/uploader.sql_gen.go @@ -12,13 +12,13 @@ import ( "github.com/google/uuid" ) -const createUploaderConfig = `-- name: CreateUploaderConfig :one +const uploaderConfigCreate = `-- name: UploaderConfigCreate :one insert into uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) returning id ` -type CreateUploaderConfigParams struct { +type UploaderConfigCreateParams struct { ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Description string `json:"description"` @@ -28,8 +28,8 @@ type CreateUploaderConfigParams struct { TzName string `json:"tz_name"` } -func (q *Queries) CreateUploaderConfig(ctx context.Context, arg CreateUploaderConfigParams) (uuid.UUID, error) { - row := q.db.QueryRow(ctx, createUploaderConfig, +func (q *Queries) UploaderConfigCreate(ctx context.Context, arg UploaderConfigCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, uploaderConfigCreate, arg.ProjectID, arg.Name, arg.Description, @@ -43,54 +43,21 @@ func (q *Queries) CreateUploaderConfig(ctx context.Context, arg CreateUploaderCo return id, err } -const deleteAllUploaderConfigMappingsForUploaderConfig = `-- name: DeleteAllUploaderConfigMappingsForUploaderConfig :exec -delete from uploader_config_mapping where uploader_config_id=$1 -` - -func (q *Queries) DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteAllUploaderConfigMappingsForUploaderConfig, uploaderConfigID) - return err -} 
- -const deleteUploaderConfig = `-- name: DeleteUploaderConfig :exec +const uploaderConfigDelete = `-- name: UploaderConfigDelete :exec delete from uploader_config where id=$1 ` -func (q *Queries) DeleteUploaderConfig(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, deleteUploaderConfig, id) +func (q *Queries) UploaderConfigDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, uploaderConfigDelete, id) return err } -const listUploaderConfigMappings = `-- name: ListUploaderConfigMappings :many -select uploader_config_id, field_name, timeseries_id from uploader_config_mapping where uploader_config_id=$1 -` - -func (q *Queries) ListUploaderConfigMappings(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) { - rows, err := q.db.Query(ctx, listUploaderConfigMappings, uploaderConfigID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []UploaderConfigMapping{} - for rows.Next() { - var i UploaderConfigMapping - if err := rows.Scan(&i.UploaderConfigID, &i.FieldName, &i.TimeseriesID); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listUploaderConfigsForProject = `-- name: ListUploaderConfigsForProject :many +const uploaderConfigListForProject = `-- name: UploaderConfigListForProject :many select id, project_id, slug, name, description, create_date, creator, update_date, updater, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field from uploader_config where project_id=$1 ` -func (q *Queries) ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) { - rows, err := q.db.Query(ctx, listUploaderConfigsForProject, projectID) +func (q *Queries) UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) { + 
rows, err := q.db.Query(ctx, uploaderConfigListForProject, projectID) if err != nil { return nil, err } @@ -128,7 +95,40 @@ func (q *Queries) ListUploaderConfigsForProject(ctx context.Context, projectID u return items, nil } -const updateUploaderConfig = `-- name: UpdateUploaderConfig :exec +const uploaderConfigMappingDeleteForUploaderConfig = `-- name: UploaderConfigMappingDeleteForUploaderConfig :exec +delete from uploader_config_mapping where uploader_config_id=$1 +` + +func (q *Queries) UploaderConfigMappingDeleteForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, uploaderConfigMappingDeleteForUploaderConfig, uploaderConfigID) + return err +} + +const uploaderConfigMappingList = `-- name: UploaderConfigMappingList :many +select uploader_config_id, field_name, timeseries_id from uploader_config_mapping where uploader_config_id=$1 +` + +func (q *Queries) UploaderConfigMappingList(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) { + rows, err := q.db.Query(ctx, uploaderConfigMappingList, uploaderConfigID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []UploaderConfigMapping{} + for rows.Next() { + var i UploaderConfigMapping + if err := rows.Scan(&i.UploaderConfigID, &i.FieldName, &i.TimeseriesID); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const uploaderConfigUpdate = `-- name: UploaderConfigUpdate :exec update uploader_config set name=$2, description=$3, @@ -139,7 +139,7 @@ update uploader_config set where id=$1 ` -type UpdateUploaderConfigParams struct { +type UploaderConfigUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Description string `json:"description"` @@ -149,8 +149,8 @@ type UpdateUploaderConfigParams struct { TzName string `json:"tz_name"` } -func (q *Queries) UpdateUploaderConfig(ctx context.Context, arg 
UpdateUploaderConfigParams) error { - _, err := q.db.Exec(ctx, updateUploaderConfig, +func (q *Queries) UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUpdateParams) error { + _, err := q.db.Exec(ctx, uploaderConfigUpdate, arg.ID, arg.Name, arg.Description, diff --git a/api/internal/dto/alert.go b/api/internal/dto/alert.go new file mode 100644 index 00000000..da83b841 --- /dev/null +++ b/api/internal/dto/alert.go @@ -0,0 +1,19 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Alert struct { + Read *bool `json:"read,omitempty"` + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + ProjectName string `json:"project_name" db:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + CreateDate time.Time `json:"create_date" db:"create_date"` + Instruments dbJSONSlice[AlertConfigInstrument] `json:"instruments" db:"instruments"` +} diff --git a/api/internal/dto/alert_check.go b/api/internal/dto/alert_check.go new file mode 100644 index 00000000..d6c21f0b --- /dev/null +++ b/api/internal/dto/alert_check.go @@ -0,0 +1,53 @@ +package dto + +import ( + "github.com/google/uuid" +) + +var ( + GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") + YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") + RedSubmittalStatusID uuid.UUID = uuid.MustParse("84a0f437-a20a-4ac2-8a5b-f8dc35e8489b") + + MeasurementSubmittalAlertTypeID uuid.UUID = uuid.MustParse("97e7a25c-d5c7-4ded-b272-1bb6e5914fe3") + EvaluationSubmittalAlertTypeID uuid.UUID = uuid.MustParse("da6ee89e-58cc-4d85-8384-43c3c33a68bd") +) + +const ( + warning = "Warning" + alert = "Alert" + reminder = "Reminder" +) + +type AlertCheck struct { + AlertConfigID uuid.UUID `db:"alert_config_id"` + SubmittalID uuid.UUID `db:"submittal_id"` + ShouldWarn bool `db:"should_warn"` + ShouldAlert bool 
`db:"should_alert"` + ShouldRemind bool `db:"should_remind"` + Submittal Submittal `db:"-"` +} + +func (ck AlertCheck) GetShouldWarn() bool { + return ck.ShouldWarn +} + +func (ck AlertCheck) GetShouldAlert() bool { + return ck.ShouldAlert +} + +func (ck AlertCheck) GetShouldRemind() bool { + return ck.ShouldRemind +} + +func (ck AlertCheck) GetSubmittal() Submittal { + return ck.Submittal +} + +func (ck *AlertCheck) SetSubmittal(sub Submittal) { + ck.Submittal = sub +} + +type AlertConfigMap map[uuid.UUID]AlertConfig + +type SubmittalMap map[uuid.UUID]Submittal diff --git a/api/internal/dto/alert_config.go b/api/internal/dto/alert_config.go new file mode 100644 index 00000000..84c8e9d6 --- /dev/null +++ b/api/internal/dto/alert_config.go @@ -0,0 +1,41 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type AlertConfig struct { + ID uuid.UUID `json:"id" db:"id"` + Name string `json:"name" db:"name"` + Body string `json:"body" db:"body"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + ProjectName string `json:"project_name" db:"project_name"` + AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` + AlertType string `json:"alert_type" db:"alert_type"` + StartDate time.Time `json:"start_date" db:"start_date"` + ScheduleInterval string `json:"schedule_interval" db:"schedule_interval"` + RemindInterval string `json:"remind_interval" db:"remind_interval"` + WarningInterval string `json:"warning_interval" db:"warning_interval"` + LastChecked *time.Time `json:"last_checked" db:"last_checked"` + LastReminded *time.Time `json:"last_reminded" db:"last_reminded"` + Instruments dbJSONSlice[AlertConfigInstrument] `json:"instruments" db:"instruments"` + AlertEmailSubscriptions dbJSONSlice[EmailAutocompleteResult] `json:"alert_email_subscriptions" db:"alert_email_subscriptions"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts" db:"mute_consecutive_alerts"` + CreateNextSubmittalFrom *time.Time `json:"-" db:"-"` + AuditInfo +} 
+ +type AlertConfigInstrument struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + InstrumentName string `json:"instrument_name" db:"instrument_name"` +} + +func (a *AlertConfig) GetToAddresses() []string { + emails := make([]string, len(a.AlertEmailSubscriptions)) + for idx := range a.AlertEmailSubscriptions { + emails[idx] = a.AlertEmailSubscriptions[idx].Email + } + return emails +} diff --git a/api/internal/model/alert_evaluation_check.go b/api/internal/dto/alert_evaluation_check.go similarity index 77% rename from api/internal/model/alert_evaluation_check.go rename to api/internal/dto/alert_evaluation_check.go index 71669f0b..09c808ee 100644 --- a/api/internal/model/alert_evaluation_check.go +++ b/api/internal/dto/alert_evaluation_check.go @@ -1,7 +1,6 @@ -package model +package dto import ( - "context" "fmt" "github.com/USACE/instrumentation-api/api/internal/config" @@ -33,7 +32,7 @@ func (a *AlertConfigEvaluationCheck) SetChecks(ec []*EvaluationCheck) { a.AlertChecks = ec } -func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg config.AlertCheckConfig) error { +func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg *config.AlertCheckConfig) error { if emailType == "" { return fmt.Errorf("must provide emailType") } @@ -63,19 +62,3 @@ func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg config.Alert } return nil } - -const getAllIncompleteEvaluationSubmittals = ` - SELECT * FROM v_alert_check_evaluation_submittal - WHERE submittal_id = ANY( - SELECT id FROM submittal - WHERE completion_date IS NULL AND NOT marked_as_missing - ) -` - -func (q *Queries) GetAllIncompleteEvaluationSubmittals(ctx context.Context) ([]*EvaluationCheck, error) { - ecs := make([]*EvaluationCheck, 0) - if err := q.db.SelectContext(ctx, &ecs, getAllIncompleteEvaluationSubmittals); err != nil { - return nil, err - } - return ecs, nil -} diff --git a/api/internal/model/alert_measurement_check.go 
b/api/internal/dto/alert_measurement_check.go similarity index 79% rename from api/internal/model/alert_measurement_check.go rename to api/internal/dto/alert_measurement_check.go index 39578be7..53a3bbde 100644 --- a/api/internal/model/alert_measurement_check.go +++ b/api/internal/dto/alert_measurement_check.go @@ -1,7 +1,6 @@ -package model +package dto import ( - "context" "fmt" "github.com/USACE/instrumentation-api/api/internal/config" @@ -40,7 +39,7 @@ func (a *AlertConfigMeasurementCheck) SetChecks(mc []*MeasurementCheck) { a.AlertChecks = mc } -func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg config.AlertCheckConfig) error { +func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg *config.AlertCheckConfig) error { if emailType == "" { return fmt.Errorf("must provide emailType") } @@ -72,19 +71,3 @@ func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg config.Alert } return nil } - -const getAllIncompleteMeasurementSubmittals = ` - SELECT * FROM v_alert_check_measurement_submittal - WHERE submittal_id = ANY( - SELECT id FROM submittal - WHERE completion_date IS NULL AND NOT marked_as_missing - ) -` - -func (q *Queries) GetAllIncompleteMeasurementSubmittals(ctx context.Context) ([]*MeasurementCheck, error) { - mcs := make([]*MeasurementCheck, 0) - if err := q.db.SelectContext(ctx, &mcs, getAllIncompleteMeasurementSubmittals); err != nil { - return nil, err - } - return mcs, nil -} diff --git a/api/internal/dto/alert_subscription.go b/api/internal/dto/alert_subscription.go new file mode 100644 index 00000000..75fa77e9 --- /dev/null +++ b/api/internal/dto/alert_subscription.go @@ -0,0 +1,54 @@ +package dto + +import ( + "encoding/json" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type AlertSubscription struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` + 
AlertSubscriptionSettings +} + +type AlertSubscriptionSettings struct { + MuteUI bool `json:"mute_ui" db:"mute_ui"` + MuteNotify bool `json:"mute_notify" db:"mute_notify"` +} + +type AlertSubscriptionCollection struct { + Items []AlertSubscription `json:"items"` +} + +type EmailAlert struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"profile_id"` + MuteNotify bool `json:"mute_notify" db:"mute_notify"` +} + +type Email struct { + ID uuid.UUID `json:"id" db:"id"` + Email string `json:"email" db:"email"` +} + +func (c *AlertSubscriptionCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var a AlertSubscription + if err := json.Unmarshal(b, &a); err != nil { + return err + } + c.Items = []AlertSubscription{a} + default: + c.Items = make([]AlertSubscription, 0) + } + return nil +} diff --git a/api/internal/dto/autocomplete.go b/api/internal/dto/autocomplete.go new file mode 100644 index 00000000..e5b4063c --- /dev/null +++ b/api/internal/dto/autocomplete.go @@ -0,0 +1,12 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type EmailAutocompleteResult struct { + ID uuid.UUID `json:"id"` + UserType string `json:"user_type" db:"user_type"` + Username *string `json:"username"` + Email string `json:"email"` +} diff --git a/api/internal/dto/aware.go b/api/internal/dto/aware.go new file mode 100644 index 00000000..dc61698e --- /dev/null +++ b/api/internal/dto/aware.go @@ -0,0 +1,25 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type AwareParameter struct { + ID uuid.UUID `json:"id"` + Key string `json:"key"` + ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` + UnitID uuid.UUID `json:"unit_id" db:"unit_id"` +} + +type AwarePlatformParameterConfig struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + AwareID uuid.UUID `json:"aware_id" 
db:"aware_id"` + AwareParameters map[string]*uuid.UUID `json:"aware_parameters"` +} + +type AwarePlatformParameterEnabled struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + AwareID uuid.UUID `json:"aware_id" db:"aware_id"` + AwareParameterKey string `json:"aware_parameter_key" db:"aware_parameter_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` +} diff --git a/api/internal/dto/collection_group.go b/api/internal/dto/collection_group.go new file mode 100644 index 00000000..2739283e --- /dev/null +++ b/api/internal/dto/collection_group.go @@ -0,0 +1,28 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type CollectionGroup struct { + ID uuid.UUID `json:"id" db:"id"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Slug string `json:"slug" db:"slug"` + Name string `json:"name" db:"name"` + SortOrder int32 `json:"sort_order" db:"sort_order"` + AuditInfo +} + +type CollectionGroupDetails struct { + CollectionGroup + Timeseries []collectionGroupDetailsTimeseries `json:"timeseries"` +} + +type collectionGroupDetailsTimeseries struct { + Timeseries + LatestTime *time.Time `json:"latest_time" db:"latest_time"` + LatestValue *float32 `json:"latest_value" db:"latest_value"` + SortOrder int `json:"sort_order" db:"sort_order"` +} diff --git a/api/internal/model/common.go b/api/internal/dto/common.go similarity index 94% rename from api/internal/model/common.go rename to api/internal/dto/common.go index 3a3bc0f0..0713990b 100644 --- a/api/internal/model/common.go +++ b/api/internal/dto/common.go @@ -1,4 +1,4 @@ -package model +package dto import ( "encoding/json" @@ -8,7 +8,6 @@ import ( "github.com/google/uuid" ) -// AuditInfo holds common information about object creator and updater type AuditInfo struct { CreatorID uuid.UUID `json:"creator_id" db:"creator"` CreatorUsername *string `json:"creator_username,omitempty" db:"creator_username"` @@ -48,11 +47,6 @@ type InstrumentIDName struct { 
InstrumentName string `json:"instrument_name"` } -type DataloggerTable struct { - ID uuid.UUID `json:"id"` - TableName string `json:"table_name"` -} - type DataloggerEquivalencyTableField struct { ID uuid.UUID `json:"id"` FieldName string `json:"field_name"` diff --git a/api/internal/dto/datalogger.go b/api/internal/dto/datalogger.go new file mode 100644 index 00000000..fbe777b7 --- /dev/null +++ b/api/internal/dto/datalogger.go @@ -0,0 +1,51 @@ +package dto + +import ( + "encoding/json" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgtype" +) + +type Telemetry struct { + ID uuid.UUID + TypeID string + TypeSlug string + TypeName string +} + +type Datalogger struct { + ID uuid.UUID `json:"id" db:"id"` + Name string `json:"name" db:"name"` + SN string `json:"sn" db:"sn"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Slug string `json:"slug" db:"slug"` + ModelID uuid.UUID `json:"model_id" db:"model_id"` + Model *string `json:"model" db:"model"` + Errors []string `json:"errors" db:"-"` + PgErrors pgtype.TextArray `json:"-" db:"errors"` + Tables dbJSONSlice[DataloggerTable] `json:"tables" db:"tables"` + AuditInfo +} + +type DataloggerWithKey struct { + Datalogger + Key string `json:"key"` +} + +type DataloggerTable struct { + ID uuid.UUID `json:"id" db:"id"` + TableName string `json:"table_name" db:"table_name"` +} + +type DataloggerTablePreview struct { + DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` + UpdateDate time.Time `json:"update_date" db:"update_date"` + Preview json.RawMessage `json:"preview" db:"preview"` +} + +type DataloggerError struct { + DataloggerTableID uuid.UUID `json:"datalogger_id" db:"datalogger_id"` + Errors []string `json:"errors" db:"errors"` +} diff --git a/api/internal/model/datalogger_parser.go b/api/internal/dto/datalogger_parser.go similarity index 99% rename from api/internal/model/datalogger_parser.go rename to api/internal/dto/datalogger_parser.go index 411404f7..b7605d73 
100644 --- a/api/internal/model/datalogger_parser.go +++ b/api/internal/dto/datalogger_parser.go @@ -1,4 +1,4 @@ -package model +package dto import ( "encoding/csv" diff --git a/api/internal/dto/db.go b/api/internal/dto/db.go new file mode 100644 index 00000000..36cd0b0f --- /dev/null +++ b/api/internal/dto/db.go @@ -0,0 +1,41 @@ +package dto + +import ( + "encoding/json" + "fmt" + + "github.com/lib/pq" +) + +type dbSlice[T any] []T + +func (d *dbSlice[T]) Scan(src interface{}) error { + value := make([]T, 0) + if err := pq.Array(&value).Scan(src); err != nil { + return err + } + *d = dbSlice[T](value) + return nil +} + +type dbJSONSlice[T any] []T + +func (d *dbJSONSlice[T]) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("failed type assertion") + } + return json.Unmarshal([]byte(b), d) +} + +func MapToStruct[T any](v map[string]interface{}) (T, error) { + var o T + s, err := json.Marshal(v) + if err != nil { + return o, err + } + if err := json.Unmarshal(s, &o); err != nil { + return o, err + } + return o, nil +} diff --git a/api/internal/dto/district_rollup.go b/api/internal/dto/district_rollup.go new file mode 100644 index 00000000..b16527ea --- /dev/null +++ b/api/internal/dto/district_rollup.go @@ -0,0 +1,21 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type DistrictRollup struct { + AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` + OfficeID *uuid.UUID `json:"office_id" db:"office_id"` + DistrictInitials *string `json:"district_initials" db:"district_initials"` + ProjectName string `json:"project_name" db:"project_name"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Month time.Time `json:"month" db:"the_month"` + ExpectedTotalSubmittals int `json:"expected_total_submittals" db:"expected_total_submittals"` + ActualTotalSubmittals int `json:"actual_total_submittals" db:"actual_total_submittals"` + RedSubmittals int `json:"red_submittals" db:"red_submittals"` + 
YellowSubmittals int `json:"yellow_submittals" db:"yellow_submittals"` + GreenSubmittals int `json:"green_submittals" db:"green_submittals"` +} diff --git a/api/internal/dto/domain.go b/api/internal/dto/domain.go new file mode 100644 index 00000000..11224dff --- /dev/null +++ b/api/internal/dto/domain.go @@ -0,0 +1,27 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type Domain struct { + ID uuid.UUID `json:"id" db:"id"` + Group string `json:"group" db:"group"` + Value string `json:"value" db:"value"` + Description *string `json:"description" db:"description"` +} + +type DomainGroup struct { + Group string `json:"group" db:"group"` + Opts dbJSONSlice[DomainGroupOption] `json:"opts" db:"opts"` +} + +type DomainGroupOption struct { + ID uuid.UUID `json:"id" db:"id"` + Value string `json:"value" db:"value"` + Description *string `json:"description" db:"description"` +} + +type DomainGroupCollection []DomainGroup + +type DomainMap map[string][]DomainGroupOption diff --git a/api/internal/dto/equivalency_table.go b/api/internal/dto/equivalency_table.go new file mode 100644 index 00000000..a1270cac --- /dev/null +++ b/api/internal/dto/equivalency_table.go @@ -0,0 +1,20 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type EquivalencyTable struct { + DataloggerID uuid.UUID `json:"datalogger_id" db:"datalogger_id"` + DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` + DataloggerTableName string `json:"datalogger_table_name" db:"datalogger_table_name"` + Rows dbJSONSlice[EquivalencyTableRow] `json:"rows" db:"fields"` +} + +type EquivalencyTableRow struct { + ID uuid.UUID `json:"id" db:"id"` + FieldName string `json:"field_name" db:"field_name"` + DisplayName string `json:"display_name" db:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id" db:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` +} diff --git a/api/internal/model/error.go b/api/internal/dto/error.go similarity 
index 98% rename from api/internal/model/error.go rename to api/internal/dto/error.go index ac355381..88111bb7 100644 --- a/api/internal/model/error.go +++ b/api/internal/dto/error.go @@ -1,4 +1,4 @@ -package model +package dto import ( "fmt" diff --git a/api/internal/dto/evaluation.go b/api/internal/dto/evaluation.go new file mode 100644 index 00000000..fbbdb25b --- /dev/null +++ b/api/internal/dto/evaluation.go @@ -0,0 +1,27 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Evaluation struct { + ID uuid.UUID `json:"id" db:"id"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + ProjectName string `json:"project_name" db:"project_name"` + AlertConfigID *uuid.UUID `json:"alert_config_id" db:"alert_config_id"` + AlertConfigName *string `json:"alert_config_name" db:"alert_config_name"` + SubmittalID *uuid.UUID `json:"submittal_id" db:"submittal_id"` + Name string `json:"name" db:"name"` + Body string `json:"body" db:"body"` + StartDate time.Time `json:"start_date" db:"start_date"` + EndDate time.Time `json:"end_date" db:"end_date"` + Instruments dbJSONSlice[EvaluationInstrument] `json:"instruments" db:"instruments"` + AuditInfo +} + +type EvaluationInstrument struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + InstrumentName string `json:"instrument_name" db:"instrument_name"` +} diff --git a/api/internal/dto/heartbeat.go b/api/internal/dto/heartbeat.go new file mode 100644 index 00000000..2a606463 --- /dev/null +++ b/api/internal/dto/heartbeat.go @@ -0,0 +1,9 @@ +package dto + +import ( + "time" +) + +type Heartbeat struct { + Time time.Time `json:"time"` +} diff --git a/api/internal/dto/home.go b/api/internal/dto/home.go new file mode 100644 index 00000000..795ce63c --- /dev/null +++ b/api/internal/dto/home.go @@ -0,0 +1,9 @@ +package dto + +type Home struct { + InstrumentCount int `json:"instrument_count" db:"instrument_count"` + InstrumetGroupCount int `json:"instrument_group_count" 
db:"instrument_group_count"` + ProjectCount int `json:"project_count" db:"project_count"` + NewInstruments7D int `json:"new_instruments_7d" db:"new_instruments_7d"` + NewMeasurements2H int `json:"new_measurements_2h" db:"new_measurements_2h"` +} diff --git a/api/internal/dto/instrument.go b/api/internal/dto/instrument.go new file mode 100644 index 00000000..80ca4ca4 --- /dev/null +++ b/api/internal/dto/instrument.go @@ -0,0 +1,59 @@ +package dto + +import ( + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/google/uuid" +) + +type Instrument struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + AwareID *uuid.UUID `json:"aware_id,omitempty"` + Groups dbSlice[uuid.UUID] `json:"groups" db:"groups"` + Constants dbSlice[uuid.UUID] `json:"constants" db:"constants"` + AlertConfigs dbSlice[uuid.UUID] `json:"alert_configs" db:"alert_configs"` + StatusID uuid.UUID `json:"status_id" db:"status_id"` + Status string `json:"status"` + StatusTime time.Time `json:"status_time" db:"status_time"` + Deleted bool `json:"-"` + TypeID uuid.UUID `json:"type_id" db:"type_id"` + Type string `json:"type"` + Icon *string `json:"icon" db:"icon"` + Geometry db.Geometry `json:"geometry,omitempty"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"offset" db:"station_offset"` + Projects dbJSONSlice[IDSlugName] `json:"projects" db:"projects"` + NIDID *string `json:"nid_id" db:"nid_id"` + USGSID *string `json:"usgs_id" db:"usgs_id"` + HasCwms bool `json:"has_cwms" db:"has_cwms"` + ShowCwmsTab bool `json:"show_cwms_tab" db:"show_cwms_tab"` + Opts Opts `json:"opts" db:"opts"` + AuditInfo +} + +// InstrumentCollection is a collection of Instrument items +type InstrumentCollection []Instrument + +// Shorten returns an instrument collection with individual objects limited to ID and Struct fields +func (ic InstrumentCollection) Shorten() IDSlugCollection { + ss := IDSlugCollection{Items: make([]IDSlug, 0)} + for _, n := 
range ic { + s := IDSlug{ID: n.ID, Slug: n.Slug} + + ss.Items = append(ss.Items, s) + } + return ss +} + +type InstrumentCount struct { + InstrumentCount int `json:"instrument_count"` +} + +type InstrumentsProjectCount struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + InstrumentName string `json:"instrument_name" db:"instrument_name"` + ProjectCount int `json:"project_count" db:"project_count"` +} diff --git a/api/internal/dto/instrument_assign.go b/api/internal/dto/instrument_assign.go new file mode 100644 index 00000000..d9ad0d1b --- /dev/null +++ b/api/internal/dto/instrument_assign.go @@ -0,0 +1,28 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type ReasonCode int + +const ( + None ReasonCode = iota + Unauthorized + InvalidName + InvalidUnassign +) + +type InstrumentsValidation struct { + ReasonCode ReasonCode `json:"-"` + IsValid bool `json:"is_valid"` + Errors []string `json:"errors"` +} + +type ProjectInstrumentAssignments struct { + InstrumentIDs []uuid.UUID `json:"instrument_ids"` +} + +type InstrumentProjectAssignments struct { + ProjectIDs []uuid.UUID `json:"project_ids"` +} diff --git a/api/internal/dto/instrument_group.go b/api/internal/dto/instrument_group.go new file mode 100644 index 00000000..ec4dbc9a --- /dev/null +++ b/api/internal/dto/instrument_group.go @@ -0,0 +1,51 @@ +package dto + +import ( + "encoding/json" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type InstrumentGroup struct { + ID uuid.UUID `json:"id"` + Deleted bool `json:"-"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + ProjectID *uuid.UUID `json:"project_id" db:"project_id"` + InstrumentCount int `json:"instrument_count" db:"instrument_count"` + TimeseriesCount int `json:"timeseries_count" db:"timeseries_count"` + AuditInfo +} + +type InstrumentGroupCollection struct { + Items []InstrumentGroup +} + +func (c InstrumentGroupCollection) 
Shorten() IDSlugCollection { + ss := IDSlugCollection{Items: make([]IDSlug, 0)} + for _, n := range c.Items { + s := IDSlug{ID: n.ID, Slug: n.Slug} + ss.Items = append(ss.Items, s) + } + return ss +} + +func (c *InstrumentGroupCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var g InstrumentGroup + if err := json.Unmarshal(b, &g); err != nil { + return err + } + c.Items = []InstrumentGroup{g} + default: + c.Items = make([]InstrumentGroup, 0) + } + return nil +} diff --git a/api/internal/dto/instrument_incl.go b/api/internal/dto/instrument_incl.go new file mode 100644 index 00000000..bd708a8c --- /dev/null +++ b/api/internal/dto/instrument_incl.go @@ -0,0 +1,29 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +var ( + InclParameterID = uuid.MustParse("3ea5ed77-c926-4696-a580-a3fde0f9a556") +) + +type InclOpts struct { + InstrumentID uuid.UUID `json:"-" db:"instrument_id"` + NumSegments int `json:"num_segments" db:"num_segments"` + BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" db:"bottom_elevation_timeseries_id"` + BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` + InitialTime *time.Time `json:"initial_time" db:"initial_time"` +} + +type InclSegment struct { + ID int `json:"id" db:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id" db:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id" db:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id" db:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id" db:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id" db:"b180_timeseries_id"` +} diff --git a/api/internal/dto/instrument_ipi.go b/api/internal/dto/instrument_ipi.go new file mode 100644 index 
00000000..d4213bc6 --- /dev/null +++ b/api/internal/dto/instrument_ipi.go @@ -0,0 +1,29 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +var ( + IpiParameterID = uuid.MustParse("a9a5ad45-b2e5-4744-816e-d3184f2c08bd") +) + +type IpiOpts struct { + InstrumentID uuid.UUID `json:"-" db:"instrument_id"` + NumSegments int `json:"num_segments" db:"num_segments"` + BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" db:"bottom_elevation_timeseries_id"` + BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` + InitialTime *time.Time `json:"initial_time" db:"initial_time"` +} + +type IpiSegment struct { + ID int `json:"id" db:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + Length *float64 `json:"length" db:"length"` + LengthTimeseriesID uuid.UUID `json:"length_timeseries_id" db:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id" db:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id" db:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id" db:"temp_timeseries_id"` +} diff --git a/api/internal/dto/instrument_note.go b/api/internal/dto/instrument_note.go new file mode 100644 index 00000000..b0781423 --- /dev/null +++ b/api/internal/dto/instrument_note.go @@ -0,0 +1,40 @@ +package dto + +import ( + "encoding/json" + "time" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type InstrumentNote struct { + ID uuid.UUID `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + AuditInfo +} + +type InstrumentNoteCollection struct { + Items []InstrumentNote +} + +func (c *InstrumentNoteCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case 
"OBJECT": + var n InstrumentNote + if err := json.Unmarshal(b, &n); err != nil { + return err + } + c.Items = []InstrumentNote{n} + default: + c.Items = make([]InstrumentNote, 0) + } + return nil +} diff --git a/api/internal/dto/instrument_saa.go b/api/internal/dto/instrument_saa.go new file mode 100644 index 00000000..68a5d1d5 --- /dev/null +++ b/api/internal/dto/instrument_saa.go @@ -0,0 +1,30 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +var ( + SaaParameterID = uuid.MustParse("6d12ca4c-b618-41cd-87a2-a248980a0d69") +) + +type SaaOpts struct { + InstrumentID uuid.UUID `json:"-" db:"instrument_id"` + NumSegments int `json:"num_segments" db:"num_segments"` + BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" db:"bottom_elevation_timeseries_id"` + BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` + InitialTime *time.Time `json:"initial_time" db:"initial_time"` +} + +type SaaSegment struct { + ID int `json:"id" db:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + Length *float64 `json:"length" db:"length"` + LengthTimeseriesID uuid.UUID `json:"length_timeseries_id" db:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id" db:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id" db:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id" db:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id" db:"temp_timeseries_id"` +} diff --git a/api/internal/dto/instrument_status.go b/api/internal/dto/instrument_status.go new file mode 100644 index 00000000..97e4c2ca --- /dev/null +++ b/api/internal/dto/instrument_status.go @@ -0,0 +1,18 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type InstrumentStatus struct { + ID uuid.UUID `json:"id"` + Time time.Time `json:"time"` + StatusID uuid.UUID `json:"status_id" db:"status_id"` + Status string `json:"status"` +} + +type 
InstrumentStatusCollection struct { + Items []InstrumentStatus +} diff --git a/api/internal/model/job.go b/api/internal/dto/job.go similarity index 93% rename from api/internal/model/job.go rename to api/internal/dto/job.go index 40773ffa..0e2db95f 100644 --- a/api/internal/model/job.go +++ b/api/internal/dto/job.go @@ -1,4 +1,4 @@ -package model +package dto import "github.com/google/uuid" diff --git a/api/internal/dto/measurement.go b/api/internal/dto/measurement.go new file mode 100644 index 00000000..6365f88d --- /dev/null +++ b/api/internal/dto/measurement.go @@ -0,0 +1,74 @@ +package dto + +import ( + "encoding/json" + "fmt" + "math" + "strings" + "time" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type TimeseriesMeasurementCollectionCollection struct { + Items []MeasurementCollection +} + +func (cc *TimeseriesMeasurementCollectionCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &cc.Items); err != nil { + return err + } + case "OBJECT": + var mc MeasurementCollection + if err := json.Unmarshal(b, &mc); err != nil { + return err + } + cc.Items = []MeasurementCollection{mc} + default: + cc.Items = make([]MeasurementCollection, 0) + } + return nil +} + +type Measurement struct { + TimeseriesID uuid.UUID `json:"-" db:"timeseries_id"` + Time time.Time `json:"time"` + Value FloatNanInf `json:"value"` + Error string `json:"error,omitempty"` + TimeseriesNote +} + +type FloatNanInf float64 + +func (j FloatNanInf) MarshalJSON() ([]byte, error) { + if math.IsNaN(float64(j)) || math.IsInf(float64(j), 0) { + return []byte("null"), nil + } + + return []byte(fmt.Sprintf("%f", float64(j))), nil +} + +func (j *FloatNanInf) UnmarshalJSON(v []byte) error { + switch strings.ToLower(string(v)) { + case `"nan"`, "nan", "", "null", "undefined": + *j = FloatNanInf(math.NaN()) + case `"inf"`, "inf": + *j = FloatNanInf(math.Inf(1)) + default: + var fv float64 + if err 
:= json.Unmarshal(v, &fv); err != nil { + *j = FloatNanInf(math.NaN()) + return nil + } + *j = FloatNanInf(fv) + } + return nil +} + +type MeasurementCollection struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Items []Measurement `json:"items"` +} diff --git a/api/internal/dto/measurement_inclinometer.go b/api/internal/dto/measurement_inclinometer.go new file mode 100644 index 00000000..d85180b8 --- /dev/null +++ b/api/internal/dto/measurement_inclinometer.go @@ -0,0 +1,84 @@ +package dto + +import ( + "encoding/json" + "time" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" + "github.com/jmoiron/sqlx/types" +) + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurement struct { + TimeseriesID uuid.UUID `json:"-" db:"timeseries_id"` + Time time.Time `json:"time"` + Values types.JSONText `json:"values"` + Creator uuid.UUID `json:"creator"` + CreateDate time.Time `json:"create_date" db:"create_date"` +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementValues struct { + Depth int `json:"depth" db:"depth"` + A0 float32 `json:"a0" db:"a0"` + A180 float32 `json:"a180" db:"a180"` + B0 float32 `json:"b0" db:"b0"` + B180 float32 `json:"b180" db:"b180"` + AChecksum float32 `json:"aChecksum" db:"a_checksum"` + AComb float32 `json:"aComb" db:"a_comb"` + AIncrement float32 `json:"aIncrement" db:"a_increment"` + ACumDev float32 `json:"aCumDev" db:"a_cum_dev"` + BChecksum float32 `json:"bChecksum" db:"b_checksum"` + BComb float32 `json:"bComb" db:"b_comb"` + BIncrement float32 `json:"bIncrement" db:"b_increment"` + BCumDev float32 `json:"bCumDev" db:"b_cum_dev"` +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementLean map[time.Time]types.JSONText 
+ +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementCollection struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Inclinometers []InclinometerMeasurement `json:"inclinometers"` +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementCollectionLean struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Items []InclinometerMeasurementLean `json:"items"` +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementCollectionCollection struct { + Items []InclinometerMeasurementCollection +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +func (cc *InclinometerMeasurementCollectionCollection) TimeseriesIDs() map[uuid.UUID]struct{} { + dd := make(map[uuid.UUID]struct{}) + for _, item := range cc.Items { + dd[item.TimeseriesID] = struct{}{} + } + return dd +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +func (cc *InclinometerMeasurementCollectionCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &cc.Items); err != nil { + return err + } + case "OBJECT": + var mc InclinometerMeasurementCollection + if err := json.Unmarshal(b, &mc); err != nil { + return err + } + cc.Items = []InclinometerMeasurementCollection{mc} + default: + cc.Items = make([]InclinometerMeasurementCollection, 0) + } + return nil +} diff --git a/api/internal/dto/plot_config.go b/api/internal/dto/plot_config.go new file mode 100644 index 00000000..b0107eff --- /dev/null +++ b/api/internal/dto/plot_config.go @@ -0,0 +1,73 @@ +package dto + +import ( + "fmt" + "strings" + "time" + + 
"github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +const ( + ScatterLinePlotType = "scatter-line" + ProfilePlotType = "profile" + ContourPlotType = "contour" + BullseyePlotType = "bullseye" +) + +type PlotConfig struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Slug string `json:"slug"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + ReportConfigs dbJSONSlice[IDSlugName] `json:"report_configs" db:"report_configs"` + PlotType string `json:"plot_type" db:"plot_type"` + Display Opts `json:"display" db:"display"` + PlotConfigSettings + AuditInfo +} + +// PlotConfigSettings describes options for displaying the plot consistently. +// Specifically, whether to ignore data entries in a timeseries that have been masked, +// or whether to display user comments. +type PlotConfigSettings struct { + ShowMasked bool `json:"show_masked" db:"show_masked"` + ShowNonValidated bool `json:"show_nonvalidated" db:"show_nonvalidated"` + ShowComments bool `json:"show_comments" db:"show_comments"` + AutoRange bool `json:"auto_range" db:"auto_range"` + DateRange string `json:"date_range" db:"date_range"` + Threshold int `json:"threshold" db:"threshold"` +} + +// DateRangeTimeWindow creates a TimeWindow from a date range string. +// +// Acceptable date range strings are "lifetime", "5 years", "1 year", or a fixed date in the +// format "YYYY-MM-DD YYYY-MM-DD" with after and before dates separated by a single whitespace. 
+func (pc *PlotConfig) DateRangeTimeWindow() (util.TimeWindow, error) { + switch dr := strings.ToLower(pc.DateRange); dr { + case "lifetime": + return util.TimeWindow{After: time.Time{}, Before: time.Now()}, nil + case "5 years": + return util.TimeWindow{After: time.Now().AddDate(-5, 0, 0), Before: time.Now()}, nil + case "1 year": + return util.TimeWindow{After: time.Now().AddDate(-1, 0, 0), Before: time.Now()}, nil + case "1 month": + return util.TimeWindow{After: time.Now().AddDate(0, -1, 0), Before: time.Now()}, nil + default: + cdr := strings.Split(dr, " ") + invalidDateErr := fmt.Errorf("invalid date range; custom date range must be in format \"YYYY-MM-DD YYYY-MM-DD\"") + if len(cdr) != 2 { + return util.TimeWindow{}, invalidDateErr + } + after, err := time.Parse("2006-01-02", cdr[0]) + if err != nil { + return util.TimeWindow{}, invalidDateErr + } + before, err := time.Parse("2006-01-02", cdr[1]) + if err != nil { + return util.TimeWindow{}, invalidDateErr + } + return util.TimeWindow{After: after, Before: before}, nil + } +} diff --git a/api/internal/dto/plot_config_bullseye.go b/api/internal/dto/plot_config_bullseye.go new file mode 100644 index 00000000..65ed8fff --- /dev/null +++ b/api/internal/dto/plot_config_bullseye.go @@ -0,0 +1,33 @@ +package dto + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type PlotConfigBullseyePlot struct { + PlotConfig + Display PlotConfigBullseyePlotDisplay `json:"display" db:"display"` +} + +type PlotConfigBullseyePlotDisplay struct { + XAxisTimeseriesID uuid.UUID `json:"x_axis_timeseries_id" db:"x_axis_timeseries_id"` + YAxisTimeseriesID uuid.UUID `json:"y_axis_timeseries_id" db:"y_axis_timeseries_id"` +} + +func (d *PlotConfigBullseyePlotDisplay) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), d) +} + +type PlotConfigMeasurementBullseyePlot struct { + Time time.Time `json:"time" db:"time"` 
+ X *float64 `json:"x" db:"x"` + Y *float64 `json:"y" db:"y"` +} diff --git a/api/internal/dto/plot_config_contour.go b/api/internal/dto/plot_config_contour.go new file mode 100644 index 00000000..c67167cb --- /dev/null +++ b/api/internal/dto/plot_config_contour.go @@ -0,0 +1,37 @@ +package dto + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type PlotConfigContourPlot struct { + PlotConfig + Display PlotConfigContourPlotDisplay `json:"display" db:"display"` +} + +type PlotConfigContourPlotDisplay struct { + TimeseriesIDs dbSlice[uuid.UUID] `json:"timeseries_ids" db:"timeseries_ids"` + Time *time.Time `json:"time" db:"time"` + LocfBackfill string `json:"locf_backfill" db:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing" db:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing" db:"contour_smoothing"` + ShowLabels bool `json:"show_labels" db:"show_labels"` +} + +func (d *PlotConfigContourPlotDisplay) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), d) +} + +type PlotConfigMeasurementContourPlot struct { + X float64 `json:"x" db:"x"` + Y float64 `json:"y" db:"y"` + Z *float64 `json:"z" db:"z"` +} diff --git a/api/internal/dto/plot_config_profile.go b/api/internal/dto/plot_config_profile.go new file mode 100644 index 00000000..ec964df1 --- /dev/null +++ b/api/internal/dto/plot_config_profile.go @@ -0,0 +1,26 @@ +package dto + +import ( + "encoding/json" + "fmt" + + "github.com/google/uuid" +) + +type PlotConfigProfilePlot struct { + PlotConfig + Display PlotConfigProfilePlotDisplay `json:"display" db:"display"` +} + +type PlotConfigProfilePlotDisplay struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + InstrumentType string `json:"instrument_type,omitempty" db:"instrument_type"` +} + +func (d *PlotConfigProfilePlotDisplay) Scan(src interface{}) error { + b, ok := src.(string) + 
if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), d) +} diff --git a/api/internal/dto/plot_config_scatter_line.go b/api/internal/dto/plot_config_scatter_line.go new file mode 100644 index 00000000..7f81c825 --- /dev/null +++ b/api/internal/dto/plot_config_scatter_line.go @@ -0,0 +1,55 @@ +package dto + +import ( + "encoding/json" + "fmt" + + "github.com/google/uuid" +) + +type PlotConfigScatterLinePlot struct { + PlotConfig + Display PlotConfigScatterLineDisplay `json:"display" db:"display"` + // TODO AlertConfigIDs []string +} + +type PlotConfigScatterLineDisplay struct { + Traces []PlotConfigScatterLineTimeseriesTrace `json:"traces"` + Layout PlotConfigScatterLineLayout `json:"layout"` +} + +func (d *PlotConfigScatterLineDisplay) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), d) +} + +type PlotConfigScatterLineTimeseriesTrace struct { + PlotConfigurationID uuid.UUID `json:"plot_configuration_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + Name string `json:"name"` // read-only + Parameter string `json:"parameter"` // read-only + TraceOrder int `json:"trace_order"` + TraceType string `json:"trace_type"` + Color string `json:"color"` + LineStyle string `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis string `json:"y_axis"` // y1 or y2, default y1 +} + +type PlotConfigScatterLineLayout struct { + CustomShapes []PlotConfigScatterLineCustomShape `json:"custom_shapes"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +type PlotConfigScatterLineCustomShape struct { + PlotConfigurationID uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` +} diff --git a/api/internal/dto/profile.go b/api/internal/dto/profile.go 
new file mode 100644 index 00000000..c6a00322 --- /dev/null +++ b/api/internal/dto/profile.go @@ -0,0 +1,51 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Profile struct { + ID uuid.UUID `json:"id" db:"id"` + Tokens []TokenInfoProfile `json:"tokens"` + IsAdmin bool `json:"is_admin" db:"is_admin"` + Roles dbSlice[string] `json:"roles" db:"roles"` + ProfileInfo +} + +type TokenInfoProfile struct { + TokenID string `json:"token_id" db:"token_id"` + Issued time.Time `json:"issued"` +} + +type ProfileInfo struct { + EDIPI int `json:"-" db:"edipi"` + Username string `json:"username" db:"username"` + DisplayName string `json:"display_name" db:"display_name"` + Email string `json:"email" db:"email"` +} + +type TokenInfo struct { + ID uuid.UUID `json:"-"` + TokenID string `json:"token_id" db:"token_id"` + ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` + Issued time.Time `json:"issued"` + Hash string `json:"-"` +} + +// Token includes all TokenInfo and the actual token string generated for a user +// this is only returned the first time a token is generated +type Token struct { + SecretToken string `json:"secret_token"` + TokenInfo +} + +type ProfileClaims struct { + PreferredUsername string + Name string + Email string + SubjectDN *string + CacUID *int + X509Presented bool +} diff --git a/api/internal/dto/project.go b/api/internal/dto/project.go new file mode 100644 index 00000000..fc6d38cf --- /dev/null +++ b/api/internal/dto/project.go @@ -0,0 +1,35 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type District struct { + Agency string `json:"agency" db:"agency"` + ID uuid.UUID `json:"id" db:"id"` + Name string `json:"name" db:"name"` + Initials string `json:"initials" db:"initials"` + DivisionName string `json:"division_name" db:"division_name"` + DivisionInitials string `json:"division_initials" db:"division_initials"` + OfficeID *uuid.UUID `json:"office_id" db:"office_id"` +} + +type Project struct { + ID uuid.UUID 
`json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + FederalID *string `json:"federal_id" db:"federal_id"` + DistrictID *uuid.UUID `json:"district_id" db:"district_id"` + OfficeID *uuid.UUID `json:"office_id" db:"office_id"` + Image *string `json:"image" db:"image"` + Deleted bool `json:"-"` + InstrumentCount int `json:"instrument_count" db:"instrument_count"` + InstrumentGroupCount int `json:"instrument_group_count" db:"instrument_group_count"` + AuditInfo +} + +type ProjectCount struct { + ProjectCount int `json:"project_count"` +} + +type ProjectCollection []Project diff --git a/api/internal/dto/project_role.go b/api/internal/dto/project_role.go new file mode 100644 index 00000000..4e76479d --- /dev/null +++ b/api/internal/dto/project_role.go @@ -0,0 +1,14 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type ProjectMembership struct { + ID uuid.UUID `json:"id" db:"id"` + ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` + Username *string `json:"username"` + Email string `json:"email"` + RoleID uuid.UUID `json:"role_id" db:"role_id"` + Role string `json:"role"` +} diff --git a/api/internal/dto/report_config.go b/api/internal/dto/report_config.go new file mode 100644 index 00000000..ecedf26a --- /dev/null +++ b/api/internal/dto/report_config.go @@ -0,0 +1,53 @@ +package dto + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type ReportConfig struct { + ID uuid.UUID `json:"id" db:"id"` + Slug string `json:"slug" db:"slug"` + Name string `json:"name" db:"name"` + Description string `json:"description" db:"description"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + ProjectName string `json:"project_name" db:"project_name"` + DistrictName *string `json:"district_name" db:"district_name"` + PlotConfigs dbJSONSlice[IDSlugName] `json:"plot_configs" db:"plot_configs"` + GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides" db:"global_overrides"` + AuditInfo +} + +type 
ReportDownloadJob struct { + ID uuid.UUID `json:"id" db:"id"` + ReportConfigID uuid.UUID `json:"report_config_id" db:"report_config_id"` + Creator uuid.UUID `json:"creator" db:"creator"` + CreateDate time.Time `json:"create_date" db:"create_date"` + Status string `json:"status" db:"status"` + FileKey *string `json:"file_key" db:"file_key"` + FileExpiry *time.Time `json:"file_expiry" db:"file_expiry"` + Progress int `json:"progress" db:"progress"` + ProgressUpdateDate time.Time `json:"progress_update_date" db:"progress_update_date"` +} + +func (o *ReportConfigGlobalOverrides) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), o) +} + +type ReportConfigWithPlotConfigs struct { + ReportConfig + PlotConfigs []PlotConfigScatterLinePlot `json:"plot_configs"` +} + +type ReportConfigJobMessage struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + JobID uuid.UUID `json:"job_id"` + IsLandscape bool `json:"is_landscape"` +} diff --git a/api/internal/model/search.go b/api/internal/dto/search.go similarity index 66% rename from api/internal/model/search.go rename to api/internal/dto/search.go index e41343cb..12fa396c 100644 --- a/api/internal/model/search.go +++ b/api/internal/dto/search.go @@ -1,10 +1,9 @@ -package model +package dto import ( "github.com/google/uuid" ) -// EmailAutocompleteResult stores search result in profiles and emails type SearchResult struct { ID uuid.UUID `json:"id"` Type string `json:"type"` diff --git a/api/internal/dto/submittal.go b/api/internal/dto/submittal.go new file mode 100644 index 00000000..2a16c1c9 --- /dev/null +++ b/api/internal/dto/submittal.go @@ -0,0 +1,23 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Submittal struct { + ID uuid.UUID `json:"id" db:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` + AlertConfigName string `json:"alert_config_name" db:"alert_config_name"` 
+ AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` + AlertTypeName string `json:"alert_type_name" db:"alert_type_name"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + SubmittalStatusID uuid.UUID `json:"submittal_status_id" db:"submittal_status_id"` + SubmittalStatusName string `json:"submittal_status_name" db:"submittal_status_name"` + CompletionDate *time.Time `json:"completion_date" db:"completion_date"` + CreateDate time.Time `json:"create_date" db:"create_date"` + DueDate time.Time `json:"due_date" db:"due_date"` + MarkedAsMissing bool `json:"marked_as_missing" db:"marked_as_missing"` + WarningSent bool `json:"warning_sent" db:"warning_sent"` +} diff --git a/api/internal/dto/timeseries.go b/api/internal/dto/timeseries.go new file mode 100644 index 00000000..bd060965 --- /dev/null +++ b/api/internal/dto/timeseries.go @@ -0,0 +1,65 @@ +package dto + +import ( + "encoding/json" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +const ( + StandardTimeseriesType = "standard" + ConstantTimeseriesType = "constant" + ComputedTimeseriesType = "computed" + CwmsTimeseriesType = "cwms" +) + +type Timeseries struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Variable string `json:"variable"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + InstrumentSlug string `json:"instrument_slug" db:"instrument_slug"` + Instrument string `json:"instrument,omitempty"` + ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` + Parameter string `json:"parameter,omitempty"` + UnitID uuid.UUID `json:"unit_id" db:"unit_id"` + Unit string `json:"unit,omitempty"` + Values []Measurement `json:"values,omitempty"` + Type string `json:"type" db:"type"` + IsComputed bool `json:"is_computed" db:"is_computed"` +} + +type TimeseriesNote struct { + Masked *bool `json:"masked,omitempty"` + Validated *bool `json:"validated,omitempty"` + Annotation *string 
`json:"annotation,omitempty"` +} + +type TimeseriesCollectionItems struct { + Items []Timeseries +} + +func (c *TimeseriesCollectionItems) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var t Timeseries + if err := json.Unmarshal(b, &t); err != nil { + return err + } + c.Items = []Timeseries{t} + default: + c.Items = make([]Timeseries, 0) + } + return nil +} + +var ( + UnknownParameterID = uuid.MustParse("2b7f96e1-820f-4f61-ba8f-861640af6232") + UnknownUnitID = uuid.MustParse("4a999277-4cf5-4282-93ce-23b33c65e2c8") +) diff --git a/api/internal/dto/timeseries_calculated.go b/api/internal/dto/timeseries_calculated.go new file mode 100644 index 00000000..eda9ab27 --- /dev/null +++ b/api/internal/dto/timeseries_calculated.go @@ -0,0 +1,15 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type CalculatedTimeseries struct { + ID uuid.UUID `json:"id" db:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` + UnitID uuid.UUID `json:"unit_id" db:"unit_id"` + Slug string `json:"slug" db:"slug"` + FormulaName string `json:"formula_name" db:"formula_name"` + Formula string `json:"formula" db:"formula"` +} diff --git a/api/internal/dto/timeseries_cwms.go b/api/internal/dto/timeseries_cwms.go new file mode 100644 index 00000000..96c957e7 --- /dev/null +++ b/api/internal/dto/timeseries_cwms.go @@ -0,0 +1,13 @@ +package dto + +import ( + "time" +) + +type TimeseriesCwms struct { + Timeseries + CwmsTimeseriesID string `json:"cwms_timeseries_id" db:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id" db:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time" db:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time" db:"cwms_extent_latest_time"` +} diff --git a/api/internal/model/unit.go 
b/api/internal/dto/unit.go similarity index 56% rename from api/internal/model/unit.go rename to api/internal/dto/unit.go index d8377517..88896ca0 100644 --- a/api/internal/model/unit.go +++ b/api/internal/dto/unit.go @@ -1,12 +1,9 @@ -package model +package dto import ( - "context" - "github.com/google/uuid" ) -// Unit is a unit data structure type Unit struct { ID uuid.UUID `json:"id"` Name string `json:"name"` @@ -21,18 +18,3 @@ var ( MeterUnitID = uuid.MustParse("ae06a7db-1e18-4994-be41-9d5a408d6cad") FeetUnitID = uuid.MustParse("f777f2e2-5e32-424e-a1ca-19d16cd8abce") ) - -const listUnits = ` - SELECT id, name, abbreviation, unit_family_id, unit_family, measure_id, measure - FROM v_unit - ORDER BY name -` - -// ListUnits returns a slice of units -func (q *Queries) ListUnits(ctx context.Context) ([]Unit, error) { - uu := make([]Unit, 0) - if err := q.db.SelectContext(ctx, &uu, listUnits); err != nil { - return nil, err - } - return uu, nil -} diff --git a/api/internal/dto/uploader.go b/api/internal/dto/uploader.go new file mode 100644 index 00000000..f9ed85c1 --- /dev/null +++ b/api/internal/dto/uploader.go @@ -0,0 +1,28 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type UploaderConfigType string + +const ( + CSV, DUX, TOA5 UploaderConfigType = "csv", "dux", "toa5" +) + +type UploaderConfig struct { + ID uuid.UUID `json:"id" db:"id"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Name string `json:"name" db:"name"` + Slug string `json:"slug" db:"slug"` + Description string `json:"description" db:"description"` + Type UploaderConfigType `json:"type" db:"type"` + TzName string `json:"tz_name" db:"tz_name"` + AuditInfo +} + +type UploaderConfigMapping struct { + UploaderConfigID uuid.UUID `json:"-" db:"uploader_config_id"` + FieldName string `json:"field_name" db:"field_name"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` +} diff --git a/api/internal/email/email.go b/api/internal/email/email.go index 
843da825..ed5d53b1 100644 --- a/api/internal/email/email.go +++ b/api/internal/email/email.go @@ -54,7 +54,7 @@ func FormatAlertConfigTemplates(templContent EmailTemplateContent, data any) (Em }, nil } -func ConstructAndSendEmail(ec EmailContent, cfg config.AlertCheckConfig) error { +func ConstructAndSendEmail(ec EmailContent, cfg *config.AlertCheckConfig) error { if len(ec.To) == 0 { if cfg.EmailSendMocked { log.Print("no email subs") diff --git a/api/internal/handler/alert.go b/api/internal/handler/alert.go index a80096fb..aef238e8 100644 --- a/api/internal/handler/alert.go +++ b/api/internal/handler/alert.go @@ -3,8 +3,9 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -18,7 +19,7 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.Alert +// @Success 200 {array} dto.Alert // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -28,7 +29,7 @@ func (h *ApiHandler) ListAlertsForInstrument(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - aa, err := h.AlertService.GetAllAlertsForInstrument(c.Request().Context(), instrumentID) + aa, err := h.DBService.AlertListForInstrument(c.Request().Context(), instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -42,16 +43,16 @@ func (h *ApiHandler) ListAlertsForInstrument(c echo.Context) error { // @Tags alert // @Produce json // @Param key query string false "api key" -// @Success 200 {array} model.Alert +// @Success 200 {array} dto.Alert // @Failure 400 {object} echo.HTTPError // @Failure 404 
{object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts [get] // @Security Bearer func (h *ApiHandler) ListMyAlerts(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) profileID := p.ID - aa, err := h.AlertService.GetAllAlertsForProfile(c.Request().Context(), profileID) + aa, err := h.DBService.AlertListForProfile(c.Request().Context(), profileID) if err != nil { return httperr.InternalServerError(err) } @@ -67,20 +68,23 @@ func (h *ApiHandler) ListMyAlerts(c echo.Context) error { // @Produce json // @Param alert_id path string true "alert uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.Alert +// @Success 200 {object} dto.Alert // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts/{alert_id}/read [post] // @Security Bearer func (h *ApiHandler) DoAlertRead(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) profileID := p.ID alertID, err := uuid.Parse(c.Param("alert_id")) if err != nil { return httperr.MalformedID(err) } - a, err := h.AlertService.DoAlertRead(c.Request().Context(), profileID, alertID) + a, err := h.DBService.AlertReadCreate(c.Request().Context(), db.AlertReadCreateParams{ + ProfileID: profileID, + AlertID: alertID, + }) if err != nil { return httperr.InternalServerError(err) } @@ -96,20 +100,23 @@ func (h *ApiHandler) DoAlertRead(c echo.Context) error { // @Produce json // @Param alert_id path string true "alert uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.Alert +// @Success 200 {object} dto.Alert // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts/{alert_id}/unread [post] // @Security Bearer func (h *ApiHandler) DoAlertUnread(c echo.Context) error { - p 
:= c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) profileID := p.ID alertID, err := uuid.Parse(c.Param("alert_id")) if err != nil { return httperr.MalformedID(err) } - a, err := h.AlertService.DoAlertUnread(c.Request().Context(), profileID, alertID) + a, err := h.DBService.AlertReadDelete(c.Request().Context(), db.AlertReadDeleteParams{ + ProfileID: profileID, + AlertID: alertID, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/alert_check.go b/api/internal/handler/alert_check.go index 26af1bb8..ffba80a6 100644 --- a/api/internal/handler/alert_check.go +++ b/api/internal/handler/alert_check.go @@ -6,5 +6,5 @@ import ( func (h *AlertCheckHandler) DoAlertChecks() error { ctx := context.Background() - return h.AlertCheckService.DoAlertChecks(ctx) + return h.DBService.DoAlertChecks(ctx, h.Config) } diff --git a/api/internal/handler/alert_config.go b/api/internal/handler/alert_config.go index 6a25335e..c361af23 100644 --- a/api/internal/handler/alert_config.go +++ b/api/internal/handler/alert_config.go @@ -4,41 +4,45 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// GetAllAlertConfigsForProject godoc +// ListAlertConfigsForProject godoc // // @Summary lists alert configs for a project // @Tags alert-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} dto.AlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/alert_configs [get] -func (h *ApiHandler) GetAllAlertConfigsForProject(c echo.Context) error { +func (h 
*ApiHandler) ListAlertConfigsForProject(c echo.Context) error { projectID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } - var aa []model.AlertConfig + var aa []db.VAlertConfig if qp := c.QueryParam("alert_type_id"); qp != "" { alertTypeID, err := uuid.Parse(qp) if err != nil { return httperr.MalformedID(err) } - aa, err = h.AlertConfigService.GetAllAlertConfigsForProjectAndAlertType(c.Request().Context(), projectID, alertTypeID) + aa, err = h.DBService.AlertConfigListForProjectAlertType(c.Request().Context(), db.AlertConfigListForProjectAlertTypeParams{ + ProjectID: projectID, + AlertTypeID: alertTypeID, + }) if err != nil { return httperr.InternalServerError(err) } } else { - aa, err = h.AlertConfigService.GetAllAlertConfigsForProject(c.Request().Context(), projectID) + aa, err = h.DBService.AlertConfigListForProject(c.Request().Context(), projectID) if err != nil { return httperr.InternalServerError(err) } @@ -53,7 +57,7 @@ func (h *ApiHandler) GetAllAlertConfigsForProject(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} dto.AlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -63,7 +67,7 @@ func (h *ApiHandler) ListInstrumentAlertConfigs(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - aa, err := h.AlertConfigService.GetAllAlertConfigsForInstrument(c.Request().Context(), instrumentID) + aa, err := h.DBService.AlertConfigListForInstrument(c.Request().Context(), instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -77,7 +81,7 @@ func (h *ApiHandler) ListInstrumentAlertConfigs(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param 
alert_config_id path string true "alert config uuid" Format(uuid) -// @Success 200 {object} model.AlertConfig +// @Success 200 {object} dto.AlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -87,7 +91,7 @@ func (h *ApiHandler) GetAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - a, err := h.AlertConfigService.GetOneAlertConfig(c.Request().Context(), acID) + a, err := h.DBService.AlertConfigGet(c.Request().Context(), acID) if err != nil { return httperr.InternalServerError(err) } @@ -101,16 +105,16 @@ func (h *ApiHandler) GetAlertConfig(c echo.Context) error { // @Accept json // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param alert_config body model.AlertConfig true "alert config payload" +// @Param alert_config body dto.AlertConfig true "alert config payload" // @Param key query string false "api key" -// @Success 200 {object} model.AlertConfig +// @Success 200 {object} dto.AlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/alert_configs [post] // @Security Bearer func (h *ApiHandler) CreateAlertConfig(c echo.Context) error { - ac := model.AlertConfig{} + var ac dto.AlertConfig if err := c.Bind(&ac); err != nil { return httperr.MalformedBody(err) } @@ -118,10 +122,10 @@ func (h *ApiHandler) CreateAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) ac.ProjectID, ac.CreatorID, ac.CreateDate = projectID, profile.ID, time.Now() - acNew, err := h.AlertConfigService.CreateAlertConfig(c.Request().Context(), ac) + acNew, err := h.DBService.AlertConfigCreate(c.Request().Context(), ac) if err != nil { return httperr.InternalServerError(err) } @@ -136,16 +140,16 @@ func (h 
*ApiHandler) CreateAlertConfig(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param alert_config_id path string true "alert config uuid" Format(uuid) -// @Param alert_config body model.AlertConfig true "alert config payload" +// @Param alert_config body dto.AlertConfig true "alert config payload" // @Param key query string false "api key" -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} dto.AlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/alert_configs/{alert_config_id} [put] // @Security Bearer func (h *ApiHandler) UpdateAlertConfig(c echo.Context) error { - var ac model.AlertConfig + var ac dto.AlertConfig if err := c.Bind(&ac); err != nil { return httperr.MalformedBody(err) } @@ -153,10 +157,10 @@ func (h *ApiHandler) UpdateAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := time.Now() ac.UpdaterID, ac.UpdateDate = &p.ID, &t - aUpdated, err := h.AlertConfigService.UpdateAlertConfig(c.Request().Context(), acID, ac) + aUpdated, err := h.DBService.AlertConfigUpdate(c.Request().Context(), acID, ac) if err != nil { return httperr.InternalServerError(err) } @@ -171,7 +175,7 @@ func (h *ApiHandler) UpdateAlertConfig(c echo.Context) error { // @Param project_id path string true "Project ID" Format(uuid) // @Param alert_config_id path string true "instrument uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} dto.AlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -182,7 +186,7 @@ func (h *ApiHandler) DeleteAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err 
:= h.AlertConfigService.DeleteAlertConfig(c.Request().Context(), acID); err != nil { + if err := h.DBService.AlertConfigDelete(c.Request().Context(), acID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/alert_subscription.go b/api/internal/handler/alert_subscription.go index 4cd13b97..1e0a4eae 100644 --- a/api/internal/handler/alert_subscription.go +++ b/api/internal/handler/alert_subscription.go @@ -4,8 +4,9 @@ import ( "errors" "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -20,21 +21,21 @@ import ( // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param alert_config_id path string true "alert config uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.AlertSubscription +// @Success 200 {object} dto.AlertSubscription // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/instruments/{instrument_id}/alert_configs/{alert_config_id}/subscribe [post] // @Security Bearer func (h *ApiHandler) SubscribeProfileToAlerts(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) profileID := p.ID alertConfigID, err := uuid.Parse(c.Param("alert_config_id")) if err != nil { return httperr.MalformedID(err) } - pa, err := h.AlertSubscriptionService.SubscribeProfileToAlerts(c.Request().Context(), alertConfigID, profileID) + pa, err := h.DBService.AlertProfileSubscriptionCreateForAlertConfigProfile(c.Request().Context(), alertConfigID, profileID) if err != nil { return httperr.InternalServerError(err) } @@ -57,14 +58,17 
@@ func (h *ApiHandler) SubscribeProfileToAlerts(c echo.Context) error { // @Router /projects/{project_id}/instruments/{instrument_id}/alert_configs/{alert_config_id}/unsubscribe [post] // @Security Bearer func (h *ApiHandler) UnsubscribeProfileToAlerts(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) profileID := p.ID alertConfigID, err := uuid.Parse(c.Param("alert_config_id")) if err != nil { return httperr.MalformedID(err) } - if err = h.AlertSubscriptionService.UnsubscribeProfileToAlerts(c.Request().Context(), alertConfigID, profileID); err != nil { + if err = h.DBService.AlertProfileSubscriptionDelete(c.Request().Context(), db.AlertProfileSubscriptionDeleteParams{ + AlertConfigID: alertConfigID, + ProfileID: profileID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -76,16 +80,16 @@ func (h *ApiHandler) UnsubscribeProfileToAlerts(c echo.Context) error { // @Tags alert-subscription // @Produce json // @Param key query string false "api key" -// @Success 200 {array} model.AlertSubscription +// @Success 200 {array} dto.AlertSubscription // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alert_subscriptions [get] // @Security Bearer func (h *ApiHandler) ListMyAlertSubscriptions(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) profileID := p.ID - ss, err := h.AlertSubscriptionService.ListMyAlertSubscriptions(c.Request().Context(), profileID) + ss, err := h.DBService.AlertSubscriptionListForProfile(c.Request().Context(), profileID) if err != nil { return httperr.InternalServerError(err) } @@ -99,16 +103,16 @@ func (h *ApiHandler) ListMyAlertSubscriptions(c echo.Context) error { // @Accept json // @Produce json // @Param alert_subscription_id path string true "alert subscription id" Format(uuid) -// @Param 
alert_subscription body model.AlertSubscription true "alert subscription payload" +// @Param alert_subscription body dto.AlertSubscription true "alert subscription payload" // @Param key query string false "api key" -// @Success 200 {array} model.AlertSubscription +// @Success 200 {array} dto.AlertSubscription // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /alert_subscriptions/{alert_subscription_id} [put] // @Security Bearer func (h *ApiHandler) UpdateMyAlertSubscription(c echo.Context) error { - var s model.AlertSubscription + var s dto.AlertSubscription if err := c.Bind(&s); err != nil { return httperr.MalformedBody(err) } @@ -118,15 +122,15 @@ func (h *ApiHandler) UpdateMyAlertSubscription(c echo.Context) error { } s.ID = sID - p := c.Get("profile").(model.Profile) - t, err := h.AlertSubscriptionService.GetAlertSubscriptionByID(c.Request().Context(), sID) + p := c.Get("profile").(dto.Profile) + t, err := h.DBService.AlertSubscriptionGet(c.Request().Context(), sID) if err != nil { return httperr.InternalServerError(err) } if p.ID != t.ProfileID { return httperr.Unauthorized(errors.New("profile id or requester did not match alert subscription id")) } - sUpdated, err := h.AlertSubscriptionService.UpdateMyAlertSubscription(c.Request().Context(), s) + sUpdated, err := h.DBService.AlertProfileSubscriptionUpdateForProfile(c.Request().Context(), s) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/autocomplete.go b/api/internal/handler/autocomplete.go index cd9594ed..79d804aa 100644 --- a/api/internal/handler/autocomplete.go +++ b/api/internal/handler/autocomplete.go @@ -1,8 +1,9 @@ package handler import ( + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" 
"net/http" @@ -15,7 +16,7 @@ import ( // @Tags autocomplete // @Produce json // @Param q query string true "search query string" -// @Success 200 {array} model.EmailAutocompleteResult +// @Success 200 {array} dto.EmailAutocompleteResult // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -23,10 +24,13 @@ import ( func (h *ApiHandler) ListEmailAutocomplete(c echo.Context) error { searchText := c.QueryParam("q") if searchText == "" { - return c.JSON(http.StatusOK, make([]model.EmailAutocompleteResult, 0)) + return c.JSON(http.StatusOK, make([]dto.EmailAutocompleteResult, 0)) } - limit := 5 - rr, err := h.EmailAutocompleteService.ListEmailAutocomplete(c.Request().Context(), searchText, limit) + var limit int32 = 5 + rr, err := h.DBService.EmailAutocompleteList(c.Request().Context(), db.EmailAutocompleteListParams{ + SearchKeyword: &searchText, + ResultLimit: limit, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/aware.go b/api/internal/handler/aware.go index 9ea5391c..794061e2 100644 --- a/api/internal/handler/aware.go +++ b/api/internal/handler/aware.go @@ -3,8 +3,8 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -13,13 +13,13 @@ import ( // @Summary lists alert configs for a project // @Tags aware // @Produce json -// @Success 200 {array} model.AwareParameter +// @Success 200 {array} dto.AwareParameter // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /aware/parameters [get] func (h *ApiHandler) ListAwareParameters(c echo.Context) error { - pp, err := h.AwareParameterService.ListAwareParameters(c.Request().Context()) + pp, err := 
h.DBService.AwareParameterList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -31,13 +31,13 @@ func (h *ApiHandler) ListAwareParameters(c echo.Context) error { // @Summary lists alert configs for a project // @Tags aware // @Produce json -// @Success 200 {array} model.AwarePlatformParameterConfig +// @Success 200 {array} dto.AwarePlatformParameterConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /aware/data_acquisition_config [get] func (h *ApiHandler) ListAwarePlatformParameterConfig(c echo.Context) error { - cc, err := h.AwareParameterService.ListAwarePlatformParameterConfig(c.Request().Context()) + cc, err := h.DBService.AwarePlatformParameterConfigList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/collection_groups.go b/api/internal/handler/collection_groups.go index e90fca0f..6b15966b 100644 --- a/api/internal/handler/collection_groups.go +++ b/api/internal/handler/collection_groups.go @@ -5,12 +5,12 @@ import ( "strconv" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/labstack/echo/v4" ) @@ -20,7 +20,7 @@ import ( // @Tags collection-groups // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} dto.AlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -30,7 +30,7 @@ func (h *ApiHandler) ListCollectionGroups(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - cc, err := h.CollectionGroupService.ListCollectionGroups(c.Request().Context(), pID) + 
cc, err := h.DBService.CollectionGroupListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -44,13 +44,13 @@ func (h *ApiHandler) ListCollectionGroups(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param collection_group_id path string true "collection group uuid" Format(uuid) -// @Success 200 {object} model.CollectionGroupDetails +// @Success 200 {object} dto.CollectionGroupDetails // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/collection_groups/{collection_group_id} [get] func (h *ApiHandler) GetCollectionGroupDetails(c echo.Context) error { - pID, err := uuid.Parse(c.Param("project_id")) + _, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } @@ -58,7 +58,7 @@ func (h *ApiHandler) GetCollectionGroupDetails(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - d, err := h.CollectionGroupService.GetCollectionGroupDetails(c.Request().Context(), pID, cgID) + d, err := h.DBService.CollectionGroupDetailsGet(c.Request().Context(), cgID) if err != nil { return httperr.InternalServerError(err) } @@ -72,16 +72,16 @@ func (h *ApiHandler) GetCollectionGroupDetails(c echo.Context) error { // @Tags collection-groups // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param collection_group body model.CollectionGroup true "collection group payload" +// @Param collection_group body dto.CollectionGroup true "collection group payload" // @Param key query string false "api key" -// @Success 200 {array} model.CollectionGroup +// @Success 200 {array} dto.CollectionGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/collection_groups [post] // @Security Bearer func 
(h *ApiHandler) CreateCollectionGroup(c echo.Context) error { - var cg model.CollectionGroup + var cg dto.CollectionGroup // Bind Information Provided if err := c.Bind(&cg); err != nil { return httperr.MalformedBody(err) @@ -92,14 +92,14 @@ func (h *ApiHandler) CreateCollectionGroup(c echo.Context) error { return httperr.MalformedID(err) } cg.ProjectID = pID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) cg.CreatorID, cg.CreateDate = p.ID, time.Now() - cgNew, err := h.CollectionGroupService.CreateCollectionGroup(c.Request().Context(), cg) + cgNew, err := h.DBService.CollectionGroupCreate(c.Request().Context(), cg) if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, []model.CollectionGroup{cgNew}) + return c.JSON(http.StatusCreated, []db.CollectionGroup{cgNew}) } // UpdateCollectionGroup godoc @@ -109,16 +109,16 @@ func (h *ApiHandler) CreateCollectionGroup(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param collection_group_id path string true "collection group uuid" -// @Param collection_group body model.CollectionGroup true "collection group payload" +// @Param collection_group body dto.CollectionGroup true "collection group payload" // @Param key query string false "api key" -// @Success 200 {object} model.CollectionGroup +// @Success 200 {object} dto.CollectionGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/collection_groups/{collection_group_id} [put] // @Security Bearer func (h *ApiHandler) UpdateCollectionGroup(c echo.Context) error { - var cg model.CollectionGroup + var cg dto.CollectionGroup if err := c.Bind(&cg); err != nil { return httperr.MalformedBody(err) } @@ -135,10 +135,10 @@ func (h *ApiHandler) UpdateCollectionGroup(c echo.Context) error { } cg.ID = cgID - p := c.Get("profile").(model.Profile) + p := 
c.Get("profile").(dto.Profile) t := time.Now() cg.UpdaterID, cg.UpdateDate = &p.ID, &t - cgUpdated, err := h.CollectionGroupService.UpdateCollectionGroup(c.Request().Context(), cg) + cgUpdated, err := h.DBService.CollectionGroupUpdate(c.Request().Context(), cg) if err != nil { return httperr.InternalServerError(err) } @@ -168,7 +168,10 @@ func (h *ApiHandler) DeleteCollectionGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.CollectionGroupService.DeleteCollectionGroup(c.Request().Context(), pID, cgID); err != nil { + if err := h.DBService.CollectionGroupDelete(c.Request().Context(), db.CollectionGroupDeleteParams{ + ProjectID: pID, + ID: cgID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -198,17 +201,21 @@ func (h *ApiHandler) AddTimeseriesToCollectionGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var sortOrder int + var sortOrder int32 soParam := c.QueryParam("sort_order") if soParam != "" { - so64, err := strconv.ParseInt(soParam, 10, 0) + so64, err := strconv.ParseInt(soParam, 10, 32) if err != nil { return httperr.BadRequest(err) } - sortOrder = int(so64) + sortOrder = int32(so64) } - if err := h.CollectionGroupService.AddTimeseriesToCollectionGroup(c.Request().Context(), cgID, tsID, sortOrder); err != nil { + if err := h.DBService.CollectionGroupTimeseriesCreate(c.Request().Context(), db.CollectionGroupTimeseriesCreateParams{ + CollectionGroupID: cgID, + TimeseriesID: tsID, + SortOrder: sortOrder, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusCreated, make(map[string]interface{})) @@ -238,17 +245,21 @@ func (h *ApiHandler) UpdateTimeseriesCollectionGroupSortOrder(c echo.Context) er if err != nil { return httperr.MalformedID(err) } - var sortOrder int + var sortOrder int32 soParam := c.QueryParam("sort_order") if soParam != "" { - so64, err := 
strconv.ParseInt(soParam, 10, 0) + so64, err := strconv.ParseInt(soParam, 10, 32) if err != nil { return httperr.BadRequest(err) } - sortOrder = int(so64) + sortOrder = int32(so64) } - if err := h.CollectionGroupService.UpdateTimeseriesCollectionGroupSortOrder(c.Request().Context(), cgID, tsID, sortOrder); err != nil { + if err := h.DBService.CollectionGroupTimeseriesUpdateSortOrder(c.Request().Context(), db.CollectionGroupTimeseriesUpdateSortOrderParams{ + CollectionGroupID: cgID, + TimeseriesID: tsID, + SortOrder: sortOrder, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -278,7 +289,10 @@ func (h *ApiHandler) RemoveTimeseriesFromCollectionGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.CollectionGroupService.RemoveTimeseriesFromCollectionGroup(c.Request().Context(), cgID, tsID); err != nil { + if err := h.DBService.CollectionGroupTimeseriesDelete(c.Request().Context(), db.CollectionGroupTimeseriesDeleteParams{ + CollectionGroupID: cgID, + TimeseriesID: tsID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/datalogger.go b/api/internal/handler/datalogger.go index 16f62707..363d1a06 100644 --- a/api/internal/handler/datalogger.go +++ b/api/internal/handler/datalogger.go @@ -6,8 +6,9 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -18,7 +19,7 @@ import ( // @Tags datalogger // @Produce json // @Param key query string false "api key" -// @Success 200 {array} model.Datalogger +// @Success 200 {array} dto.Datalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 
{object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -32,7 +33,7 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { return httperr.MalformedID(err) } - dls, err := h.DataloggerService.ListProjectDataloggers(c.Request().Context(), pID) + dls, err := h.DBService.DataloggerListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -40,7 +41,8 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { return c.JSON(http.StatusOK, dls) } - dls, err := h.DataloggerService.ListAllDataloggers(c.Request().Context()) + // TODO: do we actually need this? If so it should probably be paginated + dls, err := h.DBService.DataloggerList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -54,9 +56,9 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { // @Tags datalogger // @Accept json // @Produce json -// @Param datalogger body model.Datalogger true "datalogger payload" +// @Param datalogger body dto.Datalogger true "datalogger payload" // @Param key query string false "api key" -// @Success 200 {array} model.DataloggerWithKey +// @Success 200 {array} dto.DataloggerWithKey // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -64,25 +66,28 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { // @Security Bearer func (h *ApiHandler) CreateDatalogger(c echo.Context) error { ctx := c.Request().Context() - n := model.Datalogger{} + n := dto.Datalogger{} if err := c.Bind(&n); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) n.CreatorID = p.ID if n.Name == "" { return httperr.BadRequest(errors.New("valid `name` field required")) } - model, err := h.DataloggerService.GetDataloggerModelName(ctx, n.ModelID) + model, err := h.DBService.DataloggerGetModelName(ctx, n.ModelID) if err != nil { return 
httperr.BadRequest(fmt.Errorf("data logger model id %s not found", n.ModelID)) } // check if datalogger with model and sn already exists and is not deleted - exists, err := h.DataloggerService.GetDataloggerIsActive(ctx, model, n.SN) + exists, err := h.DBService.DataloggerGetActive(ctx, db.DataloggerGetActiveParams{ + Model: model, + Sn: n.SN, + }) if err != nil { return httperr.InternalServerError(err) } @@ -91,7 +96,7 @@ func (h *ApiHandler) CreateDatalogger(c echo.Context) error { return httperr.BadRequest(errors.New("active data logger model with this model and serial number already exist")) } - dl, err := h.DataloggerService.CreateDatalogger(ctx, n) + dl, err := h.DBService.DataloggerCreate(ctx, n) if err != nil { return httperr.InternalServerError(err) } @@ -106,7 +111,7 @@ func (h *ApiHandler) CreateDatalogger(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.DataloggerWithKey +// @Success 200 {object} dto.DataloggerWithKey // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -119,17 +124,21 @@ func (h *ApiHandler) CycleDataloggerKey(c echo.Context) error { return httperr.MalformedID(err) } - u := model.Datalogger{ID: dlID} + u := dto.Datalogger{ID: dlID} - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.NotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("datalogger does not exist")) } - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) t := time.Now() u.UpdaterID, u.UpdateDate = &profile.ID, &t - dl, err := h.DataloggerService.CycleDataloggerKey(ctx, u) + dl, err := h.DBService.DataloggerHashUpdate(ctx, u) if err != nil { return 
httperr.InternalServerError(err) } @@ -144,7 +153,7 @@ func (h *ApiHandler) CycleDataloggerKey(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.Datalogger +// @Success 200 {object} dto.Datalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -155,7 +164,7 @@ func (h *ApiHandler) GetDatalogger(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - dl, err := h.DataloggerService.GetOneDatalogger(c.Request().Context(), dlID) + dl, err := h.DBService.DataloggerGet(c.Request().Context(), dlID) if err != nil { httperr.ServerErrorOrNotFound(err) } @@ -169,9 +178,9 @@ func (h *ApiHandler) GetDatalogger(c echo.Context) error { // @Tags datalogger // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) -// @Param datalogger body model.Datalogger true "datalogger payload" +// @Param datalogger body dto.Datalogger true "datalogger payload" // @Param key query string false "api key" -// @Success 200 {object} model.Datalogger +// @Success 200 {object} dto.Datalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -184,21 +193,25 @@ func (h *ApiHandler) UpdateDatalogger(c echo.Context) error { return httperr.MalformedID(err) } - u := model.Datalogger{ID: dlID} + u := dto.Datalogger{ID: dlID} if err := c.Bind(&u); err != nil { return httperr.MalformedBody(err) } u.ID = dlID - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { return httperr.InternalServerError(err) } + if !exists { + return httperr.NotFound(errors.New("datalogger does not exist")) + } - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) 
t := time.Now() u.UpdaterID, u.UpdateDate = &profile.ID, &t - dlUpdated, err := h.DataloggerService.UpdateDatalogger(ctx, u) + dlUpdated, err := h.DBService.DataloggerUpdate(ctx, u) if err != nil { return httperr.InternalServerError(err) } @@ -226,16 +239,20 @@ func (h *ApiHandler) DeleteDatalogger(c echo.Context) error { return httperr.MalformedID(err) } - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { return httperr.InternalServerError(err) } + if !exists { + return httperr.NotFound(errors.New("datalogger does not exist")) + } + profile := c.Get("profile").(dto.Profile) - d := model.Datalogger{ID: dlID} - profile := c.Get("profile").(model.Profile) - t := time.Now() - d.UpdaterID, d.UpdateDate = &profile.ID, &t - - if err := h.DataloggerService.DeleteDatalogger(ctx, d); err != nil { + if err := h.DBService.DataloggerDelete(ctx, db.DataloggerDeleteParams{ + ID: dlID, + Updater: profile.ID, + UpdateDate: time.Now(), + }); err != nil { return httperr.InternalServerError(err) } @@ -250,7 +267,7 @@ func (h *ApiHandler) DeleteDatalogger(c echo.Context) error { // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.DataloggerTablePreview +// @Success 200 {object} dto.DataloggerTablePreview // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -265,7 +282,7 @@ func (h *ApiHandler) GetDataloggerTablePreview(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - preview, err := h.DataloggerService.GetDataloggerTablePreview(c.Request().Context(), dataloggerTableID) + preview, err := h.DBService.DataloggerTablePreviewGet(c.Request().Context(), dataloggerTableID) if err != nil { return 
httperr.ServerErrorOrNotFound(err) } @@ -280,7 +297,7 @@ func (h *ApiHandler) GetDataloggerTablePreview(c echo.Context) error { // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.DataloggerTablePreview +// @Success 200 {object} dto.DataloggerTablePreview // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -295,7 +312,7 @@ func (h *ApiHandler) ResetDataloggerTableName(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.DataloggerService.ResetDataloggerTableName(c.Request().Context(), dataloggerTableID); err != nil { + if err := h.DBService.DataloggerUpdateTableNameBlank(c.Request().Context(), dataloggerTableID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"datalogger_table_id": dataloggerTableID}) diff --git a/api/internal/handler/datalogger_telemetry.go b/api/internal/handler/datalogger_telemetry.go index a32a35f4..7d880f84 100644 --- a/api/internal/handler/datalogger_telemetry.go +++ b/api/internal/handler/datalogger_telemetry.go @@ -9,8 +9,9 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -33,7 +34,10 @@ func (h *TelemetryHandler) CreateOrUpdateDataloggerMeasurements(c echo.Context) ctx := c.Request().Context() // Make sure datalogger is active - dl, err := h.DataloggerTelemetryService.GetDataloggerByModelSN(ctx, modelName, sn) + dl, err := h.DBService.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ + Model: &modelName, + Sn: sn, + }) if err != nil { return 
httperr.InternalServerError(err) } @@ -48,13 +52,11 @@ func (h *TelemetryHandler) CreateOrUpdateDataloggerMeasurements(c echo.Context) return httperr.MalformedBody(err) } - var prv model.DataloggerTablePreview - if err := prv.Preview.Set(rawJSON); err != nil { - return httperr.InternalServerError(err) - } + var prv dto.DataloggerTablePreview + prv.Preview = rawJSON prv.UpdateDate = time.Now() - if _, err := h.DataloggerTelemetryService.UpdateDataloggerTablePreview(ctx, dl.ID, preparse, prv); err != nil { + if _, err := h.DBService.DataloggerTablePreviewUpdate(ctx, dl.ID, preparse, prv); err != nil { return httperr.InternalServerError(err) } @@ -72,7 +74,7 @@ func (h *TelemetryHandler) CreateOrUpdateDataloggerMeasurements(c echo.Context) // CSIJSON Output Format: https://help.campbellsci.com/crbasic/cr350/#parameters/mqtt_outputformat.htm?Highlight=CSIJSON // // HTTPPost: https://help.campbellsci.com/crbasic/cr350/#Instructions/httppost.htm?Highlight=httppost -func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) echo.HandlerFunc { +func getCR6Handler(h *TelemetryHandler, dl db.VDatalogger, rawJSON []byte) echo.HandlerFunc { return func(c echo.Context) error { // Errors are collected and sent to datalogger preview for debugging since datalogger clients cannot parse responses em := make([]string, 0) @@ -83,21 +85,21 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech // to collect logs to be previewed in the core web application. The error code returned to the client datalogger // will still be relevant to the arm of control flow that raised it.
defer func() { - if err := h.DataloggerTelemetryService.UpdateDataloggerTableError(ctx, dl.ID, &tn, &model.DataloggerError{Errors: em}); err != nil { + if err := h.DBService.DataloggerTableErrorUpdate(ctx, dl.ID, &tn, &dto.DataloggerError{Errors: em}); err != nil { log.Printf(err.Error()) } }() // Upload Datalogger Measurements - var pl model.DataloggerPayload + var pl dto.DataloggerPayload if err := json.Unmarshal(rawJSON, &pl); err != nil { em = append(em, fmt.Sprintf("%d: %s", http.StatusBadRequest, err.Error())) return httperr.MalformedBody(err) } // Check sn from route param matches sn in request body - if dl.SN != pl.Head.Environment.SerialNo { - snErr := fmt.Sprint(snErrMsg, dl.SN) + if dl.Sn != pl.Head.Environment.SerialNo { + snErr := fmt.Sprint(snErrMsg, dl.Sn) em = append(em, fmt.Sprintf("%d: %s", http.StatusBadRequest, snErr)) return httperr.BadRequest(errors.New(snErr)) } @@ -111,34 +113,32 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech // reroute deferred errors and previews to respective table tn = pl.Head.Environment.TableName - var prv model.DataloggerTablePreview - if err := prv.Preview.Set(rawJSON); err != nil { - return httperr.MalformedBody(err) - } + var prv dto.DataloggerTablePreview + prv.Preview = rawJSON prv.UpdateDate = time.Now() - tableID, err := h.DataloggerTelemetryService.UpdateDataloggerTablePreview(ctx, dl.ID, tn, prv) + tableID, err := h.DBService.DataloggerTablePreviewUpdate(ctx, dl.ID, tn, prv) if err != nil { em = append(em, fmt.Sprintf("%d: %s", http.StatusInternalServerError, err.Error())) return httperr.InternalServerError(err) } - eqt, err := h.EquivalencyTableService.GetEquivalencyTable(ctx, tableID) + eqt, err := h.DBService.EquivalencyTableGet(ctx, tableID) if err != nil { em = append(em, fmt.Sprintf("%d: %s", http.StatusInternalServerError, err.Error())) return httperr.InternalServerError(err) } - eqtFields := make(map[string]model.EquivalencyTableRow) - for _, r := range eqt.Rows { 
- eqtFields[r.FieldName] = model.EquivalencyTableRow{ + eqtFields := make(map[string]dto.EquivalencyTableRow) + for _, r := range eqt.Fields { + eqtFields[r.FieldName] = dto.EquivalencyTableRow{ TimeseriesID: r.TimeseriesID, InstrumentID: r.InstrumentID, } } fields := pl.Head.Fields - mcs := make([]model.MeasurementCollection, len(fields)) + mcs := make([]dto.MeasurementCollection, len(fields)) // Error if there is no field name in equivalency table to map the field name in the raw payload to // delete the keys that were used, check for any dangling afterwards @@ -161,7 +161,7 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech } // collect measurements - items := make([]model.Measurement, len(pl.Data)) + items := make([]dto.Measurement, len(pl.Data)) for j, d := range pl.Data { // To avoid complications of daylight savings and related issues, // all incoming datalogger timestamps are expected to be in UTC @@ -178,10 +178,10 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech delete(eqtFields, f.Name) continue } - items[j] = model.Measurement{TimeseriesID: *row.TimeseriesID, Time: t, Value: model.FloatNanInf(v)} + items[j] = dto.Measurement{TimeseriesID: *row.TimeseriesID, Time: t, Value: dto.FloatNanInf(v)} } - mcs[i] = model.MeasurementCollection{TimeseriesID: *row.TimeseriesID, Items: items} + mcs[i] = dto.MeasurementCollection{TimeseriesID: *row.TimeseriesID, Items: items} delete(eqtFields, f.Name) } @@ -191,11 +191,11 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech em = append(em, fmt.Sprintf("field '%s' in equivalency table does not match any fields from datalogger", eqtName)) } - if _, err = h.MeasurementService.CreateOrUpdateTimeseriesMeasurements(ctx, mcs); err != nil { + if err := h.DBService.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mcs); err != nil { em = append(em, fmt.Sprintf("%d: %s", http.StatusInternalServerError, err.Error())) return 
httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, map[string]interface{}{"model": *dl.Model, "sn": dl.SN}) + return c.JSON(http.StatusOK, map[string]interface{}{"model": *dl.Model, "sn": dl.Sn}) } } diff --git a/api/internal/handler/district_rollup.go b/api/internal/handler/district_rollup.go index 900a313b..8432c7fc 100644 --- a/api/internal/handler/district_rollup.go +++ b/api/internal/handler/district_rollup.go @@ -4,8 +4,9 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -18,18 +19,18 @@ const timeRangeErrMessage = "maximum requested time range exceeded (5 years)" // @Tags district-rollup // @Produce json // @Param project_id path string true "project id" Format(uuid) -// @Success 200 {array} model.DistrictRollup +// @Success 200 {array} dto.DistrictRollup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/district_rollup/evaluation_submittals [get] func (h *ApiHandler) ListProjectEvaluationDistrictRollup(c echo.Context) error { - id, err := uuid.Parse(c.Param("project_id")) + pID, err := uuid.Parse(c.Param("project_id")) if err != nil { httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow from, to := c.QueryParam("from_timestamp_month"), c.QueryParam("to_timestamp_month") if err := tw.SetWindow(from, to, time.Now().AddDate(-1, 0, 0), time.Now()); err != nil { return httperr.MalformedDate(err) @@ -38,7 +39,11 @@ func (h *ApiHandler) ListProjectEvaluationDistrictRollup(c echo.Context) error { return httperr.Message(http.StatusBadRequest, timeRangeErrMessage) } - project, err := 
h.DistrictRollupService.ListEvaluationDistrictRollup(c.Request().Context(), id, tw) + project, err := h.DBService.DistrictRollupListEvaluationForProjectAlertConfig(c.Request().Context(), db.DistrictRollupListEvaluationForProjectAlertConfigParams{ + ProjectID: pID, + StartMonthTime: tw.After, + EndMonthTime: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } @@ -51,18 +56,18 @@ func (h *ApiHandler) ListProjectEvaluationDistrictRollup(c echo.Context) error { // @Tags district-rollup // @Produce json // @Param project_id path string true "project id" Format(uuid) -// @Success 200 {array} model.DistrictRollup +// @Success 200 {array} dto.DistrictRollup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/district_rollup/measurement_submittals [get] func (h *ApiHandler) ListProjectMeasurementDistrictRollup(c echo.Context) error { - id, err := uuid.Parse(c.Param("project_id")) + pID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow from, to := c.QueryParam("from_timestamp_month"), c.QueryParam("to_timestamp_month") if err := tw.SetWindow(from, to, time.Now().AddDate(-1, 0, 0), time.Now()); err != nil { return httperr.MalformedDate(err) @@ -71,7 +76,11 @@ func (h *ApiHandler) ListProjectMeasurementDistrictRollup(c echo.Context) error return httperr.Message(http.StatusBadRequest, timeRangeErrMessage) } - project, err := h.DistrictRollupService.ListMeasurementDistrictRollup(c.Request().Context(), id, tw) + project, err := h.DBService.DistrictRollupListMeasurementForProjectAlertConfig(c.Request().Context(), db.DistrictRollupListMeasurementForProjectAlertConfigParams{ + ProjectID: pID, + StartMonthTime: tw.After, + EndMonthTime: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/domain.go 
b/api/internal/handler/domain.go index ad8cf96d..5d2a3dea 100644 --- a/api/internal/handler/domain.go +++ b/api/internal/handler/domain.go @@ -4,22 +4,21 @@ import ( "net/http" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) -// GetDomains godoc +// ListDomains godoc // // @Summary lists all domains // @Tags domain // @Produce json -// @Success 200 {array} model.Domain +// @Success 200 {array} dto.Domain // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /domains [get] -func (h *ApiHandler) GetDomains(c echo.Context) error { - dd, err := h.DomainService.GetDomains(c.Request().Context()) +func (h *ApiHandler) ListDomains(c echo.Context) error { + dd, err := h.DBService.DomainList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -31,13 +30,13 @@ func (h *ApiHandler) GetDomains(c echo.Context) error { // @Summary Get map with domain group as key // @Tags domain // @Produce json -// @Success 200 {object} model.DomainMap +// @Success 200 {object} dto.DomainMap // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /domains/map [get] func (h *ApiHandler) GetDomainMap(c echo.Context) error { - dm, err := h.DomainService.GetDomainMap(c.Request().Context()) + dm, err := h.DBService.DomainMapGet(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -49,13 +48,13 @@ func (h *ApiHandler) GetDomainMap(c echo.Context) error { // @Summary lists time zone options // @Tags domain // @Produce json -// @Success 200 {array} model.TimezoneOption +// @Success 200 {array} dto.TimezoneOption // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /domains [get] func (h *ApiHandler) 
ListTimezoneOptions(c echo.Context) error { - dd, err := h.DomainService.ListTimezoneOptions(c.Request().Context()) + dd, err := h.DBService.PgTimezoneNamesList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/equivalency_table.go b/api/internal/handler/equivalency_table.go index a0df6b0b..9ff947d0 100644 --- a/api/internal/handler/equivalency_table.go +++ b/api/internal/handler/equivalency_table.go @@ -1,11 +1,12 @@ package handler import ( + "errors" "fmt" "net/http" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -18,7 +19,7 @@ import ( // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.EquivalencyTable +// @Success 200 {array} dto.EquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -37,11 +38,15 @@ func (h *ApiHandler) GetEquivalencyTable(c echo.Context) error { ctx := c.Request().Context() - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - t, err := h.EquivalencyTableService.GetEquivalencyTable(ctx, dataloggerTableID) + t, err := h.DBService.EquivalencyTableGet(ctx, dataloggerTableID) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -56,9 +61,9 @@ func (h *ApiHandler) GetEquivalencyTable(c echo.Context) error { // @Produce json // @Param 
datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) -// @Param equivalency_table body model.EquivalencyTable true "equivalency table payload" +// @Param equivalency_table body dto.EquivalencyTable true "equivalency table payload" // @Param key query string false "api key" -// @Success 200 {object} model.EquivalencyTable +// @Success 200 {object} dto.EquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -71,7 +76,7 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { return httperr.MalformedID(err) } - t := model.EquivalencyTable{DataloggerID: dlID} + t := dto.EquivalencyTable{DataloggerID: dlID} if err := c.Bind(&t); err != nil { return httperr.MalformedBody(err) } @@ -90,7 +95,7 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { if t.DataloggerTableName == "" { return httperr.Message(http.StatusBadRequest, "payload must contain datalogger_table_name field") } - dataloggerTableID, err = h.DataloggerService.GetOrCreateDataloggerTable(ctx, dlID, t.DataloggerTableName) + dataloggerTableID, err = h.DBService.DataloggerTableGetOrCreate(ctx, dlID, t.DataloggerTableName) if err != nil { httperr.InternalServerError(err) } @@ -99,15 +104,23 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { t.DataloggerID = dlID t.DataloggerTableID = dataloggerTableID - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - if err := h.EquivalencyTableService.GetIsValidDataloggerTable(ctx, dataloggerTableID); err != nil { + valid, err := 
h.DBService.DataloggerTableGetIsValid(ctx, dataloggerTableID) + if err != nil { + return httperr.InternalServerError(err) + } + if !valid { return httperr.Message(http.StatusBadRequest, fmt.Sprintf("invalid datalogger table %s %s", t.DataloggerID, t.DataloggerTableName)) } - eqt, err := h.EquivalencyTableService.CreateOrUpdateEquivalencyTable(ctx, t) + eqt, err := h.DBService.EquivalencyTableCreateOrUpdate(ctx, t) if err != nil { return httperr.InternalServerError(err) } @@ -122,9 +135,9 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) -// @Param equivalency_table body model.EquivalencyTable true "equivalency table payload" +// @Param equivalency_table body dto.EquivalencyTable true "equivalency table payload" // @Param key query string false "api key" -// @Success 200 {object} model.EquivalencyTable +// @Success 200 {object} dto.EquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -141,7 +154,7 @@ func (h *ApiHandler) UpdateEquivalencyTable(c echo.Context) error { return httperr.MalformedID(err) } - t := model.EquivalencyTable{DataloggerID: dlID, DataloggerTableID: dataloggerTableID} + t := dto.EquivalencyTable{DataloggerID: dlID, DataloggerTableID: dataloggerTableID} if err := c.Bind(&t); err != nil { return httperr.MalformedBody(err) } @@ -151,11 +164,15 @@ func (h *ApiHandler) UpdateEquivalencyTable(c echo.Context) error { ctx := c.Request().Context() - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) 
} - eqtUpdated, err := h.EquivalencyTableService.UpdateEquivalencyTable(ctx, t) + eqtUpdated, err := h.DBService.EquivalencyTableUpdate(ctx, t) if err != nil { return httperr.InternalServerError(err) } @@ -190,11 +207,15 @@ func (h *ApiHandler) DeleteEquivalencyTable(c echo.Context) error { ctx := c.Request().Context() - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - if err := h.DataloggerService.DeleteDataloggerTable(ctx, dataloggerTableID); err != nil { + if err := h.DBService.DataloggerTableDelete(ctx, dataloggerTableID); err != nil { return httperr.InternalServerError(err) } @@ -232,11 +253,15 @@ func (h *ApiHandler) DeleteEquivalencyTableRow(c echo.Context) error { ctx := c.Request().Context() - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - if err := h.EquivalencyTableService.DeleteEquivalencyTableRow(ctx, rowID); err != nil { + if err := h.DBService.EquivalencyTableDelete(ctx, rowID); err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/evaluation.go b/api/internal/handler/evaluation.go index 09c219d5..e45de8b1 100644 --- a/api/internal/handler/evaluation.go +++ b/api/internal/handler/evaluation.go @@ -4,8 +4,9 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - 
"github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -17,7 +18,7 @@ import ( // @Tags evaluation // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.Evaluation +// @Success 200 {array} dto.Evaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -28,18 +29,21 @@ func (h *ApiHandler) ListProjectEvaluations(c echo.Context) error { return httperr.MalformedID(err) } ctx := c.Request().Context() - var ee []model.Evaluation + var ee []db.VEvaluation if qp := c.QueryParam("alert_config_id"); qp != "" { alertConfigID, err := uuid.Parse(qp) if err != nil { return httperr.MalformedID(err) } - ee, err = h.EvaluationService.ListProjectEvaluationsByAlertConfig(ctx, projectID, alertConfigID) + ee, err = h.DBService.EvaluationListForProjectAlertConfig(ctx, db.EvaluationListForProjectAlertConfigParams{ + ProjectID: projectID, + AlertConfigID: &alertConfigID, + }) if err != nil { return httperr.InternalServerError(err) } } else { - ee, err = h.EvaluationService.ListProjectEvaluations(ctx, projectID) + ee, err = h.DBService.EvaluationListForProject(ctx, projectID) if err != nil { return httperr.InternalServerError(err) } @@ -54,7 +58,7 @@ func (h *ApiHandler) ListProjectEvaluations(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.Evaluation +// @Success 200 {array} dto.Evaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -64,7 +68,7 @@ func (h *ApiHandler) ListInstrumentEvaluations(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - ee, err := h.EvaluationService.ListInstrumentEvaluations(c.Request().Context(), 
instrumentID) + ee, err := h.DBService.EvaluationListForInstrument(c.Request().Context(), &instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -78,7 +82,7 @@ func (h *ApiHandler) ListInstrumentEvaluations(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation_id path string true "evaluation uuid" Format(uuid) -// @Success 200 {object} model.Evaluation +// @Success 200 {object} dto.Evaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -88,7 +92,7 @@ func (h *ApiHandler) GetEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - ev, err := h.EvaluationService.GetEvaluation(c.Request().Context(), acID) + ev, err := h.DBService.EvaluationGet(c.Request().Context(), acID) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -101,16 +105,16 @@ func (h *ApiHandler) GetEvaluation(c echo.Context) error { // @Tags evaluation // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param evaluation body model.Evaluation true "evaluation payload" +// @Param evaluation body dto.Evaluation true "evaluation payload" // @Param key query string false "api key" -// @Success 200 {object} model.Evaluation +// @Success 200 {object} dto.Evaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/evaluations [post] // @Security Bearer func (h *ApiHandler) CreateEvaluation(c echo.Context) error { - ev := model.Evaluation{} + ev := dto.Evaluation{} if err := c.Bind(&ev); err != nil { return httperr.MalformedBody(err) } @@ -118,10 +122,10 @@ func (h *ApiHandler) CreateEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) 
ev.ProjectID, ev.CreatorID, ev.CreateDate = projectID, profile.ID, time.Now() - evNew, err := h.EvaluationService.CreateEvaluation(c.Request().Context(), ev) + evNew, err := h.DBService.EvaluationCreate(c.Request().Context(), ev) if err != nil { return httperr.InternalServerError(err) } @@ -135,16 +139,16 @@ func (h *ApiHandler) CreateEvaluation(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation_id path string true "evaluation uuid" Format(uuid) -// @Param evaluation body model.Evaluation true "evaluation payload" +// @Param evaluation body dto.Evaluation true "evaluation payload" // @Param key query string false "api key" -// @Success 200 {object} model.Evaluation +// @Success 200 {object} dto.Evaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/evaluations/{evaluation_id} [put] // @Security Bearer func (h *ApiHandler) UpdateEvaluation(c echo.Context) error { - var ev model.Evaluation + var ev dto.Evaluation if err := c.Bind(&ev); err != nil { return httperr.MalformedBody(err) } @@ -152,10 +156,10 @@ func (h *ApiHandler) UpdateEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := time.Now() ev.UpdaterID, ev.UpdateDate = &p.ID, &t - evUpdated, err := h.EvaluationService.UpdateEvaluation(c.Request().Context(), evID, ev) + evUpdated, err := h.DBService.EvaluationUpdate(c.Request().Context(), evID, ev) if err != nil { return httperr.InternalServerError(err) } @@ -170,7 +174,7 @@ func (h *ApiHandler) UpdateEvaluation(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation_id path string true "evaluation uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.AlertConfig +// 
@Success 200 {array} dto.AlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -181,7 +185,7 @@ func (h *ApiHandler) DeleteEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.EvaluationService.DeleteEvaluation(c.Request().Context(), acID); err != nil { + if err := h.DBService.EvaluationDelete(c.Request().Context(), acID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/handler.go b/api/internal/handler/handler.go index e92ba178..e4b31da2 100644 --- a/api/internal/handler/handler.go +++ b/api/internal/handler/handler.go @@ -1,19 +1,13 @@ package handler import ( - "context" - "log" "net/http" "time" "github.com/USACE/instrumentation-api/api/internal/cloud" "github.com/USACE/instrumentation-api/api/internal/config" "github.com/USACE/instrumentation-api/api/internal/middleware" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/USACE/instrumentation-api/api/internal/service" - "github.com/jackc/pgx/v5" - "github.com/jackc/pgx/v5/pgxpool" - pgxgeom "github.com/twpayne/pgx-geom" ) func newHttpClient() *http.Client { @@ -25,155 +19,48 @@ func newHttpClient() *http.Client { } } -func newDbConnPool(cfg config.DBConfig) *pgxpool.Pool { - config, err := pgxpool.ParseConfig(cfg.ConnStr()) - if err != nil { - log.Fatal(err) - } - - config.AfterConnect = func(ctx context.Context, conn *pgx.Conn) error { - return pgxgeom.Register(ctx, conn) - } - - pool, err := pgxpool.NewWithConfig(context.Background(), config) - if err != nil { - log.Fatal(err) - } - - return pool -} - type ApiHandler struct { - Middleware middleware.Middleware - BlobService cloud.Blob - AlertService service.AlertService - AlertConfigService service.AlertConfigService - AlertSubscriptionService service.AlertSubscriptionService - EmailAutocompleteService 
service.EmailAutocompleteService - AwareParameterService service.AwareParameterService - CollectionGroupService service.CollectionGroupService - DataloggerService service.DataloggerService - DataloggerTelemetryService service.DataloggerTelemetryService - DistrictRollupService service.DistrictRollupService - DomainService service.DomainService - EquivalencyTableService service.EquivalencyTableService - EvaluationService service.EvaluationService - HeartbeatService service.HeartbeatService - HomeService service.HomeService - InstrumentService service.InstrumentService - InstrumentAssignService service.InstrumentAssignService - InstrumentConstantService service.InstrumentConstantService - InstrumentGroupService service.InstrumentGroupService - InstrumentNoteService service.InstrumentNoteService - InstrumentStatusService service.InstrumentStatusService - IpiInstrumentService service.IpiInstrumentService - MeasurementService service.MeasurementService - InclinometerMeasurementService service.InclinometerMeasurementService - OpendcsService service.OpendcsService - PlotConfigService service.PlotConfigService - ProfileService service.ProfileService - ProjectRoleService service.ProjectRoleService - ProjectService service.ProjectService - ReportConfigService service.ReportConfigService - SaaInstrumentService service.SaaInstrumentService - SubmittalService service.SubmittalService - TimeseriesService service.TimeseriesService - TimeseriesCwmsService service.TimeseriesCwmsService - CalculatedTimeseriesService service.CalculatedTimeseriesService - ProcessTimeseriesService service.ProcessTimeseriesService - UnitService service.UnitService - UploaderService service.UploaderService + DBService *service.DBService + Middleware middleware.Middleware + BlobService cloud.Blob + PubsubService cloud.Pubsub + Config *config.ApiConfig } func NewApi(cfg *config.ApiConfig) *ApiHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig) 
- - profileService := service.NewProfileService(db, q) - projectRoleService := service.NewProjectRoleService(db, q) - dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) - mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) + ds := service.NewDBService(cfg.DBConfig) return &ApiHandler{ - Middleware: mw, - BlobService: cloud.NewS3Blob(&cfg.AWSS3Config, "/instrumentation", cfg.RoutePrefix), - AlertService: service.NewAlertService(db, q), - AlertConfigService: service.NewAlertConfigService(db, q), - AlertSubscriptionService: service.NewAlertSubscriptionService(db, q), - EmailAutocompleteService: service.NewEmailAutocompleteService(db, q), - AwareParameterService: service.NewAwareParameterService(db, q), - CollectionGroupService: service.NewCollectionGroupService(db, q), - DataloggerService: service.NewDataloggerService(db, q), - DataloggerTelemetryService: dataloggerTelemetryService, - DistrictRollupService: service.NewDistrictRollupService(db, q), - DomainService: service.NewDomainService(db, q), - EquivalencyTableService: service.NewEquivalencyTableService(db, q), - EvaluationService: service.NewEvaluationService(db, q), - HeartbeatService: service.NewHeartbeatService(db, q), - HomeService: service.NewHomeService(db, q), - InstrumentService: service.NewInstrumentService(db, q), - InstrumentAssignService: service.NewInstrumentAssignService(db, q), - InstrumentConstantService: service.NewInstrumentConstantService(db, q), - InstrumentGroupService: service.NewInstrumentGroupService(db, q), - InstrumentNoteService: service.NewInstrumentNoteService(db, q), - InstrumentStatusService: service.NewInstrumentStatusService(db, q), - IpiInstrumentService: service.NewIpiInstrumentService(db, q), - MeasurementService: service.NewMeasurementService(db, q), - InclinometerMeasurementService: service.NewInclinometerMeasurementService(db, q), - OpendcsService: service.NewOpendcsService(db, q), - 
PlotConfigService: service.NewPlotConfigService(db, q), - ProfileService: profileService, - ProjectRoleService: service.NewProjectRoleService(db, q), - ProjectService: service.NewProjectService(db, q), - ReportConfigService: service.NewReportConfigService(db, q, ps, cfg.AuthJWTMocked), - SaaInstrumentService: service.NewSaaInstrumentService(db, q), - SubmittalService: service.NewSubmittalService(db, q), - TimeseriesService: service.NewTimeseriesService(db, q), - TimeseriesCwmsService: service.NewTimeseriesCwmsService(db, q), - CalculatedTimeseriesService: service.NewCalculatedTimeseriesService(db, q), - ProcessTimeseriesService: service.NewProcessTimeseriesService(db, q), - UnitService: service.NewUnitService(db, q), - UploaderService: service.NewUploaderService(db, q), + DBService: ds, + Middleware: middleware.NewMiddleware(&cfg.ServerConfig, ds), + BlobService: cloud.NewS3Blob(&cfg.AWSS3Config, "/instrumentation", cfg.RoutePrefix), + PubsubService: cloud.NewSQSPubsub(&cfg.AWSSQSConfig), + Config: cfg, } } type TelemetryHandler struct { - Middleware middleware.Middleware - DataloggerService service.DataloggerService - DataloggerTelemetryService service.DataloggerTelemetryService - EquivalencyTableService service.EquivalencyTableService - MeasurementService service.MeasurementService + DBService *service.DBService + Middleware middleware.Middleware } func NewTelemetry(cfg *config.TelemetryConfig) *TelemetryHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - - profileService := service.NewProfileService(db, q) - projectRoleService := service.NewProjectRoleService(db, q) - dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) - mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) + ds := service.NewDBService(cfg.DBConfig) return &TelemetryHandler{ - Middleware: mw, - DataloggerService: service.NewDataloggerService(db, q), - DataloggerTelemetryService: 
dataloggerTelemetryService, - EquivalencyTableService: service.NewEquivalencyTableService(db, q), - MeasurementService: service.NewMeasurementService(db, q), + DBService: ds, + Middleware: middleware.NewMiddleware(&cfg.ServerConfig, ds), } } type AlertCheckHandler struct { - AlertCheckService service.AlertCheckService + DBService *service.DBService + Config *config.AlertCheckConfig } func NewAlertCheck(cfg *config.AlertCheckConfig) *AlertCheckHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - return &AlertCheckHandler{ - AlertCheckService: service.NewAlertCheckService(db, q, cfg), + DBService: service.NewDBService(cfg.DBConfig), } } diff --git a/api/internal/handler/heartbeat.go b/api/internal/handler/heartbeat.go index faa6cd34..79148db0 100644 --- a/api/internal/handler/heartbeat.go +++ b/api/internal/handler/heartbeat.go @@ -1,10 +1,13 @@ package handler import ( + "errors" "net/http" + "strconv" + "time" + _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -29,10 +32,10 @@ func (h *TelemetryHandler) Healthcheck(c echo.Context) error { // @Tags heartbeat // @Produce json // @Param key query string true "api key" -// @Success 200 {object} model.Heartbeat +// @Success 200 {object} dto.Heartbeat // @Router /heartbeat [post] func (h *ApiHandler) DoHeartbeat(c echo.Context) error { - hb, err := h.HeartbeatService.DoHeartbeat(c.Request().Context()) + hb, err := h.DBService.HeartbeatCreate(c.Request().Context(), time.Now()) if err != nil { return httperr.InternalServerError(err) } @@ -44,10 +47,10 @@ func (h *ApiHandler) DoHeartbeat(c echo.Context) error { // @Summary gets the latest heartbeat // @Tags heartbeat // @Produce json -// @Success 200 {object} model.Heartbeat +// @Success 200 {object} dto.Heartbeat // @Router /heartbeat/latest [get] func (h *ApiHandler) GetLatestHeartbeat(c 
echo.Context) error { - hb, err := h.HeartbeatService.GetLatestHeartbeat(c.Request().Context()) + hb, err := h.DBService.HeartbeatGetLatest(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -59,10 +62,19 @@ func (h *ApiHandler) GetLatestHeartbeat(c echo.Context) error { // @Summary returns all heartbeats // @Tags heartbeat // @Produce json -// @Success 200 {array} model.Heartbeat +// @Success 200 {array} dto.Heartbeat // @Router /heartbeats [get] func (h *ApiHandler) ListHeartbeats(c echo.Context) error { - hh, err := h.HeartbeatService.ListHeartbeats(c.Request().Context()) + var limit int32 = 50 + limitParam := c.QueryParam("limit") + if limitParam != "" { + limit64, err := strconv.ParseInt(limitParam, 10, 32) + if err != nil { + return httperr.BadRequest(errors.New("invalid value for parameter \"limit\"")) + } + limit = int32(limit64) + } + hh, err := h.DBService.HeartbeatList(c.Request().Context(), limit) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/home.go b/api/internal/handler/home.go index 094ab632..ba1018ed 100644 --- a/api/internal/handler/home.go +++ b/api/internal/handler/home.go @@ -3,8 +3,8 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -13,11 +13,11 @@ import ( // @Summary gets information for the homepage // @Tags home // @Produce json -// @Success 200 {object} model.Home +// @Success 200 {object} dto.Home // @Failure 500 {object} echo.HTTPError // @Router /home [get] func (h *ApiHandler) GetHome(c echo.Context) error { - home, err := h.HomeService.GetHome(c.Request().Context()) + home, err := h.DBService.HomeGet(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument.go
b/api/internal/handler/instrument.go index 47f366a1..fa532ceb 100644 --- a/api/internal/handler/instrument.go +++ b/api/internal/handler/instrument.go @@ -5,9 +5,9 @@ import ( "strings" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/paulmach/orb/geojson" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -18,13 +18,13 @@ import ( // @Summary lists all instruments // @Tags instrument // @Produce json -// @Success 200 {array} model.Instrument +// @Success 200 {array} dto.Instrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments [get] func (h *ApiHandler) ListInstruments(c echo.Context) error { - nn, err := h.InstrumentService.ListInstruments(c.Request().Context()) + nn, err := h.DBService.InstrumentList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -36,13 +36,13 @@ func (h *ApiHandler) ListInstruments(c echo.Context) error { // @Summary gets the total number of non deleted instruments in the system // @Tags instrument // @Produce json -// @Success 200 {object} model.InstrumentCount +// @Success 200 {object} dto.InstrumentCount // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/count [get] func (h *ApiHandler) GetInstrumentCount(c echo.Context) error { - ic, err := h.InstrumentService.GetInstrumentCount(c.Request().Context()) + ic, err := h.DBService.InstrumentGetCount(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -55,7 +55,7 @@ func (h *ApiHandler) GetInstrumentCount(c echo.Context) error { // @Tags instrument // @Produce json // @Param instrument_id path string true "instrument uuid" 
Format(uuid) -// @Success 200 {object} model.Instrument +// @Success 200 {object} dto.Instrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -65,7 +65,7 @@ func (h *ApiHandler) GetInstrument(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - n, err := h.InstrumentService.GetInstrument(c.Request().Context(), id) + n, err := h.DBService.InstrumentGet(c.Request().Context(), id) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -80,9 +80,9 @@ func (h *ApiHandler) GetInstrument(c echo.Context) error { // @Produce json // @Param project_id path string true "project id" Format(uuid) // @Param instrument_id path string true "instrument id" Format(uuid) -// @Param instrument body model.InstrumentCollection true "instrument collection payload" +// @Param instrument body dto.InstrumentCollection true "instrument collection payload" // @Param key query string false "api key" -// @Success 200 {array} model.IDSlugName +// @Success 200 {array} dto.IDSlugName // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -96,26 +96,26 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { return httperr.MalformedID(err) } - ic := model.InstrumentCollection{} + ic := dto.InstrumentCollection{} if err := c.Bind(&ic); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := time.Now() instrumentNames := make([]string, len(ic)) for idx := range ic { instrumentNames[idx] = ic[idx].Name - var prj model.IDSlugName + var prj dto.IDSlugName prj.ID = projectID - ic[idx].Projects = []model.IDSlugName{prj} + ic[idx].Projects = []dto.IDSlugName{prj} ic[idx].CreatorID = p.ID ic[idx].CreateDate = t } if strings.ToLower(c.QueryParam("dry_run")) == "true" { - v, err := h.InstrumentAssignService.ValidateInstrumentNamesProjectUnique(ctx, 
projectID, instrumentNames) + v, err := h.DBService.ProjectInstrumentGetInstrumentNamesUniqueForProject(ctx, projectID, instrumentNames) if err != nil { return httperr.InternalServerError(err) } @@ -125,7 +125,7 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { return c.JSON(http.StatusOK, v) } - nn, err := h.InstrumentService.CreateInstruments(ctx, ic) + nn, err := h.DBService.InstrumentCreateBatch(ctx, ic) if err != nil { return httperr.InternalServerError(err) } @@ -140,9 +140,9 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument body model.Instrument true "instrument payload" +// @Param instrument body dto.Instrument true "instrument payload" // @Param key query string false "api key" -// @Success 200 {object} model.Instrument +// @Success 200 {object} dto.Instrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -158,23 +158,21 @@ func (h *ApiHandler) UpdateInstrument(c echo.Context) error { return httperr.MalformedID(err) } - var i model.Instrument + var i dto.Instrument if err := c.Bind(&i); err != nil { return httperr.MalformedBody(err) } i.ID = iID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := time.Now() i.UpdaterID, i.UpdateDate = &p.ID, &t - // update - iUpdated, err := h.InstrumentService.UpdateInstrument(c.Request().Context(), pID, i) + iUpdated, err := h.DBService.InstrumentUpdate(c.Request().Context(), pID, i) if err != nil { return httperr.InternalServerError(err) } - // return updated instrument return c.JSON(http.StatusOK, iUpdated) } @@ -185,9 +183,9 @@ func (h *ApiHandler) UpdateInstrument(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string 
true "instrument uuid" Format(uuid) -// @Param instrument body model.Instrument true "instrument payload" +// @Param instrument body dto.Instrument true "instrument payload" // @Param key query string false "api key" -// @Success 200 {object} model.Instrument +// @Success 200 {object} dto.Instrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -202,14 +200,18 @@ func (h *ApiHandler) UpdateInstrumentGeometry(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var geom geojson.Geometry + var geom db.Geometry if err := c.Bind(&geom); err != nil { return httperr.MalformedBody(err) } - // profile of user creating instruments - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) - instrument, err := h.InstrumentService.UpdateInstrumentGeometry(c.Request().Context(), projectID, instrumentID, geom, p) + instrument, err := h.DBService.InstrumentUpdateGeometry(c.Request().Context(), db.InstrumentUpdateGeometryParams{ + ProjectID: projectID, + ID: instrumentID, + Geometry: geom, + Updater: &p.ID, + }) if err != nil { return httperr.InternalServerError(err) } @@ -236,12 +238,15 @@ func (h *ApiHandler) DeleteFlagInstrument(c echo.Context) error { return httperr.MalformedID(err) } - iID, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - if err := h.InstrumentService.DeleteFlagInstrument(c.Request().Context(), pID, iID); err != nil { + if err := h.DBService.InstrumentDeleteFlag(c.Request().Context(), db.InstrumentDeleteFlagParams{ + ID: instID, + ProjectID: pID, + }); err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument_assign.go b/api/internal/handler/instrument_assign.go index 6f7c046d..41dcdc19 100644 --- a/api/internal/handler/instrument_assign.go +++ b/api/internal/handler/instrument_assign.go @@ -4,8 +4,8 
@@ import ( "net/http" "strings" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -19,7 +19,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} model.InstrumentsValidation +// @Success 200 {object} dto.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -35,9 +35,9 @@ func (h *ApiHandler) AssignInstrumentToProject(c echo.Context) error { return httperr.MalformedID(err) } dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) - v, err := h.InstrumentAssignService.AssignInstrumentsToProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) + v, err := h.DBService.ProjectInstrumentCreateBatchAssignmentInstrumentsToProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) if err != nil { return httperr.InternalServerError(err) } @@ -55,7 +55,7 @@ func (h *ApiHandler) AssignInstrumentToProject(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} model.InstrumentsValidation +// @Success 200 {object} dto.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -71,9 +71,9 @@ func (h *ApiHandler) UnassignInstrumentFromProject(c echo.Context) error { return httperr.MalformedID(err) 
} dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) - v, err := h.InstrumentAssignService.UnassignInstrumentsFromProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) + v, err := h.DBService.ProjectInstrumentDeleteBatchAssignmentInstrumentsToProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) if err != nil { return httperr.InternalServerError(err) } @@ -89,10 +89,10 @@ func (h *ApiHandler) UnassignInstrumentFromProject(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param project_ids body model.InstrumentProjectAssignments true "project uuids" +// @Param project_ids body dto.InstrumentProjectAssignments true "project uuids" // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} model.InstrumentsValidation +// @Success 200 {object} dto.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -103,10 +103,10 @@ func (h *ApiHandler) UpdateInstrumentProjectAssignments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - pl := model.InstrumentProjectAssignments{ProjectIDs: make([]uuid.UUID, 0)} + pl := dto.InstrumentProjectAssignments{ProjectIDs: make([]uuid.UUID, 0)} if err := c.Bind(&pl); err != nil { return httperr.MalformedBody(err) } @@ -114,13 +114,13 @@ func (h *ApiHandler) UpdateInstrumentProjectAssignments(c echo.Context) error { ctx := c.Request().Context() switch strings.ToLower(c.QueryParam("action")) { case "assign": - v, err := 
h.InstrumentAssignService.AssignProjectsToInstrument(ctx, p.ID, iID, pl.ProjectIDs, dryRun) + v, err := h.DBService.ProjectInstrumentCreateBatchAssignmentProjectsToInstrument(ctx, p.ID, iID, pl.ProjectIDs, dryRun) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, v) case "unassign": - v, err := h.InstrumentAssignService.UnassignProjectsFromInstrument(ctx, p.ID, iID, pl.ProjectIDs, dryRun) + v, err := h.DBService.ProjectInstrumentDeleteBatchAssignmentProjectsToInstrument(ctx, p.ID, iID, pl.ProjectIDs, dryRun) if err != nil { return httperr.InternalServerError(err) } @@ -137,10 +137,10 @@ func (h *ApiHandler) UpdateInstrumentProjectAssignments(c echo.Context) error { // @Description must be Project (or Application) Admin of all existing instrument projects and project to be assigned // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param instrument_ids body model.ProjectInstrumentAssignments true "instrument uuids" +// @Param instrument_ids body dto.ProjectInstrumentAssignments true "instrument uuids" // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} model.InstrumentsValidation +// @Success 200 {object} dto.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -151,10 +151,10 @@ func (h *ApiHandler) UpdateProjectInstrumentAssignments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - pl := model.ProjectInstrumentAssignments{InstrumentIDs: make([]uuid.UUID, 0)} + pl := dto.ProjectInstrumentAssignments{InstrumentIDs: make([]uuid.UUID, 0)} if err := c.Bind(&pl); err != nil { return httperr.MalformedBody(err) } @@ 
-162,13 +162,13 @@ func (h *ApiHandler) UpdateProjectInstrumentAssignments(c echo.Context) error { ctx := c.Request().Context() switch strings.ToLower(c.QueryParam("action")) { case "assign": - v, err := h.InstrumentAssignService.AssignInstrumentsToProject(ctx, p.ID, pID, pl.InstrumentIDs, dryRun) + v, err := h.DBService.ProjectInstrumentCreateBatchAssignmentInstrumentsToProject(ctx, p.ID, pID, pl.InstrumentIDs, dryRun) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, v) case "unassign": - v, err := h.InstrumentAssignService.UnassignInstrumentsFromProject(ctx, p.ID, pID, pl.InstrumentIDs, dryRun) + v, err := h.DBService.ProjectInstrumentDeleteBatchAssignmentInstrumentsToProject(ctx, p.ID, pID, pl.InstrumentIDs, dryRun) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument_constant.go b/api/internal/handler/instrument_constant.go index 15305335..bf5e2a3f 100644 --- a/api/internal/handler/instrument_constant.go +++ b/api/internal/handler/instrument_constant.go @@ -3,8 +3,9 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,17 +17,17 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.Timeseries +// @Success 200 {array} dto.Timeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/instruments/{instrument_id}/constants [get] func (h *ApiHandler) ListInstrumentConstants(c echo.Context) error { - instrumentID, err := uuid.Parse(c.Param("instrument_id")) + 
instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - cc, err := h.InstrumentConstantService.ListInstrumentConstants(c.Request().Context(), instrumentID) + cc, err := h.DBService.InstrumentConstantList(c.Request().Context(), instID) if err != nil { return httperr.InternalServerError(err) } @@ -40,9 +41,9 @@ func (h *ApiHandler) ListInstrumentConstants(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param timeseries_collection_items body model.TimeseriesCollectionItems true "timeseries collection items payload" +// @Param timeseries_collection_items body dto.TimeseriesCollectionItems true "timeseries collection items payload" // @Param key query string false "api key" -// @Success 200 {array} model.Timeseries +// @Success 200 {array} dto.Timeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -50,20 +51,20 @@ func (h *ApiHandler) ListInstrumentConstants(c echo.Context) error { // @Security Bearer func (h *ApiHandler) CreateInstrumentConstants(c echo.Context) error { ctx := c.Request().Context() - var tc model.TimeseriesCollectionItems + var tc dto.TimeseriesCollectionItems if err := c.Bind(&tc); err != nil { return httperr.MalformedBody(err) } - instrumentID, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } for idx := range tc.Items { - if instrumentID != tc.Items[idx].InstrumentID { + if instID != tc.Items[idx].InstrumentID { return httperr.Message(http.StatusBadRequest, "all instrument ids in body must match query parameter") } } - tt, err := h.InstrumentConstantService.CreateInstrumentConstants(ctx, tc.Items) + tt, err := h.DBService.InstrumentConstantCreateBatch(ctx, tc.Items) if err != nil { return 
httperr.InternalServerError(err) } @@ -86,15 +87,18 @@ func (h *ApiHandler) CreateInstrumentConstants(c echo.Context) error { // @Router /projects/{project_id}/instruments/{instrument_id}/constants/{timeseries_id} [delete] // @Security Bearer func (h *ApiHandler) DeleteInstrumentConstant(c echo.Context) error { - instrumentID, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - timeseriesID, err := uuid.Parse(c.Param("timeseries_id")) + tsID, err := uuid.Parse(c.Param("timeseries_id")) if err != nil { return httperr.MalformedID(err) } - err = h.InstrumentConstantService.DeleteInstrumentConstant(c.Request().Context(), instrumentID, timeseriesID) + err = h.DBService.InstrumentConstantDelete(c.Request().Context(), db.InstrumentConstantDeleteParams{ + InstrumentID: instID, + TimeseriesID: tsID, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument_group.go b/api/internal/handler/instrument_group.go index 751c2b86..5ef3f298 100644 --- a/api/internal/handler/instrument_group.go +++ b/api/internal/handler/instrument_group.go @@ -4,8 +4,9 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -16,13 +17,13 @@ import ( // @Summary lists all instrument groups // @Tags instrument-group // @Produce json -// @Success 200 {array} model.InstrumentGroup +// @Success 200 {array} dto.InstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instrument_groups [get] func (h *ApiHandler) ListInstrumentGroups(c echo.Context) error { - groups, err := 
h.InstrumentGroupService.ListInstrumentGroups(c.Request().Context()) + groups, err := h.DBService.InstrumentGroupList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -35,7 +36,7 @@ func (h *ApiHandler) ListInstrumentGroups(c echo.Context) error { // @Tags instrument-group // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {object} model.InstrumentGroup +// @Success 200 {object} dto.InstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -45,7 +46,7 @@ func (h *ApiHandler) GetInstrumentGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - g, err := h.InstrumentGroupService.GetInstrumentGroup(c.Request().Context(), id) + g, err := h.DBService.InstrumentGroupGet(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -57,9 +58,9 @@ func (h *ApiHandler) GetInstrumentGroup(c echo.Context) error { // @Summary creats an instrument group from an array of instruments // @Tags instrument-group // @Produce json -// @Param instrument_group body model.InstrumentGroup true "instrument group payload" +// @Param instrument_group body dto.InstrumentGroup true "instrument group payload" // @Param key query string false "api key" -// @Success 201 {object} model.InstrumentGroup +// @Success 201 {object} dto.InstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -67,12 +68,12 @@ func (h *ApiHandler) GetInstrumentGroup(c echo.Context) error { // @Security Bearer func (h *ApiHandler) CreateInstrumentGroup(c echo.Context) error { - gc := model.InstrumentGroupCollection{} + gc := dto.InstrumentGroupCollection{} if err := c.Bind(&gc); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := 
time.Now() for idx := range gc.Items { @@ -80,7 +81,7 @@ func (h *ApiHandler) CreateInstrumentGroup(c echo.Context) error { gc.Items[idx].CreateDate = t } - gg, err := h.InstrumentGroupService.CreateInstrumentGroup(c.Request().Context(), gc.Items) + gg, err := h.DBService.InstrumentGroupCreateBatch(c.Request().Context(), gc.Items) if err != nil { return httperr.InternalServerError(err) } @@ -93,9 +94,9 @@ func (h *ApiHandler) CreateInstrumentGroup(c echo.Context) error { // @Tags instrument-group // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Param instrument_group body model.InstrumentGroup true "instrument group payload" +// @Param instrument_group body dto.InstrumentGroup true "instrument group payload" // @Param key query string false "api key" -// @Success 200 {object} model.InstrumentGroup +// @Success 200 {object} dto.InstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -107,18 +108,18 @@ func (h *ApiHandler) UpdateInstrumentGroup(c echo.Context) error { return httperr.MalformedID(err) } - g := model.InstrumentGroup{ID: gID} + g := dto.InstrumentGroup{ID: gID} if err := c.Bind(&g); err != nil { return httperr.MalformedBody(err) } g.ID = gID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := time.Now() g.UpdaterID, g.UpdateDate = &p.ID, &t - gUpdated, err := h.InstrumentGroupService.UpdateInstrumentGroup(c.Request().Context(), g) + gUpdated, err := h.DBService.InstrumentGroupUpdate(c.Request().Context(), g) if err != nil { return httperr.InternalServerError(err) } @@ -132,7 +133,7 @@ func (h *ApiHandler) UpdateInstrumentGroup(c echo.Context) error { // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.InstrumentGroup +// @Success 200 {array} dto.InstrumentGroup // 
@Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -143,7 +144,7 @@ func (h *ApiHandler) DeleteFlagInstrumentGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.InstrumentGroupService.DeleteFlagInstrumentGroup(c.Request().Context(), id); err != nil { + if err := h.DBService.InstrumentGroupDeleteFlag(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -155,7 +156,7 @@ func (h *ApiHandler) DeleteFlagInstrumentGroup(c echo.Context) error { // @Tags instrument-group // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {array} model.Instrument +// @Success 200 {array} dto.Instrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -165,7 +166,7 @@ func (h *ApiHandler) ListInstrumentGroupInstruments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - nn, err := h.InstrumentGroupService.ListInstrumentGroupInstruments(c.Request().Context(), id) + nn, err := h.DBService.InstrumentListForInstrumentGroup(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -190,11 +191,14 @@ func (h *ApiHandler) CreateInstrumentGroupInstruments(c echo.Context) error { if err != nil || instrumentGroupID == uuid.Nil { return httperr.MalformedID(err) } - var i model.Instrument + var i dto.Instrument if err := c.Bind(&i); err != nil || i.ID == uuid.Nil { return httperr.MalformedBody(err) } - if err := h.InstrumentGroupService.CreateInstrumentGroupInstruments(c.Request().Context(), instrumentGroupID, i.ID); err != nil { + if err := h.DBService.InstrumentGroupInstrumentCreate(c.Request().Context(), db.InstrumentGroupInstrumentCreateParams{ + InstrumentGroupID: instrumentGroupID, + InstrumentID: i.ID, + 
}); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusCreated, make(map[string]interface{})) @@ -225,7 +229,10 @@ func (h *ApiHandler) DeleteInstrumentGroupInstruments(c echo.Context) error { return httperr.MalformedID(err) } - if err := h.InstrumentGroupService.DeleteInstrumentGroupInstruments(c.Request().Context(), instrumentGroupID, instrumentID); err != nil { + if err := h.DBService.InstrumentGroupInstrumentDelete(c.Request().Context(), db.InstrumentGroupInstrumentDeleteParams{ + InstrumentGroupID: instrumentGroupID, + InstrumentID: instrumentID, + }); err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument_incl.go b/api/internal/handler/instrument_incl.go new file mode 100644 index 00000000..0c7977fd --- /dev/null +++ b/api/internal/handler/instrument_incl.go @@ -0,0 +1,95 @@ +package handler + +import ( + "net/http" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/httperr" + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" + "github.com/labstack/echo/v4" +) + +// ListInclSegmentsForInstrument godoc +// +// @Summary gets all incl segments for an instrument +// @Tags instrument-incl +// @Produce json +// @Param instrument_id path string true "instrument uuid" Format(uuid) +// @Success 200 {array} dto.InclSegment +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /instruments/incl/{instrument_id}/segments [get] +func (h *ApiHandler) ListInclSegmentsForInstrument(c echo.Context) error { + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } + ss, err := h.DBService.InclSegmentListForInstrument(c.Request().Context(), iID) + if err != nil { + return httperr.InternalServerError(err) + } + return 
c.JSON(http.StatusOK, ss) +} + +// GetInclMeasurementsForInstrument godoc +// +// @Summary creates instrument notes +// @Tags instrument-incl +// @Produce json +// @Param instrument_id path string true "instrument uuid" Format(uuid) +// @Param after query string false "after time" Format(date-time) +// @Param before query string true "before time" Format(date-time) +// @Success 200 {array} dto.InclMeasurements +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /instruments/incl/{instrument_id}/measurements [get] +func (h *ApiHandler) GetInclMeasurementsForInstrument(c echo.Context) error { + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } + var tw util.TimeWindow + a, b := c.QueryParam("after"), c.QueryParam("before") + if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { + return httperr.MalformedDate(err) + } + mm, err := h.DBService.InclMeasurementListForInstrumentRange(c.Request().Context(), db.InclMeasurementListForInstrumentRangeParams{ + InstrumentID: iID, + StartTime: tw.After, + EndTime: tw.Before, + }) + if err != nil { + return httperr.MalformedID(err) + } + return c.JSON(http.StatusOK, mm) +} + +// UpdateInclSegments godoc +// +// @Summary updates multiple segments for an incl instrument +// @Tags instrument-incl +// @Produce json +// @Param instrument_id path string true "instrument uuid" Format(uuid) +// @Param instrument_segments body []dto.InclSegment true "incl instrument segments payload" +// @Param key query string false "api key" +// @Success 200 {array} dto.InclSegment +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /instruments/incl/{instrument_id}/segments [put] +// @Security Bearer +func (h *ApiHandler) UpdateInclSegments(c echo.Context) error { + segs := make([]dto.InclSegment, 0) + if 
err := c.Bind(&segs); err != nil { + return httperr.MalformedBody(err) + } + if err := h.DBService.InclSegmentUpdateBatch(c.Request().Context(), segs); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, segs) +} diff --git a/api/internal/handler/instrument_ipi.go b/api/internal/handler/instrument_ipi.go index 1d312554..5c51b030 100644 --- a/api/internal/handler/instrument_ipi.go +++ b/api/internal/handler/instrument_ipi.go @@ -4,29 +4,31 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// GetAllIpiSegmentsForInstrument godoc +// ListIpiSegmentsForInstrument godoc // // @Summary gets all ipi segments for an instrument // @Tags instrument-ipi // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.IpiSegment +// @Success 200 {array} dto.IpiSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/ipi/{instrument_id}/segments [get] -func (h *ApiHandler) GetAllIpiSegmentsForInstrument(c echo.Context) error { +func (h *ApiHandler) ListIpiSegmentsForInstrument(c echo.Context) error { iID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - ss, err := h.IpiInstrumentService.GetAllIpiSegmentsForInstrument(c.Request().Context(), iID) + ss, err := h.DBService.IpiSegmentListForInstrument(c.Request().Context(), iID) if err != nil { return httperr.InternalServerError(err) } @@ -41,7 +43,7 @@ func (h *ApiHandler) GetAllIpiSegmentsForInstrument(c echo.Context) error { // @Param instrument_id path string true 
"instrument uuid" Format(uuid) // @Param after query string false "after time" Format(date-time) // @Param before query string true "before time" Format(date-time) -// @Success 200 {array} model.IpiMeasurements +// @Success 200 {array} dto.IpiMeasurements // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -51,12 +53,16 @@ func (h *ApiHandler) GetIpiMeasurementsForInstrument(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) } - mm, err := h.IpiInstrumentService.GetIpiMeasurementsForInstrument(c.Request().Context(), iID, tw) + mm, err := h.DBService.IpiMeasurementListForInstrumentRange(c.Request().Context(), db.IpiMeasurementListForInstrumentRangeParams{ + InstrumentID: iID, + StartTime: tw.After, + EndTime: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } @@ -69,20 +75,20 @@ func (h *ApiHandler) GetIpiMeasurementsForInstrument(c echo.Context) error { // @Tags instrument-ipi // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument_segments body []model.IpiSegment true "ipi instrument segments payload" +// @Param instrument_segments body []dto.IpiSegment true "ipi instrument segments payload" // @Param key query string false "api key" -// @Success 200 {array} model.IpiSegment +// @Success 200 {array} dto.IpiSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/ipi/{instrument_id}/segments [put] // @Security Bearer func (h *ApiHandler) UpdateIpiSegments(c echo.Context) error { - segs := make([]model.IpiSegment, 0) + segs := make([]dto.IpiSegment, 0) if err := c.Bind(&segs); err != 
nil { return httperr.MalformedBody(err) } - if err := h.IpiInstrumentService.UpdateIpiSegments(c.Request().Context(), segs); err != nil { + if err := h.DBService.IpiSegmentUpdateBatch(c.Request().Context(), segs); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, segs) diff --git a/api/internal/handler/instrument_ipi_test.go b/api/internal/handler/instrument_ipi_test.go index 1093082e..49319710 100644 --- a/api/internal/handler/instrument_ipi_test.go +++ b/api/internal/handler/instrument_ipi_test.go @@ -101,7 +101,7 @@ func TestIpiInstruments(t *testing.T) { tests := []HTTPTest{ { - Name: "GetAllIpiSegmentsForInstrument", + Name: "ListIpiSegmentsForInstrument", URL: fmt.Sprintf("/instruments/ipi/%s/segments", testIpiInstrumentID), Method: http.MethodGet, ExpectedStatus: http.StatusOK, diff --git a/api/internal/handler/instrument_note.go b/api/internal/handler/instrument_note.go index fe28b17a..ac3bc779 100644 --- a/api/internal/handler/instrument_note.go +++ b/api/internal/handler/instrument_note.go @@ -4,38 +4,20 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// ListInstrumentNotes godoc -// -// @Summary gets all instrument notes -// @Tags instrument-note -// @Produce json -// @Success 200 {array} model.InstrumentNote -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /instruments/notes [get] -func (h *ApiHandler) ListInstrumentNotes(c echo.Context) error { - notes, err := h.InstrumentNoteService.ListInstrumentNotes(c.Request().Context()) - if err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusOK, notes) -} - // ListInstrumentInstrumentNotes godoc // // @Summary gets instrument notes for a single 
instrument // @Tags instrument-note // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.InstrumentNote +// @Success 200 {array} dto.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -45,7 +27,7 @@ func (h *ApiHandler) ListInstrumentInstrumentNotes(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - notes, err := h.InstrumentNoteService.ListInstrumentInstrumentNotes(c.Request().Context(), iID) + notes, err := h.DBService.InstrumentNoteListForInstrument(c.Request().Context(), iID) if err != nil { return httperr.InternalServerError(err) } @@ -58,7 +40,7 @@ func (h *ApiHandler) ListInstrumentInstrumentNotes(c echo.Context) error { // @Tags instrument-note // @Produce json // @Param note_id path string true "note uuid" Format(uuid) -// @Success 200 {object} model.InstrumentNote +// @Success 200 {object} dto.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -68,7 +50,7 @@ func (h *ApiHandler) GetInstrumentNote(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - note, err := h.InstrumentNoteService.GetInstrumentNote(c.Request().Context(), nID) + note, err := h.DBService.InstrumentNoteGet(c.Request().Context(), nID) if err != nil { return httperr.InternalServerError(err) } @@ -80,28 +62,28 @@ func (h *ApiHandler) GetInstrumentNote(c echo.Context) error { // @Summary creates instrument notes // @Tags instrument-note // @Produce json -// @Param instrument_note body model.InstrumentNoteCollection true "instrument note collection payload" +// @Param instrument_note body dto.InstrumentNoteCollection true "instrument note collection payload" // @Param key query string false "api key" -// @Success 200 {array} model.InstrumentNote +// @Success 200 {array} dto.InstrumentNote // @Failure 400 
{object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/notes [post] // @Security Bearer func (h *ApiHandler) CreateInstrumentNote(c echo.Context) error { - nc := model.InstrumentNoteCollection{} + nc := dto.InstrumentNoteCollection{} if err := c.Bind(&nc); err != nil { return httperr.MalformedBody(err) } // profile and timestamp - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := time.Now() for idx := range nc.Items { nc.Items[idx].CreatorID = p.ID nc.Items[idx].CreateDate = t } - nn, err := h.InstrumentNoteService.CreateInstrumentNote(c.Request().Context(), nc.Items) + nn, err := h.DBService.InstrumentNoteCreateBatch(c.Request().Context(), nc.Items) if err != nil { return httperr.InternalServerError(err) } @@ -115,9 +97,9 @@ func (h *ApiHandler) CreateInstrumentNote(c echo.Context) error { // @Tags instrument-note // @Produce json // @Param note_id path string true "note uuid" Format(uuid) -// @Param instrument_note body model.InstrumentNote true "instrument note collection payload" +// @Param instrument_note body dto.InstrumentNote true "instrument note collection payload" // @Param key query string false "api key" -// @Success 200 {array} model.AlertConfig +// @Success 200 {object} dto.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -128,17 +110,17 @@ func (h *ApiHandler) UpdateInstrumentNote(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - n := model.InstrumentNote{ID: noteID} + n := dto.InstrumentNote{ID: noteID} if err := c.Bind(&n); err != nil { return httperr.MalformedBody(err) } n.ID = noteID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := time.Now() n.UpdaterID, n.UpdateDate = &p.ID, &t - nUpdated, err := 
h.DBService.InstrumentNoteUpdate(c.Request().Context(), n) if err != nil { return httperr.InternalServerError(err) } @@ -164,7 +146,7 @@ func (h *ApiHandler) DeleteInstrumentNote(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.InstrumentNoteService.DeleteInstrumentNote(c.Request().Context(), noteID); err != nil { + if err := h.DBService.InstrumentNoteDelete(c.Request().Context(), noteID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/instrument_saa.go b/api/internal/handler/instrument_saa.go index 6d7cd007..726d73ea 100644 --- a/api/internal/handler/instrument_saa.go +++ b/api/internal/handler/instrument_saa.go @@ -4,29 +4,31 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// GetAllSaaSegmentsForInstrument godoc +// ListSaaSegmentsForInstrument godoc // // @Summary gets all saa segments for an instrument // @Tags instrument-saa // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.SaaSegment +// @Success 200 {array} dto.SaaSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/saa/{instrument_id}/segments [get] -func (h *ApiHandler) GetAllSaaSegmentsForInstrument(c echo.Context) error { +func (h *ApiHandler) ListSaaSegmentsForInstrument(c echo.Context) error { iID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - ss, err := 
h.SaaInstrumentService.GetAllSaaSegmentsForInstrument(c.Request().Context(), iID) + ss, err := h.DBService.SaaSegmentListForInstrument(c.Request().Context(), iID) if err != nil { return httperr.InternalServerError(err) } @@ -41,7 +43,7 @@ func (h *ApiHandler) GetAllSaaSegmentsForInstrument(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param after query string false "after time" Format(date-time) // @Param before query string true "before time" Format(date-time) -// @Success 200 {array} model.SaaMeasurements +// @Success 200 {array} dto.SaaMeasurements // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -51,12 +53,16 @@ func (h *ApiHandler) GetSaaMeasurementsForInstrument(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) } - mm, err := h.SaaInstrumentService.GetSaaMeasurementsForInstrument(c.Request().Context(), iID, tw) + mm, err := h.DBService.SaaMeasurementListForInstrumentRange(c.Request().Context(), db.SaaMeasurementListForInstrumentRangeParams{ + InstrumentID: iID, + StartTime: tw.After, + EndTime: tw.Before, + }) if err != nil { return httperr.MalformedID(err) } @@ -69,20 +75,20 @@ func (h *ApiHandler) GetSaaMeasurementsForInstrument(c echo.Context) error { // @Tags instrument-saa // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument_segments body []model.SaaSegment true "saa instrument segments payload" +// @Param instrument_segments body []dto.SaaSegment true "saa instrument segments payload" // @Param key query string false "api key" -// @Success 200 {array} model.SaaSegment +// @Success 200 {array} dto.SaaSegment // @Failure 400 
{object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/saa/{instrument_id}/segments [put] // @Security Bearer func (h *ApiHandler) UpdateSaaSegments(c echo.Context) error { - segs := make([]model.SaaSegment, 0) + segs := make([]dto.SaaSegment, 0) if err := c.Bind(&segs); err != nil { return httperr.MalformedBody(err) } - if err := h.SaaInstrumentService.UpdateSaaSegments(c.Request().Context(), segs); err != nil { + if err := h.DBService.SaaSegmentUpdateBatch(c.Request().Context(), segs); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, segs) diff --git a/api/internal/handler/instrument_saa_test.go b/api/internal/handler/instrument_saa_test.go index 80a861ce..5ef43cb8 100644 --- a/api/internal/handler/instrument_saa_test.go +++ b/api/internal/handler/instrument_saa_test.go @@ -112,7 +112,7 @@ func TestSaaInstruments(t *testing.T) { tests := []HTTPTest{ { - Name: "GetAllSaaSegmentsForInstrument", + Name: "ListSaaSegmentsForInstrument", URL: fmt.Sprintf("/instruments/saa/%s/segments", testSaaInstrumentID), Method: http.MethodGet, ExpectedStatus: http.StatusOK, diff --git a/api/internal/handler/instrument_status.go b/api/internal/handler/instrument_status.go index f84c7711..98fadac3 100644 --- a/api/internal/handler/instrument_status.go +++ b/api/internal/handler/instrument_status.go @@ -3,8 +3,8 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -16,7 +16,7 @@ import ( // @Tags instrument-status // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.InstrumentStatus +// @Success 200 {array} dto.InstrumentStatus // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} 
echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -27,7 +27,7 @@ func (h *ApiHandler) ListInstrumentStatus(c echo.Context) error { return httperr.MalformedID(err) } - ss, err := h.InstrumentStatusService.ListInstrumentStatus(c.Request().Context(), id) + ss, err := h.DBService.InstrumentStatusListForInstrument(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -41,7 +41,7 @@ func (h *ApiHandler) ListInstrumentStatus(c echo.Context) error { // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param status_id path string true "status uuid" Format(uuid) -// @Success 200 {array} model.AlertConfig +// @Success 200 {object} dto.InstrumentStatus // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -52,7 +52,7 @@ func (h *ApiHandler) GetInstrumentStatus(c echo.Context) error { return httperr.MalformedID(err) } - s, err := h.InstrumentStatusService.GetInstrumentStatus(c.Request().Context(), id) + s, err := h.DBService.InstrumentStatusGet(c.Request().Context(), id) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -65,7 +65,7 @@ func (h *ApiHandler) GetInstrumentStatus(c echo.Context) error { // @Tags instrument-status // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument_status body model.InstrumentStatusCollection true "instrument status collection paylaod" +// @Param instrument_status body dto.InstrumentStatusCollection true "instrument status collection payload" // @Param key query string false "api key" // @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError @@ -79,7 +79,7 @@ func (h *ApiHandler) CreateOrUpdateInstrumentStatus(c echo.Context) error { return httperr.MalformedID(err) } - var sc model.InstrumentStatusCollection + var sc dto.InstrumentStatusCollection if err := c.Bind(&sc); err != nil { return 
httperr.MalformedBody(err) } @@ -91,7 +91,7 @@ func (h *ApiHandler) CreateOrUpdateInstrumentStatus(c echo.Context) error { sc.Items[idx].ID = id } - if err := h.InstrumentStatusService.CreateOrUpdateInstrumentStatus(c.Request().Context(), instrumentID, sc.Items); err != nil { + if err := h.DBService.InstrumentStatusCreateOrUpdateBatch(c.Request().Context(), instrumentID, sc.Items); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusCreated, make(map[string]interface{})) @@ -116,7 +116,7 @@ func (h *ApiHandler) DeleteInstrumentStatus(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.InstrumentStatusService.DeleteInstrumentStatus(c.Request().Context(), id); err != nil { + if err := h.DBService.InstrumentStatusDelete(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/measurement.go b/api/internal/handler/measurement.go index 74bf87da..6699cd82 100644 --- a/api/internal/handler/measurement.go +++ b/api/internal/handler/measurement.go @@ -1,13 +1,16 @@ package handler import ( + "errors" "log" "net/http" "strings" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -20,9 +23,9 @@ import ( // @Accept json // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param timeseries_measurement_collections body model.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" +// @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" // 
@Param key query string false "api key" -// @Success 200 {array} model.MeasurementCollection +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -30,7 +33,7 @@ import ( // @Security Bearer func (h *ApiHandler) CreateOrUpdateProjectTimeseriesMeasurements(c echo.Context) error { ctx := c.Request().Context() - var mcc model.TimeseriesMeasurementCollectionCollection + var mcc dto.TimeseriesMeasurementCollectionCollection if err := c.Bind(&mcc); err != nil { return httperr.MalformedBody(err) } @@ -40,17 +43,27 @@ func (h *ApiHandler) CreateOrUpdateProjectTimeseriesMeasurements(c echo.Context) return httperr.MalformedID(err) } - dd := mcc.TimeseriesIDs() - if err := h.TimeseriesService.AssertTimeseriesLinkedToProject(ctx, pID, dd); err != nil { - return httperr.InternalServerError(err) + tt := make([]uuid.UUID, len(mcc.Items)) + for idx, mc := range mcc.Items { + tt[idx] = mc.TimeseriesID } - stored, err := h.MeasurementService.CreateOrUpdateTimeseriesMeasurements(ctx, mcc.Items) + valid, err := h.DBService.TimeseriesGetAllBelongToProject(ctx, db.TimeseriesGetAllBelongToProjectParams{ + ProjectID: pID, + TimeseriesIds: tt, + }) if err != nil { return httperr.InternalServerError(err) } + if !valid { + return httperr.BadRequest(errors.New("one or more timeseries do not belong to an instrument in this project")) + } - return c.JSON(http.StatusCreated, stored) + if err := h.DBService.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mcc.Items); err != nil { + return httperr.InternalServerError(err) + } + + return c.JSON(http.StatusCreated, map[string]interface{}{}) } // CreateOrUpdateTimeseriesMeasurements godoc @@ -58,24 +71,23 @@ func (h *ApiHandler) CreateOrUpdateProjectTimeseriesMeasurements(c echo.Context) // @Summary creates or updates one or more timeseries measurements // @Tags measurement // @Produce json -// @Param timeseries_measurement_collections 
body model.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" +// @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" // @Param key query string true "api key" -// @Success 200 {array} model.MeasurementCollection +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /timeseries_measurements [post] func (h *ApiHandler) CreateOrUpdateTimeseriesMeasurements(c echo.Context) error { - var mcc model.TimeseriesMeasurementCollectionCollection + var mcc dto.TimeseriesMeasurementCollectionCollection if err := c.Bind(&mcc); err != nil { return httperr.MalformedBody(err) } - stored, err := h.MeasurementService.CreateOrUpdateTimeseriesMeasurements(c.Request().Context(), mcc.Items) - if err != nil { + if err := h.DBService.TimeseriesMeasurementCreateOrUpdateBatch(c.Request().Context(), mcc.Items); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, stored) + return c.JSON(http.StatusCreated, map[string]interface{}{}) } // UpdateTimeseriesMeasurements godoc @@ -86,29 +98,32 @@ func (h *ApiHandler) CreateOrUpdateTimeseriesMeasurements(c echo.Context) error // @Param project_id path string true "project uuid" Format(uuid) // @Param after query string false "after timestamp" Format(date-time) // @Param before query string false "before timestamp" Format(date-time) -// @Param timeseries_measurement_collections body model.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" +// @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" // @Param key query string false "api key" -// @Success 200 {array} model.MeasurementCollection +// @Success 200 {object} 
map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/timeseries_measurements [put] // @Security Bearer func (h *ApiHandler) UpdateTimeseriesMeasurements(c echo.Context) error { - var tw model.TimeWindow + var tw *util.TimeWindow + var twParam util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") - if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { - return httperr.MalformedDate(err) + if a != "" && b != "" { + if err := twParam.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { + return httperr.MalformedDate(err) + } + tw = &twParam } - var mcc model.TimeseriesMeasurementCollectionCollection + var mcc dto.TimeseriesMeasurementCollectionCollection if err := c.Bind(&mcc); err != nil { return httperr.MalformedBody(err) } - stored, err := h.MeasurementService.UpdateTimeseriesMeasurements(c.Request().Context(), mcc.Items, tw) - if err != nil { + if err := h.DBService.TimeseriesMeasurementUpdateBatch(c.Request().Context(), mcc.Items, tw); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, stored) + return c.JSON(http.StatusOK, map[string]interface{}{}) } // DeleteTimeserieMeasurements godoc @@ -136,7 +151,10 @@ func (h *ApiHandler) DeleteTimeserieMeasurements(c echo.Context) error { if err != nil { return httperr.MalformedDate(err) } - if err := h.MeasurementService.DeleteTimeserieMeasurements(c.Request().Context(), id, t); err != nil { + if err := h.DBService.TimeseriesMeasurementDelete(c.Request().Context(), db.TimeseriesMeasurementDeleteParams{ + TimeseriesID: id, + Time: t, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -148,9 +166,9 @@ func (h *ApiHandler) DeleteTimeserieMeasurements(c echo.Context) error { // @Tags measurement // @Accept json,mpfd // @Produce json 
-// @Param timeseries_measurement_collections body model.TimeseriesMeasurementCollectionCollection false "json array of timeseries measurement collections" +// @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection false "json array of timeseries measurement collections" // @Param timeseries_measurement_collections formData file false "TOA5 file of timeseries measurement collections" -// @Success 200 {array} model.MeasurementCollection +// @Success 200 {array} dto.MeasurementCollection // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -186,7 +204,7 @@ func (h *ApiHandler) createOrUpdateTimeseriesMeasurementsMultipartFormData(c ech } }() - if err := h.DataloggerTelemetryService.CreateOrUpdateDataloggerTOA5MeasurementCollection(c.Request().Context(), src); err != nil { + if err := h.DBService.TimeseriesMeasurementCreateOrUpdateDataloggerTOA5Upload(c.Request().Context(), src); err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } diff --git a/api/internal/handler/measurement_inclinometer.go b/api/internal/handler/measurement_inclinometer.go deleted file mode 100644 index 85b813f7..00000000 --- a/api/internal/handler/measurement_inclinometer.go +++ /dev/null @@ -1,158 +0,0 @@ -package handler - -import ( - "encoding/json" - "net/http" - "time" - - "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" - - "github.com/google/uuid" - "github.com/labstack/echo/v4" -) - -// ListInclinometerMeasurements godoc -// -// @Summary lists all measurements for an inclinometer -// @Tags measurement-inclinometer -// @Produce json -// @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param after query string false "after timestamp" Format(date-time) -// @Param before query string false "before timestamp" Format(date-time) -// @Success 200 {object} 
model.InclinometerMeasurementCollection -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /timeseries/{timeseries_id}/inclinometer_measurements [get] -func (h *ApiHandler) ListInclinometerMeasurements(c echo.Context) error { - - tsID, err := uuid.Parse(c.Param("timeseries_id")) - if err != nil { - return httperr.MalformedID(err) - } - - var tw model.TimeWindow - a, b := c.QueryParam("after"), c.QueryParam("before") - if err = tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { - return httperr.MalformedDate(err) - } - - ctx := c.Request().Context() - - im, err := h.InclinometerMeasurementService.ListInclinometerMeasurements(ctx, tsID, tw) - if err != nil { - return httperr.InternalServerError(err) - } - - cm, err := h.MeasurementService.GetTimeseriesConstantMeasurement(ctx, tsID, "inclinometer-constant") - if err != nil { - return httperr.InternalServerError(err) - } - - for idx := range im.Inclinometers { - values, err := h.InclinometerMeasurementService.ListInclinometerMeasurementValues(ctx, tsID, im.Inclinometers[idx].Time, float64(cm.Value)) - if err != nil { - return httperr.InternalServerError(err) - } - - jsonValues, err := json.Marshal(values) - if err != nil { - return httperr.InternalServerError(err) - } - im.Inclinometers[idx].Values = jsonValues - } - - return c.JSON(http.StatusOK, im) -} - -// CreateOrUpdateProjectInclinometerMeasurements godoc -// -// @Summary creates or updates one or more inclinometer measurements -// @Tags measurement-inclinometer -// @Produce json -// @Param project_id path string true "project uuid" Format(uuid) -// @Param timeseries_measurement_collections body model.InclinometerMeasurementCollectionCollection true "inclinometer measurement collections" -// @Param key query string false "api key" -// @Success 200 {array} model.InclinometerMeasurementCollection -// @Failure 400 {object} echo.HTTPError -// @Failure 404 
{object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /projects/{project_id}/inclinometer_measurements [post] -// @Security Bearer -func (h *ApiHandler) CreateOrUpdateProjectInclinometerMeasurements(c echo.Context) error { - var mcc model.InclinometerMeasurementCollectionCollection - if err := c.Bind(&mcc); err != nil { - return httperr.MalformedBody(err) - } - - pID, err := uuid.Parse(c.Param("project_id")) - if err != nil { - return httperr.MalformedID(err) - } - - ctx := c.Request().Context() - - dd := mcc.TimeseriesIDs() - if err := h.TimeseriesService.AssertTimeseriesLinkedToProject(ctx, pID, dd); err != nil { - return httperr.InternalServerError(err) - } - - p := c.Get("profile").(model.Profile) - - stored, err := h.InclinometerMeasurementService.CreateOrUpdateInclinometerMeasurements(ctx, mcc.Items, p, time.Now()) - if err != nil { - return httperr.InternalServerError(err) - } - - //create inclinometer constant if doesn't exist - if len(mcc.Items) > 0 { - cm, err := h.MeasurementService.GetTimeseriesConstantMeasurement(ctx, mcc.Items[0].TimeseriesID, "inclinometer-constant") - if err != nil { - return httperr.InternalServerError(err) - } - - if cm.TimeseriesID == uuid.Nil { - err := h.InclinometerMeasurementService.CreateTimeseriesConstant(ctx, mcc.Items[0].TimeseriesID, "inclinometer-constant", "Meters", 20000) - if err != nil { - return httperr.InternalServerError(err) - } - } - - } - return c.JSON(http.StatusCreated, stored) -} - -// DeleteInclinometerMeasurements godoc -// -// @Summary deletes a single inclinometer measurement by timestamp -// @Tags measurement-inclinometer -// @Produce json -// @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param time query string true "timestamp of measurement to delete" Format(date-time) -// @Param key query string false "api key" -// @Success 200 {object} map[string]interface{} -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// 
@Failure 500 {object} echo.HTTPError -// @Router /timeseries/{timeseries_id}/inclinometer_measurements [delete] -// @Security Bearer -func (h *ApiHandler) DeleteInclinometerMeasurements(c echo.Context) error { - // id from url params - id, err := uuid.Parse(c.Param("timeseries_id")) - if err != nil { - return httperr.MalformedID(err) - } - - timeString := c.QueryParam("time") - - t, err := time.Parse(time.RFC3339, timeString) - if err != nil { - return httperr.MalformedDate(err) - } - - if err := h.InclinometerMeasurementService.DeleteInclinometerMeasurement(c.Request().Context(), id, t); err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusOK, make(map[string]interface{})) -} diff --git a/api/internal/handler/media.go b/api/internal/handler/media.go index 81f4c6d3..ecc7485e 100644 --- a/api/internal/handler/media.go +++ b/api/internal/handler/media.go @@ -4,7 +4,6 @@ import ( "net/http" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) diff --git a/api/internal/handler/opendcs.go b/api/internal/handler/opendcs.go index f2067af2..bce924ac 100644 --- a/api/internal/handler/opendcs.go +++ b/api/internal/handler/opendcs.go @@ -4,8 +4,6 @@ import ( "net/http" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/labstack/echo/v4" ) @@ -14,13 +12,13 @@ import ( // @Summary lists all instruments, represented as opendcs sites // @Tags opendcs // @Produce xml -// @Success 200 {array} model.Site +// @Success 200 {array} dto.Site // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /opendcs/sites [get] func (h *ApiHandler) ListOpendcsSites(c echo.Context) error { - ss, err := h.OpendcsService.ListOpendcsSites(c.Request().Context()) + ss, err := 
h.DBService.OpendcsSiteList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/plot_config.go b/api/internal/handler/plot_config.go index a35dc029..e0cefaf4 100644 --- a/api/internal/handler/plot_config.go +++ b/api/internal/handler/plot_config.go @@ -3,8 +3,8 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -16,7 +16,7 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.PlotConfig +// @Success 200 {array} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -27,7 +27,7 @@ func (h *ApiHandler) ListPlotConfigs(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - cc, err := h.PlotConfigService.ListPlotConfigs(c.Request().Context(), pID) + cc, err := h.DBService.PlotConfigListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -41,7 +41,7 @@ func (h *ApiHandler) ListPlotConfigs(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -52,7 +52,7 @@ func (h *ApiHandler) GetPlotConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - g, err := h.PlotConfigService.GetPlotConfig(c.Request().Context(), cID) + g, err := h.DBService.PlotConfigGet(c.Request().Context(), cID) if err != 
nil { return httperr.InternalServerError(err) } @@ -83,7 +83,10 @@ func (h *ApiHandler) DeletePlotConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.PlotConfigService.DeletePlotConfig(c.Request().Context(), pID, cID); err != nil { + if err := h.DBService.PlotConfigDelete(c.Request().Context(), db.PlotConfigDeleteParams{ + ID: cID, + ProjectID: pID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/plot_config_bullseye.go b/api/internal/handler/plot_config_bullseye.go index 122e5146..f47e3b7b 100644 --- a/api/internal/handler/plot_config_bullseye.go +++ b/api/internal/handler/plot_config_bullseye.go @@ -4,8 +4,10 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,16 +18,16 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param plot_config body model.PlotConfigBullseyePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigBullseyePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/bullseye_plots [post] // @Security Bearer func (h *ApiHandler) CreatePlotConfigBullseyePlot(c echo.Context) error { - var pc model.PlotConfigBullseyePlot + var pc dto.PlotConfigBullseyePlot if err := c.Bind(&pc); err != nil { return 
httperr.MalformedBody(err) } @@ -42,10 +44,10 @@ func (h *ApiHandler) CreatePlotConfigBullseyePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) pc.CreatorID, pc.CreateDate = p.ID, time.Now() - pcNew, err := h.PlotConfigService.CreatePlotConfigBullseyePlot(c.Request().Context(), pc) + pcNew, err := h.DBService.PlotConfigCreateBullseye(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -59,16 +61,16 @@ func (h *ApiHandler) CreatePlotConfigBullseyePlot(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Param plot_config body model.PlotConfigBullseyePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigBullseyePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/bullseye_plots/{plot_configuration_id} [put] // @Security Bearer func (h *ApiHandler) UpdatePlotConfigBullseyePlot(c echo.Context) error { - var pc model.PlotConfigBullseyePlot + var pc dto.PlotConfigBullseyePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -91,11 +93,11 @@ func (h *ApiHandler) UpdatePlotConfigBullseyePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) tNow := time.Now() pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow - pcUpdated, err := h.PlotConfigService.UpdatePlotConfigBullseyePlot(c.Request().Context(), pc) + pcUpdated, err := h.DBService.PlotConfigUpdateBullseye(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -110,7 
+112,7 @@ func (h *ApiHandler) UpdatePlotConfigBullseyePlot(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} []model.PlotConfigMeasurementBullseyePlot +// @Success 200 {object} []dto.PlotConfigMeasurementBullseyePlot // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -125,12 +127,16 @@ func (h *ApiHandler) ListPlotConfigMeasurementsBullseyePlot(c echo.Context) erro if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) } - mm, err := h.PlotConfigService.ListPlotConfigMeasurementsBullseyePlot(c.Request().Context(), pcID, tw) + mm, err := h.DBService.PlotConfigMeasurementListBullseye(c.Request().Context(), db.PlotConfigMeasurementListBullseyeParams{ + PlotConfigID: pcID, + After: tw.After, + Before: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/plot_config_contour.go b/api/internal/handler/plot_config_contour.go index 81fb430c..d73b9a56 100644 --- a/api/internal/handler/plot_config_contour.go +++ b/api/internal/handler/plot_config_contour.go @@ -4,8 +4,10 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,16 +18,16 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true 
"project uuid" Format(uuid) -// @Param plot_config body model.PlotConfigContourPlot true "plot config payload" +// @Param plot_config body dto.PlotConfigContourPlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/contour_plots [post] // @Security Bearer func (h *ApiHandler) CreatePlotConfigContourPlot(c echo.Context) error { - var pc model.PlotConfigContourPlot + var pc dto.PlotConfigContourPlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -42,10 +44,10 @@ func (h *ApiHandler) CreatePlotConfigContourPlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) pc.CreatorID, pc.CreateDate = p.ID, time.Now() - pcNew, err := h.PlotConfigService.CreatePlotConfigContourPlot(c.Request().Context(), pc) + pcNew, err := h.DBService.PlotConfigCreateContour(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -59,16 +61,16 @@ func (h *ApiHandler) CreatePlotConfigContourPlot(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Param plot_config body model.PlotConfigContourPlot true "plot config payload" +// @Param plot_config body dto.PlotConfigContourPlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/contour_plots/{plot_configuration_id} [put] // @Security Bearer func (h 
*ApiHandler) UpdatePlotConfigContourPlot(c echo.Context) error { - var pc model.PlotConfigContourPlot + var pc dto.PlotConfigContourPlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -91,11 +93,11 @@ func (h *ApiHandler) UpdatePlotConfigContourPlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) tNow := time.Now() pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow - pcUpdated, err := h.PlotConfigService.UpdatePlotConfigContourPlot(c.Request().Context(), pc) + pcUpdated, err := h.DBService.PlotConfigUpdateContour(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -127,12 +129,16 @@ func (h *ApiHandler) ListPlotConfigTimesContourPlot(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) } - tt, err := h.PlotConfigService.ListPlotConfigTimesContourPlot(c.Request().Context(), pcID, tw) + tt, err := h.DBService.PlotContourConfigListTimeRange(c.Request().Context(), db.PlotContourConfigListTimeRangeParams{ + PlotContourConfigID: pcID, + After: tw.After, + Before: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } @@ -148,7 +154,7 @@ func (h *ApiHandler) ListPlotConfigTimesContourPlot(c echo.Context) error { // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) // @Param time query string true "time" // @Param key query string false "api key" -// @Success 200 {object} model.AggregatePlotConfigMeasurementsContourPlot +// @Success 200 {object} dto.AggregatePlotConfigMeasurementsContourPlot // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -168,7 +174,7 @@ func (h *ApiHandler) 
GetPlotConfigMeasurementsContourPlot(c echo.Context) error if err != nil { return httperr.MalformedDate(err) } - mm, err := h.PlotConfigService.GetPlotConfigMeasurementsContourPlot(c.Request().Context(), pcID, t) + mm, err := h.DBService.PlotConfigMeasurementListContour(c.Request().Context(), pcID, t) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/plot_config_profile.go b/api/internal/handler/plot_config_profile.go index 86e1b913..9c54ae06 100644 --- a/api/internal/handler/plot_config_profile.go +++ b/api/internal/handler/plot_config_profile.go @@ -4,8 +4,8 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,16 +16,16 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param plot_config body model.PlotConfigProfilePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigProfilePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/profile_plots [post] // @Security Bearer func (h *ApiHandler) CreatePlotConfigProfilePlot(c echo.Context) error { - var pc model.PlotConfigProfilePlot + var pc dto.PlotConfigProfilePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -42,10 +42,10 @@ func (h *ApiHandler) CreatePlotConfigProfilePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) pc.CreatorID, pc.CreateDate = p.ID, time.Now() - pcNew, err := 
h.PlotConfigService.CreatePlotConfigProfilePlot(c.Request().Context(), pc) + pcNew, err := h.DBService.PlotConfigCreateProfile(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -59,16 +59,16 @@ func (h *ApiHandler) CreatePlotConfigProfilePlot(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Param plot_config body model.PlotConfigProfilePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigProfilePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/profile_plots/{plot_configuration_id} [put] // @Security Bearer func (h *ApiHandler) UpdatePlotConfigProfilePlot(c echo.Context) error { - var pc model.PlotConfigProfilePlot + var pc dto.PlotConfigProfilePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -91,11 +91,11 @@ func (h *ApiHandler) UpdatePlotConfigProfilePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) tNow := time.Now() pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow - pcUpdated, err := h.PlotConfigService.UpdatePlotConfigProfilePlot(c.Request().Context(), pc) + pcUpdated, err := h.DBService.PlotConfigUpdateProfile(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/plot_config_scatter_line.go b/api/internal/handler/plot_config_scatter_line.go index 0bacd329..9a4d84af 100644 --- a/api/internal/handler/plot_config_scatter_line.go +++ b/api/internal/handler/plot_config_scatter_line.go @@ -4,8 +4,8 @@ import ( "net/http" "time" + 
"github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,9 +16,9 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param plot_config body model.PlotConfigScatterLinePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigScatterLinePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -26,7 +26,7 @@ import ( // @Router /projects/{project_id}/plot_configurations [post] // @Security Bearer func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { - var pc model.PlotConfigScatterLinePlot + var pc dto.PlotConfigScatterLinePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -43,10 +43,10 @@ func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) pc.CreatorID, pc.CreateDate = p.ID, time.Now() - pcNew, err := h.PlotConfigService.CreatePlotConfigScatterLinePlot(c.Request().Context(), pc) + pcNew, err := h.DBService.PlotConfigCreateScatterLine(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -60,9 +60,9 @@ func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Param plot_config body model.PlotConfigScatterLinePlot true "plot config payload" +// @Param plot_config body 
dto.PlotConfigScatterLinePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} dto.PlotConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -70,7 +70,7 @@ func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { // @Router /projects/{project_id}/plot_configurations/{plot_configuration_id} [put] // @Security Bearer func (h *ApiHandler) UpdatePlotConfigScatterLinePlot(c echo.Context) error { - var pc model.PlotConfigScatterLinePlot + var pc dto.PlotConfigScatterLinePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -93,11 +93,11 @@ func (h *ApiHandler) UpdatePlotConfigScatterLinePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) tNow := time.Now() pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow - pcUpdated, err := h.PlotConfigService.UpdatePlotConfigScatterLinePlot(c.Request().Context(), pc) + pcUpdated, err := h.DBService.PlotConfigUpdateScatterLine(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/profile.go b/api/internal/handler/profile.go index 4916cc33..b3c38dbd 100644 --- a/api/internal/handler/profile.go +++ b/api/internal/handler/profile.go @@ -5,8 +5,9 @@ import ( "errors" "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -15,14 +16,14 @@ import ( // @Summary creates a user profile // @Tags profile // @Produce json -// @Success 200 {object} model.Profile +// @Success 200 {object} db.ProfileCreateRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // 
@Failure 500 {object} echo.HTTPError // @Router /profiles [post] // @Security ClaimsOnly func (h *ApiHandler) CreateProfile(c echo.Context) error { - claims := c.Get("claims").(model.ProfileClaims) + claims := c.Get("claims").(dto.ProfileClaims) if !claims.X509Presented { return httperr.Forbidden(errors.New("invalid value for claim x509_presented")) @@ -31,14 +32,12 @@ func (h *ApiHandler) CreateProfile(c echo.Context) error { return httperr.Forbidden(errors.New("unable to create profile; cacUID claim is nil")) } - p := model.ProfileInfo{ + pNew, err := h.DBService.ProfileCreate(c.Request().Context(), db.ProfileCreateParams{ Username: claims.PreferredUsername, DisplayName: claims.Name, Email: claims.Email, - EDIPI: *claims.CacUID, - } - - pNew, err := h.ProfileService.CreateProfile(c.Request().Context(), p) + Edipi: int64(*claims.CacUID), + }) if err != nil { return httperr.InternalServerError(err) } @@ -50,7 +49,7 @@ func (h *ApiHandler) CreateProfile(c echo.Context) error { // @Summary gets profile for current authenticated user // @Tags profile // @Produce json -// @Success 200 {object} model.Profile +// @Success 200 {object} db.VProfile // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -58,9 +57,9 @@ func (h *ApiHandler) CreateProfile(c echo.Context) error { // @Security ClaimsOnly func (h *ApiHandler) GetMyProfile(c echo.Context) error { ctx := c.Request().Context() - claims := c.Get("claims").(model.ProfileClaims) + claims := c.Get("claims").(dto.ProfileClaims) - p, err := h.ProfileService.GetProfileWithTokensForClaims(ctx, claims) + p, err := h.DBService.ProfileGetWithTokensForClaims(ctx, claims) if err != nil { if errors.Is(err, sql.ErrNoRows) { return h.CreateProfile(c) @@ -68,7 +67,7 @@ func (h *ApiHandler) GetMyProfile(c echo.Context) error { return httperr.InternalServerError(err) } - pValidated, err := h.ProfileService.UpdateProfileForClaims(ctx, p, claims) + pValidated, err := 
h.DBService.ProfileUpdateForClaims(ctx, p, claims) if err != nil { return httperr.InternalServerError(err) } @@ -81,21 +80,21 @@ func (h *ApiHandler) GetMyProfile(c echo.Context) error { // @Summary creates token for a profile // @Tags profile // @Produce json -// @Success 200 {object} model.Token +// @Success 200 {object} dto.Token // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_tokens [post] // @Security ClaimsOnly func (h *ApiHandler) CreateToken(c echo.Context) error { - claims := c.Get("claims").(model.ProfileClaims) + claims := c.Get("claims").(dto.ProfileClaims) ctx := c.Request().Context() - p, err := h.ProfileService.GetProfileWithTokensForClaims(ctx, claims) + p, err := h.DBService.ProfileGetWithTokensForClaims(ctx, claims) if err != nil { return httperr.InternalServerError(err) } - token, err := h.ProfileService.CreateProfileToken(ctx, p.ID) + token, err := h.DBService.ProfileTokenCreate(ctx, p.ID) if err != nil { return httperr.InternalServerError(err) } @@ -115,7 +114,7 @@ func (h *ApiHandler) CreateToken(c echo.Context) error { // @Router /my_tokens/{token_id} [delete] // @Security ClaimsOnly func (h *ApiHandler) DeleteToken(c echo.Context) error { - claims := c.Get("claims").(model.ProfileClaims) + claims := c.Get("claims").(dto.ProfileClaims) ctx := c.Request().Context() tokenID := c.Param("token_id") @@ -123,11 +122,14 @@ func (h *ApiHandler) DeleteToken(c echo.Context) error { return httperr.Message(http.StatusBadRequest, "bad token id") } - p, err := h.ProfileService.GetProfileWithTokensForClaims(ctx, claims) + p, err := h.DBService.ProfileGetWithTokensForClaims(ctx, claims) if err != nil { return httperr.InternalServerError(err) } - if err := h.ProfileService.DeleteToken(ctx, p.ID, tokenID); err != nil { + if err := h.DBService.ProfileTokenDelete(ctx, db.ProfileTokenDeleteParams{ + ProfileID: p.ID, + TokenID: tokenID, + }); err != nil { return 
httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/project.go b/api/internal/handler/project.go index c02a3cf1..a2a4ba97 100644 --- a/api/internal/handler/project.go +++ b/api/internal/handler/project.go @@ -5,8 +5,9 @@ import ( "strings" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -17,13 +18,13 @@ import ( // @Summary lists all districts // @Tags project // @Produce json -// @Success 200 {array} model.District +// @Success 200 {array} dto.District // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /districts [get] func (h *ApiHandler) ListDistricts(c echo.Context) error { - dd, err := h.ProjectService.ListDistricts(c.Request().Context()) + dd, err := h.DBService.DistrictList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -36,7 +37,7 @@ func (h *ApiHandler) ListDistricts(c echo.Context) error { // @Tags project // @Produce json // @Param federal_id query string false "federal id" -// @Success 200 {array} model.Project +// @Success 200 {array} dto.Project // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -46,14 +47,14 @@ func (h *ApiHandler) ListProjects(c echo.Context) error { fedID := c.QueryParam("federal_id") if fedID != "" { - projects, err := h.ProjectService.ListProjectsByFederalID(ctx, fedID) + projects, err := h.DBService.ProjectListForFederalID(ctx, &fedID) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, projects) } - projects, err := h.ProjectService.ListProjects(ctx) + projects, err := 
h.DBService.ProjectList(ctx) if err != nil { return httperr.InternalServerError(err) } @@ -66,7 +67,7 @@ func (h *ApiHandler) ListProjects(c echo.Context) error { // @Tags project // @Produce json // @Param role query string false "role" -// @Success 200 {array} model.Project +// @Success 200 {array} dto.Project // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -75,10 +76,10 @@ func (h *ApiHandler) ListProjects(c echo.Context) error { func (h *ApiHandler) ListMyProjects(c echo.Context) error { ctx := c.Request().Context() - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) if p.IsAdmin { - projects, err := h.ProjectService.ListProjects(ctx) + projects, err := h.DBService.ProjectList(ctx) if err != nil { return httperr.InternalServerError(err) } @@ -89,7 +90,10 @@ func (h *ApiHandler) ListMyProjects(c echo.Context) error { if role != "" { role = strings.ToLower(role) if role == "admin" || role == "member" { - projects, err := h.ProjectService.ListProjectsForProfileRole(ctx, p.ID, role) + projects, err := h.DBService.ProjectListForProfileRole(ctx, db.ProjectListForProfileRoleParams{ + ProfileID: p.ID, + Name: role, + }) if err != nil { return httperr.InternalServerError(err) } @@ -98,7 +102,7 @@ func (h *ApiHandler) ListMyProjects(c echo.Context) error { return httperr.Message(http.StatusBadRequest, "role parameter must be 'admin' or 'member'") } - projects, err := h.ProjectService.ListProjectsForProfile(ctx, p.ID) + projects, err := h.DBService.ProjectListForProfileAdmin(ctx, p.ID) if err != nil { return httperr.InternalServerError(err) } @@ -111,7 +115,7 @@ func (h *ApiHandler) ListMyProjects(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.Project +// @Success 200 {array} dto.Project // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError 
// @Failure 500 {object} echo.HTTPError @@ -121,7 +125,7 @@ func (h *ApiHandler) ListProjectInstruments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - nn, err := h.ProjectService.ListProjectInstruments(c.Request().Context(), id) + nn, err := h.DBService.InstrumentListForProject(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -134,7 +138,7 @@ func (h *ApiHandler) ListProjectInstruments(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.InstrumentGroup +// @Success 200 {array} dto.InstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -144,7 +148,7 @@ func (h *ApiHandler) ListProjectInstrumentGroups(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - gg, err := h.ProjectService.ListProjectInstrumentGroups(c.Request().Context(), id) + gg, err := h.DBService.InstrumentGroupListForProject(c.Request().Context(), &id) if err != nil { return httperr.InternalServerError(err) } @@ -156,13 +160,13 @@ func (h *ApiHandler) ListProjectInstrumentGroups(c echo.Context) error { // @Summary gets the total number of non-deleted projects in the system // @Tags project // @Produce json -// @Success 200 {object} model.ProjectCount +// @Success 200 {object} dto.ProjectCount // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/count [get] func (h *ApiHandler) GetProjectCount(c echo.Context) error { - pc, err := h.ProjectService.GetProjectCount(c.Request().Context()) + pc, err := h.DBService.ProjectGetCount(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -175,7 +179,7 @@ func (h *ApiHandler) GetProjectCount(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path 
string true "project uuid" Format(uuid) -// @Success 200 {object} model.Project +// @Success 200 {object} dto.Project // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -185,7 +189,7 @@ func (h *ApiHandler) GetProject(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - project, err := h.ProjectService.GetProject(c.Request().Context(), id) + project, err := h.DBService.ProjectGet(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -197,21 +201,21 @@ func (h *ApiHandler) GetProject(c echo.Context) error { // @Summary accepts an array of instruments for bulk upload to the database // @Tags project // @Produce json -// @Param project_collection body model.ProjectCollection true "project collection payload" +// @Param project_collection body dto.ProjectCollection true "project collection payload" // @Param key query string false "api key" -// @Success 200 {array} model.IDSlugName +// @Success 200 {array} dto.IDSlugName // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects [post] // @Security Bearer func (h *ApiHandler) CreateProjectBulk(c echo.Context) error { - var pc model.ProjectCollection + var pc dto.ProjectCollection if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) t := time.Now() for idx := range pc { @@ -222,7 +226,7 @@ func (h *ApiHandler) CreateProjectBulk(c echo.Context) error { pc[idx].CreateDate = t } - pp, err := h.ProjectService.CreateProjectBulk(c.Request().Context(), pc) + pp, err := h.DBService.ProjectCreateBatch(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -235,9 +239,9 @@ func (h *ApiHandler) CreateProjectBulk(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path 
string true "project uuid" Format(uuid) -// @Param project body model.Project true "project payload" +// @Param project body dto.Project true "project payload" // @Param key query string false "api key" -// @Success 200 {object} model.Project +// @Success 200 {object} dto.Project // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -248,17 +252,17 @@ func (h *ApiHandler) UpdateProject(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var p model.Project + var p dto.Project if err := c.Bind(&p); err != nil { return httperr.MalformedBody(err) } p.ID = id - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) t := time.Now() p.UpdaterID, p.UpdateDate = &profile.ID, &t - pUpdated, err := h.ProjectService.UpdateProject(c.Request().Context(), p) + pUpdated, err := h.DBService.ProjectUpdate(c.Request().Context(), p) if err != nil { return httperr.InternalServerError(err) } @@ -283,7 +287,7 @@ func (h *ApiHandler) DeleteFlagProject(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.ProjectService.DeleteFlagProject(c.Request().Context(), id); err != nil { + if err := h.DBService.ProjectDeleteFlag(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -313,11 +317,11 @@ func (h *ApiHandler) UploadProjectImage(c echo.Context) error { if err != nil || fh == nil { return httperr.Message(http.StatusBadRequest, "attached form file 'image' required") } - if fh.Size > 2000000 { + if fh.Size > 2_000_000 { return httperr.Message(http.StatusBadRequest, "image exceeds max size of 2MB") } - if err := h.ProjectService.UploadProjectImage(c.Request().Context(), projectID, *fh, h.BlobService.UploadContext); err != nil { + if err := h.DBService.ProjectUploadImage(c.Request().Context(), projectID, *fh, h.BlobService); err != nil { return 
httperr.ServerErrorOrNotFound(err) } diff --git a/api/internal/handler/project_role.go b/api/internal/handler/project_role.go index 2b4ac9fb..620ccbf2 100644 --- a/api/internal/handler/project_role.go +++ b/api/internal/handler/project_role.go @@ -3,8 +3,9 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -17,18 +18,18 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.ProjectMembership +// @Success 200 {array} dto.ProjectMembership // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/members [get] // @Security Bearer func (h *ApiHandler) ListProjectMembers(c echo.Context) error { - id, err := uuid.Parse(c.Param("project_id")) + pID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } - mm, err := h.ProjectRoleService.ListProjectMembers(c.Request().Context(), id) + mm, err := h.DBService.ProfileProjectRoleListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -44,7 +45,7 @@ func (h *ApiHandler) ListProjectMembers(c echo.Context) error { // @Param profile_id path string true "profile uuid" Format(uuid) // @Param role_id path string true "role uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.ProjectMembership +// @Success 200 {object} dto.ProjectMembership // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -63,11 +64,14 @@ func (h *ApiHandler) AddProjectMemberRole(c 
echo.Context) error { if err != nil { return httperr.MalformedID(err) } + grantedBy := c.Get("profile").(dto.Profile) - // profile granting role to profile_id - grantedBy := c.Get("profile").(model.Profile) - - r, err := h.ProjectRoleService.AddProjectMemberRole(c.Request().Context(), projectID, profileID, roleID, grantedBy.ID) + r, err := h.DBService.ProfileProjectRoleCreate(c.Request().Context(), db.ProfileProjectRoleCreateParams{ + ProjectID: projectID, + ProfileID: profileID, + RoleID: roleID, + GrantedBy: &grantedBy.ID, + }) if err != nil { return httperr.InternalServerError(err) } @@ -104,7 +108,11 @@ func (h *ApiHandler) RemoveProjectMemberRole(c echo.Context) error { return httperr.MalformedID(err) } - if err := h.ProjectRoleService.RemoveProjectMemberRole(c.Request().Context(), projectID, profileID, roleID); err != nil { + if err := h.DBService.ProfileProjectRoleDelete(c.Request().Context(), db.ProfileProjectRoleDeleteParams{ + ProjectID: projectID, + ProfileID: profileID, + RoleID: roleID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/project_test.go b/api/internal/handler/project_test.go index 0b79f3c2..0fd5bec5 100644 --- a/api/internal/handler/project_test.go +++ b/api/internal/handler/project_test.go @@ -134,7 +134,7 @@ func TestProjects(t *testing.T) { ExpectedSchema: arrSchema, }, { - Name: "ListProjectsByFederalID", + Name: "ListProjectsForFederalID", URL: fmt.Sprintf("/projects?federal_id=%s", testProjectFederalID), Method: http.MethodGet, ExpectedStatus: http.StatusOK, diff --git a/api/internal/handler/report_config.go b/api/internal/handler/report_config.go index 0c6add4a..426401ae 100644 --- a/api/internal/handler/report_config.go +++ b/api/internal/handler/report_config.go @@ -7,8 +7,10 @@ import ( "strings" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" 
"github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/service" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -22,7 +24,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param key query string false "api key" // @Accept application/json -// @Success 200 {object} model.ReportConfig +// @Success 200 {object} dto.ReportConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -33,7 +35,7 @@ func (h *ApiHandler) ListProjectReportConfigs(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - rcs, err := h.ReportConfigService.ListProjectReportConfigs(c.Request().Context(), pID) + rcs, err := h.DBService.ReportConfigListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -46,10 +48,10 @@ func (h *ApiHandler) ListProjectReportConfigs(c echo.Context) error { // @Tags report-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param report_config body model.ReportConfig true "report config payload" +// @Param report_config body dto.ReportConfig true "report config payload" // @Param key query string false "api key" // @Accept application/json -// @Success 201 {object} model.ReportConfig +// @Success 201 {object} dto.ReportConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -60,17 +62,17 @@ func (h *ApiHandler) CreateReportConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var rc model.ReportConfig + var rc dto.ReportConfig if err := c.Bind(&rc); err != nil { return httperr.MalformedBody(err) } rc.ProjectID = pID - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) t := time.Now() 
rc.CreatorID, rc.CreateDate = profile.ID, t - rcNew, err := h.ReportConfigService.CreateReportConfig(c.Request().Context(), rc) + rcNew, err := h.DBService.ReportConfigCreate(c.Request().Context(), rc) if err != nil { return httperr.InternalServerError(err) } @@ -85,7 +87,7 @@ func (h *ApiHandler) CreateReportConfig(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param report_config_id path string true "report config uuid" Format(uuid) -// @Param report_config body model.ReportConfig true "report config payload" +// @Param report_config body dto.ReportConfig true "report config payload" // @Param key query string false "api key" // @Accept application/json // @Success 200 {object} map[string]interface{} @@ -103,18 +105,18 @@ func (h *ApiHandler) UpdateReportConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var rc model.ReportConfig + var rc dto.ReportConfig if err := c.Bind(&rc); err != nil { return httperr.MalformedBody(err) } rc.ID = rcID rc.ProjectID = pID - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) t := time.Now() rc.UpdaterID, rc.UpdateDate = &profile.ID, &t - if err := h.ReportConfigService.UpdateReportConfig(c.Request().Context(), rc); err != nil { + if err := h.DBService.ReportConfigUpdate(c.Request().Context(), rc); err != nil { return httperr.InternalServerError(err) } @@ -140,7 +142,7 @@ func (h *ApiHandler) DeleteReportConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.ReportConfigService.DeleteReportConfig(c.Request().Context(), rcID); err != nil { + if err := h.DBService.ReportConfigDelete(c.Request().Context(), rcID); err != nil { return httperr.InternalServerError(err) } @@ -154,7 +156,7 @@ func (h *ApiHandler) DeleteReportConfig(c echo.Context) error { // @Produce json // @Param report_config_id path string true "report config uuid" Format(uuid) // @Param key query 
string true "api key" -// @Success 200 {object} model.ReportConfigWithPlotConfigs +// @Success 200 {object} dto.ReportConfigWithPlotConfigs // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -164,7 +166,7 @@ func (h *ApiHandler) GetReportConfigWithPlotConfigs(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - rcs, err := h.ReportConfigService.GetReportConfigWithPlotConfigs(c.Request().Context(), rcID) + rcs, err := h.DBService.ReportConfigWithPlotConfigsGet(c.Request().Context(), rcID) if err != nil { return httperr.InternalServerError(err) } @@ -179,7 +181,7 @@ func (h *ApiHandler) GetReportConfigWithPlotConfigs(c echo.Context) error { // @Param report_config_id path string true "report config uuid" Format(uuid) // @Param key query string false "api key" // @Produce application/json -// @Success 201 {object} model.ReportDownloadJob +// @Success 201 {object} dto.ReportDownloadJob // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -191,9 +193,14 @@ func (h *ApiHandler) CreateReportDownloadJob(c echo.Context) error { return httperr.MalformedID(err) } isLandscape := strings.ToLower(c.QueryParam("is_landscape")) == "true" - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) - j, err := h.ReportConfigService.CreateReportDownloadJob(c.Request().Context(), rcID, p.ID, isLandscape) + j, err := h.DBService.ReportDownloadJobCreate(c.Request().Context(), h.PubsubService, service.ReportDownloadJobCreateOpts{ + ReportConfigID: rcID, + ProfileID: p.ID, + IsLandscape: isLandscape, + IsMock: h.Config.AuthJWTMocked, + }) if err != nil { return httperr.InternalServerError(err) } @@ -210,7 +217,7 @@ func (h *ApiHandler) CreateReportDownloadJob(c echo.Context) error { // @Param job_id path string true "download job uuid" Format(uuid) // @Param key query string false "api key" // @Produce 
application/json -// @Success 200 {object} model.ReportDownloadJob +// @Success 200 {object} dto.ReportDownloadJob // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -221,9 +228,12 @@ func (h *ApiHandler) GetReportDownloadJob(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(dto.Profile) - j, err := h.ReportConfigService.GetReportDownloadJob(c.Request().Context(), jobID, p.ID) + j, err := h.DBService.ReportDownloadJobGet(c.Request().Context(), db.ReportDownloadJobGetParams{ + ID: jobID, + Creator: p.ID, + }) if err != nil { return httperr.InternalServerError(err) } @@ -236,7 +246,7 @@ func (h *ApiHandler) GetReportDownloadJob(c echo.Context) error { // @Summary updates a job that creates a pdf report // @Tags report-config // @Param job_id path string true "download job uuid" Format(uuid) -// @Param report_download_job body model.ReportDownloadJob true "report download job payload" +// @Param report_download_job body dto.ReportDownloadJob true "report download job payload" // @Param key query string true "api key" // @Accept application/json // @Produce application/json @@ -250,14 +260,14 @@ func (h *ApiHandler) UpdateReportDownloadJob(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var j model.ReportDownloadJob + var j dto.ReportDownloadJob if err := c.Bind(&j); err != nil { return httperr.MalformedBody(err) } j.ID = jobID j.ProgressUpdateDate = time.Now() - if err := h.ReportConfigService.UpdateReportDownloadJob(c.Request().Context(), j); err != nil { + if err := h.DBService.ReportDownloadJobUpdate(c.Request().Context(), j); err != nil { return httperr.InternalServerError(err) } @@ -283,9 +293,12 @@ func (h *ApiHandler) DownloadReport(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := 
c.Get("profile").(dto.Profile) - j, err := h.ReportConfigService.GetReportDownloadJob(c.Request().Context(), jobID, p.ID) + j, err := h.DBService.ReportDownloadJobGet(c.Request().Context(), db.ReportDownloadJobGetParams{ + ID: jobID, + Creator: p.ID, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/search.go b/api/internal/handler/search.go index ad9aebf7..c4a20335 100644 --- a/api/internal/handler/search.go +++ b/api/internal/handler/search.go @@ -1,50 +1,41 @@ package handler import ( - "context" - "fmt" - + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "net/http" "github.com/labstack/echo/v4" ) -type searchFunc func(ctx context.Context, searchText string, limit int) ([]model.SearchResult, error) - -// Search godoc +// ProjectSearch godoc // // @Summary allows searching using a string on different entities // @Tags search // @Produce json // @Param entity path string true "entity to search (i.e. 
projects, etc.)" // @Param q query string false "search string" -// @Success 200 {array} model.SearchResult +// @Success 200 {array} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError -// @Router /search/{entity} [get] -func (h *ApiHandler) Search(c echo.Context) error { - var fn searchFunc - pfn := &fn - switch entity := c.Param("entity"); entity { - case "projects": - *pfn = h.ProjectService.SearchProjects - default: - return httperr.Message(http.StatusBadRequest, fmt.Sprintf("search not implemented for entity: %s", entity)) - } - +// @Router /search/projects [get] +func (h *ApiHandler) ProjectSearch(c echo.Context) error { searchText := c.QueryParam("q") if searchText == "" { - return c.JSON(http.StatusOK, make([]model.SearchResult, 0)) + return c.JSON(http.StatusOK, make([]dto.SearchResult, 0)) } // Get Desired Number of Results; Hardcode 5 for now; - limit := 5 - rr, err := fn(c.Request().Context(), searchText, limit) + var limit int32 = 5 + ps, err := h.DBService.ProjectListForNameSearch(c.Request().Context(), db.ProjectListForNameSearchParams{ + Name: &searchText, + ResultLimit: limit, + }) + if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, rr) + return c.JSON(http.StatusOK, ps) } diff --git a/api/internal/handler/submittal.go b/api/internal/handler/submittal.go index 203d888e..c6156a32 100644 --- a/api/internal/handler/submittal.go +++ b/api/internal/handler/submittal.go @@ -4,8 +4,8 @@ import ( "net/http" "strings" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -17,24 +17,27 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param missing query bool false "filter by missing projects only" -// @Success 
200 {array} model.Submittal +// @Success 200 {array} dto.Submittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/submittals [get] func (h *ApiHandler) ListProjectSubmittals(c echo.Context) error { - id, err := uuid.Parse(c.Param("project_id")) + pID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } - var fmo bool - mo := c.QueryParam("missing") - if strings.ToLower(mo) == "true" { - fmo = true + var showMissing bool + missingParam := c.QueryParam("missing") + if strings.ToLower(missingParam) == "true" { + showMissing = true } - subs, err := h.SubmittalService.ListProjectSubmittals(c.Request().Context(), id, fmo) + subs, err := h.DBService.SubmittalListForProject(c.Request().Context(), db.SubmittalListForProjectParams{ + ProjectID: pID, + ShowIncompleteMissing: showMissing, + }) if err != nil { return httperr.InternalServerError(err) } @@ -48,24 +51,27 @@ func (h *ApiHandler) ListProjectSubmittals(c echo.Context) error { // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param missing query bool false "filter by missing projects only" -// @Success 200 {array} model.Submittal +// @Success 200 {array} dto.Submittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/{instrument_id}/submittals [get] func (h *ApiHandler) ListInstrumentSubmittals(c echo.Context) error { - id, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - var fmo bool - mo := c.QueryParam("missing") - if strings.ToLower(mo) == "true" { - fmo = true + var showMissing bool + missingParam := c.QueryParam("missing") + if strings.ToLower(missingParam) == "true" { + showMissing = true } - subs, err := 
h.SubmittalService.ListInstrumentSubmittals(c.Request().Context(), id, fmo) + subs, err := h.DBService.SubmittalListForInstrument(c.Request().Context(), db.SubmittalListForInstrumentParams{ + InstrumentID: instID, + ShowIncompleteMissing: showMissing, + }) if err != nil { return httperr.InternalServerError(err) } @@ -78,24 +84,27 @@ func (h *ApiHandler) ListInstrumentSubmittals(c echo.Context) error { // @Tags submittal // @Produce json // @Param alert_config_id path string true "alert config uuid" Format(uuid) -// @Success 200 {array} model.Submittal +// @Success 200 {array} dto.Submittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /alert_configs/{alert_config_id}/submittals [get] func (h *ApiHandler) ListAlertConfigSubmittals(c echo.Context) error { - id, err := uuid.Parse(c.Param("alert_config_id")) + acID, err := uuid.Parse(c.Param("alert_config_id")) if err != nil { return httperr.MalformedID(err) } - var fmo bool - mo := c.QueryParam("missing") - if strings.ToLower(mo) == "true" { - fmo = true + var showMissing bool + missingParam := c.QueryParam("missing") + if strings.ToLower(missingParam) == "true" { + showMissing = true } - subs, err := h.SubmittalService.ListAlertConfigSubmittals(c.Request().Context(), id, fmo) + subs, err := h.DBService.SubmittalListForAlertConfig(c.Request().Context(), db.SubmittalListForAlertConfigParams{ + AlertConfigID: acID, + ShowIncompleteMissing: showMissing, + }) if err != nil { return httperr.InternalServerError(err) } @@ -116,14 +125,14 @@ func (h *ApiHandler) ListAlertConfigSubmittals(c echo.Context) error { // @Router /submittals/{submittal_id}/verify_missing [put] // @Security Bearer func (h *ApiHandler) VerifyMissingSubmittal(c echo.Context) error { - id, err := uuid.Parse(c.Param("submittal_id")) + subID, err := uuid.Parse(c.Param("submittal_id")) if err != nil { return httperr.MalformedID(err) } - if err := 
h.SubmittalService.VerifyMissingSubmittal(c.Request().Context(), id); err != nil { + if err := h.DBService.SubmittalUpdateVerifyMissing(c.Request().Context(), subID); err != nil { return httperr.ServerErrorOrNotFound(err) } - return c.JSON(http.StatusOK, map[string]interface{}{"submittal_id": id}) + return c.JSON(http.StatusOK, map[string]interface{}{"submittal_id": subID}) } // VerifyMissingAlertConfigSubmittals godoc @@ -140,12 +149,12 @@ func (h *ApiHandler) VerifyMissingSubmittal(c echo.Context) error { // @Router /alert_configs/{alert_config_id}/submittals/verify_missing [put] // @Security Bearer func (h *ApiHandler) VerifyMissingAlertConfigSubmittals(c echo.Context) error { - id, err := uuid.Parse(c.Param("alert_config_id")) + acID, err := uuid.Parse(c.Param("alert_config_id")) if err != nil { return httperr.MalformedID(err) } - if err := h.SubmittalService.VerifyMissingAlertConfigSubmittals(c.Request().Context(), id); err != nil { + if err := h.DBService.SubmittalUpdateVerifyMissingForAlertConfig(c.Request().Context(), &acID); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, map[string]interface{}{"alert_config_id": id}) + return c.JSON(http.StatusOK, map[string]interface{}{"alert_config_id": acID}) } diff --git a/api/internal/handler/timeseries.go b/api/internal/handler/timeseries.go index 55a6c5fe..ff01abbb 100644 --- a/api/internal/handler/timeseries.go +++ b/api/internal/handler/timeseries.go @@ -1,8 +1,8 @@ package handler import ( + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "net/http" @@ -17,7 +17,7 @@ import ( // @Produce json // @Param timeseries_id path string true "timeseries uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {object} model.Timeseries +// @Success 200 {object} dto.Timeseries // @Failure 400 {object} 
echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -28,7 +28,7 @@ func (h *ApiHandler) GetTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - t, err := h.TimeseriesService.GetTimeseries(c.Request().Context(), tsID) + t, err := h.DBService.TimeseriesGet(c.Request().Context(), tsID) if err != nil { return httperr.InternalServerError(err) } @@ -42,7 +42,7 @@ func (h *ApiHandler) GetTimeseries(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.Timeseries +// @Success 200 {array} dto.Timeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -52,7 +52,7 @@ func (h *ApiHandler) ListInstrumentTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - tt, err := h.TimeseriesService.ListInstrumentTimeseries(c.Request().Context(), nID) + tt, err := h.DBService.TimeseriesListForInstrument(c.Request().Context(), nID) if err != nil { return httperr.InternalServerError(err) } @@ -65,7 +65,7 @@ func (h *ApiHandler) ListInstrumentTimeseries(c echo.Context) error { // @Tags timeseries // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {array} model.Timeseries +// @Success 200 {array} dto.Timeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -75,7 +75,7 @@ func (h *ApiHandler) ListInstrumentGroupTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - tt, err := h.TimeseriesService.ListInstrumentGroupTimeseries(c.Request().Context(), gID) + tt, err := h.DBService.TimeseriesListForInstrumentGroup(c.Request().Context(), gID) if err != nil { return 
httperr.InternalServerError(err) } @@ -88,7 +88,7 @@ func (h *ApiHandler) ListInstrumentGroupTimeseries(c echo.Context) error { // @Tags timeseries // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.Timeseries +// @Success 200 {array} dto.Timeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -98,7 +98,7 @@ func (h *ApiHandler) ListProjectTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - tt, err := h.TimeseriesService.ListProjectTimeseries(c.Request().Context(), pID) + tt, err := h.DBService.TimeseriesListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -110,24 +110,23 @@ func (h *ApiHandler) ListProjectTimeseries(c echo.Context) error { // @Summary creates one or more timeseries // @Tags timeseries // @Produce json -// @Param timeseries_collection_items body model.TimeseriesCollectionItems true "timeseries collection items payload" +// @Param timeseries_collection_items body dto.TimeseriesCollectionItems true "timeseries collection items payload" // @Param key query string false "api key" -// @Success 200 {array} map[string]uuid.UUID +// @Success 200 {object} map[string]uuid.UUID // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /timeseries [post] // @Security Bearer func (h *ApiHandler) CreateTimeseries(c echo.Context) error { - var tc model.TimeseriesCollectionItems + var tc dto.TimeseriesCollectionItems if err := c.Bind(&tc); err != nil { return httperr.MalformedBody(err) } - tt, err := h.TimeseriesService.CreateTimeseriesBatch(c.Request().Context(), tc.Items) - if err != nil { + if err := h.DBService.TimeseriesCreateBatch(c.Request().Context(), tc.Items); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, tt) + 
return c.JSON(http.StatusCreated, map[string]interface{}{}) } // UpdateTimeseries godoc @@ -136,7 +135,7 @@ func (h *ApiHandler) CreateTimeseries(c echo.Context) error { // @Tags timeseries // @Produce json // @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param timeseries body model.Timeseries true "timeseries payload" +// @Param timeseries body dto.Timeseries true "timeseries payload" // @Param key query string false "api key" // @Success 200 {object} map[string]uuid.UUID // @Failure 400 {object} echo.HTTPError @@ -149,12 +148,12 @@ func (h *ApiHandler) UpdateTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - t := model.Timeseries{} + var t dto.Timeseries if err := c.Bind(&t); err != nil { return httperr.MalformedBody(err) } t.ID = id - if _, err := h.TimeseriesService.UpdateTimeseries(c.Request().Context(), t); err != nil { + if err := h.DBService.TimeseriesUpdate(c.Request().Context(), t); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, t) @@ -178,7 +177,7 @@ func (h *ApiHandler) DeleteTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.TimeseriesService.DeleteTimeseries(c.Request().Context(), id); err != nil { + if err := h.DBService.TimeseriesDelete(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/timeseries_calculated.go b/api/internal/handler/timeseries_calculated.go index 12fa2b1e..15abe0a1 100644 --- a/api/internal/handler/timeseries_calculated.go +++ b/api/internal/handler/timeseries_calculated.go @@ -6,8 +6,8 @@ import ( "github.com/google/uuid" "github.com/labstack/echo/v4" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" ) // GetInstrumentCalculations godoc 
@@ -15,7 +15,7 @@ import ( // @Summary lists calculations associated with an instrument // @Tags formula // @Produce json -// @Success 200 {array} model.CalculatedTimeseries +// @Success 200 {array} dto.CalculatedTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -29,7 +29,7 @@ func (h *ApiHandler) GetInstrumentCalculations(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - formulas, err := h.CalculatedTimeseriesService.GetAllCalculatedTimeseriesForInstrument(c.Request().Context(), instrumentID) + formulas, err := h.DBService.TimeseriesComputedListForInstrument(c.Request().Context(), &instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -49,7 +49,7 @@ func (h *ApiHandler) GetInstrumentCalculations(c echo.Context) error { // @Router /formulas [post] // @Security Bearer func (h *ApiHandler) CreateCalculation(c echo.Context) error { - var formula model.CalculatedTimeseries + var formula dto.CalculatedTimeseries if err := c.Bind(&formula); err != nil { return httperr.MalformedBody(err) } @@ -58,7 +58,7 @@ func (h *ApiHandler) CreateCalculation(c echo.Context) error { formula.FormulaName = formula.Formula } - if err := h.CalculatedTimeseriesService.CreateCalculatedTimeseries(c.Request().Context(), formula); err != nil { + if err := h.DBService.TimeseriesComputedCreateOrUpdate(c.Request().Context(), formula); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"id": formula.ID}) @@ -71,7 +71,7 @@ func (h *ApiHandler) CreateCalculation(c echo.Context) error { // @Produce json // @Param formula_id path string true "formula uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.CalculatedTimeseries +// @Success 200 {array} dto.CalculatedTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} 
echo.HTTPError @@ -83,7 +83,7 @@ func (h *ApiHandler) UpdateCalculation(c echo.Context) error { return httperr.MalformedID(err) } - var formula model.CalculatedTimeseries + var formula dto.CalculatedTimeseries if err := c.Bind(&formula); err != nil { return httperr.MalformedBody(err) } @@ -93,7 +93,7 @@ func (h *ApiHandler) UpdateCalculation(c echo.Context) error { formula.FormulaName = formula.Formula } - if err := h.CalculatedTimeseriesService.UpdateCalculatedTimeseries(c.Request().Context(), formula); err != nil { + if err := h.DBService.TimeseriesComputedCreateOrUpdate(c.Request().Context(), formula); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, formula) @@ -117,7 +117,7 @@ func (h *ApiHandler) DeleteCalculation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.CalculatedTimeseriesService.DeleteCalculatedTimeseries(c.Request().Context(), calculationID); err != nil { + if err := h.DBService.TimeseriesComputedDelete(c.Request().Context(), calculationID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/timeseries_cwms.go b/api/internal/handler/timeseries_cwms.go index 19f21b9a..1291f58e 100644 --- a/api/internal/handler/timeseries_cwms.go +++ b/api/internal/handler/timeseries_cwms.go @@ -3,9 +3,8 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -17,7 +16,7 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.TimeseriesCwms +// @Success 200 {array} 
dto.TimeseriesCwms // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -32,7 +31,7 @@ func (h *ApiHandler) ListTimeseriesCwms(c echo.Context) error { return httperr.MalformedID(err) } - tss, err := h.TimeseriesCwmsService.ListTimeseriesCwms(c.Request().Context(), instrumentID) + tss, err := h.DBService.TimeseriesCwmsList(c.Request().Context(), instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -47,8 +46,8 @@ func (h *ApiHandler) ListTimeseriesCwms(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param timeseries_cwms_arr body []model.TimeseriesCwms true "array of cwms timeseries to create" -// @Success 200 {array} model.TimeseriesCwms +// @Param timeseries_cwms_arr body []dto.TimeseriesCwms true "array of cwms timeseries to create" +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -63,17 +62,16 @@ func (h *ApiHandler) CreateTimeseriesCwms(c echo.Context) error { return httperr.MalformedID(err) } - var tcc []model.TimeseriesCwms + var tcc []dto.TimeseriesCwms if err := c.Bind(&tcc); err != nil { return httperr.MalformedBody(err) } - tss, err := h.TimeseriesCwmsService.CreateTimeseriesCwmsBatch(c.Request().Context(), instrumentID, tcc) - if err != nil { + if err := h.DBService.TimeseriesCwmsCreateBatch(c.Request().Context(), instrumentID, tcc); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, tss) + return c.JSON(http.StatusCreated, map[string]interface{}{"instrument_id": instrumentID}) } // UpdateTimeseriesCwms godoc @@ -84,8 +82,8 @@ func (h *ApiHandler) CreateTimeseriesCwms(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param 
instrument_id path string true "instrument uuid" Format(uuid) // @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param timeseries_cwms body model.TimeseriesCwms true "cwms timeseries to update" -// @Success 200 {array} model.TimeseriesCwms +// @Param timeseries_cwms body dto.TimeseriesCwms true "cwms timeseries to update" +// @Success 200 {array} dto.TimeseriesCwms // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -104,14 +102,14 @@ func (h *ApiHandler) UpdateTimeseriesCwms(c echo.Context) error { return httperr.MalformedID(err) } - var tc model.TimeseriesCwms + var tc dto.TimeseriesCwms if err := c.Bind(&tc); err != nil { return httperr.MalformedBody(err) } tc.InstrumentID = instrumentID tc.ID = timeseriesID - if err := h.TimeseriesCwmsService.UpdateTimeseriesCwms(c.Request().Context(), tc); err != nil { + if err := h.DBService.TimeseriesCwmsUpdate(c.Request().Context(), tc); err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/timeseries_process.go b/api/internal/handler/timeseries_process.go index 1819689d..fff15114 100644 --- a/api/internal/handler/timeseries_process.go +++ b/api/internal/handler/timeseries_process.go @@ -5,8 +5,9 @@ import ( "strconv" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -30,7 +31,7 @@ const ( // @Param after query string false "after time" Format(date-time) // @param before query string false "before time" Format(date-time) // @Param threshold query number false "downsample threshold" -// @Success 200 {object} model.MeasurementCollection +// @Success 200 {object} dto.MeasurementCollection // @Failure 400 {object} echo.HTTPError // @Failure 404 
{object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -42,13 +43,13 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) erro return httperr.MalformedID(err) } - isStored, err := h.TimeseriesService.GetStoredTimeseriesExists(c.Request().Context(), tsID) + isStored, err := h.DBService.TimeseriesGetExistsStored(c.Request().Context(), tsID) if err != nil { return httperr.InternalServerError(err) } if isStored { - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) @@ -65,14 +66,18 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) erro threshold = tr } - resBody, err := h.MeasurementService.ListTimeseriesMeasurements(c.Request().Context(), tsID, tw, threshold) + resBody, err := h.DBService.TimeseriesMeasurementListRange(c.Request().Context(), db.TimeseriesMeasurementListRangeParams{ + TimeseriesID: tsID, + AfterTime: tw.After, + BeforeTime: tw.Before, + }, threshold) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, resBody) } - f := model.ProcessMeasurementFilter{TimeseriesID: &tsID} + f := db.ProcessMeasurementFilter{TimeseriesID: &tsID} selectMeasurements := selectMeasurementsHandler(h, f, byTimeseriesRequest) return selectMeasurements(c) @@ -87,7 +92,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) erro // @Param after query string false "after time" Format(date-time) // @Param before query string false "before time" Format(date-time) // @Param threshold query number false "downsample threshold" -// @Success 200 {object} model.MeasurementCollection +// @Success 200 {object} dto.MeasurementCollection // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -97,7 +102,7 @@ func (h *ApiHandler) 
ListTimeseriesMeasurementsByInstrument(c echo.Context) erro if err != nil { return httperr.MalformedID(err) } - f := model.ProcessMeasurementFilter{InstrumentID: &iID} + f := db.ProcessMeasurementFilter{InstrumentID: &iID} selectMeasurements := selectMeasurementsHandler(h, f, byInstrumentRequest) return selectMeasurements(c) @@ -109,7 +114,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrument(c echo.Context) erro // @Tags timeseries // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {object} model.MeasurementCollection +// @Success 200 {object} dto.MeasurementCollection // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -119,7 +124,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrumentGroup(c echo.Context) if err != nil { return httperr.MalformedID(err) } - f := model.ProcessMeasurementFilter{InstrumentGroupID: &igID} + f := db.ProcessMeasurementFilter{InstrumentGroupID: &igID} selectMeasurements := selectMeasurementsHandler(h, f, byInstrumentGroupRequest) return selectMeasurements(c) @@ -132,7 +137,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrumentGroup(c echo.Context) // @Accept json // @Produce json // @Param instrument_ids body []uuid.UUID true "array of instrument uuids" -// @Success 200 {array} map[uuid.UUID]model.MeasurementCollectionLean +// @Success 200 {array} map[uuid.UUID]dto.MeasurementCollectionLean // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -142,38 +147,15 @@ func (h *ApiHandler) ListTimeseriesMeasurementsExplorer(c echo.Context) error { if err := (&echo.DefaultBinder{}).BindBody(c, &iIDs); err != nil { return httperr.MalformedBody(err) } - f := model.ProcessMeasurementFilter{InstrumentIDs: iIDs} + f := db.ProcessMeasurementFilter{InstrumentIDs: iIDs} selectMeasurements := selectMeasurementsHandler(h, 
f, explorerRequest) return selectMeasurements(c) } -// ListInclinometerTimeseriesMeasurementsExplorer godoc -// -// @Summary list inclinometer timeseries measurements for explorer page -// @Tags explorer -// @Accept json -// @Produce json -// @Param instrument_ids body []uuid.UUID true "array of inclinometer instrument uuids" -// @Success 200 {array} map[uuid.UUID]model.InclinometerMeasurementCollectionLean -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /inclinometer_explorer [post] -func (h *ApiHandler) ListInclinometerTimeseriesMeasurementsExplorer(c echo.Context) error { - var iIDs []uuid.UUID - if err := (&echo.DefaultBinder{}).BindBody(c, &iIDs); err != nil { - return httperr.MalformedBody(err) - } - f := model.ProcessMeasurementFilter{InstrumentIDs: iIDs} - - selectMeasurements := selectInclinometerMeasurementsHandler(h, f) - return selectMeasurements(c) -} - -func selectMeasurementsHandler(h *ApiHandler, f model.ProcessMeasurementFilter, requestType processTimeseriesType) echo.HandlerFunc { +func selectMeasurementsHandler(h *ApiHandler, f db.ProcessMeasurementFilter, requestType processTimeseriesType) echo.HandlerFunc { return func(c echo.Context) error { - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) @@ -193,7 +175,7 @@ func selectMeasurementsHandler(h *ApiHandler, f model.ProcessMeasurementFilter, threshold = tr } - mrc, err := h.ProcessTimeseriesService.SelectMeasurements(c.Request().Context(), f) + mrc, err := h.DBService.ProcessMeasurementListDynamic(c.Request().Context(), f) if err != nil { return httperr.InternalServerError(err) } @@ -214,25 +196,3 @@ func selectMeasurementsHandler(h *ApiHandler, f model.ProcessMeasurementFilter, } } } - -func selectInclinometerMeasurementsHandler(h 
*ApiHandler, f model.ProcessMeasurementFilter) echo.HandlerFunc { - return func(c echo.Context) error { - var tw model.TimeWindow - a, b := c.QueryParam("after"), c.QueryParam("before") - if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { - return httperr.MalformedDate(err) - } - - f.After = tw.After - f.Before = tw.Before - - mrc, err := h.ProcessTimeseriesService.SelectInclinometerMeasurements(c.Request().Context(), f) - if err != nil { - return httperr.InternalServerError(err) - } - - resBody, err := mrc.GroupByInstrument() - - return c.JSON(http.StatusOK, resBody) - } -} diff --git a/api/internal/handler/unit.go b/api/internal/handler/unit.go index 27028bee..ddd3e0f4 100644 --- a/api/internal/handler/unit.go +++ b/api/internal/handler/unit.go @@ -4,7 +4,6 @@ import ( "net/http" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -13,11 +12,11 @@ import ( // @Summary lists the available units // @Tags unit // @Produce json -// @Success 200 {array} model.Unit +// @Success 200 {array} dto.Unit // @Failure 400 {object} echo.HTTPError // @Router /units [get] func (h *ApiHandler) ListUnits(c echo.Context) error { - uu, err := h.UnitService.ListUnits(c.Request().Context()) + uu, err := h.DBService.UnitsList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/uploader.go b/api/internal/handler/uploader.go index dbe6f8ec..da163b3f 100644 --- a/api/internal/handler/uploader.go +++ b/api/internal/handler/uploader.go @@ -4,8 +4,8 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,7 +16,7 @@ import ( // @Tags uploader // @Produce json // @Param project_id 
path string true "project uuid" Format(uuid) -// @Success 200 {array} model.UploaderConfig +// @Success 200 {array} dto.UploaderConfig // @Failure 400 {object} echo.HTTPError // @Router /projects/{project_id}/uploader_configs [get] func (h *ApiHandler) ListUploaderConfigsForProject(c echo.Context) error { @@ -24,7 +24,7 @@ func (h *ApiHandler) ListUploaderConfigsForProject(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - uu, err := h.UploaderService.ListUploaderConfigsForProject(c.Request().Context(), projectID) + uu, err := h.DBService.UploaderConfigListForProject(c.Request().Context(), projectID) if err != nil { return httperr.InternalServerError(err) } @@ -38,7 +38,7 @@ func (h *ApiHandler) ListUploaderConfigsForProject(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param uploader_config_id path string true "uploader config uuid" Format(uuid) -// @Success 200 {array} model.UploaderConfigMapping +// @Success 200 {array} dto.UploaderConfigMapping // @Failure 400 {object} echo.HTTPError // @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [get] func (h *ApiHandler) ListUploaderConfigMappings(c echo.Context) error { @@ -50,7 +50,7 @@ func (h *ApiHandler) ListUploaderConfigMappings(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - mm, err := h.UploaderService.ListUploaderConfigMappings(c.Request().Context(), ucID) + mm, err := h.DBService.UploaderConfigMappingList(c.Request().Context(), ucID) if err != nil { return httperr.InternalServerError(err) } @@ -63,7 +63,7 @@ func (h *ApiHandler) ListUploaderConfigMappings(c echo.Context) error { // @Tags uploader // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param uploader_config body model.UploaderConfig true "uploader config payload" +// @Param uploader_config body dto.UploaderConfig true "uploader config payload" // @Success 201 {object} 
map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Router /projects/{project_id}/uploader_configs [post] @@ -72,18 +72,18 @@ func (h *ApiHandler) CreateUploaderConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var uc model.UploaderConfig + var uc dto.UploaderConfig if err := c.Bind(&uc); err != nil { return httperr.MalformedBody(err) } - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) uc.CreatorID = profile.ID uc.CreateDate = time.Now() uc.ProjectID = projectID - newID, err := h.UploaderService.CreateUploaderConfig(c.Request().Context(), uc) + newID, err := h.DBService.UploaderConfigCreate(c.Request().Context(), uc) if err != nil { return httperr.InternalServerError(err) } @@ -97,7 +97,7 @@ func (h *ApiHandler) CreateUploaderConfig(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param uploader_config_id path string true "uploader config uuid" Format(uuid) -// @Param uploader_config body model.UploaderConfig true "uploader config payload" +// @Param uploader_config body dto.UploaderConfig true "uploader config payload" // @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Router /projects/{project_id}/uploader_configs/{uploader_config_id} [put] @@ -110,12 +110,12 @@ func (h *ApiHandler) UpdateUploaderConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var uc model.UploaderConfig + var uc dto.UploaderConfig if err := c.Bind(&uc); err != nil { return httperr.MalformedBody(err) } - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(dto.Profile) t := time.Now() uc.UpdaterID = &profile.ID @@ -123,7 +123,7 @@ func (h *ApiHandler) UpdateUploaderConfig(c echo.Context) error { uc.ProjectID = projectID uc.ID = ucID - if err := h.UploaderService.UpdateUploaderConfig(c.Request().Context(), uc); err != nil { + if err := 
h.DBService.UploaderConfigUpdate(c.Request().Context(), uc); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) @@ -148,7 +148,7 @@ func (h *ApiHandler) DeleteUploaderConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.UploaderService.DeleteUploaderConfig(c.Request().Context(), ucID); err != nil { + if err := h.DBService.UploaderConfigDelete(c.Request().Context(), ucID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) @@ -161,7 +161,7 @@ func (h *ApiHandler) DeleteUploaderConfig(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param uploader_config_id path string true "uploader config uuid" Format(uuid) -// @Param uploader_config_mappings body []model.UploaderConfigMapping true "uploader config mappings payload" +// @Param uploader_config_mappings body []dto.UploaderConfigMapping true "uploader config mappings payload" // @Success 201 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [post] @@ -174,11 +174,11 @@ func (h *ApiHandler) CreateUploaderConfigMappings(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - mm := make([]model.UploaderConfigMapping, 0) + mm := make([]dto.UploaderConfigMapping, 0) if err := c.Bind(&mm); err != nil { return httperr.MalformedBody(err) } - if err := h.UploaderService.CreateUploaderConfigMappings(c.Request().Context(), ucID, mm); err != nil { + if err := h.DBService.UploaderConfigMappingCreateBatch(c.Request().Context(), ucID, mm); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusCreated, map[string]interface{}{"id": ucID}) @@ -191,7 +191,7 @@ func (h *ApiHandler) CreateUploaderConfigMappings(c echo.Context) error { // @Produce json // 
@Param project_id path string true "project uuid" Format(uuid) // @Param uploader_config_id path string true "uploader config uuid" Format(uuid) -// @Param uploader_config_mappings body []model.UploaderConfigMapping true "uploader config mappings payload" +// @Param uploader_config_mappings body []dto.UploaderConfigMapping true "uploader config mappings payload" // @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [put] @@ -204,11 +204,11 @@ func (h *ApiHandler) UpdateUploaderConfigMappings(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - mm := make([]model.UploaderConfigMapping, 0) + mm := make([]dto.UploaderConfigMapping, 0) if err := c.Bind(&mm); err != nil { return httperr.MalformedBody(err) } - if err := h.UploaderService.UpdateUploaderConfigMappings(c.Request().Context(), ucID, mm); err != nil { + if err := h.DBService.UploaderConfigMappingUpdateBatch(c.Request().Context(), ucID, mm); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) @@ -233,7 +233,7 @@ func (h *ApiHandler) DeleteAllUploaderConfigMappingsForUploaderConfig(c echo.Con if err != nil { return httperr.MalformedID(err) } - if err := h.UploaderService.DeleteAllUploaderConfigMappingsForUploaderConfig(c.Request().Context(), ucID); err != nil { + if err := h.DBService.UploaderConfigMappingDeleteForUploaderConfig(c.Request().Context(), ucID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) diff --git a/api/internal/middleware/audit.go b/api/internal/middleware/audit.go index f7edc46c..7642f497 100644 --- a/api/internal/middleware/audit.go +++ b/api/internal/middleware/audit.go @@ -5,30 +5,31 @@ import ( "strconv" "strings" + "github.com/USACE/instrumentation-api/api/internal/db" + 
"github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/golang-jwt/jwt/v5" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -func mapClaims(user *jwt.Token) (model.ProfileClaims, error) { +func mapClaims(user *jwt.Token) (dto.ProfileClaims, error) { claims, ok := user.Claims.(jwt.MapClaims) if !ok { - return model.ProfileClaims{}, errors.New("unable to map claims") + return dto.ProfileClaims{}, errors.New("unable to map claims") } preferredUsername, ok := claims["preferred_username"].(string) if !ok || preferredUsername == "" { - return model.ProfileClaims{}, errors.New("error parsing token claims: email") + return dto.ProfileClaims{}, errors.New("error parsing token claims: email") } email, ok := claims["email"].(string) if !ok || email == "" { - return model.ProfileClaims{}, errors.New("error parsing token claims: email") + return dto.ProfileClaims{}, errors.New("error parsing token claims: email") } name, ok := claims["name"].(string) if !ok || name == "" { - return model.ProfileClaims{}, errors.New("error parsing token claims: name") + return dto.ProfileClaims{}, errors.New("error parsing token claims: name") } dnClaim, exists := claims["subjectDN"] @@ -36,7 +37,7 @@ func mapClaims(user *jwt.Token) (model.ProfileClaims, error) { if exists && dnClaim != nil { dnStr, ok := dnClaim.(string) if !ok { - return model.ProfileClaims{}, errors.New("error parsing token claims: subjectDN") + return dto.ProfileClaims{}, errors.New("error parsing token claims: subjectDN") } subjectDN = &dnStr } @@ -46,7 +47,7 @@ func mapClaims(user *jwt.Token) (model.ProfileClaims, error) { if exists && cacUIDClaim != nil { cacUIDClaims, err := strconv.Atoi(cacUIDClaim.(string)) if err != nil { - return model.ProfileClaims{}, errors.New("error parsing token claims: cacUID") + return dto.ProfileClaims{}, errors.New("error parsing token claims: cacUID") } 
cacUID = &cacUIDClaims } @@ -57,7 +58,7 @@ func mapClaims(user *jwt.Token) (model.ProfileClaims, error) { x509Presented = true } - return model.ProfileClaims{ + return dto.ProfileClaims{ PreferredUsername: preferredUsername, Name: name, Email: email, @@ -96,7 +97,7 @@ func (m *mw) AttachClaims(next echo.HandlerFunc) echo.HandlerFunc { func (m *mw) RequireClaims(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - _, ok := c.Get("claims").(model.ProfileClaims) + _, ok := c.Get("claims").(dto.ProfileClaims) if !ok { return httperr.Forbidden(errors.New("no valid claims for user")) } @@ -112,7 +113,7 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { // lookup superuser profile; the "EDIPI" of the Superuser is consistently 79. // The superuser is initialized as part of database and seed data initialization if c.Get("ApplicationKeyAuthSuccess") == true { - p, err := m.ProfileService.GetProfileWithTokensForEDIPI(ctx, 79) + p, err := m.DBService.ProfileGetForEDIPI(ctx, 79) if err != nil { return httperr.Forbidden(err) } @@ -123,7 +124,7 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { // If a User was authenticated via KeyAuth, lookup the user's profile using key_id if c.Get("KeyAuthSuccess") == true { keyID := c.Get("KeyAuthKeyID").(string) - p, err := m.ProfileService.GetProfileWithTokensForTokenID(ctx, keyID) + p, err := m.DBService.ProfileGetForToken(ctx, keyID) if err != nil { return httperr.Forbidden(err) } @@ -131,12 +132,12 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { return next(c) } - claims, ok := c.Get("claims").(model.ProfileClaims) + claims, ok := c.Get("claims").(dto.ProfileClaims) if !ok { return httperr.Forbidden(errors.New("could not bind claims from context")) } - p, err := m.ProfileService.GetProfileWithTokensForClaims(ctx, claims) + p, err := m.DBService.ProfileGetWithTokensForClaims(ctx, claims) if err != nil { return httperr.Forbidden(err) } @@ -149,7 
+150,7 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { // IsApplicationAdmin checks that a profile is an application admin func (m *mw) IsApplicationAdmin(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(model.Profile) + p, ok := c.Get("profile").(dto.Profile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } @@ -164,7 +165,7 @@ func (m *mw) IsApplicationAdmin(next echo.HandlerFunc) echo.HandlerFunc { // ApplicationAdmin has automatic member/admin status for all projects func (m *mw) IsProjectAdmin(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(model.Profile) + p, ok := c.Get("profile").(dto.Profile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } @@ -175,7 +176,10 @@ func (m *mw) IsProjectAdmin(next echo.HandlerFunc) echo.HandlerFunc { if err != nil { return httperr.MalformedID(err) } - authorized, err := m.ProjectRoleService.IsProjectAdmin(c.Request().Context(), p.ID, projectID) + authorized, err := m.DBService.ProfileProjectRoleGetIsAdmin(c.Request().Context(), db.ProfileProjectRoleGetIsAdminParams{ + ProfileID: p.ID, + ProjectID: projectID, + }) if err != nil || !authorized { return httperr.ForbiddenRole(err) } @@ -187,7 +191,7 @@ func (m *mw) IsProjectAdmin(next echo.HandlerFunc) echo.HandlerFunc { // ApplicationAdmin has automatic member/admin status for all projects func (m *mw) IsProjectMember(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(model.Profile) + p, ok := c.Get("profile").(dto.Profile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } @@ -198,7 +202,10 @@ func (m *mw) IsProjectMember(next echo.HandlerFunc) echo.HandlerFunc { if err != nil { return httperr.MalformedID(err) } - authorized, err := 
m.ProjectRoleService.IsProjectMember(c.Request().Context(), p.ID, projectID) + authorized, err := m.DBService.ProfileProjectRoleGetIsMemberOrAdmin(c.Request().Context(), db.ProfileProjectRoleGetIsMemberOrAdminParams{ + ProfileID: p.ID, + ProjectID: projectID, + }) if err != nil || !authorized { return httperr.ForbiddenRole(err) } diff --git a/api/internal/middleware/key.go b/api/internal/middleware/key.go index 565cda6c..a2f95a0d 100644 --- a/api/internal/middleware/key.go +++ b/api/internal/middleware/key.go @@ -3,6 +3,7 @@ package middleware import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/password" "github.com/labstack/echo/v4" @@ -73,7 +74,7 @@ func keyAuth(isDisabled bool, appKey string, h HashExtractorFunc) echo.Middlewar func getHashExtractorFunc(ctx context.Context, m *mw) HashExtractorFunc { return func(keyID string) (string, error) { - k, err := m.ProfileService.GetTokenInfoByTokenID(ctx, keyID) + k, err := m.DBService.ProfileTokenGet(ctx, keyID) if err != nil { return "", err } @@ -90,7 +91,10 @@ type DataloggerHashExtractorFunc func(modelName, sn string) (string, error) func getDataloggerHashExtractorFunc(ctx context.Context, m *mw) DataloggerHashExtractorFunc { return func(modelName, sn string) (string, error) { - hash, err := m.DataloggerTelemetryService.GetDataloggerHashByModelSN(ctx, modelName, sn) + hash, err := m.DBService.DataloggerHashGetForModelSn(ctx, db.DataloggerHashGetForModelSnParams{ + Model: &modelName, + Sn: sn, + }) if err != nil { return "", err } diff --git a/api/internal/middleware/middleware.go b/api/internal/middleware/middleware.go index 7eaaf21c..4e8c7ffa 100644 --- a/api/internal/middleware/middleware.go +++ b/api/internal/middleware/middleware.go @@ -26,14 +26,12 @@ type Middleware interface { } type mw struct { - cfg *config.ServerConfig - ProfileService service.ProfileService - ProjectRoleService service.ProjectRoleService - 
DataloggerTelemetryService service.DataloggerTelemetryService + cfg *config.ServerConfig + DBService *service.DBService } var _ Middleware = (*mw)(nil) -func NewMiddleware(cfg *config.ServerConfig, profileService service.ProfileService, projectRoleService service.ProjectRoleService, dataloggerTelemetryService service.DataloggerTelemetryService) *mw { - return &mw{cfg, profileService, projectRoleService, dataloggerTelemetryService} +func NewMiddleware(cfg *config.ServerConfig, db *service.DBService) *mw { + return &mw{cfg, db} } diff --git a/api/internal/model/alert.go b/api/internal/model/alert.go deleted file mode 100644 index 382494f1..00000000 --- a/api/internal/model/alert.go +++ /dev/null @@ -1,115 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -// Alert is an alert, triggered by an AlertConfig evaluating to true -type Alert struct { - Read *bool `json:"read,omitempty"` - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - Name string `json:"name"` - Body string `json:"body"` - CreateDate time.Time `json:"create_date" db:"create_date"` - Instruments dbJSONSlice[AlertConfigInstrument] `json:"instruments" db:"instruments"` -} - -const createAlerts = ` - INSERT INTO alert (alert_config_id) VALUES ($1) -` - -// CreateAlerts creates one or more new alerts -func (q *Queries) CreateAlerts(ctx context.Context, id uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createAlerts, id) - return err -} - -const getAllAlertsForProject = ` - SELECT * FROM v_alert WHERE project_id = $1 -` - -// GetAllAlertsForProject lists all alerts for a given instrument ID -func (q *Queries) GetAllAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]Alert, error) { - aa := make([]Alert, 0) - if err := q.db.SelectContext(ctx, &aa, getAllAlertsForProject, projectID); err != 
nil { - return nil, err - } - return aa, nil -} - -const getAllAlertsForInstrument = ` - SELECT * FROM v_alert - WHERE alert_config_id = ANY( - SELECT id FROM alert_config_instrument - WHERE instrument_id = $1 - ) -` - -// GetAllAlertsForInstrument lists all alerts for a given instrument ID -func (q *Queries) GetAllAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]Alert, error) { - aa := make([]Alert, 0) - if err := q.db.SelectContext(ctx, &aa, getAllAlertsForInstrument, instrumentID); err != nil { - return nil, err - } - return aa, nil -} - -const getAllAlertsForProfile = ` - SELECT a.*, - CASE WHEN r.alert_id IS NOT NULL THEN true ELSE false - END AS read - FROM v_alert a - LEFT JOIN alert_read r ON r.alert_id = a.id - WHERE a.alert_config_id IN ( - SELECT alert_config_id - FROM alert_profile_subscription - WHERE profile_id = $1 - ) -` - -// GetAllAlertsForProfile returns all alerts for which a profile is subscribed to the AlertConfig -func (q *Queries) GetAllAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]Alert, error) { - aa := make([]Alert, 0) - if err := q.db.SelectContext(ctx, &aa, getAllAlertsForProfile, profileID); err != nil { - return nil, err - } - return aa, nil -} - -const getOneAlertForProfile = getAllAlertsForProfile + ` - AND a.id = $2 -` - -// GetOneAlertForProfile returns a single alert for which a profile is subscribed -func (q *Queries) GetOneAlertForProfile(ctx context.Context, profileID, alertID uuid.UUID) (Alert, error) { - var a Alert - err := q.db.GetContext(ctx, &a, getOneAlertForProfile, profileID, alertID) - return a, err -} - -const doAlertRead = ` - INSERT INTO alert_read (profile_id, alert_id) VALUES ($1, $2) - ON CONFLICT DO NOTHING -` - -// DoAlertRead marks an alert as read for a profile -func (q *Queries) DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, doAlertRead, profileID, alertID) - return err -} - -const doAlertUnread = ` - DELETE FROM 
alert_read WHERE profile_id = $1 AND alert_id = $2 -` - -// DoAlertUnread marks an alert as unread for a profile -func (q *Queries) DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, doAlertUnread, profileID, alertID) - return err -} diff --git a/api/internal/model/alert_check.go b/api/internal/model/alert_check.go deleted file mode 100644 index 6b1c9bcb..00000000 --- a/api/internal/model/alert_check.go +++ /dev/null @@ -1,118 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "errors" - - "github.com/google/uuid" -) - -var ( - GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") - YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") - RedSubmittalStatusID uuid.UUID = uuid.MustParse("84a0f437-a20a-4ac2-8a5b-f8dc35e8489b") - - MeasurementSubmittalAlertTypeID uuid.UUID = uuid.MustParse("97e7a25c-d5c7-4ded-b272-1bb6e5914fe3") - EvaluationSubmittalAlertTypeID uuid.UUID = uuid.MustParse("da6ee89e-58cc-4d85-8384-43c3c33a68bd") -) - -const ( - warning = "Warning" - alert = "Alert" - reminder = "Reminder" -) - -type AlertCheck struct { - AlertConfigID uuid.UUID `db:"alert_config_id"` - SubmittalID uuid.UUID `db:"submittal_id"` - ShouldWarn bool `db:"should_warn"` - ShouldAlert bool `db:"should_alert"` - ShouldRemind bool `db:"should_remind"` - Submittal Submittal `db:"-"` -} - -func (ck AlertCheck) GetShouldWarn() bool { - return ck.ShouldWarn -} - -func (ck AlertCheck) GetShouldAlert() bool { - return ck.ShouldAlert -} - -func (ck AlertCheck) GetShouldRemind() bool { - return ck.ShouldRemind -} - -func (ck AlertCheck) GetSubmittal() Submittal { - return ck.Submittal -} - -func (ck *AlertCheck) SetSubmittal(sub Submittal) { - ck.Submittal = sub -} - -type AlertConfigMap map[uuid.UUID]AlertConfig - -type SubmittalMap map[uuid.UUID]Submittal - -const listAndCheckAlertConfigs = ` - UPDATE alert_config ac1 - SET last_checked = now() 
- FROM ( - SELECT * - FROM v_alert_config - ) ac2 - WHERE ac1.id = ac2.id - RETURNING ac2.* -` - -func (q *Queries) ListAndCheckAlertConfigs(ctx context.Context) ([]AlertConfig, error) { - aa := make([]AlertConfig, 0) - if err := q.db.SelectContext(ctx, &aa, listAndCheckAlertConfigs); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return make([]AlertConfig, 0), nil - } - return nil, err - } - return aa, nil -} - -const updateAlertConfigLastReminded = ` - UPDATE alert_config SET - last_reminded = $2 - WHERE id = $1 -` - -func (q *Queries) UpdateAlertConfigLastReminded(ctx context.Context, ac AlertConfig) error { - _, err := q.db.ExecContext(ctx, updateAlertConfigLastReminded, ac.ID, ac.LastReminded) - return err -} - -const updateSubmittalCompletionDateOrWarningSent = ` - UPDATE submittal SET - submittal_status_id = $2, - completion_date = $3, - warning_sent = $4 - WHERE id = $1 -` - -func (q *Queries) UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, sub Submittal) error { - _, err := q.db.ExecContext(ctx, updateSubmittalCompletionDateOrWarningSent, sub.ID, sub.SubmittalStatusID, sub.CompletionDate, sub.WarningSent) - return err -} - -const createNextSubmittalFromNewAlertConfigDate = ` - INSERT INTO submittal (alert_config_id, create_date, due_date) - SELECT - ac.id, - $2::TIMESTAMPTZ, - $2::TIMESTAMPTZ + ac.schedule_interval - FROM alert_config ac - WHERE ac.id = $1 -` - -func (q *Queries) CreateNextSubmittalFromNewAlertConfigDate(ctx context.Context, ac AlertConfig) error { - _, err := q.db.ExecContext(ctx, createNextSubmittalFromNewAlertConfigDate, ac.ID, ac.CreateNextSubmittalFrom) - return err -} diff --git a/api/internal/model/alert_config.go b/api/internal/model/alert_config.go deleted file mode 100644 index ed44180f..00000000 --- a/api/internal/model/alert_config.go +++ /dev/null @@ -1,239 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "errors" - "fmt" - "time" - - "github.com/google/uuid" -) - -type AlertConfig struct 
{ - ID uuid.UUID `json:"id" db:"id"` - Name string `json:"name" db:"name"` - Body string `json:"body" db:"body"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` - AlertType string `json:"alert_type" db:"alert_type"` - StartDate time.Time `json:"start_date" db:"start_date"` - ScheduleInterval string `json:"schedule_interval" db:"schedule_interval"` - RemindInterval string `json:"remind_interval" db:"remind_interval"` - WarningInterval string `json:"warning_interval" db:"warning_interval"` - LastChecked *time.Time `json:"last_checked" db:"last_checked"` - LastReminded *time.Time `json:"last_reminded" db:"last_reminded"` - Instruments dbJSONSlice[AlertConfigInstrument] `json:"instruments" db:"instruments"` - AlertEmailSubscriptions dbJSONSlice[EmailAutocompleteResult] `json:"alert_email_subscriptions" db:"alert_email_subscriptions"` - MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts" db:"mute_consecutive_alerts"` - CreateNextSubmittalFrom *time.Time `json:"-" db:"-"` - AuditInfo -} - -type AlertConfigInstrument struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` -} - -func (a *AlertConfig) GetToAddresses() []string { - emails := make([]string, len(a.AlertEmailSubscriptions)) - for idx := range a.AlertEmailSubscriptions { - emails[idx] = a.AlertEmailSubscriptions[idx].Email - } - return emails -} - -const getAllAlertConfigsForProject = ` - SELECT * - FROM v_alert_config - WHERE project_id = $1 - ORDER BY name -` - -// GetAllAlertConfigsForProject lists all alert configs for a single project -func (q *Queries) GetAllAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]AlertConfig, error) { - aa := make([]AlertConfig, 0) - err := q.db.SelectContext(ctx, &aa, getAllAlertConfigsForProject, projectID) - return aa, err -} - 
-const qetAllAlertConfigsForProjectAndAlertType = ` - SELECT * - FROM v_alert_config - WHERE project_id = $1 - AND alert_type_id = $2 - ORDER BY name -` - -// GetAllAlertConfigsForProjectAndAlertType lists alert configs for a single project filetered by alert type -func (q *Queries) GetAllAlertConfigsForProjectAndAlertType(ctx context.Context, projectID, alertTypeID uuid.UUID) ([]AlertConfig, error) { - aa := make([]AlertConfig, 0) - err := q.db.SelectContext(ctx, &aa, qetAllAlertConfigsForProjectAndAlertType, projectID, alertTypeID) - return aa, err -} - -const getAllAlertConfigsForInstrument = ` - SELECT * - FROM v_alert_config - WHERE id = ANY( - SELECT alert_config_id - FROM alert_config_instrument - WHERE instrument_id = $1 - ) - ORDER BY name -` - -// GetAllAlertConfigsForInstrument lists all alerts for a single instrument -func (q *Queries) GetAllAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]AlertConfig, error) { - aa := make([]AlertConfig, 0) - err := q.db.SelectContext(ctx, &aa, getAllAlertConfigsForInstrument, instrumentID) - return aa, err -} - -const getOneAlertConfig = ` - SELECT * FROM v_alert_config WHERE id = $1 -` - -// GetOneAlertConfig gets a single alert -func (q *Queries) GetOneAlertConfig(ctx context.Context, alertConfigID uuid.UUID) (AlertConfig, error) { - var a AlertConfig - err := q.db.GetContext(ctx, &a, getOneAlertConfig, alertConfigID) - return a, err -} - -const createAlertConfig = ` - INSERT INTO alert_config ( - project_id, - name, - body, - alert_type_id, - start_date, - schedule_interval, - mute_consecutive_alerts, - remind_interval, - warning_interval, - creator, - create_date - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11) - RETURNING id -` - -func (q *Queries) CreateAlertConfig(ctx context.Context, ac AlertConfig) (uuid.UUID, error) { - var alertConfigID uuid.UUID - err := q.db.GetContext(ctx, &alertConfigID, createAlertConfig, - ac.ProjectID, - ac.Name, - ac.Body, - ac.AlertTypeID, - ac.StartDate, - 
ac.ScheduleInterval, - ac.MuteConsecutiveAlerts, - ac.RemindInterval, - ac.WarningInterval, - ac.CreatorID, - ac.CreateDate, - ) - return alertConfigID, err -} - -const assignInstrumentToAlertConfig = ` - INSERT INTO alert_config_instrument (alert_config_id, instrument_id) VALUES ($1, $2) -` - -func (q *Queries) AssignInstrumentToAlertConfig(ctx context.Context, alertConfigID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, assignInstrumentToAlertConfig, alertConfigID, instrumentID) - return err -} - -const unassignAllInstrumentsFromAlertConfig = ` - DELETE FROM alert_config_instrument WHERE alert_config_id = $1 -` - -func (q *Queries) UnassignAllInstrumentsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unassignAllInstrumentsFromAlertConfig, alertConfigID) - return err -} - -const createNextSubmittalFromExistingAlertConfigDate = ` - INSERT INTO submittal (alert_config_id, due_date) - SELECT id, create_date + schedule_interval - FROM alert_config - WHERE id = $1 -` - -func (q *Queries) CreateNextSubmittalFromExistingAlertConfigDate(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createNextSubmittalFromExistingAlertConfigDate, alertConfigID) - return err -} - -const updateAlertConfig = ` - UPDATE alert_config SET - name = $3, - body = $4, - start_date = $5, - schedule_interval = $6, - mute_consecutive_alerts = $7, - remind_interval = $8, - warning_interval = $9, - updater = $10, - update_date = $11 - WHERE id = $1 AND project_id = $2 -` - -func (q *Queries) UpdateAlertConfig(ctx context.Context, ac AlertConfig) error { - _, err := q.db.ExecContext(ctx, updateAlertConfig, - ac.ID, - ac.ProjectID, - ac.Name, - ac.Body, - ac.StartDate, - ac.ScheduleInterval, - ac.MuteConsecutiveAlerts, - ac.RemindInterval, - ac.WarningInterval, - ac.UpdaterID, - ac.UpdateDate, - ) - return err -} - -const updateFutureSubmittalForAlertConfig = ` - UPDATE submittal - SET 
due_date = sq.new_due_date - FROM ( - SELECT - sub.id AS submittal_id, - sub.create_date + ac.schedule_interval AS new_due_date - FROM submittal sub - INNER JOIN alert_config ac ON sub.alert_config_id = ac.id - WHERE sub.alert_config_id = $1 - AND sub.due_date > NOW() - AND sub.completion_date IS NULL - AND NOT sub.marked_as_missing - ) sq - WHERE id = sq.submittal_id - AND sq.new_due_date > NOW() - RETURNING id -` - -func (q *Queries) UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - var updatedSubID uuid.UUID - if err := q.db.GetContext(ctx, &updatedSubID, updateFutureSubmittalForAlertConfig, alertConfigID); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return fmt.Errorf("updated alert config new due date must be in the future! complete the current submittal before updating") - } - return err - } - return nil -} - -const deleteAlertConfig = ` - UPDATE alert_config SET deleted=true WHERE id = $1 -` - -// DeleteAlertConfig deletes an alert by ID -func (q *Queries) DeleteAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAlertConfig, alertConfigID) - return err -} diff --git a/api/internal/model/alert_subscription.go b/api/internal/model/alert_subscription.go deleted file mode 100644 index 66d2e564..00000000 --- a/api/internal/model/alert_subscription.go +++ /dev/null @@ -1,209 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// AlertSubscription is a profile subscription to an alert -type AlertSubscription struct { - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` - AlertSubscriptionSettings -} - -// AlertSubscriptionSettings holds all settings for an AlertSubscription -type AlertSubscriptionSettings struct { - MuteUI bool `json:"mute_ui" 
db:"mute_ui"` - MuteNotify bool `json:"mute_notify" db:"mute_notify"` -} - -// AlertSubscriptionCollection is a collection of AlertSubscription items -type AlertSubscriptionCollection struct { - Items []AlertSubscription `json:"items"` -} - -// EmailAlert is an email subscription to an alert -type EmailAlert struct { - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id"` - EmailID uuid.UUID `json:"profile_id"` - MuteNotify bool `json:"mute_notify" db:"mute_notify"` -} - -type Email struct { - ID uuid.UUID `json:"id" db:"id"` - Email string `json:"email" db:"email"` -} - -// UnmarshalJSON implements the UnmarshalJSON Interface for AlertSubscription -func (c *AlertSubscriptionCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var a AlertSubscription - if err := json.Unmarshal(b, &a); err != nil { - return err - } - c.Items = []AlertSubscription{a} - default: - c.Items = make([]AlertSubscription, 0) - } - return nil -} - -const subscribeProfileToAlerts = ` - INSERT INTO alert_profile_subscription (alert_config_id, profile_id) - VALUES ($1, $2) - ON CONFLICT DO NOTHING -` - -// SubscribeProfileToAlerts subscribes a profile to an instrument alert -func (q *Queries) SubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, subscribeProfileToAlerts, alertConfigID, profileID) - return err -} - -const unsubscribeProfileToAlerts = ` - DELETE FROM alert_profile_subscription WHERE alert_config_id = $1 AND profile_id = $2 -` - -// UnsubscribeProfileToAlerts subscribes a profile to an instrument alert -func (q *Queries) UnsubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeProfileToAlerts, alertConfigID, profileID) - return err -} - -const getAlertSubscription = ` - SELECT * FROM 
alert_profile_subscription WHERE alert_config_id = $1 AND profile_id = $2 -` - -// GetAlertSubscription returns a AlertSubscription -func (q *Queries) GetAlertSubscription(ctx context.Context, alertConfigID, profileID uuid.UUID) (AlertSubscription, error) { - var a AlertSubscription - err := q.db.GetContext(ctx, &a, getAlertSubscription, alertConfigID, profileID) - return a, err -} - -const getAlertSubscriptionByID = ` - SELECT * FROM alert_profile_subscription WHERE id = $1 -` - -// GetAlertSubscriptionByID returns an alert subscription -func (q *Queries) GetAlertSubscriptionByID(ctx context.Context, subscriptionID uuid.UUID) (AlertSubscription, error) { - var a AlertSubscription - err := q.db.GetContext(ctx, &a, getAlertSubscriptionByID, subscriptionID) - return a, err -} - -const listMyAlertSubscriptions = ` - SELECT * FROM alert_profile_subscription WHERE profile_id = $1 -` - -// ListMyAlertSubscriptions returns all profile_alerts for a given profile ID -func (q *Queries) ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]AlertSubscription, error) { - aa := make([]AlertSubscription, 0) - if err := q.db.SelectContext(ctx, &aa, listMyAlertSubscriptions, profileID); err != nil { - return nil, err - } - return aa, nil -} - -const updateMyAlertSubscription = ` - UPDATE alert_profile_subscription SET mute_ui=$1, mute_notify=$2 WHERE alert_config_id=$3 AND profile_id=$4 -` - -// UpdateMyAlertSubscription updates properties on a AlertSubscription -func (q *Queries) UpdateMyAlertSubscription(ctx context.Context, s AlertSubscription) error { - _, err := q.db.ExecContext(ctx, updateMyAlertSubscription, s.MuteUI, s.MuteNotify, s.AlertConfigID, s.ProfileID) - return err -} - -const registerEmail = ` - WITH e AS ( - INSERT INTO email (email) VALUES ($1) - ON CONFLICT ON CONSTRAINT unique_email DO NOTHING - RETURNING id - ) - SELECT id FROM e - UNION - SELECT id from email WHERE email = $1 -` - -func (q *Queries) RegisterEmail(ctx context.Context, 
emailAddress string) (uuid.UUID, error) { - var newID uuid.UUID - err := q.db.GetContext(ctx, &newID, registerEmail, emailAddress) - return newID, err -} - -const unregisterEmail = ` - DELETE FROM email WHERE id = $1 -` - -func (q *Queries) UnregisterEmail(ctx context.Context, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unregisterEmail, emailID) - return err -} - -const subscribeEmailToAlertConfig = ` - INSERT INTO alert_email_subscription (alert_config_id, email_id) VALUES ($1,$2) - ON CONFLICT ON CONSTRAINT email_unique_alert_config DO NOTHING -` - -func (q *Queries) SubscribeEmailToAlertConfig(ctx context.Context, alertConfigID, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, subscribeEmailToAlertConfig, alertConfigID, emailID) - return err -} - -const subscribeProfileToAlertConfig = ` - INSERT INTO alert_profile_subscription (alert_config_id, profile_id) VALUES ($1,$2) - ON CONFLICT ON CONSTRAINT profile_unique_alert_config DO NOTHING -` - -func (q *Queries) SubscribeProfileToAlertConfig(ctx context.Context, alertConfigID, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, subscribeProfileToAlertConfig, alertConfigID, emailID) - return err -} - -const unsubscribeEmailFromAlertConfig = ` - DELETE FROM alert_email_subscription WHERE alert_config_id = $1 AND email_id = $2 -` - -func (q *Queries) UnsubscribeEmailFromAlertConfig(ctx context.Context, alertConfigID, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeEmailFromAlertConfig, alertConfigID, emailID) - return err -} - -const unsubscribeProfileFromAlertConfig = ` - DELETE FROM alert_profile_subscription WHERE alert_config_id = $1 AND profile_id = $2 -` - -func (q *Queries) UnsubscribeProfileFromAlertConfig(ctx context.Context, alertConfigID, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeProfileFromAlertConfig, alertConfigID, emailID) - return err -} - -const unsubscribeAllEmailsFromAlertConfig = ` - DELETE FROM 
alert_email_subscription WHERE alert_config_id = $1 -` - -func (q *Queries) UnsubscribeAllEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeAllEmailsFromAlertConfig, alertConfigID) - return err -} - -const unsubscribeAllProfilesFromAlertConfig = ` - DELETE FROM alert_profile_subscription WHERE alert_config_id = $1 -` - -func (q *Queries) UnsubscribeAllProfilesFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeAllProfilesFromAlertConfig, alertConfigID) - return err -} diff --git a/api/internal/model/autocomplete.go b/api/internal/model/autocomplete.go deleted file mode 100644 index e84f236a..00000000 --- a/api/internal/model/autocomplete.go +++ /dev/null @@ -1,31 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// EmailAutocompleteResult stores search result in profiles and emails -type EmailAutocompleteResult struct { - ID uuid.UUID `json:"id"` - UserType string `json:"user_type" db:"user_type"` - Username *string `json:"username"` - Email string `json:"email"` -} - -const listEmailAutocomplete = ` - SELECT id, user_type, username, email - FROM v_email_autocomplete - WHERE username_email ILIKE '%'||$1||'%' - LIMIT $2 -` - -// ListEmailAutocomplete returns search results for email autocomplete -func (q *Queries) ListEmailAutocomplete(ctx context.Context, emailInput string, limit int) ([]EmailAutocompleteResult, error) { - aa := make([]EmailAutocompleteResult, 0) - if err := q.db.SelectContext(ctx, &aa, listEmailAutocomplete, emailInput, limit); err != nil { - return nil, err - } - return aa, nil -} diff --git a/api/internal/model/aware.go b/api/internal/model/aware.go deleted file mode 100644 index bda30d05..00000000 --- a/api/internal/model/aware.go +++ /dev/null @@ -1,67 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// AwareParameter struct -type AwareParameter 
struct { - ID uuid.UUID `json:"id"` - Key string `json:"key"` - ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` - UnitID uuid.UUID `json:"unit_id" db:"unit_id"` -} - -// AwarePlatformParameterConfig holds information about which parameters are "enabled" for given instrument(s) -// { projectID: , instrument_id: , aware_id: , aware_parameters: { : } } -// aware_parameters is a map of : -type AwarePlatformParameterConfig struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - AwareID uuid.UUID `json:"aware_id" db:"aware_id"` - AwareParameters map[string]*uuid.UUID `json:"aware_parameters"` -} - -type AwarePlatformParameterEnabled struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - AwareID uuid.UUID `json:"aware_id" db:"aware_id"` - AwareParameterKey string `json:"aware_parameter_key" db:"aware_parameter_key"` - TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` -} - -const listAwareParameters = ` - SELECT id, key, parameter_id, unit_id FROM aware_parameter -` - -// ListAwareParameters returns aware parameters -func (q *Queries) ListAwareParameters(ctx context.Context) ([]AwareParameter, error) { - pp := make([]AwareParameter, 0) - if err := q.db.SelectContext(ctx, &pp, listAwareParameters); err != nil { - return nil, err - } - return pp, nil -} - -const listAwarePlatformParameterEnabled = ` - SELECT instrument_id, aware_id, aware_parameter_key, timeseries_id - FROM v_aware_platform_parameter_enabled - ORDER BY aware_id, aware_parameter_key -` - -func (q *Queries) ListAwarePlatformParameterEnabled(ctx context.Context) ([]AwarePlatformParameterEnabled, error) { - aa := make([]AwarePlatformParameterEnabled, 0) - if err := q.db.SelectContext(ctx, &aa, listAwarePlatformParameterEnabled); err != nil { - return nil, err - } - return aa, nil -} - -const createAwarePlatform = ` - INSERT INTO aware_platform (instrument_id, aware_id) VALUES ($1, $2) -` - -func (q *Queries) CreateAwarePlatform(ctx 
context.Context, instrumentID, awareID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createAwarePlatform, &instrumentID, &awareID) - return err -} diff --git a/api/internal/model/collection_group.go b/api/internal/model/collection_group.go deleted file mode 100644 index 1d25da33..00000000 --- a/api/internal/model/collection_group.go +++ /dev/null @@ -1,160 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -// CollectionGroup holds information for entity collection_group -type CollectionGroup struct { - ID uuid.UUID `json:"id" db:"id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Slug string `json:"slug" db:"slug"` - Name string `json:"name" db:"name"` - SortOrder int `json:"sort_order" db:"sort_order"` - AuditInfo -} - -// CollectionGroupDetails holds same information as a CollectionGroup -// In Addition, contains array of structs; Each struct contains -// all fields for Timeseries AND additional latest_value, latest_time -type CollectionGroupDetails struct { - CollectionGroup - Timeseries []collectionGroupDetailsTimeseries `json:"timeseries"` -} - -// collectionGroupDetailsTimeseriesItem is a Timeseries with a little bit of extra information -type collectionGroupDetailsTimeseries struct { - Timeseries - LatestTime *time.Time `json:"latest_time" db:"latest_time"` - LatestValue *float32 `json:"latest_value" db:"latest_value"` - SortOrder int `json:"sort_order" db:"sort_order"` -} - -const listCollectionGroups = ` - SELECT * FROM collection_group WHERE project_id = $1 ORDER BY sort_order, name -` - -// ListCollectionGroups lists all collection groups for a project -func (q *Queries) ListCollectionGroups(ctx context.Context, projectID uuid.UUID) ([]CollectionGroup, error) { - aa := make([]CollectionGroup, 0) - if err := q.db.SelectContext(ctx, &aa, listCollectionGroups, projectID); err != nil { - return nil, err - } - return aa, nil -} - -const getCollectionGroupDetails = ` - SELECT * FROM collection_group 
WHERE project_id = $1 AND id = $2 -` - -// GetCollectionGroupDetails returns details for a single CollectionGroup -func (q *Queries) GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (CollectionGroupDetails, error) { - var a CollectionGroupDetails - if err := q.db.GetContext(ctx, &a, getCollectionGroupDetails, projectID, collectionGroupID); err != nil { - return a, err - } - return a, nil -} - -const getCollectionGroupDetailsTimeseries = ` - SELECT t.*, tm.time as latest_time, tm.value as latest_value, cgt.sort_order - FROM collection_group_timeseries cgt - INNER JOIN collection_group cg on cg.id = cgt.collection_group_id - INNER JOIN v_timeseries t on t.id = cgt.timeseries_id - LEFT JOIN timeseries_measurement tm on tm.timeseries_id = t.id and tm.time = ( - SELECT time FROM timeseries_measurement - WHERE timeseries_id = t.id - ORDER BY time DESC LIMIT 1 - ) - WHERE t.instrument_id = ANY( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) - AND cgt.collection_group_id = $2 - ORDER BY sort_order ASC, t.name ASC -` - -// GetCollectionGroupDetails returns details for a single CollectionGroup -func (q *Queries) GetCollectionGroupDetailsTimeseries(ctx context.Context, projectID, collectionGroupID uuid.UUID) ([]collectionGroupDetailsTimeseries, error) { - aa := make([]collectionGroupDetailsTimeseries, 0) - if err := q.db.SelectContext(ctx, &aa, getCollectionGroupDetailsTimeseries, projectID, collectionGroupID); err != nil { - return nil, err - } - return aa, nil -} - -const createCollectionGroup = ` - INSERT INTO collection_group (project_id, name, slug, creator, create_date, updater, update_date, sort_order) - VALUES ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5, $6, $7) - RETURNING id, project_id, name, slug, creator, create_date, updater, update_date, sort_order -` - -// CreateCollectionGroup creates a new collection group -func (q *Queries) CreateCollectionGroup(ctx 
context.Context, cg CollectionGroup) (CollectionGroup, error) { - var cgNew CollectionGroup - if err := q.db.GetContext(ctx, &cgNew, createCollectionGroup, cg.ProjectID, cg.Name, cg.CreatorID, cg.CreateDate, cg.UpdaterID, cg.UpdateDate, cg.SortOrder); err != nil { - return cgNew, err - } - return cgNew, nil -} - -const updateCollectionGroup = ` - UPDATE collection_group SET name=$3, updater=$4, update_date=$5, sort_order=$6 - WHERE project_id=$1 AND id=$2 - RETURNING id, project_id, name, slug, creator, create_date, updater, update_date, sort_order -` - -// UpdateCollectionGroup updates an existing collection group's metadata -func (q *Queries) UpdateCollectionGroup(ctx context.Context, cg CollectionGroup) (CollectionGroup, error) { - var cgUpdated CollectionGroup - if err := q.db.GetContext(ctx, &cgUpdated, updateCollectionGroup, cg.ProjectID, cg.ID, cg.Name, cg.UpdaterID, cg.UpdateDate, cg.SortOrder); err != nil { - return cgUpdated, err - } - return cgUpdated, nil -} - -const deleteCollectionGroup = ` - DELETE FROM collection_group WHERE project_id=$1 AND id=$2 -` - -// DeleteCollectionGroup deletes a collection group and associated timeseries relationships -// using the id of the collection group -func (q *Queries) DeleteCollectionGroup(ctx context.Context, projectID, collectionGroupID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteCollectionGroup, projectID, collectionGroupID) - return err -} - -const addTimeseriesToCollectionGroup = ` - INSERT INTO collection_group_timeseries (collection_group_id, timeseries_id, sort_order) VALUES ($1, $2, $3) - ON CONFLICT ON CONSTRAINT collection_group_unique_timeseries DO NOTHING -` - -// AddTimeseriesToCollectionGroup adds a timeseries to a collection group -func (q *Queries) AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID, sortOrder int) error { - _, err := q.db.ExecContext(ctx, addTimeseriesToCollectionGroup, collectionGroupID, timeseriesID, sortOrder) - return 
err -} - -const removeTimeseriesFromCollectionGroup = ` - DELETE FROM collection_group_timeseries WHERE collection_group_id=$1 AND timeseries_id = $2 -` - -// RemoveTimeseriesFromCollectionGroup removes a timeseries from a collection group -func (q *Queries) RemoveTimeseriesFromCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, removeTimeseriesFromCollectionGroup, collectionGroupID, timeseriesID) - return err -} - -const updateTimeseriesCollectionGroupSortOrder = ` - UPDATE collection_group_timeseries set sort_order=$3 - WHERE collection_group_id=$1 AND timeseries_id=$2 -` - -func (q *Queries) UpdateTimeseriesCollectionGroupSortOrder(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID, sortOrder int) error { - _, err := q.db.ExecContext(ctx, updateTimeseriesCollectionGroupSortOrder, collectionGroupID, timeseriesID, sortOrder) - return err -} diff --git a/api/internal/model/datalogger.go b/api/internal/model/datalogger.go deleted file mode 100644 index fd153f03..00000000 --- a/api/internal/model/datalogger.go +++ /dev/null @@ -1,271 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "errors" - "time" - - "github.com/USACE/instrumentation-api/api/internal/password" - "github.com/google/uuid" - "github.com/jackc/pgtype" -) - -// Telemetry struct -type Telemetry struct { - ID uuid.UUID - TypeID string - TypeSlug string - TypeName string -} - -type Datalogger struct { - ID uuid.UUID `json:"id" db:"id"` - Name string `json:"name" db:"name"` - SN string `json:"sn" db:"sn"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Slug string `json:"slug" db:"slug"` - ModelID uuid.UUID `json:"model_id" db:"model_id"` - Model *string `json:"model" db:"model"` - Errors []string `json:"errors" db:"-"` - PgErrors pgtype.TextArray `json:"-" db:"errors"` - Tables dbJSONSlice[DataloggerTable] `json:"tables" db:"tables"` - AuditInfo -} - -type DataloggerWithKey struct { - Datalogger 
- Key string `json:"key"` -} - -// type DataloggerTable struct { -// ID uuid.UUID `json:"id" db:"id"` -// TableName string `json:"table_name" db:"table_name"` -// } - -type DataloggerTablePreview struct { - DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` - UpdateDate time.Time `json:"update_date" db:"update_date"` - Preview pgtype.JSON `json:"preview" db:"preview"` -} - -type DataloggerError struct { - DataloggerTableID uuid.UUID `json:"datalogger_id" db:"datalogger_id"` - Errors []string `json:"errors" db:"errors"` -} - -const getDataloggerModelName = ` - SELECT model FROM datalogger_model WHERE id = $1 -` - -func (q *Queries) GetDataloggerModelName(ctx context.Context, modelID uuid.UUID) (string, error) { - var modelName string - if err := q.db.GetContext(ctx, &modelName, getDataloggerModelName, modelID); err != nil { - return "", err - } - return modelName, nil -} - -const listProjectDataloggers = ` - SELECT * FROM v_datalogger WHERE project_id = $1 -` - -func (q *Queries) ListProjectDataloggers(ctx context.Context, projectID uuid.UUID) ([]Datalogger, error) { - dls := make([]Datalogger, 0) - if err := q.db.SelectContext(ctx, &dls, listProjectDataloggers, projectID); err != nil { - return make([]Datalogger, 0), err - } - for i := 0; i < len(dls); i++ { - if err := dls[i].PgErrors.AssignTo(&dls[i].Errors); err != nil { - return make([]Datalogger, 0), err - } - } - return dls, nil -} - -const listAllDataloggers = ` - SELECT * FROM v_datalogger -` - -func (q *Queries) ListAllDataloggers(ctx context.Context) ([]Datalogger, error) { - dls := make([]Datalogger, 0) - if err := q.db.SelectContext(ctx, &dls, listAllDataloggers); err != nil { - return make([]Datalogger, 0), err - } - for i := 0; i < len(dls); i++ { - if err := dls[i].PgErrors.AssignTo(&dls[i].Errors); err != nil { - return make([]Datalogger, 0), err - } - } - return dls, nil -} - -const getDataloggerIsActive = ` - SELECT EXISTS (SELECT * FROM v_datalogger WHERE model = $1 
AND sn = $2)::int -` - -// GetDataloggerIsActive checks if datalogger with sn already exists and is not deleted -func (q *Queries) GetDataloggerIsActive(ctx context.Context, modelName, sn string) (bool, error) { - var isActive bool - if err := q.db.GetContext(ctx, &isActive, getDataloggerIsActive, modelName, sn); err != nil { - return false, err - } - return isActive, nil -} - -const verifyDataloggerExists = ` - SELECT id FROM v_datalogger WHERE id = $1 -` - -// VerifyDataloggerExists checks if datalogger with sn already exists and is not deleted -func (q *Queries) VerifyDataloggerExists(ctx context.Context, dlID uuid.UUID) error { - return q.db.GetContext(ctx, &uuid.UUID{}, verifyDataloggerExists, dlID) -} - -const createDataloggerHash = ` - INSERT INTO datalogger_hash (datalogger_id, "hash") VALUES ($1, $2) -` - -func (q *Queries) CreateDataloggerHash(ctx context.Context, dataloggerID uuid.UUID) (string, error) { - key := password.GenerateRandom(40) - if _, err := q.db.ExecContext(ctx, createDataloggerHash, dataloggerID, password.MustCreateHash(key, password.DefaultParams)); err != nil { - return "", err - } - return key, nil -} - -const getOneDatalogger = ` - SELECT * FROM v_datalogger WHERE id = $1 -` - -func (q *Queries) GetOneDatalogger(ctx context.Context, dataloggerID uuid.UUID) (Datalogger, error) { - var dl Datalogger - if err := q.db.GetContext(ctx, &dl, getOneDatalogger, dataloggerID); err != nil { - return dl, err - } - if err := dl.PgErrors.AssignTo(&dl.Errors); err != nil { - return dl, err - } - return dl, nil -} - -const createDatalogger = ` - INSERT INTO datalogger (name, sn, project_id, creator, updater, slug, model_id) - VALUES ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) - RETURNING id -` - -func (q *Queries) CreateDatalogger(ctx context.Context, dl Datalogger) (uuid.UUID, error) { - var dlID uuid.UUID - err := q.db.GetContext(ctx, &dlID, createDatalogger, dl.Name, dl.SN, dl.ProjectID, dl.CreatorID, dl.ModelID) - return dlID, err -} - 
-const updateDatalogger = ` - UPDATE datalogger SET - name = $2, - updater = $3, - update_date = $4 - WHERE id = $1 -` - -func (q *Queries) UpdateDatalogger(ctx context.Context, dl Datalogger) error { - _, err := q.db.ExecContext(ctx, updateDatalogger, dl.ID, dl.Name, dl.UpdaterID, dl.UpdateDate) - return err -} - -const updateDataloggerHash = ` - UPDATE datalogger_hash SET "hash" = $2 WHERE datalogger_id = $1 -` - -func (q *Queries) UpdateDataloggerHash(ctx context.Context, dataloggerID uuid.UUID) (string, error) { - key := password.GenerateRandom(40) - if _, err := q.db.ExecContext(ctx, updateDataloggerHash, dataloggerID, password.MustCreateHash(key, password.DefaultParams)); err != nil { - return "", err - } - return key, nil -} - -const updateDataloggerUpdater = ` - UPDATE datalogger SET updater = $2, update_date = $3 WHERE id = $1 -` - -func (q *Queries) UpdateDataloggerUpdater(ctx context.Context, dl Datalogger) error { - _, err := q.db.ExecContext(ctx, updateDataloggerUpdater, dl.ID, dl.UpdaterID, dl.UpdateDate) - return err -} - -const deleteDatalogger = ` - UPDATE datalogger SET deleted = true, updater = $2, update_date = $3 WHERE id = $1 -` - -func (q *Queries) DeleteDatalogger(ctx context.Context, dl Datalogger) error { - _, err := q.db.ExecContext(ctx, deleteDatalogger, dl.ID, dl.UpdaterID, dl.UpdateDate) - return err -} - -const getDataloggerTablePreview = ` - SELECT * FROM v_datalogger_preview WHERE datalogger_table_id = $1 -` - -func (q *Queries) GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (DataloggerTablePreview, error) { - var dlp DataloggerTablePreview - err := q.db.GetContext(ctx, &dlp, getDataloggerTablePreview, dataloggerTableID) - if errors.Is(err, sql.ErrNoRows) { - dlp.DataloggerTableID = dataloggerTableID - if err := dlp.Preview.Set("null"); err != nil { - return DataloggerTablePreview{}, err - } - return dlp, nil - } - return dlp, err -} - -const resetDataloggerTableName = ` - UPDATE datalogger_table SET 
table_name = '' WHERE id = $1 -` - -func (q *Queries) ResetDataloggerTableName(ctx context.Context, dataloggerTableID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, resetDataloggerTableName, dataloggerTableID) - return err -} - -const renameEmptyDataloggerTableName = ` - UPDATE datalogger_table - SET table_name = $2 - WHERE table_name = '' AND datalogger_id = $1 - AND NOT EXISTS ( - SELECT 1 FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2 - ); -` - -func (q *Queries) RenameEmptyDataloggerTableName(ctx context.Context, dataloggerID uuid.UUID, tableName string) error { - _, err := q.db.ExecContext(ctx, renameEmptyDataloggerTableName, dataloggerID, tableName) - return err -} - -const getOrCreateDataloggerTable = ` - WITH dt AS ( - INSERT INTO datalogger_table (datalogger_id, table_name) VALUES ($1, $2) - ON CONFLICT ON CONSTRAINT datalogger_table_datalogger_id_table_name_key DO NOTHING - RETURNING id - ) - SELECT id FROM dt - UNION - SELECT id FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2 -` - -func (q *Queries) GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { - var tID uuid.UUID - err := q.db.GetContext(ctx, &tID, getOrCreateDataloggerTable, dataloggerID, tableName) - return tID, err -} - -const deleteDataloggerTable = ` - DELETE FROM datalogger_table WHERE id = $1 -` - -func (q *Queries) DeleteDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteDataloggerTable, dataloggerTableID) - return err -} diff --git a/api/internal/model/datalogger_telemetry.go b/api/internal/model/datalogger_telemetry.go deleted file mode 100644 index b8b36e3b..00000000 --- a/api/internal/model/datalogger_telemetry.go +++ /dev/null @@ -1,82 +0,0 @@ -package model - -import ( - "context" - "database/sql" - - "github.com/google/uuid" -) - -const getDataloggerByModelSN = ` - SELECT * FROM v_datalogger - WHERE model = $1 AND sn = 
$2 -` - -func (q *Queries) GetDataloggerByModelSN(ctx context.Context, modelName, sn string) (Datalogger, error) { - var dl Datalogger - err := q.db.GetContext(ctx, &dl, getDataloggerByModelSN, modelName, sn) - return dl, err -} - -const getDataloggerHashByModelSN = ` - SELECT "hash" FROM v_datalogger_hash - WHERE model = $1 AND sn = $2 -` - -func (q *Queries) GetDataloggerHashByModelSN(ctx context.Context, modelName, sn string) (string, error) { - var hash string - if err := q.db.GetContext(ctx, &hash, getDataloggerHashByModelSN, modelName, sn); err != nil { - return "", err - } - return hash, nil -} - -const createDataloggerTablePreview = ` - INSERT INTO datalogger_preview (datalogger_table_id, preview, update_date) VALUES ($1, $2, $3) -` - -func (q *Queries) CreateDataloggerTablePreview(ctx context.Context, prv DataloggerTablePreview) error { - _, err := q.db.ExecContext(ctx, createDataloggerTablePreview, prv.DataloggerTableID, prv.Preview, prv.UpdateDate) - return err -} - -const updateDataloggerTablePreview = ` - UPDATE datalogger_preview SET preview = $3, update_date = $4 - WHERE datalogger_table_id IN (SELECT id FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2) -` - -func (q *Queries) UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv DataloggerTablePreview) error { - result, err := q.db.ExecContext(ctx, updateDataloggerTablePreview, dataloggerID, tableName, prv.Preview, prv.UpdateDate) - r, err := result.RowsAffected() - if err != nil { - return err - } - if r == 0 { - return sql.ErrNoRows - } - return err -} - -const deleteDataloggerTableError = ` - DELETE FROM datalogger_error - WHERE datalogger_table_id IN (SELECT id FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2) -` - -func (q *Queries) DeleteDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string) error { - _, err := q.db.ExecContext(ctx, deleteDataloggerTableError, dataloggerID, tableName) - 
return err -} - -const createDataloggerError = ` - INSERT INTO datalogger_error (datalogger_table_id, error_message) - SELECT id, $3 FROM datalogger_table - WHERE datalogger_id = $1 AND table_name = $2 - AND NOT EXISTS ( - SELECT 1 FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2 - ); -` - -func (q *Queries) CreateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, errMessage string) error { - _, err := q.db.ExecContext(ctx, createDataloggerError, dataloggerID, tableName, errMessage) - return err -} diff --git a/api/internal/model/db.go b/api/internal/model/db.go deleted file mode 100644 index 9174e4bc..00000000 --- a/api/internal/model/db.go +++ /dev/null @@ -1,143 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "encoding/json" - "errors" - "fmt" - "log" - "time" - - "github.com/USACE/instrumentation-api/api/internal/config" - _ "github.com/jackc/pgx/v5/stdlib" - "github.com/jmoiron/sqlx" - "github.com/lib/pq" -) - -// DBTX includes all methods shared by sqlx.DB and sqlx.Tx, allowing -// either type to be used interchangeably. 
-// https://github.com/jmoiron/sqlx/pull/809 -type DBTX interface { - sqlx.Ext - sqlx.ExecerContext - sqlx.PreparerContext - sqlx.QueryerContext - sqlx.Preparer - - GetContext(context.Context, interface{}, string, ...interface{}) error - SelectContext(context.Context, interface{}, string, ...interface{}) error - Get(interface{}, string, ...interface{}) error - MustExecContext(context.Context, string, ...interface{}) sql.Result - PreparexContext(context.Context, string) (*sqlx.Stmt, error) - QueryRowContext(context.Context, string, ...interface{}) *sql.Row - Select(interface{}, string, ...interface{}) error - QueryRow(string, ...interface{}) *sql.Row - PrepareNamedContext(context.Context, string) (*sqlx.NamedStmt, error) - PrepareNamed(string) (*sqlx.NamedStmt, error) - Preparex(string) (*sqlx.Stmt, error) - NamedExec(string, interface{}) (sql.Result, error) - NamedExecContext(context.Context, string, interface{}) (sql.Result, error) - MustExec(string, ...interface{}) sql.Result - NamedQuery(string, interface{}) (*sqlx.Rows, error) -} - -type DBRows interface { - Close() error - Columns() ([]string, error) - ColumnTypes() ([]*sql.ColumnType, error) - Err() error - Next() bool - NextResultSet() bool - Scan(dest ...interface{}) error - SliceScan() ([]interface{}, error) - MapScan(dest map[string]interface{}) error - StructScan(dest interface{}) error -} - -type Tx interface { - Commit() error - Rollback() error -} - -var _ DBTX = (*sqlx.DB)(nil) -var _ DBTX = (*sqlx.Tx)(nil) -var _ DBRows = (*sqlx.Rows)(nil) -var _ Tx = (*sqlx.Tx)(nil) - -var sqlIn = sqlx.In - -type Database struct { - *sqlx.DB -} - -func (db *Database) Queries() *Queries { - return &Queries{db} -} - -type Queries struct { - db DBTX -} - -func (q *Queries) WithTx(tx *sqlx.Tx) *Queries { - return &Queries{ - db: tx, - } -} - -func TxDo(rollback func() error) { - err := rollback() - if err != nil && !errors.Is(err, sql.ErrTxDone) { - log.Print(err.Error()) - } -} - -func NewDatabase(cfg 
*config.DBConfig) *Database { - db, err := sqlx.Connect("pgx", cfg.ConnStr()) - if err != nil { - log.Fatalf("Could not connect to database: %s", err.Error()) - } - if db == nil { - log.Panicf("database is nil") - } - - db.SetMaxOpenConns(50) - db.SetMaxIdleConns(5) - db.SetConnMaxLifetime(time.Minute * 30) - - return &Database{db} -} - -// Some generic types to help sqlx scan arrays / json -type dbSlice[T any] []T - -func (d *dbSlice[T]) Scan(src interface{}) error { - value := make([]T, 0) - if err := pq.Array(&value).Scan(src); err != nil { - return err - } - *d = dbSlice[T](value) - return nil -} - -type dbJSONSlice[T any] []T - -func (d *dbJSONSlice[T]) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("failed type assertion") - } - return json.Unmarshal([]byte(b), d) -} - -func MapToStruct[T any](v map[string]interface{}) (T, error) { - var o T - s, err := json.Marshal(v) - if err != nil { - return o, err - } - if err := json.Unmarshal(s, &o); err != nil { - return o, err - } - return o, nil -} diff --git a/api/internal/model/district_rollup.go b/api/internal/model/district_rollup.go deleted file mode 100644 index 60ac63ed..00000000 --- a/api/internal/model/district_rollup.go +++ /dev/null @@ -1,56 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type DistrictRollup struct { - AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` - OfficeID *uuid.UUID `json:"office_id" db:"office_id"` - DistrictInitials *string `json:"district_initials" db:"district_initials"` - ProjectName string `json:"project_name" db:"project_name"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Month time.Time `json:"month" db:"the_month"` - ExpectedTotalSubmittals int `json:"expected_total_submittals" db:"expected_total_submittals"` - ActualTotalSubmittals int `json:"actual_total_submittals" db:"actual_total_submittals"` - RedSubmittals int `json:"red_submittals" db:"red_submittals"` - 
YellowSubmittals int `json:"yellow_submittals" db:"yellow_submittals"` - GreenSubmittals int `json:"green_submittals" db:"green_submittals"` -} - -const listEvaluationDistrictRollup = ` - SELECT * FROM v_district_rollup - WHERE alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::UUID - AND project_id = $1 - AND the_month >= DATE_TRUNC('month', $2::TIMESTAMPTZ) - AND the_month <= DATE_TRUNC('month', $3::TIMESTAMPTZ) -` - -// ListCollectionGroups lists all collection groups for a project -func (q *Queries) ListEvaluationDistrictRollup(ctx context.Context, opID uuid.UUID, tw TimeWindow) ([]DistrictRollup, error) { - dr := make([]DistrictRollup, 0) - if err := q.db.SelectContext(ctx, &dr, listEvaluationDistrictRollup, opID, tw.After, tw.Before); err != nil { - return nil, err - } - return dr, nil -} - -const listMeasurementDistrictRollup = ` - SELECT * FROM v_district_rollup - WHERE alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::UUID - AND project_id = $1 - AND the_month >= DATE_TRUNC('month', $2::TIMESTAMPTZ) - AND the_month <= DATE_TRUNC('month', $3::TIMESTAMPTZ) -` - -// ListCollectionGroups lists all collection groups for a project -func (q *Queries) ListMeasurementDistrictRollup(ctx context.Context, opID uuid.UUID, tw TimeWindow) ([]DistrictRollup, error) { - dr := make([]DistrictRollup, 0) - if err := q.db.SelectContext(ctx, &dr, listMeasurementDistrictRollup, opID, tw.After, tw.Before); err != nil { - return nil, err - } - return dr, nil -} diff --git a/api/internal/model/domains.go b/api/internal/model/domains.go deleted file mode 100644 index 6ee923f2..00000000 --- a/api/internal/model/domains.go +++ /dev/null @@ -1,77 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// Domain is a struct for returning all database domain values -type Domain struct { - ID uuid.UUID `json:"id" db:"id"` - Group string `json:"group" db:"group"` - Value string `json:"value" db:"value"` - Description *string `json:"description" 
db:"description"` -} - -type DomainGroup struct { - Group string `json:"group" db:"group"` - Opts dbJSONSlice[DomainGroupOption] `json:"opts" db:"opts"` -} - -type DomainGroupOption struct { - ID uuid.UUID `json:"id" db:"id"` - Value string `json:"value" db:"value"` - Description *string `json:"description" db:"description"` -} - -type DomainGroupCollection []DomainGroup - -type DomainMap map[string][]DomainGroupOption - -const getDomains = ` - SELECT * FROM v_domain -` - -// GetDomains returns a UNION of all domain tables in the database -func (q *Queries) GetDomains(ctx context.Context) ([]Domain, error) { - dd := make([]Domain, 0) - if err := q.db.SelectContext(ctx, &dd, getDomains); err != nil { - return nil, err - } - return dd, nil -} - -const getDomainMap = ` - SELECT * FROM v_domain_group -` - -// GetDomainsV2 returns all domains grouped by table -func (q *Queries) GetDomainMap(ctx context.Context) (DomainMap, error) { - dd := make([]DomainGroup, 0) - if err := q.db.SelectContext(ctx, &dd, getDomainMap); err != nil { - return nil, err - } - m := make(DomainMap) - for i := range dd { - m[dd[i].Group] = dd[i].Opts - } - return m, nil -} - -type TimezoneOption struct { - Name string `json:"name" db:"name"` - Abbrev string `json:"abbrev" db:"abbrev"` - UtcOffset string `json:"utc_offset" db:"utc_offset"` - IsDst bool `json:"is_dst" db:"is_dst"` -} - -const listTimezoneOptions = ` - SELECT * FROM pg_timezone_names -` - -func (q *Queries) ListTimezoneOptions(ctx context.Context) ([]TimezoneOption, error) { - dd := make([]TimezoneOption, 0) - err := q.db.SelectContext(ctx, &dd, listTimezoneOptions) - return dd, err -} diff --git a/api/internal/model/equivalency_table.go b/api/internal/model/equivalency_table.go deleted file mode 100644 index 803a6e4f..00000000 --- a/api/internal/model/equivalency_table.go +++ /dev/null @@ -1,161 +0,0 @@ -package model - -import ( - "context" - "errors" - "fmt" - - "github.com/google/uuid" - "github.com/jackc/pgconn" - 
"github.com/jackc/pgerrcode" -) - -type EquivalencyTable struct { - DataloggerID uuid.UUID `json:"datalogger_id" db:"datalogger_id"` - DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` - DataloggerTableName string `json:"datalogger_table_name" db:"datalogger_table_name"` - Rows dbJSONSlice[EquivalencyTableRow] `json:"rows" db:"fields"` -} - -type EquivalencyTableRow struct { - ID uuid.UUID `json:"id" db:"id"` - FieldName string `json:"field_name" db:"field_name"` - DisplayName string `json:"display_name" db:"display_name"` - InstrumentID *uuid.UUID `json:"instrument_id" db:"instrument_id"` - TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` -} - -const getIsValidDataloggerTable = ` - SELECT NOT EXISTS ( - SELECT * FROM datalogger_table WHERE id = $1 AND table_name = 'preparse' - ) -` - -// GetIsValidDataloggerTable verifies that a datalogger table is not "preparse" (read-only) -func (q *Queries) GetIsValidDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error { - var isValid bool - if err := q.db.GetContext(ctx, &isValid, getIsValidDataloggerTable, dataloggerTableID); err != nil { - return err - } - if !isValid { - return fmt.Errorf("table preparse is read only %s", dataloggerTableID) - } - return nil -} - -const getIsValidEquivalencyTableTimeseries = ` - SELECT NOT EXISTS ( - SELECT id FROM v_timeseries_computed - WHERE id = $1 - UNION ALL - SELECT timeseries_id FROM instrument_constants - WHERE timeseries_id = $1 - ) -` - -// GetIsValidEquivalencyTableTimeseries verifies that a Timeseries is not computed or constant -func (q *Queries) GetIsValidEquivalencyTableTimeseries(ctx context.Context, tsID uuid.UUID) error { - var isValid bool - if err := q.db.GetContext(ctx, &isValid, getIsValidEquivalencyTableTimeseries, tsID); err != nil { - return err - } - if !isValid { - return fmt.Errorf("timeseries '%s' must not be computed or constant", tsID) - } - return nil -} - -const getEquivalencyTable = ` - 
SELECT - datalogger_id, - datalogger_table_id, - datalogger_table_name, - fields - FROM v_datalogger_equivalency_table - WHERE datalogger_table_id = $1 -` - -// GetEquivalencyTable returns a single Datalogger EquivalencyTable -func (q *Queries) GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (EquivalencyTable, error) { - var et EquivalencyTable - err := q.db.GetContext(ctx, &et, getEquivalencyTable, dataloggerTableID) - return et, err -} - -const createOrUpdateEquivalencyTableRow = ` - INSERT INTO datalogger_equivalency_table - (datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) - VALUES ($1, $2, $3, $4, $5, $6) - ON CONFLICT ON CONSTRAINT datalogger_equivalency_table_datalogger_table_id_field_name_key - DO UPDATE SET display_name = EXCLUDED.display_name, instrument_id = EXCLUDED.instrument_id, timeseries_id = EXCLUDED.timeseries_id -` - -func (q *Queries) CreateOrUpdateEquivalencyTableRow(ctx context.Context, dataloggerID, dataloggerTableID uuid.UUID, tr EquivalencyTableRow) error { - if _, err := q.db.ExecContext(ctx, createOrUpdateEquivalencyTableRow, - dataloggerID, - dataloggerTableID, - tr.FieldName, - tr.DisplayName, - tr.InstrumentID, - tr.TimeseriesID, - ); err != nil { - return err - } - return nil -} - -const updateEquivalencyTableRow = ` - UPDATE datalogger_equivalency_table SET - field_name = $2, - display_name = $3, - instrument_id = $4, - timeseries_id = $5 - WHERE id = $1 -` - -func (q *Queries) UpdateEquivalencyTableRow(ctx context.Context, tr EquivalencyTableRow) error { - if _, err := q.db.ExecContext(ctx, updateEquivalencyTableRow, - tr.ID, - tr.FieldName, - tr.DisplayName, - tr.InstrumentID, - tr.TimeseriesID, - ); err != nil { - var pgErr *pgconn.PgError - if errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UniqueViolation { - return fmt.Errorf("timeseries_id %s is already mapped to an active datalogger", tr.TimeseriesID) - } - return err - } - return nil -} - -const 
deleteEquivalencyTable = ` - DELETE FROM datalogger_equivalency_table WHERE datalogger_table_id = $1 -` - -// DeleteEquivalencyTable clears all rows of the EquivalencyTable for a datalogger table -func (q *Queries) DeleteEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteEquivalencyTable, dataloggerTableID) - return err -} - -const deleteEquivalencyTableRow = ` - DELETE FROM datalogger_equivalency_table WHERE id = $1 -` - -// DeleteEquivalencyTableRow deletes a single EquivalencyTable row by row id -func (q *Queries) DeleteEquivalencyTableRow(ctx context.Context, rowID uuid.UUID) error { - res, err := q.db.ExecContext(ctx, deleteEquivalencyTableRow, rowID) - if err != nil { - return err - } - count, err := res.RowsAffected() - if err != nil { - return err - } - if count == 0 { - return fmt.Errorf("row not found %s", rowID) - } - return nil -} diff --git a/api/internal/model/evaluation.go b/api/internal/model/evaluation.go deleted file mode 100644 index 62a1b0d8..00000000 --- a/api/internal/model/evaluation.go +++ /dev/null @@ -1,224 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "fmt" - "time" - - "github.com/google/uuid" -) - -type Evaluation struct { - ID uuid.UUID `json:"id" db:"id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - AlertConfigID *uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - AlertConfigName *string `json:"alert_config_name" db:"alert_config_name"` - SubmittalID *uuid.UUID `json:"submittal_id" db:"submittal_id"` - Name string `json:"name" db:"name"` - Body string `json:"body" db:"body"` - StartDate time.Time `json:"start_date" db:"start_date"` - EndDate time.Time `json:"end_date" db:"end_date"` - Instruments dbJSONSlice[EvaluationInstrument] `json:"instruments" db:"instruments"` - AuditInfo -} - -type EvaluationInstrument struct { - InstrumentID uuid.UUID `json:"instrument_id" 
db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` -} - -const listProjectEvaluations = ` - SELECT * - FROM v_evaluation - WHERE project_id = $1 -` - -func (q *Queries) ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]Evaluation, error) { - ee := make([]Evaluation, 0) - if err := q.db.SelectContext(ctx, &ee, listProjectEvaluations, projectID); err != nil { - return nil, err - } - return ee, nil -} - -const listProjectEvaluationsByAlertConfig = ` - SELECT * FROM v_evaluation - WHERE project_id = $1 - AND alert_config_id IS NOT NULL - AND alert_config_id = $2 -` - -func (q *Queries) ListProjectEvaluationsByAlertConfig(ctx context.Context, projectID, alertConfigID uuid.UUID) ([]Evaluation, error) { - ee := make([]Evaluation, 0) - err := q.db.SelectContext(ctx, &ee, listProjectEvaluationsByAlertConfig, projectID, alertConfigID) - if err != nil { - return make([]Evaluation, 0), err - } - return ee, nil -} - -const listInstrumentEvaluations = ` - SELECT * FROM v_evaluation - WHERE id = ANY( - SELECT evaluation_id - FROM evaluation_instrument - WHERE instrument_id = $1 - ) -` - -func (q *Queries) ListInstrumentEvaluations(ctx context.Context, instrumentID uuid.UUID) ([]Evaluation, error) { - ee := make([]Evaluation, 0) - if err := q.db.SelectContext(ctx, &ee, listInstrumentEvaluations, instrumentID); err != nil { - return nil, err - } - return ee, nil -} - -const getEvaluation = ` - SELECT * FROM v_evaluation WHERE id = $1 -` - -func (q *Queries) GetEvaluation(ctx context.Context, evaluationID uuid.UUID) (Evaluation, error) { - var e Evaluation - if err := q.db.GetContext(ctx, &e, getEvaluation, evaluationID); err != nil { - return e, err - } - return e, nil -} - -const completeEvaluationSubmittal = ` - UPDATE submittal sub1 SET - submittal_status_id = sq.submittal_status_id, - completion_date = NOW() - FROM ( - SELECT - sub2.id AS submittal_id, - CASE - -- if completed before due date, mark submittal as green id - 
WHEN NOW() <= sub2.due_date THEN '0c0d6487-3f71-4121-8575-19514c7b9f03'::UUID - -- if completed after due date, mark as yellow - ELSE 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::UUID - END AS submittal_status_id - FROM submittal sub2 - INNER JOIN alert_config ac ON sub2.alert_config_id = ac.id - WHERE sub2.id = $1 - AND sub2.completion_date IS NULL - AND NOT sub2.marked_as_missing - AND ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::UUID - ) sq - WHERE sub1.id = sq.submittal_id - RETURNING sub1.* -` - -func (q *Queries) CompleteEvaluationSubmittal(ctx context.Context, submittalID uuid.UUID) (Submittal, error) { - var sub Submittal - if err := q.db.GetContext(ctx, &sub, completeEvaluationSubmittal, submittalID); err != nil { - if err == sql.ErrNoRows { - return sub, fmt.Errorf("submittal must exist, be of evaluation type, and before due date or unvalidated missing") - } - return sub, err - } - return sub, nil -} - -const createNextEvaluationSubmittal = ` - INSERT INTO submittal (alert_config_id, due_date) - SELECT - ac.id, - NOW() + ac.schedule_interval - FROM alert_config ac - WHERE ac.id IN (SELECT alert_config_id FROM submittal WHERE id = $1) -` - -func (q *Queries) CreateNextEvaluationSubmittal(ctx context.Context, submittalID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createNextEvaluationSubmittal, submittalID) - return err -} - -const createEvaluation = ` - INSERT INTO evaluation ( - project_id, - submittal_id, - name, - body, - start_date, - end_date, - creator, - create_date - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8) - RETURNING id -` - -func (q *Queries) CreateEvaluation(ctx context.Context, ev Evaluation) (uuid.UUID, error) { - var evaluationID uuid.UUID - err := q.db.GetContext( - ctx, - &evaluationID, - createEvaluation, - ev.ProjectID, - ev.SubmittalID, - ev.Name, - ev.Body, - ev.StartDate, - ev.EndDate, - ev.CreatorID, - ev.CreateDate, - ) - return evaluationID, err -} - -const createEvalationInstrument = ` - INSERT INTO 
evaluation_instrument (evaluation_id, instrument_id) VALUES ($1,$2) -` - -func (q *Queries) CreateEvaluationInstrument(ctx context.Context, evaluationID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createEvalationInstrument, evaluationID, instrumentID) - return err -} - -const updateEvaluation = ` - UPDATE evaluation SET - name=$3, - body=$4, - start_date=$5, - end_date=$6, - updater=$7, - update_date=$8 - WHERE id=$1 AND project_id=$2 -` - -func (q *Queries) UpdateEvaluation(ctx context.Context, ev Evaluation) error { - _, err := q.db.ExecContext( - ctx, - updateEvaluation, - ev.ID, - ev.ProjectID, - ev.Name, - ev.Body, - ev.StartDate, - ev.EndDate, - ev.UpdaterID, - ev.UpdateDate, - ) - return err -} - -const unassignAllInstrumentsFromEvaluation = ` - DELETE FROM evaluation_instrument WHERE evaluation_id = $1 -` - -func (q *Queries) UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unassignAllInstrumentsFromEvaluation, evaluationID) - return err -} - -const deleteEvaluation = ` - DELETE FROM evaluation WHERE id = $1 -` - -func (q *Queries) DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteEvaluation, evaluationID) - return err -} diff --git a/api/internal/model/heartbeat.go b/api/internal/model/heartbeat.go deleted file mode 100644 index 1fd536a6..00000000 --- a/api/internal/model/heartbeat.go +++ /dev/null @@ -1,50 +0,0 @@ -package model - -import ( - "context" - "time" -) - -// Heartbeat is a timestamp -type Heartbeat struct { - Time time.Time `json:"time"` -} - -const doHeartbeat = ` - INSERT INTO heartbeat (time) VALUES ($1) RETURNING * -` - -// DoHeartbeat does regular-interval tasks -func (q *Queries) DoHeartbeat(ctx context.Context) (Heartbeat, error) { - var h Heartbeat - if err := q.db.GetContext(ctx, &h, doHeartbeat, time.Now().In(time.UTC)); err != nil { - return h, err - } - return h, nil -} - 
-const getLatestHeartbeat = ` - SELECT MAX(time) AS time FROM heartbeat -` - -// GetLatestHeartbeat returns the most recent system heartbeat -func (q *Queries) GetLatestHeartbeat(ctx context.Context) (Heartbeat, error) { - var h Heartbeat - if err := q.db.GetContext(ctx, &h, getLatestHeartbeat); err != nil { - return h, err - } - return h, nil -} - -const listHeartbeats = ` - SELECT * FROM heartbeat -` - -// ListHeartbeats returns all system heartbeats -func (q *Queries) ListHeartbeats(ctx context.Context) ([]Heartbeat, error) { - hh := make([]Heartbeat, 0) - if err := q.db.SelectContext(ctx, &hh, listHeartbeats); err != nil { - return nil, err - } - return hh, nil -} diff --git a/api/internal/model/home.go b/api/internal/model/home.go deleted file mode 100644 index f0854766..00000000 --- a/api/internal/model/home.go +++ /dev/null @@ -1,30 +0,0 @@ -package model - -import ( - "context" -) - -// Home is information for the homepage (landing page) -type Home struct { - InstrumentCount int `json:"instrument_count" db:"instrument_count"` - InstrumetGroupCount int `json:"instrument_group_count" db:"instrument_group_count"` - ProjectCount int `json:"project_count" db:"project_count"` - NewInstruments7D int `json:"new_instruments_7d" db:"new_instruments_7d"` - NewMeasurements2H int `json:"new_measurements_2h" db:"new_measurements_2h"` -} - -const getHome = ` - SELECT - (SELECT COUNT(*) FROM instrument WHERE NOT deleted) AS instrument_count, - (SELECT COUNT(*) FROM project WHERE NOT deleted) AS project_count, - (SELECT COUNT(*) FROM instrument_group) AS instrument_group_count, - (SELECT COUNT(*) FROM instrument WHERE NOT deleted AND create_date > NOW() - '7 days'::INTERVAL) AS new_instruments_7d, - (SELECT COUNT(*) FROM timeseries_measurement WHERE time > NOW() - '2 hours'::INTERVAL) AS new_measurements_2h -` - -// GetHome returns information for the homepage -func (q *Queries) GetHome(ctx context.Context) (Home, error) { - var home Home - err := q.db.GetContext(ctx, 
&home, getHome) - return home, err -} diff --git a/api/internal/model/instrument.go b/api/internal/model/instrument.go deleted file mode 100644 index 57c09a4d..00000000 --- a/api/internal/model/instrument.go +++ /dev/null @@ -1,272 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/google/uuid" - "github.com/twpayne/go-geom/encoding/geojson" -) - -// Instrument is an instrument -type Instrument struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - AwareID *uuid.UUID `json:"aware_id,omitempty"` - Groups dbSlice[uuid.UUID] `json:"groups" db:"groups"` - Constants dbSlice[uuid.UUID] `json:"constants" db:"constants"` - AlertConfigs dbSlice[uuid.UUID] `json:"alert_configs" db:"alert_configs"` - StatusID uuid.UUID `json:"status_id" db:"status_id"` - Status string `json:"status"` - StatusTime time.Time `json:"status_time" db:"status_time"` - Deleted bool `json:"-"` - TypeID uuid.UUID `json:"type_id" db:"type_id"` - Type string `json:"type"` - Icon *string `json:"icon" db:"icon"` - Geometry db.Geometry `json:"geometry,omitempty"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"offset" db:"station_offset"` - Projects dbJSONSlice[IDSlugName] `json:"projects" db:"projects"` - NIDID *string `json:"nid_id" db:"nid_id"` - USGSID *string `json:"usgs_id" db:"usgs_id"` - HasCwms bool `json:"has_cwms" db:"has_cwms"` - ShowCwmsTab bool `json:"show_cwms_tab" db:"show_cwms_tab"` - Opts Opts `json:"opts" db:"opts"` - AuditInfo -} - -// InstrumentCollection is a collection of Instrument items -type InstrumentCollection []Instrument - -// Shorten returns an instrument collection with individual objects limited to ID and Struct fields -func (ic InstrumentCollection) Shorten() IDSlugCollection { - ss := IDSlugCollection{Items: make([]IDSlug, 0)} - for _, n := range ic { - s := IDSlug{ID: n.ID, Slug: n.Slug} - - ss.Items = append(ss.Items, s) - } - return ss -} 
- -type InstrumentCount struct { - InstrumentCount int `json:"instrument_count"` -} - -type InstrumentsProjectCount struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` - ProjectCount int `json:"project_count" db:"project_count"` -} - -// type InstrumentIDName struct { -// ID uuid.UUID `json:"id"` -// Name string `json:"name"` -// } - -const listInstrumentsSQL = ` - SELECT - id, - deleted, - status_id, - status, - status_time, - slug, - name, - type_id, - type, - icon, - geometry, - station, - station_offset, - creator, - create_date, - updater, - update_date, - projects, - constants, - groups, - alert_configs, - nid_id, - usgs_id, - has_cwms, - show_cwms_tab, - opts - FROM v_instrument -` - -const listInstruments = listInstrumentsSQL + ` - WHERE NOT deleted -` - -// ListInstruments returns an array of instruments from the database -func (q *Queries) ListInstruments(ctx context.Context) ([]Instrument, error) { - ii := make([]Instrument, 0) - if err := q.db.SelectContext(ctx, &ii, listInstruments); err != nil { - return nil, err - } - return ii, nil -} - -const getInstrument = listInstrumentsSQL + ` - WHERE id = $1 -` - -// GetInstrument returns a single instrument -func (q *Queries) GetInstrument(ctx context.Context, instrumentID uuid.UUID) (Instrument, error) { - var i Instrument - err := q.db.GetContext(ctx, &i, getInstrument, instrumentID) - return i, err -} - -const getInstrumentCount = ` - SELECT COUNT(*) FROM instrument WHERE NOT deleted -` - -// GetInstrumentCount returns the number of instruments in the database -func (q *Queries) GetInstrumentCount(ctx context.Context) (InstrumentCount, error) { - var ic InstrumentCount - if err := q.db.GetContext(ctx, &ic.InstrumentCount, getInstrumentCount); err != nil { - return ic, err - } - return ic, nil -} - -const createInstrument = ` - INSERT INTO instrument (slug, name, type_id, geometry, station, station_offset, creator, 
create_date, nid_id, usgs_id, show_cwms_tab) - VALUES (slugify($1, 'instrument'), $1, $2, ST_SetSRID(ST_GeomFromWKB($3), 4326), $4, $5, $6, $7, $8, $9, $10) - RETURNING id, slug -` - -func (q *Queries) CreateInstrument(ctx context.Context, i Instrument) (IDSlugName, error) { - var aa IDSlugName - if err := q.db.GetContext( - ctx, &aa, createInstrument, - i.Name, i.TypeID, i.Geometry, i.Station, i.StationOffset, i.CreatorID, i.CreateDate, i.NIDID, i.USGSID, i.ShowCwmsTab, - ); err != nil { - return aa, err - } - return aa, nil -} - -const listAdminProjects = ` - SELECT pr.project_id FROM profile_project_roles pr - INNER JOIN role ro ON ro.id = pr.role_id - WHERE pr.profile_id = $1 - AND ro.name = 'ADMIN' -` - -func (q *Queries) ListAdminProjects(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) { - projectIDs := make([]uuid.UUID, 0) - err := q.db.SelectContext(ctx, &projectIDs, listAdminProjects, profileID) - return projectIDs, err -} - -const listInstrumentProjects = ` - SELECT project_id FROM project_instrument WHERE instrument_id = $1 -` - -func (q *Queries) ListInstrumentProjects(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) { - projectIDs := make([]uuid.UUID, 0) - err := q.db.SelectContext(ctx, &projectIDs, listInstrumentProjects, instrumentID) - return projectIDs, err -} - -const getProjectCountForInstrument = ` - SELECT pi.instrument_id, i.name AS instrument_name, COUNT(pi.*) AS project_count - FROM project_instrument pi - INNER JOIN instrument i ON pi.instrument_id = i.id - WHERE pi.instrument_id IN (?) 
- GROUP BY pi.instrument_id, i.name - ORDER BY i.name -` - -func (q *Queries) GetProjectCountForInstruments(ctx context.Context, instrumentIDs []uuid.UUID) ([]InstrumentsProjectCount, error) { - counts := make([]InstrumentsProjectCount, 0) - err := q.db.SelectContext(ctx, &counts, getProjectCountForInstrument, instrumentIDs) - return counts, err -} - -const updateInstrument = ` - UPDATE instrument SET - name = $3, - type_id = $4, - geometry = ST_GeomFromWKB($5), - updater = $6, - update_date = $7, - station = $8, - station_offset = $9, - nid_id = $10, - usgs_id = $11, - show_cwms_tab = $12 - WHERE id = $2 - AND id IN ( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) -` - -func (q *Queries) UpdateInstrument(ctx context.Context, projectID uuid.UUID, i Instrument) error { - _, err := q.db.ExecContext( - ctx, updateInstrument, - projectID, i.ID, i.Name, i.TypeID, i.Geometry, - i.UpdaterID, i.UpdateDate, i.Station, i.StationOffset, i.NIDID, i.USGSID, i.ShowCwmsTab, - ) - return err -} - -const updateInstrumentGeometry = ` - UPDATE instrument SET - geometry = ST_GeomFromWKB($3), - updater = $4, - update_date = NOW() - WHERE id = $2 - AND id IN ( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) - RETURNING id -` - -// UpdateInstrumentGeometry updates instrument geometry property -func (q *Queries) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p Profile) error { - _, err := q.db.ExecContext(ctx, updateInstrumentGeometry, projectID, instrumentID, geom.Geometry(), p.ID) - return err -} - -const deleteFlagInstrument = ` - UPDATE instrument SET deleted = true - WHERE id = ANY( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) - AND id = $2 -` - -// DeleteFlagInstrument changes delete flag to true -func (q *Queries) DeleteFlagInstrument(ctx context.Context, projectID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, 
deleteFlagInstrument, projectID, instrumentID) - return err -} - -const listInstrumentIDNamesByIDs = ` - SELECT id, name - FROM instrument - WHERE id IN (?) - AND NOT deleted -` - -func (q *Queries) ListInstrumentIDNamesByIDs(ctx context.Context, instrumentIDs []uuid.UUID) ([]InstrumentIDName, error) { - query, args, err := sqlIn(listInstrumentIDNamesByIDs, instrumentIDs) - if err != nil { - return nil, err - } - ii := make([]InstrumentIDName, 0) - err = q.db.SelectContext(ctx, &ii, q.db.Rebind(query), args...) - return ii, err -} diff --git a/api/internal/model/instrument_assign.go b/api/internal/model/instrument_assign.go deleted file mode 100644 index d4c7e37c..00000000 --- a/api/internal/model/instrument_assign.go +++ /dev/null @@ -1,220 +0,0 @@ -package model - -import ( - "context" - "fmt" - - "github.com/google/uuid" -) - -type ReasonCode int - -const ( - None ReasonCode = iota - Unauthorized - InvalidName - InvalidUnassign -) - -type InstrumentsValidation struct { - ReasonCode ReasonCode `json:"-"` - IsValid bool `json:"is_valid"` - Errors []string `json:"errors"` -} - -type ProjectInstrumentAssignments struct { - InstrumentIDs []uuid.UUID `json:"instrument_ids"` -} - -type InstrumentProjectAssignments struct { - ProjectIDs []uuid.UUID `json:"project_ids"` -} - -const assignInstrumentToProject = ` - INSERT INTO project_instrument (project_id, instrument_id) VALUES ($1, $2) - ON CONFLICT ON CONSTRAINT project_instrument_project_id_instrument_id_key DO NOTHING -` - -func (q *Queries) AssignInstrumentToProject(ctx context.Context, projectID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, assignInstrumentToProject, projectID, instrumentID) - return err -} - -const unassignInstrumentFromProject = ` - DELETE FROM project_instrument WHERE project_id = $1 AND instrument_id = $2 -` - -func (q *Queries) UnassignInstrumentFromProject(ctx context.Context, projectID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, 
unassignInstrumentFromProject, projectID, instrumentID) - return err -} - -const validateInstrumentNamesProjectUnique = ` - SELECT i.name - FROM project_instrument pi - INNER JOIN instrument i ON pi.instrument_id = i.id - WHERE pi.project_id = ? - AND i.name IN (?) - AND NOT i.deleted -` - -// ValidateInstrumentNamesProjectUnique checks that the provided instrument names do not already belong to a project -func (q *Queries) ValidateInstrumentNamesProjectUnique(ctx context.Context, projectID uuid.UUID, instrumentNames []string) (InstrumentsValidation, error) { - var v InstrumentsValidation - query, args, err := sqlIn(validateInstrumentNamesProjectUnique, projectID, instrumentNames) - if err != nil { - return v, err - } - var nn []struct { - Name string `db:"name"` - } - if err := q.db.SelectContext(ctx, &nn, q.db.Rebind(query), args...); err != nil { - return v, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Instrument name '%s' is already taken. Instrument names must be unique within associated projects", - nn[idx].Name, - ) - } - v.Errors = vErrors - v.ReasonCode = InvalidName - } else { - v.IsValid = true - v.Errors = make([]string, 0) - } - return v, nil -} - -const validateProjectsInstrumentNameUnique = ` - SELECT p.name, i.name - FROM project_instrument pi - INNER JOIN instrument i ON pi.instrument_id = i.id - INNER JOIN project p ON pi.project_id = p.id - WHERE i.name = ? - AND pi.instrument_id IN (?) 
- AND NOT i.deleted - ORDER BY pi.project_id -` - -// ValidateProjectsInstrumentNameUnique checks that the provided instrument name does not already belong to one of the provided projects -func (q *Queries) ValidateProjectsInstrumentNameUnique(ctx context.Context, instrumentName string, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - var v InstrumentsValidation - query, args, err := sqlIn(validateProjectsInstrumentNameUnique, instrumentName, projectIDs) - if err != nil { - return v, err - } - var nn []struct { - Name string `db:"name"` - } - if err := q.db.SelectContext(ctx, &nn, q.db.Rebind(query), args...); err != nil { - return v, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Instrument name '%s' is already taken. Instrument names must be unique within associated projects", - nn[idx].Name, - ) - } - v.Errors = vErrors - v.ReasonCode = InvalidName - } else { - v.IsValid = true - v.Errors = make([]string, 0) - } - return v, nil -} - -// case where service provides slice of instrument ids for single project -const validateInstrumentsAssignerAuthorized = ` - SELECT p.name AS project_name, i.name AS instrument_name - FROM project_instrument pi - INNER JOIN project p ON pi.project_id = p.id - INNER JOIN instrument i ON pi.instrument_id = i.id - WHERE pi.instrument_id IN (?) - AND NOT EXISTS ( - SELECT 1 FROM v_profile_project_roles ppr - WHERE ppr.profile_id = ? 
- AND (ppr.is_admin OR (ppr.project_id = pi.project_id AND ppr.role = 'ADMIN')) - ) - AND NOT i.deleted -` - -func (q *Queries) ValidateInstrumentsAssignerAuthorized(ctx context.Context, profileID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { - var v InstrumentsValidation - query, args, err := sqlIn(validateInstrumentsAssignerAuthorized, instrumentIDs, profileID) - if err != nil { - return v, err - } - var nn []struct { - ProjectName string `db:"project_name"` - InstrumentName string `db:"instrument_name"` - } - if err := q.db.SelectContext(ctx, &nn, q.db.Rebind(query), args...); err != nil { - return v, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Cannot assign instrument '%s' because is assigned to another project '%s' which the user is not an ADMIN of", - nn[idx].InstrumentName, nn[idx].ProjectName, - ) - } - v.Errors = vErrors - v.ReasonCode = Unauthorized - } else { - v.IsValid = true - v.Errors = make([]string, 0) - } - return v, err -} - -// case where service provides slice of project ids for single instrument -const validateProjectsAssignerAuthorized = ` - SELECT p.name - FROM project_instrument pi - INNER JOIN project p ON pi.project_id = p.id - INNER JOIN instrument i ON pi.instrument_id = i.id - WHERE pi.instrument_id = ? - AND pi.project_id IN (?) - AND NOT EXISTS ( - SELECT 1 FROM v_profile_project_roles ppr - WHERE profile_id = ? 
AND (ppr.is_admin OR (ppr.project_id = pi.project_id AND ppr.role = 'ADMIN')) - ) - AND NOT i.deleted - ORDER BY p.name -` - -func (q *Queries) ValidateProjectsAssignerAuthorized(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - var v InstrumentsValidation - query, args, err := sqlIn(validateProjectsAssignerAuthorized, instrumentID, projectIDs, profileID) - if err != nil { - return v, err - } - var nn []struct { - Name string `db:"name"` - } - if err := q.db.SelectContext(ctx, &nn, q.db.Rebind(query), args...); err != nil { - return v, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Cannot assign instrument to project '%s' because the user is not an ADMIN of this project", - nn[idx].Name, - ) - } - v.Errors = vErrors - v.ReasonCode = Unauthorized - } else { - v.IsValid = true - v.Errors = make([]string, 0) - } - return v, err -} diff --git a/api/internal/model/instrument_constant.go b/api/internal/model/instrument_constant.go deleted file mode 100644 index dbc4fbcc..00000000 --- a/api/internal/model/instrument_constant.go +++ /dev/null @@ -1,40 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -const listInstrumentConstants = ` - SELECT t.* FROM v_timeseries t - INNER JOIN instrument_constants ic ON ic.timeseries_id = t.id - WHERE ic.instrument_id = $1 -` - -// ListInstrumentConstants lists constants for a given instrument id -func (q *Queries) ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.SelectContext(ctx, &tt, listInstrumentConstants, instrumentID); err != nil { - return tt, err - } - return tt, nil -} - -const createInstrumentConstant = ` - INSERT INTO instrument_constants (instrument_id, timeseries_id) VALUES ($1, $2) -` - -func (q *Queries) CreateInstrumentConstant(ctx context.Context, instrumentID, 
timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createInstrumentConstant, instrumentID, timeseriesID) - return err -} - -const deleteInstrumentConstant = ` - DELETE FROM instrument_constants WHERE instrument_id = $1 AND timeseries_id = $2 -` - -func (q *Queries) DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteInstrumentConstant, instrumentID, timeseriesID) - return err -} diff --git a/api/internal/model/instrument_group.go b/api/internal/model/instrument_group.go deleted file mode 100644 index cbd257ee..00000000 --- a/api/internal/model/instrument_group.go +++ /dev/null @@ -1,182 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// InstrumentGroup holds information for entity instrument_group -type InstrumentGroup struct { - ID uuid.UUID `json:"id"` - Deleted bool `json:"-"` - Slug string `json:"slug"` - Name string `json:"name"` - Description string `json:"description"` - ProjectID *uuid.UUID `json:"project_id" db:"project_id"` - InstrumentCount int `json:"instrument_count" db:"instrument_count"` - TimeseriesCount int `json:"timeseries_count" db:"timeseries_count"` - AuditInfo -} - -// InstrumentGroupCollection is a collection of Instrument items -type InstrumentGroupCollection struct { - Items []InstrumentGroup -} - -// Shorten returns an instrument collection with individual objects limited to ID and Struct fields -func (c InstrumentGroupCollection) Shorten() IDSlugCollection { - ss := IDSlugCollection{Items: make([]IDSlug, 0)} - for _, n := range c.Items { - s := IDSlug{ID: n.ID, Slug: n.Slug} - ss.Items = append(ss.Items, s) - } - return ss -} - -// UnmarshalJSON implements UnmarshalJSON interface -// Allows unpacking object or array of objects into array of objects -func (c *InstrumentGroupCollection) UnmarshalJSON(b []byte) error { - switch 
util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var g InstrumentGroup - if err := json.Unmarshal(b, &g); err != nil { - return err - } - c.Items = []InstrumentGroup{g} - default: - c.Items = make([]InstrumentGroup, 0) - } - return nil -} - -const listInstrumentGroupsSQL = ` - SELECT - id, - slug, - name, - description, - creator, - create_date, - updater, - update_date, - project_id, - instrument_count, - timeseries_count - FROM v_instrument_group -` - -const listInstrumentGroups = listInstrumentGroupsSQL + ` - WHERE NOT deleted -` - -// ListInstrumentGroups returns a list of instrument groups -func (q *Queries) ListInstrumentGroups(ctx context.Context) ([]InstrumentGroup, error) { - gg := make([]InstrumentGroup, 0) - if err := q.db.SelectContext(ctx, &gg, listInstrumentGroups); err != nil { - return make([]InstrumentGroup, 0), err - } - return gg, nil -} - -const getInstrumentGroup = listInstrumentGroupsSQL + ` - WHERE id = $1 -` - -// GetInstrumentGroup returns a single instrument group -func (q *Queries) GetInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) (InstrumentGroup, error) { - var g InstrumentGroup - if err := q.db.GetContext(ctx, &g, getInstrumentGroup, instrumentGroupID); err != nil { - return g, err - } - return g, nil -} - -const createInstrumentGroup = ` - INSERT INTO instrument_group (slug, name, description, creator, create_date, project_id) - VALUES (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) - RETURNING id, slug, name, description, creator, create_date, updater, update_date, project_id -` - -func (q *Queries) CreateInstrumentGroup(ctx context.Context, group InstrumentGroup) (InstrumentGroup, error) { - var groupNew InstrumentGroup - err := q.db.GetContext( - ctx, &groupNew, createInstrumentGroup, - group.Name, group.Description, group.CreatorID, group.CreateDate, group.ProjectID, - ) - return groupNew, err -} - -const updateInstrumentGroup = ` 
- UPDATE instrument_group SET - name = $2, - deleted = $3, - description = $4, - updater = $5, - update_date = $6, - project_id = $7 - WHERE id = $1 - RETURNING * -` - -// UpdateInstrumentGroup updates an instrument group -func (q *Queries) UpdateInstrumentGroup(ctx context.Context, group InstrumentGroup) (InstrumentGroup, error) { - var groupUpdated InstrumentGroup - err := q.db.GetContext( - ctx, &groupUpdated, updateInstrumentGroup, - group.ID, group.Name, group.Deleted, group.Description, group.UpdaterID, group.UpdateDate, group.ProjectID, - ) - return groupUpdated, err -} - -const deleteFlagInstrumentGroup = ` - UPDATE instrument_group SET deleted = true WHERE id = $1 -` - -// DeleteFlagInstrumentGroup sets the deleted field to true -func (q *Queries) DeleteFlagInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteFlagInstrumentGroup, instrumentGroupID) - return err -} - -const listInstrumentGroupInstruments = ` - SELECT inst.* - FROM instrument_group_instruments igi - INNER JOIN (` + listInstrumentsSQL + `) inst ON igi.instrument_id = inst.id - WHERE igi.instrument_group_id = $1 and inst.deleted = false -` - -// ListInstrumentGroupInstruments returns a list of instrument group instruments for a given instrument -func (q *Queries) ListInstrumentGroupInstruments(ctx context.Context, groupID uuid.UUID) ([]Instrument, error) { - ii := make([]Instrument, 0) - if err := q.db.SelectContext(ctx, &ii, listInstrumentGroupInstruments, groupID); err != nil { - return nil, err - } - return ii, nil -} - -const createInstrumentGroupInstruments = ` - INSERT INTO instrument_group_instruments (instrument_group_id, instrument_id) VALUES ($1, $2) -` - -// CreateInstrumentGroupInstruments adds an instrument to an instrument group -func (q *Queries) CreateInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, 
createInstrumentGroupInstruments, instrumentGroupID, instrumentID) - return err -} - -const deleteInstrumentGroupInstruments = ` - DELETE FROM instrument_group_instruments WHERE instrument_group_id = $1 and instrument_id = $2 -` - -// DeleteInstrumentGroupInstruments adds an instrument to an instrument group -func (q *Queries) DeleteInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteInstrumentGroupInstruments, instrumentGroupID, instrumentID) - return err -} diff --git a/api/internal/model/instrument_ipi.go b/api/internal/model/instrument_ipi.go deleted file mode 100644 index d20f5481..00000000 --- a/api/internal/model/instrument_ipi.go +++ /dev/null @@ -1,141 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type IpiOpts struct { - InstrumentID uuid.UUID `json:"-" db:"instrument_id"` - NumSegments int `json:"num_segments" db:"num_segments"` - BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" db:"bottom_elevation_timeseries_id"` - BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` - InitialTime *time.Time `json:"initial_time" db:"initial_time"` -} - -type IpiSegment struct { - ID int `json:"id" db:"id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - Length *float64 `json:"length" db:"length"` - LengthTimeseriesID uuid.UUID `json:"length_timeseries_id" db:"length_timeseries_id"` - TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id" db:"tilt_timeseries_id"` - IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id" db:"inc_dev_timeseries_id"` - TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id" db:"temp_timeseries_id"` -} - -type IpiMeasurements struct { - InstrumentID uuid.UUID `json:"-" db:"instrument_id"` - Time time.Time `json:"time" db:"time"` - Measurements dbJSONSlice[IpiSegmentMeasurement] `json:"measurements" db:"measurements"` -} - -type 
IpiSegmentMeasurement struct { - SegmentID int `json:"segment_id" db:"segment_id"` - Tilt *float64 `json:"tilt" db:"tilt"` - IncDev *float64 `json:"inc_dev" db:"inc_dev"` - CumDev *float64 `json:"cum_dev" db:"cum_dev"` - Temp *float64 `json:"temp" db:"temp"` - Elelvation *float64 `json:"elevation" db:"elevation"` -} - -var ( - IpiParameterID = uuid.MustParse("a9a5ad45-b2e5-4744-816e-d3184f2c08bd") -) - -// TODO: when creating new timeseries, any depth based instruments should not be available for assignment - -const createIpiOpts = ` - INSERT INTO ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) - VALUES ($1, $2, $3, $4) -` - -func (q *Queries) CreateIpiOpts(ctx context.Context, instrumentID uuid.UUID, si IpiOpts) error { - _, err := q.db.ExecContext(ctx, createIpiOpts, instrumentID, si.NumSegments, si.BottomElevationTimeseriesID, si.InitialTime) - return err -} - -const updateIpiOpts = ` - UPDATE ipi_opts SET - bottom_elevation_timeseries_id = $2, - initial_time = $3 - WHERE instrument_id = $1 -` - -func (q *Queries) UpdateIpiOpts(ctx context.Context, instrumentID uuid.UUID, si IpiOpts) error { - _, err := q.db.ExecContext(ctx, updateIpiOpts, instrumentID, si.BottomElevationTimeseriesID, si.InitialTime) - return err -} - -const getAllIpiSegmentsForInstrument = ` - SELECT * FROM v_ipi_segment WHERE instrument_id = $1 -` - -func (q *Queries) GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]IpiSegment, error) { - ssi := make([]IpiSegment, 0) - err := q.db.SelectContext(ctx, &ssi, getAllIpiSegmentsForInstrument, instrumentID) - return ssi, err -} - -const createIpiSegment = ` - INSERT INTO ipi_segment ( - id, - instrument_id, - length_timeseries_id, - tilt_timeseries_id, - inc_dev_timeseries_id, - temp_timeseries_id - ) VALUES ($1, $2, $3, $4, $5, $6) -` - -func (q *Queries) CreateIpiSegment(ctx context.Context, seg IpiSegment) error { - _, err := q.db.ExecContext(ctx, createIpiSegment, - seg.ID, - 
seg.InstrumentID, - seg.LengthTimeseriesID, - seg.TiltTimeseriesID, - seg.IncDevTimeseriesID, - seg.TempTimeseriesID, - ) - return err -} - -const updateIpiSegment = ` - UPDATE ipi_segment SET - length_timeseries_id = $3, - tilt_timeseries_id = $4, - inc_dev_timeseries_id = $5, - temp_timeseries_id = $6 - WHERE id = $1 AND instrument_id = $2 -` - -func (q *Queries) UpdateIpiSegment(ctx context.Context, seg IpiSegment) error { - _, err := q.db.ExecContext(ctx, updateIpiSegment, - seg.ID, - seg.InstrumentID, - seg.LengthTimeseriesID, - seg.TiltTimeseriesID, - seg.IncDevTimeseriesID, - seg.TempTimeseriesID, - ) - return err -} - -const getIpiMeasurementsForInstrument = ` - SELECT instrument_id, time, measurements - FROM v_ipi_measurement - WHERE instrument_id = $1 AND time >= $2 AND time <= $3 - UNION - SELECT instrument_id, time, measurements - FROM v_ipi_measurement - WHERE time IN (SELECT initial_time FROM ipi_opts WHERE instrument_id = $1) - AND instrument_id = $1 - ORDER BY time ASC -` - -func (q *Queries) GetIpiMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw TimeWindow) ([]IpiMeasurements, error) { - mm := make([]IpiMeasurements, 0) - err := q.db.SelectContext(ctx, &mm, getIpiMeasurementsForInstrument, instrumentID, tw.After, tw.Before) - return mm, err -} diff --git a/api/internal/model/instrument_note.go b/api/internal/model/instrument_note.go deleted file mode 100644 index 582ff1f7..00000000 --- a/api/internal/model/instrument_note.go +++ /dev/null @@ -1,134 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "time" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// InstrumentNote is a note about an instrument -type InstrumentNote struct { - ID uuid.UUID `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - Title string `json:"title"` - Body string `json:"body"` - Time time.Time `json:"time"` - AuditInfo -} - -// InstrumentNoteCollection is a 
collection of Instrument Notes -type InstrumentNoteCollection struct { - Items []InstrumentNote -} - -// UnmarshalJSON implements UnmarshalJSON interface -// Allows unpacking object or array of objects into array of objects -func (c *InstrumentNoteCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var n InstrumentNote - if err := json.Unmarshal(b, &n); err != nil { - return err - } - c.Items = []InstrumentNote{n} - default: - c.Items = make([]InstrumentNote, 0) - } - return nil -} - -const listInstrumentNotes = ` - SELECT - N.id AS id, - N.instrument_id AS instrument_id, - N.title, - N.body, - N.time, - N.creator, - N.create_date, - N.updater, - N.update_date - FROM instrument_note N -` - -// ListInstrumentNotes returns an array of instruments from the database -func (q *Queries) ListInstrumentNotes(ctx context.Context) ([]InstrumentNote, error) { - nn := make([]InstrumentNote, 0) - if err := q.db.SelectContext(ctx, &nn, listInstrumentNotes); err != nil { - return nil, err - } - return nn, nil -} - -const listInstrumentInstrumentNotes = listInstrumentNotes + ` - WHERE N.instrument_id = $1 -` - -// ListInstrumentInstrumentNotes returns an array of instrument notes for a given instrument -func (q *Queries) ListInstrumentInstrumentNotes(ctx context.Context, instrumentID uuid.UUID) ([]InstrumentNote, error) { - nn := make([]InstrumentNote, 0) - if err := q.db.SelectContext(ctx, &nn, listInstrumentInstrumentNotes, instrumentID); err != nil { - return nil, err - } - return nn, nil -} - -const getInstrumentNotes = listInstrumentNotes + ` - WHERE N.id = $1 -` - -// GetInstrumentNote returns a single instrument note -func (q *Queries) GetInstrumentNote(ctx context.Context, noteID uuid.UUID) (InstrumentNote, error) { - var n InstrumentNote - if err := q.db.GetContext(ctx, &n, getInstrumentNotes, noteID); err != nil { - return n, err - } - return n, 
nil -} - -const createInstrumentNote = ` - INSERT INTO instrument_note (instrument_id, title, body, time, creator, create_date) - VALUES ($1, $2, $3, $4, $5, $6) - RETURNING id, instrument_id, title, body, time, creator, create_date, updater, update_date -` - -func (q *Queries) CreateInstrumentNote(ctx context.Context, note InstrumentNote) (InstrumentNote, error) { - var noteNew InstrumentNote - err := q.db.GetContext(ctx, ¬eNew, createInstrumentNote, note.InstrumentID, note.Title, note.Body, note.Time, note.CreatorID, note.CreateDate) - return noteNew, err -} - -const updateInstrumentNote = ` - UPDATE instrument_note SET - title = $2, - body = $3, - time = $4, - updater = $5, - update_date = $6 - WHERE id = $1 - RETURNING id, instrument_id, title, body, time, creator, create_date, updater, update_date -` - -// UpdateInstrumentNote updates a single instrument note -func (q *Queries) UpdateInstrumentNote(ctx context.Context, n InstrumentNote) (InstrumentNote, error) { - var nUpdated InstrumentNote - err := q.db.GetContext(ctx, &nUpdated, updateInstrumentNote, n.ID, n.Title, n.Body, n.Time, n.UpdaterID, n.UpdateDate) - return nUpdated, err -} - -const deleteInstrumentNote = ` - DELETE FROM instrument_note WHERE id = $1 -` - -// DeleteInstrumentNote deletes an instrument note -func (q *Queries) DeleteInstrumentNote(ctx context.Context, noteID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteInstrumentNote, noteID) - return err -} diff --git a/api/internal/model/instrument_saa.go b/api/internal/model/instrument_saa.go deleted file mode 100644 index b461670a..00000000 --- a/api/internal/model/instrument_saa.go +++ /dev/null @@ -1,154 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type SaaOpts struct { - InstrumentID uuid.UUID `json:"-" db:"instrument_id"` - NumSegments int `json:"num_segments" db:"num_segments"` - BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" 
db:"bottom_elevation_timeseries_id"` - BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` - InitialTime *time.Time `json:"initial_time" db:"initial_time"` -} - -type SaaSegment struct { - ID int `json:"id" db:"id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - Length *float64 `json:"length" db:"length"` - LengthTimeseriesID uuid.UUID `json:"length_timeseries_id" db:"length_timeseries_id"` - XTimeseriesID *uuid.UUID `json:"x_timeseries_id" db:"x_timeseries_id"` - YTimeseriesID *uuid.UUID `json:"y_timeseries_id" db:"y_timeseries_id"` - ZTimeseriesID *uuid.UUID `json:"z_timeseries_id" db:"z_timeseries_id"` - TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id" db:"temp_timeseries_id"` -} - -type SaaMeasurements struct { - InstrumentID uuid.UUID `json:"-" db:"instrument_id"` - Time time.Time `json:"time" db:"time"` - Measurements dbJSONSlice[SaaSegmentMeasurement] `json:"measurements" db:"measurements"` -} - -type SaaSegmentMeasurement struct { - SegmentID int `json:"segment_id" db:"segment_id"` - X *float64 `json:"x" db:"x"` - Y *float64 `json:"y" db:"y"` - Z *float64 `json:"z" db:"z"` - Temp *float64 `json:"temp" db:"temp"` - XIncrement *float64 `json:"x_increment" db:"x_increment"` - YIncrement *float64 `json:"y_increment" db:"y_increment"` - ZIncrement *float64 `json:"z_increment" db:"z_increment"` - TempIncrement *float64 `json:"temp_increment" db:"temp_increment"` - XCumDev *float64 `json:"x_cum_dev" db:"x_cum_dev"` - YCumDev *float64 `json:"y_cum_dev" db:"y_cum_dev"` - ZCumDev *float64 `json:"z_cum_dev" db:"z_cum_dev"` - TempCumDev *float64 `json:"temp_cum_dev" db:"temp_cum_dev"` - Elevation *float64 `json:"elevation" db:"elevation"` -} - -var ( - SaaParameterID = uuid.MustParse("6d12ca4c-b618-41cd-87a2-a248980a0d69") -) - -// TODO: when creating new timeseries, any depth based instruments should not be available for assignment - -const createSaaOpts = ` - INSERT INTO saa_opts (instrument_id, num_segments, 
bottom_elevation_timeseries_id, initial_time) - VALUES ($1, $2, $3, $4) -` - -func (q *Queries) CreateSaaOpts(ctx context.Context, instrumentID uuid.UUID, si SaaOpts) error { - _, err := q.db.ExecContext(ctx, createSaaOpts, instrumentID, si.NumSegments, si.BottomElevationTimeseriesID, si.InitialTime) - return err -} - -const updateSaaOpts = ` - UPDATE saa_opts SET - bottom_elevation_timeseries_id = $2, - initial_time = $3 - WHERE instrument_id = $1 -` - -func (q *Queries) UpdateSaaOpts(ctx context.Context, instrumentID uuid.UUID, si SaaOpts) error { - _, err := q.db.ExecContext(ctx, updateSaaOpts, instrumentID, si.BottomElevationTimeseriesID, si.InitialTime) - return err -} - -const getAllSaaSegmentsForInstrument = ` - SELECT * FROM v_saa_segment WHERE instrument_id = $1 -` - -func (q *Queries) GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]SaaSegment, error) { - ssi := make([]SaaSegment, 0) - err := q.db.SelectContext(ctx, &ssi, getAllSaaSegmentsForInstrument, instrumentID) - return ssi, err -} - -const createSaaSegment = ` - INSERT INTO saa_segment ( - id, - instrument_id, - length_timeseries_id, - x_timeseries_id, - y_timeseries_id, - z_timeseries_id, - temp_timeseries_id - ) VALUES ($1, $2, $3, $4, $5, $6, $7) -` - -func (q *Queries) CreateSaaSegment(ctx context.Context, seg SaaSegment) error { - _, err := q.db.ExecContext(ctx, createSaaSegment, - seg.ID, - seg.InstrumentID, - seg.LengthTimeseriesID, - seg.XTimeseriesID, - seg.YTimeseriesID, - seg.ZTimeseriesID, - seg.TempTimeseriesID, - ) - return err -} - -const updateSaaSegment = ` - UPDATE saa_segment SET - length_timeseries_id = $3, - x_timeseries_id = $4, - y_timeseries_id = $5, - z_timeseries_id = $6, - temp_timeseries_id = $7 - WHERE id = $1 AND instrument_id = $2 -` - -func (q *Queries) UpdateSaaSegment(ctx context.Context, seg SaaSegment) error { - _, err := q.db.ExecContext(ctx, updateSaaSegment, - seg.ID, - seg.InstrumentID, - seg.LengthTimeseriesID, - 
seg.XTimeseriesID, - seg.YTimeseriesID, - seg.ZTimeseriesID, - seg.TempTimeseriesID, - ) - return err -} - -const getSaaMeasurementsForInstrument = ` - SELECT instrument_id, time, measurements - FROM v_saa_measurement - WHERE instrument_id = $1 AND time >= $2 AND time <= $3 - UNION - SELECT instrument_id, time, measurements - FROM v_saa_measurement - WHERE time IN (SELECT initial_time FROM saa_opts WHERE instrument_id = $1) - AND instrument_id = $1 - ORDER BY time ASC -` - -func (q *Queries) GetSaaMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw TimeWindow) ([]SaaMeasurements, error) { - mm := make([]SaaMeasurements, 0) - err := q.db.SelectContext(ctx, &mm, getSaaMeasurementsForInstrument, instrumentID, tw.After, tw.Before) - return mm, err -} diff --git a/api/internal/model/instrument_status.go b/api/internal/model/instrument_status.go deleted file mode 100644 index 9e260d45..00000000 --- a/api/internal/model/instrument_status.go +++ /dev/null @@ -1,100 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "time" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// InstrumentStatus is an instrument status -type InstrumentStatus struct { - ID uuid.UUID `json:"id"` - Time time.Time `json:"time"` - StatusID uuid.UUID `json:"status_id" db:"status_id"` - Status string `json:"status"` -} - -// InstrumentStatusCollection is a collection of instrument status -type InstrumentStatusCollection struct { - Items []InstrumentStatus -} - -// UnmarshalJSON implements the UnmarshalJSONinterface -func (c *InstrumentStatusCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var s InstrumentStatus - if err := json.Unmarshal(b, &s); err != nil { - return err - } - c.Items = []InstrumentStatus{s} - default: - c.Items = make([]InstrumentStatus, 0) - } - return nil -} - -const 
listInstrumentStatusSQL = ` - SELECT - S.id, - S.status_id, - D.name AS status, - S.time - FROM instrument_status S - INNER JOIN status D - ON D.id = S.status_id -` - -const listInstrumentStatus = listInstrumentStatusSQL + ` - WHERE S.instrument_id = $1 ORDER BY time DESC -` - -// ListInstrumentStatus returns all status values for an instrument -func (q *Queries) ListInstrumentStatus(ctx context.Context, instrumentID uuid.UUID) ([]InstrumentStatus, error) { - ss := make([]InstrumentStatus, 0) - if err := q.db.SelectContext(ctx, &ss, listInstrumentStatus, instrumentID); err != nil { - return nil, err - } - return ss, nil -} - -const getInstrumentStatus = listInstrumentStatusSQL + ` - WHERE S.id = $1 -` - -// GetInstrumentStatus gets a single status -func (q *Queries) GetInstrumentStatus(ctx context.Context, statusID uuid.UUID) (InstrumentStatus, error) { - var s InstrumentStatus - if err := q.db.GetContext(ctx, &s, getInstrumentStatus, statusID); err != nil { - return s, err - } - return s, nil -} - -const createOrUpdateInstrumentStatus = ` - INSERT INTO instrument_status (instrument_id, status_id, time) VALUES ($1, $2, $3) - ON CONFLICT ON CONSTRAINT instrument_unique_status_in_time DO UPDATE SET status_id = EXCLUDED.status_id -` - -// CreateOrUpdateInstrumentStatus creates a Instrument Status, updates value on conflict -func (q *Queries) CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID, statusID uuid.UUID, statusTime time.Time) error { - _, err := q.db.ExecContext(ctx, createOrUpdateInstrumentStatus, instrumentID, statusID, statusTime) - return err -} - -const deleteInstrumentStatus = ` - DELETE FROM instrument_status WHERE id = $1 -` - -// DeleteInstrumentStatus deletes a status for an instrument -func (q *Queries) DeleteInstrumentStatus(ctx context.Context, statusID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteInstrumentStatus, statusID) - return err -} diff --git a/api/internal/model/measurement.go 
b/api/internal/model/measurement.go deleted file mode 100644 index 18029e7e..00000000 --- a/api/internal/model/measurement.go +++ /dev/null @@ -1,344 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "math" - "strings" - "time" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// TimeseriesMeasurementCollectionCollection is a collection of timeseries measurement collections -// i.e an array of structs, each containing timeseries measurements not necessarily from the same time series -type TimeseriesMeasurementCollectionCollection struct { - Items []MeasurementCollection -} - -// TimeseriesIDs returns a slice of all timeseries IDs contained in the MeasurementCollectionCollection -func (cc *TimeseriesMeasurementCollectionCollection) TimeseriesIDs() map[uuid.UUID]struct{} { - dd := make(map[uuid.UUID]struct{}) - for _, item := range cc.Items { - dd[item.TimeseriesID] = struct{}{} - } - return dd -} - -// UnmarshalJSON implements UnmarshalJSON interface -func (cc *TimeseriesMeasurementCollectionCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &cc.Items); err != nil { - return err - } - case "OBJECT": - var mc MeasurementCollection - if err := json.Unmarshal(b, &mc); err != nil { - return err - } - cc.Items = []MeasurementCollection{mc} - default: - cc.Items = make([]MeasurementCollection, 0) - } - return nil -} - -// Measurement is a time and value associated with a timeseries -type Measurement struct { - TimeseriesID uuid.UUID `json:"-" db:"timeseries_id"` - Time time.Time `json:"time"` - Value FloatNanInf `json:"value"` - Error string `json:"error,omitempty"` - TimeseriesNote -} - -type FloatNanInf float64 - -func (j FloatNanInf) MarshalJSON() ([]byte, error) { - if math.IsNaN(float64(j)) || math.IsInf(float64(j), 0) { - return []byte("null"), nil - } - - return []byte(fmt.Sprintf("%f", float64(j))), nil -} - -func (j *FloatNanInf) 
UnmarshalJSON(v []byte) error { - switch strings.ToLower(string(v)) { - case `"nan"`, "nan", "", "null", "undefined": - *j = FloatNanInf(math.NaN()) - case `"inf"`, "inf": - *j = FloatNanInf(math.Inf(1)) - default: - var fv float64 - if err := json.Unmarshal(v, &fv); err != nil { - *j = FloatNanInf(math.NaN()) - return nil - } - *j = FloatNanInf(fv) - } - return nil -} - -// MeasurementLean is the minimalist representation of a timeseries measurement -// a key value pair where key is the timestamp, value is the measurement { : } -type MeasurementLean map[time.Time]float64 - -// MeasurementCollection is a collection of timeseries measurements -type MeasurementCollection struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Items []Measurement `json:"items"` -} - -// MeasurementCollectionLean uses a minimalist representation of a timeseries measurement -type MeasurementCollectionLean struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Items []MeasurementLean `json:"items"` -} - -type MeasurementGetter interface { - getTime() time.Time - getValue() float64 -} - -func (m Measurement) getTime() time.Time { - return m.Time -} - -func (m Measurement) getValue() float64 { - return float64(m.Value) -} - -// Should only ever be one -func (ml MeasurementLean) getTime() time.Time { - var t time.Time - for k := range ml { - t = k - } - return t -} - -// Should only ever be one -func (ml MeasurementLean) getValue() float64 { - var m float64 - for _, v := range ml { - m = v - } - return m -} - -const ( - createTimeseriesMeasurementSQL = ` - INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES ($1, $2, $3) - ` - createTimeseriesNoteSQL = ` - INSERT INTO timeseries_notes (timeseries_id, time, masked, validated, annotation) VALUES ($1, $2, $3, $4, $5) - ` -) - -const listTimeseriesMeasurements = ` - SELECT - m.timeseries_id, - m.time, - m.value, - n.masked, - n.validated, - n.annotation - FROM timeseries_measurement 
m - LEFT JOIN timeseries_notes n ON m.timeseries_id = n.timeseries_id AND m.time = n.time - INNER JOIN timeseries t ON t.id = m.timeseries_id - WHERE t.id = $1 AND m.time > $2 AND m.time < $3 ORDER BY m.time ASC -` - -// ListTimeseriesMeasurements returns a stored timeseries with slice of timeseries measurements populated -func (q *Queries) ListTimeseriesMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw TimeWindow, threshold int) (*MeasurementCollection, error) { - items := make([]Measurement, 0) - if err := q.db.SelectContext(ctx, &items, listTimeseriesMeasurements, timeseriesID, tw.After, tw.Before); err != nil { - return nil, err - } - return &MeasurementCollection{TimeseriesID: timeseriesID, Items: LTTB(items, threshold)}, nil -} - -const deleteTimeseriesMeasurements = ` - DELETE FROM timeseries_measurement WHERE timeseries_id = $1 and time = $2 -` - -// DeleteTimeserieMeasurements deletes a timeseries Measurement -func (q *Queries) DeleteTimeserieMeasurements(ctx context.Context, timeseriesID uuid.UUID, time time.Time) error { - _, err := q.db.ExecContext(ctx, deleteTimeseriesMeasurements, timeseriesID, time) - return err -} - -const getTimeseriesConstantMeasurement = ` - SELECT - M.timeseries_id, - M.time, - M.value - FROM timeseries_measurement M - INNER JOIN v_timeseries_stored T ON T.id = M.timeseries_id - INNER JOIN parameter P ON P.id = T.parameter_id - WHERE T.instrument_id IN ( - SELECT instrument_id - FROM v_timeseries_stored T - WHERE t.id= $1 - ) - AND P.name = $2 -` - -// GetTimeseriesConstantMeasurement returns a constant timeseries measurement for the same instrument by constant name -func (q *Queries) GetTimeseriesConstantMeasurement(ctx context.Context, timeseriesID uuid.UUID, constantName string) (Measurement, error) { - var m Measurement - ms := make([]Measurement, 0) - if err := q.db.Select(&ms, getTimeseriesConstantMeasurement, timeseriesID, constantName); err != nil { - return m, err - } - if len(ms) > 0 { - m = ms[0] - } - 
return m, nil -} - -const createTimeseriesMeasruement = createTimeseriesMeasurementSQL + ` - ON CONFLICT ON CONSTRAINT timeseries_unique_time DO NOTHING -` - -func (q *Queries) CreateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error { - _, err := q.db.ExecContext(ctx, createTimeseriesMeasruement, timeseriesID, t, value) - return err -} - -const createOrUpdateTimeseriesMeasurement = createTimeseriesMeasurementSQL + ` - ON CONFLICT ON CONSTRAINT timeseries_unique_time DO UPDATE SET value = EXCLUDED.value -` - -func (q *Queries) CreateOrUpdateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error { - _, err := q.db.ExecContext(ctx, createOrUpdateTimeseriesMeasurement, timeseriesID, t, value) - return err -} - -const createTimeseriesNote = createTimeseriesNoteSQL + ` - ON CONFLICT ON CONSTRAINT notes_unique_time DO NOTHING -` - -func (q *Queries) CreateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n TimeseriesNote) error { - _, err := q.db.ExecContext(ctx, createTimeseriesNote, timeseriesID, t, n.Masked, n.Validated, n.Annotation) - return err -} - -const createOrUpdateTimeseriesNote = createTimeseriesNoteSQL + ` - ON CONFLICT ON CONSTRAINT notes_unique_time DO UPDATE SET masked = EXCLUDED.masked, validated = EXCLUDED.validated, annotation = EXCLUDED.annotation -` - -func (q *Queries) CreateOrUpdateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n TimeseriesNote) error { - _, err := q.db.ExecContext(ctx, createOrUpdateTimeseriesNote, timeseriesID, t, n.Masked, n.Validated, n.Annotation) - return err -} - -const deleteTimeseriesMeasurement = ` - DELETE FROM timeseries_measurement WHERE timeseries_id = $1 AND time = $2 -` - -func (q *Queries) DeleteTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time) error { - _, err := q.db.ExecContext(ctx, deleteTimeseriesMeasurementsRange, timeseriesID, t) - 
return err -} - -const deleteTimeseriesMeasurementsRange = ` - DELETE FROM timeseries_measurement WHERE timeseries_id = $1 AND time > $2 AND time < $3 -` - -func (q *Queries) DeleteTimeseriesMeasurementsByRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error { - _, err := q.db.ExecContext(ctx, deleteTimeseriesMeasurementsRange, timeseriesID, start, end) - return err -} - -const deleteTimeseriesNote = ` - DELETE FROM timeseries_notes WHERE timeseries_id = $1 AND time > $2 AND time < $3 -` - -func (q *Queries) DeleteTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error { - _, err := q.db.ExecContext(ctx, deleteTimeseriesNote, timeseriesID, start, end) - return err -} - -// A slightly modified LTTB (Largest-Triange-Three-Buckets) algorithm for downsampling timeseries measurements -// https://godoc.org/github.com/dgryski/go-lttb -func LTTB[T MeasurementGetter](data []T, threshold int) []T { - if threshold == 0 || threshold >= len(data) { - return data // Nothing to do - } - - if threshold < 3 { - threshold = 3 - } - - sampled := make([]T, 0, threshold) - - // Bucket size. 
Leave room for start and end data points - every := float64(len(data)-2) / float64(threshold-2) - - sampled = append(sampled, data[0]) // Always add the first point - - bucketStart := 1 - bucketCenter := int(math.Floor(every)) + 1 - - var a int - - for i := 0; i < threshold-2; i++ { - - bucketEnd := int(math.Floor(float64(i+2)*every)) + 1 - - // Calculate point average for next bucket (containing c) - avgRangeStart := bucketCenter - avgRangeEnd := bucketEnd - - if avgRangeEnd >= len(data) { - avgRangeEnd = len(data) - } - - avgRangeLength := float64(avgRangeEnd - avgRangeStart) - - var avgX, avgY float64 - for ; avgRangeStart < avgRangeEnd; avgRangeStart++ { - avgX += time.Duration(data[avgRangeStart].getTime().Unix()).Seconds() - avgY += data[avgRangeStart].getValue() - } - avgX /= avgRangeLength - avgY /= avgRangeLength - - // Get the range for this bucket - rangeOffs := bucketStart - rangeTo := bucketCenter - - // Point a - pointAX := time.Duration(data[a].getTime().UnixNano()).Seconds() - pointAY := data[a].getValue() - - maxArea := float64(-1.0) - - var nextA int - for ; rangeOffs < rangeTo; rangeOffs++ { - // Calculate triangle area over three buckets - area := (pointAX-avgX)*(data[rangeOffs].getValue()-pointAY) - (pointAX-time.Duration(data[rangeOffs].getTime().Unix()).Seconds())*(avgY-pointAY) - // We only care about the relative area here. 
- // Calling math.Abs() is slower than squaring - area *= area - if area > maxArea { - maxArea = area - nextA = rangeOffs // Next a is this b - } - } - - sampled = append(sampled, data[nextA]) // Pick this point from the bucket - a = nextA // This a is the next a (chosen b) - - bucketStart = bucketCenter - bucketCenter = bucketEnd - } - - sampled = append(sampled, data[len(data)-1]) // Always add last - - return sampled -} diff --git a/api/internal/model/measurement_inclinometer.go b/api/internal/model/measurement_inclinometer.go deleted file mode 100644 index 7f7727b1..00000000 --- a/api/internal/model/measurement_inclinometer.go +++ /dev/null @@ -1,213 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "time" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" - "github.com/jmoiron/sqlx/types" -) - -// Inclinometer Measurement is a time and values associated with a timeseries -type InclinometerMeasurement struct { - TimeseriesID uuid.UUID `json:"-" db:"timeseries_id"` - Time time.Time `json:"time"` - Values types.JSONText `json:"values"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date" db:"create_date"` -} - -// Values associated with a inclinometer measurement -type InclinometerMeasurementValues struct { - Depth int `json:"depth" db:"depth"` - A0 float32 `json:"a0" db:"a0"` - A180 float32 `json:"a180" db:"a180"` - B0 float32 `json:"b0" db:"b0"` - B180 float32 `json:"b180" db:"b180"` - AChecksum float32 `json:"aChecksum" db:"a_checksum"` - AComb float32 `json:"aComb" db:"a_comb"` - AIncrement float32 `json:"aIncrement" db:"a_increment"` - ACumDev float32 `json:"aCumDev" db:"a_cum_dev"` - BChecksum float32 `json:"bChecksum" db:"b_checksum"` - BComb float32 `json:"bComb" db:"b_comb"` - BIncrement float32 `json:"bIncrement" db:"b_increment"` - BCumDev float32 `json:"bCumDev" db:"b_cum_dev"` -} - -// InclinometerMeasurementLean is the minimalist representation of a timeseries 
measurement -// a key value pair where key is the timestamp, value is the measurement { : } -type InclinometerMeasurementLean map[time.Time]types.JSONText - -// InclinometerMeasurementCollection is a collection of Inclinometer measurements -type InclinometerMeasurementCollection struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Inclinometers []InclinometerMeasurement `json:"inclinometers"` -} - -// InclinometerMeasurementCollectionLean uses a minimalist representation of a Inclinometer timeseries measurement -type InclinometerMeasurementCollectionLean struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Items []InclinometerMeasurementLean `json:"items"` -} - -// InclinometerMeasurementCollectionCollection is a collection of inclinometer measurement collections -// i.e an array of structs, each containing inclinometer measurements not necessarily from the same time series -type InclinometerMeasurementCollectionCollection struct { - Items []InclinometerMeasurementCollection -} - -// InclinometerTimeseriesIDs returns a slice of all timeseries IDs contained in the InclinometerMeasurementCollectionCollection -func (cc *InclinometerMeasurementCollectionCollection) TimeseriesIDs() map[uuid.UUID]struct{} { - dd := make(map[uuid.UUID]struct{}) - for _, item := range cc.Items { - dd[item.TimeseriesID] = struct{}{} - } - return dd -} - -// UnmarshalJSON implements UnmarshalJSON interface -func (cc *InclinometerMeasurementCollectionCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &cc.Items); err != nil { - return err - } - case "OBJECT": - var mc InclinometerMeasurementCollection - if err := json.Unmarshal(b, &mc); err != nil { - return err - } - cc.Items = []InclinometerMeasurementCollection{mc} - default: - cc.Items = make([]InclinometerMeasurementCollection, 0) - } - return nil -} - -const listInclinometerMeasurements = ` - SELECT M.timeseries_id, - 
M.time, - M.creator, - M.create_date - FROM inclinometer_measurement M - INNER JOIN timeseries T - ON T.id = M.timeseries_id - WHERE T.id = $1 AND M.time > $2 AND M.time < $3 ORDER BY M.time DESC -` - -// ListInclinometersMeasurements returns a timeseries with slice of inclinometer measurements populated -func (q *Queries) ListInclinometerMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw TimeWindow) (*InclinometerMeasurementCollection, error) { - mc := InclinometerMeasurementCollection{TimeseriesID: timeseriesID} - if err := q.db.SelectContext(ctx, &mc.Inclinometers, listInclinometerMeasurements, timeseriesID, tw.After, tw.Before); err != nil { - return nil, err - } - return &mc, nil -} - -func listInclinometerMeasurementsValues(inclinometerConstant string) string { - if inclinometerConstant == "0" { - return ` - select items.depth, - items.a0, - items.a180, - items.b0, - items.b180, - (items.a0 + items.a180) AS a_checksum, - (items.a0 - items.a180)/2 AS a_comb, - 0 AS a_increment, - 0 AS a_cum_dev, - (items.b0 + items.b180) AS b_checksum, - (items.b0 - items.b180)/2 AS b_comb, - 0 AS b_increment, - 0 AS b_cum_dev - from inclinometer_measurement, jsonb_to_recordset(inclinometer_measurement.values) as items(depth int, a0 real, a180 real, b0 real, b180 real) - ` - } else { - return fmt.Sprintf(` - select items.depth, - items.a0, - items.a180, - items.b0, - items.b180, - (items.a0 + items.a180) AS a_checksum, - (items.a0 - items.a180)/2 AS a_comb, - (items.a0 - items.a180) / 2 / %s * 24 AS a_increment, - SUM((items.a0 - items.a180) / 2 / %s * 24) OVER (ORDER BY depth desc) AS a_cum_dev, - (items.b0 + items.b180) AS b_checksum, - (items.b0 - items.b180)/2 AS b_comb, - (items.b0 - items.b180) / 2 / %s * 24 AS b_increment, - SUM((items.b0 - items.b180) / 2 / %s * 24) OVER (ORDER BY depth desc) AS b_cum_dev - from inclinometer_measurement, jsonb_to_recordset(inclinometer_measurement.values) as items(depth int, a0 real, a180 real, b0 real, b180 real) - `, 
inclinometerConstant, inclinometerConstant, inclinometerConstant, inclinometerConstant) - } -} - -func (q *Queries) ListInclinometerMeasurementValues(ctx context.Context, timeseriesID uuid.UUID, time time.Time, inclConstant float64) ([]*InclinometerMeasurementValues, error) { - constant := fmt.Sprintf("%.0f", inclConstant) - v := []*InclinometerMeasurementValues{} - if err := q.db.SelectContext(ctx, &v, listInclinometerMeasurementsValues(constant)+" WHERE timeseries_id = $1 AND time = $2 ORDER BY depth", timeseriesID, time); err != nil { - return nil, err - } - return v, nil -} - -const deleteInclinometerMeasurement = ` - DELETE FROM inclinometer_measurement WHERE timeseries_id = $1 and time = $2 -` - -// DeleteInclinometerMeasurements deletes a inclinometer Measurement -func (q *Queries) DeleteInclinometerMeasurement(ctx context.Context, timeseriesID uuid.UUID, time time.Time) error { - _, err := q.db.ExecContext(ctx, deleteInclinometerMeasurement, timeseriesID, time) - return err -} - -const createOrUpdateInclinometerMeasurement = ` - INSERT INTO inclinometer_measurement (timeseries_id, time, values, creator, create_date) VALUES ($1, $2, $3, $4, $5) - ON CONFLICT ON CONSTRAINT inclinometer_unique_time DO UPDATE SET values = EXCLUDED.values; -` - -// CreateInclinometerMeasurements creates many inclinometer from an array of inclinometer -// If a inclinometer measurement already exists for a given timeseries_id and time, the values is updated -func (q *Queries) CreateOrUpdateInclinometerMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, values types.JSONText, profileID uuid.UUID, createDate time.Time) error { - _, err := q.db.ExecContext(ctx, createOrUpdateInclinometerMeasurement, timeseriesID, t, values, profileID, createDate) - return err -} - -const listInstrumentIDsFromTimeseriesID = ` - SELECT instrument_id FROM v_timeseries_stored WHERE id= $1 -` - -func (q *Queries) ListInstrumentIDsFromTimeseriesID(ctx context.Context, timeseriesID 
uuid.UUID) ([]uuid.UUID, error) { - instrumentIDs := make([]uuid.UUID, 0) - if err := q.db.SelectContext(ctx, &instrumentIDs, listInstrumentIDsFromTimeseriesID, timeseriesID); err != nil { - return nil, err - } - return instrumentIDs, nil -} - -const listParameterIDsFromParameterName = ` - SELECT id FROM parameter WHERE name = $1 -` - -func (q *Queries) ListParameterIDsFromParameterName(ctx context.Context, parameterName string) ([]uuid.UUID, error) { - parameterIDs := make([]uuid.UUID, 0) - if err := q.db.SelectContext(ctx, ¶meterIDs, listParameterIDsFromParameterName, parameterName); err != nil { - return nil, err - } - return parameterIDs, nil -} - -const listUnitIDsFromUnitName = ` - SELECT id FROM unit WHERE name = $1 -` - -func (q *Queries) ListUnitIDsFromUnitName(ctx context.Context, unitName string) ([]uuid.UUID, error) { - unitIDs := make([]uuid.UUID, 0) - if err := q.db.SelectContext(ctx, &unitIDs, listUnitIDsFromUnitName, unitName); err != nil { - return nil, err - } - return unitIDs, nil -} diff --git a/api/internal/model/opendcs.go b/api/internal/model/opendcs.go deleted file mode 100644 index 2f0366ab..00000000 --- a/api/internal/model/opendcs.go +++ /dev/null @@ -1,48 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// Site is an instrument, represented as an OpenDCS Site -type Site struct { - Elevation string `xml:"Elevation"` - ElevationUnits string `xml:"ElevationUnits"` - Description string `xml:"Description"` - SiteName SiteName `xml:"SiteName"` -} - -// SiteName is SiteName -type SiteName struct { - ID uuid.UUID `xml:",chardata"` - NameType string `xml:",attr"` -} - -// AsSite returns an instrument represented as an OpenDCS Site -func (n *Instrument) AsSite() Site { - return Site{ - Elevation: "", - ElevationUnits: "", - Description: n.Name, - SiteName: SiteName{ - ID: n.ID, - NameType: "uuid", - }, - } -} - -// ListOpendcsSites returns an array of instruments from the database -// And formats them as OpenDCS 
Sites -func (q *Queries) ListOpendcsSites(ctx context.Context) ([]Site, error) { - nn, err := q.ListInstruments(ctx) - if err != nil { - return make([]Site, 0), err - } - ss := make([]Site, len(nn)) - for idx := range nn { - ss[idx] = nn[idx].AsSite() - } - return ss, nil -} diff --git a/api/internal/model/plot_config.go b/api/internal/model/plot_config.go deleted file mode 100644 index 695b5d94..00000000 --- a/api/internal/model/plot_config.go +++ /dev/null @@ -1,167 +0,0 @@ -package model - -import ( - "context" - "fmt" - "strings" - "time" - - "github.com/google/uuid" -) - -const ( - ScatterLinePlotType = "scatter-line" - ProfilePlotType = "profile" - ContourPlotType = "contour" - BullseyePlotType = "bullseye" -) - -type PlotConfig struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Slug string `json:"slug"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ReportConfigs dbJSONSlice[IDSlugName] `json:"report_configs" db:"report_configs"` - PlotType string `json:"plot_type" db:"plot_type"` - Display Opts `json:"display" db:"display"` - PlotConfigSettings - AuditInfo -} - -// PlotConfigSettings describes options for displaying the plot consistently. -// Specifically, whether to ignore data entries in a timeseries that have been masked, -// or whether to display user comments. -type PlotConfigSettings struct { - ShowMasked bool `json:"show_masked" db:"show_masked"` - ShowNonValidated bool `json:"show_nonvalidated" db:"show_nonvalidated"` - ShowComments bool `json:"show_comments" db:"show_comments"` - AutoRange bool `json:"auto_range" db:"auto_range"` - DateRange string `json:"date_range" db:"date_range"` - Threshold int `json:"threshold" db:"threshold"` -} - -// DateRangeTimeWindow creates a TimeWindow from a date range string. -// -// Acceptable date range strings are "lifetime", "5 years", "1 year", or a fixed date in the -// format "YYYY-MM-DD YYYY-MM-DD" with after and before dates separated by a single whitespace. 
-func (pc *PlotConfig) DateRangeTimeWindow() (TimeWindow, error) { - switch dr := strings.ToLower(pc.DateRange); dr { - case "lifetime": - return TimeWindow{After: time.Time{}, Before: time.Now()}, nil - case "5 years": - return TimeWindow{After: time.Now().AddDate(-5, 0, 0), Before: time.Now()}, nil - case "1 year": - return TimeWindow{After: time.Now().AddDate(-1, 0, 0), Before: time.Now()}, nil - case "1 month": - return TimeWindow{After: time.Now().AddDate(0, -1, 0), Before: time.Now()}, nil - default: - cdr := strings.Split(dr, " ") - invalidDateErr := fmt.Errorf("invalid date range; custom date range must be in format \"YYYY-MM-DD YYYY-MM-DD\"") - if len(cdr) != 2 { - return TimeWindow{}, invalidDateErr - } - after, err := time.Parse("2006-01-02", cdr[0]) - if err != nil { - return TimeWindow{}, invalidDateErr - } - before, err := time.Parse("2006-01-02", cdr[1]) - if err != nil { - return TimeWindow{}, invalidDateErr - } - return TimeWindow{After: after, Before: before}, nil - } -} - -const listPlotConfigsSQL = ` - SELECT - id, - slug, - name, - project_id, - report_configs, - creator, - create_date, - updater, - update_date, - show_masked, - show_nonvalidated, - show_comments, - auto_range, - date_range, - threshold, - plot_type, - display - FROM v_plot_configuration -` - -// PlotConfig -const listPlotConfigs = listPlotConfigsSQL + ` - WHERE project_id = $1 -` - -func (q *Queries) ListPlotConfigs(ctx context.Context, projectID uuid.UUID) ([]PlotConfig, error) { - ppc := make([]PlotConfig, 0) - if err := q.db.SelectContext(ctx, &ppc, listPlotConfigs, projectID); err != nil { - return make([]PlotConfig, 0), err - } - return ppc, nil -} - -const getPlotConfig = listPlotConfigsSQL + ` - WHERE id = $1 -` - -func (q *Queries) GetPlotConfig(ctx context.Context, plotConfigID uuid.UUID) (PlotConfig, error) { - var pc PlotConfig - err := q.db.GetContext(ctx, &pc, getPlotConfig, plotConfigID) - return pc, err -} - -const createPlotConfig = ` - INSERT INTO 
plot_configuration (slug, name, project_id, creator, create_date, plot_type) VALUES (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) - RETURNING id -` - -func (q *Queries) CreatePlotConfig(ctx context.Context, pc PlotConfig) (uuid.UUID, error) { - var pcID uuid.UUID - err := q.db.GetContext(ctx, &pcID, createPlotConfig, pc.Name, pc.ProjectID, pc.CreatorID, pc.CreateDate, pc.PlotType) - return pcID, err -} - -// PlotConfigSettings -const createPlotConfigSettings = ` - INSERT INTO plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold) - VALUES ($1, $2, $3, $4, $5, $6, $7) -` - -func (q *Queries) CreatePlotConfigSettings(ctx context.Context, pcID uuid.UUID, pc PlotConfigSettings) error { - _, err := q.db.ExecContext(ctx, createPlotConfigSettings, pcID, pc.ShowMasked, pc.ShowNonValidated, pc.ShowComments, pc.AutoRange, pc.DateRange, pc.Threshold) - return err -} - -const updatePlotConfig = ` - UPDATE plot_configuration SET name = $3, updater = $4, update_date = $5 WHERE project_id = $1 AND id = $2 -` - -func (q *Queries) UpdatePlotConfig(ctx context.Context, pc PlotConfig) error { - _, err := q.db.ExecContext(ctx, updatePlotConfig, pc.ProjectID, pc.ID, pc.Name, pc.UpdaterID, pc.UpdateDate) - return err -} - -const deletePlotConfig = ` - DELETE from plot_configuration WHERE project_id = $1 AND id = $2 -` - -func (q *Queries) DeletePlotConfig(ctx context.Context, projectID, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deletePlotConfig, projectID, plotConfigID) - return err -} - -const deletePlotConfigSettings = ` - DELETE FROM plot_configuration_settings WHERE id = $1 -` - -func (q *Queries) DeletePlotConfigSettings(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deletePlotConfigSettings, plotConfigID) - return err -} diff --git a/api/internal/model/plot_config_bullseye.go b/api/internal/model/plot_config_bullseye.go deleted file mode 100644 
index c2ea9da6..00000000 --- a/api/internal/model/plot_config_bullseye.go +++ /dev/null @@ -1,89 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "time" - - "github.com/google/uuid" -) - -type PlotConfigBullseyePlot struct { - PlotConfig - Display PlotConfigBullseyePlotDisplay `json:"display" db:"display"` -} - -type PlotConfigBullseyePlotDisplay struct { - XAxisTimeseriesID uuid.UUID `json:"x_axis_timeseries_id" db:"x_axis_timeseries_id"` - YAxisTimeseriesID uuid.UUID `json:"y_axis_timeseries_id" db:"y_axis_timeseries_id"` -} - -func (d *PlotConfigBullseyePlotDisplay) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), d) -} - -type PlotConfigMeasurementBullseyePlot struct { - Time time.Time `json:"time" db:"time"` - X *float64 `json:"x" db:"x"` - Y *float64 `json:"y" db:"y"` -} - -const createPlotBullseyeConfig = ` - INSERT INTO plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_timeseries_id) VALUES ($1, $2, $3) -` - -func (q *Queries) CreatePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID, cfg PlotConfigBullseyePlotDisplay) error { - _, err := q.db.ExecContext(ctx, createPlotBullseyeConfig, plotConfigID, cfg.XAxisTimeseriesID, cfg.YAxisTimeseriesID) - return err -} - -const updatePlotBullseyeConfig = ` - UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1 -` - -func (q *Queries) UpdatePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID, cfg PlotConfigBullseyePlotDisplay) error { - _, err := q.db.ExecContext(ctx, updatePlotBullseyeConfig, plotConfigID, cfg.XAxisTimeseriesID, cfg.YAxisTimeseriesID) - return err -} - -const deletePlotBullseyeConfig = ` - DELETE FROM plot_bullseye_config WHERE plog_config_id = $1 -` - -func (q *Queries) DeletePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, 
deletePlotBullseyeConfig, plotConfigID) - return err -} - -const listPlotConfigMeasurementsBullseyePlot = ` - SELECT - t.time, - locf(xm.value) AS x, - locf(ym.value) AS y - FROM plot_bullseye_config pc - INNER JOIN timeseries_measurement t - ON t.timeseries_id = pc.x_axis_timeseries_id - OR t.timeseries_id = pc.y_axis_timeseries_id - LEFT JOIN timeseries_measurement xm - ON xm.timeseries_id = pc.x_axis_timeseries_id - AND xm.time = t.time - LEFT JOIN timeseries_measurement ym - ON ym.timeseries_id = pc.y_axis_timeseries_id - AND ym.time = t.time - WHERE pc.plot_config_id = $1 - AND t.time > $2 - AND t.time < $3 - GROUP BY t.time - ORDER BY t.time ASC -` - -func (q *Queries) ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, plotConfigID uuid.UUID, tw TimeWindow) ([]PlotConfigMeasurementBullseyePlot, error) { - pcmm := make([]PlotConfigMeasurementBullseyePlot, 0) - err := q.db.SelectContext(ctx, &pcmm, listPlotConfigMeasurementsBullseyePlot, plotConfigID, tw.After, tw.Before) - return pcmm, err -} diff --git a/api/internal/model/plot_config_contour.go b/api/internal/model/plot_config_contour.go deleted file mode 100644 index a4e4bdb2..00000000 --- a/api/internal/model/plot_config_contour.go +++ /dev/null @@ -1,136 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "time" - - "github.com/google/uuid" -) - -type PlotConfigContourPlot struct { - PlotConfig - Display PlotConfigContourPlotDisplay `json:"display" db:"display"` -} - -type PlotConfigContourPlotDisplay struct { - TimeseriesIDs dbSlice[uuid.UUID] `json:"timeseries_ids" db:"timeseries_ids"` - Time *time.Time `json:"time" db:"time"` - LocfBackfill string `json:"locf_backfill" db:"locf_backfill"` - GradientSmoothing bool `json:"gradient_smoothing" db:"gradient_smoothing"` - ContourSmoothing bool `json:"contour_smoothing" db:"contour_smoothing"` - ShowLabels bool `json:"show_labels" db:"show_labels"` -} - -func (d *PlotConfigContourPlotDisplay) Scan(src interface{}) error { - b, 
ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), d) -} - -type PlotConfigMeasurementContourPlot struct { - X float64 `json:"x" db:"x"` - Y float64 `json:"y" db:"y"` - Z *float64 `json:"z" db:"z"` -} - -type AggregatePlotConfigMeasurementsContourPlot struct { - X []float64 `json:"x" db:"x"` - Y []float64 `json:"y" db:"y"` - Z []*float64 `json:"z" db:"z"` -} - -const createPlotContourConfig = ` - INSERT INTO plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) - VALUES ($1, $2, $3, $4, $5, $6) -` - -func (q *Queries) CreatePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID, cfg PlotConfigContourPlotDisplay) error { - _, err := q.db.ExecContext(ctx, createPlotContourConfig, plotConfigID, cfg.Time, cfg.LocfBackfill, cfg.GradientSmoothing, cfg.ContourSmoothing, cfg.ShowLabels) - return err -} - -const updatePlotContourConfig = ` - UPDATE plot_contour_config SET "time"=$2, locf_backfill=$3, gradient_smoothing=$4, contour_smoothing=$5, show_labels=$6 - WHERE plot_config_id=$1 -` - -func (q *Queries) UpdatePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID, cfg PlotConfigContourPlotDisplay) error { - _, err := q.db.ExecContext(ctx, updatePlotContourConfig, plotConfigID, cfg.Time, cfg.LocfBackfill, cfg.GradientSmoothing, cfg.ContourSmoothing, cfg.ShowLabels) - return err -} - -const deletePlotContourConfig = ` - DELETE FROM plot_contour_config WHERE plog_config_id = $1 -` - -func (q *Queries) DeletePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deletePlotContourConfig, plotConfigID) - return err -} - -const createPlotContourConfigTimeseries = ` - INSERT INTO plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) VALUES ($1, $2) - ON CONFLICT (plot_contour_config_id, timeseries_id) DO NOTHING -` - -func (q *Queries) CreatePlotContourConfigTimeseries(ctx 
context.Context, plotConfigID, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createPlotContourConfigTimeseries, plotConfigID, timeseriesID) - return err -} - -const deleteAllPlotContourConfigTimeseries = ` - DELETE FROM plot_contour_config_timeseries WHERE plot_contour_config_id = $1 -` - -func (q *Queries) DeleteAllPlotContourConfigTimeseries(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAllPlotContourConfigTimeseries, plotConfigID) - return err -} - -const listPlotContourConfigTimes = ` - SELECT DISTINCT mm.time - FROM plot_contour_config_timeseries pcts - INNER JOIN timeseries_measurement mm ON mm.timeseries_id = pcts.timeseries_id - WHERE pcts.plot_contour_config_id = $1 - AND mm.time > $2 - AND mm.time < $3 - ORDER BY time ASC -` - -func (q *Queries) ListPlotConfigTimesContourPlot(ctx context.Context, plotConfigID uuid.UUID, tw TimeWindow) ([]time.Time, error) { - tt := make([]time.Time, 0) - err := q.db.SelectContext(ctx, &tt, listPlotContourConfigTimes, plotConfigID, tw.After, tw.Before) - return tt, err -} - -// NOTE: this assumes all geometries are stored natively as WGS84 (EPSG:4326) -const listPlotConfigMeasurementsContourPlot = ` - SELECT - oi.x, - oi.y, - locf(mm.value) AS z - FROM plot_contour_config pc - LEFT JOIN plot_contour_config_timeseries pcts ON pcts.plot_contour_config_id = pc.plot_config_id - LEFT JOIN timeseries_measurement mm ON mm.timeseries_id = pcts.timeseries_id - INNER JOIN timeseries ts ON ts.id = pcts.timeseries_id - INNER JOIN ( - SELECT - ii.id, - ST_X(ST_Centroid(ii.geometry)) AS x, - ST_Y(ST_Centroid(ii.geometry)) AS y - FROM instrument ii - ) oi ON oi.id = ts.instrument_id - WHERE plot_config_id = $1 - AND mm.time = $2 - GROUP BY pc.plot_config_id, pcts.timeseries_id, oi.x, oi.y -` - -func (q *Queries) ListPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) ([]PlotConfigMeasurementContourPlot, error) { - pcmm := 
make([]PlotConfigMeasurementContourPlot, 0) - err := q.db.SelectContext(ctx, &pcmm, listPlotConfigMeasurementsContourPlot, plotConfigID, t) - return pcmm, err -} diff --git a/api/internal/model/plot_config_profile.go b/api/internal/model/plot_config_profile.go deleted file mode 100644 index 848d66e3..00000000 --- a/api/internal/model/plot_config_profile.go +++ /dev/null @@ -1,45 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - - "github.com/google/uuid" -) - -type PlotConfigProfilePlot struct { - PlotConfig - Display PlotConfigProfilePlotDisplay `json:"display" db:"display"` -} - -type PlotConfigProfilePlotDisplay struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentType string `json:"instrument_type,omitempty" db:"instrument_type"` -} - -func (d *PlotConfigProfilePlotDisplay) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), d) -} - -const createPlotProfileConfig = ` - INSERT INTO plot_profile_config (plot_config_id, instrument_id) VALUES ($1, $2) -` - -func (q *Queries) CreatePlotProfileConfig(ctx context.Context, plotConfigID uuid.UUID, d PlotConfigProfilePlotDisplay) error { - _, err := q.db.ExecContext(ctx, createPlotProfileConfig, plotConfigID, d.InstrumentID) - return err -} - -const updatePlotProfileConfig = ` - UPDATE plot_profile_config SET instrument_id=$2 WHERE plot_config_id=$1 -` - -func (q *Queries) UpdatePlotProfileConfig(ctx context.Context, plotConfigID uuid.UUID, d PlotConfigProfilePlotDisplay) error { - _, err := q.db.ExecContext(ctx, updatePlotProfileConfig, plotConfigID, d.InstrumentID) - return err -} diff --git a/api/internal/model/plot_config_scatter_line.go b/api/internal/model/plot_config_scatter_line.go deleted file mode 100644 index a28faebe..00000000 --- a/api/internal/model/plot_config_scatter_line.go +++ /dev/null @@ -1,144 +0,0 @@ -package model - -import ( - "context" - 
"encoding/json" - "fmt" - - "github.com/google/uuid" -) - -type PlotConfigScatterLinePlot struct { - PlotConfig - Display PlotConfigScatterLineDisplay `json:"display" db:"display"` - // TODO AlertConfigIDs []string -} - -type PlotConfigScatterLineDisplay struct { - Traces []PlotConfigScatterLineTimeseriesTrace `json:"traces"` - Layout PlotConfigScatterLineLayout `json:"layout"` -} - -func (d *PlotConfigScatterLineDisplay) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), d) -} - -type PlotConfigScatterLineTimeseriesTrace struct { - PlotConfigurationID uuid.UUID `json:"plot_configuration_id"` - TimeseriesID uuid.UUID `json:"timeseries_id"` - Name string `json:"name"` // read-only - Parameter string `json:"parameter"` // read-only - TraceOrder int `json:"trace_order"` - TraceType string `json:"trace_type"` - Color string `json:"color"` - LineStyle string `json:"line_style"` - Width float32 `json:"width"` - ShowMarkers bool `json:"show_markers"` - YAxis string `json:"y_axis"` // y1 or y2, default y1 -} - -type PlotConfigScatterLineLayout struct { - CustomShapes []PlotConfigScatterLineCustomShape `json:"custom_shapes"` - YAxisTitle *string `json:"y_axis_title"` - Y2AxisTitle *string `json:"y2_axis_title"` -} - -type PlotConfigScatterLineCustomShape struct { - PlotConfigurationID uuid.UUID `json:"plot_configuration_id"` - Enabled bool `json:"enabled"` - Name string `json:"name"` - DataPoint float32 `json:"data_point"` - Color string `json:"color"` -} - -const createPlotConfigScatterLineLayout = `INSERT INTO plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) VALUES ($1, $2, $3)` - -func (q *Queries) CreatePlotConfigScatterLineLayout(ctx context.Context, pcID uuid.UUID, layout PlotConfigScatterLineLayout) error { - _, err := q.db.ExecContext(ctx, createPlotConfigScatterLineLayout, pcID, layout.YAxisTitle, layout.Y2AxisTitle) - return err -} - -const 
updatePlotConfigScatterLineLayout = `UPDATE plot_scatter_line_config SET y_axis_title=$2, y2_axis_title=$3 WHERE plot_config_id=$1` - -func (q *Queries) UpdatePlotConfigScatterLineLayout(ctx context.Context, pcID uuid.UUID, layout PlotConfigScatterLineLayout) error { - _, err := q.db.ExecContext(ctx, updatePlotConfigScatterLineLayout, pcID, layout.YAxisTitle, layout.Y2AxisTitle) - return err -} - -// PlotConfigTimeseriesTrace -const createPlotConfigTimeseriesTrace = ` - INSERT INTO plot_configuration_timeseries_trace - (plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8) -` - -func (q *Queries) CreatePlotConfigTimeseriesTrace(ctx context.Context, tr PlotConfigScatterLineTimeseriesTrace) error { - _, err := q.db.ExecContext( - ctx, createPlotConfigTimeseriesTrace, - tr.PlotConfigurationID, tr.TimeseriesID, tr.TraceOrder, tr.Color, tr.LineStyle, tr.Width, tr.ShowMarkers, tr.YAxis, - ) - return err -} - -const updatePlotConfigTimeseriesTrace = ` - UPDATE plot_configuration_timeseries_trace - SET trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 - WHERE plot_configuration_id=$1 AND timeseries_id=$2 -` - -func (q *Queries) UpdatePlotConfigTimeseriesTrace(ctx context.Context, tr PlotConfigScatterLineTimeseriesTrace) error { - _, err := q.db.ExecContext( - ctx, createPlotConfigTimeseriesTrace, - tr.PlotConfigurationID, tr.TimeseriesID, tr.TraceOrder, tr.Color, tr.LineStyle, tr.Width, tr.ShowMarkers, tr.YAxis, - ) - return err -} - -const deleteAllPlotConfigTimeseriesTraces = ` - DELETE FROM plot_configuration_timeseries_trace WHERE plot_configuration_id=$1 -` - -func (q *Queries) DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, pcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAllPlotConfigTimeseriesTraces, pcID) - return err -} - -// PlotConfigCustomShape -const createPlotConfigCustomShape = ` - INSERT INTO 
plot_configuration_custom_shape - (plot_configuration_id, enabled, name, data_point, color) VALUES ($1, $2, $3, $4, $5) -` - -func (q *Queries) CreatePlotConfigCustomShape(ctx context.Context, cs PlotConfigScatterLineCustomShape) error { - _, err := q.db.ExecContext( - ctx, createPlotConfigCustomShape, - cs.PlotConfigurationID, cs.Enabled, cs.Name, cs.DataPoint, cs.Color, - ) - return err -} - -const updatePlotConfigCustomShape = ` - UPDATE plot_configuration_custom_shape - SET enabled=$2, name=$3, data_point=$4, color=$5 WHERE plot_configuration_id=$1 -` - -func (q *Queries) UpdatePlotConfigCustomShape(ctx context.Context, cs PlotConfigScatterLineCustomShape) error { - _, err := q.db.ExecContext( - ctx, updatePlotConfigCustomShape, - cs.PlotConfigurationID, cs.Enabled, cs.Name, cs.DataPoint, cs.Color, - ) - return err -} - -const deleteAllPlotConfigCustomShapes = ` - DELETE FROM plot_configuration_custom_shape WHERE plot_configuration_id=$1 -` - -func (q *Queries) DeleteAllPlotConfigCustomShapes(ctx context.Context, pcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAllPlotConfigCustomShapes, pcID) - return err -} diff --git a/api/internal/model/profile.go b/api/internal/model/profile.go deleted file mode 100644 index 9b075bb4..00000000 --- a/api/internal/model/profile.go +++ /dev/null @@ -1,188 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/USACE/instrumentation-api/api/internal/password" - "github.com/google/uuid" -) - -// Profile is a user profile -type Profile struct { - ID uuid.UUID `json:"id" db:"id"` - Tokens []TokenInfoProfile `json:"tokens"` - IsAdmin bool `json:"is_admin" db:"is_admin"` - Roles dbSlice[string] `json:"roles" db:"roles"` - ProfileInfo -} - -// TokenInfoProfile is token information embedded in Profile -type TokenInfoProfile struct { - TokenID string `json:"token_id" db:"token_id"` - Issued time.Time `json:"issued"` -} - -// ProfileInfo is information necessary to construct a profile -type ProfileInfo 
struct { - EDIPI int `json:"-" db:"edipi"` - Username string `json:"username" db:"username"` - DisplayName string `json:"display_name" db:"display_name"` - Email string `json:"email" db:"email"` -} - -// TokenInfo represents the information held in the database about a token -type TokenInfo struct { - ID uuid.UUID `json:"-"` - TokenID string `json:"token_id" db:"token_id"` - ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` - Issued time.Time `json:"issued"` - Hash string `json:"-"` -} - -// Token includes all TokenInfo and the actual token string generated for a user -// this is only returned the first time a token is generated -type Token struct { - SecretToken string `json:"secret_token"` - TokenInfo -} - -type ProfileClaims struct { - PreferredUsername string - Name string - Email string - SubjectDN *string - CacUID *int - X509Presented bool -} - -const getProfileForEDIPI = ` - SELECT * FROM v_profile WHERE edipi = $1 -` - -func (q *Queries) GetProfileForEDIPI(ctx context.Context, edipi int) (Profile, error) { - var p Profile - err := q.db.GetContext(ctx, &p, getProfileForEDIPI, edipi) - return p, err -} - -const getProfileForEmail = ` - SELECT * FROM v_profile WHERE email ILIKE $1 -` - -func (q *Queries) GetProfileForEmail(ctx context.Context, email string) (Profile, error) { - var p Profile - err := q.db.GetContext(ctx, &p, getProfileForEmail, email) - return p, err -} - -const getProfileForUsername = ` - SELECT * FROM v_profile WHERE username = $1 -` - -func (q *Queries) GetProfileForUsername(ctx context.Context, username string) (Profile, error) { - var p Profile - err := q.db.GetContext(ctx, &p, getProfileForUsername, username) - return p, err -} - -const getIssuedTokens = ` - SELECT token_id, issued FROM profile_token WHERE profile_id = $1 -` - -func (q *Queries) GetIssuedTokens(ctx context.Context, profileID uuid.UUID) ([]TokenInfoProfile, error) { - tokens := make([]TokenInfoProfile, 0) - err := q.db.SelectContext(ctx, &tokens, getIssuedTokens, 
profileID) - return tokens, err -} - -const getProfileForTokenID = ` - SELECT p.id, p.edipi, p.username, p.email, p.is_admin - FROM profile_token t - LEFT JOIN v_profile p ON p.id = t.profile_id - WHERE t.token_id = $1 -` - -func (q *Queries) GetProfileForTokenID(ctx context.Context, tokenID string) (Profile, error) { - var p Profile - err := q.db.GetContext(ctx, getProfileForTokenID, tokenID) - return p, err -} - -const createProfile = ` - INSERT INTO profile (edipi, username, email, display_name) VALUES ($1, $2, $3, $4) RETURNING id, username, email, display_name -` - -// CreateProfile creates a new profile -func (q *Queries) CreateProfile(ctx context.Context, n ProfileInfo) (Profile, error) { - p := Profile{ - Tokens: make([]TokenInfoProfile, 0), - Roles: make([]string, 0), - } - err := q.db.GetContext(ctx, &p, createProfile, n.EDIPI, n.Username, n.Email, n.DisplayName) - return p, err -} - -const createProfileToken = ` - INSERT INTO profile_token (token_id, profile_id, hash) VALUES ($1,$2,$3) RETURNING * -` - -// CreateProfileToken creates a secret token and stores the HASH (not the actual token) -// to the database. The return payload of this function is the first and last time you'll see -// the raw token unless the user writes it down or stores it somewhere safe. 
-func (q *Queries) CreateProfileToken(ctx context.Context, profileID uuid.UUID) (Token, error) { - var t Token - secretToken := password.GenerateRandom(40) - tokenID := password.GenerateRandom(40) - hash, err := password.CreateHash(secretToken, password.DefaultParams) - if err != nil { - return t, err - } - if err := q.db.GetContext(ctx, &t, createProfileToken, tokenID, profileID, hash); err != nil { - return t, err - } - t.SecretToken = secretToken - return t, nil -} - -const getTokenInfoByTokenID = ` - SELECT id, token_id, profile_id, issued, hash FROM profile_token WHERE token_id=$1 LIMIT 1 -` - -// GetTokenInfoByTokenID returns a single token by token id -func (q *Queries) GetTokenInfoByTokenID(ctx context.Context, tokenID string) (TokenInfo, error) { - var n TokenInfo - err := q.db.GetContext(ctx, &n, getTokenInfoByTokenID, tokenID) - return n, err -} - -const updateProfileForEDIPI = `UPDATE profile SET username=$1, email=$2, display_name=$3 WHERE edipi=$4` - -func (q *Queries) UpdateProfileForEDIPI(ctx context.Context, edipi int, pi ProfileInfo) error { - _, err := q.db.ExecContext(ctx, updateProfileForEDIPI, pi.Username, pi.Email, pi.DisplayName, edipi) - return err -} - -const updateProfileForEmail = `UPDATE profile SET username=$1, display_name=$2 WHERE email ILIKE $3` - -func (q *Queries) UpdateProfileForEmail(ctx context.Context, email string, pi ProfileInfo) error { - _, err := q.db.ExecContext(ctx, updateProfileForEmail, pi.Username, pi.DisplayName, email) - return err -} - -const updateProfileForUsername = `UPDATE profile SET email=$1, display_name=$2 WHERE username=$3` - -func (q *Queries) UpdateProfileForUsername(ctx context.Context, username string, pi ProfileInfo) error { - _, err := q.db.ExecContext(ctx, updateProfileForEmail, pi.Email, pi.DisplayName, username) - return err -} - -const deleteToken = ` - DELETE FROM profile_token WHERE profile_id=$1 AND token_id=$2 -` - -// DeleteToken deletes a token by token_id -func (q *Queries) 
DeleteToken(ctx context.Context, profileID uuid.UUID, tokenID string) error { - _, err := q.db.ExecContext(ctx, deleteToken, profileID, tokenID) - return err -} diff --git a/api/internal/model/project.go b/api/internal/model/project.go deleted file mode 100644 index c5c42b89..00000000 --- a/api/internal/model/project.go +++ /dev/null @@ -1,228 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -type District struct { - Agency string `json:"agency" db:"agency"` - ID uuid.UUID `json:"id" db:"id"` - Name string `json:"name" db:"name"` - Initials string `json:"initials" db:"initials"` - DivisionName string `json:"division_name" db:"division_name"` - DivisionInitials string `json:"division_initials" db:"division_initials"` - OfficeID *uuid.UUID `json:"office_id" db:"office_id"` -} - -type Project struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - FederalID *string `json:"federal_id" db:"federal_id"` - DistrictID *uuid.UUID `json:"district_id" db:"district_id"` - OfficeID *uuid.UUID `json:"office_id" db:"office_id"` - Image *string `json:"image" db:"image"` - Deleted bool `json:"-"` - InstrumentCount int `json:"instrument_count" db:"instrument_count"` - InstrumentGroupCount int `json:"instrument_group_count" db:"instrument_group_count"` - AuditInfo -} - -type ProjectCount struct { - ProjectCount int `json:"project_count"` -} - -type ProjectCollection []Project - -const selectProjectsSQL = ` - SELECT - id, federal_id, image, office_id, district_id, deleted, slug, name, creator, creator_username, create_date, - updater, updater_username, update_date, instrument_count, instrument_group_count - FROM v_project -` - -const projectSearch = selectProjectsSQL + ` - WHERE NOT deleted AND name ILIKE '%' || $1 || '%' LIMIT $2 ORDER BY name -` - -// SearchProjects returns search result for projects -func (q *Queries) SearchProjects(ctx context.Context, searchInput string, limit int) ([]SearchResult, error) { - 
ss := make([]SearchResult, 0) - if err := q.db.SelectContext(ctx, &ss, projectSearch, searchInput, limit); err != nil { - return nil, err - } - rr := make([]SearchResult, len(ss)) - for idx, p := range ss { - rr[idx] = SearchResult{ID: p.ID, Type: "project", Item: p} - } - return rr, nil -} - -const listDistricts = ` - SELECT * FROM v_district -` - -func (q *Queries) ListDistricts(ctx context.Context) ([]District, error) { - dd := make([]District, 0) - if err := q.db.SelectContext(ctx, &dd, listDistricts); err != nil { - return nil, err - } - return dd, nil -} - -const listProjects = selectProjectsSQL + ` - WHERE NOT deleted ORDER BY name -` - -// ListProjects returns a slice of projects -func (q *Queries) ListProjects(ctx context.Context) ([]Project, error) { - pp := make([]Project, 0) - if err := q.db.SelectContext(ctx, &pp, listProjects); err != nil { - return nil, err - } - return pp, nil -} - -const listProjectsByFederalID = selectProjectsSQL + ` - WHERE federal_id IS NOT NULL AND federal_id = $1 AND NOT deleted ORDER BY name -` - -// ListProjects returns a slice of projects -func (q *Queries) ListProjectsByFederalID(ctx context.Context, federalID string) ([]Project, error) { - pp := make([]Project, 0) - if err := q.db.SelectContext(ctx, &pp, listProjectsByFederalID, federalID); err != nil { - return nil, err - } - return pp, nil -} - -const listProjectsForProfile = selectProjectsSQL + ` - WHERE id = ANY( - SELECT project_id FROM profile_project_roles - WHERE profile_id = $1 - ) - AND NOT deleted - ORDER BY name -` - -func (q *Queries) ListProjectsForProfile(ctx context.Context, profileID uuid.UUID) ([]Project, error) { - pp := make([]Project, 0) - if err := q.db.SelectContext(ctx, &pp, listProjectsForProfile, profileID); err != nil { - return nil, err - } - return pp, nil -} - -const listProjectsForProfileRole = selectProjectsSQL + ` - WHERE id = ANY( - SELECT project_id FROM profile_project_roles pr - INNER JOIN role r ON r.id = pr.role_id - WHERE 
pr.profile_id = $1 - AND r.name = $2 - ) - AND NOT deleted - ORDER BY name -` - -func (q *Queries) ListProjectsForProfileRole(ctx context.Context, profileID uuid.UUID, role string) ([]Project, error) { - pp := make([]Project, 0) - err := q.db.SelectContext(ctx, &pp, listProjectsForProfileRole, profileID, role) - return pp, err -} - -const listProjectInstruments = listInstrumentsSQL + ` - WHERE id = ANY( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) - AND NOT deleted -` - -// ListProjectInstruments returns a slice of instruments for a project -func (q *Queries) ListProjectInstruments(ctx context.Context, projectID uuid.UUID) ([]Instrument, error) { - ii := make([]Instrument, 0) - if err := q.db.SelectContext(ctx, &ii, listProjectInstruments, projectID); err != nil { - return nil, err - } - return ii, nil -} - -const listProjectInstrumentGroups = listInstrumentGroupsSQL + ` - WHERE project_id = $1 AND NOT deleted -` - -// ListProjectInstrumentGroups returns a list of instrument groups for a project -func (q *Queries) ListProjectInstrumentGroups(ctx context.Context, projectID uuid.UUID) ([]InstrumentGroup, error) { - gg := make([]InstrumentGroup, 0) - if err := q.db.SelectContext(ctx, &gg, listProjectInstrumentGroups, projectID); err != nil { - return nil, err - } - return gg, nil -} - -const getProjectCount = ` - SELECT COUNT(id) FROM project WHERE NOT deleted -` - -// GetProjectCount returns the number of projects in the database that are not deleted -func (q *Queries) GetProjectCount(ctx context.Context) (ProjectCount, error) { - var pc ProjectCount - if err := q.db.GetContext(ctx, &pc.ProjectCount, getProjectCount); err != nil { - return pc, err - } - return pc, nil -} - -const getProject = selectProjectsSQL + ` - WHERE id = $1 -` - -func (q *Queries) GetProject(ctx context.Context, id uuid.UUID) (Project, error) { - var p Project - err := q.db.GetContext(ctx, &p, getProject, id) - return p, err -} - -const createProject = ` - INSERT 
INTO project (federal_id, slug, name, district_id, creator, create_date) - VALUES ($1, slugify($2, 'project'), $2, $3, $4, $5) - RETURNING id, slug -` - -func (q *Queries) CreateProject(ctx context.Context, p Project) (IDSlugName, error) { - var aa IDSlugName - err := q.db.GetContext(ctx, &aa, createProject, p.FederalID, p.Name, p.DistrictID, p.CreatorID, p.CreateDate) - return aa, err -} - -const updateProject = ` - UPDATE project SET name=$2, updater=$3, update_date=$4, district_id=$5, federal_id=$6 WHERE id=$1 RETURNING id -` - -// UpdateProject updates a project -func (q *Queries) UpdateProject(ctx context.Context, p Project) error { - _, err := q.db.ExecContext(ctx, updateProject, p.ID, p.Name, p.UpdaterID, p.UpdateDate, p.DistrictID, p.FederalID) - return err -} - -const updateProjectImage = ` - UPDATE project SET image = $1 WHERE project_id = $2 -` - -func (q *Queries) UpdateProjectImage(ctx context.Context, fileName string, projectID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, updateProjectImage, fileName, projectID) - return err -} - -const deleteFlagProject = ` - UPDATE project SET deleted=true WHERE id = $1 -` - -// DeleteFlagProject sets deleted to true for a project -func (q *Queries) DeleteFlagProject(ctx context.Context, id uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteFlagProject, id) - return err -} diff --git a/api/internal/model/project_role.go b/api/internal/model/project_role.go deleted file mode 100644 index 8a5b38a6..00000000 --- a/api/internal/model/project_role.go +++ /dev/null @@ -1,101 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// ProjectMembership holds -type ProjectMembership struct { - ID uuid.UUID `json:"id" db:"id"` - ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` - Username *string `json:"username"` - Email string `json:"email"` - RoleID uuid.UUID `json:"role_id" db:"role_id"` - Role string `json:"role"` -} - -const listProjectMembers = ` - SELECT id, profile_id, 
username, email, role_id, role - FROM v_profile_project_roles - WHERE project_id = $1 - ORDER BY email -` - -// ListProjectMembers lists users (profiles) who have permissions on a project and their role info -func (q *Queries) ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]ProjectMembership, error) { - rr := make([]ProjectMembership, 0) - if err := q.db.SelectContext(ctx, &rr, listProjectMembers, projectID); err != nil { - return nil, err - } - return rr, nil -} - -const getProjectMembership = ` - SELECT id, profile_id, username, email, role_id, role - FROM v_profile_project_roles - WHERE id = $1 -` - -func (q *Queries) GetProjectMembership(ctx context.Context, roleID uuid.UUID) (ProjectMembership, error) { - var pm ProjectMembership - err := q.db.GetContext(ctx, &pm, getProjectMembership, roleID) - return pm, err -} - -const addProjectMemberRole = ` - INSERT INTO profile_project_roles (project_id, profile_id, role_id, granted_by) - VALUES ($1, $2, $3, $4) - ON CONFLICT ON CONSTRAINT unique_profile_project_role DO UPDATE SET project_id = EXCLUDED.project_id - RETURNING id -` - -// AddProjectMemberRole adds a role to a user for a specific project -func (q *Queries) AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (uuid.UUID, error) { - var roleIDNew uuid.UUID - err := q.db.GetContext(ctx, &roleIDNew, addProjectMemberRole, projectID, profileID, roleID, grantedBy) - return roleIDNew, err -} - -const removeProjectMemberRole = ` - DELETE FROM profile_project_roles WHERE project_id = $1 AND profile_id = $2 AND role_id = $3 -` - -// RemoveProjectMemberRole removes a role from a user for a specific project -func (q *Queries) RemoveProjectMemberRole(ctx context.Context, projectID, profileID, roleID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, removeProjectMemberRole, projectID, profileID, roleID) - return err -} - -const isProjectAdmin = ` - SELECT EXISTS ( - SELECT 1 FROM profile_project_roles pr - INNER 
JOIN role r ON r.id = pr.role_id - WHERE pr.profile_id = $1 - AND pr.project_id = $2 - AND r.name = 'ADMIN' - ) -` - -func (q *Queries) IsProjectAdmin(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) { - var isAdmin bool - err := q.db.GetContext(ctx, &isAdmin, isProjectAdmin, projectID) - return isAdmin, err -} - -const isProjectMember = ` - SELECT EXISTS ( - SELECT 1 FROM profile_project_roles pr - INNER JOIN role r ON r.id = pr.role_id - WHERE pr.profile_id = $1 - AND pr.project_id = $2 - AND (r.name = 'MEMBER' OR r.name = 'ADMIN') - ) -` - -func (q *Queries) IsProjectMember(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) { - var isMember bool - err := q.db.GetContext(ctx, &isMember, isProjectMember, projectID) - return isMember, err -} diff --git a/api/internal/model/report_config.go b/api/internal/model/report_config.go deleted file mode 100644 index f49f9f0a..00000000 --- a/api/internal/model/report_config.go +++ /dev/null @@ -1,190 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "time" - - "github.com/google/uuid" -) - -type ReportConfig struct { - ID uuid.UUID `json:"id" db:"id"` - Slug string `json:"slug" db:"slug"` - Name string `json:"name" db:"name"` - Description string `json:"description" db:"description"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - DistrictName *string `json:"district_name" db:"district_name"` - PlotConfigs dbJSONSlice[IDSlugName] `json:"plot_configs" db:"plot_configs"` - GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides" db:"global_overrides"` - AuditInfo -} - -type ReportDownloadJob struct { - ID uuid.UUID `json:"id" db:"id"` - ReportConfigID uuid.UUID `json:"report_config_id" db:"report_config_id"` - Creator uuid.UUID `json:"creator" db:"creator"` - CreateDate time.Time `json:"create_date" db:"create_date"` - Status string `json:"status" db:"status"` - FileKey *string 
`json:"file_key" db:"file_key"` - FileExpiry *time.Time `json:"file_expiry" db:"file_expiry"` - Progress int `json:"progress" db:"progress"` - ProgressUpdateDate time.Time `json:"progress_update_date" db:"progress_update_date"` -} - -func (o *ReportConfigGlobalOverrides) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), o) -} - -type ReportConfigWithPlotConfigs struct { - ReportConfig - PlotConfigs []PlotConfigScatterLinePlot `json:"plot_configs"` -} - -type ReportConfigJobMessage struct { - ReportConfigID uuid.UUID `json:"report_config_id"` - JobID uuid.UUID `json:"job_id"` - IsLandscape bool `json:"is_landscape"` -} - -const createReportConfig = ` - INSERT INTO report_config ( - name, slug, project_id, creator, description, date_range, date_range_enabled, - show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled - ) - VALUES ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) - RETURNING id -` - -func (q *Queries) CreateReportConfig(ctx context.Context, rc ReportConfig) (uuid.UUID, error) { - var rcID uuid.UUID - err := q.db.GetContext( - ctx, &rcID, createReportConfig, rc.Name, rc.ProjectID, rc.CreatorID, rc.Description, - rc.GlobalOverrides.DateRange.Value, rc.GlobalOverrides.DateRange.Enabled, - rc.GlobalOverrides.ShowMasked.Value, rc.GlobalOverrides.ShowMasked.Enabled, - rc.GlobalOverrides.ShowNonvalidated.Value, rc.GlobalOverrides.ShowNonvalidated.Enabled, - ) - return rcID, err -} - -const listProjectReportConfigs = ` - SELECT * FROM v_report_config WHERE project_id = $1 -` - -func (q *Queries) ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]ReportConfig, error) { - rcs := make([]ReportConfig, 0) - err := q.db.SelectContext(ctx, &rcs, listProjectReportConfigs, projectID) - return rcs, err -} - -const listReportConfigPlotConfigs = ` - SELECT * FROM v_plot_configuration WHERE id = ANY( - SELECT 
plot_config_id FROM report_config_plot_config WHERE report_config_id = $1 - ) -` - -func (q *Queries) ListReportConfigPlotConfigs(ctx context.Context, rcID uuid.UUID) ([]PlotConfigScatterLinePlot, error) { - pcs := make([]PlotConfigScatterLinePlot, 0) - err := q.db.SelectContext(ctx, &pcs, listReportConfigPlotConfigs, rcID) - return pcs, err -} - -const getReportConfigByID = ` - SELECT * FROM v_report_config WHERE id = $1 -` - -func (q *Queries) GetReportConfigByID(ctx context.Context, rcID uuid.UUID) (ReportConfig, error) { - var rc ReportConfig - err := q.db.GetContext(ctx, &rc, getReportConfigByID, rcID) - return rc, err -} - -const updateReportConfig = ` - UPDATE report_config SET name=$2, - updater=$3, update_date=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, - show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 WHERE id=$1 -` - -func (q *Queries) UpdateReportConfig(ctx context.Context, rc ReportConfig) error { - _, err := q.db.ExecContext( - ctx, updateReportConfig, rc.ID, rc.Name, rc.UpdaterID, rc.UpdateDate, rc.Description, - rc.GlobalOverrides.DateRange.Value, rc.GlobalOverrides.DateRange.Enabled, - rc.GlobalOverrides.ShowMasked.Value, rc.GlobalOverrides.ShowMasked.Enabled, - rc.GlobalOverrides.ShowNonvalidated.Value, rc.GlobalOverrides.ShowNonvalidated.Enabled, - ) - return err -} - -const deleteReportConfig = ` - DELETE FROM report_config WHERE id=$1 -` - -func (q *Queries) DeleteReportConfig(ctx context.Context, rcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteReportConfig, rcID) - return err -} - -const assignReportConfigPlotConfig = ` - INSERT INTO report_config_plot_config (report_config_id, plot_config_id) VALUES ($1, $2) -` - -func (q *Queries) AssignReportConfigPlotConfig(ctx context.Context, rcID, pcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, assignReportConfigPlotConfig, rcID, pcID) - return err -} - -const unassignReportConfigPlotConfig = ` - DELETE FROM 
report_config_plot_config WHERE report_config_id=$1 AND plot_config_id=$2 -` - -func (q *Queries) UnassignReportConfigPlotConfig(ctx context.Context, rcID, pcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, assignReportConfigPlotConfig, rcID, pcID) - return err -} - -const unassignAllReportConfigPlotConfig = ` - DELETE FROM report_config_plot_config WHERE report_config_id=$1 -` - -func (q *Queries) UnassignAllReportConfigPlotConfig(ctx context.Context, rcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unassignAllReportConfigPlotConfig, rcID) - return err -} - -const getReportDownloadJob = ` - SELECT * FROM report_download_job WHERE id=$1 AND creator=$2 -` - -func (q *Queries) GetReportDownloadJob(ctx context.Context, jobID, profileID uuid.UUID) (ReportDownloadJob, error) { - var j ReportDownloadJob - err := q.db.GetContext(ctx, &j, getReportDownloadJob, jobID, profileID) - return j, err -} - -const createReportDownloadJob = ` - INSERT INTO report_download_job (report_config_id, creator) VALUES ($1, $2) RETURNING * -` - -func (q *Queries) CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID) (ReportDownloadJob, error) { - var jNew ReportDownloadJob - err := q.db.GetContext(ctx, &jNew, createReportDownloadJob, rcID, profileID) - return jNew, err -} - -const updateReportDownloadJob = ` - UPDATE report_download_job SET status=$2, progress=$3, progress_update_date=$4, file_key=$5, file_expiry=$6 WHERE id=$1 -` - -func (q *Queries) UpdateReportDownloadJob(ctx context.Context, j ReportDownloadJob) error { - _, err := q.db.ExecContext( - ctx, updateReportDownloadJob, - j.ID, j.Status, j.Progress, j.ProgressUpdateDate, j.FileKey, j.FileExpiry, - ) - return err -} diff --git a/api/internal/model/submittal.go b/api/internal/model/submittal.go deleted file mode 100644 index 24ec96f0..00000000 --- a/api/internal/model/submittal.go +++ /dev/null @@ -1,149 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - 
-type Submittal struct { - ID uuid.UUID `json:"id" db:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - AlertConfigName string `json:"alert_config_name" db:"alert_config_name"` - AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` - AlertTypeName string `json:"alert_type_name" db:"alert_type_name"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - SubmittalStatusID uuid.UUID `json:"submittal_status_id" db:"submittal_status_id"` - SubmittalStatusName string `json:"submittal_status_name" db:"submittal_status_name"` - CompletionDate *time.Time `json:"completion_date" db:"completion_date"` - CreateDate time.Time `json:"create_date" db:"create_date"` - DueDate time.Time `json:"due_date" db:"due_date"` - MarkedAsMissing bool `json:"marked_as_missing" db:"marked_as_missing"` - WarningSent bool `json:"warning_sent" db:"warning_sent"` -} - -const missingFilter = ` - AND completion_date IS NULL AND NOT marked_as_missing -` - -func (q *Queries) ListProjectSubmittals(ctx context.Context, projectID uuid.UUID, showMissing bool) ([]Submittal, error) { - var filter string - if showMissing { - filter = missingFilter - } - listProjectSubmittals := ` - SELECT * - FROM v_submittal - WHERE project_id = $1 - ` + filter + ` - ORDER BY due_date DESC, alert_type_name ASC - ` - - aa := make([]Submittal, 0) - if err := q.db.SelectContext(ctx, &aa, listProjectSubmittals, projectID); err != nil { - return aa, err - } - return aa, nil -} - -func (q *Queries) ListInstrumentSubmittals(ctx context.Context, instrumentID uuid.UUID, showMissing bool) ([]Submittal, error) { - var filter string - if showMissing { - filter = missingFilter - } - listInstrumentSubmittals := ` - SELECT * - FROM v_submittal - WHERE id = ANY( - SELECT sub.id - FROM submittal sub - INNER JOIN alert_config_instrument aci ON aci.alert_config_id = sub.alert_config_id - WHERE aci.instrument_id = $1 - ) - ` + filter + ` - ORDER BY due_date DESC - ` - aa := make([]Submittal, 0) - 
if err := q.db.SelectContext(ctx, &aa, listInstrumentSubmittals, instrumentID); err != nil { - return aa, err - } - return aa, nil -} - -func (q *Queries) ListAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID, showMissing bool) ([]Submittal, error) { - var filter string - if showMissing { - filter = missingFilter - } - listAlertConfigSubmittals := ` - SELECT * - FROM v_submittal - WHERE alert_config_id = $1 - ` + filter + ` - ORDER BY due_date DESC - ` - aa := make([]Submittal, 0) - if err := q.db.SelectContext(ctx, &aa, listAlertConfigSubmittals, alertConfigID); err != nil { - return aa, err - } - return aa, nil -} - -const listUnverifiedMissingSubmittals = ` - SELECT * - FROM v_submittal - WHERE completion_date IS NULL - AND NOT marked_as_missing - ORDER BY due_date DESC -` - -func (q *Queries) ListUnverifiedMissingSubmittals(ctx context.Context) ([]Submittal, error) { - aa := make([]Submittal, 0) - if err := q.db.SelectContext(ctx, &aa, listUnverifiedMissingSubmittals); err != nil { - return nil, err - } - return aa, nil -} - -const updateSubmittal = ` - UPDATE submittal SET - submittal_status_id = $2, - completion_date = $3, - warning_sent = $4 - WHERE id = $1 -` - -func (q *Queries) UpdateSubmittal(ctx context.Context, sub Submittal) error { - _, err := q.db.ExecContext(ctx, updateSubmittal, sub.ID, sub.SubmittalStatusID, sub.CompletionDate, sub.WarningSent) - return err -} - -const verifyMissingSubmittal = ` - UPDATE submittal SET - -- red submittal status - submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::UUID, - marked_as_missing = true - WHERE id = $1 - AND completion_date IS NULL - AND NOW() > due_date -` - -func (q *Queries) VerifyMissingSubmittal(ctx context.Context, submittalID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, verifyMissingSubmittal, submittalID) - return err -} - -const verifyMissingAlertConfigSubmittals = ` - UPDATE submittal SET - submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::UUID, - 
marked_as_missing = true - WHERE alert_config_id = $1 - AND completion_date IS NULL - AND NOW() > due_date -` - -func (q *Queries) VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, verifyMissingAlertConfigSubmittals, alertConfigID) - return err -} diff --git a/api/internal/model/timeseries.go b/api/internal/model/timeseries.go deleted file mode 100644 index 8f2c00b5..00000000 --- a/api/internal/model/timeseries.go +++ /dev/null @@ -1,220 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -const ( - StandardTimeseriesType = "standard" - ConstantTimeseriesType = "constant" - ComputedTimeseriesType = "computed" - CwmsTimeseriesType = "cwms" -) - -type Timeseries struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Variable string `json:"variable"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentSlug string `json:"instrument_slug" db:"instrument_slug"` - Instrument string `json:"instrument,omitempty"` - ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` - Parameter string `json:"parameter,omitempty"` - UnitID uuid.UUID `json:"unit_id" db:"unit_id"` - Unit string `json:"unit,omitempty"` - Values []Measurement `json:"values,omitempty"` - Type string `json:"type" db:"type"` - IsComputed bool `json:"is_computed" db:"is_computed"` -} - -type TimeseriesNote struct { - Masked *bool `json:"masked,omitempty"` - Validated *bool `json:"validated,omitempty"` - Annotation *string `json:"annotation,omitempty"` -} - -type TimeseriesCollectionItems struct { - Items []Timeseries -} - -func (c *TimeseriesCollectionItems) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var t Timeseries - if err := 
json.Unmarshal(b, &t); err != nil { - return err - } - c.Items = []Timeseries{t} - default: - c.Items = make([]Timeseries, 0) - } - return nil -} - -var ( - UnknownParameterID = uuid.MustParse("2b7f96e1-820f-4f61-ba8f-861640af6232") - UnknownUnitID = uuid.MustParse("4a999277-4cf5-4282-93ce-23b33c65e2c8") -) - -const getStoredTimeseriesExists = ` - SELECT EXISTS (SELECT id FROM v_timeseries_stored WHERE id = $1) -` - -// ValidateStoredTimeseries returns an error if the timeseries id does not exist or the timeseries is computed -func (q *Queries) GetStoredTimeseriesExists(ctx context.Context, timeseriesID uuid.UUID) (bool, error) { - var isStored bool - if err := q.db.GetContext(ctx, &isStored, getStoredTimeseriesExists, ×eriesID); err != nil { - return false, err - } - return isStored, nil -} - -const getTimeseriesProjectMap = ` - SELECT timeseries_id, project_id - FROM v_timeseries_project_map - WHERE timeseries_id IN (?) -` - -func (q *Queries) GetTimeseriesProjectMap(ctx context.Context, timeseriesIDs []uuid.UUID) (map[uuid.UUID]uuid.UUID, error) { - query, args, err := sqlIn(getTimeseriesProjectMap, timeseriesIDs) - if err != nil { - return nil, err - } - query = q.db.Rebind(query) - var result []struct { - TimeseriesID uuid.UUID `db:"timeseries_id"` - ProjectID uuid.UUID `db:"project_id"` - } - if err = q.db.SelectContext(ctx, &result, query, args...); err != nil { - return nil, err - } - m := make(map[uuid.UUID]uuid.UUID) - for _, r := range result { - m[r.TimeseriesID] = r.ProjectID - } - return m, nil -} - -const listProjectTimeseries = ` - SELECT t.* FROM v_timeseries t - INNER JOIN project_instrument p ON p.instrument_id = t.instrument_id - WHERE p.project_id = $1 -` - -// ListProjectTimeseries lists all timeseries for a given project -func (q *Queries) ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.SelectContext(ctx, &tt, listProjectTimeseries, projectID); err != nil { 
- return make([]Timeseries, 0), err - } - - return tt, nil -} - -const listInstrumentTimeseries = ` - SELECT * FROM v_timeseries - WHERE instrument_id = $1 -` - -func (q *Queries) ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.Select(&tt, listInstrumentTimeseries, instrumentID); err != nil { - return nil, err - } - return tt, nil -} - -const listPlotConfigTimeseries = ` - SELECT t.* FROM v_timeseries t - INNER JOIN plot_configuration_timeseries pct ON pct.timeseries_id = t.id - WHERE pct.plot_configuration_id = $1 -` - -func (q *Queries) ListPlotConfigTimeseries(ctx context.Context, plotConfigID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.Select(&tt, listPlotConfigTimeseries, plotConfigID); err != nil { - return nil, err - } - return tt, nil -} - -const listInstrumentGroupTimeseries = ` - SELECT t.* FROM v_timeseries t - INNER JOIN instrument_group_instruments gi ON gi.instrument_id = t.instrument_id - WHERE gi.instrument_group_id = $1 -` - -func (q *Queries) ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.SelectContext(ctx, &tt, listInstrumentGroupTimeseries, instrumentGroupID); err != nil { - return nil, err - } - return tt, nil -} - -const getTimeseries = ` - SELECT * FROM v_timeseries WHERE id = $1 -` - -func (q *Queries) GetTimeseries(ctx context.Context, timeseriesID uuid.UUID) (Timeseries, error) { - var t Timeseries - err := q.db.GetContext(ctx, &t, getTimeseries, timeseriesID) - return t, err -} - -const createTimeseries = ` - INSERT INTO timeseries (instrument_id, slug, name, parameter_id, unit_id, type) - VALUES ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) - RETURNING id, instrument_id, slug, name, parameter_id, unit_id, type -` - -func (q *Queries) CreateTimeseries(ctx context.Context, ts Timeseries) (Timeseries, error) 
{ - if ts.ParameterID == uuid.Nil { - ts.ParameterID = UnknownParameterID - } - if ts.UnitID == uuid.Nil { - ts.UnitID = UnknownUnitID - } - if ts.Type == "" { - ts.Type = StandardTimeseriesType - } - var tsNew Timeseries - err := q.db.GetContext(ctx, &tsNew, createTimeseries, ts.InstrumentID, ts.Name, ts.ParameterID, ts.UnitID, ts.Type) - return tsNew, err -} - -const updateTimeseries = ` - UPDATE timeseries SET name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 - WHERE id = $1 - RETURNING id -` - -func (q *Queries) UpdateTimeseries(ctx context.Context, ts Timeseries) (uuid.UUID, error) { - if ts.ParameterID == uuid.Nil { - ts.ParameterID = UnknownParameterID - } - if ts.UnitID == uuid.Nil { - ts.UnitID = UnknownUnitID - } - var tID uuid.UUID - err := q.db.GetContext(ctx, &tID, updateTimeseries, ts.ID, ts.Name, ts.InstrumentID, ts.ParameterID, ts.UnitID) - return tID, err -} - -const deleteTimeseries = ` - DELETE FROM timeseries WHERE id = $1 -` - -func (q *Queries) DeleteTimeseries(ctx context.Context, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteTimeseries, timeseriesID) - return err -} diff --git a/api/internal/model/timeseries_calculated.go b/api/internal/model/timeseries_calculated.go deleted file mode 100644 index 3344117d..00000000 --- a/api/internal/model/timeseries_calculated.go +++ /dev/null @@ -1,140 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -type CalculatedTimeseries struct { - ID uuid.UUID `json:"id" db:"id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` - UnitID uuid.UUID `json:"unit_id" db:"unit_id"` - Slug string `json:"slug" db:"slug"` - FormulaName string `json:"formula_name" db:"formula_name"` - Formula string `json:"formula" db:"formula"` -} - -const listCalculatedTimeseriesSQL = ` - SELECT - id, - instrument_id, - parameter_id, - unit_id, - slug, - name AS formula_name, - 
COALESCE(contents, '') AS formula - FROM v_timeseries_computed -` - -const getAllCalculatedTimeseriesForInstrument = listCalculatedTimeseriesSQL + ` - WHERE instrument_id = $1 -` - -// GetAllCalculationsForInstrument returns all formulas associated to a given instrument ID. -func (q *Queries) GetAllCalculatedTimeseriesForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]CalculatedTimeseries, error) { - cc := make([]CalculatedTimeseries, 0) - if err := q.db.SelectContext(ctx, &cc, getAllCalculatedTimeseriesForInstrument, instrumentID); err != nil { - return nil, err - } - return cc, nil -} - -const createCalculatedTimeseries = ` - INSERT INTO timeseries ( - instrument_id, - parameter_id, - unit_id, - slug, - name, - type - ) VALUES ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') - RETURNING id -` - -func (q *Queries) CreateCalculatedTimeseries(ctx context.Context, cc CalculatedTimeseries) (uuid.UUID, error) { - if cc.ParameterID == uuid.Nil { - cc.ParameterID = UnknownParameterID - } - if cc.UnitID == uuid.Nil { - cc.UnitID = UnknownUnitID - } - var tsID uuid.UUID - err := q.db.GetContext(ctx, &tsID, createCalculatedTimeseries, &cc.InstrumentID, &cc.ParameterID, &cc.UnitID, &cc.FormulaName) - return tsID, err -} - -const createCalculation = ` - INSERT INTO calculation (timeseries_id, contents) VALUES ($1,$2) -` - -func (q *Queries) CreateCalculation(ctx context.Context, timeseriesID uuid.UUID, contents string) error { - _, err := q.db.ExecContext(ctx, createCalculation, timeseriesID, contents) - return err -} - -const getOneCalculation = listCalculatedTimeseriesSQL + ` - WHERE id = $1 -` - -func (q *Queries) GetOneCalculation(ctx context.Context, calculationID *uuid.UUID) (CalculatedTimeseries, error) { - var defaultCc CalculatedTimeseries - err := q.db.GetContext(ctx, &defaultCc, getOneCalculation, calculationID) - return defaultCc, err -} - -const createOrUpdateCalculation = ` - INSERT INTO calculation (timeseries_id, contents) VALUES ($1, $2) - 
ON CONFLICT (timeseries_id) DO UPDATE SET contents = COALESCE(EXCLUDED.contents, $3) -` - -func (q *Queries) CreateOrUpdateCalculation(ctx context.Context, timeseriesID uuid.UUID, formula, defaultFormula string) error { - _, err := q.db.ExecContext(ctx, createOrUpdateCalculation, timeseriesID, formula, defaultFormula) - return err -} - -const deleteCalculatedTimeseries = ` - DELETE FROM timeseries WHERE id = $1 AND id IN (SELECT timeseries_id FROM calculation) -` - -func (q *Queries) DeleteCalculatedTimeseries(ctx context.Context, calculationID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteCalculatedTimeseries, calculationID) - return err -} - -const createOrUpdateCalculatedTimeseries = ` - INSERT INTO timeseries ( - id, - instrument_id, - parameter_id, - unit_id, - slug, - name, - type - ) VALUES ($1, $2, $3, $4, slugify($5, 'timeseries'), $5, 'computed') - ON CONFLICT (id) DO UPDATE SET - instrument_id = COALESCE(EXCLUDED.instrument_id, $6), - parameter_id = COALESCE(EXCLUDED.parameter_id, $7), - unit_id = COALESCE(EXCLUDED.unit_id, $8), - slug = COALESCE(EXCLUDED.slug, slugify($9, 'timeseries')), - name = COALESCE(EXCLUDED.name, $9), - type = 'computed' -` - -func (q *Queries) CreateOrUpdateCalculatedTimeseries(ctx context.Context, cc CalculatedTimeseries, defaultCc CalculatedTimeseries) error { - if _, err := q.db.ExecContext(ctx, createOrUpdateCalculatedTimeseries, - cc.ID, - cc.InstrumentID, - cc.ParameterID, - cc.UnitID, - cc.FormulaName, - defaultCc.InstrumentID, - defaultCc.ParameterID, - defaultCc.UnitID, - defaultCc.FormulaName, - ); err != nil { - return err - } - return nil -} diff --git a/api/internal/model/timeseries_cwms.go b/api/internal/model/timeseries_cwms.go deleted file mode 100644 index 84072142..00000000 --- a/api/internal/model/timeseries_cwms.go +++ /dev/null @@ -1,66 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type TimeseriesCwms struct { - Timeseries - CwmsTimeseriesID string 
`json:"cwms_timeseries_id" db:"cwms_timeseries_id"` - CwmsOfficeID string `json:"cwms_office_id" db:"cwms_office_id"` - CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time" db:"cwms_extent_earliest_time"` - CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time" db:"cwms_extent_latest_time"` -} - -const listTimeseriesCwms = ` - SELECT * FROM v_timeseries_cwms - WHERE instrument_id = $1 -` - -func (q *Queries) ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]TimeseriesCwms, error) { - tss := make([]TimeseriesCwms, 0) - err := q.db.SelectContext(ctx, &tss, listTimeseriesCwms, instrumentID) - return tss, err -} - -const getTimeseriesCwms = ` - SELECT * FROM v_timeseries_cwms - WHERE id = $1 -` - -func (q *Queries) GetTimeseriesCwms(ctx context.Context, timeseriesID uuid.UUID) (TimeseriesCwms, error) { - var t TimeseriesCwms - err := q.db.GetContext(ctx, &t, getTimeseriesCwms, timeseriesID) - return t, err -} - -const createTimeseriesCwms = ` - INSERT INTO timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) VALUES - ($1, $2, $3, $4, $5) -` - -func (q *Queries) CreateTimeseriesCwms(ctx context.Context, tsCwms TimeseriesCwms) error { - _, err := q.db.ExecContext(ctx, createTimeseriesCwms, - tsCwms.ID, tsCwms.CwmsTimeseriesID, tsCwms.CwmsOfficeID, tsCwms.CwmsExtentEarliestTime, tsCwms.CwmsExtentLatestTime, - ) - return err -} - -const updateTimeseriesCwms = ` - UPDATE timeseries_cwms SET - cwms_timeseries_id=$2, - cwms_office_id=$3, - cwms_extent_earliest_time=$4, - cwms_extent_latest_time=$5 - WHERE timeseries_id=$1 -` - -func (q *Queries) UpdateTimeseriesCwms(ctx context.Context, tsCwms TimeseriesCwms) error { - _, err := q.db.ExecContext(ctx, updateTimeseriesCwms, - tsCwms.ID, tsCwms.CwmsTimeseriesID, tsCwms.CwmsOfficeID, tsCwms.CwmsExtentEarliestTime, tsCwms.CwmsExtentLatestTime, - ) - return err -} diff --git a/api/internal/model/timeseries_process.go 
b/api/internal/model/timeseries_process.go deleted file mode 100644 index d2bd6e5f..00000000 --- a/api/internal/model/timeseries_process.go +++ /dev/null @@ -1,564 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "encoding/json" - "fmt" - "log" - "strconv" - "time" - - "github.com/Knetic/govaluate" - "github.com/google/uuid" - "github.com/jmoiron/sqlx/types" - "github.com/tidwall/btree" -) - -type ProcessTimeseriesInfo struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - Variable string `json:"variable" db:"variable"` - IsComputed bool `json:"is_computed" db:"is_computed"` - Formula *string `json:"formula" db:"formula"` -} - -type DBProcessTimeseries struct { - ProcessTimeseriesInfo - Measurements string `json:"measurements" db:"measurements"` - NextMeasurementLow *string `json:"next_measurement_low" db:"next_measurement_low"` - NextMeasurementHigh *string `json:"next_measurement_high" db:"next_measurement_high"` -} - -type ProcessTimeseries struct { - ProcessTimeseriesInfo - Measurements []ProcessMeasurement `json:"measurements" db:"measurements"` - NextMeasurementLow *ProcessMeasurement `json:"next_measurement_low" db:"next_measurement_low"` - NextMeasurementHigh *ProcessMeasurement `json:"next_measurement_high" db:"next_measurement_high"` - TimeWindow TimeWindow `json:"time_window"` -} - -type ProcessMeasurementCollection struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Items []ProcessMeasurement `json:"items"` -} - -type ProcessTimeseriesResponseCollection []ProcessTimeseries - -type ProcessMeasurement struct { - Time time.Time `json:"time"` - Value float64 `json:"value"` - Error string `json:"error,omitempty"` -} - -func (m ProcessMeasurement) Lean() map[time.Time]float64 { - return map[time.Time]float64{m.Time: m.Value} -} - -type ProcessInclinometerTimeseries struct { - ProcessTimeseriesInfo - Measurements 
[]ProcessInclinometerMeasurement `json:"measurements" db:"measurements"` - NextMeasurementLow *ProcessMeasurement `json:"next_measurement_low" db:"next_measurement_low"` - NextMeasurementHigh *ProcessMeasurement `json:"next_measurement_high" db:"next_measurement_high"` - TimeWindow TimeWindow `json:"time_window"` -} - -type ProcessInclinometerMeasurement struct { - Time time.Time `json:"time"` - Values types.JSONText `json:"values"` -} - -type ProcessInclinometerTimeseriesResponseCollection []ProcessInclinometerTimeseries - -func (m ProcessInclinometerMeasurement) InclinometerLean() map[time.Time]types.JSONText { - return map[time.Time]types.JSONText{m.Time: m.Values} -} - -// explorerResponseFactory returns the explorer-specific JSON response format -func explorerInclinometerResponseFactory(tt []ProcessInclinometerTimeseries) (map[uuid.UUID][]InclinometerMeasurementCollectionLean, error) { - response := make(map[uuid.UUID][]InclinometerMeasurementCollectionLean) - - for _, t := range tt { - if _, hasInstrument := response[t.InstrumentID]; !hasInstrument { - response[t.InstrumentID] = make([]InclinometerMeasurementCollectionLean, 0) - } - mcl := InclinometerMeasurementCollectionLean{ - TimeseriesID: t.TimeseriesID, - Items: make([]InclinometerMeasurementLean, len(t.Measurements)), - } - for idx, m := range t.Measurements { - mcl.Items[idx] = m.InclinometerLean() - } - response[t.InstrumentID] = append(response[t.InstrumentID], mcl) - } - - return response, nil -} - -// ProcessMeasurementFilter for conveniently passsing SQL query paramters to functions -type ProcessMeasurementFilter struct { - TimeseriesID *uuid.UUID `db:"timeseries_id"` - InstrumentID *uuid.UUID `db:"instrument_id"` - InstrumentGroupID *uuid.UUID `db:"instrument_group_id"` - InstrumentIDs []uuid.UUID `db:"instrument_ids"` - TimeseriesIDs []uuid.UUID `db:"timeseries_ids"` - After time.Time `db:"after"` - Before time.Time `db:"before"` -} - -// BTreeNode represents node for btree used for computing 
timeseries -type BTreeNode struct { - Key time.Time - Value map[string]interface{} -} - -func (mrc *ProcessTimeseriesResponseCollection) GroupByInstrument(threshold int) (map[uuid.UUID][]MeasurementCollectionLean, error) { - if len(*mrc) == 0 { - return make(map[uuid.UUID][]MeasurementCollectionLean), nil - } - - tmp := make(map[uuid.UUID]map[uuid.UUID][]MeasurementLean) - - for _, t := range *mrc { - if _, hasInstrument := tmp[t.InstrumentID]; !hasInstrument { - tmp[t.InstrumentID] = make(map[uuid.UUID][]MeasurementLean, 0) - } - if _, hasTimeseries := tmp[t.InstrumentID][t.TimeseriesID]; !hasTimeseries { - tmp[t.InstrumentID][t.TimeseriesID] = make([]MeasurementLean, 0) - } - for _, m := range t.Measurements { - tmp[t.InstrumentID][t.TimeseriesID] = append(tmp[t.InstrumentID][t.TimeseriesID], MeasurementLean{m.Time: m.Value}) - } - } - - res := make(map[uuid.UUID][]MeasurementCollectionLean) - - for instrumentID := range tmp { - res[instrumentID] = make([]MeasurementCollectionLean, 0) - - for tsID := range tmp[instrumentID] { - res[instrumentID] = append(res[instrumentID], - MeasurementCollectionLean{ - TimeseriesID: tsID, - Items: LTTB(tmp[instrumentID][tsID], threshold), - }, - ) - } - } - - return res, nil -} - -func (mrc *ProcessInclinometerTimeseriesResponseCollection) GroupByInstrument() (map[uuid.UUID][]InclinometerMeasurementCollectionLean, error) { - if len(*mrc) == 0 { - return make(map[uuid.UUID][]InclinometerMeasurementCollectionLean), sql.ErrNoRows - } - - res := make(map[uuid.UUID][]InclinometerMeasurementCollectionLean) - - for _, t := range *mrc { - if _, hasInstrument := res[t.InstrumentID]; !hasInstrument { - res[t.InstrumentID] = make([]InclinometerMeasurementCollectionLean, 0) - } - mcl := InclinometerMeasurementCollectionLean{ - TimeseriesID: t.TimeseriesID, - Items: make([]InclinometerMeasurementLean, len(t.Measurements)), - } - for idx, m := range t.Measurements { - mcl.Items[idx] = m.InclinometerLean() - } - res[t.InstrumentID] = 
append(res[t.InstrumentID], mcl) - } - return res, nil -} - -func (mrc *ProcessTimeseriesResponseCollection) CollectSingleTimeseries(threshold int, tsID uuid.UUID) (MeasurementCollection, error) { - if len(*mrc) == 0 { - return MeasurementCollection{ - TimeseriesID: tsID, - Items: make([]Measurement, 0), - }, nil - } - - for _, t := range *mrc { - if t.TimeseriesID == tsID { - mmts := make([]Measurement, len(t.Measurements)) - for i, m := range t.Measurements { - mmts[i] = Measurement{ - TimeseriesID: t.TimeseriesID, - Time: m.Time, - Value: FloatNanInf(m.Value), - Error: m.Error, - } - } - return MeasurementCollection{TimeseriesID: t.TimeseriesID, Items: LTTB(mmts, threshold)}, nil - } - } - - return MeasurementCollection{}, fmt.Errorf("requested timeseries does not match any in the result") -} - -// SelectMeasurements returns measurements for the timeseries specified in the filter -func (q *Queries) SelectMeasurements(ctx context.Context, f ProcessMeasurementFilter) (ProcessTimeseriesResponseCollection, error) { - tss, err := queryTimeseriesMeasurements(ctx, q, f) - if err != nil { - return tss, err - } - tss, err = processLOCF(tss) - if err != nil { - return tss, err - } - return tss, nil -} - -// SelectInclinometerMeasurements returns inclinometer measurements for the instruments specified in the filter -func (q *Queries) SelectInclinometerMeasurements(ctx context.Context, f ProcessMeasurementFilter) (ProcessInclinometerTimeseriesResponseCollection, error) { - tss, err := queryInclinometerTimeseriesMeasurements(ctx, q, f) - if err != nil { - return tss, err - } - return tss, nil -} - -// collectAggregate creates a btree of all sorted times (key) and measurements (value; as variable map) from an array of Timeseries -func collectAggregate(tss *ProcessTimeseriesResponseCollection) *btree.BTreeG[BTreeNode] { - // Get unique set of all measurement times of timeseries dependencies for non-regularized values - btm := btree.NewBTreeG(func(a, b BTreeNode) bool { return 
a.Key.Before(b.Key) }) - for _, ts := range *tss { - if ts.NextMeasurementLow != nil { - if item, exists := btm.Get(BTreeNode{Key: ts.NextMeasurementLow.Time}); !exists { - btm.Set(BTreeNode{Key: ts.NextMeasurementLow.Time, Value: map[string]interface{}{ts.Variable: ts.NextMeasurementLow.Value}}) - } else { - item.Value[ts.Variable] = ts.NextMeasurementLow.Value - btm.Set(item) - } - } - for _, m := range ts.Measurements { - if item, exists := btm.Get(BTreeNode{Key: m.Time}); !exists { - btm.Set(BTreeNode{Key: m.Time, Value: map[string]interface{}{ts.Variable: m.Value}}) - } else { - item.Value[ts.Variable] = m.Value - btm.Set(item) - } - } - if ts.NextMeasurementHigh != nil { - if item, exists := btm.Get(BTreeNode{Key: ts.NextMeasurementHigh.Time}); !exists { - btm.Set(BTreeNode{Key: ts.NextMeasurementHigh.Time, Value: map[string]interface{}{ts.Variable: ts.NextMeasurementHigh.Value}}) - } else { - item.Value[ts.Variable] = ts.NextMeasurementHigh.Value - btm.Set(item) - } - } - } - return btm -} - -// processLOCF calculates computed timeseries using "Last-Observation-Carried-Forward" algorithm -func processLOCF(tss ProcessTimeseriesResponseCollection) (ProcessTimeseriesResponseCollection, error) { - tssFinal := make(ProcessTimeseriesResponseCollection, 0) - var variableMap *btree.BTreeG[BTreeNode] - // Check if any computed timeseries present, collect aggregates used for calculations if so - for _, ts := range tss { - if ts.IsComputed { - variableMap = collectAggregate(&tss) - break - } - } - // Add any stored timeseries to the result - // Do calculations for computed timeseries and add to result - for _, ts := range tss { - // Array of existing measurements - a1 := make([]ProcessMeasurement, 0) - if ts.NextMeasurementLow != nil { - a1 = append(a1, *ts.NextMeasurementLow) - } - a1 = append(a1, ts.Measurements...) 
- if ts.NextMeasurementHigh != nil { - a1 = append(a1, *ts.NextMeasurementHigh) - } - - // Could do some additional checks before adding, like if the - // timeseries was actual requested or if it was just in the result as a - // dependency of the computed timeseries, just returning them all for now - if !ts.IsComputed { - tssFinal = append(tssFinal, ProcessTimeseries{ - ProcessTimeseriesInfo: ts.ProcessTimeseriesInfo, - Measurements: a1, - TimeWindow: ts.TimeWindow, - }) - continue - } - - // By now, all of the stored timeseries have been processed; - // the query is ordered in a way that priortizes stored timeseries - expr, err := govaluate.NewEvaluableExpression(*ts.Formula) - if err != nil { - continue - } - - // Do calculations - remember := make(map[string]interface{}) - a2 := make([]ProcessMeasurement, 0) - - it := variableMap.Iter() - for it.Next() { - item := it.Item() - - // fill in any missing gaps of data - for k, v := range remember { - if _, exists := item.Value[k]; !exists { - item.Value[k] = v - } - } - // Add/Update the most recent values - for k, v := range item.Value { - remember[k] = v - } - - val, err := expr.Evaluate(item.Value) - if err != nil { - continue - } - val64, err := strconv.ParseFloat(fmt.Sprint(val), 64) - if err != nil { - continue - } - - a2 = append(a2, ProcessMeasurement{Time: item.Key, Value: val64}) - } - it.Release() - - tssFinal = append(tssFinal, ProcessTimeseries{ - ProcessTimeseriesInfo: ts.ProcessTimeseriesInfo, - Measurements: a2, - TimeWindow: ts.TimeWindow, - }) - } - - return tssFinal, nil -} - -// SelectTimeseriesMeasurements selects stored measurements and dependencies for computed measurements -func queryTimeseriesMeasurements(ctx context.Context, q *Queries, f ProcessMeasurementFilter) (ProcessTimeseriesResponseCollection, error) { - var filterSQL string - var filterArg interface{} - // short circuiting before executing SQL query greatly improves query perfomance, - // rather than adding all parameters to the 
query with logical OR - if f.TimeseriesID != nil { - filterSQL = `id = ?` - filterArg = f.TimeseriesID - } else if f.InstrumentID != nil { - filterSQL = `instrument_id = ?` - filterArg = f.InstrumentID - } else if f.InstrumentGroupID != nil { - filterSQL = ` - instrument_id IN ( - SELECT instrument_id - FROM instrument_group_instruments - WHERE instrument_group_id = ? - )` - filterArg = f.InstrumentGroupID - } else if len(f.InstrumentIDs) > 0 { - filterSQL = `instrument_id IN (?)` - filterArg = f.InstrumentIDs - } else if len(f.TimeseriesIDs) > 0 { - filterSQL = `id IN (?)` - filterArg = f.TimeseriesIDs - } else { - return nil, fmt.Errorf("must supply valid filter for timeseries_measurement query") - } - listTimeseriesMeasurments := ` - WITH required_timeseries AS ( - ( - SELECT id - FROM v_timeseries_stored - WHERE ` + filterSQL + ` - ) - UNION ALL - ( - SELECT dependency_timeseries_id AS id - FROM v_timeseries_dependency - WHERE ` + filterSQL + ` - ) - ), - next_low AS ( - SELECT nlm.timeseries_id AS timeseries_id, json_build_object('time', nlm.time, 'value', m1.value) AS measurement - FROM ( - SELECT timeseries_id, MAX(time) AS time - FROM timeseries_measurement - WHERE timeseries_id IN (SELECT id FROM required_timeseries) AND time < ? - GROUP BY timeseries_id - ) nlm - INNER JOIN timeseries_measurement m1 ON m1.time = nlm.time AND m1.timeseries_id = nlm.timeseries_id - ), - next_high AS ( - SELECT nhm.timeseries_id AS timeseries_id, json_build_object('time', nhm.time, 'value', m2.value) AS measurement - FROM ( - SELECT timeseries_id, MIN(time) AS time - FROM timeseries_measurement - WHERE timeseries_id IN (SELECT id FROM required_timeseries) AND time > ? - GROUP BY timeseries_id - ) nhm - INNER JOIN timeseries_measurement m2 ON m2.time = nhm.time AND m2.timeseries_id = nhm.timeseries_id - ) - ( - SELECT - rt.id AS timeseries_id, - ts.instrument_id AS instrument_id, - i.slug || '.' 
|| ts.slug AS variable, - false AS is_computed, - null AS formula, - COALESCE(( - SELECT json_agg(json_build_object('time', time, 'value', value) ORDER BY time ASC)::text - FROM timeseries_measurement - WHERE timeseries_id = rt.id AND time >= ? AND time <= ? - ), '[]') AS measurements, - nl.measurement::text AS next_measurement_low, - nh.measurement::text AS next_measurement_high - FROM required_timeseries rt - INNER JOIN timeseries ts ON ts.id = rt.id - INNER JOIN instrument i ON i.id = ts.instrument_id - LEFT JOIN next_low nl ON nl.timeseries_id = rt.id - LEFT JOIN next_high nh ON nh.timeseries_id = rt.id - ) - UNION ALL - ( - SELECT - id AS timeseries_id, - instrument_id AS instrument_id, - slug AS variable, - true AS is_computed, - contents AS formula, - '[]'::text AS measurements, - null AS next_measurement_low, - null AS next_measurement_high - FROM v_timeseries_computed - WHERE ` + filterSQL + ` AND contents IS NOT NULL - ) - ORDER BY is_computed - ` - query, args, err := sqlIn(listTimeseriesMeasurments, filterArg, filterArg, f.After, f.Before, f.After, f.Before, filterArg) - if err != nil { - return nil, err - } - query = q.db.Rebind(query) - tt := make([]DBProcessTimeseries, 0) - if err := q.db.SelectContext(ctx, &tt, query, args...); err != nil { - return make(ProcessTimeseriesResponseCollection, 0), err - } - tt2 := make(ProcessTimeseriesResponseCollection, len(tt)) - for idx, t := range tt { - tt2[idx] = ProcessTimeseries{ - ProcessTimeseriesInfo: t.ProcessTimeseriesInfo, - Measurements: make([]ProcessMeasurement, 0), - TimeWindow: TimeWindow{After: f.After, Before: f.Before}, - } - if err := json.Unmarshal([]byte(t.Measurements), &tt2[idx].Measurements); err != nil { - log.Println(err) - } - if t.NextMeasurementHigh != nil { - if err := json.Unmarshal([]byte(*t.NextMeasurementHigh), &tt2[idx].NextMeasurementHigh); err != nil { - log.Println(err) - } - } - if t.NextMeasurementLow != nil { - if err := json.Unmarshal([]byte(*t.NextMeasurementLow), 
&tt2[idx].NextMeasurementLow); err != nil { - log.Println(err) - } - } - } - return tt2, nil -} - -// ComputedInclinometerTimeseries returns computed and stored inclinometer timeseries for a specified array of instrument IDs -func queryInclinometerTimeseriesMeasurements(ctx context.Context, q *Queries, f ProcessMeasurementFilter) ([]ProcessInclinometerTimeseries, error) { - tt := make([]DBProcessTimeseries, 0) - listInclinometerTimeseriesMeasurements := ` - -- Get Timeseries and Dependencies for Calculations - -- timeseries required based on requested instrument - WITH requested_instruments AS ( - SELECT id - FROM instrument - WHERE id IN (?) - ), required_timeseries AS ( - -- Timeseries for Instrument - SELECT id FROM v_timeseries_stored WHERE instrument_id IN (SELECT id FROM requested_instruments) - UNION - -- Dependencies for Instrument Timeseries - SELECT dependency_timeseries_id AS id - FROM v_timeseries_dependency - WHERE instrument_id IN (SELECT id from requested_instruments) - ), - -- Measurements Within Time Window by timeseries_id; - measurements AS ( - SELECT timeseries_id, - json_agg(json_build_object('time', time, 'values', values) ORDER BY time ASC)::text AS measurements - FROM inclinometer_measurement - WHERE timeseries_id IN (SELECT id FROM required_timeseries) AND time >= ? AND time <= ? - GROUP BY timeseries_id - ) - -- Stored Timeseries - SELECT - rt.id AS timeseries_id, - ts.instrument_id AS instrument_id, - i.slug || '.' 
|| ts.slug AS variable, - false AS is_computed, - null AS formula, - COALESCE(m.measurements, '[]') AS measurements - FROM required_timeseries rt - INNER JOIN timeseries ts ON ts.id = rt.id - INNER JOIN instrument i ON i.id = ts.instrument_id AND i.id IN (SELECT id FROM requested_instruments) - LEFT JOIN measurements m ON m.timeseries_id = rt.id - UNION - -- Computed Timeseries - SELECT - cc.id AS timeseries_id, - cc.instrument_id AS instrument_id, - cc.name AS variable, - true AS is_computed, - cc.contents AS formula, - '[]'::text AS measurements - FROM v_timeseries_computed cc - WHERE cc.contents IS NOT NULL AND cc.instrument_id IN (SELECT id FROM requested_instruments) - ORDER BY is_computed - ` - - query, args, err := sqlIn(listInclinometerTimeseriesMeasurements, f.InstrumentIDs, f.After, f.Before) - if err != nil { - return make([]ProcessInclinometerTimeseries, 0), err - } - query = q.db.Rebind(query) - if err := q.db.Select(&tt, query, args...); err != nil { - return make([]ProcessInclinometerTimeseries, 0), err - } - - // Unmarshal JSON Strings - tt2 := make([]ProcessInclinometerTimeseries, len(tt)) - for idx, t := range tt { - tt2[idx] = ProcessInclinometerTimeseries{ - ProcessTimeseriesInfo: t.ProcessTimeseriesInfo, - Measurements: make([]ProcessInclinometerMeasurement, 0), - TimeWindow: TimeWindow{After: f.After, Before: f.Before}, - } - cm, err := q.GetTimeseriesConstantMeasurement(ctx, t.TimeseriesID, "inclinometer-constant") - if err != nil { - return nil, err - } - if err := json.Unmarshal([]byte(t.Measurements), &tt2[idx].Measurements); err != nil { - log.Println(err) - } - for i := range tt2[idx].Measurements { - values, err := q.ListInclinometerMeasurementValues(ctx, t.TimeseriesID, tt2[idx].Measurements[i].Time, float64(cm.Value)) - if err != nil { - return nil, err - } - - jsonValues, err := json.Marshal(values) - if err != nil { - return nil, err - } - tt2[idx].Measurements[i].Values = jsonValues - } - } - return tt2, nil -} diff --git 
a/api/internal/model/uploader.go b/api/internal/model/uploader.go deleted file mode 100644 index d461c08d..00000000 --- a/api/internal/model/uploader.go +++ /dev/null @@ -1,111 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -type UploaderConfigType string - -const ( - CSV, DUX, TOA5 UploaderConfigType = "csv", "dux", "toa5" -) - -type UploaderConfig struct { - ID uuid.UUID `json:"id" db:"id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Name string `json:"name" db:"name"` - Slug string `json:"slug" db:"slug"` - Description string `json:"description" db:"description"` - Type UploaderConfigType `json:"type" db:"type"` - TzName string `json:"tz_name" db:"tz_name"` - AuditInfo -} - -type UploaderConfigMapping struct { - UploaderConfigID uuid.UUID `json:"-" db:"uploader_config_id"` - FieldName string `json:"field_name" db:"field_name"` - TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` -} - -const listUploaderConfigsForProject = ` - SELECT * FROM uploader_config WHERE project_id=$1 -` - -func (q *Queries) ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) { - uu := make([]UploaderConfig, 0) - err := q.db.SelectContext(ctx, &uu, listUploaderConfigsForProject, projectID) - return uu, err -} - -const createUploaderConfig = ` - INSERT INTO uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) - VALUES ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) - RETURNING id -` - -func (q *Queries) CreateUploaderConfig(ctx context.Context, uc UploaderConfig) (uuid.UUID, error) { - var newID uuid.UUID - err := q.db.GetContext( - ctx, &newID, createUploaderConfig, - uc.ProjectID, uc.Name, uc.Description, uc.CreateDate, uc.CreatorID, uc.Type, uc.TzName, - ) - return newID, err -} - -const updateUploaderConfig = ` - UPDATE uploader_config SET - name=$2, - description=$3, - update_date=$4, - updater=$5, - type=$6, - tz_name=$7 - 
WHERE id=$1 -` - -func (q *Queries) UpdateUploaderConfig(ctx context.Context, uc UploaderConfig) error { - _, err := q.db.ExecContext( - ctx, updateUploaderConfig, - uc.ID, uc.Name, uc.Description, uc.UpdateDate, uc.UpdaterID, uc.Type, uc.TzName, - ) - return err -} - -const deleteUploaderConfig = ` - DELETE FROM uploader_config WHERE id=$1 -` - -func (q *Queries) DeleteUploaderConfig(ctx context.Context, ucID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteUploaderConfig, ucID) - return err -} - -const listUploaderConfigMappings = ` - SELECT * FROM uploader_config_mapping WHERE uploader_config_id=$1 -` - -func (q *Queries) ListUploaderConfigMappings(ctx context.Context, ucID uuid.UUID) ([]UploaderConfigMapping, error) { - mm := make([]UploaderConfigMapping, 0) - err := q.db.SelectContext(ctx, &mm, listUploaderConfigMappings, ucID) - return mm, err -} - -const createUploaderConfigMapping = ` - INSERT INTO uploader_config_mapping (uploader_config_id, field_name, timeseries_id) VALUES ($1, $2, $3) -` - -func (q *Queries) CreateUploaderConfigMapping(ctx context.Context, m UploaderConfigMapping) error { - _, err := q.db.ExecContext(ctx, createUploaderConfigMapping, m.UploaderConfigID, m.FieldName, m.TimeseriesID) - return err -} - -const deleteAllUploaderConfigMappingsForUploaderConfig = ` - DELETE FROM uploader_config_mapping WHERE uploader_config_id=$1 -` - -func (q *Queries) DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, ucID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAllUploaderConfigMappingsForUploaderConfig, ucID) - return err -} diff --git a/api/internal/server/api.go b/api/internal/server/api.go index 4c3ce31c..654c323c 100644 --- a/api/internal/server/api.go +++ b/api/internal/server/api.go @@ -96,7 +96,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.POST("/my_alerts/:alert_id/unread", h.DoAlertUnread) //AlertConfig - r.public.GET("/projects/:project_id/alert_configs", 
h.GetAllAlertConfigsForProject) + r.public.GET("/projects/:project_id/alert_configs", h.ListAlertConfigsForProject) r.public.GET("/projects/:project_id/instruments/:instrument_id/alert_configs", h.ListInstrumentAlertConfigs) r.public.GET("/projects/:project_id/alert_configs/:alert_config_id", h.GetAlertConfig) r.private.POST("/projects/:project_id/alert_configs", h.CreateAlertConfig) @@ -141,7 +141,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.public.GET("/projects/:project_id/district_rollup/measurement_submittals", h.ListProjectMeasurementDistrictRollup) // Domain - r.public.GET("/domains", h.GetDomains) + r.public.GET("/domains", h.ListDomains) r.public.GET("/domains/map", h.GetDomainMap) r.public.GET("/domains/timezones", h.ListTimezoneOptions) @@ -163,7 +163,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // Explorer r.public.POST("/explorer", h.ListTimeseriesMeasurementsExplorer) - r.public.POST("/inclinometer_explorer", h.ListInclinometerTimeseriesMeasurementsExplorer) // Heartbeat r.public.GET("/health", h.Healthcheck) @@ -209,7 +208,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.DELETE("/instrument_groups/:instrument_group_id/instruments/:instrument_id", h.DeleteInstrumentGroupInstruments) // InstrumentNote - r.public.GET("/instruments/notes", h.ListInstrumentNotes) r.public.GET("/instruments/notes/:note_id", h.GetInstrumentNote) r.public.GET("/instruments/:instrument_id/notes", h.ListInstrumentInstrumentNotes) r.public.GET("/instruments/:instrument_id/notes/:note_id", h.GetInstrumentNote) @@ -226,7 +224,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.DELETE("/instruments/:instrument_id/status/:status_id", h.DeleteInstrumentStatus) // IpiInstruemnt - r.public.GET("/instruments/ipi/:instrument_id/segments", h.GetAllIpiSegmentsForInstrument) + r.public.GET("/instruments/ipi/:instrument_id/segments", h.ListIpiSegmentsForInstrument) 
r.public.GET("/instruments/ipi/:instrument_id/measurements", h.GetIpiMeasurementsForInstrument) r.private.PUT("/instruments/ipi/:instrument_id/segments", h.UpdateIpiSegments) @@ -237,9 +235,9 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.DELETE("/timeseries/:timeseries_id/measurements", h.DeleteTimeserieMeasurements) // InclinometerMeasurement - r.public.GET("/timeseries/:timeseries_id/inclinometer_measurements", h.ListInclinometerMeasurements) - r.private.POST("/projects/:project_id/inclinometer_measurements", h.CreateOrUpdateProjectInclinometerMeasurements) - r.private.DELETE("/timeseries/:timeseries_id/inclinometer_measurements", h.DeleteInclinometerMeasurements) + r.public.GET("/instruments/incl/:instrument_id/segments", h.ListInclSegmentsForInstrument) + r.public.GET("/instruments/incl/:instrument_id/measurements", h.GetInclMeasurementsForInstrument) + r.private.PUT("/instruments/incl/:instrument_id/segments", h.UpdateInclSegments) // Media r.public.GET("/projects/:project_slug/images/*", h.GetMedia) @@ -310,10 +308,10 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.GET("/projects/:project_id/report_configs/:report_config_id/jobs/:job_id/downloads", h.DownloadReport) // Search - r.public.GET("/search/:entity", h.Search) + r.public.GET("/search/:entity", h.ProjectSearch) // SaaInstrument - r.public.GET("/instruments/saa/:instrument_id/segments", h.GetAllSaaSegmentsForInstrument) + r.public.GET("/instruments/saa/:instrument_id/segments", h.ListSaaSegmentsForInstrument) r.public.GET("/instruments/saa/:instrument_id/measurements", h.GetSaaMeasurementsForInstrument) r.private.PUT("/instruments/saa/:instrument_id/segments", h.UpdateSaaSegments) diff --git a/api/internal/service/alert.go b/api/internal/service/alert.go index 04d72cb5..35fa68aa 100644 --- a/api/internal/service/alert.go +++ b/api/internal/service/alert.go @@ -3,88 +3,60 @@ package service import ( "context" - 
"github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/google/uuid" ) -type AlertService interface { - CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error - GetAllAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]model.Alert, error) - GetAllAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.Alert, error) - GetAllAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]model.Alert, error) - GetOneAlertForProfile(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) - DoAlertRead(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) - DoAlertUnread(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) +func (s DBService) AlertCreateBatch(ctx context.Context, alertConfigIDs []uuid.UUID) error { + var err error + s.Queries.AlertCreateBatch(ctx, alertConfigIDs).Exec(batchExecErr(&err)) + return err } -type alertService struct { - db *model.Database - *model.Queries -} - -func NewAlertService(db *model.Database, q *model.Queries) *alertService { - return &alertService{db, q} -} - -// Create creates one or more new alerts -func (s alertService) CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - for _, id := range alertConfigIDs { - if err := qtx.CreateAlerts(ctx, id); err != nil { - return err - } - } - return tx.Commit() -} - -// DoAlertRead marks an alert as read for a profile -func (s alertService) DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) (model.Alert, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertReadCreate(ctx context.Context, arg db.AlertReadCreateParams) (db.AlertGetRow, error) { + var a db.AlertGetRow + tx, err := s.db.Begin(ctx) if err != nil { - return 
model.Alert{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.DoAlertRead(ctx, profileID, alertID); err != nil { - return model.Alert{}, err + if err := qtx.AlertReadCreate(ctx, arg); err != nil { + return a, err } - b, err := qtx.GetOneAlertForProfile(ctx, profileID, alertID) + a, err = qtx.AlertGet(ctx, db.AlertGetParams{ + ProfileID: arg.ProfileID, + ID: arg.AlertID, + }) if err != nil { - return model.Alert{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.Alert{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - - return b, nil + return a, nil } -// DoAlertUnread marks an alert as unread for a profile -func (s alertService) DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) (model.Alert, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertReadDelete(ctx context.Context, arg db.AlertReadDeleteParams) (db.AlertGetRow, error) { + var a db.AlertGetRow + tx, err := s.db.Begin(ctx) if err != nil { - return model.Alert{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.DoAlertUnread(ctx, profileID, alertID); err != nil { - return model.Alert{}, err + if err := qtx.AlertReadDelete(ctx, arg); err != nil { + return a, err } - a, err := qtx.GetOneAlertForProfile(ctx, profileID, alertID) + a, err = qtx.AlertGet(ctx, db.AlertGetParams{ + ProfileID: arg.ProfileID, + ID: arg.AlertID, + }) if err != nil { - return model.Alert{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.Alert{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return a, nil } diff --git a/api/internal/service/alert_check.go b/api/internal/service/alert_check.go index a30c8488..9effd166 100644 --- a/api/internal/service/alert_check.go +++ b/api/internal/service/alert_check.go @@ -9,11 +9,14 @@ import ( "time" 
"github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/email" "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" ) +// TODO: refactor this to work with a task scheduler like airflow, or possibly gocron + var ( GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") @@ -29,54 +32,38 @@ const ( reminder = "Reminder" ) -type AlertCheckService interface { - DoAlertChecks(ctx context.Context) error -} +type alertConfigMap map[uuid.UUID]db.VAlertConfig + +type submittalMap map[uuid.UUID]db.VSubmittal type alertConfigChecker[T alertChecker] interface { - GetAlertConfig() model.AlertConfig - SetAlertConfig(model.AlertConfig) + GetAlertConfig() db.VAlertConfig + SetAlertConfig(ac db.VAlertConfig) GetChecks() []T - SetChecks([]T) - DoEmail(string, config.AlertCheckConfig) error + SetChecks(checks []T) + DoEmail(content string, cfg *config.AlertCheckConfig) error } type alertChecker interface { GetShouldWarn() bool GetShouldAlert() bool GetShouldRemind() bool - GetSubmittal() model.Submittal - SetSubmittal(model.Submittal) -} - -type alertCheckService struct { - db *model.Database - *model.Queries - cfg *config.AlertCheckConfig + GetSubmittal() *db.VSubmittal + SetSubmittal(sub db.VSubmittal) } -func NewAlertCheckService(db *model.Database, q *model.Queries, cfg *config.AlertCheckConfig) *alertCheckService { - return &alertCheckService{db, q, cfg} -} - -func (s alertCheckService) DoAlertChecks(ctx context.Context) error { - if s.cfg == nil { - return fmt.Errorf("missing config") - } - - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DoAlertChecks(ctx context.Context, cfg *config.AlertCheckConfig) error { + tx, err := s.db.Begin(ctx) if err != 
nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - subs, err := qtx.ListUnverifiedMissingSubmittals(ctx) + subs, err := qtx.SubmittalListUnverifiedMissing(ctx) if err != nil { return err } - acs, err := qtx.ListAndCheckAlertConfigs(ctx) + acs, err := qtx.AlertConfigListUpdateLastChecked(ctx) if err != nil { return err } @@ -85,25 +72,25 @@ func (s alertCheckService) DoAlertChecks(ctx context.Context) error { return nil } - subMap := make(map[uuid.UUID]model.Submittal) + subMap := make(map[uuid.UUID]db.VSubmittal) for _, s := range subs { subMap[s.ID] = s } - acMap := make(map[uuid.UUID]model.AlertConfig) + acMap := make(map[uuid.UUID]db.VAlertConfig) for _, a := range acs { acMap[a.ID] = a } errs := make([]error, 0) - if err := checkMeasurements(ctx, qtx, subMap, acMap, *s.cfg); err != nil { + if err := checkMeasurements(ctx, qtx, subMap, acMap, cfg); err != nil { errs = append(errs, err) } - if err := checkEvaluations(ctx, qtx, subMap, acMap, *s.cfg); err != nil { + if err := checkEvaluations(ctx, qtx, subMap, acMap, cfg); err != nil { errs = append(errs, err) } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { errs = append(errs, err) } @@ -114,29 +101,38 @@ func (s alertCheckService) DoAlertChecks(ctx context.Context) error { return nil } -func checkEvaluations(ctx context.Context, q *model.Queries, subMap model.SubmittalMap, acMap model.AlertConfigMap, cfg config.AlertCheckConfig) error { - accs := make([]*model.AlertConfigEvaluationCheck, 0) - ecs, err := q.GetAllIncompleteEvaluationSubmittals(ctx) +func checkEvaluations(ctx context.Context, q *db.Queries, subMap submittalMap, acMap alertConfigMap, cfg *config.AlertCheckConfig) error { + accs := make([]*AlertConfigEvaluationCheck, 0) + ecs, err := q.SubmittalListIncompleteEvaluation(ctx) if err != nil { return err } - ecMap := make(map[uuid.UUID][]*model.EvaluationCheck) + ecMap := make(map[uuid.UUID][]*EvaluationCheck) for k 
:= range acMap { - ecMap[k] = make([]*model.EvaluationCheck, 0) + ecMap[k] = make([]*EvaluationCheck, 0) } for idx := range ecs { + ck := ecs[idx] + check := EvaluationCheck{ + AlertCheck: AlertCheck{ + AlertConfigID: ck.AlertConfigID, + SubmittalID: ck.SubmittalID, + ShouldWarn: ck.ShouldWarn, + ShouldAlert: ck.ShouldAlert, + ShouldRemind: ck.ShouldRemind, + }} if sub, ok := subMap[ecs[idx].SubmittalID]; ok { - ecs[idx].Submittal = sub - ecMap[ecs[idx].AlertConfigID] = append(ecMap[ecs[idx].AlertConfigID], ecs[idx]) + ecs[idx].Submittal = &sub + ecMap[ecs[idx].AlertConfigID] = append(ecMap[ecs[idx].AlertConfigID], &check) } } for k, v := range acMap { if v.AlertTypeID != EvaluationSubmittalAlertTypeID { continue } - acc := model.AlertConfigEvaluationCheck{ - AlertConfig: v, + acc := AlertConfigEvaluationCheck{ + AlertConfig: AlertConfig(v), AlertChecks: ecMap[k], } accs = append(accs, &acc) @@ -151,22 +147,31 @@ func checkEvaluations(ctx context.Context, q *model.Queries, subMap model.Submit return nil } -func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.SubmittalMap, acMap model.AlertConfigMap, cfg config.AlertCheckConfig) error { - accs := make([]*model.AlertConfigMeasurementCheck, 0) - mcs, err := q.GetAllIncompleteMeasurementSubmittals(ctx) +func checkMeasurements(ctx context.Context, q *db.Queries, subMap submittalMap, acMap alertConfigMap, cfg *config.AlertCheckConfig) error { + accs := make([]*AlertConfigMeasurementCheck, 0) + mcs, err := q.SubmittalListIncompleteMeasurement(ctx) if err != nil { return err } - mcMap := make(map[uuid.UUID][]*model.MeasurementCheck) + mcMap := make(map[uuid.UUID][]*MeasurementCheck) for k := range acMap { - mcMap[k] = make([]*model.MeasurementCheck, 0) + mcMap[k] = make([]*MeasurementCheck, 0) } for idx := range mcs { if sub, ok := subMap[mcs[idx].SubmittalID]; ok { - mcs[idx].Submittal = sub - mcMap[mcs[idx].AlertConfigID] = append(mcMap[mcs[idx].AlertConfigID], mcs[idx]) + ck := mcs[idx] + check := 
MeasurementCheck{ + AlertCheck: AlertCheck{ + AlertConfigID: ck.AlertConfigID, + SubmittalID: ck.SubmittalID, + ShouldWarn: ck.ShouldWarn, + ShouldAlert: ck.ShouldAlert, + ShouldRemind: ck.ShouldRemind, + }} + mcs[idx].Submittal = &sub + mcMap[mcs[idx].AlertConfigID] = append(mcMap[mcs[idx].AlertConfigID], &check) } } @@ -174,8 +179,8 @@ func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.Submi if v.AlertTypeID != MeasurementSubmittalAlertTypeID { continue } - acc := model.AlertConfigMeasurementCheck{ - AlertConfig: v, + acc := AlertConfigMeasurementCheck{ + AlertConfig: AlertConfig(v), AlertChecks: mcMap[k], } accs = append(accs, &acc) @@ -189,21 +194,35 @@ func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.Submi return nil } -func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *model.Queries, accs []PT) error { +func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT) error { for _, acc := range accs { ac := acc.GetAlertConfig() - if err := q.UpdateAlertConfigLastReminded(ctx, ac); err != nil { + if err := q.AlertConfigUpdateLastReminded(ctx, db.AlertConfigUpdateLastRemindedParams{ + ID: ac.ID, + LastReminded: ac.LastReminded, + }); err != nil { return err } checks := acc.GetChecks() for _, c := range checks { sub := c.GetSubmittal() - if err := q.UpdateSubmittalCompletionDateOrWarningSent(ctx, sub); err != nil { + if sub == nil { + continue + } + if err := q.SubmittalUpdateCompletionDateOrWarningSent(ctx, db.SubmittalUpdateCompletionDateOrWarningSentParams{ + ID: sub.ID, + SubmittalStatusID: &sub.SubmittalStatusID, + CompletionDate: sub.CompletionDate, + WarningSent: sub.WarningSent, + }); err != nil { return err } } if ac.CreateNextSubmittalFrom != nil { - if err := q.CreateNextSubmittalFromNewAlertConfigDate(ctx, ac); err != nil { + if err := q.SubmittalCreateNextFromNewAlertConfigDate(ctx, 
db.SubmittalCreateNextFromNewAlertConfigDateParams{ + ID: ac.ID, + Date: *ac.CreateNextSubmittalFrom, + }); err != nil { return err } } @@ -224,7 +243,7 @@ func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx conte // TODO: smtp.SendMail esablishes a new connection for each batch of emails sent. I would be better to aggregate // the contents of each email, then create a connection pool to reuse and send all emails at once, with any errors wrapped and returned // p.s. Dear future me/someone else: I'm sorry -func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *model.Queries, accs []PT, cfg config.AlertCheckConfig) error { +func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT, cfg *config.AlertCheckConfig) error { defer util.Timer()() mu := &sync.Mutex{} @@ -307,7 +326,11 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, acReminder = true } - c.SetSubmittal(sub) + if sub == nil { + continue + } + + c.SetSubmittal(*sub) checks[j] = c } @@ -360,3 +383,157 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, return nil } + +type AlertCheck struct { + AlertConfigID uuid.UUID + SubmittalID uuid.UUID + ShouldWarn bool + ShouldAlert bool + ShouldRemind bool + Submittal *db.VSubmittal +} + +func (ck AlertCheck) GetShouldWarn() bool { + return ck.ShouldWarn +} + +func (ck AlertCheck) GetShouldAlert() bool { + return ck.ShouldAlert +} + +func (ck AlertCheck) GetShouldRemind() bool { + return ck.ShouldRemind +} + +func (ck AlertCheck) GetSubmittal() *db.VSubmittal { + return ck.Submittal +} + +func (ck *AlertCheck) SetSubmittal(sub db.VSubmittal) { + ck.Submittal = &sub +} + +type AlertConfig db.VAlertConfig + +func (a *AlertConfig) GetToAddresses() []string { + emails := make([]string, len(a.AlertEmailSubscriptions)) + for idx := range a.AlertEmailSubscriptions { + emails[idx] = 
a.AlertEmailSubscriptions[idx].Email + } + return emails +} + +type AlertConfigEvaluationCheck struct { + AlertConfig + AlertChecks []*EvaluationCheck +} + +type EvaluationCheck struct { + AlertCheck +} + +func (a AlertConfigEvaluationCheck) GetAlertConfig() db.VAlertConfig { + return db.VAlertConfig(a.AlertConfig) +} + +func (a *AlertConfigEvaluationCheck) SetAlertConfig(ac db.VAlertConfig) { + a.AlertConfig = AlertConfig(ac) +} + +func (a AlertConfigEvaluationCheck) GetChecks() []*EvaluationCheck { + return a.AlertChecks +} + +func (a *AlertConfigEvaluationCheck) SetChecks(ec []*EvaluationCheck) { + a.AlertChecks = ec +} + +func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg *config.AlertCheckConfig) error { + if emailType == "" { + return fmt.Errorf("must provide emailType") + } + preformatted := email.EmailContent{ + TextSubject: "-- DO NOT REPLY -- MIDAS " + emailType + ": Evaluation Submittal", + TextBody: "The following " + emailType + " has been triggered:\r\n\r\n" + + "Project: {{.AlertConfig.ProjectName}}\r\n" + + "Alert Type: Evaluation Submittal\r\n" + + "Alert Name: \"{{.AlertConfig.Name}}\"\r\n" + + "Description: \"{{.AlertConfig.Body}}\"\r\n" + + "Expected Evaluation Submittals:\r\n" + + "{{range .AlertChecks}}{{if or .ShouldAlert .ShouldWarn}}" + + "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + + "{{if .ShouldAlert}} (missing) {{else if .ShouldWarn}} (warning) {{end}}\r\n{{end}}{{end}}", + } + templContent, err := email.CreateEmailTemplateContent(preformatted) + if err != nil { + return err + } + content, err := email.FormatAlertConfigTemplates(templContent, acc) + if err != nil { + return err + } + content.To = acc.AlertConfig.GetToAddresses() + if err := email.ConstructAndSendEmail(content, cfg); err != nil { + return err + } + return nil +} + +type AlertConfigMeasurementCheck struct { + AlertConfig AlertConfig + AlertChecks []*MeasurementCheck +} + 
+type MeasurementCheck struct { + AlertCheck + AffectedTimeseries []db.AlertCheckMeasurementSubmittalAffectedTimeseries +} + +func (a AlertConfigMeasurementCheck) GetAlertConfig() db.VAlertConfig { + return db.VAlertConfig(a.AlertConfig) +} + +func (a *AlertConfigMeasurementCheck) SetAlertConfig(ac db.VAlertConfig) { + a.AlertConfig = AlertConfig(ac) +} + +func (a AlertConfigMeasurementCheck) GetChecks() []*MeasurementCheck { + return a.AlertChecks +} + +func (a *AlertConfigMeasurementCheck) SetChecks(mc []*MeasurementCheck) { + a.AlertChecks = mc +} + +func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg *config.AlertCheckConfig) error { + if emailType == "" { + return fmt.Errorf("must provide emailType") + } + preformatted := email.EmailContent{ + TextSubject: "-- DO NOT REPLY -- MIDAS " + emailType + ": Timeseries Measurement Submittal", + TextBody: "The following " + emailType + " has been triggered:\r\n\r\n" + + "Project: {{.AlertConfig.ProjectName}}\r\n" + + "Alert Type: Measurement Submittal\r\n" + + "Alert Name: \"{{.AlertConfig.Name}}\"\r\n" + + "Description: \"{{.AlertConfig.Body}}\"\r\n" + + "Expected Measurement Submittals:\r\n" + + "{{range .AlertChecks}}" + + "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + + "{{range .AffectedTimeseries}}" + + "\t\t• {{.InstrumentName}}: {{.TimeseriesName}} ({{.Status}})\r\n" + + "{{end}}\r\n{{end}}", + } + templContent, err := email.CreateEmailTemplateContent(preformatted) + if err != nil { + return err + } + content, err := email.FormatAlertConfigTemplates(templContent, ms) + if err != nil { + return err + } + content.To = ms.AlertConfig.GetToAddresses() + if err := email.ConstructAndSendEmail(content, cfg); err != nil { + return err + } + return nil +} diff --git a/api/internal/service/alert_config.go b/api/internal/service/alert_config.go index f0799794..5ed5021c 100644 --- a/api/internal/service/alert_config.go 
+++ b/api/internal/service/alert_config.go @@ -3,37 +3,18 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type AlertConfigService interface { - GetAllAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]model.AlertConfig, error) - GetAllAlertConfigsForProjectAndAlertType(ctx context.Context, projectID, alertTypeID uuid.UUID) ([]model.AlertConfig, error) - GetAllAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.AlertConfig, error) - GetOneAlertConfig(ctx context.Context, alertConfigID uuid.UUID) (model.AlertConfig, error) - CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (model.AlertConfig, error) - UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (model.AlertConfig, error) - DeleteAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error -} - -type alertConfigService struct { - db *model.Database - *model.Queries -} - -func NewAlertConfigService(db *model.Database, q *model.Queries) *alertConfigService { - return &alertConfigService{db, q} -} - -// CreateAlertConfig creates one new alert configuration -func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertConfigCreate(ctx context.Context, ac dto.AlertConfig) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) if ac.RemindInterval == "" { ac.RemindInterval = "PT0" @@ -44,13 +25,28 @@ func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.Aler qtx := s.WithTx(tx) - acID, err := qtx.CreateAlertConfig(ctx, ac) + acID, err := 
qtx.AlertConfigCreate(ctx, db.AlertConfigCreateParams{ + ProjectID: ac.ProjectID, + Name: ac.Name, + Body: ac.Body, + AlertTypeID: ac.AlertTypeID, + StartDate: ac.StartDate, + ScheduleInterval: ac.ScheduleInterval, + MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, + RemindInterval: ac.RemindInterval, + WarningInterval: ac.WarningInterval, + Creator: ac.CreatorID, + CreateDate: ac.CreateDate, + }) if err != nil { return a, err } for _, aci := range ac.Instruments { - if err := qtx.AssignInstrumentToAlertConfig(ctx, acID, aci.InstrumentID); err != nil { + if err := qtx.AlertConfigInstrumentCreateAssignment(ctx, db.AlertConfigInstrumentCreateAssignmentParams{ + AlertConfigID: acID, + InstrumentID: aci.InstrumentID, + }); err != nil { return a, err } } @@ -59,29 +55,29 @@ func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.Aler return a, err } - if err := qtx.CreateNextSubmittalFromExistingAlertConfigDate(ctx, acID); err != nil { + if err := qtx.SubmittalCreateNextFromExistingAlertConfigDate(ctx, acID); err != nil { return a, err } - acNew, err := qtx.GetOneAlertConfig(ctx, acID) + acNew, err := qtx.AlertConfigGet(ctx, acID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } return acNew, nil } -// UpdateAlertConfig updates an alert config -func (s alertConfigService) UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (model.AlertConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertConfigUpdate(ctx context.Context, alertConfigID uuid.UUID, ac dto.AlertConfig) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { - return model.AlertConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) if ac.RemindInterval == "" { ac.RemindInterval = "PT0" @@ -92,39 +88,54 @@ func (s alertConfigService) UpdateAlertConfig(ctx context.Context, 
alertConfigID qtx := s.WithTx(tx) - if err := qtx.UpdateAlertConfig(ctx, ac); err != nil { - return model.AlertConfig{}, err + if err := qtx.AlertConfigUpdate(ctx, db.AlertConfigUpdateParams{ + ID: ac.ID, + ProjectID: ac.ProjectID, + Name: ac.Name, + Body: ac.Body, + StartDate: ac.StartDate, + ScheduleInterval: ac.ScheduleInterval, + MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, + RemindInterval: ac.RemindInterval, + WarningInterval: ac.WarningInterval, + Updater: ac.UpdaterID, + UpdateDate: ac.UpdateDate, + }); err != nil { + return a, err } - if err := qtx.UnassignAllInstrumentsFromAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + if err := qtx.AlertConfigInstrumentDeleteAssignmentsForAlertConfig(ctx, alertConfigID); err != nil { + return a, err } for _, aci := range ac.Instruments { - if err := qtx.AssignInstrumentToAlertConfig(ctx, alertConfigID, aci.InstrumentID); err != nil { - return model.AlertConfig{}, err + if err := qtx.AlertConfigInstrumentCreateAssignment(ctx, db.AlertConfigInstrumentCreateAssignmentParams{ + AlertConfigID: alertConfigID, + InstrumentID: aci.InstrumentID, + }); err != nil { + return a, err } } - if err := qtx.UnsubscribeAllEmailsFromAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + if err := qtx.AlertEmailSubscritpionDeleteForAlertConfig(ctx, alertConfigID); err != nil { + return a, err } if err := registerAndSubscribe(ctx, qtx, alertConfigID, ac.AlertEmailSubscriptions); err != nil { - return model.AlertConfig{}, err + return a, err } - if err := qtx.UpdateFutureSubmittalForAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + if _, err := qtx.SubmittalUpdateNextForAlertConfig(ctx, &alertConfigID); err != nil { + return a, err } - acNew, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + a, err = qtx.AlertConfigGet(ctx, alertConfigID) if err != nil { - return model.AlertConfig{}, err + return a, err } - if err := tx.Commit(); err != nil { - 
return model.AlertConfig{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return acNew, nil + return a, nil } diff --git a/api/internal/service/alert_subscription.go b/api/internal/service/alert_subscription.go index 75b60079..18a7a8b8 100644 --- a/api/internal/service/alert_subscription.go +++ b/api/internal/service/alert_subscription.go @@ -4,223 +4,209 @@ import ( "context" "fmt" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) const ( - unknown = "" - email = "email" - profile = "profile" + unknownUserType = "" + emailUserType = "email" + profileUserType = "profile" ) -type AlertSubscriptionService interface { - SubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) (model.AlertSubscription, error) - UnsubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) error - GetAlertSubscription(ctx context.Context, alertConfigID, profileID uuid.UUID) (model.AlertSubscription, error) - GetAlertSubscriptionByID(ctx context.Context, subscriptionID uuid.UUID) (model.AlertSubscription, error) - ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]model.AlertSubscription, error) - UpdateMyAlertSubscription(ctx context.Context, s model.AlertSubscription) (model.AlertSubscription, error) - SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) - UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) - UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error - UnregisterEmail(ctx context.Context, emailID uuid.UUID) error -} - -type alertSubscriptionService struct { - db *model.Database - *model.Queries -} - -func 
NewAlertSubscriptionService(db *model.Database, q *model.Queries) *alertSubscriptionService { - return &alertSubscriptionService{db, q} -} - -// SubscribeProfileToAlerts subscribes a profile to an instrument alert -func (s alertSubscriptionService) SubscribeProfileToAlerts(ctx context.Context, alertConfigID uuid.UUID, profileID uuid.UUID) (model.AlertSubscription, error) { - var a model.AlertSubscription - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertProfileSubscriptionCreateForAlertConfigProfile(ctx context.Context, alertConfigID uuid.UUID, profileID uuid.UUID) (db.AlertProfileSubscription, error) { + var a db.AlertProfileSubscription + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.SubscribeProfileToAlerts(ctx, alertConfigID, profileID); err != nil { + if err := qtx.AlertProfileSubscriptionCreateOnAnyConflictDoNothing(ctx, db.AlertProfileSubscriptionCreateOnAnyConflictDoNothingParams{ + AlertConfigID: alertConfigID, + ProfileID: profileID, + }); err != nil { return a, err } - updated, err := qtx.GetAlertSubscription(ctx, alertConfigID, profileID) + updated, err := qtx.AlertSubscriptionGetForAlertConfigProfile(ctx, db.AlertSubscriptionGetForAlertConfigProfileParams{ + AlertConfigID: alertConfigID, + ProfileID: profileID, + }) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } return updated, nil } -// UpdateMyAlertSubscription updates properties on a AlertSubscription -func (s alertSubscriptionService) UpdateMyAlertSubscription(ctx context.Context, sub model.AlertSubscription) (model.AlertSubscription, error) { - var a model.AlertSubscription - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertProfileSubscriptionUpdateForProfile(ctx context.Context, sub dto.AlertSubscription) (db.AlertProfileSubscription, error) { + var a db.AlertProfileSubscription + tx, 
err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - if err := qtx.UpdateMyAlertSubscription(ctx, sub); err != nil { + if err := qtx.AlertSubscriptionUpdateForProfile(ctx, db.AlertSubscriptionUpdateForProfileParams{ + MuteUi: sub.MuteUI, + MuteNotify: sub.MuteNotify, + AlertConfigID: sub.AlertConfigID, + ProfileID: sub.ProfileID, + }); err != nil { return a, err } - - updated, err := qtx.GetAlertSubscription(ctx, sub.AlertConfigID, sub.ProfileID) + updated, err := qtx.AlertSubscriptionGet(ctx, sub.ID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return updated, nil } -func (s alertSubscriptionService) SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertEmailSubscriptionCreateForAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []dto.EmailAutocompleteResult) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := registerAndSubscribe(ctx, qtx, alertConfigID, emails); err != nil { return a, err } - // Register any emails that are not yet in system for idx, em := range emails { - if em.UserType == unknown || em.UserType == email { - newID, err := qtx.RegisterEmail(ctx, em.Email) + if em.UserType == unknownUserType || em.UserType == emailUserType { + newID, err := qtx.EmailGetOrCreate(ctx, em.Email) if err != nil { return a, err } emails[idx].ID = newID - emails[idx].UserType = email + emails[idx].UserType = emailUserType } } // Subscribe emails for _, em := range emails { - if em.UserType == email { - if err := qtx.SubscribeEmailToAlertConfig(ctx, 
alertConfigID, em.ID); err != nil { + if em.UserType == emailUserType { + if err := qtx.AlertEmailSubscriptionCreate(ctx, db.AlertEmailSubscriptionCreateParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return a, err } - } else if em.UserType == profile { - if err := qtx.SubscribeProfileToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := qtx.AlertProfileSubscriptionCreate(ctx, db.AlertProfileSubscriptionCreateParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return a, err } } else { return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) } } - - acUpdated, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + acUpdated, err := qtx.AlertConfigGet(ctx, alertConfigID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return acUpdated, nil } -func (s alertSubscriptionService) UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertEmailSubscriptionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []dto.EmailAutocompleteResult) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for _, em := range emails { - if em.UserType == unknown { + if em.UserType == unknownUserType { return a, fmt.Errorf("required field user_type is null, aborting transaction") - } else if em.UserType == email { - if err := qtx.UnsubscribeEmailFromAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == emailUserType { + if err := qtx.AlertEmailSubscriptionDelete(ctx, 
db.AlertEmailSubscriptionDeleteParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return a, err } - } else if em.UserType == profile { - if err := qtx.UnsubscribeProfileFromAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := qtx.AlertProfileSubscriptionDelete(ctx, db.AlertProfileSubscriptionDeleteParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return a, err } } else { return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) } } - - acUpdated, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + acUpdated, err := qtx.AlertConfigGet(ctx, alertConfigID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return acUpdated, nil } -func (s alertSubscriptionService) UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertSubscriptionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - if err := qtx.UnsubscribeAllEmailsFromAlertConfig(ctx, alertConfigID); err != nil { + if err := qtx.AlertEmailSubscritpionDeleteForAlertConfig(ctx, alertConfigID); err != nil { return err } - - if err := qtx.UnsubscribeAllProfilesFromAlertConfig(ctx, alertConfigID); err != nil { + if err := qtx.AlertProfileSubscritpionDeleteForAlertConfig(ctx, alertConfigID); err != nil { return err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return err } return nil } -func registerAndSubscribe(ctx context.Context, q *model.Queries, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) error { +func registerAndSubscribe(ctx context.Context, q 
*db.Queries, alertConfigID uuid.UUID, emails []dto.EmailAutocompleteResult) error { for idx, em := range emails { - if em.UserType == unknown || em.UserType == email { - newID, err := q.RegisterEmail(ctx, em.Email) + if em.UserType == unknownUserType || em.UserType == emailUserType { + newID, err := q.EmailGetOrCreate(ctx, em.Email) if err != nil { return err } emails[idx].ID = newID - emails[idx].UserType = email + emails[idx].UserType = emailUserType } } for _, em := range emails { - if em.UserType == email { - if err := q.SubscribeEmailToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + if em.UserType == emailUserType { + if err := q.AlertEmailSubscriptionCreate(ctx, db.AlertEmailSubscriptionCreateParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return err } - } else if em.UserType == profile { - if err := q.SubscribeProfileToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := q.AlertProfileSubscriptionCreate(ctx, db.AlertProfileSubscriptionCreateParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return err } } else { diff --git a/api/internal/service/autocomplete.go b/api/internal/service/autocomplete.go deleted file mode 100644 index ebf9b95a..00000000 --- a/api/internal/service/autocomplete.go +++ /dev/null @@ -1,20 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type EmailAutocompleteService interface { - ListEmailAutocomplete(ctx context.Context, emailInput string, limit int) ([]model.EmailAutocompleteResult, error) -} - -type emailAutocompleteService struct { - db *model.Database - *model.Queries -} - -func NewEmailAutocompleteService(db *model.Database, q *model.Queries) *emailAutocompleteService { - return &emailAutocompleteService{db, q} -} diff --git a/api/internal/service/aware.go b/api/internal/service/aware.go index 28798558..cc89eae2 100644 --- 
a/api/internal/service/aware.go +++ b/api/internal/service/aware.go @@ -3,37 +3,25 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) -type AwareParameterService interface { - ListAwareParameters(ctx context.Context) ([]model.AwareParameter, error) - ListAwarePlatformParameterConfig(ctx context.Context) ([]model.AwarePlatformParameterConfig, error) +type AwarePlatformParameterConfig struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + AwareID uuid.UUID `json:"aware_id" db:"aware_id"` + AwareParameters map[string]*uuid.UUID `json:"aware_parameters"` } -type awareParameterService struct { - db *model.Database - *model.Queries -} - -func NewAwareParameterService(db *model.Database, q *model.Queries) *awareParameterService { - return &awareParameterService{db, q} -} - -// ListAwarePlatformParameterConfig returns aware platform parameter configs -func (s awareParameterService) ListAwarePlatformParameterConfig(ctx context.Context) ([]model.AwarePlatformParameterConfig, error) { - aa := make([]model.AwarePlatformParameterConfig, 0) - ee, err := s.ListAwarePlatformParameterEnabled(ctx) +func (s DBService) AwarePlatformParameterConfigList(ctx context.Context) ([]AwarePlatformParameterConfig, error) { + aa := make([]AwarePlatformParameterConfig, 0) + ee, err := s.Queries.AwarePlatformParameterListEnabled(ctx) if err != nil { return aa, err } - // reorganize aware_parameter_key, timeseries_id into map for each instrument - // Map of aware parameters to timeseries - m1 := make(map[uuid.UUID]model.AwarePlatformParameterConfig) + m1 := make(map[uuid.UUID]AwarePlatformParameterConfig) for _, e := range ee { if _, ok := m1[e.InstrumentID]; !ok { - m1[e.InstrumentID] = model.AwarePlatformParameterConfig{ + m1[e.InstrumentID] = AwarePlatformParameterConfig{ InstrumentID: e.InstrumentID, AwareID: e.AwareID, AwareParameters: make(map[string]*uuid.UUID), diff --git 
a/api/internal/service/collection_group.go b/api/internal/service/collection_group.go index 405d412a..a7ef1433 100644 --- a/api/internal/service/collection_group.go +++ b/api/internal/service/collection_group.go @@ -3,55 +3,26 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type CollectionGroupService interface { - ListCollectionGroups(ctx context.Context, projectID uuid.UUID) ([]model.CollectionGroup, error) - GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (model.CollectionGroupDetails, error) - CreateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) - UpdateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) - DeleteCollectionGroup(ctx context.Context, projectID, collectionGroupID uuid.UUID) error - AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID, sortOrder int) error - UpdateTimeseriesCollectionGroupSortOrder(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID, sortOrder int) error - RemoveTimeseriesFromCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error +func (s DBService) CollectionGroupCreate(ctx context.Context, cg dto.CollectionGroup) (db.CollectionGroup, error) { + return s.Queries.CollectionGroupCreate(ctx, db.CollectionGroupCreateParams{ + ProjectID: cg.ProjectID, + Name: cg.Name, + Creator: cg.CreatorID, + CreateDate: cg.CreateDate, + SortOrder: cg.SortOrder, + }) } -type collectionGroupService struct { - db *model.Database - *model.Queries -} - -func NewCollectionGroupService(db *model.Database, q *model.Queries) *collectionGroupService { - return &collectionGroupService{db, q} -} - -// GetCollectionGroupDetails returns details for a single 
CollectionGroup -func (s collectionGroupService) GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (model.CollectionGroupDetails, error) { - var a model.CollectionGroupDetails - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return a, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - cg, err := qtx.GetCollectionGroupDetails(ctx, projectID, collectionGroupID) - if err != nil { - return a, err - } - ts, err := qtx.GetCollectionGroupDetailsTimeseries(ctx, projectID, collectionGroupID) - if err != nil { - return a, err - } - - if err := tx.Commit(); err != nil { - return a, err - } - - cg.Timeseries = ts - - return cg, nil +func (s DBService) CollectionGroupUpdate(ctx context.Context, cg dto.CollectionGroup) (db.CollectionGroup, error) { + return s.Queries.CollectionGroupUpdate(ctx, db.CollectionGroupUpdateParams{ + ID: cg.ID, + ProjectID: cg.ProjectID, + Name: cg.Name, + Updater: cg.UpdaterID, + UpdateDate: cg.UpdateDate, + }) } diff --git a/api/internal/service/datalogger.go b/api/internal/service/datalogger.go index 5fc5c385..fc9c8776 100644 --- a/api/internal/service/datalogger.go +++ b/api/internal/service/datalogger.go @@ -2,155 +2,172 @@ package service import ( "context" + "errors" + "time" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/password" "github.com/google/uuid" ) -type DataloggerService interface { - GetDataloggerModelName(ctx context.Context, modelID uuid.UUID) (string, error) - ListProjectDataloggers(ctx context.Context, projectID uuid.UUID) ([]model.Datalogger, error) - ListAllDataloggers(ctx context.Context) ([]model.Datalogger, error) - GetDataloggerIsActive(ctx context.Context, modelName, sn string) (bool, error) - VerifyDataloggerExists(ctx context.Context, dlID uuid.UUID) error - CreateDatalogger(ctx 
context.Context, n model.Datalogger) (model.DataloggerWithKey, error) - CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) - GetOneDatalogger(ctx context.Context, dataloggerID uuid.UUID) (model.Datalogger, error) - UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) - DeleteDatalogger(ctx context.Context, d model.Datalogger) error - GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (model.DataloggerTablePreview, error) - ResetDataloggerTableName(ctx context.Context, dataloggerTableID uuid.UUID) error - GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) - DeleteDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error +type DataloggerWithKey struct { + db.VDatalogger + Key string `json:"key"` } -type dataloggerService struct { - db *model.Database - *model.Queries -} - -func NewDataloggerService(db *model.Database, q *model.Queries) *dataloggerService { - return &dataloggerService{db, q} -} +func (s DBService) DataloggerCreate(ctx context.Context, n dto.Datalogger) (DataloggerWithKey, error) { + var a DataloggerWithKey -func (s dataloggerService) CreateDatalogger(ctx context.Context, n model.Datalogger) (model.DataloggerWithKey, error) { - var a model.DataloggerWithKey - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - dataloggerID, err := qtx.CreateDatalogger(ctx, n) + dataloggerID, err := qtx.DataloggerCreate(ctx, db.DataloggerCreateParams{ + Name: n.Name, + Sn: n.SN, + ProjectID: n.ProjectID, + Creator: n.CreatorID, + ModelID: n.ModelID, + }) if err != nil { return a, err } - key, err := qtx.CreateDataloggerHash(ctx, dataloggerID) - if err != nil { + key := password.GenerateRandom(40) + hash := password.MustCreateHash(key, password.DefaultParams) + + if err := 
qtx.DataloggerHashCreate(ctx, db.DataloggerHashCreateParams{ + DataloggerID: dataloggerID, + Hash: hash, + }); err != nil { return a, err } - - dl, err := qtx.GetOneDatalogger(ctx, dataloggerID) + dl, err := qtx.DataloggerGet(ctx, dataloggerID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - - dk := model.DataloggerWithKey{ - Datalogger: dl, - Key: key, + dk := DataloggerWithKey{ + VDatalogger: dl, + Key: key, } - return dk, nil } -func (s dataloggerService) CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) { - var a model.DataloggerWithKey - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerHashUpdate(ctx context.Context, arg dto.Datalogger) (DataloggerWithKey, error) { + var a DataloggerWithKey + if arg.UpdaterID == nil { + return a, errors.New("must supply updater profile id") + } + + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) + key := password.GenerateRandom(40) + hash := password.MustCreateHash(key, password.DefaultParams) - key, err := qtx.UpdateDataloggerHash(ctx, u.ID) - if err != nil { + if err := qtx.DataloggerHashUpdate(ctx, db.DataloggerHashUpdateParams{ + DataloggerID: arg.ID, + Hash: hash, + }); err != nil { return a, err } - if err := qtx.UpdateDataloggerUpdater(ctx, u); err != nil { + if err := qtx.DataloggerUpdateUpdater(ctx, db.DataloggerUpdateUpdaterParams{ + ID: arg.ID, + Updater: *arg.UpdaterID, + UpdateDate: time.Now(), + }); err != nil { return a, err } - dl, err := qtx.GetOneDatalogger(ctx, u.ID) + dl, err := qtx.DataloggerGet(ctx, arg.ID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - dk := model.DataloggerWithKey{ - Datalogger: dl, - Key: key, + dk := DataloggerWithKey{ + VDatalogger: dl, + Key: key, } return dk, nil } 
-func (s dataloggerService) UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) { - var a model.Datalogger - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerUpdate(ctx context.Context, u dto.Datalogger) (db.VDatalogger, error) { + var a db.VDatalogger + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateDatalogger(ctx, u); err != nil { + if u.UpdaterID == nil { + return a, errors.New("must set updater id") + } + + if err := qtx.DataloggerUpdate(ctx, db.DataloggerUpdateParams{ + ID: u.ID, + Name: u.Name, + Updater: *u.UpdaterID, + UpdateDate: time.Now(), + }); err != nil { return a, err } - dlUpdated, err := qtx.GetOneDatalogger(ctx, u.ID) + dlUpdated, err := qtx.DataloggerGet(ctx, u.ID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } return dlUpdated, nil } -func (s dataloggerTelemetryService) GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerTableGetOrCreate(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { + tx, err := s.db.Begin(ctx) if err != nil { return uuid.Nil, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.RenameEmptyDataloggerTableName(ctx, dataloggerID, tableName); err != nil { + if err := qtx.DataloggerTableUpdateNameIfEmpty(ctx, db.DataloggerTableUpdateNameIfEmptyParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }); err != nil { return uuid.Nil, err } - dataloggerTableID, err := qtx.GetOrCreateDataloggerTable(ctx, dataloggerID, tableName) + dataloggerTableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dataloggerID, + TableName: 
tableName, + }) if err != nil { return uuid.Nil, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return uuid.Nil, err } diff --git a/api/internal/service/datalogger_telemetry.go b/api/internal/service/datalogger_telemetry.go index b55f2063..907fbb46 100644 --- a/api/internal/service/datalogger_telemetry.go +++ b/api/internal/service/datalogger_telemetry.go @@ -11,91 +11,112 @@ import ( "strconv" "time" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type DataloggerTelemetryService interface { - GetDataloggerByModelSN(ctx context.Context, modelName, sn string) (model.Datalogger, error) - GetDataloggerHashByModelSN(ctx context.Context, modelName, sn string) (string, error) - CreateDataloggerTablePreview(ctx context.Context, prv model.DataloggerTablePreview) error - UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) - UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error - CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error -} - -type dataloggerTelemetryService struct { - db *model.Database - *model.Queries -} - -func NewDataloggerTelemetryService(db *model.Database, q *model.Queries) *dataloggerTelemetryService { - return &dataloggerTelemetryService{db, q} +func (s DBService) DataloggerTablePreviewCreate(ctx context.Context, prv dto.DataloggerTablePreview) error { + return s.Queries.DataloggerTablePreviewCreate(ctx, db.DataloggerTablePreviewCreateParams{ + DataloggerTableID: prv.DataloggerTableID, + UpdateDate: prv.UpdateDate, + Preview: prv.Preview, + }) } // UpdateDataloggerTablePreview attempts to update a table preview by datalogger_id and table_name, creates the // datalogger 
table and corresponding preview if it doesn't exist -func (s dataloggerTelemetryService) UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerTablePreviewUpdate(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv dto.DataloggerTablePreview) (uuid.UUID, error) { + tx, err := s.db.Begin(ctx) if err != nil { return uuid.Nil, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) // replace empty datalogger table name with most recent payload - if err := qtx.RenameEmptyDataloggerTableName(ctx, dataloggerID, tableName); err != nil { + if err := qtx.DataloggerTableUpdateNameIfEmpty(ctx, db.DataloggerTableUpdateNameIfEmptyParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }); err != nil { return uuid.Nil, err } - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dataloggerID, tableName) + tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }) if err != nil { return uuid.Nil, err } - if err := qtx.UpdateDataloggerTablePreview(ctx, dataloggerID, tableName, prv); err != nil { + if err := qtx.DataloggerTablePreviewUpdate(ctx, db.DataloggerTablePreviewUpdateParams{ + DataloggerID: dataloggerID, + TableName: tableName, + Preview: prv.Preview, + UpdateDate: prv.UpdateDate, + }); err != nil { if !errors.Is(err, sql.ErrNoRows) { return uuid.Nil, err } prv.DataloggerTableID = tableID - if err := qtx.CreateDataloggerTablePreview(ctx, prv); err != nil { + if err := qtx.DataloggerTablePreviewCreate(ctx, db.DataloggerTablePreviewCreateParams{ + DataloggerTableID: prv.DataloggerTableID, + Preview: prv.Preview, + UpdateDate: prv.UpdateDate, + }); err != nil { } } - return tableID, tx.Commit() + return tableID, tx.Commit(ctx) } -func (s dataloggerTelemetryService) 
UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerTableErrorUpdate(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *dto.DataloggerError) error { + if tableName == nil { + return errors.New("table name must not be nil") + } + + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.DeleteDataloggerTableError(ctx, dataloggerID, tableName); err != nil { + if err := qtx.DataloggerErrorDelete(ctx, db.DataloggerErrorDeleteParams{ + DataloggerID: dataloggerID, + TableName: *tableName, + }); err != nil { return err } + if len(e.Errors) == 0 { + return tx.Commit(ctx) + } - for _, m := range e.Errors { - if err := qtx.CreateDataloggerTableError(ctx, dataloggerID, tableName, m); err != nil { - return err + ee := make([]db.DataloggerErrorCreateBatchParams, len(e.Errors)) + for idx, m := range e.Errors { + ee[idx] = db.DataloggerErrorCreateBatchParams{ + DataloggerID: dataloggerID, + TableName: *tableName, + ErrorMessage: &m, } } + qtx.DataloggerErrorCreateBatch(ctx, ee).Exec(batchExecErr(&err)) + if err != nil { + return err + } - return tx.Commit() + return tx.Commit(ctx) } // ParseTOA5 parses a Campbell Scientific TOA5 data file that is simlar to a csv. 
// The unique properties of TOA5 are that the meatdata are stored in header of file (first 4 lines of csv) -func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesMeasurementCreateOrUpdateDataloggerTOA5Upload(ctx context.Context, r io.Reader) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) reader := csv.NewReader(r) @@ -117,7 +138,7 @@ func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementColle return err } - meta := model.Environment{ + meta := dto.Environment{ StationName: envHeader[1], Model: envHeader[2], SerialNo: envHeader[3], @@ -126,42 +147,52 @@ func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementColle TableName: envHeader[6], } - dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ + Model: &meta.Model, + Sn: meta.SerialNo, + }) if err != nil { return err } - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dl.ID, + TableName: meta.TableName, + }) if err != nil { return err } em := make([]string, 0) defer func() { - s.UpdateDataloggerTableError(ctx, dl.ID, &meta.TableName, &model.DataloggerError{Errors: em}) + s.DataloggerTableErrorUpdate(ctx, dl.ID, &meta.TableName, &dto.DataloggerError{Errors: em}) }() // first two columns are timestamp and record number // we only want to collect the measurement fields here - fields := make([]model.Field, len(fieldHeader)-2) + fields := make([]dto.Field, len(fieldHeader)-2) for i := 2; i < len(fieldHeader); i++ { - fields[i] = model.Field{ + fields[i] = dto.Field{ Name: fieldHeader[i], Units: unitsHeader[i], 
Process: processHeader[i], } } - eqt, err := qtx.GetEquivalencyTable(ctx, tableID) + eqt, err := qtx.EquivalencyTableGet(ctx, tableID) if err != nil { return err } fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, eqtRow := range eqt.Rows { + for _, eqtRow := range eqt.Fields { fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID } + chunkSize := 1_000 + mmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, chunkSize) + noteParams := make([]db.TimeseriesNoteCreateOrUpdateBatchParams, chunkSize) + var mIdx, nIdx int for { record, err := reader.Read() if err == io.EOF { @@ -170,12 +201,10 @@ func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementColle if err != nil { return err } - t, err := time.Parse(record[0], time.RFC3339) if err != nil { return err } - for idx, cell := range record[2:] { fieldName := fields[idx].Name tsID, ok := fieldNameTimeseriesIDMap[fieldName] @@ -188,23 +217,60 @@ func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementColle )) continue } - v, err := strconv.ParseFloat(cell, 64) - if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { - // could not parse float - // add error to Measurement payload to report back to user + if err != nil { + v = math.NaN() + } + if math.IsNaN(v) || math.IsInf(v, 0) { em = append(em, fmt.Sprintf( - "value error: field_name %s contains invalid value entry at %s", + "warning: field_name %s contains invalid value entry at %s (NAN or INF)", fieldName, t, )) - continue + masked := true + noteParams[nIdx] = db.TimeseriesNoteCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Masked: &masked, + } + nIdx++ + if nIdx == chunkSize { + var err error + qtx.TimeseriesNoteCreateOrUpdateBatch(ctx, noteParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + nIdx = 0 + } } - - if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, tsID, t, v); err != nil { - return err + mmtParams[mIdx] = 
db.TimeseriesMeasurementCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Value: v, + } + mIdx++ + if mIdx == chunkSize { + var err error + qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + mIdx = 0 } } } - - return tx.Commit() + if mIdx != 0 { + var err error + qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mmtParams[:mIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + if nIdx != 0 { + var err error + qtx.TimeseriesNoteCreateOrUpdateBatch(ctx, noteParams[:nIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + return tx.Commit(ctx) } diff --git a/api/internal/service/db.go b/api/internal/service/db.go new file mode 100644 index 00000000..96d0a5cc --- /dev/null +++ b/api/internal/service/db.go @@ -0,0 +1,92 @@ +package service + +import ( + "context" + "database/sql" + "errors" + "fmt" + "log" + + "github.com/USACE/instrumentation-api/api/internal/config" + gen "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" + pgxgeom "github.com/twpayne/pgx-geom" +) + +type DBService struct { + db *DatabasePool + *gen.Queries +} + +type DatabasePool struct { + *pgxpool.Pool +} + +func NewDBService(cfg config.DBConfig) *DBService { + dbpool := newDatabasePool(cfg) + q := gen.New(dbpool) + return &DBService{dbpool, q} +} + +func newDatabasePool(cfg config.DBConfig) *DatabasePool { + config, err := pgxpool.ParseConfig(cfg.ConnStr()) + if err != nil { + log.Fatal(err.Error()) + } + + config.AfterConnect = func(ctx context.Context, conn *pgx.Conn) error { + return pgxgeom.Register(ctx, conn) + } + + pool, err := pgxpool.NewWithConfig(context.Background(), config) + if err != nil { + log.Fatal(err.Error()) + } + + if err := pool.Ping(context.Background()); err != nil { + log.Fatal(err.Error()) + } + + return &DatabasePool{pool} +} + +func txDo(ctx context.Context, rollback func(ctx 
context.Context) error) { + err := rollback(ctx) + if err != nil && !errors.Is(err, sql.ErrTxDone) { + log.Print(err.Error()) + } +} + +func batchExecErr(err *error) func(int, error) { + return func(_ int, e error) { + if e != nil { + *err = e + return + } + } +} + +func batchQueryRowErr[T any](err *error) func(int, T, error) { + return func(_ int, _ T, e error) { + if e != nil { + *err = e + return + } + } +} + +func batchQueryRowCollect[T any](rr []T, err *error) func(int, T, error) { + rrlen := len(rr) + return func(i int, r T, e error) { + if e != nil { + *err = e + return + } + if i == rrlen { + *err = fmt.Errorf("rr slice must be same length as QueryRow args") + return + } + rr[i] = r + } +} diff --git a/api/internal/service/dcsloader.go b/api/internal/service/dcsloader.go index f1fd6880..739f4357 100644 --- a/api/internal/service/dcsloader.go +++ b/api/internal/service/dcsloader.go @@ -13,14 +13,14 @@ import ( "time" "github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" ) type DcsLoaderService interface { - ParseCsvMeasurementCollection(r io.Reader) ([]model.MeasurementCollection, int, error) - PostMeasurementCollectionToApi(mcs []model.MeasurementCollection) error + ParseCsvMeasurementCollection(r io.Reader) ([]dto.MeasurementCollection, int, error) + PostMeasurementCollectionToApi(mcs []dto.MeasurementCollection) error } type dcsLoaderService struct { @@ -32,8 +32,8 @@ func NewDcsLoaderService(apiClient *http.Client, cfg *config.DcsLoaderConfig) *d return &dcsLoaderService{apiClient, cfg} } -func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.MeasurementCollection, int, error) { - mcs := make([]model.MeasurementCollection, 0) +func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]dto.MeasurementCollection, int, 
error) { + mcs := make([]dto.MeasurementCollection, 0) mCount := 0 reader := csv.NewReader(r) @@ -49,7 +49,7 @@ func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.Me rows = append(rows, row) } - mcMap := make(map[uuid.UUID]*model.MeasurementCollection) + mcMap := make(map[uuid.UUID]*dto.MeasurementCollection) for _, row := range rows { // 0=timeseries_id, 1=time, 2=value tsid, err := uuid.Parse(row[0]) @@ -66,16 +66,16 @@ func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.Me } if _, ok := mcMap[tsid]; !ok { - mcMap[tsid] = &model.MeasurementCollection{ + mcMap[tsid] = &dto.MeasurementCollection{ TimeseriesID: tsid, - Items: make([]model.Measurement, 0), + Items: make([]dto.Measurement, 0), } } - mcMap[tsid].Items = append(mcMap[tsid].Items, model.Measurement{TimeseriesID: tsid, Time: t, Value: model.FloatNanInf(v)}) + mcMap[tsid].Items = append(mcMap[tsid].Items, dto.Measurement{TimeseriesID: tsid, Time: t, Value: dto.FloatNanInf(v)}) mCount++ } - mcs = make([]model.MeasurementCollection, len(mcMap)) + mcs = make([]dto.MeasurementCollection, len(mcMap)) idx := 0 for _, v := range mcMap { mcs[idx] = *v @@ -85,7 +85,7 @@ func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.Me return mcs, mCount, nil } -func (s dcsLoaderService) PostMeasurementCollectionToApi(mcs []model.MeasurementCollection) error { +func (s dcsLoaderService) PostMeasurementCollectionToApi(mcs []dto.MeasurementCollection) error { requestBodyBytes, err := json.Marshal(mcs) if err != nil { return err diff --git a/api/internal/service/district_rollup.go b/api/internal/service/district_rollup.go deleted file mode 100644 index cecf29f7..00000000 --- a/api/internal/service/district_rollup.go +++ /dev/null @@ -1,22 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type DistrictRollupService interface { - ListEvaluationDistrictRollup(ctx 
context.Context, opID uuid.UUID, tw model.TimeWindow) ([]model.DistrictRollup, error) - ListMeasurementDistrictRollup(ctx context.Context, opID uuid.UUID, tw model.TimeWindow) ([]model.DistrictRollup, error) -} - -type districtRollupService struct { - db *model.Database - *model.Queries -} - -func NewDistrictRollupService(db *model.Database, q *model.Queries) *districtRollupService { - return &districtRollupService{db, q} -} diff --git a/api/internal/service/domain.go b/api/internal/service/domain.go index 94d4530c..5aacb298 100644 --- a/api/internal/service/domain.go +++ b/api/internal/service/domain.go @@ -3,20 +3,19 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" ) -type DomainService interface { - GetDomains(ctx context.Context) ([]model.Domain, error) - GetDomainMap(ctx context.Context) (model.DomainMap, error) - ListTimezoneOptions(ctx context.Context) ([]model.TimezoneOption, error) -} - -type domainService struct { - db *model.Database - *model.Queries -} +type DomainMap map[string][]db.DomainGroupOpt -func NewDomainService(db *model.Database, q *model.Queries) *domainService { - return &domainService{db, q} +func (s DBService) DomainMapGet(ctx context.Context) (DomainMap, error) { + a := make(DomainMap) + gg, err := s.Queries.DomainGroupList(ctx) + if err != nil { + return a, err + } + for _, g := range gg { + a[g.Group] = g.Opts + } + return a, nil } diff --git a/api/internal/service/equivalency_table.go b/api/internal/service/equivalency_table.go index 3ed47a9d..71942882 100644 --- a/api/internal/service/equivalency_table.go +++ b/api/internal/service/equivalency_table.go @@ -2,88 +2,89 @@ package service import ( "context" + "errors" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type 
EquivalencyTableService interface { - GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (model.EquivalencyTable, error) - CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) - UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) - DeleteEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) error - DeleteEquivalencyTableRow(ctx context.Context, rowID uuid.UUID) error - GetIsValidDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error -} - -type equivalencyTableService struct { - db *model.Database - *model.Queries -} - -func NewEquivalencyTableService(db *model.Database, q *model.Queries) *equivalencyTableService { - return &equivalencyTableService{db, q} -} +func (s DBService) EquivalencyTableCreateOrUpdate(ctx context.Context, t dto.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { + var a db.VDataloggerEquivalencyTable -// CreateEquivalencyTable creates EquivalencyTable rows -// If a row with the given datalogger id or field name already exists the row will be ignored -func (s equivalencyTableService) CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) for _, r := range t.Rows { if r.TimeseriesID != nil { - if err = qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID); err != nil { - return model.EquivalencyTable{}, err + valid, err := qtx.EquivalencyTableTimeseriesGetIsValid(ctx, *r.TimeseriesID) + if err != nil { + return a, err + } + if !valid { + return a, errors.New("equivalency table timeseries invalid") } } - if err := qtx.CreateOrUpdateEquivalencyTableRow(ctx, t.DataloggerID, t.DataloggerTableID, r); err 
!= nil { - return model.EquivalencyTable{}, err + if err := qtx.EquivalencyTableCreateOrUpdate(ctx, db.EquivalencyTableCreateOrUpdateParams{ + DataloggerID: t.DataloggerID, + DataloggerTableID: &t.DataloggerTableID, + FieldName: r.FieldName, + DisplayName: &r.DisplayName, + InstrumentID: r.InstrumentID, + TimeseriesID: r.TimeseriesID, + }); err != nil { + return a, err } } - eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) + eqt, err := qtx.EquivalencyTableGet(ctx, t.DataloggerTableID) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.EquivalencyTable{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } return eqt, nil } -// UpdateEquivalencyTable updates rows of an EquivalencyTable -func (s equivalencyTableService) UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EquivalencyTableUpdate(ctx context.Context, t dto.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { + var a db.VDataloggerEquivalencyTable + tx, err := s.db.Begin(ctx) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) for _, r := range t.Rows { if r.TimeseriesID != nil { - if err = qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID); err != nil { - return model.EquivalencyTable{}, err + valid, err := qtx.EquivalencyTableTimeseriesGetIsValid(ctx, *r.TimeseriesID) + if err != nil { + return a, err + } + if !valid { + return a, errors.New("equivalency table timeseries invalid") } } - if err := qtx.UpdateEquivalencyTableRow(ctx, r); err != nil { - return model.EquivalencyTable{}, err + if err := qtx.EquivalencyTableUpdate(ctx, db.EquivalencyTableUpdateParams{ + ID: r.ID, + FieldName: r.FieldName, + DisplayName: &r.DisplayName, + }); err != nil { + return a, err } } - eqt, 
err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) + eqt, err := qtx.EquivalencyTableGet(ctx, t.DataloggerTableID) - if err := tx.Commit(); err != nil { - return model.EquivalencyTable{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } return eqt, nil diff --git a/api/internal/service/evaluation.go b/api/internal/service/evaluation.go index 2525a36a..447f1cae 100644 --- a/api/internal/service/evaluation.go +++ b/api/internal/service/evaluation.go @@ -3,150 +3,149 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type EvaluationService interface { - ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]model.Evaluation, error) - ListProjectEvaluationsByAlertConfig(ctx context.Context, projectID, alertConfigID uuid.UUID) ([]model.Evaluation, error) - ListInstrumentEvaluations(ctx context.Context, instrumentID uuid.UUID) ([]model.Evaluation, error) - GetEvaluation(ctx context.Context, evaluationID uuid.UUID) (model.Evaluation, error) - RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error - CreateEvaluation(ctx context.Context, ev model.Evaluation) (model.Evaluation, error) - UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (model.Evaluation, error) - DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error -} - -type evaluationService struct { - db *model.Database - *model.Queries -} - -func NewEvaluationService(db *model.Database, q *model.Queries) *evaluationService { - return &evaluationService{db, q} -} - -func (s evaluationService) RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EvaluationSubmittalUpdateCompleteCreateNext(ctx context.Context, subID uuid.UUID) error { + tx, err := s.db.Begin(ctx) if 
err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - sub, err := qtx.CompleteEvaluationSubmittal(ctx, subID) + sub, err := qtx.SubmittalUpdateCompleteEvaluation(ctx, subID) if err != nil { return err } - // Create next submittal if submitted on-time // late submittals will have already generated next submittal - if sub.SubmittalStatusID == GreenSubmittalStatusID { - if err := qtx.CreateNextEvaluationSubmittal(ctx, subID); err != nil { + if sub.SubmittalStatusID != nil && *sub.SubmittalStatusID == dto.GreenSubmittalStatusID { + if err := qtx.SubmittalCreateNextEvaluation(ctx, subID); err != nil { return err } } - return tx.Commit() + return tx.Commit(ctx) } -func (s evaluationService) CreateEvaluation(ctx context.Context, ev model.Evaluation) (model.Evaluation, error) { - var a model.Evaluation - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EvaluationCreate(ctx context.Context, ev dto.Evaluation) (db.VEvaluation, error) { + var a db.VEvaluation + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) if ev.SubmittalID != nil { - sub, err := qtx.CompleteEvaluationSubmittal(ctx, *ev.SubmittalID) + sub, err := qtx.SubmittalUpdateCompleteEvaluation(ctx, *ev.SubmittalID) if err != nil { return a, err } // Create next submittal if submitted on-time // late submittals will have already generated next submittal - if sub.SubmittalStatusID == GreenSubmittalStatusID { - qtx.CreateNextEvaluationSubmittal(ctx, *ev.SubmittalID) + if sub.SubmittalStatusID != nil && *sub.SubmittalStatusID == dto.GreenSubmittalStatusID { + qtx.SubmittalCreateNextEvaluation(ctx, *ev.SubmittalID) } } - - evID, err := qtx.CreateEvaluation(ctx, ev) + evID, err := qtx.EvaluationCreate(ctx, db.EvaluationCreateParams{ + ProjectID: ev.ProjectID, + SubmittalID: ev.SubmittalID, + Name: ev.Name, + Body: ev.Body, + StartDate: ev.StartDate, + EndDate: 
ev.EndDate, + Creator: ev.CreatorID, + CreateDate: ev.CreateDate, + }) if err != nil { return a, err } - - for _, aci := range ev.Instruments { - if err := qtx.CreateEvaluationInstrument(ctx, evID, aci.InstrumentID); err != nil { - return a, err + args := make([]db.EvaluationInstrumentCreateBatchParams, len(ev.Instruments)) + for idx, aci := range ev.Instruments { + args[idx] = db.EvaluationInstrumentCreateBatchParams{ + EvaluationID: &evID, + InstrumentID: &aci.InstrumentID, } } - - evNew, err := qtx.GetEvaluation(ctx, evID) + qtx.EvaluationInstrumentCreateBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + a, err = qtx.EvaluationGet(ctx, evID) + if err != nil { + return a, err + } + if err := tx.Commit(ctx); err != nil { return a, err } - return evNew, nil + return a, nil } -func (s evaluationService) UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (model.Evaluation, error) { - var a model.Evaluation - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EvaluationUpdate(ctx context.Context, evaluationID uuid.UUID, ev dto.Evaluation) (db.VEvaluation, error) { + var a db.VEvaluation + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateEvaluation(ctx, ev); err != nil { + if err := qtx.EvaluationUpdate(ctx, db.EvaluationUpdateParams{ + ID: ev.ID, + ProjectID: ev.ProjectID, + Name: ev.Name, + Body: ev.Body, + StartDate: ev.StartDate, + EndDate: ev.EndDate, + Updater: ev.UpdaterID, + UpdateDate: ev.UpdateDate, + }); err != nil { return a, err } - - if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, ev.ID); err != nil { + if err := qtx.EvaluationInstrumentDeleteForEvaluation(ctx, &ev.ID); err != nil { return a, err } - - for _, aci := range ev.Instruments { - if err := qtx.CreateEvaluationInstrument(ctx, ev.ID, aci.InstrumentID); err != nil { - return 
a, err + args := make([]db.EvaluationInstrumentCreateBatchParams, len(ev.Instruments)) + for idx, aci := range ev.Instruments { + args[idx] = db.EvaluationInstrumentCreateBatchParams{ + EvaluationID: &evaluationID, + InstrumentID: &aci.InstrumentID, } } + qtx.EvaluationInstrumentCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return a, err + } - evUpdated, err := qtx.GetEvaluation(ctx, ev.ID) + a, err = qtx.EvaluationGet(ctx, ev.ID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return evUpdated, nil + return a, nil } -func (s evaluationService) DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EvaluationDelete(ctx context.Context, evaluationID uuid.UUID) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, evaluationID); err != nil { + if err := qtx.EvaluationInstrumentDeleteForEvaluation(ctx, &evaluationID); err != nil { return err } - - if err := qtx.DeleteEvaluation(ctx, evaluationID); err != nil { + if err := qtx.EvaluationDelete(ctx, evaluationID); err != nil { return err } - return nil + return tx.Commit(ctx) } diff --git a/api/internal/service/heartbeat.go b/api/internal/service/heartbeat.go deleted file mode 100644 index 16fb78ca..00000000 --- a/api/internal/service/heartbeat.go +++ /dev/null @@ -1,22 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type HeartbeatService interface { - DoHeartbeat(ctx context.Context) (model.Heartbeat, error) - GetLatestHeartbeat(ctx context.Context) (model.Heartbeat, error) - ListHeartbeats(ctx context.Context) ([]model.Heartbeat, error) -} - -type heartbeatService struct { - db *model.Database - *model.Queries -} - -func 
NewHeartbeatService(db *model.Database, q *model.Queries) *heartbeatService { - return &heartbeatService{db, q} -} diff --git a/api/internal/service/home.go b/api/internal/service/home.go deleted file mode 100644 index dc9f849a..00000000 --- a/api/internal/service/home.go +++ /dev/null @@ -1,20 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type HomeService interface { - GetHome(ctx context.Context) (model.Home, error) -} - -type homeService struct { - db *model.Database - *model.Queries -} - -func NewHomeService(db *model.Database, q *model.Queries) *homeService { - return &homeService{db, q} -} diff --git a/api/internal/service/instrument.go b/api/internal/service/instrument.go index 73d0a734..c7f78799 100644 --- a/api/internal/service/instrument.go +++ b/api/internal/service/instrument.go @@ -2,35 +2,17 @@ package service import ( "context" + "slices" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" - "github.com/paulmach/orb/geojson" ) -type InstrumentService interface { - ListInstruments(ctx context.Context) ([]model.Instrument, error) - GetInstrument(ctx context.Context, instrumentID uuid.UUID) (model.Instrument, error) - GetInstrumentCount(ctx context.Context) (model.InstrumentCount, error) - CreateInstrument(ctx context.Context, i model.Instrument) (model.IDSlugName, error) - CreateInstruments(ctx context.Context, instruments []model.Instrument) ([]model.IDSlugName, error) - UpdateInstrument(ctx context.Context, projectID uuid.UUID, i model.Instrument) (model.Instrument, error) - UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p model.Profile) (model.Instrument, error) - DeleteFlagInstrument(ctx context.Context, projectID, instrumentID uuid.UUID) error -} - -type instrumentService 
struct { - db *model.Database - *model.Queries -} - -func NewInstrumentService(db *model.Database, q *model.Queries) *instrumentService { - return &instrumentService{db, q} -} - var ( - saaTypeID = uuid.MustParse("07b91c5c-c1c5-428d-8bb9-e4c93ab2b9b9") - ipiTypeID = uuid.MustParse("c81f3a5d-fc5f-47fd-b545-401fe6ee63bb") + saaTypeID = uuid.MustParse("07b91c5c-c1c5-428d-8bb9-e4c93ab2b9b9") + ipiTypeID = uuid.MustParse("c81f3a5d-fc5f-47fd-b545-401fe6ee63bb") + inclTypeID = uuid.MustParse("3c3dfc23-ed2a-4a4a-9ce0-683c7c1d4d20") ) type requestType int @@ -40,128 +22,191 @@ const ( update ) -func createInstrument(ctx context.Context, q *model.Queries, instrument model.Instrument) (model.IDSlugName, error) { - newInstrument, err := q.CreateInstrument(ctx, instrument) +func (s DBService) InstrumentCreateBatch(ctx context.Context, ii []dto.Instrument) ([]db.InstrumentCreateBatchRow, error) { + tx, err := s.db.Begin(ctx) if err != nil { - return model.IDSlugName{}, err + return nil, err } - for _, prj := range instrument.Projects { - if err := q.AssignInstrumentToProject(ctx, prj.ID, newInstrument.ID); err != nil { - return model.IDSlugName{}, err + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + createInstrumentsArgs := make([]db.InstrumentCreateBatchParams, len(ii)) + assignInstrumentsProjectsArgs := make([][]db.ProjectInstrumentCreateBatchParams, len(ii)) + instrumentStatusArgs := make([]db.InstrumentStatusCreateOrUpdateBatchParams, len(ii)) + instrumentAwareArgs := make([]db.AwarePlatformCreateBatchParams, 0) + + for idx, inst := range ii { + createInstrumentsArgs[idx] = db.InstrumentCreateBatchParams{ + Name: inst.Name, + TypeID: inst.TypeID, + Geometry: inst.Geometry, + Station: inst.Station, + StationOffset: inst.StationOffset, + Creator: inst.CreatorID, + CreateDate: inst.CreateDate, + NidID: inst.NIDID, + UsgsID: inst.USGSID, + ShowCwmsTab: inst.ShowCwmsTab, } } - if err := q.CreateOrUpdateInstrumentStatus(ctx, newInstrument.ID, instrument.StatusID, 
instrument.StatusTime); err != nil { - return model.IDSlugName{}, err - } - if instrument.AwareID != nil { - if err := q.CreateAwarePlatform(ctx, newInstrument.ID, *instrument.AwareID); err != nil { - return model.IDSlugName{}, err + newInstruments := make([]db.InstrumentCreateBatchRow, len(createInstrumentsArgs)) + qtx.InstrumentCreateBatch(ctx, createInstrumentsArgs).QueryRow(func(idx int, r db.InstrumentCreateBatchRow, e error) { + if e != nil { + err = e + return } - } - instrument.ID = newInstrument.ID - if err := handleOpts(ctx, q, instrument, create); err != nil { - return model.IDSlugName{}, err - } - return newInstrument, nil -} - -func (s instrumentService) CreateInstrument(ctx context.Context, instrument model.Instrument) (model.IDSlugName, error) { - tx, err := s.db.BeginTxx(ctx, nil) + assignInstrumentsProjectsArgs[idx] = make([]db.ProjectInstrumentCreateBatchParams, len(ii[idx].Projects)) + for j, p := range ii[idx].Projects { + assignInstrumentsProjectsArgs[idx][j] = db.ProjectInstrumentCreateBatchParams{ + InstrumentID: r.ID, + ProjectID: p.ID, + } + } + instrumentStatusArgs[idx] = db.InstrumentStatusCreateOrUpdateBatchParams{ + InstrumentID: r.ID, + StatusID: ii[idx].StatusID, + Time: ii[idx].StatusTime, + } + if ii[idx].AwareID != nil { + instrumentAwareArgs = append(instrumentAwareArgs, db.AwarePlatformCreateBatchParams{ + InstrumentID: &r.ID, + AwareID: *ii[idx].AwareID, + }) + } + newInstruments[idx] = r + }) if err != nil { - return model.IDSlugName{}, err + return nil, err } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - newInstrument, err := createInstrument(ctx, qtx, instrument) + qtx.ProjectInstrumentCreateBatch(ctx, slices.Concat(assignInstrumentsProjectsArgs...)).Exec(batchExecErr(&err)) if err != nil { - return model.IDSlugName{}, err + return nil, err } - - if err := tx.Commit(); err != nil { - return model.IDSlugName{}, err + qtx.InstrumentStatusCreateOrUpdateBatch(ctx, instrumentStatusArgs).Exec(batchExecErr(&err)) + if 
err != nil { + return nil, err } - return newInstrument, nil -} - -func (s instrumentService) CreateInstruments(ctx context.Context, instruments []model.Instrument) ([]model.IDSlugName, error) { - tx, err := s.db.BeginTxx(ctx, nil) + qtx.AwarePlatformCreateBatch(ctx, instrumentAwareArgs).Exec(batchExecErr(&err)) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - ii := make([]model.IDSlugName, len(instruments)) - for idx, i := range instruments { - newInstrument, err := createInstrument(ctx, qtx, i) - if err != nil { - return nil, err - } - ii[idx] = newInstrument + if err := handleOptsBatch(ctx, qtx, ii, create); err != nil { + return nil, err } - if err := tx.Commit(); err != nil { + + if err := tx.Commit(ctx); err != nil { return nil, err } - return ii, nil + + return newInstruments, nil } -// UpdateInstrument updates a single instrument -func (s instrumentService) UpdateInstrument(ctx context.Context, projectID uuid.UUID, i model.Instrument) (model.Instrument, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) InstrumentUpdate(ctx context.Context, projectID uuid.UUID, inst dto.Instrument) (db.VInstrument, error) { + var a db.VInstrument + tx, err := s.db.Begin(ctx) if err != nil { - return model.Instrument{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateInstrument(ctx, projectID, i); err != nil { - return model.Instrument{}, err - } - if err := qtx.CreateOrUpdateInstrumentStatus(ctx, i.ID, i.StatusID, i.StatusTime); err != nil { - return model.Instrument{}, err - } - - if err := handleOpts(ctx, qtx, i, update); err != nil { - return model.Instrument{}, err - } - - aa, err := qtx.GetInstrument(ctx, i.ID) + if err := qtx.InstrumentUpdate(ctx, db.InstrumentUpdateParams{ + ProjectID: projectID, + ID: inst.ID, + Name: inst.Name, + TypeID: inst.TypeID, + Geometry: inst.Geometry, + Updater: inst.UpdaterID, + UpdateDate: 
inst.UpdateDate, + Station: inst.Station, + StationOffset: inst.StationOffset, + NidID: inst.NIDID, + UsgsID: inst.USGSID, + ShowCwmsTab: inst.ShowCwmsTab, + }); err != nil { + return a, err + } + if err := qtx.InstrumentStatusCreateOrUpdate(ctx, db.InstrumentStatusCreateOrUpdateParams{ + InstrumentID: inst.ID, + StatusID: inst.StatusID, + Time: inst.StatusTime, + }); err != nil { + return a, err + } + if err := handleOptsBatch(ctx, qtx, []dto.Instrument{inst}, update); err != nil { + return a, err + } + a, err = qtx.InstrumentGet(ctx, inst.ID) if err != nil { - return model.Instrument{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.Instrument{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return aa, nil + return a, nil } -func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p model.Profile) (model.Instrument, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) InstrumentUpdateGeometry(ctx context.Context, arg db.InstrumentUpdateGeometryParams) (db.VInstrument, error) { + var a db.VInstrument + tx, err := s.db.Begin(ctx) if err != nil { - return model.Instrument{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateInstrumentGeometry(ctx, projectID, instrumentID, geom, p); err != nil { - return model.Instrument{}, err + if _, err := qtx.InstrumentUpdateGeometry(ctx, arg); err != nil { + return a, err } - - aa, err := qtx.GetInstrument(ctx, instrumentID) + a, err = qtx.InstrumentGet(ctx, arg.ID) if err != nil { - return model.Instrument{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.Instrument{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } + return a, nil +} - return aa, nil +func handleOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument, rt requestType) error { + saa := 
make([]dto.Instrument, 0) + ipi := make([]dto.Instrument, 0) + incl := make([]dto.Instrument, 0) + for _, inst := range ii { + switch inst.TypeID { + case saaTypeID: + saa = append(saa, inst) + case ipiTypeID: + ipi = append(ipi, inst) + case inclTypeID: + incl = append(incl, inst) + default: + } + } + if len(saa) != 0 { + var err error + switch rt { + case create: + err = createSaaOptsBatch(ctx, q, saa) + case update: + err = updateSaaOptsBatch(ctx, q, saa) + } + if err != nil { + return err + } + } + if len(ipi) != 0 { + var err error + switch rt { + case create: + err = createIpiOptsBatch(ctx, q, ipi) + case update: + err = updateIpiOptsBatch(ctx, q, ipi) + } + if err != nil { + return err + } + } + return nil } diff --git a/api/internal/service/instrument_assign.go b/api/internal/service/instrument_assign.go index d5dc912b..f0662842 100644 --- a/api/internal/service/instrument_assign.go +++ b/api/internal/service/instrument_assign.go @@ -4,180 +4,322 @@ import ( "context" "fmt" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/google/uuid" ) -type InstrumentAssignService interface { - AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - ValidateInstrumentNamesProjectUnique(ctx context.Context, projectID uuid.UUID, instrumentNames []string) (model.InstrumentsValidation, error) - 
ValidateProjectsInstrumentNameUnique(ctx context.Context, instrumentName string, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) -} +type ReasonCode int -type instrumentAssignService struct { - db *model.Database - *model.Queries -} +const ( + None ReasonCode = iota + Unauthorized + InvalidName + InvalidUnassign +) -func NewInstrumentAssignService(db *model.Database, q *model.Queries) *instrumentAssignService { - return &instrumentAssignService{db, q} +type InstrumentsValidation struct { + ReasonCode ReasonCode `json:"-"` + IsValid bool `json:"is_valid"` + Errors []string `json:"errors"` } -func validateAssignProjectsToInstrument(ctx context.Context, q *model.Queries, profileID uuid.UUID, instrument model.Instrument, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := q.ValidateProjectsAssignerAuthorized(ctx, profileID, instrument.ID, projectIDs) - if err != nil || !v.IsValid { +func (s DBService) ProjectInstrumentCreateBatchAssignmentProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := assignProjectsToInstrument(ctx, qtx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - return q.ValidateProjectsInstrumentNameUnique(ctx, instrument.Name, projectIDs) + return v, tx.Commit(ctx) } -func validateAssignInstrumentsToProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { - iIDNames, err := q.ListInstrumentIDNamesByIDs(ctx, instrumentIDs) +func (s DBService) ProjectInstrumentDeleteBatchAssignmentProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a 
InstrumentsValidation + tx, err := s.db.Begin(ctx) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - iIDs := make([]uuid.UUID, len(iIDNames)) - iNames := make([]string, len(iIDNames)) - for idx := range iIDNames { - iIDs[idx] = iIDNames[idx].ID - iNames[idx] = iIDNames[idx].Name - } - v, err := q.ValidateInstrumentsAssignerAuthorized(ctx, profileID, iIDs) - if err != nil || !v.IsValid { + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := unassignProjectsFromInstrument(ctx, qtx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - return q.ValidateInstrumentNamesProjectUnique(ctx, projectID, iNames) + return v, tx.Commit(ctx) } -func assignProjectsToInstrument(ctx context.Context, q *model.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - instrument, err := q.GetInstrument(ctx, instrumentID) +func (s DBService) ProjectInstrumentCreateBatchAssignmentInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - v, err := validateAssignProjectsToInstrument(ctx, q, profileID, instrument, projectIDs) - if err != nil || !v.IsValid { + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := assignInstrumentsToProject(ctx, qtx, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - for _, pID := range projectIDs { - if err := q.AssignInstrumentToProject(ctx, pID, instrumentID); err != nil { - return model.InstrumentsValidation{}, err - } - } - return v, nil + return v, tx.Commit(ctx) } -func unassignProjectsFromInstrument(ctx context.Context, q *model.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - 
v, err := q.ValidateProjectsAssignerAuthorized(ctx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid { +func (s DBService) ProjectInstrumentDeleteBatchAssignmentInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := unassignInstrumentsFromProject(ctx, qtx, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - for _, pID := range projectIDs { - if err := q.UnassignInstrumentFromProject(ctx, pID, instrumentID); err != nil { - return v, err + return v, tx.Commit(ctx) +} + +func (s DBService) ProjectInstrumentGetInstrumentNamesUniqueForProject(ctx context.Context, projectID uuid.UUID, instrumentNames []string) (InstrumentsValidation, error) { + return validateInstrumentNamesUniqueForProject(ctx, s.Queries, projectID, instrumentNames) +} + +func (s DBService) ProjectInstrumentGetInstrumentNameUniqueForProjects(ctx context.Context, instrumentName string, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + return validateProjectsInstrumentNameUnique(ctx, s.Queries, instrumentName, projectIDs) +} + +func assignInstrumentsToProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateAssignInstrumentsToProject(ctx, q, profileID, projectID, instrumentIDs) + if err != nil || !a.IsValid { + return a, err + } + args := make([]db.ProjectInstrumentCreateBatchParams, len(instrumentIDs)) + for idx := range instrumentIDs { + args[idx] = db.ProjectInstrumentCreateBatchParams{ + ProjectID: projectID, + InstrumentID: instrumentIDs[idx], } } - return v, nil + q.ProjectInstrumentCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if 
err != nil { + return a, err + } + return a, nil } -func assignInstrumentsToProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := validateAssignInstrumentsToProject(ctx, q, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid { - return v, err +func validateInstrumentNamesUniqueForProject(ctx context.Context, q *db.Queries, projectID uuid.UUID, instrumentNames []string) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ProjectInstrumentListForProjectInstrumentNames(ctx, db.ProjectInstrumentListForProjectInstrumentNamesParams{ + ProjectID: projectID, + InstrumentNames: instrumentNames, + }) + if err != nil { + return a, err } - for _, iID := range instrumentIDs { - if err := q.AssignInstrumentToProject(ctx, projectID, iID); err != nil { - return v, err + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Instrument name '%s' is already taken. 
Instrument names must be unique within associated projects", + nn[idx], + ) } + a.Errors = vErrors + a.ReasonCode = InvalidName + } else { + a.IsValid = true + a.Errors = make([]string, 0) } - return v, nil + return a, err } -func unassignInstrumentsFromProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := q.ValidateInstrumentsAssignerAuthorized(ctx, profileID, instrumentIDs) - if err != nil || !v.IsValid { - return v, err - } - cc, err := q.GetProjectCountForInstruments(ctx, instrumentIDs) +func validateProjectsAssignerAuthorized(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ProjectInstrumentListForInstrumentProjectsProfileAdmin(ctx, db.ProjectInstrumentListForInstrumentProjectsProfileAdminParams{ + InstrumentID: instrumentID, + ProjectIds: projectIDs, + ProfileID: profileID, + }) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - - for _, count := range cc { - if count.ProjectCount < 1 { - // invalid instrument, skipping - continue - } - if count.ProjectCount == 1 { - v.IsValid = false - v.ReasonCode = model.InvalidUnassign - v.Errors = append(v.Errors, fmt.Sprintf("cannot unassign instruments from project, all instruments must have at least one project assinment (%s is only assign to this project)", count.InstrumentName)) + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Cannot assign instrument to project '%s' because the user is not an ADMIN of this project", + nn[idx], + ) } - if err := q.UnassignInstrumentFromProject(ctx, projectID, count.InstrumentID); err != nil { - return v, err + a.Errors = vErrors + a.ReasonCode = Unauthorized + } else { + a.IsValid = true + a.Errors = make([]string, 0) + } + return a, err +} + +func 
validateInstrumentsAssignerAuthorized(ctx context.Context, q *db.Queries, profileID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ProjectInstrumentListForInstrumentsProfileAdmin(ctx, db.ProjectInstrumentListForInstrumentsProfileAdminParams{ + InstrumentIds: instrumentIDs, + ProfileID: profileID, + }) + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Cannot assign instrument '%s' because it is assigned to another project '%s' which the user is not an ADMIN of", + nn[idx].InstrumentName, nn[idx].ProjectName, + ) } + a.Errors = vErrors + a.ReasonCode = Unauthorized + } else { + a.IsValid = true + a.Errors = make([]string, 0) } - return v, nil + return a, err } -func (s instrumentAssignService) AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func validateProjectsInstrumentNameUnique(ctx context.Context, q *db.Queries, instrumentName string, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ProjectInstrumentListForInstrumentNameProjects(ctx, db.ProjectInstrumentListForInstrumentNameProjectsParams{ + InstrumentName: instrumentName, + ProjectIds: projectIDs, + }) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Instrument name '%s' is already taken. 
Instrument names must be unique within associated projects", + nn[idx], + ) + } + a.Errors = vErrors + a.ReasonCode = InvalidName + } else { + a.IsValid = true + a.Errors = make([]string, 0) + } + return a, err +} - v, err := assignProjectsToInstrument(ctx, qtx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid || dryRun { +func validateAssignProjectsToInstrument(ctx context.Context, q *db.Queries, profileID uuid.UUID, instrument db.VInstrument, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + v, err := validateProjectsAssignerAuthorized(ctx, q, profileID, instrument.ID, projectIDs) + if err != nil || !v.IsValid { return v, err } - return v, tx.Commit() + return validateProjectsInstrumentNameUnique(ctx, q, instrument.Name, projectIDs) } -func (s instrumentAssignService) UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func unassignProjectsFromInstrument(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateProjectsAssignerAuthorized(ctx, q, profileID, instrumentID, projectIDs) + if err != nil || !a.IsValid { + return a, err + } + args := make([]db.ProjectInstrumentDeleteBatchParams, len(projectIDs)) + for idx := range projectIDs { + args[idx] = db.ProjectInstrumentDeleteBatchParams{ + ProjectID: projectIDs[idx], + InstrumentID: instrumentID, + } + } + q.ProjectInstrumentDeleteBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) + return a, nil +} - v, err := unassignProjectsFromInstrument(ctx, qtx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid || dryRun { +func validateAssignInstrumentsToProject(ctx 
context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + iIDNames, err := q.InstrumentIDNameListByIDs(ctx, instrumentIDs) + if err != nil { + return a, err + } + iIDs := make([]uuid.UUID, len(iIDNames)) + iNames := make([]string, len(iIDNames)) + for idx := range iIDNames { + iIDs[idx] = iIDNames[idx].ID + iNames[idx] = iIDNames[idx].Name + } + v, err := validateInstrumentsAssignerAuthorized(ctx, q, profileID, iIDs) + if err != nil { return v, err } - return v, tx.Commit() + if !v.IsValid { + return v, nil + } + return validateInstrumentNamesUniqueForProject(ctx, q, projectID, iNames) } -func (s instrumentAssignService) AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func assignProjectsToInstrument(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + instrument, err := q.InstrumentGet(ctx, instrumentID) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) - - v, err := assignInstrumentsToProject(ctx, qtx, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid || dryRun { + v, err := validateAssignProjectsToInstrument(ctx, q, profileID, instrument, projectIDs) + if err != nil || !v.IsValid { return v, err } - return v, tx.Commit() + for _, pID := range projectIDs { + if err := q.ProjectInstrumentCreate(ctx, db.ProjectInstrumentCreateParams{ + ProjectID: pID, + InstrumentID: instrumentID, + }); err != nil { + return a, err + } + } + return v, nil } -func (s instrumentAssignService) UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) 
(model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func unassignInstrumentsFromProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateInstrumentsAssignerAuthorized(ctx, q, profileID, instrumentIDs) + if err != nil || !a.IsValid { + return a, err + } + cc, err := q.ProjectInstrumentListCountByInstrument(ctx, instrumentIDs) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) - - v, err := unassignInstrumentsFromProject(ctx, qtx, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid || dryRun { - return v, err + args := make([]db.ProjectInstrumentDeleteBatchParams, 0) + for _, count := range cc { + if count.ProjectCount < 1 { + // invalid instrument, skipping + continue + } + if count.ProjectCount == 1 { + a.IsValid = false + a.ReasonCode = InvalidUnassign + a.Errors = append(a.Errors, fmt.Sprintf("cannot unassign instruments from project, all instruments must have at least one project assignment (%s is only assigned to this project)", count.InstrumentName)) + } + args = append(args, db.ProjectInstrumentDeleteBatchParams{ + ProjectID: projectID, + InstrumentID: count.InstrumentID, + }) + } + q.ProjectInstrumentDeleteBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return a, err } - return v, tx.Commit() + return a, nil } diff --git a/api/internal/service/instrument_constant.go b/api/internal/service/instrument_constant.go index cf500988..463a9c98 100644 --- a/api/internal/service/instrument_constant.go +++ b/api/internal/service/instrument_constant.go @@ -2,73 +2,58 @@ package service import ( "context" + "errors" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + 
"github.com/USACE/instrumentation-api/api/internal/dto" ) -type InstrumentConstantService interface { - ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]model.Timeseries, error) - CreateInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error - CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) - DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error -} - -type instrumentConstantService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentConstantService(db *model.Database, q *model.Queries) *instrumentConstantService { - return &instrumentConstantService{db, q} -} - -// CreateInstrumentConstants creates many instrument constants from an array of instrument constants -// An InstrumentConstant is structurally the same as a timeseries and saved in the same tables -func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) InstrumentConstantCreateBatch(ctx context.Context, tt []dto.Timeseries) ([]db.TimeseriesCreateBatchRow, error) { + tx, err := s.db.Begin(ctx) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - uu := make([]model.Timeseries, len(tt)) + createTimeseriesParams := make([]db.TimeseriesCreateBatchParams, len(tt)) for idx, t := range tt { - t.Type = model.ConstantTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, t) - if err != nil { - return nil, err - } - if err := qtx.CreateInstrumentConstant(ctx, tsNew.InstrumentID, tsNew.ID); err != nil { - return nil, err + createTimeseriesParams[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &t.InstrumentID, + Name: t.Name, + ParameterID: t.ParameterID, + UnitID: t.UnitID, + Type: db.NullTimeseriesType{ + TimeseriesType: db.TimeseriesTypeConstant, + Valid: 
true, + }, } - uu[idx] = tsNew } - if err := tx.Commit(); err != nil { + uu := make([]db.TimeseriesCreateBatchRow, len(createTimeseriesParams)) + createConstantsParams := make([]db.InstrumentConstantCreateBatchParams, len(createTimeseriesParams)) + qtx.TimeseriesCreateBatch(ctx, createTimeseriesParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("instrument id must not be nil") + } + createConstantsParams[i] = db.InstrumentConstantCreateBatchParams{ + InstrumentID: *r.InstrumentID, + TimeseriesID: r.ID, + } + uu[i] = r + }) + if err != nil { return nil, err } - return uu, nil -} - -// DeleteInstrumentConstant removes a timeseries as an Instrument Constant; Does not delete underlying timeseries -func (s instrumentConstantService) DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) + qtx.InstrumentConstantCreateBatch(ctx, createConstantsParams).Exec(batchExecErr(&err)) if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - if err := qtx.DeleteInstrumentConstant(ctx, instrumentID, timeseriesID); err != nil { - return err + return nil, err } - - if err := qtx.DeleteTimeseries(ctx, timeseriesID); err != nil { - return err + if err := tx.Commit(ctx); err != nil { + return nil, err } - - return tx.Commit() + return uu, nil } diff --git a/api/internal/service/instrument_group.go b/api/internal/service/instrument_group.go index f20d0456..5480a224 100644 --- a/api/internal/service/instrument_group.go +++ b/api/internal/service/instrument_group.go @@ -3,52 +3,37 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type InstrumentGroupService interface { - 
ListInstrumentGroups(ctx context.Context) ([]model.InstrumentGroup, error) - GetInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) (model.InstrumentGroup, error) - CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]model.InstrumentGroup, error) - UpdateInstrumentGroup(ctx context.Context, group model.InstrumentGroup) (model.InstrumentGroup, error) - DeleteFlagInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) error - ListInstrumentGroupInstruments(ctx context.Context, groupID uuid.UUID) ([]model.Instrument, error) - CreateInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error - DeleteInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error -} - -type instrumentGroupService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentGroupService(db *model.Database, q *model.Queries) *instrumentGroupService { - return &instrumentGroupService{db, q} -} - -// CreateInstrumentGroup creates many instruments from an array of instruments -func (s instrumentGroupService) CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]model.InstrumentGroup, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - q := s.WithTx(tx) - - gg := make([]model.InstrumentGroup, len(groups)) +func (s DBService) InstrumentGroupCreateBatch(ctx context.Context, groups []dto.InstrumentGroup) ([]db.InstrumentGroup, error) { + args := make([]db.InstrumentGroupCreateBatchParams, len(groups)) for idx, g := range groups { - gNew, err := q.CreateInstrumentGroup(ctx, g) - if err != nil { - return nil, err + args[idx] = db.InstrumentGroupCreateBatchParams{ + Name: g.Name, + Description: &g.Description, + Creator: g.CreatorID, + CreateDate: g.CreateDate, + ProjectID: g.ProjectID, } - gg[idx] = gNew } - - if err := tx.Commit(); err != nil { + var err 
error + ggNew := make([]db.InstrumentGroup, len(groups)) + s.Queries.InstrumentGroupCreateBatch(ctx, args).QueryRow(batchQueryRowCollect(ggNew, &err)) + if err != nil { return nil, err } + return ggNew, nil +} - return gg, nil +func (s DBService) InstrumentGroupUpdate(ctx context.Context, g dto.InstrumentGroup) (db.InstrumentGroup, error) { + return s.Queries.InstrumentGroupUpdate(ctx, db.InstrumentGroupUpdateParams{ + ID: g.ID, + Name: g.Name, + Description: &g.Description, + ProjectID: g.ProjectID, + Updater: g.UpdaterID, + UpdateDate: g.UpdateDate, + }) } diff --git a/api/internal/service/instrument_incl.go b/api/internal/service/instrument_incl.go new file mode 100644 index 00000000..0731e442 --- /dev/null +++ b/api/internal/service/instrument_incl.go @@ -0,0 +1,161 @@ +package service + +import ( + "context" + "errors" + "slices" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" +) + +func (s DBService) InclSegmentUpdateBatch(ctx context.Context, segs []dto.InclSegment) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + updateInclArgs := make([]db.InclSegmentUpdateBatchParams, len(segs)) + + for idx, seg := range segs { + updateInclArgs[idx] = db.InclSegmentUpdateBatchParams{ + ID: int32(seg.ID), + InstrumentID: seg.InstrumentID, + DepthTimeseriesID: seg.DepthTimeseriesID, + A0TimeseriesID: seg.A0TimeseriesID, + A180TimeseriesID: seg.A180TimeseriesID, + B0TimeseriesID: seg.B0TimeseriesID, + B180TimeseriesID: seg.B180TimeseriesID, + } + } + qtx.InclSegmentUpdateBatch(ctx, updateInclArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) +} + +func inclOptsCreateBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + createTimeseriesBatchParams := make([][]db.TimeseriesCreateBatchParams, len(ii)) + createInclSegmentBatchParams := 
make([][]db.InclSegmentCreateBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.TimeseriesCreateBatchParams, len(ii)) + createInclOptsParams := make([]db.InclOptsCreateBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.InclOpts](inst.Opts) + if err != nil { + return err + } + createInclSegmentBatchParams[idx] = make([]db.InclSegmentCreateBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createInclSegmentBatchParams[idx][i] = db.InclSegmentCreateBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: dto.InclParameterID, + UnitID: dto.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createInclOptsParams[idx] = db.InclOptsCreateBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } + + args := slices.Concat(createTimeseriesBatchParams...) + inclArgs := slices.Concat(createInclSegmentBatchParams...) 
+ createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) + + var err error + q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + }) + if err != nil { + return err + } + q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.InclSegmentCreateBatch(ctx, inclArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesCreateBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + createInclOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.InclOptsCreateBatch(ctx, createInclOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createBottomElevationMmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return err +} + +func updateInclOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + updateInclOptsParams := make([]db.InclOptsUpdateBatchParams, len(ii)) + createMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.InclOpts](inst.Opts) + if err != nil { + return err + } + updateInclOptsParams[idx] = db.InclOptsUpdateBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: 
opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } + } + var err error + q.InclOptsUpdateBatch(ctx, updateInclOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err +} diff --git a/api/internal/service/instrument_ipi.go b/api/internal/service/instrument_ipi.go index 52ac84f6..fb91c4fc 100644 --- a/api/internal/service/instrument_ipi.go +++ b/api/internal/service/instrument_ipi.go @@ -2,47 +2,192 @@ package service import ( "context" + "errors" + "fmt" + "slices" "time" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type IpiInstrumentService interface { - GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.IpiSegment, error) - UpdateIpiSegment(ctx context.Context, seg model.IpiSegment) error - UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error - GetIpiMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.IpiMeasurements, error) -} - -type ipiInstrumentService struct { - db *model.Database - *model.Queries -} +func (s DBService) IpiSegmentUpdateBatch(ctx context.Context, segs []dto.IpiSegment) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) -func NewIpiInstrumentService(db *model.Database, q *model.Queries) *ipiInstrumentService { - return &ipiInstrumentService{db, q} -} + updateIpiArgs := make([]db.IpiSegmentUpdateBatchParams, len(segs)) + createMmtArgs := make([]db.TimeseriesMeasurementCreateBatchParams, 0) -func (s ipiInstrumentService) UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error { - tx, err := s.db.BeginTxx(ctx, nil) + for idx, seg := range segs { + 
updateIpiArgs[idx] = db.IpiSegmentUpdateBatchParams{ + ID: int32(seg.ID), + InstrumentID: seg.InstrumentID, + LengthTimeseriesID: &seg.LengthTimeseriesID, + TiltTimeseriesID: seg.TiltTimeseriesID, + IncDevTimeseriesID: seg.IncDevTimeseriesID, + TempTimeseriesID: seg.TempTimeseriesID, + } + if seg.Length == nil { + continue + } + createMmtArgs = append(createMmtArgs, db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: seg.LengthTimeseriesID, + Time: time.Now(), + Value: *seg.Length, + }) + } + qtx.IpiSegmentUpdateBatch(ctx, updateIpiArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + qtx.TimeseriesMeasurementCreateBatch(ctx, createMmtArgs).Exec(batchExecErr(&err)) if err != nil { return err } - defer model.TxDo(tx.Rollback) + return tx.Commit(ctx) +} - qtx := s.WithTx(tx) +func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + createTimeseriesBatchParams := make([][]db.TimeseriesCreateBatchParams, len(ii)) + createIpiSegmentBatchParams := make([][]db.IpiSegmentCreateBatchParams, len(ii)) - for _, seg := range segs { - if err := qtx.UpdateIpiSegment(ctx, seg); err != nil { + createBottomElevationTsParams := make([]db.TimeseriesCreateBatchParams, len(ii)) + createIpiOptsParams := make([]db.IpiOptsCreateBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.IpiOpts](inst.Opts) + if err != nil { return err } - if seg.Length == nil { - continue + createTimeseriesBatchParams[idx] = make([]db.TimeseriesCreateBatchParams, opts.NumSegments) + createIpiSegmentBatchParams[idx] = make([]db.IpiSegmentCreateBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), + ParameterID: dto.IpiParameterID, + UnitID: dto.FeetUnitID, + 
Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createIpiSegmentBatchParams[idx][i] = db.IpiSegmentCreateBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: dto.IpiParameterID, + UnitID: dto.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createIpiOptsParams[idx] = db.IpiOptsCreateBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } + + args := slices.Concat(createTimeseriesBatchParams...) + ipiArgs := slices.Concat(createIpiSegmentBatchParams...) + createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) + + var err error + q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + ipiArgs[i].LengthTimeseriesID = &r.ID + }) + if err != nil { + return err + } + q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.IpiSegmentCreateBatch(ctx, ipiArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesCreateBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + createIpiOptsParams[i].BottomElevationTimeseriesID = &r.ID + 
createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.IpiOptsCreateBatch(ctx, createIpiOptsParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return } - if err := qtx.CreateTimeseriesMeasurement(ctx, seg.LengthTimeseriesID, time.Now(), *seg.Length); err != nil { + }) + return err +} + +func updateIpiOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + updateIpiOptsParams := make([]db.IpiOptsUpdateBatchParams, len(ii)) + createMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.IpiOpts](inst.Opts) + if err != nil { return err } + updateIpiOptsParams[idx] = db.IpiOptsUpdateBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } + } + var err error + q.IpiOptsUpdateBatch(ctx, updateIpiOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err } - return tx.Commit() + q.TimeseriesMeasurementCreateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/service/instrument_note.go b/api/internal/service/instrument_note.go index 77a462c6..530f3540 100644 --- a/api/internal/service/instrument_note.go +++ b/api/internal/service/instrument_note.go @@ -3,50 +3,48 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type InstrumentNoteService interface 
{ - ListInstrumentNotes(ctx context.Context) ([]model.InstrumentNote, error) - ListInstrumentInstrumentNotes(ctx context.Context, instrumentID uuid.UUID) ([]model.InstrumentNote, error) - GetInstrumentNote(ctx context.Context, noteID uuid.UUID) (model.InstrumentNote, error) - CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]model.InstrumentNote, error) - UpdateInstrumentNote(ctx context.Context, n model.InstrumentNote) (model.InstrumentNote, error) - DeleteInstrumentNote(ctx context.Context, noteID uuid.UUID) error -} - -type instrumentNoteService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentNoteService(db *model.Database, q *model.Queries) *instrumentNoteService { - return &instrumentNoteService{db, q} -} - -// CreateInstrumentNote creates many instrument notes from an array of instrument notes -func (s instrumentNoteService) CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]model.InstrumentNote, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) InstrumentNoteCreateBatch(ctx context.Context, notes []dto.InstrumentNote) ([]db.InstrumentNote, error) { + tx, err := s.db.Begin(ctx) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - nn := make([]model.InstrumentNote, len(notes)) + args := make([]db.InstrumentNoteCreateBatchParams, len(notes)) for idx, n := range notes { - noteNew, err := qtx.CreateInstrumentNote(ctx, n) - if err != nil { - return nil, err + args[idx] = db.InstrumentNoteCreateBatchParams{ + InstrumentID: n.InstrumentID, + Title: n.Title, + Body: n.Body, + Time: n.Time, + Creator: n.CreatorID, + CreateDate: n.CreateDate, } - nn[idx] = noteNew } - - if err := tx.Commit(); err != nil { + nn := make([]db.InstrumentNote, len(args)) + qtx.InstrumentNoteCreateBatch(ctx, args).QueryRow(batchQueryRowCollect(nn, &err)) + if err != nil { + return nil, err + } + if err := tx.Commit(ctx); err != nil { 
return nil, err } return nn, nil } + +func (s DBService) InstrumentNoteUpdate(ctx context.Context, u dto.InstrumentNote) (db.InstrumentNote, error) { + return s.Queries.InstrumentNoteUpdate(ctx, db.InstrumentNoteUpdateParams{ + ID: u.ID, + Title: u.Title, + Body: u.Body, + Time: u.Time, + Updater: u.UpdaterID, + UpdateDate: u.UpdateDate, + }) +} diff --git a/api/internal/service/instrument_opts.go b/api/internal/service/instrument_opts.go deleted file mode 100644 index 84eca1ef..00000000 --- a/api/internal/service/instrument_opts.go +++ /dev/null @@ -1,130 +0,0 @@ -package service - -import ( - "context" - "fmt" - "time" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -func handleOpts(ctx context.Context, q *model.Queries, inst model.Instrument, rt requestType) error { - switch inst.TypeID { - case saaTypeID: - opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) - if err != nil { - return err - } - if rt == create { - for i := 1; i <= opts.NumSegments; i++ { - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + fmt.Sprintf("segment-%d-length", i) - tsConstant.Name = inst.Slug + fmt.Sprintf("segment-%d-length", i) - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - if err := q.CreateSaaSegment(ctx, model.SaaSegment{ID: i, InstrumentID: inst.ID, LengthTimeseriesID: tsNew.ID}); err != nil { - return err - } - } - - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + "-bottom-elevation" - tsConstant.Name = inst.Slug + "-bottom-elevation" - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - 
return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - opts.BottomElevationTimeseriesID = tsNew.ID - if err := q.CreateSaaOpts(ctx, inst.ID, opts); err != nil { - return err - } - } - if rt == update { - if err := q.UpdateSaaOpts(ctx, inst.ID, opts); err != nil { - return err - } - } - if err := q.CreateTimeseriesMeasurement(ctx, opts.BottomElevationTimeseriesID, time.Now(), opts.BottomElevation); err != nil { - return err - } - case ipiTypeID: - opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) - if err != nil { - return err - } - if rt == create { - for i := 1; i <= opts.NumSegments; i++ { - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + fmt.Sprintf("segment-%d-length", i) - tsConstant.Name = inst.Slug + fmt.Sprintf("segment-%d-length", i) - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - if err := q.CreateIpiSegment(ctx, model.IpiSegment{ID: i, InstrumentID: inst.ID, LengthTimeseriesID: tsNew.ID}); err != nil { - return err - } - } - - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + "-bottom-elevation" - tsConstant.Name = inst.Slug + "-bottom-elevation" - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - opts.BottomElevationTimeseriesID = tsNew.ID - if err := q.CreateIpiOpts(ctx, inst.ID, opts); err != nil { - return err - } - } - if rt == update { - if err := q.UpdateIpiOpts(ctx, inst.ID, opts); err != nil { - return err - 
} - } - if err := q.CreateTimeseriesMeasurement(ctx, opts.BottomElevationTimeseriesID, time.Now(), opts.BottomElevation); err != nil { - return err - } - default: - } - return nil -} diff --git a/api/internal/service/instrument_saa.go b/api/internal/service/instrument_saa.go index 8c42eca2..d801e42f 100644 --- a/api/internal/service/instrument_saa.go +++ b/api/internal/service/instrument_saa.go @@ -2,47 +2,182 @@ package service import ( "context" + "errors" + "fmt" + "slices" "time" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type SaaInstrumentService interface { - GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.SaaSegment, error) - UpdateSaaSegment(ctx context.Context, seg model.SaaSegment) error - UpdateSaaSegments(ctx context.Context, segs []model.SaaSegment) error - GetSaaMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.SaaMeasurements, error) -} +func (s DBService) SaaSegmentUpdateBatch(ctx context.Context, segs []dto.SaaSegment) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) -type saaInstrumentService struct { - db *model.Database - *model.Queries + updateSaaSegParams := make([]db.SaaSegmentUpdateBatchParams, len(segs)) + createMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, 0) + for idx, seg := range segs { + updateSaaSegParams[idx] = db.SaaSegmentUpdateBatchParams{ + ID: int32(seg.ID), + InstrumentID: seg.InstrumentID, + LengthTimeseriesID: &seg.LengthTimeseriesID, + XTimeseriesID: seg.XTimeseriesID, + YTimeseriesID: seg.YTimeseriesID, + ZTimeseriesID: seg.ZTimeseriesID, + TempTimeseriesID: seg.TempTimeseriesID, + } + if seg.Length == nil { + continue + } + createMmtParams = append(createMmtParams, 
db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: seg.LengthTimeseriesID, + Time: time.Now(), + Value: *seg.Length, + }) + } + qtx.SaaSegmentUpdateBatch(ctx, updateSaaSegParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + qtx.TimeseriesMeasurementCreateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) } -func NewSaaInstrumentService(db *model.Database, q *model.Queries) *saaInstrumentService { - return &saaInstrumentService{db, q} -} +func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + createTimeseriesBatchParams := make([][]db.TimeseriesCreateBatchParams, len(ii)) + createSaaSegmentBatchParams := make([][]db.SaaSegmentCreateBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.TimeseriesCreateBatchParams, len(ii)) + createSaaOptsParams := make([]db.SaaOptsCreateBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.SaaOpts](inst.Opts) + if err != nil { + return err + } + createTimeseriesBatchParams[idx] = make([]db.TimeseriesCreateBatchParams, opts.NumSegments) + createSaaSegmentBatchParams[idx] = make([]db.SaaSegmentCreateBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), + ParameterID: dto.SaaParameterID, + UnitID: dto.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createSaaSegmentBatchParams[idx][i] = db.SaaSegmentCreateBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: dto.SaaParameterID, + 
UnitID: dto.FeetUnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeConstant, + }, + } + createSaaOptsParams[idx] = db.SaaOptsCreateBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } -func (s saaInstrumentService) UpdateSaaSegments(ctx context.Context, segs []model.SaaSegment) error { - tx, err := s.db.BeginTxx(ctx, nil) + args := slices.Concat(createTimeseriesBatchParams...) + saaArgs := slices.Concat(createSaaSegmentBatchParams...) + createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) + + var err error + q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + saaArgs[i].LengthTimeseriesID = &r.ID + }) if err != nil { return err } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) + q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.SaaSegmentCreateBatch(ctx, saaArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesCreateBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + createSaaOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.SaaOptsCreateBatch(ctx, createSaaOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + 
q.TimeseriesMeasurementCreateBatch(ctx, createBottomElevationMmtParams).Exec(batchExecErr(&err)) + return err +} - for _, seg := range segs { - if err := qtx.UpdateSaaSegment(ctx, seg); err != nil { +func updateSaaOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + updateSaaOptsParams := make([]db.SaaOptsUpdateBatchParams, len(ii)) + createMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.SaaOpts](inst.Opts) + if err != nil { return err } - if seg.Length == nil { - continue + updateSaaOptsParams[idx] = db.SaaOptsUpdateBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, } - if err := qtx.CreateTimeseriesMeasurement(ctx, seg.LengthTimeseriesID, time.Now(), *seg.Length); err != nil { - return err + createMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, } } - return tx.Commit() + var err error + q.SaaOptsUpdateBatch(ctx, updateSaaOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/service/instrument_status.go b/api/internal/service/instrument_status.go index b41a8f6f..b726b537 100644 --- a/api/internal/service/instrument_status.go +++ b/api/internal/service/instrument_status.go @@ -3,40 +3,21 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type InstrumentStatusService interface { - ListInstrumentStatus(ctx context.Context, instrumentID uuid.UUID) ([]model.InstrumentStatus, error) - GetInstrumentStatus(ctx context.Context, statusID 
uuid.UUID) (model.InstrumentStatus, error) - CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error - DeleteInstrumentStatus(ctx context.Context, statusID uuid.UUID) error -} - -type instrumentStatusService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentStatusService(db *model.Database, q *model.Queries) *instrumentStatusService { - return &instrumentStatusService{db, q} -} - -func (s instrumentStatusService) CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - for _, updateStatus := range ss { - if err := qtx.CreateOrUpdateInstrumentStatus(ctx, instrumentID, updateStatus.StatusID, updateStatus.Time); err != nil { - return err +func (s DBService) InstrumentStatusCreateOrUpdateBatch(ctx context.Context, instrumentID uuid.UUID, ss []dto.InstrumentStatus) error { + args := make([]db.InstrumentStatusCreateOrUpdateBatchParams, len(ss)) + for idx, st := range ss { + args[idx] = db.InstrumentStatusCreateOrUpdateBatchParams{ + InstrumentID: instrumentID, + StatusID: st.StatusID, + Time: st.Time, } } - - return tx.Commit() + var err error + s.Queries.InstrumentStatusCreateOrUpdateBatch(ctx, args).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/service/measurement.go b/api/internal/service/measurement.go index 909185a0..3f9e1f6b 100644 --- a/api/internal/service/measurement.go +++ b/api/internal/service/measurement.go @@ -2,124 +2,237 @@ package service import ( "context" - "time" + "math" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/util" ) -type MeasurementService interface { - 
ListTimeseriesMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw model.TimeWindow, threshold int) (*model.MeasurementCollection, error) - DeleteTimeserieMeasurements(ctx context.Context, timeseriesID uuid.UUID, t time.Time) error - GetTimeseriesConstantMeasurement(ctx context.Context, timeseriesID uuid.UUID, constantName string) (model.Measurement, error) - CreateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error - CreateOrUpdateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error - CreateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n model.TimeseriesNote) error - CreateOrUpdateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n model.TimeseriesNote) error - CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) - CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) - UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) - DeleteTimeseriesMeasurementsByRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error - DeleteTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error -} - -type measurementService struct { - db *model.Database - *model.Queries -} - -func NewMeasurementService(db *model.Database, q *model.Queries) *measurementService { - return &measurementService{db, q} +func (s DBService) TimeseriesMeasurementListRange(ctx context.Context, arg db.TimeseriesMeasurementListRangeParams, threshold int) ([]db.VTimeseriesMeasurement, error) { + mm, err := s.Queries.TimeseriesMeasurementListRange(ctx, arg) + if err != nil { + return nil, err + } + return db.LTTB(mm, threshold), nil } -type mmtCbk func(context.Context, 
uuid.UUID, time.Time, float64) error -type noteCbk func(context.Context, uuid.UUID, time.Time, model.TimeseriesNote) error +func (s DBService) CreateTimeseriesMeasurements(ctx context.Context, mc []dto.MeasurementCollection) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) -func createMeasurements(ctx context.Context, mc []model.MeasurementCollection, mmtFn mmtCbk, noteFn noteCbk) error { - for _, c := range mc { - for _, m := range c.Items { - if err := mmtFn(ctx, c.TimeseriesID, m.Time, float64(m.Value)); err != nil { - return err + chunkSize := 1_000 + mm := make([]db.TimeseriesMeasurementCreateBatchParams, chunkSize) + nn := make([]db.TimeseriesNoteCreateBatchParams, chunkSize) + var mIdx, nIdx int + + for idx := range mc { + for _, m := range mc[idx].Items { + v := float64(m.Value) + mm[mIdx] = db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Value: v, } - if m.Masked != nil || m.Validated != nil || m.Annotation != nil { - if err := noteFn(ctx, c.TimeseriesID, m.Time, m.TimeseriesNote); err != nil { + mIdx++ + if mIdx == chunkSize { + qtx.TimeseriesMeasurementCreateBatch(ctx, mm).Exec(batchExecErr(&err)) + if err != nil { return err } + mIdx = 0 + } + if math.IsNaN(v) || math.IsInf(v, 0) { + masked := true + m.Masked = &masked + } + if m.Masked != nil || m.Validated != nil || m.Annotation != nil { + nn[nIdx] = db.TimeseriesNoteCreateBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Masked: m.Masked, + Validated: m.Validated, + Annotation: m.Annotation, + } + nIdx++ + if nIdx == chunkSize { + qtx.TimeseriesNoteCreateBatch(ctx, nn).Exec(batchExecErr(&err)) + if err != nil { + return err + } + nIdx = 0 + } } } } - return nil + if mIdx != 0 { + qtx.TimeseriesMeasurementCreateBatch(ctx, mm[:mIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + if nIdx != 0 { + qtx.TimeseriesNoteCreateBatch(ctx, 
nn[:nIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + return tx.Commit(ctx) } -// CreateTimeseriesMeasurements creates many timeseries from an array of timeseries -func (s measurementService) CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +// CreateOrUpdateTimeseriesMeasurements creates many timeseries from an array of timeseries +// If a timeseries measurement already exists for a given timeseries_id and time, the value is updated +func (s DBService) TimeseriesMeasurementCreateOrUpdateBatch(ctx context.Context, mc []dto.MeasurementCollection) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := createMeasurements(ctx, mc, qtx.CreateTimeseriesMeasurement, qtx.CreateTimeseriesNote); err != nil { - return nil, err + if err := createOrUpdateTimeseriesMeasurementsBatch(ctx, qtx, mc); err != nil { + return err } - if err := tx.Commit(); err != nil { - return nil, err - } - - return mc, nil + return tx.Commit(ctx) } -// CreateOrUpdateTimeseriesMeasurements creates many timeseries from an array of timeseries -// If a timeseries measurement already exists for a given timeseries_id and time, the value is updated -func (s measurementService) CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesMeasurementUpdateBatch(ctx context.Context, mc []dto.MeasurementCollection, tw *util.TimeWindow) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := createMeasurements(ctx, mc, qtx.CreateOrUpdateTimeseriesMeasurement, 
qtx.CreateOrUpdateTimeseriesNote); err != nil { - return nil, err + if tw != nil { + if err := deleteTimeseriesMeasurementsRange(ctx, qtx, mc, *tw); err != nil { + return err + } } - - if err := tx.Commit(); err != nil { - return nil, err + if err := createOrUpdateTimeseriesMeasurementsBatch(ctx, qtx, mc); err != nil { + return err } - - return mc, nil + return tx.Commit(ctx) } -// UpdateTimeseriesMeasurements updates many timeseries measurements, "overwriting" time and values to match paylaod -func (s measurementService) UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesMeasurementDeleteRange(ctx context.Context, arg db.TimeseriesMeasurementDeleteRangeParams) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for _, c := range mc { - if err := qtx.DeleteTimeseriesMeasurementsByRange(ctx, c.TimeseriesID, tw.After, tw.Before); err != nil { - return nil, err + if err := qtx.TimeseriesMeasurementDeleteRange(ctx, arg); err != nil { + return err + } + if err := qtx.TimeseriesNoteDeleteRange(ctx, db.TimeseriesNoteDeleteRangeParams(arg)); err != nil { + return err + } + return tx.Commit(ctx) +} + +func createOrUpdateTimeseriesMeasurementsBatch(ctx context.Context, q *db.Queries, mc []dto.MeasurementCollection) error { + chunkSize := 1_000 + mm := make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, chunkSize) + nn := make([]db.TimeseriesNoteCreateOrUpdateBatchParams, chunkSize) + var mIdx, nIdx int + + var err error + for idx := range mc { + for _, m := range mc[idx].Items { + mm[mIdx] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Value: float64(m.Value), + } + mIdx++ + if mIdx == chunkSize { + 
q.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mm).Exec(batchExecErr(&err)) + if err != nil { + return err + } + mIdx = 0 + } + if m.Masked != nil || m.Validated != nil || m.Annotation != nil { + nn[nIdx] = db.TimeseriesNoteCreateOrUpdateBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Masked: m.Masked, + Validated: m.Validated, + Annotation: m.Annotation, + } + nIdx++ + if nIdx == chunkSize { + q.TimeseriesNoteCreateOrUpdateBatch(ctx, nn).Exec(batchExecErr(&err)) + if err != nil { + return err + } + nIdx = 0 + } + } } - if err := qtx.DeleteTimeseriesNote(ctx, c.TimeseriesID, tw.After, tw.Before); err != nil { - return nil, err + } + if mIdx != 0 { + q.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mm[:mIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err } } - - if err := createMeasurements(ctx, mc, qtx.CreateTimeseriesMeasurement, qtx.CreateTimeseriesNote); err != nil { - return nil, err + if nIdx != 0 { + q.TimeseriesNoteCreateOrUpdateBatch(ctx, nn[:mIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } } + return nil +} - if err := tx.Commit(); err != nil { - return nil, err - } +func deleteTimeseriesMeasurementsRange(ctx context.Context, q *db.Queries, mc []dto.MeasurementCollection, tw util.TimeWindow) error { + chunkSize := 1_000 + delMmParams := make([]db.TimeseriesMeasurementDeleteRangeBatchParams, chunkSize) + delNnParams := make([]db.TimeseriesNoteDeleteRangeBatchParams, chunkSize) + var dIdx int - return mc, nil + var err error + for _, c := range mc { + delMmParams[dIdx] = db.TimeseriesMeasurementDeleteRangeBatchParams{ + TimeseriesID: c.TimeseriesID, + After: tw.After, + Before: tw.Before, + } + delNnParams[dIdx] = db.TimeseriesNoteDeleteRangeBatchParams{ + TimeseriesID: c.TimeseriesID, + After: tw.After, + Before: tw.Before, + } + dIdx++ + if dIdx == chunkSize { + q.TimeseriesMeasurementDeleteRangeBatch(ctx, delMmParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + 
q.TimeseriesNoteDeleteRangeBatch(ctx, delNnParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + dIdx = 0 + } + } + if dIdx != 0 { + q.TimeseriesMeasurementDeleteRangeBatch(ctx, delMmParams[:dIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesNoteDeleteRangeBatch(ctx, delNnParams[:dIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + return nil } diff --git a/api/internal/service/measurement_inclinometer.go b/api/internal/service/measurement_inclinometer.go deleted file mode 100644 index 8805a046..00000000 --- a/api/internal/service/measurement_inclinometer.go +++ /dev/null @@ -1,120 +0,0 @@ -package service - -import ( - "context" - "time" - - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InclinometerMeasurementService interface { - ListInclinometerMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw model.TimeWindow) (*model.InclinometerMeasurementCollection, error) - ListInclinometerMeasurementValues(ctx context.Context, timeseriesID uuid.UUID, time time.Time, inclConstant float64) ([]*model.InclinometerMeasurementValues, error) - DeleteInclinometerMeasurement(ctx context.Context, timeseriesID uuid.UUID, time time.Time) error - CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) - ListInstrumentIDsFromTimeseriesID(ctx context.Context, timeseriesID uuid.UUID) ([]uuid.UUID, error) - CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error -} - -type inclinometerMeasurementService struct { - db *model.Database - *model.Queries -} - -func NewInclinometerMeasurementService(db *model.Database, q *model.Queries) *inclinometerMeasurementService { - return &inclinometerMeasurementService{db, q} -} - -// CreateInclinometerMeasurements 
creates many inclinometer from an array of inclinometer -// If a inclinometer measurement already exists for a given timeseries_id and time, the values is updated -func (s inclinometerMeasurementService) CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - // Iterate All inclinometer Measurements - for idx := range im { - for i := range im[idx].Inclinometers { - im[idx].Inclinometers[i].Creator = p.ID - im[idx].Inclinometers[i].CreateDate = createDate - if err := qtx.CreateOrUpdateInclinometerMeasurement(ctx, im[idx].TimeseriesID, im[idx].Inclinometers[i].Time, im[idx].Inclinometers[i].Values, p.ID, createDate); err != nil { - return nil, err - } - } - } - if err := tx.Commit(); err != nil { - return nil, err - } - - return im, nil -} - -// CreateTimeseriesConstant creates timeseries constant -func (s inclinometerMeasurementService) CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - instrumentIDs, err := qtx.ListInstrumentIDsFromTimeseriesID(ctx, timeseriesID) - if err != nil { - return err - } - - parameterIDs, err := qtx.ListParameterIDsFromParameterName(ctx, parameterName) - if err != nil { - return err - } - - unitIDs, err := qtx.ListUnitIDsFromUnitName(ctx, unitName) - if err != nil { - return err - } - - if len(instrumentIDs) > 0 && len(parameterIDs) > 0 && len(unitIDs) > 0 { - t := model.Timeseries{} - measurement := model.Measurement{} - measurements := []model.Measurement{} - mc := model.MeasurementCollection{} - mcs := []model.MeasurementCollection{} - ts := 
[]model.Timeseries{} - - t.InstrumentID = instrumentIDs[0] - t.Slug = parameterName - t.Name = parameterName - t.ParameterID = parameterIDs[0] - t.UnitID = unitIDs[0] - ts = append(ts, t) - - t.Type = model.ConstantTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, t) - if err != nil { - return err - } - // Assign timeseries - if err := qtx.CreateInstrumentConstant(ctx, t.InstrumentID, t.ID); err != nil { - return err - } - - measurement.Time = time.Now() - measurement.Value = model.FloatNanInf(value) - measurements = append(measurements, measurement) - mc.TimeseriesID = tsNew.ID - mc.Items = measurements - mcs = append(mcs, mc) - - if err = createMeasurements(ctx, mcs, qtx.CreateOrUpdateTimeseriesMeasurement, qtx.CreateOrUpdateTimeseriesNote); err != nil { - return err - } - } - - return nil -} diff --git a/api/internal/service/opendcs.go b/api/internal/service/opendcs.go index 00af0f17..b82dab61 100644 --- a/api/internal/service/opendcs.go +++ b/api/internal/service/opendcs.go @@ -3,18 +3,37 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/google/uuid" ) -type OpendcsService interface { - ListOpendcsSites(ctx context.Context) ([]model.Site, error) +type Site struct { + Elevation string `xml:"Elevation"` + ElevationUnits string `xml:"ElevationUnits"` + Description string `xml:"Description"` + SiteName SiteName `xml:"SiteName"` } -type opendcsService struct { - db *model.Database - *model.Queries +type SiteName struct { + ID uuid.UUID `xml:",chardata"` + NameType string `xml:",attr"` } -func NewOpendcsService(db *model.Database, q *model.Queries) *opendcsService { - return &opendcsService{db, q} +func (s DBService) OpendcsSiteList(ctx context.Context) ([]Site, error) { + nn, err := s.Queries.InstrumentList(ctx) + if err != nil { + return make([]Site, 0), err + } + ss := make([]Site, len(nn)) + for idx, n := range nn { + ss[idx] = Site{ + Elevation: "", + ElevationUnits: "", + Description: n.Name, 
+ SiteName: SiteName{ + ID: n.ID, + NameType: "uuid", + }, + } + } + return ss, nil } diff --git a/api/internal/service/plot_config.go b/api/internal/service/plot_config.go index 7d1f0bc0..100a1075 100644 --- a/api/internal/service/plot_config.go +++ b/api/internal/service/plot_config.go @@ -3,25 +3,54 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type PlotConfigService interface { - ListPlotConfigs(ctx context.Context, projectID uuid.UUID) ([]model.PlotConfig, error) - GetPlotConfig(ctx context.Context, plotconfigID uuid.UUID) (model.PlotConfig, error) - DeletePlotConfig(ctx context.Context, projectID, plotConfigID uuid.UUID) error - plotConfigBullseyePlotService - plotConfigContourPlotService - plotConfigProfilePlotService - plotConfigScatterLinePlotService +func createPlotConfigCommon(ctx context.Context, q *db.Queries, pc dto.PlotConfig) (uuid.UUID, error) { + pcID, err := q.PlotConfigCreate(ctx, db.PlotConfigCreateParams{ + Name: pc.Name, + ProjectID: pc.ProjectID, + Creator: pc.CreatorID, + CreateDate: pc.CreateDate, + PlotType: db.PlotType(pc.PlotType), + }) + if err != nil { + return pcID, err + } + err = q.PlotConfigSettingsCreate(ctx, db.PlotConfigSettingsCreateParams{ + ID: pcID, + ShowMasked: pc.ShowMasked, + ShowNonvalidated: pc.ShowNonValidated, + ShowComments: pc.ShowComments, + AutoRange: pc.AutoRange, + DateRange: pc.DateRange, + Threshold: int32(pc.Threshold), + }) + return pcID, err } -type plotConfigService struct { - db *model.Database - *model.Queries -} - -func NewPlotConfigService(db *model.Database, q *model.Queries) *plotConfigService { - return &plotConfigService{db, q} +func updatePlotConfigCommon(ctx context.Context, q *db.Queries, pc dto.PlotConfig) error { + if err := q.PlotConfigUpdate(ctx, db.PlotConfigUpdateParams{ + ProjectID: 
pc.ProjectID, + ID: pc.ID, + Name: pc.Name, + Updater: pc.UpdaterID, + UpdateDate: pc.UpdateDate, + }); err != nil { + return err + } + if err := q.PlotConfigSettingsDelete(ctx, pc.ID); err != nil { + return err + } + return q.PlotConfigSettingsCreate(ctx, db.PlotConfigSettingsCreateParams{ + ID: pc.ID, + ShowMasked: pc.ShowMasked, + ShowNonvalidated: pc.ShowNonValidated, + ShowComments: pc.ShowComments, + AutoRange: pc.AutoRange, + DateRange: pc.DateRange, + Threshold: int32(pc.Threshold), + }) } diff --git a/api/internal/service/plot_config_bullseye.go b/api/internal/service/plot_config_bullseye.go index 2fea5a11..3ef94130 100644 --- a/api/internal/service/plot_config_bullseye.go +++ b/api/internal/service/plot_config_bullseye.go @@ -3,79 +3,63 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type plotConfigBullseyePlotService interface { - CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) - UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) - ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]model.PlotConfigMeasurementBullseyePlot, error) -} - -func (s plotConfigService) CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigCreateBullseye(ctx context.Context, pc dto.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pc.PlotType = model.BullseyePlotType - pcID, err 
:= qtx.CreatePlotConfig(ctx, pc.PlotConfig) + pc.PlotType = dto.BullseyePlotType + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotBullseyeConfigCreate(ctx, db.PlotBullseyeConfigCreateParams{ + PlotConfigID: pcID, + XAxisTimeseriesID: &pc.Display.XAxisTimeseriesID, + YAxisTimeseriesID: &pc.Display.YAxisTimeseriesID, + }); err != nil { + return a, err } - - if err := qtx.CreatePlotBullseyeConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - pcNew, err := qtx.GetPlotConfig(ctx, pcID) + a, err = qtx.PlotConfigGet(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() - - return pcNew, err + err = tx.Commit(ctx) + return a, err } -func (s plotConfigService) UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigUpdateBullseye(ctx context.Context, pc dto.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.UpdatePlotBullseyeConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if 
err := qtx.PlotBullseyeConfigUpdate(ctx, db.PlotBullseyeConfigUpdateParams{ + PlotConfigID: pc.ID, + XAxisTimeseriesID: &pc.Display.XAxisTimeseriesID, + YAxisTimeseriesID: &pc.Display.YAxisTimeseriesID, + }); err != nil { + return a, err } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + pcNew, err := qtx.PlotConfigGet(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } diff --git a/api/internal/service/plot_config_contour.go b/api/internal/service/plot_config_contour.go index d73026d5..355ec374 100644 --- a/api/internal/service/plot_config_contour.go +++ b/api/internal/service/plot_config_contour.go @@ -2,122 +2,129 @@ package service import ( "context" + "errors" "time" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type plotConfigContourPlotService interface { - CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) - UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) - ListPlotConfigTimesContourPlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]time.Time, error) - GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) +type AggregatePlotConfigMeasurementsContourPlot struct { + X []float64 `json:"x"` + Y []float64 `json:"y"` + Z []*float64 `json:"z"` } -func (s plotConfigService) CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigCreateContour(ctx context.Context, pc dto.PlotConfigContourPlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := 
s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pc.PlotType = model.ContourPlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) - if err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotContourConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - for _, tsID := range pc.Display.TimeseriesIDs { - if err := qtx.CreatePlotContourConfigTimeseries(ctx, pcID, tsID); err != nil { - return model.PlotConfig{}, err - } - } - - pcNew, err := qtx.GetPlotConfig(ctx, pcID) + pc.PlotType = dto.ContourPlotType + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) + if err := qtx.PlotContourConfigCreate(ctx, db.PlotContourConfigCreateParams{ + PlotConfigID: pcID, + Time: pc.Display.Time, + LocfBackfill: pc.Display.LocfBackfill, + GradientSmoothing: pc.Display.GradientSmoothing, + ContourSmoothing: pc.Display.ContourSmoothing, + ShowLabels: pc.Display.ShowLabels, + }); err != nil { + return a, err + } + if err := createPlotContourConfigTimeseriesBatch(ctx, qtx, pcID, pc.Display.TimeseriesIDs); err != nil { + return a, err + } + a, err = qtx.PlotConfigGet(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } + err = tx.Commit(ctx) - err = tx.Commit() - - return pcNew, err + return a, err } -func (s plotConfigService) UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigUpdateContour(ctx context.Context, pc dto.PlotConfigContourPlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer 
model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.UpdatePlotContourConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotContourConfigUpdate(ctx, db.PlotContourConfigUpdateParams{ + PlotConfigID: pc.ID, + Time: pc.Display.Time, + LocfBackfill: pc.Display.LocfBackfill, + GradientSmoothing: pc.Display.GradientSmoothing, + ContourSmoothing: pc.Display.ContourSmoothing, + ShowLabels: pc.Display.ShowLabels, + }); err != nil { + return a, err } - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotContourConfigTimeseriesDeleteForPlotContourConfig(ctx, pc.ID); err != nil { + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.DeleteAllPlotContourConfigTimeseries(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err + if err := createPlotContourConfigTimeseriesBatch(ctx, qtx, pc.ID, pc.Display.TimeseriesIDs); err != nil { + return a, err } - for _, tsID := range pc.Display.TimeseriesIDs { - if err := qtx.CreatePlotContourConfigTimeseries(ctx, pc.ID, tsID); err != nil { - return model.PlotConfig{}, err - } - } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + pcNew, err := qtx.PlotConfigGet(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) { - q := s.db.Queries() - - mm, err := q.ListPlotConfigMeasurementsContourPlot(ctx, plotConfigID, t) +func (s 
DBService) PlotConfigMeasurementListContour(ctx context.Context, pcID uuid.UUID, t time.Time) (AggregatePlotConfigMeasurementsContourPlot, error) { + var a AggregatePlotConfigMeasurementsContourPlot + mm, err := s.Queries.PlotConfigMeasurementListContour(ctx, db.PlotConfigMeasurementListContourParams{ + PlotConfigID: pcID, + Time: t, + }) if err != nil { - return model.AggregatePlotConfigMeasurementsContourPlot{}, err + return a, err } - - am := model.AggregatePlotConfigMeasurementsContourPlot{ + a = AggregatePlotConfigMeasurementsContourPlot{ X: make([]float64, len(mm)), Y: make([]float64, len(mm)), Z: make([]*float64, len(mm)), } - for idx := range mm { - am.X[idx] = mm[idx].X - am.Y[idx] = mm[idx].Y - am.Z[idx] = mm[idx].Z + z, ok := mm[idx].Z.(*float64) + if !ok { + return a, errors.New("failed type assertion: interface to float64") + } + a.X[idx] = mm[idx].X + a.Y[idx] = mm[idx].Y + a.Z[idx] = z } + return a, nil +} - return am, nil +func createPlotContourConfigTimeseriesBatch(ctx context.Context, q *db.Queries, pcID uuid.UUID, tt []uuid.UUID) error { + args := make([]db.PlotContourConfigTimeseriesCreateBatchParams, len(tt)) + for idx, tsID := range tt { + args[idx] = db.PlotContourConfigTimeseriesCreateBatchParams{ + PlotContourConfigID: pcID, + TimeseriesID: tsID, + } + } + var err error + q.PlotContourConfigTimeseriesCreateBatch(ctx, args).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/service/plot_config_profile.go b/api/internal/service/plot_config_profile.go index 2bc9920c..d9e85c8a 100644 --- a/api/internal/service/plot_config_profile.go +++ b/api/internal/service/plot_config_profile.go @@ -3,78 +3,62 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type plotConfigProfilePlotService interface { - CreatePlotConfigProfilePlot(ctx context.Context, pc 
model.PlotConfigProfilePlot) (model.PlotConfig, error) - UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) -} - -func (s plotConfigService) CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigCreateProfile(ctx context.Context, pc dto.PlotConfigProfilePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pc.PlotType = model.ProfilePlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + pc.PlotType = dto.ProfilePlotType + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotProfileConfigCreate(ctx, db.PlotProfileConfigCreateParams{ + PlotConfigID: pcID, + InstrumentID: pc.Display.InstrumentID, + }); err != nil { + return a, err } - - if err := qtx.CreatePlotProfileConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - pcNew, err := qtx.GetPlotConfig(ctx, pcID) + pcNew, err := qtx.PlotConfigGet(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigUpdateProfile(ctx context.Context, pc dto.PlotConfigProfilePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + 
return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.UpdatePlotProfileConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotProfileConfigUpdate(ctx, db.PlotProfileConfigUpdateParams{ + PlotConfigID: pc.ID, + InstrumentID: pc.Display.InstrumentID, + }); err != nil { + return a, err } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + a, err = qtx.PlotConfigGet(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } + err = tx.Commit(ctx) - err = tx.Commit() - - return pcNew, err + return a, err } diff --git a/api/internal/service/plot_config_scatter_line.go b/api/internal/service/plot_config_scatter_line.go index 836e888f..4e790201 100644 --- a/api/internal/service/plot_config_scatter_line.go +++ b/api/internal/service/plot_config_scatter_line.go @@ -3,115 +3,92 @@ package service import ( "context" "fmt" - "log" "strings" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type plotConfigScatterLinePlotService interface { - CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) - UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) -} - -func (s plotConfigService) CreatePlotConfigScatterLinePlot(ctx context.Context, pc 
model.PlotConfigScatterLinePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigCreateScatterLine(ctx context.Context, pc dto.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pc.PlotType = model.ScatterLinePlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) - if err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - + pc.PlotType = dto.ScatterLinePlotType + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err := validateCreateTraces(ctx, qtx, pcID, pc.Display.Traces); err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotConfigScatterLineLayout(ctx, pcID, pc.Display.Layout); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotConfigScatterLineLayoutCreate(ctx, db.PlotConfigScatterLineLayoutCreateParams{ + PlotConfigID: pcID, + YAxisTitle: pc.Display.Layout.YAxisTitle, + Y2AxisTitle: pc.Display.Layout.Y2AxisTitle, + }); err != nil { + return a, err } - if err := validateCreateCustomShapes(ctx, qtx, pcID, pc.Display.Layout.CustomShapes); err != nil { - return model.PlotConfig{}, err + return a, err } - pcNew, err := qtx.GetPlotConfig(ctx, pcID) + pcNew, err := qtx.PlotConfigGet(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigUpdateScatterLine(ctx context.Context, pc dto.PlotConfigScatterLinePlot) 
(db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - log.Printf("fails on delete %s", pc.ID) - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.DeleteAllPlotConfigTimeseriesTraces(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.DeleteAllPlotConfigCustomShapes(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotConfigTimeseriesTraceDeleteForPlotConfig(ctx, &pc.ID); err != nil { + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - log.Printf("fails on create %s, %+v", pc.ID, pc.PlotConfigSettings) - return model.PlotConfig{}, err + if err := qtx.PlotConfigCustomShapeDeleteForPlotConfig(ctx, &pc.ID); err != nil { + return a, err } - if err := validateCreateTraces(ctx, qtx, pc.ID, pc.Display.Traces); err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.UpdatePlotConfigScatterLineLayout(ctx, pc.ID, pc.Display.Layout); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotConfigScatterLineLayoutUpdate(ctx, db.PlotConfigScatterLineLayoutUpdateParams{ + PlotConfigID: pc.ID, + YAxisTitle: pc.Display.Layout.YAxisTitle, + Y2AxisTitle: pc.Display.Layout.Y2AxisTitle, + }); err != nil { + return a, err } - if err := validateCreateCustomShapes(ctx, qtx, pc.ID, pc.Display.Layout.CustomShapes); err != nil { - return model.PlotConfig{}, err + return a, err } - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + pcNew, err := qtx.PlotConfigGet(ctx, pc.ID) if err != nil { - return 
model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func validateCreateTraces(ctx context.Context, q *model.Queries, pcID uuid.UUID, trs []model.PlotConfigScatterLineTimeseriesTrace) error { - for _, tr := range trs { +func validateCreateTraces(ctx context.Context, q *db.Queries, pcID uuid.UUID, trs []dto.PlotConfigScatterLineTimeseriesTrace) error { + args := make([]db.PlotConfigTimeseriesTracesCreateBatchParams, len(trs)) + for idx, tr := range trs { tr.PlotConfigurationID = pcID - if err := validateColor(tr.Color); err != nil { return err } @@ -121,23 +98,37 @@ func validateCreateTraces(ctx context.Context, q *model.Queries, pcID uuid.UUID, if tr.YAxis == "" { tr.YAxis = "y1" } - - if err := q.CreatePlotConfigTimeseriesTrace(ctx, tr); err != nil { - return err + args[idx] = db.PlotConfigTimeseriesTracesCreateBatchParams{ + PlotConfigurationID: &tr.PlotConfigurationID, + TimeseriesID: &tr.TimeseriesID, + TraceOrder: int32(tr.TraceOrder), + Color: tr.Color, + LineStyle: db.LineStyle(tr.LineStyle), + Width: tr.Width, + ShowMarkers: tr.ShowMarkers, + YAxis: db.YAxis(tr.YAxis), } } + var err error + q.PlotConfigTimeseriesTracesCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return err + } return nil } -func validateCreateCustomShapes(ctx context.Context, q *model.Queries, pcID uuid.UUID, css []model.PlotConfigScatterLineCustomShape) error { +func validateCreateCustomShapes(ctx context.Context, q *db.Queries, pcID uuid.UUID, css []dto.PlotConfigScatterLineCustomShape) error { for _, cs := range css { - cs.PlotConfigurationID = pcID - if err := validateColor(cs.Color); err != nil { return err } - - if err := q.CreatePlotConfigCustomShape(ctx, cs); err != nil { + if err := q.PlotConfigCustomShapeCreate(ctx, db.PlotConfigCustomShapeCreateParams{ + PlotConfigurationID: &pcID, + Enabled: cs.Enabled, + Name: cs.Name, + DataPoint: cs.DataPoint, + Color: cs.Color, + }); err != nil { return err } } diff 
--git a/api/internal/service/profile.go b/api/internal/service/profile.go index f1d9f833..0e0c613f 100644 --- a/api/internal/service/profile.go +++ b/api/internal/service/profile.go @@ -5,102 +5,32 @@ import ( "errors" "strings" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/password" "github.com/google/uuid" ) -type ProfileService interface { - GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (model.Profile, error) - GetProfileWithTokensForEDIPI(ctx context.Context, edipi int) (model.Profile, error) - GetProfileForEmail(ctx context.Context, email string) (model.Profile, error) - GetProfileWithTokensForUsername(ctx context.Context, username string) (model.Profile, error) - GetProfileWithTokensForTokenID(ctx context.Context, tokenID string) (model.Profile, error) - CreateProfile(ctx context.Context, n model.ProfileInfo) (model.Profile, error) - CreateProfileToken(ctx context.Context, profileID uuid.UUID) (model.Token, error) - GetTokenInfoByTokenID(ctx context.Context, tokenID string) (model.TokenInfo, error) - UpdateProfileForClaims(ctx context.Context, p model.Profile, claims model.ProfileClaims) (model.Profile, error) - DeleteToken(ctx context.Context, profileID uuid.UUID, tokenID string) error +type Token struct { + db.ProfileToken + SecretToken string `json:"secret_token"` } -type profileService struct { - db *model.Database - *model.Queries -} - -func NewProfileService(db *model.Database, q *model.Queries) *profileService { - return &profileService{db, q} -} - -func (s profileService) GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (model.Profile, error) { - var p model.Profile +func (s DBService) ProfileGetWithTokensForClaims(ctx context.Context, claims dto.ProfileClaims) (db.VProfile, error) { + var a db.VProfile var err 
error if claims.CacUID != nil { - p, err = s.GetProfileWithTokensForEDIPI(ctx, *claims.CacUID) + a, err = s.Queries.ProfileGetForEDIPI(ctx, int64(*claims.CacUID)) } else { - p, err = s.GetProfileWithTokensForEmail(ctx, claims.Email) - } - if err != nil { - return model.Profile{}, err - } - return p, nil -} - -func (s profileService) GetProfileWithTokensForEDIPI(ctx context.Context, edipi int) (model.Profile, error) { - p, err := s.GetProfileForEDIPI(ctx, edipi) - if err != nil { - return model.Profile{}, err + a, err = s.ProfileGetForEmail(ctx, claims.Email) } - tokens, err := s.GetIssuedTokens(ctx, p.ID) if err != nil { - return model.Profile{}, err + return a, err } - p.Tokens = tokens - return p, nil + return a, nil } -func (s profileService) GetProfileWithTokensForEmail(ctx context.Context, email string) (model.Profile, error) { - p, err := s.GetProfileForEmail(ctx, email) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -func (s profileService) GetProfileWithTokensForUsername(ctx context.Context, username string) (model.Profile, error) { - p, err := s.GetProfileForUsername(ctx, username) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -// GetProfileForTokenID returns a profile given a token ID -func (s profileService) GetProfileWithTokensForTokenID(ctx context.Context, tokenID string) (model.Profile, error) { - p, err := s.GetProfileForTokenID(ctx, tokenID) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -// UpdateProfileForClaims syncs a database profile to the provided token claims -// THe order of precence in which the function will 
attepmt to update profiles is edipi, email, username -func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Profile, claims model.ProfileClaims) (model.Profile, error) { +func (s DBService) ProfileUpdateForClaims(ctx context.Context, p db.VProfile, claims dto.ProfileClaims) (db.VProfile, error) { var claimsMatchProfile bool = p.Username == claims.PreferredUsername && strings.ToLower(p.Email) == strings.ToLower(claims.Email) && p.DisplayName == claims.Name @@ -108,12 +38,12 @@ func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Prof if claimsMatchProfile { return p, nil } - if claims.CacUID != nil && !claimsMatchProfile { - if err := s.UpdateProfileForEDIPI(ctx, *claims.CacUID, model.ProfileInfo{ + if err := s.Queries.ProfileUpdateForEDIPI(ctx, db.ProfileUpdateForEDIPIParams{ Username: claims.PreferredUsername, - DisplayName: claims.Name, Email: claims.Email, + DisplayName: claims.Name, + Edipi: int64(*claims.CacUID), }); err != nil { return p, err } @@ -123,11 +53,11 @@ func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Prof return p, nil } - if strings.ToLower(p.Email) == strings.ToLower(claims.Email) && !claimsMatchProfile { - if err := s.UpdateProfileForEmail(ctx, claims.Email, model.ProfileInfo{ + if err := s.Queries.ProfileUpdateForEmail(ctx, db.ProfileUpdateForEmailParams{ Username: claims.PreferredUsername, DisplayName: claims.Name, + Email: claims.Email, }); err != nil { return p, err } @@ -139,3 +69,19 @@ func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Prof return p, errors.New("claims did not match profile and could not be updated") } + +func (s DBService) ProfileTokenCreate(ctx context.Context, profileID uuid.UUID) (Token, error) { + var t Token + secretToken := password.GenerateRandom(40) + hash, err := password.CreateHash(secretToken, password.DefaultParams) + if err != nil { + return t, err + } + s.Queries.ProfileTokenCreate(ctx, 
db.ProfileTokenCreateParams{ + ProfileID: profileID, + TokenID: password.GenerateRandom(40), + Hash: hash, + }) + t.SecretToken = secretToken + return t, nil +} diff --git a/api/internal/service/project.go b/api/internal/service/project.go index 90581625..526f48d1 100644 --- a/api/internal/service/project.go +++ b/api/internal/service/project.go @@ -4,129 +4,112 @@ import ( "context" "image" "io" + "log" "mime/multipart" "os" + "github.com/USACE/instrumentation-api/api/internal/cloud" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/img" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) -type ProjectService interface { - SearchProjects(ctx context.Context, searchInput string, limit int) ([]model.SearchResult, error) - ListDistricts(ctx context.Context) ([]model.District, error) - ListProjects(ctx context.Context) ([]model.Project, error) - ListProjectsByFederalID(ctx context.Context, federalID string) ([]model.Project, error) - ListProjectsForProfile(ctx context.Context, profileID uuid.UUID) ([]model.Project, error) - ListProjectsForProfileRole(ctx context.Context, profileID uuid.UUID, role string) ([]model.Project, error) - ListProjectInstruments(ctx context.Context, projectID uuid.UUID) ([]model.Instrument, error) - ListProjectInstrumentGroups(ctx context.Context, projectID uuid.UUID) ([]model.InstrumentGroup, error) - GetProjectCount(ctx context.Context) (model.ProjectCount, error) - GetProject(ctx context.Context, projectID uuid.UUID) (model.Project, error) - CreateProject(ctx context.Context, p model.Project) (model.IDSlugName, error) - CreateProjectBulk(ctx context.Context, projects []model.Project) ([]model.IDSlugName, error) - UpdateProject(ctx context.Context, p model.Project) (model.Project, error) - UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error 
- DeleteFlagProject(ctx context.Context, projectID uuid.UUID) error -} - -type projectService struct { - db *model.Database - *model.Queries -} +type imgUploader func(ctx context.Context, r io.Reader, opts ImgUploaderOpts) error -func NewProjectService(db *model.Database, q *model.Queries) *projectService { - return &projectService{db, q} +type ImgUploaderOpts struct { + rawPath string + bucketName string } -type uploader func(ctx context.Context, r io.Reader, rawPath, bucketName string) error - -// CreateProjectBulk creates one or more projects from an array of projects -func (s projectService) CreateProjectBulk(ctx context.Context, projects []model.Project) ([]model.IDSlugName, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - pp := make([]model.IDSlugName, len(projects)) +func (s DBService) ProjectCreateBatch(ctx context.Context, projects []dto.Project) ([]db.ProjectCreateBatchRow, error) { + args := make([]db.ProjectCreateBatchParams, len(projects)) for idx, p := range projects { - aa, err := qtx.CreateProject(ctx, p) - if err != nil { - return nil, err + args[idx] = db.ProjectCreateBatchParams{ + FederalID: p.FederalID, + Name: p.Name, + DistrictID: p.DistrictID, + Creator: p.CreatorID, + CreateDate: p.CreateDate, } - pp[idx] = aa } - if err := tx.Commit(); err != nil { + var err error + pp := make([]db.ProjectCreateBatchRow, len(args)) + s.Queries.ProjectCreateBatch(ctx, args).QueryRow(batchQueryRowCollect(pp, &err)) + if err != nil { return nil, err } return pp, nil } -// UpdateProject updates a project -func (s projectService) UpdateProject(ctx context.Context, p model.Project) (model.Project, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ProjectUpdate(ctx context.Context, p dto.Project) (db.VProject, error) { + var a db.VProject + tx, err := s.db.Begin(ctx) if err != nil { - return model.Project{}, err + return a, err } - defer 
model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateProject(ctx, p); err != nil { - return model.Project{}, err + if _, err := qtx.ProjectUpdate(ctx, db.ProjectUpdateParams{ + ID: p.ID, + Name: p.Name, + Updater: p.UpdaterID, + UpdateDate: p.UpdateDate, + DistrictID: p.DistrictID, + FederalID: p.FederalID, + }); err != nil { + return a, err } - - updated, err := qtx.GetProject(ctx, p.ID) + updated, err := qtx.ProjectGet(ctx, p.ID) if err != nil { - return model.Project{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.Project{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return updated, nil } -func (s projectService) UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ProjectUploadImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, blobService cloud.Blob) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - p, err := qtx.GetProject(ctx, projectID) + p, err := qtx.ProjectGet(ctx, projectID) if err != nil { return err } - src, err := file.Open() if err != nil { return err } defer src.Close() - dst, err := os.Create(file.Filename) + fp := "/tmp/" + file.Filename + dst, err := os.Create(fp) if err != nil { return err } defer dst.Close() - + defer func() { + if err := os.Remove(fp); err != nil { + log.Printf("enable to remove file /tmp/%s", fp) + } + }() if err := img.Resize(src, dst, image.Rect(0, 0, 480, 480)); err != nil { return err } - - if err := qtx.UpdateProjectImage(ctx, file.Filename, projectID); err != nil { + if err := qtx.ProjectUpdateImage(ctx, db.ProjectUpdateImageParams{ + ID: projectID, + Image: &file.Filename, + }); err != nil { return err } - - if err := u(ctx, src, "/projects/"+p.Slug+"/"+file.Filename, ""); err != nil { 
+ if err := blobService.UploadContext(ctx, dst, "/projects/"+p.Slug+"/"+file.Filename, ""); err != nil { return err } - - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/service/project_role.go b/api/internal/service/project_role.go deleted file mode 100644 index dbdf0e22..00000000 --- a/api/internal/service/project_role.go +++ /dev/null @@ -1,53 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type ProjectRoleService interface { - ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]model.ProjectMembership, error) - GetProjectMembership(ctx context.Context, roleID uuid.UUID) (model.ProjectMembership, error) - AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (model.ProjectMembership, error) - RemoveProjectMemberRole(ctx context.Context, projectID, profileID, roleID uuid.UUID) error - IsProjectAdmin(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) - IsProjectMember(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) -} - -type projectRoleService struct { - db *model.Database - *model.Queries -} - -func NewProjectRoleService(db *model.Database, q *model.Queries) *projectRoleService { - return &projectRoleService{db, q} -} - -// AddProjectMemberRole adds a role to a user for a specific project -func (s projectRoleService) AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (model.ProjectMembership, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return model.ProjectMembership{}, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - pprID, err := qtx.AddProjectMemberRole(ctx, projectID, profileID, roleID, grantedBy) - if err != nil { - return model.ProjectMembership{}, err - } - - pm, err := qtx.GetProjectMembership(ctx, pprID) - if err != nil { - return model.ProjectMembership{}, err - } - - if 
err := tx.Commit(); err != nil { - return model.ProjectMembership{}, err - } - - return pm, nil -} diff --git a/api/internal/service/report_config.go b/api/internal/service/report_config.go index 1954c9c3..b600c666 100644 --- a/api/internal/service/report_config.go +++ b/api/internal/service/report_config.go @@ -5,139 +5,171 @@ import ( "encoding/json" "github.com/USACE/instrumentation-api/api/internal/cloud" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type ReportConfigService interface { - ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]model.ReportConfig, error) - CreateReportConfig(ctx context.Context, rc model.ReportConfig) (model.ReportConfig, error) - UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error - DeleteReportConfig(ctx context.Context, rcID uuid.UUID) error - GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (model.ReportConfigWithPlotConfigs, error) - CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (model.ReportDownloadJob, error) - GetReportDownloadJob(ctx context.Context, jobID, profileID uuid.UUID) (model.ReportDownloadJob, error) - UpdateReportDownloadJob(ctx context.Context, j model.ReportDownloadJob) error -} - -type reportConfigService struct { - db *model.Database - *model.Queries - pubsub cloud.Pubsub - mockQueue bool -} - -func NewReportConfigService(db *model.Database, q *model.Queries, ps cloud.Pubsub, mockQueue bool) *reportConfigService { - return &reportConfigService{db, q, ps, mockQueue} -} - -func (s reportConfigService) CreateReportConfig(ctx context.Context, rc model.ReportConfig) (model.ReportConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ReportConfigCreate(ctx context.Context, rc dto.ReportConfig) (db.VReportConfig, error) { + var a db.VReportConfig 
+ tx, err := s.db.Begin(ctx) if err != nil { - return model.ReportConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - rcID, err := qtx.CreateReportConfig(ctx, rc) + rcID, err := qtx.ReportConfigCreate(ctx, db.ReportConfigCreateParams{ + Name: rc.Name, + ProjectID: rc.ProjectID, + Creator: rc.CreatorID, + Description: rc.Description, + DateRange: &rc.GlobalOverrides.DateRange.Value, + DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, + ShowMasked: &rc.GlobalOverrides.ShowMasked.Value, + ShowMaskedEnabled: &rc.GlobalOverrides.ShowMasked.Enabled, + ShowNonvalidated: &rc.GlobalOverrides.ShowNonvalidated.Value, + ShowNonvalidatedEnabled: &rc.GlobalOverrides.ShowNonvalidated.Enabled, + }) if err != nil { - return model.ReportConfig{}, err + return a, err } - - for _, pc := range rc.PlotConfigs { - if err := qtx.AssignReportConfigPlotConfig(ctx, rcID, pc.ID); err != nil { - return model.ReportConfig{}, err + args := make([]db.ReportConfigPlotConfigCreateBatchParams, len(rc.PlotConfigs)) + for idx := range rc.PlotConfigs { + args[idx] = db.ReportConfigPlotConfigCreateBatchParams{ + ReportConfigID: rcID, + PlotConfigID: rc.PlotConfigs[idx].ID, } } - - rcNew, err := qtx.GetReportConfigByID(ctx, rcID) + qtx.ReportConfigPlotConfigCreateBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { - return model.ReportConfig{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.ReportConfig{}, err + a, err = qtx.ReportConfigGet(ctx, rcID) + if err != nil { + return a, err } - return rcNew, nil + if err := tx.Commit(ctx); err != nil { + return a, err + } + return a, nil } -func (s reportConfigService) UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ReportConfigUpdate(ctx context.Context, rc dto.ReportConfig) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - 
+ defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateReportConfig(ctx, rc); err != nil { + if err := qtx.ReportConfigUpdate(ctx, db.ReportConfigUpdateParams{ + ID: rc.ID, + Name: rc.Name, + Updater: rc.UpdaterID, + Description: rc.Description, + DateRange: &rc.GlobalOverrides.DateRange.Value, + DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, + ShowMasked: &rc.GlobalOverrides.ShowMasked.Value, + ShowMaskedEnabled: &rc.GlobalOverrides.ShowMasked.Enabled, + ShowNonvalidated: &rc.GlobalOverrides.ShowNonvalidated.Value, + ShowNonvalidatedEnabled: &rc.GlobalOverrides.ShowNonvalidated.Enabled, + }); err != nil { return err } - if err := qtx.UnassignAllReportConfigPlotConfig(ctx, rc.ID); err != nil { + if err := qtx.ReportConfigPlotConfigDeleteForReportConfig(ctx, rc.ID); err != nil { return err } - for _, pc := range rc.PlotConfigs { - if err := qtx.AssignReportConfigPlotConfig(ctx, rc.ID, pc.ID); err != nil { - return err + args := make([]db.ReportConfigPlotConfigCreateBatchParams, len(rc.PlotConfigs)) + for idx := range rc.PlotConfigs { + args[idx] = db.ReportConfigPlotConfigCreateBatchParams{ + ReportConfigID: rc.ID, + PlotConfigID: rc.PlotConfigs[idx].ID, } } + qtx.ReportConfigPlotConfigCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return err + } - return tx.Commit() + return tx.Commit(ctx) } -func (s reportConfigService) GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (model.ReportConfigWithPlotConfigs, error) { - q := s.db.Queries() +type ReportConfigWithPlotConfigs struct { + db.VReportConfig + PlotConfigs []db.VPlotConfiguration `json:"plot_configs"` +} - rc, err := q.GetReportConfigByID(ctx, rcID) +func (s DBService) ReportConfigWithPlotConfigsGet(ctx context.Context, rcID uuid.UUID) (ReportConfigWithPlotConfigs, error) { + var a ReportConfigWithPlotConfigs + rc, err := s.Queries.ReportConfigGet(ctx, rcID) if err != nil { - return model.ReportConfigWithPlotConfigs{}, err + return a, err } - 
pcs, err := q.ListReportConfigPlotConfigs(ctx, rcID) + pcs, err := s.Queries.ReportConfigListForReportConfigWithPlotConfig(ctx, rcID) if err != nil { - return model.ReportConfigWithPlotConfigs{}, err + return a, err } - return model.ReportConfigWithPlotConfigs{ - ReportConfig: rc, - PlotConfigs: pcs, - }, nil + a.VReportConfig = rc + a.PlotConfigs = pcs + return a, nil } -func (s reportConfigService) CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (model.ReportDownloadJob, error) { - tx, err := s.db.BeginTxx(ctx, nil) +type ReportDownloadJobCreateOpts struct { + ReportConfigID uuid.UUID + ProfileID uuid.UUID + IsLandscape bool + IsMock bool +} + +func (s DBService) ReportDownloadJobCreate(ctx context.Context, queue cloud.Pubsub, arg ReportDownloadJobCreateOpts) (db.ReportDownloadJob, error) { + var a db.ReportDownloadJob + tx, err := s.db.Begin(ctx) if err != nil { - return model.ReportDownloadJob{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - j, err := qtx.CreateReportDownloadJob(ctx, rcID, profileID) + + a, err = qtx.ReportDownloadJobCreate(ctx, db.ReportDownloadJobCreateParams{ + ReportConfigID: &arg.ReportConfigID, + Creator: arg.ProfileID, + }) if err != nil { - return model.ReportDownloadJob{}, err + return a, err + } + msg := dto.ReportConfigJobMessage{ + ReportConfigID: arg.ReportConfigID, + JobID: a.ID, + IsLandscape: arg.IsLandscape, } - - msg := model.ReportConfigJobMessage{ReportConfigID: rcID, JobID: j.ID, IsLandscape: isLandscape} b, err := json.Marshal(msg) if err != nil { - return model.ReportDownloadJob{}, err + return a, err } - - // NOTE: Depending on how long this takes, possibly invoke the lambdas directly - if _, err := s.pubsub.PublishMessage(ctx, b); err != nil { - return model.ReportDownloadJob{}, err + if _, err := queue.PublishMessage(ctx, b); err != nil { + return a, err } - - if err := tx.Commit(); err != nil { - return 
model.ReportDownloadJob{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - - if s.mockQueue { - if _, err := s.pubsub.MockPublishMessage(ctx, b); err != nil { - return model.ReportDownloadJob{}, err + if arg.IsMock { + if _, err := queue.MockPublishMessage(ctx, b); err != nil { + return a, err } } + return a, nil +} - return j, nil +func (s DBService) ReportDownloadJobUpdate(ctx context.Context, j dto.ReportDownloadJob) error { + return s.Queries.ReportDownloadJobUpdate(ctx, db.ReportDownloadJobUpdateParams{ + ID: j.ID, + Status: db.JobStatus(j.Status), + Progress: int32(j.Progress), + ProgressUpdateDate: j.ProgressUpdateDate, + FileKey: j.FileKey, + FileExpiry: j.FileExpiry, + }) } diff --git a/api/internal/service/submittal.go b/api/internal/service/submittal.go deleted file mode 100644 index 3a73147c..00000000 --- a/api/internal/service/submittal.go +++ /dev/null @@ -1,27 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type SubmittalService interface { - ListProjectSubmittals(ctx context.Context, projectID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListInstrumentSubmittals(ctx context.Context, instrumentID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListUnverifiedMissingSubmittals(ctx context.Context) ([]model.Submittal, error) - UpdateSubmittal(ctx context.Context, sub model.Submittal) error - VerifyMissingSubmittal(ctx context.Context, submittalID uuid.UUID) error - VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID) error -} - -type submittalService struct { - db *model.Database - *model.Queries -} - -func NewSubmittalService(db *model.Database, q *model.Queries) *submittalService { - return &submittalService{db, q} -} diff --git a/api/internal/service/timeseries.go 
b/api/internal/service/timeseries.go index 692c1126..52b23fe9 100644 --- a/api/internal/service/timeseries.go +++ b/api/internal/service/timeseries.go @@ -2,84 +2,60 @@ package service import ( "context" - "errors" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type TimeseriesService interface { - GetStoredTimeseriesExists(ctx context.Context, timeseriesID uuid.UUID) (bool, error) - AssertTimeseriesLinkedToProject(ctx context.Context, projectID uuid.UUID, dd map[uuid.UUID]struct{}) error - ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]model.Timeseries, error) - ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]model.Timeseries, error) - ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]model.Timeseries, error) - GetTimeseries(ctx context.Context, timeseriesID uuid.UUID) (model.Timeseries, error) - CreateTimeseries(ctx context.Context, ts model.Timeseries) (model.Timeseries, error) - CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) - UpdateTimeseries(ctx context.Context, ts model.Timeseries) (uuid.UUID, error) - DeleteTimeseries(ctx context.Context, timeseriesID uuid.UUID) error -} - -type timeseriesService struct { - db *model.Database - *model.Queries -} - -func NewTimeseriesService(db *model.Database, q *model.Queries) *timeseriesService { - return ×eriesService{db, q} -} - -func (s timeseriesService) CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - uu := make([]model.Timeseries, len(tt)) +func (s DBService) TimeseriesCreateBatch(ctx context.Context, tt []dto.Timeseries) error { + uu := 
make([]db.TimeseriesCreateBatchParams, len(tt)) for idx, ts := range tt { - ts.Type = model.StandardTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, ts) - if err != nil { - return nil, err + if ts.ParameterID == uuid.Nil { + ts.ParameterID = dto.UnknownParameterID + } + if ts.UnitID == uuid.Nil { + ts.UnitID = dto.UnknownUnitID + } + uu[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &ts.InstrumentID, + Name: ts.Name, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeStandard, + }, } - uu[idx] = tsNew - } - - if err := tx.Commit(); err != nil { - return nil, err } - - return uu, nil + var err error + s.Queries.TimeseriesCreateBatch(ctx, uu).QueryRow(batchQueryRowErr[db.TimeseriesCreateBatchRow](&err)) + return err } -func (s timeseriesService) AssertTimeseriesLinkedToProject(ctx context.Context, projectID uuid.UUID, dd map[uuid.UUID]struct{}) error { - ddc := make(map[uuid.UUID]struct{}, len(dd)) - dds := make([]uuid.UUID, len(dd)) - idx := 0 - for k := range ddc { - ddc[k] = struct{}{} - dds[idx] = k - idx++ - } - - q := s.db.Queries() - - m, err := q.GetTimeseriesProjectMap(ctx, dds) +func (s DBService) TimeseriesCreate(ctx context.Context, ts dto.Timeseries) (uuid.UUID, error) { + tsNew, err := s.Queries.TimeseriesCreate(ctx, db.TimeseriesCreateParams{ + InstrumentID: &ts.InstrumentID, + Name: ts.Name, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeStandard, + }, + }) if err != nil { - return err + return uuid.Nil, err } - for tID := range ddc { - ppID, ok := m[tID] - if ok && ppID == projectID { - delete(ddc, tID) - } - } - if len(ddc) != 0 { - return errors.New("instruments for all timeseries must be linked to project") - } - return nil + return tsNew.ID, nil +} + +func (s DBService) TimeseriesUpdate(ctx context.Context, ts dto.Timeseries) error { + return 
s.Queries.TimeseriesUpdate(ctx, db.TimeseriesUpdateParams{ + ID: ts.ID, + InstrumentID: &ts.InstrumentID, + Name: ts.Name, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + }) } diff --git a/api/internal/service/timeseries_calculated.go b/api/internal/service/timeseries_calculated.go index 5c55a08c..a1b87ba0 100644 --- a/api/internal/service/timeseries_calculated.go +++ b/api/internal/service/timeseries_calculated.go @@ -2,98 +2,59 @@ package service import ( "context" - "database/sql" - "errors" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type CalculatedTimeseriesService interface { - GetAllCalculatedTimeseriesForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.CalculatedTimeseries, error) - CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error - UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error - DeleteCalculatedTimeseries(ctx context.Context, ctsID uuid.UUID) error -} - -type calculatedTimeseriesService struct { - db *model.Database - *model.Queries -} - -func NewCalculatedTimeseriesService(db *model.Database, q *model.Queries) *calculatedTimeseriesService { - return &calculatedTimeseriesService{db, q} -} - -func (s calculatedTimeseriesService) CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesComputedCreate(ctx context.Context, ct dto.CalculatedTimeseries) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - tsID, err := qtx.CreateCalculatedTimeseries(ctx, cc) + tsID, err := qtx.TimeseriesComputedCreate(ctx, db.TimeseriesComputedCreateParams{ + InstrumentID: &ct.InstrumentID, + ParameterID: ct.ParameterID, + UnitID: 
ct.UnitID, + Name: ct.FormulaName, + }) if err != nil { return err } - - if err := qtx.CreateCalculation(ctx, tsID, cc.Formula); err != nil { + if err := qtx.CalculationCreate(ctx, db.CalculationCreateParams{ + TimeseriesID: tsID, + Contents: &ct.Formula, + }); err != nil { return err } - - if err := tx.Commit(); err != nil { - return err - } - - return nil + return tx.Commit(ctx) } -func (s calculatedTimeseriesService) UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesComputedCreateOrUpdate(ctx context.Context, ct dto.CalculatedTimeseries) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - defaultCts, err := qtx.GetOneCalculation(ctx, &cts.ID) - if err != nil { + if err := qtx.TimeseriesComputedCreateOrUpdate(ctx, db.TimeseriesComputedCreateOrUpdateParams{ + ID: ct.ID, + InstrumentID: &ct.InstrumentID, + ParameterID: ct.ParameterID, + UnitID: ct.UnitID, + Name: ct.FormulaName, + }); err != nil { return err } - - if cts.InstrumentID == uuid.Nil { - cts.InstrumentID = defaultCts.InstrumentID - } - if cts.ParameterID == uuid.Nil { - cts.ParameterID = defaultCts.ParameterID - } - if cts.UnitID == uuid.Nil { - cts.UnitID = defaultCts.UnitID - } - if cts.Slug == "" { - cts.Slug = defaultCts.Slug - } - if cts.FormulaName == "" { - cts.FormulaName = defaultCts.FormulaName - } - if cts.Formula == "" { - cts.Formula = defaultCts.Formula - } - - if err := qtx.CreateOrUpdateCalculatedTimeseries(ctx, cts, defaultCts); err != nil && !errors.Is(err, sql.ErrNoRows) { + if err := qtx.CalculationCreateOrUpdate(ctx, db.CalculationCreateOrUpdateParams{ + TimeseriesID: ct.ID, + Contents: &ct.Formula, + }); err != nil { return err } - - if err := qtx.CreateOrUpdateCalculation(ctx, cts.ID, cts.Formula, defaultCts.Formula); err != nil && !errors.Is(err, sql.ErrNoRows) { - return err - 
} - - if err := tx.Commit(); err != nil { - return err - } - - return nil + return tx.Commit(ctx) } diff --git a/api/internal/service/timeseries_cwms.go b/api/internal/service/timeseries_cwms.go index b2d0f2d0..fbc6b518 100644 --- a/api/internal/service/timeseries_cwms.go +++ b/api/internal/service/timeseries_cwms.go @@ -3,70 +3,81 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type TimeseriesCwmsService interface { - ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]model.TimeseriesCwms, error) - CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) ([]model.TimeseriesCwms, error) - UpdateTimeseriesCwms(ctx context.Context, tsCwms model.TimeseriesCwms) error -} - -type timeseriesCwmsService struct { - db *model.Database - *model.Queries -} - -func NewTimeseriesCwmsService(db *model.Database, q *model.Queries) *timeseriesCwmsService { - return ×eriesCwmsService{db, q} -} - -func (s timeseriesCwmsService) CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) ([]model.TimeseriesCwms, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesCwmsCreateBatch(ctx context.Context, instrumentID uuid.UUID, tcc []dto.TimeseriesCwms) error { + tx, err := s.db.Begin(ctx) if err != nil { - return tcc, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for idx := range tcc { - tcc[idx].Type = model.CwmsTimeseriesType - tcc[idx].InstrumentID = instrumentID - tsNew, err := qtx.CreateTimeseries(ctx, tcc[idx].Timeseries) + for idx, tc := range tcc { + if tc.ParameterID == uuid.Nil { + tc.ParameterID = dto.UnknownParameterID + } + if tc.UnitID == uuid.Nil { + tc.UnitID = dto.UnknownUnitID + } + tcc[idx].Type = 
dto.CwmsTimeseriesType + tsNew, err := qtx.TimeseriesCreate(ctx, db.TimeseriesCreateParams{ + InstrumentID: &instrumentID, + Name: tc.Name, + ParameterID: tc.ParameterID, + UnitID: tc.UnitID, + Type: db.NullTimeseriesType{ + Valid: true, + TimeseriesType: db.TimeseriesTypeCwms, + }, + }) if err != nil { - return tcc, err + return err } - tcc[idx].Timeseries = tsNew - if err := qtx.CreateTimeseriesCwms(ctx, tcc[idx]); err != nil { - return tcc, err + if err := qtx.TimeseriesCwmsCreate(ctx, db.TimeseriesCwmsCreateParams{ + TimeseriesID: tsNew.ID, + CwmsTimeseriesID: tc.CwmsTimeseriesID, + CwmsOfficeID: tc.CwmsOfficeID, + CwmsExtentEarliestTime: tc.CwmsExtentEarliestTime, + CwmsExtentLatestTime: tc.CwmsExtentLatestTime, + }); err != nil { + return err } } - - if err := tx.Commit(); err != nil { - return tcc, err - } - - return tcc, nil + return tx.Commit(ctx) } -func (s timeseriesCwmsService) UpdateTimeseriesCwms(ctx context.Context, tsCwms model.TimeseriesCwms) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesCwmsUpdate(ctx context.Context, ts dto.TimeseriesCwms) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if _, err := qtx.UpdateTimeseries(ctx, tsCwms.Timeseries); err != nil { + if err := qtx.TimeseriesUpdate(ctx, db.TimeseriesUpdateParams{ + ID: ts.ID, + Name: ts.Name, + InstrumentID: &ts.InstrumentID, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + }); err != nil { return err } - if err := qtx.UpdateTimeseriesCwms(ctx, tsCwms); err != nil { + if err := qtx.TimeseriesCwmsUpdate(ctx, db.TimeseriesCwmsUpdateParams{ + TimeseriesID: ts.ID, + CwmsTimeseriesID: ts.CwmsTimeseriesID, + CwmsOfficeID: ts.CwmsOfficeID, + CwmsExtentEarliestTime: ts.CwmsExtentEarliestTime, + CwmsExtentLatestTime: ts.CwmsExtentLatestTime, + }); err != nil { return err } - return tx.Commit() + return tx.Commit(ctx) } diff --git 
a/api/internal/service/timeseries_process.go b/api/internal/service/timeseries_process.go deleted file mode 100644 index 671a2ce5..00000000 --- a/api/internal/service/timeseries_process.go +++ /dev/null @@ -1,21 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type ProcessTimeseriesService interface { - SelectMeasurements(ctx context.Context, f model.ProcessMeasurementFilter) (model.ProcessTimeseriesResponseCollection, error) - SelectInclinometerMeasurements(ctx context.Context, f model.ProcessMeasurementFilter) (model.ProcessInclinometerTimeseriesResponseCollection, error) -} - -type processTimeseriesService struct { - db *model.Database - *model.Queries -} - -func NewProcessTimeseriesService(db *model.Database, q *model.Queries) *processTimeseriesService { - return &processTimeseriesService{db, q} -} diff --git a/api/internal/service/unit.go b/api/internal/service/unit.go deleted file mode 100644 index efea81bf..00000000 --- a/api/internal/service/unit.go +++ /dev/null @@ -1,20 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type UnitService interface { - ListUnits(ctx context.Context) ([]model.Unit, error) -} - -type unitService struct { - db *model.Database - *model.Queries -} - -func NewUnitService(db *model.Database, q *model.Queries) *unitService { - return &unitService{db, q} -} diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index 7a7bc5e4..cd96294c 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -8,76 +8,82 @@ import ( "strconv" "time" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type UploaderService interface { - ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) 
([]model.UploaderConfig, error) - ListUploaderConfigMappings(ctx context.Context, ucID uuid.UUID) ([]model.UploaderConfigMapping, error) - CreateUploaderConfig(ctx context.Context, uc model.UploaderConfig) (uuid.UUID, error) - UpdateUploaderConfig(ctx context.Context, uc model.UploaderConfig) error - DeleteUploaderConfig(ctx context.Context, ucID uuid.UUID) error - CreateUploaderConfigMapping(ctx context.Context, m model.UploaderConfigMapping) error - CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error - UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error - DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, ucID uuid.UUID) error - // CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader) error - // CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error +func (s DBService) UploaderConfigCreate(ctx context.Context, uc dto.UploaderConfig) (uuid.UUID, error) { + return s.Queries.UploaderConfigCreate(ctx, db.UploaderConfigCreateParams{ + ProjectID: uc.ProjectID, + Name: uc.Name, + Description: uc.Description, + Type: db.UploaderConfigType(uc.Type), + TzName: uc.TzName, + Creator: uc.CreatorID, + CreateDate: uc.CreateDate, + }) } -type uploaderService struct { - db *model.Database - *model.Queries +func (s DBService) UploaderConfigUpdate(ctx context.Context, uc dto.UploaderConfig) error { + return s.Queries.UploaderConfigUpdate(ctx, db.UploaderConfigUpdateParams{ + ID: uc.ID, + Name: uc.Name, + Description: uc.Description, + Type: db.UploaderConfigType(uc.Type), + TzName: uc.TzName, + Updater: uc.UpdaterID, + UpdateDate: uc.UpdateDate, + }) } -func NewUploaderService(db *model.Database, q *model.Queries) *uploaderService { - return &uploaderService{db, q} -} - -func (s uploaderService) CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { - tx, err 
:= s.db.BeginTxx(ctx, nil) - if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) - - for _, m := range mm { - if err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { - return err +func (s DBService) UploaderConfigMappingCreateBatch(ctx context.Context, ucID uuid.UUID, mm []dto.UploaderConfigMapping) error { + args := make([]db.UploaderConfigMappingCreateBatchParams, len(mm)) + for idx, m := range mm { + args[idx] = db.UploaderConfigMappingCreateBatchParams{ + UploaderConfigID: m.UploaderConfigID, + FieldName: m.FieldName, + TimeseriesID: m.TimeseriesID, } } - return tx.Commit() + var err error + s.Queries.UploaderConfigMappingCreateBatch(ctx, args).Exec(batchExecErr(&err)) + return err } -func (s uploaderService) UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) UploaderConfigMappingUpdateBatch(ctx context.Context, ucID uuid.UUID, mm []dto.UploaderConfigMapping) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.DeleteAllUploaderConfigMappingsForUploaderConfig(ctx, ucID); err != nil { + if err := qtx.UploaderConfigMappingDeleteForUploaderConfig(ctx, ucID); err != nil { return err } - for _, m := range mm { - if err := qtx.CreateUploaderConfigMapping(ctx, m); err != nil { - return err + args := make([]db.UploaderConfigMappingCreateBatchParams, len(mm)) + for idx, m := range mm { + args[idx] = db.UploaderConfigMappingCreateBatchParams{ + UploaderConfigID: m.UploaderConfigID, + FieldName: m.FieldName, + TimeseriesID: m.TimeseriesID, } } - return tx.Commit() + qtx.UploaderConfigMappingCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) } -// TODO: transition away from datalogger equivalency table to different parser that's uploader specific -func 
(s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesMeasurementsCreateFromTOA5File(ctx context.Context, r io.Reader) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) @@ -100,46 +106,55 @@ func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Co return err } - meta := model.Environment{ - StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - OSVersion: envHeader[4], - ProgName: envHeader[5], - TableName: envHeader[6], + meta := dto.Environment{ + // StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + // OSVersion: envHeader[4], + // ProgName: envHeader[5], + TableName: envHeader[6], } - dl, err := qtx.GetDataloggerByModelSN(ctx, meta.Model, meta.SerialNo) + dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ + Model: &meta.Model, + Sn: meta.SerialNo, + }) if err != nil { return err } - - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dl.ID, meta.TableName) + tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dl.ID, + TableName: meta.TableName, + }) if err != nil { return err } // first two columns are timestamp and record number // we only want to collect the measurement fields here - fields := make([]model.Field, len(fieldHeader)-2) + fields := make([]dto.Field, len(fieldHeader)-2) for i := 2; i < len(fieldHeader); i++ { - fields[i] = model.Field{ + fields[i] = dto.Field{ Name: fieldHeader[i], Units: unitsHeader[i], Process: processHeader[i], } } - eqt, err := qtx.GetEquivalencyTable(ctx, tableID) + eqt, err := qtx.EquivalencyTableGet(ctx, tableID) if err != nil { return err } fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, eqtRow := range eqt.Rows { + for _, eqtRow := range 
eqt.Fields { fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID } + chunkSize := 1_000 + createMmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, chunkSize) + createNoteParams := make([]db.TimeseriesNoteCreateOrUpdateBatchParams, chunkSize) + var mmtIdx, noteIdx int for { record, err := reader.Read() if err == io.EOF { @@ -160,15 +175,53 @@ func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Co if !ok { continue } - v, err := strconv.ParseFloat(cell, 64) - if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { + if err != nil { continue } - - if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, tsID, t, v); err != nil { - return err + createMmtParams[mmtIdx] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Value: v, + } + mmtIdx++ + if mmtIdx == chunkSize { + var err error + qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + mmtIdx = 0 } + if math.IsNaN(v) || math.IsInf(v, 0) { + masked := true + createNoteParams[noteIdx] = db.TimeseriesNoteCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Masked: &masked, + } + noteIdx++ + if noteIdx == chunkSize { + var err error + qtx.TimeseriesNoteCreateOrUpdateBatch(ctx, createNoteParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + noteIdx = 0 + } + } + } + } + if mmtIdx != 0 { + qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, createMmtParams[:mmtIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + if noteIdx != 0 { + qtx.TimeseriesNoteCreateOrUpdateBatch(ctx, createNoteParams[:noteIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err } } return nil diff --git a/api/internal/servicev2/alert.go b/api/internal/servicev2/alert.go deleted file mode 100644 index b113484d..00000000 --- a/api/internal/servicev2/alert.go +++ /dev/null @@ -1,90 +0,0 @@ -package servicev2 - -import ( - "context" - - 
"github.com/USACE/instrumentation-api/api/internal/db" - "github.com/google/uuid" -) - -type AlertService interface { - CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error - ListAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]db.VAlert, error) - ListAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]db.VAlert, error) - ListAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]db.VAlert, error) - GetAlert(ctx context.Context, arg db.GetAlertParams) (db.GetAlertRow, error) - DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) (db.GetAlertRow, error) - DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) (db.GetAlertRow, error) -} - -type alertService struct { - db *Database - *db.Queries -} - -func NewAlertService(db *Database, q *db.Queries) *alertService { - return &alertService{db, q} -} - -// Create creates one or more new alerts -func (s alertService) CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error { - var err error - s.Queries.CreateAlerts(ctx, alertConfigIDs).Exec(batchExecErr(&err)) - return err -} - -// DoAlertRead marks an alert as read for a profile -func (s alertService) DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) (db.GetAlertRow, error) { - var a db.GetAlertRow - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - if err := qtx.CreateAlertRead(ctx, db.CreateAlertReadParams{ - ProfileID: profileID, - AlertID: alertID, - }); err != nil { - return a, err - } - a, err = qtx.GetAlert(ctx, db.GetAlertParams{ - ProfileID: profileID, - ID: alertID, - }) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - return a, nil -} - -// DoAlertUnread marks an alert as unread for a profile -func (s alertService) DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) (db.GetAlertRow, error) { - var a db.GetAlertRow - tx, err := 
s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - if err := qtx.DeleteAlertRead(ctx, db.DeleteAlertReadParams{ - ProfileID: profileID, - AlertID: alertID, - }); err != nil { - return a, err - } - a, err = qtx.GetAlert(ctx, db.GetAlertParams{ - ProfileID: profileID, - ID: alertID, - }) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - return a, nil -} diff --git a/api/internal/servicev2/alert_check.go b/api/internal/servicev2/alert_check.go deleted file mode 100644 index 6aa86770..00000000 --- a/api/internal/servicev2/alert_check.go +++ /dev/null @@ -1,551 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - "fmt" - "log" - "sync" - "time" - - "github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/email" - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -type AlertCheckService interface { - DoAlertChecks(ctx context.Context) error -} - -type alertCheckService struct { - db *Database - *db.Queries - cfg *config.AlertCheckConfig -} - -func NewAlertCheckService(db *Database, q *db.Queries, cfg *config.AlertCheckConfig) *alertCheckService { - return &alertCheckService{db, q, cfg} -} - -var ( - GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") - YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") - RedSubmittalStatusID uuid.UUID = uuid.MustParse("84a0f437-a20a-4ac2-8a5b-f8dc35e8489b") - - MeasurementSubmittalAlertTypeID uuid.UUID = uuid.MustParse("97e7a25c-d5c7-4ded-b272-1bb6e5914fe3") - EvaluationSubmittalAlertTypeID uuid.UUID = uuid.MustParse("da6ee89e-58cc-4d85-8384-43c3c33a68bd") -) - -const ( - warning = "Warning" - alert = "Alert" - reminder = "Reminder" -) - -type alertConfigMap map[uuid.UUID]db.VAlertConfig - -type 
submittalMap map[uuid.UUID]db.VSubmittal - -type alertConfigChecker[T alertChecker] interface { - GetAlertConfig() db.VAlertConfig - SetAlertConfig(ac db.VAlertConfig) - GetChecks() []T - SetChecks(checks []T) - DoEmail(content string, cfg config.AlertCheckConfig) error -} - -type alertChecker interface { - GetShouldWarn() bool - GetShouldAlert() bool - GetShouldRemind() bool - GetSubmittal() *db.VSubmittal - SetSubmittal(sub db.VSubmittal) -} - -func (s alertService) DoAlertChecks(ctx context.Context, cfg config.AlertCheckConfig) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - subs, err := qtx.ListUnverifiedMissingSubmittals(ctx) - if err != nil { - return err - } - acs, err := qtx.ListAndCheckAlertConfigs(ctx) - if err != nil { - return err - } - if len(acs) == 0 { - log.Println("no alert configs to check") - return nil - } - - subMap := make(map[uuid.UUID]db.VSubmittal) - for _, s := range subs { - subMap[s.ID] = s - } - acMap := make(map[uuid.UUID]db.VAlertConfig) - for _, a := range acs { - acMap[a.ID] = a - } - - errs := make([]error, 0) - - if err := checkMeasurements(ctx, qtx, subMap, acMap, cfg); err != nil { - errs = append(errs, err) - } - if err := checkEvaluations(ctx, qtx, subMap, acMap, cfg); err != nil { - errs = append(errs, err) - } - - if err := tx.Commit(ctx); err != nil { - errs = append(errs, err) - } - - if len(errs) > 0 { - return errors.Join(errs...) 
- } - - return nil -} - -func checkEvaluations(ctx context.Context, q *db.Queries, subMap submittalMap, acMap alertConfigMap, cfg config.AlertCheckConfig) error { - accs := make([]*AlertConfigEvaluationCheck, 0) - ecs, err := q.ListIncompleteEvaluationSubmittals(ctx) - if err != nil { - return err - } - - ecMap := make(map[uuid.UUID][]*EvaluationCheck) - for k := range acMap { - ecMap[k] = make([]*EvaluationCheck, 0) - } - for idx := range ecs { - ck := ecs[idx] - check := EvaluationCheck{ - AlertCheck: AlertCheck{ - AlertConfigID: ck.AlertConfigID, - SubmittalID: ck.SubmittalID, - ShouldWarn: ck.ShouldWarn, - ShouldAlert: ck.ShouldAlert, - ShouldRemind: ck.ShouldRemind, - }} - if sub, ok := subMap[ecs[idx].SubmittalID]; ok { - ecs[idx].Submittal = &sub - ecMap[ecs[idx].AlertConfigID] = append(ecMap[ecs[idx].AlertConfigID], &check) - } - } - for k, v := range acMap { - if v.AlertTypeID != EvaluationSubmittalAlertTypeID { - continue - } - acc := AlertConfigEvaluationCheck{ - AlertConfig: AlertConfig(v), - AlertChecks: ecMap[k], - } - accs = append(accs, &acc) - } - - // handleChecks should not rollback txn but should bubble up errors after txn committed - alertCheckErr := handleChecks(ctx, q, accs, cfg) - if alertCheckErr != nil { - return alertCheckErr - } - - return nil -} - -func checkMeasurements(ctx context.Context, q *db.Queries, subMap submittalMap, acMap alertConfigMap, cfg config.AlertCheckConfig) error { - accs := make([]*AlertConfigMeasurementCheck, 0) - mcs, err := q.ListIncompleteMeasurementSubmittals(ctx) - if err != nil { - return err - } - - mcMap := make(map[uuid.UUID][]*MeasurementCheck) - for k := range acMap { - mcMap[k] = make([]*MeasurementCheck, 0) - } - - for idx := range mcs { - if sub, ok := subMap[mcs[idx].SubmittalID]; ok { - ck := mcs[idx] - check := MeasurementCheck{ - AlertCheck: AlertCheck{ - AlertConfigID: ck.AlertConfigID, - SubmittalID: ck.SubmittalID, - ShouldWarn: ck.ShouldWarn, - ShouldAlert: ck.ShouldAlert, - ShouldRemind: 
ck.ShouldRemind, - }} - mcs[idx].Submittal = &sub - mcMap[mcs[idx].AlertConfigID] = append(mcMap[mcs[idx].AlertConfigID], &check) - } - } - - for k, v := range acMap { - if v.AlertTypeID != MeasurementSubmittalAlertTypeID { - continue - } - acc := AlertConfigMeasurementCheck{ - AlertConfig: AlertConfig(v), - AlertChecks: mcMap[k], - } - accs = append(accs, &acc) - } - - alertCheckErr := handleChecks(ctx, q, accs, cfg) - if alertCheckErr != nil { - return alertCheckErr - } - - return nil -} - -func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT) error { - for _, acc := range accs { - ac := acc.GetAlertConfig() - if err := q.UpdateAlertConfigLastReminded(ctx, db.UpdateAlertConfigLastRemindedParams{ - ID: ac.ID, - LastReminded: ac.LastReminded, - }); err != nil { - return err - } - checks := acc.GetChecks() - for _, c := range checks { - sub := c.GetSubmittal() - if sub == nil { - continue - } - if err := q.UpdateSubmittalCompletionDateOrWarningSent(ctx, db.UpdateSubmittalCompletionDateOrWarningSentParams{ - ID: sub.ID, - SubmittalStatusID: &sub.SubmittalStatusID, - CompletionDate: sub.CompletionDate, - WarningSent: sub.WarningSent, - }); err != nil { - return err - } - } - if ac.CreateNextSubmittalFrom != nil { - if err := q.CreateNextSubmittalFromNewAlertConfigDate(ctx, db.CreateNextSubmittalFromNewAlertConfigDateParams{ - ID: ac.ID, - Date: *ac.CreateNextSubmittalFrom, - }); err != nil { - return err - } - } - } - return nil -} - -// there should always be at least one "missing" submittal within an alert config. Submittals are created: -// 1. when an alert config is created (first submittal) -// 2. when a submittal is completed (next submittal created) -// 3. 
when a submittals due date has passed if it is not completed -// -// for evaluations, the next is submittal created manually when the evaluation is made -// for measurements, the next submittal is created the first time this function runs after the due date -// -// No "Yellow" Status Submittals should be passed to this function as it implies the submittal has been completed -// -// TODO: smtp.SendMail esablishes a new connection for each batch of emails sent. I would be better to aggregate -// the contents of each email, then create a connection pool to reuse and send all emails at once, with any errors wrapped and returned -// p.s. Dear future me/someone else: I'm sorry -func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT, cfg config.AlertCheckConfig) error { - defer util.Timer()() - - mu := &sync.Mutex{} - aaccs := make([]PT, len(accs)) - errs := make([]error, 0) - t := time.Now() - - wg := sync.WaitGroup{} - for i, p := range accs { - wg.Add(1) - go func(idx int, acc PT) { - defer wg.Done() - - ac := acc.GetAlertConfig() - checks := acc.GetChecks() - - // If ANY "missing" submittals are within an alert config, aggregate missing submittals and send an alert - acAlert := false - sendAlertEmail := false - // If ANY missing submittals previously existed within an alert config, send them in a "reminder" instead of an alert - acReminder := false - sendReminderEmail := false - // If a reminder exists when at least one submittal "shouldAlert", the alert should be aggregated into the next reminder - // instead of sending a new reminder email. If NO alerts exist for an alert config, the reminder can be reset to NULL. 
- // Reminders should be set when the first alert for an alert config is triggered, or at each reminder interval - resetReminders := len(checks) != 0 - - for j, c := range checks { - shouldWarn := c.GetShouldWarn() - shouldAlert := c.GetShouldAlert() - shouldRemind := c.GetShouldRemind() - sub := c.GetSubmittal() - - // if no submittal alerts or warnings are found, no emails should be sent - if !shouldAlert && !shouldWarn { - // if submittal status was previously red, update status to yellow and - // completion_date to current timestamp - if sub.SubmittalStatusID == RedSubmittalStatusID { - sub.SubmittalStatusID = YellowSubmittalStatusID - sub.CompletionDate = &t - ac.CreateNextSubmittalFrom = &t - } else - - // if submittal status is green and the current time is not before the submittal due date, - // complete the submittal at that due date and prepare the next submittal interval - if sub.SubmittalStatusID == GreenSubmittalStatusID && !t.Before(sub.DueDate) { - sub.CompletionDate = &sub.DueDate - ac.CreateNextSubmittalFrom = &sub.DueDate - } - } else - - // if any submittal warning is triggered, immediately send a - // warning email, since submittal due dates are unique within alert configs - if shouldWarn && !sub.WarningSent { - if !ac.MuteConsecutiveAlerts || ac.LastReminded == nil { - mu.Lock() - if err := acc.DoEmail(warning, cfg); err != nil { - errs = append(errs, err) - } - mu.Unlock() - } - sub.SubmittalStatusID = GreenSubmittalStatusID - sub.WarningSent = true - } else - - // if any submittal alert is triggered after a warning has been sent within an - // alert config, aggregate missing submittals and send their contents in an alert email - if shouldAlert { - if sub.SubmittalStatusID != RedSubmittalStatusID { - sub.SubmittalStatusID = RedSubmittalStatusID - acAlert = true - ac.CreateNextSubmittalFrom = &sub.DueDate - } - resetReminders = false - } - - // if any reminder is triggered, aggregate missing - // submittals and send their contents in an email - 
if shouldRemind { - acReminder = true - } - - if sub == nil { - continue - } - - c.SetSubmittal(*sub) - checks[j] = c - } - - // if there are no alerts, there should also be no reminders sent. "last_reminded" is used to determine - // if an alert has already been sent for an alert config, and send a reminder if so - if resetReminders { - ac.LastReminded = nil - } - - // if there are any reminders within an alert config, they will override the alerts if MuteConsecutiveAlerts is true - if acAlert && ((!acReminder && ac.LastReminded == nil) || !ac.MuteConsecutiveAlerts) { - ac.LastReminded = &t - sendAlertEmail = true - } - if acReminder && ac.LastReminded != nil { - ac.LastReminded = &t - sendReminderEmail = true - } - - acc.SetAlertConfig(ac) - acc.SetChecks(checks) - - if sendAlertEmail { - mu.Lock() - if err := acc.DoEmail(alert, cfg); err != nil { - errs = append(errs, err) - } - mu.Unlock() - } - if sendReminderEmail { - mu.Lock() - if err := acc.DoEmail(reminder, cfg); err != nil { - errs = append(errs, err) - } - mu.Unlock() - } - - aaccs[idx] = acc - }(i, p) - } - wg.Wait() - - if err := updateAlertConfigChecks(ctx, q, aaccs); err != nil { - errs = append(errs, err) - return errors.Join(errs...) - } - if len(errs) > 0 { - return errors.Join(errs...) 
- } - - return nil -} - -type AlertCheck struct { - AlertConfigID uuid.UUID - SubmittalID uuid.UUID - ShouldWarn bool - ShouldAlert bool - ShouldRemind bool - Submittal *db.VSubmittal -} - -func (ck AlertCheck) GetShouldWarn() bool { - return ck.ShouldWarn -} - -func (ck AlertCheck) GetShouldAlert() bool { - return ck.ShouldAlert -} - -func (ck AlertCheck) GetShouldRemind() bool { - return ck.ShouldRemind -} - -func (ck AlertCheck) GetSubmittal() *db.VSubmittal { - return ck.Submittal -} - -func (ck *AlertCheck) SetSubmittal(sub db.VSubmittal) { - ck.Submittal = &sub -} - -type AlertConfig db.VAlertConfig - -func (a *AlertConfig) GetToAddresses() []string { - emails := make([]string, len(a.AlertEmailSubscriptions)) - for idx := range a.AlertEmailSubscriptions { - emails[idx] = a.AlertEmailSubscriptions[idx].Email - } - return emails -} - -type AlertConfigEvaluationCheck struct { - AlertConfig - AlertChecks []*EvaluationCheck -} - -type EvaluationCheck struct { - AlertCheck -} - -func (a AlertConfigEvaluationCheck) GetAlertConfig() db.VAlertConfig { - return db.VAlertConfig(a.AlertConfig) -} - -func (a *AlertConfigEvaluationCheck) SetAlertConfig(ac db.VAlertConfig) { - a.AlertConfig = AlertConfig(ac) -} - -func (a AlertConfigEvaluationCheck) GetChecks() []*EvaluationCheck { - return a.AlertChecks -} - -func (a *AlertConfigEvaluationCheck) SetChecks(ec []*EvaluationCheck) { - a.AlertChecks = ec -} - -func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg config.AlertCheckConfig) error { - if emailType == "" { - return fmt.Errorf("must provide emailType") - } - preformatted := email.EmailContent{ - TextSubject: "-- DO NOT REPLY -- MIDAS " + emailType + ": Evaluation Submittal", - TextBody: "The following " + emailType + " has been triggered:\r\n\r\n" + - "Project: {{.AlertConfig.ProjectName}}\r\n" + - "Alert Type: Evaluation Submittal\r\n" + - "Alert Name: \"{{.AlertConfig.Name}}\"\r\n" + - "Description: \"{{.AlertConfig.Body}}\"\r\n" + - "Expected 
Evaluation Submittals:\r\n" + - "{{range .AlertChecks}}{{if or .ShouldAlert .ShouldWarn}}" + - "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + - "{{if .ShouldAlert}} (missing) {{else if .ShouldWarn}} (warning) {{end}}\r\n{{end}}{{end}}", - } - templContent, err := email.CreateEmailTemplateContent(preformatted) - if err != nil { - return err - } - content, err := email.FormatAlertConfigTemplates(templContent, acc) - if err != nil { - return err - } - content.To = acc.AlertConfig.GetToAddresses() - if err := email.ConstructAndSendEmail(content, cfg); err != nil { - return err - } - return nil -} - -type AlertConfigMeasurementCheck struct { - AlertConfig AlertConfig - AlertChecks []*MeasurementCheck -} - -type MeasurementCheck struct { - AlertCheck - AffectedTimeseries []db.AlertCheckMeasurementSubmittalAffectedTimeseries -} - -func (a AlertConfigMeasurementCheck) GetAlertConfig() db.VAlertConfig { - return db.VAlertConfig(a.AlertConfig) -} - -func (a *AlertConfigMeasurementCheck) SetAlertConfig(ac db.VAlertConfig) { - a.AlertConfig = AlertConfig(ac) -} - -func (a AlertConfigMeasurementCheck) GetChecks() []*MeasurementCheck { - return a.AlertChecks -} - -func (a *AlertConfigMeasurementCheck) SetChecks(mc []*MeasurementCheck) { - a.AlertChecks = mc -} - -func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg config.AlertCheckConfig) error { - if emailType == "" { - return fmt.Errorf("must provide emailType") - } - preformatted := email.EmailContent{ - TextSubject: "-- DO NOT REPLY -- MIDAS " + emailType + ": Timeseries Measurement Submittal", - TextBody: "The following " + emailType + " has been triggered:\r\n\r\n" + - "Project: {{.AlertConfig.ProjectName}}\r\n" + - "Alert Type: Measurement Submittal\r\n" + - "Alert Name: \"{{.AlertConfig.Name}}\"\r\n" + - "Description: \"{{.AlertConfig.Body}}\"\r\n" + - "Expected Measurement Submittals:\r\n" + - "{{range .AlertChecks}}" + - 
"\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + - "{{range .AffectedTimeseries}}" + - "\t\t• {{.InstrumentName}}: {{.TimeseriesName}} ({{.Status}})\r\n" + - "{{end}}\r\n{{end}}", - } - templContent, err := email.CreateEmailTemplateContent(preformatted) - if err != nil { - return err - } - content, err := email.FormatAlertConfigTemplates(templContent, ms) - if err != nil { - return err - } - content.To = ms.AlertConfig.GetToAddresses() - if err := email.ConstructAndSendEmail(content, cfg); err != nil { - return err - } - return nil -} diff --git a/api/internal/servicev2/alert_config.go b/api/internal/servicev2/alert_config.go deleted file mode 100644 index 01965a09..00000000 --- a/api/internal/servicev2/alert_config.go +++ /dev/null @@ -1,162 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type AlertConfigService interface { - ListAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]db.VAlertConfig, error) - ListAlertConfigsForProjectAlertType(ctx context.Context, projectID, alertTypeID uuid.UUID) ([]db.VAlertConfig, error) - ListAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]db.VAlertConfig, error) - GetAlertConfig(ctx context.Context, alertConfigID uuid.UUID) (db.VAlertConfig, error) - CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (db.VAlertConfig, error) - UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (db.VAlertConfig, error) - DeleteAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error -} - -type alertConfigService struct { - db *Database - *db.Queries -} - -func NewAlertConfigService(db *Database, q *db.Queries) *alertConfigService { - return &alertConfigService{db, q} -} - -// CreateAlertConfig creates one new alert 
configuration -func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (db.VAlertConfig, error) { - var a db.VAlertConfig - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - - if ac.RemindInterval == "" { - ac.RemindInterval = "PT0" - } - if ac.WarningInterval == "" { - ac.WarningInterval = "PT0" - } - - qtx := s.WithTx(tx) - - acID, err := qtx.CreateAlertConfig(ctx, db.CreateAlertConfigParams{ - ProjectID: ac.ProjectID, - Name: ac.Name, - Body: ac.Body, - AlertTypeID: ac.AlertTypeID, - StartDate: ac.StartDate, - ScheduleInterval: ac.ScheduleInterval, - MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, - RemindInterval: ac.RemindInterval, - WarningInterval: ac.WarningInterval, - Creator: ac.CreatorID, - CreateDate: ac.CreateDate, - }) - if err != nil { - return a, err - } - - for _, aci := range ac.Instruments { - if err := qtx.AssignInstrumentToAlertConfig(ctx, db.AssignInstrumentToAlertConfigParams{ - AlertConfigID: acID, - InstrumentID: aci.InstrumentID, - }); err != nil { - return a, err - } - } - - if err := registerAndSubscribe(ctx, qtx, acID, ac.AlertEmailSubscriptions); err != nil { - return a, err - } - - if err := qtx.CreateNextSubmittalFromExistingAlertConfigDate(ctx, acID); err != nil { - return a, err - } - - acNew, err := qtx.GetAlertConfig(ctx, acID) - if err != nil { - return a, err - } - - if err := tx.Commit(ctx); err != nil { - return a, err - } - - return acNew, nil -} - -// UpdateAlertConfig updates an alert config -func (s alertConfigService) UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (db.VAlertConfig, error) { - var a db.VAlertConfig - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - - if ac.RemindInterval == "" { - ac.RemindInterval = "PT0" - } - if ac.WarningInterval == "" { - ac.WarningInterval = "PT0" - } - - qtx := s.WithTx(tx) - - if err := qtx.UpdateAlertConfig(ctx, 
db.UpdateAlertConfigParams{ - ID: ac.ID, - ProjectID: ac.ProjectID, - Name: ac.Name, - Body: ac.Body, - StartDate: ac.StartDate, - ScheduleInterval: ac.ScheduleInterval, - MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, - RemindInterval: ac.RemindInterval, - WarningInterval: ac.WarningInterval, - Updater: ac.UpdaterID, - UpdateDate: ac.UpdateDate, - }); err != nil { - return a, err - } - - if err := qtx.UnassignAllInstrumentsFromAlertConfig(ctx, alertConfigID); err != nil { - return a, err - } - - for _, aci := range ac.Instruments { - if err := qtx.AssignInstrumentToAlertConfig(ctx, db.AssignInstrumentToAlertConfigParams{ - AlertConfigID: alertConfigID, - InstrumentID: aci.InstrumentID, - }); err != nil { - return a, err - } - } - - if err := qtx.DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx, alertConfigID); err != nil { - return a, err - } - if err := registerAndSubscribe(ctx, qtx, alertConfigID, ac.AlertEmailSubscriptions); err != nil { - return a, err - } - - if _, err := qtx.UpdateFutureSubmittalForAlertConfig(ctx, &alertConfigID); err != nil { - return a, err - } - - a, err = qtx.GetAlertConfig(ctx, alertConfigID) - if err != nil { - return a, err - } - - if err := tx.Commit(ctx); err != nil { - return a, err - } - - return a, nil -} diff --git a/api/internal/servicev2/alert_subscription.go b/api/internal/servicev2/alert_subscription.go deleted file mode 100644 index 8c9dcc54..00000000 --- a/api/internal/servicev2/alert_subscription.go +++ /dev/null @@ -1,241 +0,0 @@ -package servicev2 - -import ( - "context" - "fmt" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -const ( - unknownUserType = "" - emailUserType = "email" - profileUserType = "profile" -) - -type AlertSubscriptionService interface { - SubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) (model.AlertSubscription, error) - UnsubscribeProfileToAlerts(ctx 
context.Context, alertConfigID, profileID uuid.UUID) error - GetAlertSubscription(ctx context.Context, alertConfigID, profileID uuid.UUID) (model.AlertSubscription, error) - GetAlertSubscriptionByID(ctx context.Context, subscriptionID uuid.UUID) (model.AlertSubscription, error) - ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]model.AlertSubscription, error) - UpdateMyAlertSubscription(ctx context.Context, s model.AlertSubscription) (model.AlertSubscription, error) - SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) - UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) - UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error - UnregisterEmail(ctx context.Context, emailID uuid.UUID) error -} - -type alertSubscriptionService struct { - db *Database - *db.Queries -} - -func NewAlertSubscriptionService(db *Database, q *db.Queries) *alertSubscriptionService { - return &alertSubscriptionService{db, q} -} - -// SubscribeProfileToAlerts subscribes a profile to an instrument alert -func (s alertSubscriptionService) SubscribeProfileToAlerts(ctx context.Context, alertConfigID uuid.UUID, profileID uuid.UUID) (db.AlertProfileSubscription, error) { - var a db.AlertProfileSubscription - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - if err := qtx.CreateAlertProfileSubscriptionOnAnyConflictDoNothing(ctx, db.CreateAlertProfileSubscriptionOnAnyConflictDoNothingParams{ - AlertConfigID: alertConfigID, - ProfileID: profileID, - }); err != nil { - return a, err - } - - updated, err := qtx.GetAlertSubscriptionForAlertConfig(ctx, db.GetAlertSubscriptionForAlertConfigParams{ - AlertConfigID: alertConfigID, - ProfileID: profileID, - }) - if err != nil { - return a, err - } - - if err 
:= tx.Commit(ctx); err != nil { - return a, err - } - - return updated, nil -} - -// UpdateMyAlertSubscription updates properties on a AlertSubscription -func (s alertSubscriptionService) UpdateMyAlertSubscription(ctx context.Context, sub model.AlertSubscription) (db.AlertProfileSubscription, error) { - var a db.AlertProfileSubscription - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - if err := qtx.UpdateMyAlertSubscription(ctx, db.UpdateMyAlertSubscriptionParams{ - MuteUi: sub.MuteUI, - MuteNotify: sub.MuteNotify, - AlertConfigID: sub.AlertConfigID, - ProfileID: sub.ProfileID, - }); err != nil { - return a, err - } - updated, err := qtx.GetAlertSubscription(ctx, sub.ID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - return updated, nil -} - -func (s alertSubscriptionService) SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (db.VAlertConfig, error) { - var a db.VAlertConfig - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - if err := registerAndSubscribe(ctx, qtx, alertConfigID, emails); err != nil { - return a, err - } - // Register any emails that are not yet in system - for idx, em := range emails { - if em.UserType == unknownUserType || em.UserType == emailUserType { - newID, err := qtx.RegisterEmail(ctx, em.Email) - if err != nil { - return a, err - } - emails[idx].ID = newID - emails[idx].UserType = emailUserType - } - } - // Subscribe emails - for _, em := range emails { - if em.UserType == emailUserType { - if err := qtx.CreateAlertEmailSubscription(ctx, db.CreateAlertEmailSubscriptionParams{ - AlertConfigID: alertConfigID, - EmailID: em.ID, - }); err != nil { - return a, err - } - } else if em.UserType == profileUserType { - if err := qtx.CreateAlertProfileSubscription(ctx, 
db.CreateAlertProfileSubscriptionParams{ - AlertConfigID: alertConfigID, - ProfileID: em.ID, - }); err != nil { - return a, err - } - } else { - return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) - } - } - acUpdated, err := qtx.GetAlertConfig(ctx, alertConfigID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - return acUpdated, nil -} - -func (s alertSubscriptionService) UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (db.VAlertConfig, error) { - var a db.VAlertConfig - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - for _, em := range emails { - if em.UserType == unknownUserType { - return a, fmt.Errorf("required field user_type is null, aborting transaction") - } else if em.UserType == emailUserType { - if err := qtx.DeleteAlertEmailSubscription(ctx, db.DeleteAlertEmailSubscriptionParams{ - AlertConfigID: alertConfigID, - EmailID: em.ID, - }); err != nil { - return a, err - } - } else if em.UserType == profileUserType { - if err := qtx.DeleteAlertProfileSubscription(ctx, db.DeleteAlertProfileSubscriptionParams{ - AlertConfigID: alertConfigID, - ProfileID: em.ID, - }); err != nil { - return a, err - } - } else { - return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) - } - } - acUpdated, err := qtx.GetAlertConfig(ctx, alertConfigID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - return acUpdated, nil -} - -func (s alertSubscriptionService) UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - if err := 
qtx.DeleteAllAlertEmailSubscritpionsForAlertConfig(ctx, alertConfigID); err != nil { - return err - } - if err := qtx.DeleteAllAlertProfileSubscritpionsForAlertConfig(ctx, alertConfigID); err != nil { - return err - } - if err := tx.Commit(ctx); err != nil { - return err - } - return nil -} - -func registerAndSubscribe(ctx context.Context, q *db.Queries, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) error { - for idx, em := range emails { - if em.UserType == unknownUserType || em.UserType == emailUserType { - newID, err := q.RegisterEmail(ctx, em.Email) - if err != nil { - return err - } - emails[idx].ID = newID - emails[idx].UserType = emailUserType - } - } - for _, em := range emails { - if em.UserType == emailUserType { - if err := q.CreateAlertEmailSubscription(ctx, db.CreateAlertEmailSubscriptionParams{ - AlertConfigID: alertConfigID, - EmailID: em.ID, - }); err != nil { - return err - } - } else if em.UserType == profileUserType { - if err := q.CreateAlertProfileSubscription(ctx, db.CreateAlertProfileSubscriptionParams{ - AlertConfigID: alertConfigID, - ProfileID: em.ID, - }); err != nil { - return err - } - } else { - return fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) - } - } - return nil -} diff --git a/api/internal/servicev2/autocomplete.go b/api/internal/servicev2/autocomplete.go deleted file mode 100644 index 8eb581d1..00000000 --- a/api/internal/servicev2/autocomplete.go +++ /dev/null @@ -1,11 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type EmailAutocompleteService interface { - ListEmailAutocomplete(ctx context.Context, emailInput string, limit int) ([]model.EmailAutocompleteResult, error) -} diff --git a/api/internal/servicev2/aware.go b/api/internal/servicev2/aware.go deleted file mode 100644 index 4e9fbd40..00000000 --- a/api/internal/servicev2/aware.go +++ /dev/null @@ -1,49 +0,0 
@@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type AwareParameterService interface { - ListAwareParameters(ctx context.Context) ([]model.AwareParameter, error) - ListAwarePlatformParameterConfig(ctx context.Context) ([]model.AwarePlatformParameterConfig, error) -} - -type awareParameterService struct { - db *model.Database - *model.Queries -} - -func NewAwareParameterService(db *model.Database, q *model.Queries) *awareParameterService { - return &awareParameterService{db, q} -} - -// ListAwarePlatformParameterConfig returns aware platform parameter configs -func (s awareParameterService) ListAwarePlatformParameterConfig(ctx context.Context) ([]model.AwarePlatformParameterConfig, error) { - aa := make([]model.AwarePlatformParameterConfig, 0) - ee, err := s.ListAwarePlatformParameterEnabled(ctx) - if err != nil { - return aa, err - } - // reorganize aware_parameter_key, timeseries_id into map for each instrument - // Map of aware parameters to timeseries - m1 := make(map[uuid.UUID]model.AwarePlatformParameterConfig) - for _, e := range ee { - if _, ok := m1[e.InstrumentID]; !ok { - m1[e.InstrumentID] = model.AwarePlatformParameterConfig{ - InstrumentID: e.InstrumentID, - AwareID: e.AwareID, - AwareParameters: make(map[string]*uuid.UUID), - } - } - m1[e.InstrumentID].AwareParameters[e.AwareParameterKey] = e.TimeseriesID - } - - for k := range m1 { - aa = append(aa, m1[k]) - } - return aa, nil -} diff --git a/api/internal/servicev2/collection_group.go b/api/internal/servicev2/collection_group.go deleted file mode 100644 index 0f13d512..00000000 --- a/api/internal/servicev2/collection_group.go +++ /dev/null @@ -1,19 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type CollectionGroupService interface { - 
ListCollectionGroupsForProject(ctx context.Context, projectID uuid.UUID) ([]db.ListCollectionGroupsForProjectRow, error) - GetCollectionGroupDetails(ctx context.Context, id uuid.UUID) (db.VCollectionGroupDetail, error) - CreateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (db.CollectionGroup, error) - UpdateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (db.CollectionGroup, error) - DeleteCollectionGroup(ctx context.Context, projectID, id uuid.UUID) error - AddTimeseriesToCollectionGroup(ctx context.Context, arg db.AddTimeseriesToCollectionGroupParams) error - RemoveTimeseriesFromCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error -} diff --git a/api/internal/servicev2/datalogger.go b/api/internal/servicev2/datalogger.go deleted file mode 100644 index f34c4f77..00000000 --- a/api/internal/servicev2/datalogger.go +++ /dev/null @@ -1,198 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/USACE/instrumentation-api/api/internal/password" - "github.com/google/uuid" -) - -type DataloggerService interface { - GetDatalogger(ctx context.Context, dataloggerID uuid.UUID) (db.VDatalogger, error) - ListDataloggers(ctx context.Context) ([]db.VDatalogger, error) - ListDataloggersForProject(ctx context.Context, projectID uuid.UUID) ([]db.VDatalogger, error) - CreateDatalogger(ctx context.Context, n model.Datalogger) (model.DataloggerWithKey, error) - UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) - DeleteDatalogger(ctx context.Context, d model.Datalogger) error - GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) - DeleteDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error - ResetDataloggerTableName(ctx context.Context, dataloggerTableID uuid.UUID) 
error - VerifyDataloggerExists(ctx context.Context, dlID uuid.UUID) error - GetDataloggerModelName(ctx context.Context, modelID uuid.UUID) (string, error) - GetDataloggerIsActive(ctx context.Context, modelName, sn string) (bool, error) - CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) - GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (model.DataloggerTablePreview, error) -} - -type dataloggerService struct { - db *Database - *db.Queries -} - -func NewDataloggerService(db *Database, q *db.Queries) *dataloggerService { - return &dataloggerService{db, q} -} - -type DataloggerWithKey struct { - db.VDatalogger - Key string `json:"key"` -} - -func (s dataloggerService) CreateDatalogger(ctx context.Context, n model.Datalogger) (DataloggerWithKey, error) { - var a DataloggerWithKey - - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - dataloggerID, err := qtx.CreateDatalogger(ctx, db.CreateDataloggerParams{ - Name: n.Name, - Sn: n.SN, - ProjectID: n.ProjectID, - Creator: n.CreatorID, - ModelID: n.ModelID, - }) - if err != nil { - return a, err - } - - key := password.GenerateRandom(40) - hash := password.MustCreateHash(key, password.DefaultParams) - - if err := qtx.CreateDataloggerHash(ctx, db.CreateDataloggerHashParams{ - DataloggerID: dataloggerID, - Hash: hash, - }); err != nil { - return a, err - } - dl, err := qtx.GetDatalogger(ctx, dataloggerID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - dk := DataloggerWithKey{ - VDatalogger: dl, - Key: key, - } - return dk, nil -} - -func (s dataloggerService) CycleDataloggerKey(ctx context.Context, profileID, dataloggerID uuid.UUID) (DataloggerWithKey, error) { - var a DataloggerWithKey - - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - key := 
password.GenerateRandom(40) - hash := password.MustCreateHash(key, password.DefaultParams) - - if err := qtx.UpdateDataloggerHash(ctx, db.UpdateDataloggerHashParams{ - DataloggerID: dataloggerID, - Hash: hash, - }); err != nil { - return a, err - } - - if err := qtx.UpdateDataloggerUpdater(ctx, db.UpdateDataloggerUpdaterParams{ - ID: dataloggerID, - Updater: profileID, - UpdateDate: time.Now(), - }); err != nil { - return a, err - } - - dl, err := qtx.GetDatalogger(ctx, dataloggerID) - if err != nil { - return a, err - } - - if err := tx.Commit(ctx); err != nil { - return a, err - } - - dk := DataloggerWithKey{ - VDatalogger: dl, - Key: key, - } - - return dk, nil -} - -func (s dataloggerService) UpdateDatalogger(ctx context.Context, u model.Datalogger) (db.VDatalogger, error) { - var a db.VDatalogger - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - if u.UpdaterID == nil { - return a, errors.New("must set updater id") - } - - if err := qtx.UpdateDatalogger(ctx, db.UpdateDataloggerParams{ - ID: u.ID, - Name: u.Name, - Updater: *u.UpdaterID, - UpdateDate: time.Now(), - }); err != nil { - return a, err - } - - dlUpdated, err := qtx.GetDatalogger(ctx, u.ID) - if err != nil { - return a, err - } - - if err := tx.Commit(ctx); err != nil { - return a, err - } - - return dlUpdated, nil -} - -func (s dataloggerService) GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { - tx, err := s.db.Begin(ctx) - if err != nil { - return uuid.Nil, err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - if err := qtx.RenameEmptyDataloggerTableName(ctx, db.RenameEmptyDataloggerTableNameParams{ - DataloggerID: dataloggerID, - TableName: tableName, - }); err != nil { - return uuid.Nil, err - } - - dataloggerTableID, err := qtx.GetOrCreateDataloggerTable(ctx, db.GetOrCreateDataloggerTableParams{ - DataloggerID: dataloggerID, - TableName: 
tableName, - }) - if err != nil { - return uuid.Nil, err - } - - if err := tx.Commit(ctx); err != nil { - return uuid.Nil, err - } - - return dataloggerTableID, nil -} diff --git a/api/internal/servicev2/datalogger_telemetry.go b/api/internal/servicev2/datalogger_telemetry.go deleted file mode 100644 index 800ddd0e..00000000 --- a/api/internal/servicev2/datalogger_telemetry.go +++ /dev/null @@ -1,245 +0,0 @@ -package servicev2 - -import ( - "context" - "database/sql" - "encoding/csv" - "errors" - "fmt" - "io" - "math" - "strconv" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type DataloggerTelemetryService interface { - GetDataloggerByModelSN(ctx context.Context, modelName, sn string) (db.VDatalogger, error) - GetDataloggerHashByModelSN(ctx context.Context, modelName, sn string) (string, error) - CreateDataloggerTablePreview(ctx context.Context, prv model.DataloggerTablePreview) error - UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) - UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error - CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error -} - -type dataloggerTelemetryService struct { - db *Database - *db.Queries -} - -func NewDataloggerTelemetryService(db *Database, q *db.Queries) *dataloggerTelemetryService { - return &dataloggerTelemetryService{db, q} -} - -// UpdateDataloggerTablePreview attempts to update a table preview by datalogger_id and table_name, creates the -// datalogger table and corresponding preview if it doesn't exist -func (s dataloggerTelemetryService) UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) { - tx, err := s.db.Begin(ctx) - if 
err != nil { - return uuid.Nil, err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - // replace empty datalogger table name with most recent payload - if err := qtx.RenameEmptyDataloggerTableName(ctx, db.RenameEmptyDataloggerTableNameParams{ - DataloggerID: dataloggerID, - TableName: tableName, - }); err != nil { - return uuid.Nil, err - } - - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, db.GetOrCreateDataloggerTableParams{ - DataloggerID: dataloggerID, - TableName: tableName, - }) - if err != nil { - return uuid.Nil, err - } - if err := qtx.UpdateDataloggerTablePreview(ctx, db.UpdateDataloggerTablePreviewParams{ - DataloggerID: dataloggerID, - TableName: tableName, - Preview: prv.Preview.Bytes, - UpdateDate: prv.UpdateDate, - }); err != nil { - if !errors.Is(err, sql.ErrNoRows) { - return uuid.Nil, err - } - prv.DataloggerTableID = tableID - if err := qtx.CreateDataloggerTablePreview(ctx, db.CreateDataloggerTablePreviewParams{ - DataloggerTableID: prv.DataloggerTableID, - Preview: prv.Preview.Bytes, - UpdateDate: prv.UpdateDate, - }); err != nil { - } - } - - return tableID, tx.Commit(ctx) -} - -func (s dataloggerTelemetryService) UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error { - if tableName == nil { - return errors.New("table name must not be nil") - } - - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := qtx.DeleteDataloggerTableError(ctx, db.DeleteDataloggerTableErrorParams{ - DataloggerID: dataloggerID, - TableName: *tableName, - }); err != nil { - return err - } - - for _, m := range e.Errors { - if err := qtx.CreateDataloggerError(ctx, db.CreateDataloggerErrorParams{ - DataloggerID: dataloggerID, - TableName: *tableName, - ErrorMessage: &m, - }); err != nil { - return err - } - } - - return tx.Commit(ctx) -} - -// ParseTOA5 parses a Campbell Scientific TOA5 data file that is simlar 
to a csv. -// The unique properties of TOA5 are that the meatdata are stored in header of file (first 4 lines of csv) -func (s dataloggerTelemetryService) CreateOrUpdateDataloggerTOA5MeasurementCollection(ctx context.Context, r io.Reader) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - reader := csv.NewReader(r) - - envHeader, err := reader.Read() - if err != nil { - return err - } - fieldHeader, err := reader.Read() - if err != nil { - return err - } - unitsHeader, err := reader.Read() - if err != nil { - return err - } - processHeader, err := reader.Read() - if err != nil { - return err - } - - meta := model.Environment{ - StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - OSVersion: envHeader[4], - ProgName: envHeader[5], - TableName: envHeader[6], - } - - dl, err := qtx.GetDataloggerByModelSN(ctx, db.GetDataloggerByModelSNParams{ - Model: &meta.Model, - Sn: meta.SerialNo, - }) - if err != nil { - return err - } - - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, db.GetOrCreateDataloggerTableParams{ - DataloggerID: dl.ID, - TableName: meta.TableName, - }) - if err != nil { - return err - } - - em := make([]string, 0) - defer func() { - s.UpdateDataloggerTableError(ctx, dl.ID, &meta.TableName, &model.DataloggerError{Errors: em}) - }() - - // first two columns are timestamp and record number - // we only want to collect the measurement fields here - fields := make([]model.Field, len(fieldHeader)-2) - for i := 2; i < len(fieldHeader); i++ { - fields[i] = model.Field{ - Name: fieldHeader[i], - Units: unitsHeader[i], - Process: processHeader[i], - } - } - - eqt, err := qtx.GetEquivalencyTable(ctx, tableID) - if err != nil { - return err - } - - fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, eqtRow := range eqt.Fields { - fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID - } - - for { - record, err := reader.Read() - if err 
== io.EOF { - break - } - if err != nil { - return err - } - - t, err := time.Parse(record[0], time.RFC3339) - if err != nil { - return err - } - - for idx, cell := range record[2:] { - fieldName := fields[idx].Name - tsID, ok := fieldNameTimeseriesIDMap[fieldName] - if !ok { - // key error, field_name does not exist for equivalency table - // add error to Measurement payload to report back to user - em = append(em, fmt.Sprintf( - "key error: field_name %s does not exist for equivalency table %s", - fieldName, meta.TableName, - )) - continue - } - - v, err := strconv.ParseFloat(cell, 64) - if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { - // could not parse float - // add error to Measurement payload to report back to user - em = append(em, fmt.Sprintf( - "value error: field_name %s contains invalid value entry at %s", - fieldName, t, - )) - continue - } - - if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, db.CreateOrUpdateTimeseriesMeasurementParams{ - TimeseriesID: tsID, - Time: t, - Value: v, - }); err != nil { - return err - } - } - } - - return tx.Commit(ctx) -} diff --git a/api/internal/servicev2/db.go b/api/internal/servicev2/db.go deleted file mode 100644 index e3c4ffb2..00000000 --- a/api/internal/servicev2/db.go +++ /dev/null @@ -1,55 +0,0 @@ -package servicev2 - -import ( - "context" - "database/sql" - "errors" - "fmt" - "log" - - "github.com/jackc/pgx/v5/pgxpool" -) - -type Database struct { - *pgxpool.Pool -} - -func txDo(ctx context.Context, rollback func(ctx context.Context) error) { - err := rollback(ctx) - if err != nil && !errors.Is(err, sql.ErrTxDone) { - log.Print(err.Error()) - } -} - -func batchExecErr(err *error) func(int, error) { - return func(_ int, e error) { - if e != nil { - *err = e - return - } - } -} - -func batchQueryRowErr[T any](err *error) func(int, T, error) { - return func(_ int, _ T, e error) { - if e != nil { - *err = e - return - } - } -} - -func batchQueryRowCollect[T any](rr []T, err *error) func(int, T, error) 
{ - rrlen := len(rr) - return func(i int, r T, e error) { - if e != nil { - *err = e - return - } - if i == rrlen { - *err = fmt.Errorf("rr slice must be same length as QueryRow args") - return - } - rr[i] = r - } -} diff --git a/api/internal/servicev2/dcsloader.go b/api/internal/servicev2/dcsloader.go deleted file mode 100644 index bcf13b9c..00000000 --- a/api/internal/servicev2/dcsloader.go +++ /dev/null @@ -1,125 +0,0 @@ -package servicev2 - -import ( - "bytes" - "encoding/csv" - "encoding/json" - "fmt" - "io" - "log" - "net/http" - "net/url" - "strconv" - "time" - - "github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -type DcsLoaderService interface { - ParseCsvMeasurementCollection(r io.Reader) ([]model.MeasurementCollection, int, error) - PostMeasurementCollectionToApi(mcs []model.MeasurementCollection) error -} - -type dcsLoaderService struct { - apiClient *http.Client - cfg *config.DcsLoaderConfig -} - -func NewDcsLoaderService(apiClient *http.Client, cfg *config.DcsLoaderConfig) *dcsLoaderService { - return &dcsLoaderService{apiClient, cfg} -} - -func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.MeasurementCollection, int, error) { - mcs := make([]model.MeasurementCollection, 0) - mCount := 0 - reader := csv.NewReader(r) - - rows := make([][]string, 0) - for { - row, err := reader.Read() - if err == io.EOF { - break - } - if err != nil { - return mcs, mCount, err - } - rows = append(rows, row) - } - - mcMap := make(map[uuid.UUID]*model.MeasurementCollection) - for _, row := range rows { - // 0=timeseries_id, 1=time, 2=value - tsid, err := uuid.Parse(row[0]) - if err != nil { - return mcs, mCount, err - } - t, err := time.Parse(time.RFC3339, row[1]) - if err != nil { - return mcs, mCount, err - } - v, err := strconv.ParseFloat(row[2], 32) - if err != nil { - return mcs, 
mCount, err - } - - if _, ok := mcMap[tsid]; !ok { - mcMap[tsid] = &model.MeasurementCollection{ - TimeseriesID: tsid, - Items: make([]model.Measurement, 0), - } - } - mcMap[tsid].Items = append(mcMap[tsid].Items, model.Measurement{TimeseriesID: tsid, Time: t, Value: model.FloatNanInf(v)}) - mCount++ - } - - mcs = make([]model.MeasurementCollection, len(mcMap)) - idx := 0 - for _, v := range mcMap { - mcs[idx] = *v - idx++ - } - - return mcs, mCount, nil -} - -func (s dcsLoaderService) PostMeasurementCollectionToApi(mcs []model.MeasurementCollection) error { - requestBodyBytes, err := json.Marshal(mcs) - if err != nil { - return err - } - - req, err := http.NewRequest("POST", fmt.Sprintf("%s?key=%s", s.cfg.PostURL, s.cfg.APIKey), bytes.NewReader(requestBodyBytes)) - if err != nil { - return err - } - defer req.Body.Close() - - req.Header.Add("Content-Type", "application/json") - - resp, err := s.apiClient.Do(req) - if err != nil { - urlErr := err.(*url.Error) - urlRedact := util.RedactRequest{URL: urlErr.URL} - if err := urlRedact.RedactQueryParam("key"); err != nil { - return err - } - urlErr.URL = urlRedact.URL - log.Printf("\n\t*** Error; unable to make request; %s", urlErr.Error()) - return urlErr - } - defer resp.Body.Close() - - if resp.StatusCode != 201 { - log.Printf("\n\t*** Error; Status Code: %d ***\n", resp.StatusCode) - body, err := io.ReadAll(resp.Body) - if err != nil { - log.Println("Error reading response body") - return err - } - log.Printf("%s\n", body) - } - return nil -} diff --git a/api/internal/servicev2/district_rollup.go b/api/internal/servicev2/district_rollup.go deleted file mode 100644 index 00299620..00000000 --- a/api/internal/servicev2/district_rollup.go +++ /dev/null @@ -1,23 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type DistrictRollupService interface { - 
ListEvaluationDistrictRollup(ctx context.Context, opID uuid.UUID, tw model.TimeWindow) ([]model.DistrictRollup, error) - ListMeasurementDistrictRollup(ctx context.Context, opID uuid.UUID, tw model.TimeWindow) ([]model.DistrictRollup, error) -} - -type districtRollupService struct { - db *Database - *db.Queries -} - -func NewDistrictRollupService(db *Database, q *db.Queries) *districtRollupService { - return &districtRollupService{db, q} -} diff --git a/api/internal/servicev2/domain.go b/api/internal/servicev2/domain.go deleted file mode 100644 index 04725563..00000000 --- a/api/internal/servicev2/domain.go +++ /dev/null @@ -1,22 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type DomainService interface { - GetDomains(ctx context.Context) ([]model.Domain, error) - GetDomainMap(ctx context.Context) (model.DomainMap, error) -} - -type domainService struct { - db *Database - *db.Queries -} - -func NewDomainService(db *Database, q *db.Queries) *domainService { - return &domainService{db, q} -} diff --git a/api/internal/servicev2/equivalency_table.go b/api/internal/servicev2/equivalency_table.go deleted file mode 100644 index 812b86dc..00000000 --- a/api/internal/servicev2/equivalency_table.go +++ /dev/null @@ -1,113 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type EquivalencyTableService interface { - GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (model.EquivalencyTable, error) - CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) - UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) - DeleteEquivalencyTable(ctx context.Context, dataloggerTableID 
uuid.UUID) error - DeleteEquivalencyTableRow(ctx context.Context, rowID uuid.UUID) error - GetIsValidDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error -} - -type equivalencyTableService struct { - db *Database - *db.Queries -} - -func NewEquivalencyTableService(db *Database, q *db.Queries) *equivalencyTableService { - return &equivalencyTableService{db, q} -} - -// CreateEquivalencyTable creates EquivalencyTable rows -// If a row with the given datalogger id or field name already exists the row will be ignored -func (s equivalencyTableService) CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { - var a db.VDataloggerEquivalencyTable - - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - for _, r := range t.Rows { - if r.TimeseriesID != nil { - valid, err := qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID) - if err != nil { - return a, err - } - if !valid { - return a, errors.New("equivalency table timeseries invalid") - } - } - if err := qtx.CreateOrUpdateEquivalencyTableRow(ctx, db.CreateOrUpdateEquivalencyTableRowParams{ - DataloggerID: t.DataloggerID, - DataloggerTableID: &t.DataloggerTableID, - FieldName: r.FieldName, - DisplayName: &r.DisplayName, - InstrumentID: r.InstrumentID, - TimeseriesID: r.TimeseriesID, - }); err != nil { - return a, err - } - } - - eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) - if err != nil { - return a, err - } - - if err := tx.Commit(ctx); err != nil { - return a, err - } - - return eqt, nil -} - -// UpdateEquivalencyTable updates rows of an EquivalencyTable -func (s equivalencyTableService) UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { - var a db.VDataloggerEquivalencyTable - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - - qtx 
:= s.WithTx(tx) - - for _, r := range t.Rows { - if r.TimeseriesID != nil { - valid, err := qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID) - if err != nil { - return a, err - } - if !valid { - return a, errors.New("equivalency table timeseries invalid") - } - } - if err := qtx.UpdateEquivalencyTableRow(ctx, db.UpdateEquivalencyTableRowParams{ - ID: r.ID, - FieldName: r.FieldName, - DisplayName: &r.DisplayName, - }); err != nil { - return a, err - } - } - - eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) - - if err := tx.Commit(ctx); err != nil { - return a, err - } - - return eqt, nil -} diff --git a/api/internal/servicev2/evaluation.go b/api/internal/servicev2/evaluation.go deleted file mode 100644 index 7a585e57..00000000 --- a/api/internal/servicev2/evaluation.go +++ /dev/null @@ -1,171 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type EvaluationService interface { - ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]model.Evaluation, error) - ListProjectEvaluationsByAlertConfig(ctx context.Context, projectID, alertConfigID uuid.UUID) ([]model.Evaluation, error) - ListInstrumentEvaluations(ctx context.Context, instrumentID uuid.UUID) ([]model.Evaluation, error) - GetEvaluation(ctx context.Context, evaluationID uuid.UUID) (model.Evaluation, error) - RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error - CreateEvaluation(ctx context.Context, ev model.Evaluation) (model.Evaluation, error) - UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (model.Evaluation, error) - DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error -} - -type evaluationService struct { - db *Database - *db.Queries -} - -func NewEvaluationService(db *Database, q *db.Queries) *evaluationService { - return &evaluationService{db, q} 
-} - -func (s evaluationService) RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - sub, err := qtx.CompleteEvaluationSubmittal(ctx, subID) - if err != nil { - return err - } - // Create next submittal if submitted on-time - // late submittals will have already generated next submittal - if sub.SubmittalStatusID != nil && *sub.SubmittalStatusID == model.GreenSubmittalStatusID { - if err := qtx.CreateNextEvaluationSubmittal(ctx, subID); err != nil { - return err - } - } - return tx.Commit(ctx) -} - -func (s evaluationService) CreateEvaluation(ctx context.Context, ev model.Evaluation) (db.VEvaluation, error) { - var a db.VEvaluation - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if ev.SubmittalID != nil { - sub, err := qtx.CompleteEvaluationSubmittal(ctx, *ev.SubmittalID) - if err != nil { - return a, err - } - // Create next submittal if submitted on-time - // late submittals will have already generated next submittal - if sub.SubmittalStatusID != nil && *sub.SubmittalStatusID == model.GreenSubmittalStatusID { - qtx.CreateNextEvaluationSubmittal(ctx, *ev.SubmittalID) - } - } - evID, err := qtx.CreateEvaluation(ctx, db.CreateEvaluationParams{ - ProjectID: ev.ProjectID, - SubmittalID: ev.SubmittalID, - Name: ev.Name, - Body: ev.Body, - StartDate: ev.StartDate, - EndDate: ev.EndDate, - Creator: ev.CreatorID, - CreateDate: ev.CreateDate, - }) - if err != nil { - return a, err - } - args := make([]db.CreateEvaluationInstrumentsBatchParams, len(ev.Instruments)) - for idx, aci := range ev.Instruments { - args[idx] = db.CreateEvaluationInstrumentsBatchParams{ - EvaluationID: &evID, - InstrumentID: &aci.InstrumentID, - } - } - qtx.CreateEvaluationInstrumentsBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return a, err - } - a, err = 
qtx.GetEvaluation(ctx, evID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - - return a, nil -} - -func (s evaluationService) UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (db.VEvaluation, error) { - var a db.VEvaluation - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := qtx.UpdateEvaluation(ctx, db.UpdateEvaluationParams{ - ID: ev.ID, - ProjectID: ev.ProjectID, - Name: ev.Name, - Body: ev.Body, - StartDate: ev.StartDate, - EndDate: ev.EndDate, - Updater: ev.UpdaterID, - UpdateDate: ev.UpdateDate, - }); err != nil { - return a, err - } - if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, &ev.ID); err != nil { - return a, err - } - args := make([]db.CreateEvaluationInstrumentsBatchParams, len(ev.Instruments)) - for idx, aci := range ev.Instruments { - args[idx] = db.CreateEvaluationInstrumentsBatchParams{ - EvaluationID: &evaluationID, - InstrumentID: &aci.InstrumentID, - } - } - qtx.CreateEvaluationInstrumentsBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return a, err - } - - a, err = qtx.GetEvaluation(ctx, ev.ID) - if err != nil { - return a, err - } - - if err := tx.Commit(ctx); err != nil { - return a, err - } - return a, nil -} - -func (s evaluationService) DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, &evaluationID); err != nil { - return err - } - if err := qtx.DeleteEvaluation(ctx, evaluationID); err != nil { - return err - } - - return tx.Commit(ctx) -} diff --git a/api/internal/servicev2/heartbeat.go b/api/internal/servicev2/heartbeat.go deleted file mode 100644 index 0c9727f0..00000000 --- a/api/internal/servicev2/heartbeat.go +++ /dev/null @@ -1,23 +0,0 @@ -package 
servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type HeartbeatService interface { - DoHeartbeat(ctx context.Context) (model.Heartbeat, error) - GetLatestHeartbeat(ctx context.Context) (model.Heartbeat, error) - ListHeartbeats(ctx context.Context) ([]model.Heartbeat, error) -} - -type heartbeatService struct { - db *Database - *db.Queries -} - -func NewHeartbeatService(db *Database, q *db.Queries) *heartbeatService { - return &heartbeatService{db, q} -} diff --git a/api/internal/servicev2/home.go b/api/internal/servicev2/home.go deleted file mode 100644 index 23e57cb4..00000000 --- a/api/internal/servicev2/home.go +++ /dev/null @@ -1,21 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type HomeService interface { - GetHome(ctx context.Context) (model.Home, error) -} - -type homeService struct { - db *Database - *db.Queries -} - -func NewHomeService(db *Database, q *db.Queries) *homeService { - return &homeService{db, q} -} diff --git a/api/internal/servicev2/instrument.go b/api/internal/servicev2/instrument.go deleted file mode 100644 index fc3d020d..00000000 --- a/api/internal/servicev2/instrument.go +++ /dev/null @@ -1,235 +0,0 @@ -package servicev2 - -import ( - "context" - "slices" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" - "github.com/twpayne/go-geom/encoding/geojson" -) - -type InstrumentService interface { - ListInstruments(ctx context.Context) ([]model.Instrument, error) - GetInstrument(ctx context.Context, instrumentID uuid.UUID) (model.Instrument, error) - GetInstrumentCount(ctx context.Context) (model.InstrumentCount, error) - CreateInstrument(ctx context.Context, i model.Instrument) (model.IDSlugName, error) - 
CreateInstruments(ctx context.Context, instruments []model.Instrument) ([]model.IDSlugName, error) - UpdateInstrument(ctx context.Context, projectID uuid.UUID, i model.Instrument) (model.Instrument, error) - UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p model.Profile) (model.Instrument, error) - DeleteFlagInstrument(ctx context.Context, projectID, instrumentID uuid.UUID) error -} - -type instrumentService struct { - db *Database - *db.Queries -} - -func NewInstrumentService(db *Database, q *db.Queries) *instrumentService { - return &instrumentService{db, q} -} - -var ( - saaTypeID = uuid.MustParse("07b91c5c-c1c5-428d-8bb9-e4c93ab2b9b9") - ipiTypeID = uuid.MustParse("c81f3a5d-fc5f-47fd-b545-401fe6ee63bb") -) - -type requestType int - -const ( - create requestType = iota - update -) - -func (s instrumentService) CreateInstruments(ctx context.Context, ii []model.Instrument) ([]db.CreateInstrumentsBatchRow, error) { - tx, err := s.db.Begin(ctx) - if err != nil { - return nil, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - createInstrumentsArgs := make([]db.CreateInstrumentsBatchParams, len(ii)) - assignInstrumentsProjectsArgs := make([][]db.AssignInstrumentToProjectBatchParams, len(ii)) - instrumentStatusArgs := make([]db.CreateOrUpdateInstrumentStatusBatchParams, len(ii)) - instrumentAwareArgs := make([]db.CreateAwarePlatformBatchParams, 0) - - for idx, inst := range ii { - createInstrumentsArgs[idx] = db.CreateInstrumentsBatchParams{ - Name: inst.Name, - TypeID: inst.TypeID, - Geometry: inst.Geometry, - Station: inst.Station, - StationOffset: inst.StationOffset, - Creator: inst.CreatorID, - CreateDate: inst.CreateDate, - NidID: inst.NIDID, - UsgsID: inst.USGSID, - ShowCwmsTab: inst.ShowCwmsTab, - } - } - newInstruments := make([]db.CreateInstrumentsBatchRow, len(createInstrumentsArgs)) - qtx.CreateInstrumentsBatch(ctx, createInstrumentsArgs).QueryRow(func(idx int, r 
db.CreateInstrumentsBatchRow, e error) { - if e != nil { - err = e - return - } - assignInstrumentsProjectsArgs[idx] = make([]db.AssignInstrumentToProjectBatchParams, len(ii[idx].Projects)) - for j, p := range ii[idx].Projects { - assignInstrumentsProjectsArgs[idx][j] = db.AssignInstrumentToProjectBatchParams{ - InstrumentID: r.ID, - ProjectID: p.ID, - } - } - instrumentStatusArgs[idx] = db.CreateOrUpdateInstrumentStatusBatchParams{ - InstrumentID: r.ID, - StatusID: ii[idx].StatusID, - Time: ii[idx].StatusTime, - } - if ii[idx].AwareID != nil { - instrumentAwareArgs = append(instrumentAwareArgs, db.CreateAwarePlatformBatchParams{ - InstrumentID: &r.ID, - AwareID: *ii[idx].AwareID, - }) - } - newInstruments[idx] = r - }) - if err != nil { - return nil, err - } - qtx.AssignInstrumentToProjectBatch(ctx, slices.Concat(assignInstrumentsProjectsArgs...)).Exec(batchExecErr(&err)) - if err != nil { - return nil, err - } - qtx.CreateOrUpdateInstrumentStatusBatch(ctx, instrumentStatusArgs).Exec(batchExecErr(&err)) - if err != nil { - return nil, err - } - qtx.CreateAwarePlatformBatch(ctx, instrumentAwareArgs).Exec(batchExecErr(&err)) - if err != nil { - return nil, err - } - if err := handleOptsBatch(ctx, qtx, ii, create); err != nil { - return nil, err - } - - if err := tx.Commit(ctx); err != nil { - return nil, err - } - - return newInstruments, nil -} - -// UpdateInstrument updates a single instrument -func (s instrumentService) UpdateInstrument(ctx context.Context, projectID uuid.UUID, inst model.Instrument) (db.VInstrument, error) { - var a db.VInstrument - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := qtx.UpdateInstrument(ctx, db.UpdateInstrumentParams{ - ProjectID: projectID, - ID: inst.ID, - Name: inst.Name, - TypeID: inst.TypeID, - Geometry: inst.Geometry, - Updater: inst.UpdaterID, - UpdateDate: inst.UpdateDate, - Station: inst.Station, - StationOffset: inst.StationOffset, - 
NidID: inst.NIDID, - UsgsID: inst.USGSID, - ShowCwmsTab: inst.ShowCwmsTab, - }); err != nil { - return a, err - } - if err := qtx.CreateOrUpdateInstrumentStatus(ctx, db.CreateOrUpdateInstrumentStatusParams{ - InstrumentID: inst.ID, - StatusID: inst.StatusID, - Time: inst.StatusTime, - }); err != nil { - return a, err - } - if err := handleOptsBatch(ctx, qtx, []model.Instrument{inst}, update); err != nil { - return a, err - } - a, err = qtx.GetInstrument(ctx, inst.ID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - - return a, nil -} - -func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geometry db.Geometry, p model.Profile) (db.VInstrument, error) { - var a db.VInstrument - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if _, err := qtx.UpdateInstrumentGeometry(ctx, db.UpdateInstrumentGeometryParams{ - ProjectID: projectID, - ID: instrumentID, - Geometry: geometry, - Updater: &p.ID, - }); err != nil { - return a, err - } - a, err = qtx.GetInstrument(ctx, instrumentID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - return a, nil -} - -func handleOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument, rt requestType) error { - saa := make([]model.Instrument, 0) - ipi := make([]model.Instrument, 0) - for _, inst := range ii { - switch inst.TypeID { - case saaTypeID: - saa = append(saa, inst) - case ipiTypeID: - ipi = append(ipi, inst) - default: - } - } - if len(saa) != 0 { - var err error - switch rt { - case create: - err = createSaaOptsBatch(ctx, q, saa) - case update: - err = updateSaaOptsBatch(ctx, q, saa) - } - if err != nil { - return err - } - } - if len(ipi) != 0 { - var err error - switch rt { - case create: - err = createIpiOptsBatch(ctx, q, ipi) - case update: - err = updateIpiOptsBatch(ctx, q, ipi) - } - if 
err != nil { - return err - } - } - return nil -} diff --git a/api/internal/servicev2/instrument_assign.go b/api/internal/servicev2/instrument_assign.go deleted file mode 100644 index e9e6f424..00000000 --- a/api/internal/servicev2/instrument_assign.go +++ /dev/null @@ -1,329 +0,0 @@ -package servicev2 - -import ( - "context" - "fmt" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InstrumentAssignService interface { - AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - ValidateInstrumentNamesProjectUnique(ctx context.Context, projectID uuid.UUID, instrumentNames []string) (model.InstrumentsValidation, error) - ValidateProjectsInstrumentNameUnique(ctx context.Context, instrumentName string, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) -} - -type instrumentAssignService struct { - db *Database - *db.Queries -} - -func NewInstrumentAssignService(db *Database, q *db.Queries) *instrumentAssignService { - return &instrumentAssignService{db, q} -} - -type ReasonCode int - -const ( - None ReasonCode = iota - Unauthorized - InvalidName - InvalidUnassign -) - -type InstrumentsValidation struct { - ReasonCode ReasonCode `json:"-"` - IsValid bool `json:"is_valid"` - Errors []string `json:"errors"` -} - -func (s instrumentAssignService) AssignProjectsToInstrument(ctx 
context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { - var a InstrumentsValidation - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - v, err := assignProjectsToInstrument(ctx, qtx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid || dryRun { - return v, err - } - return v, tx.Commit(ctx) -} - -func (s instrumentAssignService) UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { - var a InstrumentsValidation - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - v, err := unassignProjectsFromInstrument(ctx, qtx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid || dryRun { - return v, err - } - return v, tx.Commit(ctx) -} - -func (s instrumentAssignService) AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { - var a InstrumentsValidation - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - v, err := assignInstrumentsToProject(ctx, qtx, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid || dryRun { - return v, err - } - return v, tx.Commit(ctx) -} - -func (s instrumentAssignService) UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { - var a InstrumentsValidation - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - v, err := unassignInstrumentsFromProject(ctx, qtx, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid || dryRun { - return v, err - } - 
return v, tx.Commit(ctx) -} - -func validateProjectsAssignerAuthorized(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - var a InstrumentsValidation - nn, err := q.ValidateProjectsAssignerAuthorized(ctx, db.ValidateProjectsAssignerAuthorizedParams{ - InstrumentID: instrumentID, - ProjectIds: projectIDs, - ProfileID: profileID, - }) - if err != nil { - return a, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Cannot assign instrument to project '%s' because the user is not an ADMIN of this project", - nn[idx], - ) - } - a.Errors = vErrors - a.ReasonCode = Unauthorized - } else { - a.IsValid = true - a.Errors = make([]string, 0) - } - return a, err -} - -func validateInstrumentsAssignerAuthorized(ctx context.Context, q *db.Queries, profileID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { - var a InstrumentsValidation - nn, err := q.ValidateInstrumentsAssignerAuthorized(ctx, db.ValidateInstrumentsAssignerAuthorizedParams{ - InstrumentIds: instrumentIDs, - ProfileID: profileID, - }) - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Cannot assign instrument '%s' because is assigned to another project '%s' which the user is not an ADMIN of", - nn[idx].InstrumentName, nn[idx].ProjectName, - ) - } - a.Errors = vErrors - a.ReasonCode = Unauthorized - } else { - a.IsValid = true - a.Errors = make([]string, 0) - } - return a, err -} - -func validateProjectsInstrumentNameUnique(ctx context.Context, q *db.Queries, instrumentName string, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - var a InstrumentsValidation - nn, err := q.ValidateProjectsInstrumentNameUnique(ctx, db.ValidateProjectsInstrumentNameUniqueParams{ - InstrumentName: instrumentName, - ProjectIds: projectIDs, - }) - if err != nil { - return a, err - } - if len(nn) != 
0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Instrument name '%s' is already taken. Instrument names must be unique within associated projects", - nn[idx], - ) - } - a.Errors = vErrors - a.ReasonCode = InvalidName - } else { - a.IsValid = true - a.Errors = make([]string, 0) - } - return a, err -} - -func validateInstrumentNamesProjectUnique(ctx context.Context, q *db.Queries, projectID uuid.UUID, instrumentNames []string) (InstrumentsValidation, error) { - var a InstrumentsValidation - nn, err := q.ValidateInstrumentNamesProjectUnique(ctx, db.ValidateInstrumentNamesProjectUniqueParams{ - ProjectID: projectID, - InstrumentNames: instrumentNames, - }) - if err != nil { - return a, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Instrument name '%s' is already taken. Instrument names must be unique within associated projects", - nn[idx], - ) - } - a.Errors = vErrors - a.ReasonCode = InvalidName - } else { - a.IsValid = true - a.Errors = make([]string, 0) - } - return a, err -} - -func validateAssignProjectsToInstrument(ctx context.Context, q *db.Queries, profileID uuid.UUID, instrument db.VInstrument, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - v, err := validateProjectsAssignerAuthorized(ctx, q, profileID, instrument.ID, projectIDs) - if err != nil || !v.IsValid { - return v, err - } - return validateProjectsInstrumentNameUnique(ctx, q, instrument.Name, projectIDs) -} - -func validateAssignInstrumentsToProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { - var a InstrumentsValidation - iIDNames, err := q.ListInstrumentIDNamesByIDs(ctx, instrumentIDs) - if err != nil { - return a, err - } - iIDs := make([]uuid.UUID, len(iIDNames)) - iNames := make([]string, len(iIDNames)) - for idx := range iIDNames { - iIDs[idx] = iIDNames[idx].ID - iNames[idx] = 
iIDNames[idx].Name - } - return validateInstrumentsAssignerAuthorized(ctx, q, profileID, iIDs) -} - -func assignProjectsToInstrument(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - var a InstrumentsValidation - instrument, err := q.GetInstrument(ctx, instrumentID) - if err != nil { - return a, err - } - v, err := validateAssignProjectsToInstrument(ctx, q, profileID, instrument, projectIDs) - if err != nil || !v.IsValid { - return v, err - } - for _, pID := range projectIDs { - if err := q.AssignInstrumentToProject(ctx, db.AssignInstrumentToProjectParams{ - ProjectID: pID, - InstrumentID: instrumentID, - }); err != nil { - return a, err - } - } - return v, nil -} - -func unassignProjectsFromInstrument(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - var a InstrumentsValidation - var err error - a, err = validateProjectsAssignerAuthorized(ctx, q, profileID, instrumentID, projectIDs) - if err != nil || !a.IsValid { - return a, err - } - args := make([]db.UnassignInstrumentFromProjectBatchParams, len(projectIDs)) - for idx := range projectIDs { - args[idx] = db.UnassignInstrumentFromProjectBatchParams{ - ProjectID: projectIDs[idx], - InstrumentID: instrumentID, - } - } - q.UnassignInstrumentFromProjectBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return a, err - } - return a, nil -} - -func assignInstrumentsToProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { - var a InstrumentsValidation - var err error - a, err = validateAssignInstrumentsToProject(ctx, q, profileID, projectID, instrumentIDs) - if err != nil || !a.IsValid { - return a, err - } - args := make([]db.AssignInstrumentToProjectBatchParams, len(instrumentIDs)) - for idx := range instrumentIDs { - args[idx] = db.AssignInstrumentToProjectBatchParams{ 
- ProjectID: projectID, - InstrumentID: instrumentIDs[idx], - } - } - q.AssignInstrumentToProjectBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return a, err - } - return a, nil -} - -func unassignInstrumentsFromProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { - var a InstrumentsValidation - var err error - a, err = validateInstrumentsAssignerAuthorized(ctx, q, profileID, instrumentIDs) - if err != nil || !a.IsValid { - return a, err - } - cc, err := q.ListProjectCountForInstruments(ctx, instrumentIDs) - if err != nil { - return a, err - } - args := make([]db.UnassignInstrumentFromProjectBatchParams, 0) - for _, count := range cc { - if count.ProjectCount < 1 { - // invalid instrument, skipping - continue - } - if count.ProjectCount == 1 { - a.IsValid = false - a.ReasonCode = InvalidUnassign - a.Errors = append(a.Errors, fmt.Sprintf("cannot unassign instruments from project, all instruments must have at least one project assinment (%s is only assign to this project)", count.InstrumentName)) - } - args = append(args, db.UnassignInstrumentFromProjectBatchParams{ - ProjectID: projectID, - InstrumentID: count.InstrumentID, - }) - } - q.UnassignInstrumentFromProjectBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return a, err - } - return a, nil -} diff --git a/api/internal/servicev2/instrument_constant.go b/api/internal/servicev2/instrument_constant.go deleted file mode 100644 index 8b1994f5..00000000 --- a/api/internal/servicev2/instrument_constant.go +++ /dev/null @@ -1,100 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InstrumentConstantService interface { - ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]model.Timeseries, error) - CreateInstrumentConstant(ctx 
context.Context, instrumentID, timeseriesID uuid.UUID) error - CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) - DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error -} - -type instrumentConstantService struct { - db *Database - *db.Queries -} - -func NewInstrumentConstantService(db *Database, q *db.Queries) *instrumentConstantService { - return &instrumentConstantService{db, q} -} - -// CreateInstrumentConstants creates many instrument constants from an array of instrument constants -// An InstrumentConstant is structurally the same as a timeseries and saved in the same tables -func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]db.CreateTimeseriesBatchRow, error) { - tx, err := s.db.Begin(ctx) - if err != nil { - return nil, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - createTimeseriesParams := make([]db.CreateTimeseriesBatchParams, len(tt)) - for idx, t := range tt { - t.Type = model.ConstantTimeseriesType - createTimeseriesParams[idx] = db.CreateTimeseriesBatchParams{ - InstrumentID: &t.InstrumentID, - Name: t.Name, - ParameterID: t.ParameterID, - UnitID: t.UnitID, - Type: db.NullTimeseriesType{ - TimeseriesType: db.TimeseriesTypeConstant, - Valid: true, - }, - } - } - uu := make([]db.CreateTimeseriesBatchRow, len(createTimeseriesParams)) - createConstantsParams := make([]db.CreateInstrumentConstantBatchParams, len(createTimeseriesParams)) - qtx.CreateTimeseriesBatch(ctx, createTimeseriesParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - if r.InstrumentID == nil { - err = errors.New("instrument id must not be nil") - } - createConstantsParams[i] = db.CreateInstrumentConstantBatchParams{ - InstrumentID: *r.InstrumentID, - TimeseriesID: r.ID, - } - uu[i] = r - }) - if err != nil { - return nil, err - } - qtx.CreateInstrumentConstantBatch(ctx, 
createConstantsParams).Exec(batchExecErr(&err)) - if err != nil { - return nil, err - } - if err := tx.Commit(ctx); err != nil { - return nil, err - } - return uu, nil -} - -// DeleteInstrumentConstant removes a timeseries as an Instrument Constant; Does not delete underlying timeseries -func (s instrumentConstantService) DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := qtx.DeleteInstrumentConstant(ctx, db.DeleteInstrumentConstantParams{ - InstrumentID: instrumentID, - TimeseriesID: timeseriesID, - }); err != nil { - return err - } - if err := qtx.DeleteTimeseries(ctx, timeseriesID); err != nil { - return err - } - return tx.Commit(ctx) -} diff --git a/api/internal/servicev2/instrument_group.go b/api/internal/servicev2/instrument_group.go deleted file mode 100644 index 7fb5d91d..00000000 --- a/api/internal/servicev2/instrument_group.go +++ /dev/null @@ -1,50 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InstrumentGroupService interface { - ListInstrumentGroups(ctx context.Context) ([]model.InstrumentGroup, error) - GetInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) (model.InstrumentGroup, error) - CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]model.InstrumentGroup, error) - UpdateInstrumentGroup(ctx context.Context, group model.InstrumentGroup) (model.InstrumentGroup, error) - DeleteFlagInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) error - ListInstrumentGroupInstruments(ctx context.Context, groupID uuid.UUID) ([]model.Instrument, error) - CreateInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error - 
DeleteInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error -} - -type instrumentGroupService struct { - db *Database - *db.Queries -} - -func NewInstrumentGroupService(db *Database, q *db.Queries) *instrumentGroupService { - return &instrumentGroupService{db, q} -} - -// CreateInstrumentGroup creates many instruments from an array of instruments -func (s instrumentGroupService) CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]db.CreateInstrumentGroupsBatchRow, error) { - args := make([]db.CreateInstrumentGroupsBatchParams, len(groups)) - for idx, g := range groups { - args[idx] = db.CreateInstrumentGroupsBatchParams{ - Name: g.Name, - Description: &g.Description, - Creator: g.CreatorID, - CreateDate: g.CreateDate, - ProjectID: g.ProjectID, - } - } - var err error - gg := make([]db.CreateInstrumentGroupsBatchRow, len(groups)) - s.Queries.CreateInstrumentGroupsBatch(ctx, args).QueryRow(batchQueryRowCollect(gg, &err)) - if err != nil { - return nil, err - } - return gg, nil -} diff --git a/api/internal/servicev2/instrument_incl.go b/api/internal/servicev2/instrument_incl.go deleted file mode 100644 index 33d3de27..00000000 --- a/api/internal/servicev2/instrument_incl.go +++ /dev/null @@ -1,211 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - "fmt" - "slices" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InclInstrumentService interface { - GetAllInclSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.InclSegment, error) - UpdateInclSegment(ctx context.Context, seg model.InclSegment) error - UpdateInclSegments(ctx context.Context, segs []model.InclSegment) error - GetInclMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.InclMeasurements, error) -} - -type inclInstrumentService 
struct { - db *Database - *db.Queries -} - -func NewInclInstrumentService(db *Database, q *db.Queries) *inclInstrumentService { - return &inclInstrumentService{db, q} -} - -func (s inclInstrumentService) UpdateInclSegments(ctx context.Context, segs []model.InclSegment) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - updateInclArgs := make([]db.UpdateInclSegmentsBatchParams, len(segs)) - createMmtArgs := make([]db.CreateTimeseriesMeasurementsBatchParams, 0) - - for idx, seg := range segs { - updateInclArgs[idx] = db.UpdateInclSegmentsBatchParams{ - ID: int32(seg.ID), - InstrumentID: seg.InstrumentID, - DepthTimeseriesID: seg.DepthTimeseriesID, - A0TimeseriesID: seg.A0TimeseriesID, - A180TimeseriesID: seg.A180TimeseriesID, - B0TimeseriesID: seg.B0TimeseriesID, - B180TimeseriesID: seg.B180TimeseriesID, - } - if seg.Length == nil { - continue - } - createMmtArgs = append(createMmtArgs, db.CreateTimeseriesMeasurementsBatchParams{ - TimeseriesID: seg.LengthTimeseriesID, - Time: time.Now(), - Value: *seg.Length, - }) - } - qtx.UpdateInclSegmentsBatch(ctx, updateInclArgs).Exec(batchExecErr(&err)) - if err != nil { - return err - } - qtx.CreateTimeseriesMeasurementsBatch(ctx, createMmtArgs).Exec(batchExecErr(&err)) - if err != nil { - return err - } - return tx.Commit(ctx) -} - -func createInclOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) - createInclSegmentBatchParams := make([][]db.CreateInclSegmentBatchParams, len(ii)) - - createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) - createInclOptsParams := make([]db.CreateInclOptsBatchParams, len(ii)) - createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - - for idx, inst := range ii { - opts, err := model.MapToStruct[model.InclOpts](inst.Opts) - if err != nil { - 
return err - } - createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) - createInclSegmentBatchParams[idx] = make([]db.CreateInclSegmentBatchParams, opts.NumSegments) - - for i := range opts.NumSegments { - createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), - ParameterID: model.InclParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createInclSegmentBatchParams[idx][i] = db.CreateInclSegmentBatchParams{ - ID: int32(i + 1), - InstrumentID: inst.ID, - } - } - createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + "-bottom-elevation", - ParameterID: model.InclParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createInclOptsParams[idx] = db.CreateInclOptsBatchParams{ - InstrumentID: inst.ID, - NumSegments: int32(opts.NumSegments), - InitialTime: opts.InitialTime, - } - createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - Time: time.Now(), - Value: opts.BottomElevation, - } - } - - args := slices.Concat(createTimeseriesBatchParams...) - inclArgs := slices.Concat(createInclSegmentBatchParams...) 
- createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) - - var err error - q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - if r.InstrumentID == nil { - err = errors.New("new timeseries must have instrument id") - return - } - createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ - TimeseriesID: r.ID, - InstrumentID: *r.InstrumentID, - } - inclArgs[i].LengthTimeseriesID = &r.ID - }) - if err != nil { - return err - } - q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateInclSegmentBatch(ctx, inclArgs).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - createInclOptsParams[i].BottomElevationTimeseriesID = &r.ID - createBottomElevationMmtParams[i].TimeseriesID = r.ID - }) - if err != nil { - return err - } - q.CreateInclOptsBatch(ctx, createInclOptsParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - return err -} - -func updateInclOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - updateInclOptsParams := make([]db.UpdateInclOptsBatchParams, len(ii)) - createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - for idx, inst := range ii { - opts, err := model.MapToStruct[model.InclOpts](inst.Opts) - if err != nil { - return err - } - updateInclOptsParams[idx] = db.UpdateInclOptsBatchParams{ - InstrumentID: inst.ID, - BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, - 
InitialTime: opts.InitialTime, - } - createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - TimeseriesID: opts.BottomElevationTimeseriesID, - Time: time.Now(), - Value: opts.BottomElevation, - } - } - var err error - q.UpdateInclOptsBatch(ctx, updateInclOptsParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) - return err -} diff --git a/api/internal/servicev2/instrument_ipi.go b/api/internal/servicev2/instrument_ipi.go deleted file mode 100644 index ed10ffa5..00000000 --- a/api/internal/servicev2/instrument_ipi.go +++ /dev/null @@ -1,210 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - "fmt" - "slices" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type IpiInstrumentService interface { - GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.IpiSegment, error) - UpdateIpiSegment(ctx context.Context, seg model.IpiSegment) error - UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error - GetIpiMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.IpiMeasurements, error) -} - -type ipiInstrumentService struct { - db *Database - *db.Queries -} - -func NewIpiInstrumentService(db *Database, q *db.Queries) *ipiInstrumentService { - return &ipiInstrumentService{db, q} -} - -func (s ipiInstrumentService) UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - updateIpiArgs := make([]db.UpdateIpiSegmentsBatchParams, len(segs)) - createMmtArgs := make([]db.CreateTimeseriesMeasurementsBatchParams, 0) - - for idx, seg := range segs { - updateIpiArgs[idx] = db.UpdateIpiSegmentsBatchParams{ - ID: int32(seg.ID), - 
InstrumentID: seg.InstrumentID, - LengthTimeseriesID: &seg.LengthTimeseriesID, - TiltTimeseriesID: seg.TiltTimeseriesID, - IncDevTimeseriesID: seg.IncDevTimeseriesID, - TempTimeseriesID: seg.TempTimeseriesID, - } - if seg.Length == nil { - continue - } - createMmtArgs = append(createMmtArgs, db.CreateTimeseriesMeasurementsBatchParams{ - TimeseriesID: seg.LengthTimeseriesID, - Time: time.Now(), - Value: *seg.Length, - }) - } - qtx.UpdateIpiSegmentsBatch(ctx, updateIpiArgs).Exec(batchExecErr(&err)) - if err != nil { - return err - } - qtx.CreateTimeseriesMeasurementsBatch(ctx, createMmtArgs).Exec(batchExecErr(&err)) - if err != nil { - return err - } - return tx.Commit(ctx) -} - -func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) - createIpiSegmentBatchParams := make([][]db.CreateIpiSegmentBatchParams, len(ii)) - - createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) - createIpiOptsParams := make([]db.CreateIpiOptsBatchParams, len(ii)) - createBottomElevationMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - - for idx, inst := range ii { - opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) - if err != nil { - return err - } - createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) - createIpiSegmentBatchParams[idx] = make([]db.CreateIpiSegmentBatchParams, opts.NumSegments) - - for i := range opts.NumSegments { - createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createIpiSegmentBatchParams[idx][i] = db.CreateIpiSegmentBatchParams{ - ID: int32(i + 1), - InstrumentID: inst.ID, - } - } - 
createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + "-bottom-elevation", - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createIpiOptsParams[idx] = db.CreateIpiOptsBatchParams{ - InstrumentID: inst.ID, - NumSegments: int32(opts.NumSegments), - InitialTime: opts.InitialTime, - } - createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - Time: time.Now(), - Value: opts.BottomElevation, - } - } - - args := slices.Concat(createTimeseriesBatchParams...) - ipiArgs := slices.Concat(createIpiSegmentBatchParams...) - createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) - - var err error - q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - if r.InstrumentID == nil { - err = errors.New("new timeseries must have instrument id") - return - } - createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ - TimeseriesID: r.ID, - InstrumentID: *r.InstrumentID, - } - ipiArgs[i].LengthTimeseriesID = &r.ID - }) - if err != nil { - return err - } - q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateIpiSegmentBatch(ctx, ipiArgs).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - createIpiOptsParams[i].BottomElevationTimeseriesID = &r.ID - createBottomElevationMmtParams[i].TimeseriesID = r.ID - }) - if err != nil { - return err - } - q.CreateIpiOptsBatch(ctx, createIpiOptsParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - if err != nil { - 
return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { - if e != nil { - err = e - return - } - }) - return err -} - -func updateIpiOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - updateIpiOptsParams := make([]db.UpdateIpiOptsBatchParams, len(ii)) - createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - for idx, inst := range ii { - opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) - if err != nil { - return err - } - updateIpiOptsParams[idx] = db.UpdateIpiOptsBatchParams{ - InstrumentID: inst.ID, - BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, - InitialTime: opts.InitialTime, - } - createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - TimeseriesID: opts.BottomElevationTimeseriesID, - Time: time.Now(), - Value: opts.BottomElevation, - } - } - var err error - q.UpdateIpiOptsBatch(ctx, updateIpiOptsParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) - return err -} diff --git a/api/internal/servicev2/instrument_note.go b/api/internal/servicev2/instrument_note.go deleted file mode 100644 index 17f24044..00000000 --- a/api/internal/servicev2/instrument_note.go +++ /dev/null @@ -1,59 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InstrumentNoteService interface { - ListInstrumentNotes(ctx context.Context) ([]model.InstrumentNote, error) - ListInstrumentInstrumentNotes(ctx context.Context, instrumentID uuid.UUID) ([]model.InstrumentNote, error) - GetInstrumentNote(ctx context.Context, noteID uuid.UUID) (model.InstrumentNote, error) - CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]model.InstrumentNote, error) - 
UpdateInstrumentNote(ctx context.Context, n model.InstrumentNote) (model.InstrumentNote, error) - DeleteInstrumentNote(ctx context.Context, noteID uuid.UUID) error -} - -type instrumentNoteService struct { - db *Database - *db.Queries -} - -func NewInstrumentNoteService(db *Database, q *db.Queries) *instrumentNoteService { - return &instrumentNoteService{db, q} -} - -// CreateInstrumentNote creates many instrument notes from an array of instrument notes -func (s instrumentNoteService) CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]db.InstrumentNote, error) { - tx, err := s.db.Begin(ctx) - if err != nil { - return nil, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - args := make([]db.CreateInstrumentNoteBatchParams, len(notes)) - for idx, n := range notes { - args[idx] = db.CreateInstrumentNoteBatchParams{ - InstrumentID: n.InstrumentID, - Title: n.Title, - Body: n.Body, - Time: n.Time, - Creator: n.CreatorID, - CreateDate: n.CreateDate, - } - } - nn := make([]db.InstrumentNote, len(args)) - qtx.CreateInstrumentNoteBatch(ctx, args).QueryRow(batchQueryRowCollect(nn, &err)) - if err != nil { - return nil, err - } - if err := tx.Commit(ctx); err != nil { - return nil, err - } - - return nn, nil -} diff --git a/api/internal/servicev2/instrument_saa.go b/api/internal/servicev2/instrument_saa.go deleted file mode 100644 index 3b1702ea..00000000 --- a/api/internal/servicev2/instrument_saa.go +++ /dev/null @@ -1,200 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - "fmt" - "slices" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type SaaInstrumentService interface { - GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.SaaSegment, error) - UpdateSaaSegment(ctx context.Context, seg model.SaaSegment) error - UpdateSaaSegments(ctx context.Context, segs []model.SaaSegment) 
error - GetSaaMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.SaaMeasurements, error) -} - -type saaInstrumentService struct { - db *Database - *db.Queries -} - -func NewSaaInstrumentService(db *Database, q *db.Queries) *saaInstrumentService { - return &saaInstrumentService{db, q} -} - -func (s saaInstrumentService) UpdateSaaSegments(ctx context.Context, segs []model.SaaSegment) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - updateSaaSegParams := make([]db.UpdateSaaSegmentBatchParams, len(segs)) - createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, 0) - for idx, seg := range segs { - updateSaaSegParams[idx] = db.UpdateSaaSegmentBatchParams{ - ID: int32(seg.ID), - InstrumentID: seg.InstrumentID, - LengthTimeseriesID: &seg.LengthTimeseriesID, - XTimeseriesID: seg.XTimeseriesID, - YTimeseriesID: seg.YTimeseriesID, - ZTimeseriesID: seg.ZTimeseriesID, - TempTimeseriesID: seg.TempTimeseriesID, - } - if seg.Length == nil { - continue - } - createMmtParams = append(createMmtParams, db.CreateTimeseriesMeasurementsBatchParams{ - TimeseriesID: seg.LengthTimeseriesID, - Time: time.Now(), - Value: *seg.Length, - }) - } - qtx.UpdateSaaSegmentBatch(ctx, updateSaaSegParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - qtx.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - return tx.Commit(ctx) -} - -func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - createTimeseriesBatchParams := make([][]db.CreateTimeseriesBatchParams, len(ii)) - createSaaSegmentBatchParams := make([][]db.CreateSaaSegmentBatchParams, len(ii)) - - createBottomElevationTsParams := make([]db.CreateTimeseriesBatchParams, len(ii)) - createSaaOptsParams := make([]db.CreateSaaOptsBatchParams, len(ii)) - createBottomElevationMmtParams := 
make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - - for idx, inst := range ii { - opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) - if err != nil { - return err - } - createTimeseriesBatchParams[idx] = make([]db.CreateTimeseriesBatchParams, opts.NumSegments) - createSaaSegmentBatchParams[idx] = make([]db.CreateSaaSegmentBatchParams, opts.NumSegments) - - for i := range opts.NumSegments { - createTimeseriesBatchParams[idx][i] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createSaaSegmentBatchParams[idx][i] = db.CreateSaaSegmentBatchParams{ - ID: int32(i + 1), - InstrumentID: inst.ID, - } - } - createBottomElevationTsParams[idx] = db.CreateTimeseriesBatchParams{ - InstrumentID: &inst.ID, - Name: inst.Slug + "-bottom-elevation", - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, - } - createSaaOptsParams[idx] = db.CreateSaaOptsBatchParams{ - InstrumentID: inst.ID, - NumSegments: int32(opts.NumSegments), - InitialTime: opts.InitialTime, - } - createBottomElevationMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - Time: time.Now(), - Value: opts.BottomElevation, - } - } - - args := slices.Concat(createTimeseriesBatchParams...) - saaArgs := slices.Concat(createSaaSegmentBatchParams...) 
- createInstrumentConstantBatchParams := make([]db.CreateInstrumentConstantBatchParams, len(args)) - - var err error - q.CreateTimeseriesBatch(ctx, args).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - if r.InstrumentID == nil { - err = errors.New("new timeseries must have instrument id") - return - } - createInstrumentConstantBatchParams[i] = db.CreateInstrumentConstantBatchParams{ - TimeseriesID: r.ID, - InstrumentID: *r.InstrumentID, - } - saaArgs[i].LengthTimeseriesID = &r.ID - }) - if err != nil { - return err - } - q.CreateInstrumentConstantBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateSaaSegmentBatch(ctx, saaArgs).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateTimeseriesBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.CreateTimeseriesBatchRow, e error) { - if e != nil { - err = e - return - } - createSaaOptsParams[i].BottomElevationTimeseriesID = &r.ID - createBottomElevationMmtParams[i].TimeseriesID = r.ID - }) - if err != nil { - return err - } - q.CreateSaaOptsBatch(ctx, createSaaOptsParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createBottomElevationMmtParams).Exec(batchExecErr(&err)) - return err -} - -func updateSaaOptsBatch(ctx context.Context, q *db.Queries, ii []model.Instrument) error { - updateSaaOptsParams := make([]db.UpdateSaaOptsBatchParams, len(ii)) - createMmtParams := make([]db.CreateTimeseriesMeasurementsBatchParams, len(ii)) - for idx, inst := range ii { - opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) - if err != nil { - return err - } - updateSaaOptsParams[idx] = db.UpdateSaaOptsBatchParams{ - InstrumentID: inst.ID, - BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, - InitialTime: opts.InitialTime, - } - createMmtParams[idx] = db.CreateTimeseriesMeasurementsBatchParams{ - 
TimeseriesID: opts.BottomElevationTimeseriesID, - Time: time.Now(), - Value: opts.BottomElevation, - } - } - var err error - q.UpdateSaaOptsBatch(ctx, updateSaaOptsParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - q.CreateTimeseriesMeasurementsBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) - return err -} diff --git a/api/internal/servicev2/instrument_status.go b/api/internal/servicev2/instrument_status.go deleted file mode 100644 index 153890cd..00000000 --- a/api/internal/servicev2/instrument_status.go +++ /dev/null @@ -1,39 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InstrumentStatusService interface { - ListInstrumentStatus(ctx context.Context, instrumentID uuid.UUID) ([]model.InstrumentStatus, error) - GetInstrumentStatus(ctx context.Context, statusID uuid.UUID) (model.InstrumentStatus, error) - CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error - DeleteInstrumentStatus(ctx context.Context, statusID uuid.UUID) error -} - -type instrumentStatusService struct { - db *Database - *db.Queries -} - -func NewInstrumentStatusService(db *Database, q *db.Queries) *instrumentStatusService { - return &instrumentStatusService{db, q} -} - -func (s instrumentStatusService) CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error { - args := make([]db.CreateOrUpdateInstrumentStatusBatchParams, len(ss)) - for idx, st := range ss { - args[idx] = db.CreateOrUpdateInstrumentStatusBatchParams{ - InstrumentID: instrumentID, - StatusID: st.StatusID, - Time: st.Time, - } - } - var err error - s.Queries.CreateOrUpdateInstrumentStatusBatch(ctx, args).Exec(batchExecErr(&err)) - return err -} diff --git a/api/internal/servicev2/measurement.go b/api/internal/servicev2/measurement.go deleted 
file mode 100644 index 938cb550..00000000 --- a/api/internal/servicev2/measurement.go +++ /dev/null @@ -1,160 +0,0 @@ -package servicev2 - -import ( - "context" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type MeasurementService interface { - ListTimeseriesMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw model.TimeWindow, threshold int) (*model.MeasurementCollection, error) - DeleteTimeserieMeasurements(ctx context.Context, timeseriesID uuid.UUID, t time.Time) error - GetTimeseriesConstantMeasurement(ctx context.Context, timeseriesID uuid.UUID, constantName string) (model.Measurement, error) - CreateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error - CreateOrUpdateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error - CreateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n model.TimeseriesNote) error - CreateOrUpdateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n model.TimeseriesNote) error - CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) - CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) - UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) - DeleteTimeseriesMeasurementsRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error - DeleteTimeseriesNoteRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error -} - -type measurementService struct { - db *Database - *db.Queries -} - -func NewMeasurementService(db *Database, q *db.Queries) *measurementService { - return &measurementService{db, q} -} - -type 
mmtCbk func(context.Context, uuid.UUID, time.Time, float64) error -type noteCbk func(context.Context, uuid.UUID, time.Time, model.TimeseriesNote) error - -func createMeasurements(ctx context.Context, mc []model.MeasurementCollection, mmtFn mmtCbk, noteFn noteCbk) error { - for _, c := range mc { - for _, m := range c.Items { - if err := mmtFn(ctx, c.TimeseriesID, m.Time, float64(m.Value)); err != nil { - return err - } - if m.Masked != nil || m.Validated != nil || m.Annotation != nil { - if err := noteFn(ctx, c.TimeseriesID, m.Time, m.TimeseriesNote); err != nil { - return err - } - } - } - } - return nil -} - -// CreateTimeseriesMeasurements creates many timeseries from an array of timeseries -func (s measurementService) CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - mmts := make([]db.CreateTimeseriesMeasurementsBatchParams, 0) - notes := make([]db.CreateTimeseriesNotesBatchParams, 0) - - for idx := range mc { - for _, m := range mc[idx].Items { - mmts = append(mmts, db.CreateTimeseriesMeasurementsBatchParams{ - TimeseriesID: mc[idx].TimeseriesID, - Time: m.Time, - Value: float64(m.Value), - }) - notes = append(notes, db.CreateTimeseriesNotesBatchParams{ - TimeseriesID: mc[idx].TimeseriesID, - Time: m.Time, - Masked: m.Masked, - Validated: m.Validated, - Annotation: m.Annotation, - }) - } - } - qtx.CreateTimeseriesMeasurementsBatch(ctx, mmts).Exec(batchExecErr(&err)) - if err != nil { - return err - } - qtx.CreateTimeseriesNotesBatch(ctx, notes).Exec(batchExecErr(&err)) - if err != nil { - return err - } - return tx.Commit(ctx) -} - -// CreateOrUpdateTimeseriesMeasurements creates many timeseries from an array of timeseries -// If a timeseries measurement already exists for a given timeseries_id and time, the value is updated -func (s measurementService) CreateOrUpdateTimeseriesMeasurements(ctx 
context.Context, mc []model.MeasurementCollection) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - mmts := make([]db.CreateOrUpdateTimeseriesMeasurementsBatchParams, 0) - notes := make([]db.CreateOrUpdateTimeseriesNoteBatchParams, 0) - - for idx := range mc { - for _, m := range mc[idx].Items { - mmts = append(mmts, db.CreateOrUpdateTimeseriesMeasurementsBatchParams{ - TimeseriesID: mc[idx].TimeseriesID, - Time: m.Time, - Value: float64(m.Value), - }) - notes = append(notes, db.CreateOrUpdateTimeseriesNoteBatchParams{ - TimeseriesID: mc[idx].TimeseriesID, - Time: m.Time, - Masked: m.Masked, - Validated: m.Validated, - Annotation: m.Annotation, - }) - } - } - qtx.CreateOrUpdateTimeseriesMeasurementsBatch(ctx, mmts).Exec(batchExecErr(&err)) - if err != nil { - return err - } - qtx.CreateOrUpdateTimeseriesNoteBatch(ctx, notes).Exec(batchExecErr(&err)) - if err != nil { - return err - } - return tx.Commit(ctx) -} - -// UpdateTimeseriesMeasurements updates many timeseries measurements, "overwriting" time and values to match paylaod -func (s measurementService) UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - for _, c := range mc { - if err := qtx.DeleteTimeseriesMeasurementsRange(ctx, db.DeleteTimeseriesMeasurementsRangeParams{ - TimeseriesID: c.TimeseriesID, - After: tw.After, - Before: tw.Before, - }); err != nil { - return err - } - if err := qtx.DeleteTimeseriesNoteRange(ctx, db.DeleteTimeseriesNoteRangeParams{ - TimeseriesID: c.TimeseriesID, - After: tw.After, - Before: tw.Before, - }); err != nil { - return err - } - } - return tx.Commit(ctx) -} diff --git a/api/internal/servicev2/measurement_inclinometer.go b/api/internal/servicev2/measurement_inclinometer.go deleted file mode 100644 index 
dec5e4fa..00000000 --- a/api/internal/servicev2/measurement_inclinometer.go +++ /dev/null @@ -1,121 +0,0 @@ -package servicev2 - -import ( - "context" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InclinometerMeasurementService interface { - ListInclinometerMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw model.TimeWindow) (*model.InclinometerMeasurementCollection, error) - ListInclinometerMeasurementValues(ctx context.Context, timeseriesID uuid.UUID, time time.Time, inclConstant float64) ([]*model.InclinometerMeasurementValues, error) - DeleteInclinometerMeasurement(ctx context.Context, timeseriesID uuid.UUID, time time.Time) error - CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) - ListInstrumentIDsFromTimeseriesID(ctx context.Context, timeseriesID uuid.UUID) ([]uuid.UUID, error) - CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error -} - -type inclinometerMeasurementService struct { - db *Database - *db.Queries -} - -func NewInclinometerMeasurementService(db *Database, q *db.Queries) *inclinometerMeasurementService { - return &inclinometerMeasurementService{db, q} -} - -// CreateInclinometerMeasurements creates many inclinometer from an array of inclinometer -// If a inclinometer measurement already exists for a given timeseries_id and time, the values is updated -func (s inclinometerMeasurementService) CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) { - tx, err := s.db.Begin(ctx) - if err != nil { - return nil, err - } - defer txDo(ctx, tx.Rollback) - - qtx := 
s.WithTx(tx) - - // Iterate All inclinometer Measurements - for idx := range im { - for i := range im[idx].Inclinometers { - im[idx].Inclinometers[i].Creator = p.ID - im[idx].Inclinometers[i].CreateDate = createDate - if err := qtx.CreateOrUpdateInclinometerMeasurement(ctx, im[idx].TimeseriesID, im[idx].Inclinometers[i].Time, im[idx].Inclinometers[i].Values, p.ID, createDate); err != nil { - return nil, err - } - } - } - if err := tx.Commit(ctx); err != nil { - return nil, err - } - - return im, nil -} - -// CreateTimeseriesConstant creates timeseries constant -func (s inclinometerMeasurementService) CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - instrumentIDs, err := qtx.ListInstrumentIDsFromTimeseriesID(ctx, timeseriesID) - if err != nil { - return err - } - - parameterIDs, err := qtx.ListParameterIDsFromParameterName(ctx, parameterName) - if err != nil { - return err - } - - unitIDs, err := qtx.ListUnitIDsFromUnitName(ctx, unitName) - if err != nil { - return err - } - - if len(instrumentIDs) > 0 && len(parameterIDs) > 0 && len(unitIDs) > 0 { - t := model.Timeseries{} - measurement := model.Measurement{} - measurements := []model.Measurement{} - mc := model.MeasurementCollection{} - mcs := []model.MeasurementCollection{} - ts := []model.Timeseries{} - - t.InstrumentID = instrumentIDs[0] - t.Slug = parameterName - t.Name = parameterName - t.ParameterID = parameterIDs[0] - t.UnitID = unitIDs[0] - ts = append(ts, t) - - t.Type = model.ConstantTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, t) - if err != nil { - return err - } - // Assign timeseries - if err := qtx.CreateInstrumentConstant(ctx, t.InstrumentID, t.ID); err != nil { - return err - } - - measurement.Time = time.Now() - measurement.Value = model.FloatNanInf(value) - measurements = 
append(measurements, measurement) - mc.TimeseriesID = tsNew.ID - mc.Items = measurements - mcs = append(mcs, mc) - - if err = createMeasurements(ctx, mcs, qtx.CreateOrUpdateTimeseriesMeasurement, qtx.CreateOrUpdateTimeseriesNote); err != nil { - return err - } - } - - return nil -} diff --git a/api/internal/servicev2/opendcs.go b/api/internal/servicev2/opendcs.go deleted file mode 100644 index 704e9bdb..00000000 --- a/api/internal/servicev2/opendcs.go +++ /dev/null @@ -1,21 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type OpendcsService interface { - ListOpendcsSites(ctx context.Context) ([]model.Site, error) -} - -type opendcsService struct { - db *Database - *db.Queries -} - -func NewOpendcsService(db *Database, q *db.Queries) *opendcsService { - return &opendcsService{db, q} -} diff --git a/api/internal/servicev2/plot_config.go b/api/internal/servicev2/plot_config.go deleted file mode 100644 index a15eb10d..00000000 --- a/api/internal/servicev2/plot_config.go +++ /dev/null @@ -1,75 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type PlotConfigService interface { - ListPlotConfigs(ctx context.Context, projectID uuid.UUID) ([]db.VPlotConfiguration, error) - GetPlotConfig(ctx context.Context, plotconfigID uuid.UUID) (db.VPlotConfiguration, error) - DeletePlotConfig(ctx context.Context, projectID, plotConfigID uuid.UUID) error - plotConfigBullseyePlotService - plotConfigContourPlotService - plotConfigProfilePlotService - plotConfigScatterLinePlotService -} - -type plotConfigService struct { - db *Database - *db.Queries -} - -func NewPlotConfigService(db *Database, q *db.Queries) *plotConfigService { - return &plotConfigService{db, q} -} - -func createPlotConfigCommon(ctx 
context.Context, q *db.Queries, pc model.PlotConfig) (uuid.UUID, error) { - pcID, err := q.CreatePlotConfig(ctx, db.CreatePlotConfigParams{ - Name: pc.Name, - ProjectID: pc.ProjectID, - Creator: pc.CreatorID, - CreateDate: pc.CreateDate, - PlotType: db.PlotType(pc.PlotType), - }) - if err != nil { - return pcID, err - } - err = q.CreatePlotConfigSettings(ctx, db.CreatePlotConfigSettingsParams{ - ID: pcID, - ShowMasked: pc.ShowMasked, - ShowNonvalidated: pc.ShowNonValidated, - ShowComments: pc.ShowComments, - AutoRange: pc.AutoRange, - DateRange: pc.DateRange, - Threshold: int32(pc.Threshold), - }) - return pcID, err -} - -func updatePlotConfigCommon(ctx context.Context, q *db.Queries, pc model.PlotConfig) error { - if err := q.UpdatePlotConfig(ctx, db.UpdatePlotConfigParams{ - ProjectID: pc.ProjectID, - ID: pc.ID, - Name: pc.Name, - Updater: pc.UpdaterID, - UpdateDate: pc.UpdateDate, - }); err != nil { - return err - } - if err := q.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return err - } - return q.CreatePlotConfigSettings(ctx, db.CreatePlotConfigSettingsParams{ - ID: pc.ID, - ShowMasked: pc.ShowMasked, - ShowNonvalidated: pc.ShowNonValidated, - ShowComments: pc.ShowComments, - AutoRange: pc.AutoRange, - DateRange: pc.DateRange, - Threshold: int32(pc.Threshold), - }) -} diff --git a/api/internal/servicev2/plot_config_bullseye.go b/api/internal/servicev2/plot_config_bullseye.go deleted file mode 100644 index ea22a406..00000000 --- a/api/internal/servicev2/plot_config_bullseye.go +++ /dev/null @@ -1,72 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type plotConfigBullseyePlotService interface { - CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) - UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) 
(db.VPlotConfiguration, error) - ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]model.PlotConfigMeasurementBullseyePlot, error) -} - -func (s plotConfigService) CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) { - var a db.VPlotConfiguration - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - pc.PlotType = model.BullseyePlotType - pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) - if err != nil { - return a, err - } - if err := qtx.CreatePlotBullseyeConfig(ctx, db.CreatePlotBullseyeConfigParams{ - PlotConfigID: pcID, - XAxisTimeseriesID: &pc.Display.XAxisTimeseriesID, - YAxisTimeseriesID: &pc.Display.YAxisTimeseriesID, - }); err != nil { - return a, err - } - a, err = qtx.GetPlotConfig(ctx, pcID) - if err != nil { - return a, err - } - err = tx.Commit(ctx) - return a, err -} - -func (s plotConfigService) UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) { - var a db.VPlotConfiguration - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { - return a, err - } - if err := qtx.UpdatePlotBullseyeConfig(ctx, db.UpdatePlotBullseyeConfigParams{ - PlotConfigID: pc.ID, - XAxisTimeseriesID: &pc.Display.XAxisTimeseriesID, - YAxisTimeseriesID: &pc.Display.YAxisTimeseriesID, - }); err != nil { - return a, err - } - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) - if err != nil { - return a, err - } - err = tx.Commit(ctx) - - return pcNew, err -} diff --git a/api/internal/servicev2/plot_config_contour.go b/api/internal/servicev2/plot_config_contour.go deleted file mode 100644 index 24e7ca6d..00000000 --- a/api/internal/servicev2/plot_config_contour.go +++ /dev/null @@ -1,130 
+0,0 @@ -package servicev2 - -import ( - "context" - "errors" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type plotConfigContourPlotService interface { - CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (db.VPlotConfiguration, error) - UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (db.VPlotConfiguration, error) - ListPlotConfigTimesContourPlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]time.Time, error) - GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) -} - -func (s plotConfigService) CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (db.VPlotConfiguration, error) { - var a db.VPlotConfiguration - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - pc.PlotType = model.ContourPlotType - pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) - if err := qtx.CreatePlotContourConfig(ctx, db.CreatePlotContourConfigParams{ - PlotConfigID: pcID, - Time: pc.Display.Time, - LocfBackfill: pc.Display.LocfBackfill, - GradientSmoothing: pc.Display.GradientSmoothing, - ContourSmoothing: pc.Display.ContourSmoothing, - ShowLabels: pc.Display.ShowLabels, - }); err != nil { - return a, err - } - if err := createPlotContourConfigTimeseriesBatch(ctx, qtx, pcID, pc.Display.TimeseriesIDs); err != nil { - return a, err - } - a, err = qtx.GetPlotConfig(ctx, pcID) - if err != nil { - return a, err - } - err = tx.Commit(ctx) - - return a, err -} - -func (s plotConfigService) UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (db.VPlotConfiguration, error) { - var a db.VPlotConfiguration - tx, err := s.db.Begin(ctx) - if err != nil { - return a, 
err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { - return a, err - } - if err := qtx.UpdatePlotContourConfig(ctx, db.UpdatePlotContourConfigParams{ - PlotConfigID: pc.ID, - Time: pc.Display.Time, - LocfBackfill: pc.Display.LocfBackfill, - GradientSmoothing: pc.Display.GradientSmoothing, - ContourSmoothing: pc.Display.ContourSmoothing, - ShowLabels: pc.Display.ShowLabels, - }); err != nil { - return a, err - } - - if err := qtx.DeleteAllPlotContourConfigTimeseries(ctx, pc.ID); err != nil { - return a, err - } - if err := createPlotContourConfigTimeseriesBatch(ctx, qtx, pc.ID, pc.Display.TimeseriesIDs); err != nil { - return a, err - } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) - if err != nil { - return a, err - } - - err = tx.Commit(ctx) - - return pcNew, err -} - -func (s plotConfigService) GetPlotConfigMeasurementsContourPlot(ctx context.Context, pcID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) { - mm, err := s.Queries.ListPlotConfigMeasurementsContourPlot(ctx, db.ListPlotConfigMeasurementsContourPlotParams{ - PlotConfigID: pcID, - Time: t, - }) - if err != nil { - return model.AggregatePlotConfigMeasurementsContourPlot{}, err - } - am := model.AggregatePlotConfigMeasurementsContourPlot{ - X: make([]float64, len(mm)), - Y: make([]float64, len(mm)), - Z: make([]*float64, len(mm)), - } - for idx := range mm { - z, ok := mm[idx].Z.(*float64) - if !ok { - return model.AggregatePlotConfigMeasurementsContourPlot{}, errors.New("failed type assertion: interface to float64") - } - am.X[idx] = mm[idx].X - am.Y[idx] = mm[idx].Y - am.Z[idx] = z - } - return am, nil -} - -func createPlotContourConfigTimeseriesBatch(ctx context.Context, q *db.Queries, pcID uuid.UUID, tt []uuid.UUID) error { - args := make([]db.CreatePlotContourConfigTimeseriesBatchParams, len(tt)) - for idx, tsID := range tt { - args[idx] = 
db.CreatePlotContourConfigTimeseriesBatchParams{ - PlotContourConfigID: pcID, - TimeseriesID: tsID, - } - } - var err error - q.CreatePlotContourConfigTimeseriesBatch(ctx, args).Exec(batchExecErr(&err)) - return err -} diff --git a/api/internal/servicev2/plot_config_profile.go b/api/internal/servicev2/plot_config_profile.go deleted file mode 100644 index d7a42e3b..00000000 --- a/api/internal/servicev2/plot_config_profile.go +++ /dev/null @@ -1,69 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type plotConfigProfilePlotService interface { - CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (db.VPlotConfiguration, error) - UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (db.VPlotConfiguration, error) -} - -func (s plotConfigService) CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (db.VPlotConfiguration, error) { - var a db.VPlotConfiguration - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - pc.PlotType = model.ProfilePlotType - pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) - if err != nil { - return a, err - } - if err := qtx.CreatePlotProfileConfig(ctx, db.CreatePlotProfileConfigParams{ - PlotConfigID: pcID, - InstrumentID: pc.Display.InstrumentID, - }); err != nil { - return a, err - } - pcNew, err := qtx.GetPlotConfig(ctx, pcID) - if err != nil { - return a, err - } - err = tx.Commit(ctx) - - return pcNew, err -} - -func (s plotConfigService) UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (db.VPlotConfiguration, error) { - var a db.VPlotConfiguration - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := updatePlotConfigCommon(ctx, qtx, 
pc.PlotConfig); err != nil { - return a, err - } - if err := qtx.UpdatePlotProfileConfig(ctx, db.UpdatePlotProfileConfigParams{ - PlotConfigID: pc.ID, - InstrumentID: pc.Display.InstrumentID, - }); err != nil { - return a, err - } - a, err = qtx.GetPlotConfig(ctx, pc.ID) - if err != nil { - return a, err - } - err = tx.Commit(ctx) - - return a, err -} diff --git a/api/internal/servicev2/plot_config_scatter_line.go b/api/internal/servicev2/plot_config_scatter_line.go deleted file mode 100644 index 85edf668..00000000 --- a/api/internal/servicev2/plot_config_scatter_line.go +++ /dev/null @@ -1,161 +0,0 @@ -package servicev2 - -import ( - "context" - "fmt" - "strings" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type plotConfigScatterLinePlotService interface { - CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) - UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) -} - -func (s plotConfigService) CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) { - var a db.VPlotConfiguration - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - pc.PlotType = model.ScatterLinePlotType - pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) - if err := validateCreateTraces(ctx, qtx, pcID, pc.Display.Traces); err != nil { - return a, err - } - if err := qtx.CreatePlotConfigScatterLineLayout(ctx, db.CreatePlotConfigScatterLineLayoutParams{ - PlotConfigID: pcID, - YAxisTitle: pc.Display.Layout.YAxisTitle, - Y2AxisTitle: pc.Display.Layout.Y2AxisTitle, - }); err != nil { - return a, err - } - if err := validateCreateCustomShapes(ctx, qtx, pcID, pc.Display.Layout.CustomShapes); err != nil { - return a, err 
- } - pcNew, err := qtx.GetPlotConfig(ctx, pcID) - if err != nil { - return a, err - } - err = tx.Commit(ctx) - - return pcNew, err -} - -func (s plotConfigService) UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) { - var a db.VPlotConfiguration - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { - return a, err - } - if err := qtx.DeleteAllPlotConfigTimeseriesTraces(ctx, &pc.ID); err != nil { - return a, err - } - if err := qtx.DeleteAllPlotConfigCustomShapes(ctx, &pc.ID); err != nil { - return a, err - } - if err := validateCreateTraces(ctx, qtx, pc.ID, pc.Display.Traces); err != nil { - return a, err - } - if err := qtx.UpdatePlotConfigScatterLineLayout(ctx, db.UpdatePlotConfigScatterLineLayoutParams{ - PlotConfigID: pc.ID, - YAxisTitle: pc.Display.Layout.YAxisTitle, - Y2AxisTitle: pc.Display.Layout.Y2AxisTitle, - }); err != nil { - return a, err - } - if err := validateCreateCustomShapes(ctx, qtx, pc.ID, pc.Display.Layout.CustomShapes); err != nil { - return a, err - } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) - if err != nil { - return a, err - } - - err = tx.Commit(ctx) - - return pcNew, err -} - -func validateCreateTraces(ctx context.Context, q *db.Queries, pcID uuid.UUID, trs []model.PlotConfigScatterLineTimeseriesTrace) error { - args := make([]db.CreatePlotConfigTimeseriesTracesBatchParams, len(trs)) - for idx, tr := range trs { - tr.PlotConfigurationID = pcID - if err := validateColor(tr.Color); err != nil { - return err - } - if tr.LineStyle == "" { - tr.LineStyle = "solid" - } - if tr.YAxis == "" { - tr.YAxis = "y1" - } - args[idx] = db.CreatePlotConfigTimeseriesTracesBatchParams{ - PlotConfigurationID: &tr.PlotConfigurationID, - TimeseriesID: &tr.TimeseriesID, - TraceOrder: int32(tr.TraceOrder), - Color: tr.Color, - LineStyle: 
db.LineStyle(tr.LineStyle), - Width: tr.Width, - ShowMarkers: tr.ShowMarkers, - YAxis: db.YAxis(tr.YAxis), - } - } - var err error - q.CreatePlotConfigTimeseriesTracesBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return err - } - return nil -} - -func validateCreateCustomShapes(ctx context.Context, q *db.Queries, pcID uuid.UUID, css []model.PlotConfigScatterLineCustomShape) error { - for _, cs := range css { - cs.PlotConfigurationID = pcID - - if err := validateColor(cs.Color); err != nil { - return err - } - - if err := q.CreatePlotConfigCustomShape(ctx, db.CreatePlotConfigCustomShapeParams{ - PlotConfigurationID: &pcID, - Enabled: cs.Enabled, - Name: cs.Name, - DataPoint: cs.DataPoint, - Color: cs.Color, - }); err != nil { - return err - } - } - return nil -} - -func validateColor(colorHex string) error { - parts := strings.SplitAfter(colorHex, "#") - invalidHexErr := fmt.Errorf("invalid hex code format: %s; format must be '#000000'", colorHex) - if len(parts) != 2 { - return invalidHexErr - } - if len(parts[0]) != 1 && len(parts[1]) != 6 { - return invalidHexErr - } - for _, r := range parts[1] { - if !(r >= '0' && r <= '9' || r >= 'a' && r <= 'f' || r >= 'A' && r <= 'F') { - return invalidHexErr - } - } - return nil -} diff --git a/api/internal/servicev2/profile.go b/api/internal/servicev2/profile.go deleted file mode 100644 index 963ecf2a..00000000 --- a/api/internal/servicev2/profile.go +++ /dev/null @@ -1,89 +0,0 @@ -package servicev2 - -import ( - "context" - "errors" - "strings" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type ProfileService interface { - GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (model.Profile, error) - GetProfileWithTokensForEDIPI(ctx context.Context, edipi int) (model.Profile, error) - GetProfileForEmail(ctx context.Context, email string) (model.Profile, error) - 
GetProfileWithTokensForUsername(ctx context.Context, username string) (model.Profile, error) - GetProfileWithTokensForTokenID(ctx context.Context, tokenID string) (model.Profile, error) - CreateProfile(ctx context.Context, n model.ProfileInfo) (model.Profile, error) - CreateProfileToken(ctx context.Context, profileID uuid.UUID) (model.Token, error) - GetTokenInfoByTokenID(ctx context.Context, tokenID string) (model.TokenInfo, error) - UpdateProfileForClaims(ctx context.Context, p model.Profile, claims model.ProfileClaims) (model.Profile, error) - DeleteToken(ctx context.Context, profileID uuid.UUID, tokenID string) error -} - -type profileService struct { - db *Database - *db.Queries -} - -func NewProfileService(db *Database, q *db.Queries) *profileService { - return &profileService{db, q} -} - -func (s profileService) GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (db.VProfile, error) { - var a db.VProfile - var err error - if claims.CacUID != nil { - a, err = s.Queries.GetProfileForEDIPI(ctx, int64(*claims.CacUID)) - } else { - a, err = s.GetProfileForEmail(ctx, claims.Email) - } - if err != nil { - return a, err - } - return a, nil -} - -// UpdateProfileForClaims syncs a database profile to the provided token claims -// THe order of precence in which the function will attepmt to update profiles is edipi, email, username -func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Profile, claims model.ProfileClaims) (model.Profile, error) { - var claimsMatchProfile bool = p.Username == claims.PreferredUsername && - strings.ToLower(p.Email) == strings.ToLower(claims.Email) && - p.DisplayName == claims.Name - - if claimsMatchProfile { - return p, nil - } - if claims.CacUID != nil && !claimsMatchProfile { - if err := s.Queries.UpdateProfileForEDIPI(ctx, db.UpdateProfileForEDIPIParams{ - Username: claims.PreferredUsername, - Email: claims.Email, - DisplayName: claims.Name, - Edipi: int64(*claims.CacUID), - }); err != 
nil { - return p, err - } - p.Username = claims.PreferredUsername - p.DisplayName = claims.Name - p.Email = claims.Email - - return p, nil - } - if strings.ToLower(p.Email) == strings.ToLower(claims.Email) && !claimsMatchProfile { - if err := s.Queries.UpdateProfileForEmail(ctx, db.UpdateProfileForEmailParams{ - Username: claims.PreferredUsername, - DisplayName: claims.Name, - Email: claims.Email, - }); err != nil { - return p, err - } - p.Username = claims.PreferredUsername - p.DisplayName = claims.Name - - return p, nil - } - - return p, errors.New("claims did not match profile and could not be updated") -} diff --git a/api/internal/servicev2/project.go b/api/internal/servicev2/project.go deleted file mode 100644 index 21eed8fc..00000000 --- a/api/internal/servicev2/project.go +++ /dev/null @@ -1,132 +0,0 @@ -package servicev2 - -import ( - "context" - "image" - "io" - "mime/multipart" - "os" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/img" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type ProjectService interface { - SearchProjects(ctx context.Context, searchInput string, limit int) ([]model.SearchResult, error) - ListDistricts(ctx context.Context) ([]model.District, error) - ListProjects(ctx context.Context) ([]model.Project, error) - ListProjectsByFederalID(ctx context.Context, federalID string) ([]model.Project, error) - ListProjectsForProfile(ctx context.Context, profileID uuid.UUID) ([]model.Project, error) - ListProjectsForProfileRole(ctx context.Context, profileID uuid.UUID, role string) ([]model.Project, error) - ListProjectInstruments(ctx context.Context, projectID uuid.UUID) ([]model.Instrument, error) - ListProjectInstrumentGroups(ctx context.Context, projectID uuid.UUID) ([]model.InstrumentGroup, error) - GetProjectCount(ctx context.Context) (model.ProjectCount, error) - GetProject(ctx context.Context, projectID uuid.UUID) 
(model.Project, error) - CreateProject(ctx context.Context, p model.Project) (model.IDSlugName, error) - CreateProjectsBatch(ctx context.Context, projects []model.Project) ([]db.CreateProjectsBatchRow, error) - UpdateProject(ctx context.Context, p model.Project) (model.Project, error) - UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error - DeleteFlagProject(ctx context.Context, projectID uuid.UUID) error -} - -type projectService struct { - db *Database - *db.Queries -} - -func NewProjectService(db *Database, q *db.Queries) *projectService { - return &projectService{db, q} -} - -type uploader func(ctx context.Context, r io.Reader, rawPath, bucketName string) error - -// CreateProjectBulk creates one or more projects from an array of projects -func (s projectService) CreateProjectBatch(ctx context.Context, projects []model.Project) ([]db.CreateProjectsBatchRow, error) { - args := make([]db.CreateProjectsBatchParams, len(projects)) - for idx, p := range projects { - args[idx] = db.CreateProjectsBatchParams{ - FederalID: p.FederalID, - Name: p.Name, - DistrictID: p.DistrictID, - Creator: p.CreatorID, - CreateDate: p.CreateDate, - } - } - var err error - pp := make([]db.CreateProjectsBatchRow, len(args)) - s.Queries.CreateProjectsBatch(ctx, args).QueryRow(batchQueryRowCollect(pp, &err)) - if err != nil { - return nil, err - } - return pp, nil -} - -func (s projectService) UpdateProject(ctx context.Context, p model.Project) (db.VProject, error) { - var a db.VProject - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if _, err := qtx.UpdateProject(ctx, db.UpdateProjectParams{ - ID: p.ID, - Name: p.Name, - Updater: p.UpdaterID, - UpdateDate: p.UpdateDate, - DistrictID: p.DistrictID, - FederalID: p.FederalID, - }); err != nil { - return a, err - } - updated, err := qtx.GetProject(ctx, p.ID) - if err != nil { - return a, err - } - if err := 
tx.Commit(ctx); err != nil { - return a, err - } - return updated, nil -} - -func (s projectService) UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - p, err := qtx.GetProject(ctx, projectID) - if err != nil { - return err - } - src, err := file.Open() - if err != nil { - return err - } - defer src.Close() - dst, err := os.Create(file.Filename) - if err != nil { - return err - } - defer dst.Close() - if err := img.Resize(src, dst, image.Rect(0, 0, 480, 480)); err != nil { - return err - } - if err := qtx.UpdateProjectImage(ctx, db.UpdateProjectImageParams{ - ID: projectID, - Image: &file.Filename, - }); err != nil { - return err - } - - if err := u(ctx, src, "/projects/"+p.Slug+"/"+file.Filename, ""); err != nil { - return err - } - - return tx.Commit(ctx) -} diff --git a/api/internal/servicev2/project_role.go b/api/internal/servicev2/project_role.go deleted file mode 100644 index 24e0114d..00000000 --- a/api/internal/servicev2/project_role.go +++ /dev/null @@ -1,55 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/google/uuid" -) - -type ProjectRoleService interface { - ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]db.ListProjectMembersRow, error) - GetProjectMembership(ctx context.Context, roleID uuid.UUID) (db.GetProjectMembershipRow, error) - AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (db.GetProjectMembershipRow, error) - RemoveProjectMemberRole(ctx context.Context, projectID, profileID, roleID uuid.UUID) error - IsProjectAdmin(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) - IsProjectMember(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) -} - -type projectRoleService struct { - db *Database - *db.Queries -} 
- -func NewProjectRoleService(db *Database, q *db.Queries) *projectRoleService { - return &projectRoleService{db, q} -} - -// AddProjectMemberRole adds a role to a user for a specific project -func (s projectRoleService) AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (db.GetProjectMembershipRow, error) { - var a db.GetProjectMembershipRow - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - pprID, err := qtx.CreateProfileProjectRole(ctx, db.CreateProfileProjectRoleParams{ - ProjectID: projectID, - ProfileID: profileID, - RoleID: roleID, - GrantedBy: &grantedBy, - }) - if err != nil { - return a, err - } - a, err = qtx.GetProjectMembership(ctx, pprID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - return a, nil -} diff --git a/api/internal/servicev2/report_config.go b/api/internal/servicev2/report_config.go deleted file mode 100644 index 21b78c28..00000000 --- a/api/internal/servicev2/report_config.go +++ /dev/null @@ -1,176 +0,0 @@ -package servicev2 - -import ( - "context" - "encoding/json" - - "github.com/USACE/instrumentation-api/api/internal/cloud" - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type ReportConfigService interface { - ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]db.VReportConfig, error) - CreateReportConfig(ctx context.Context, rc model.ReportConfig) (db.VReportConfig, error) - UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error - DeleteReportConfig(ctx context.Context, rcID uuid.UUID) error - GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (ReportConfigWithPlotConfigs, error) - CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (db.ReportDownloadJob, error) - 
GetReportDownloadJob(ctx context.Context, jobID, profileID uuid.UUID) (db.ReportDownloadJob, error) - UpdateReportDownloadJob(ctx context.Context, j model.ReportDownloadJob) error -} - -type reportConfigService struct { - db *Database - *db.Queries - pubsub cloud.Pubsub - mockQueue bool -} - -func NewReportConfigService(db *Database, q *db.Queries, ps cloud.Pubsub, mockQueue bool) *reportConfigService { - return &reportConfigService{db, q, ps, mockQueue} -} - -func (s reportConfigService) CreateReportConfig(ctx context.Context, rc model.ReportConfig) (db.VReportConfig, error) { - var a db.VReportConfig - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - rcID, err := qtx.CreateReportConfig(ctx, db.CreateReportConfigParams{ - Name: rc.Name, - ProjectID: rc.ProjectID, - Creator: rc.CreatorID, - Description: rc.Description, - DateRange: &rc.GlobalOverrides.DateRange.Value, - DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, - ShowMasked: &rc.GlobalOverrides.ShowMasked.Value, - ShowMaskedEnabled: &rc.GlobalOverrides.ShowMasked.Enabled, - ShowNonvalidated: &rc.GlobalOverrides.ShowNonvalidated.Value, - ShowNonvalidatedEnabled: &rc.GlobalOverrides.ShowNonvalidated.Enabled, - }) - if err != nil { - return a, err - } - args := make([]db.AssignReportConfigPlotConfigBatchParams, len(rc.PlotConfigs)) - for idx := range rc.PlotConfigs { - args[idx] = db.AssignReportConfigPlotConfigBatchParams{ - ReportConfigID: rcID, - PlotConfigID: rc.PlotConfigs[idx].ID, - } - } - qtx.AssignReportConfigPlotConfigBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return a, err - } - a, err = qtx.GetReportConfig(ctx, rcID) - if err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - return a, nil -} - -func (s reportConfigService) UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - 
defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := qtx.UpdateReportConfig(ctx, db.UpdateReportConfigParams{ - ID: rc.ID, - Name: rc.Name, - Updater: rc.UpdaterID, - Description: rc.Description, - DateRange: &rc.GlobalOverrides.DateRange.Value, - DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, - ShowMasked: &rc.GlobalOverrides.ShowMasked.Value, - ShowMaskedEnabled: &rc.GlobalOverrides.ShowMasked.Enabled, - ShowNonvalidated: &rc.GlobalOverrides.ShowNonvalidated.Value, - ShowNonvalidatedEnabled: &rc.GlobalOverrides.ShowNonvalidated.Enabled, - }); err != nil { - return err - } - - if err := qtx.UnassignAllReportConfigPlotConfig(ctx, rc.ID); err != nil { - return err - } - - args := make([]db.AssignReportConfigPlotConfigBatchParams, len(rc.PlotConfigs)) - for idx := range rc.PlotConfigs { - args[idx] = db.AssignReportConfigPlotConfigBatchParams{ - ReportConfigID: rc.ID, - PlotConfigID: rc.PlotConfigs[idx].ID, - } - } - qtx.AssignReportConfigPlotConfigBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return err - } - - return tx.Commit(ctx) -} - -type ReportConfigWithPlotConfigs struct { - db.VReportConfig - PlotConfigs []db.VPlotConfiguration `json:"plot_configs"` -} - -func (s reportConfigService) GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (ReportConfigWithPlotConfigs, error) { - var a ReportConfigWithPlotConfigs - rc, err := s.Queries.GetReportConfig(ctx, rcID) - if err != nil { - return a, err - } - pcs, err := s.Queries.ListReportConfigPlotConfigs(ctx, rcID) - if err != nil { - return a, err - } - a.VReportConfig = rc - a.PlotConfigs = pcs - return a, nil -} - -func (s reportConfigService) CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (db.ReportDownloadJob, error) { - var a db.ReportDownloadJob - tx, err := s.db.Begin(ctx) - if err != nil { - return a, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - a, err = qtx.CreateReportDownloadJob(ctx, 
db.CreateReportDownloadJobParams{ - ReportConfigID: &rcID, - Creator: profileID, - }) - if err != nil { - return a, err - } - msg := model.ReportConfigJobMessage{ReportConfigID: rcID, JobID: a.ID, IsLandscape: isLandscape} - b, err := json.Marshal(msg) - if err != nil { - return a, err - } - // NOTE: Depending on how long this takes, possibly invoke the lambdas directly - if _, err := s.pubsub.PublishMessage(ctx, b); err != nil { - return a, err - } - if err := tx.Commit(ctx); err != nil { - return a, err - } - if s.mockQueue { - if _, err := s.pubsub.MockPublishMessage(ctx, b); err != nil { - return a, err - } - } - return a, nil -} diff --git a/api/internal/servicev2/submittal.go b/api/internal/servicev2/submittal.go deleted file mode 100644 index b96d2ba0..00000000 --- a/api/internal/servicev2/submittal.go +++ /dev/null @@ -1,28 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type SubmittalService interface { - ListProjectSubmittals(ctx context.Context, projectID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListInstrumentSubmittals(ctx context.Context, instrumentID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListUnverifiedMissingSubmittals(ctx context.Context) ([]model.Submittal, error) - UpdateSubmittal(ctx context.Context, sub model.Submittal) error - VerifyMissingSubmittal(ctx context.Context, submittalID uuid.UUID) error - VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID) error -} - -type submittalService struct { - db *Database - *db.Queries -} - -func NewSubmittalService(db *Database, q *db.Queries) *submittalService { - return &submittalService{db, q} -} diff --git a/api/internal/servicev2/timeseries.go 
b/api/internal/servicev2/timeseries.go deleted file mode 100644 index 6b26c649..00000000 --- a/api/internal/servicev2/timeseries.go +++ /dev/null @@ -1,56 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type TimeseriesService interface { - GetStoredTimeseriesExists(ctx context.Context, timeseriesID uuid.UUID) (bool, error) - AssertTimeseriesLinkedToProject(ctx context.Context, projectID uuid.UUID, dd map[uuid.UUID]struct{}) error - ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]model.Timeseries, error) - ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]model.Timeseries, error) - ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]model.Timeseries, error) - GetTimeseries(ctx context.Context, timeseriesID uuid.UUID) (model.Timeseries, error) - CreateTimeseries(ctx context.Context, ts model.Timeseries) (model.Timeseries, error) - CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) - UpdateTimeseries(ctx context.Context, ts model.Timeseries) (uuid.UUID, error) - DeleteTimeseries(ctx context.Context, timeseriesID uuid.UUID) error -} - -type timeseriesService struct { - db *Database - *db.Queries -} - -func NewTimeseriesService(db *Database, q *db.Queries) *timeseriesService { - return ×eriesService{db, q} -} - -func (s timeseriesService) CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) error { - uu := make([]db.CreateTimeseriesBatchParams, len(tt)) - for idx, ts := range tt { - if ts.ParameterID == uuid.Nil { - ts.ParameterID = model.UnknownParameterID - } - if ts.UnitID == uuid.Nil { - ts.UnitID = model.UnknownUnitID - } - uu[idx] = db.CreateTimeseriesBatchParams{ - InstrumentID: &ts.InstrumentID, - Name: ts.Name, - ParameterID: ts.ParameterID, - UnitID: ts.UnitID, - Type: 
db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeStandard, - }, - } - } - var err error - s.Queries.CreateTimeseriesBatch(ctx, uu).QueryRow(batchQueryRowErr[db.CreateTimeseriesBatchRow](&err)) - return err -} diff --git a/api/internal/servicev2/timeseries_calculated.go b/api/internal/servicev2/timeseries_calculated.go deleted file mode 100644 index 59d370ca..00000000 --- a/api/internal/servicev2/timeseries_calculated.go +++ /dev/null @@ -1,77 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type CalculatedTimeseriesService interface { - GetAllCalculatedTimeseriesForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.CalculatedTimeseries, error) - CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error - UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error - DeleteCalculatedTimeseries(ctx context.Context, ctsID uuid.UUID) error -} - -type calculatedTimeseriesService struct { - db *Database - *db.Queries -} - -func NewCalculatedTimeseriesService(db *Database, q *db.Queries) *calculatedTimeseriesService { - return &calculatedTimeseriesService{db, q} -} - -func (s calculatedTimeseriesService) CreateCalculatedTimeseries(ctx context.Context, ct model.CalculatedTimeseries) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - tsID, err := qtx.CreateCalculatedTimeseries(ctx, db.CreateCalculatedTimeseriesParams{ - InstrumentID: &ct.InstrumentID, - ParameterID: ct.ParameterID, - UnitID: ct.UnitID, - Name: ct.FormulaName, - }) - if err != nil { - return err - } - if err := qtx.CreateCalculation(ctx, db.CreateCalculationParams{ - TimeseriesID: tsID, - Contents: &ct.Formula, - }); err != nil { - return err - } - return tx.Commit(ctx) -} - -func (s 
calculatedTimeseriesService) UpdateCalculatedTimeseries(ctx context.Context, ct model.CalculatedTimeseries) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := qtx.CreateOrUpdateCalculatedTimeseries(ctx, db.CreateOrUpdateCalculatedTimeseriesParams{ - ID: ct.ID, - InstrumentID: &ct.InstrumentID, - ParameterID: ct.ParameterID, - UnitID: ct.UnitID, - Name: ct.FormulaName, - }); err != nil { - return err - } - if err := qtx.CreateOrUpdateCalculation(ctx, db.CreateOrUpdateCalculationParams{ - TimeseriesID: ct.ID, - Contents: &ct.Formula, - }); err != nil { - return err - } - return tx.Commit(ctx) -} diff --git a/api/internal/servicev2/timeseries_cwms.go b/api/internal/servicev2/timeseries_cwms.go deleted file mode 100644 index 7c31120f..00000000 --- a/api/internal/servicev2/timeseries_cwms.go +++ /dev/null @@ -1,98 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type TimeseriesCwmsService interface { - ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]db.VTimeseriesCwm, error) - CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) error - UpdateTimeseriesCwms(ctx context.Context, tsCwms model.TimeseriesCwms) error -} - -type timeseriesCwmsService struct { - db *Database - *db.Queries -} - -func NewTimeseriesCwmsService(db *Database, q *db.Queries) *timeseriesCwmsService { - return ×eriesCwmsService{db, q} -} - -func (s timeseriesCwmsService) CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - for idx, tc := range tcc { - if tc.ParameterID == uuid.Nil { - tc.ParameterID = model.UnknownParameterID - 
} - if tc.UnitID == uuid.Nil { - tc.UnitID = model.UnknownUnitID - } - tcc[idx].Type = model.CwmsTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, db.CreateTimeseriesParams{ - InstrumentID: &instrumentID, - Name: tc.Name, - ParameterID: tc.ParameterID, - UnitID: tc.UnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeCwms, - }, - }) - if err != nil { - return err - } - if err := qtx.CreateTimeseriesCwms(ctx, db.CreateTimeseriesCwmsParams{ - TimeseriesID: tsNew.ID, - CwmsTimeseriesID: tc.CwmsTimeseriesID, - CwmsOfficeID: tc.CwmsOfficeID, - CwmsExtentEarliestTime: tc.CwmsExtentEarliestTime, - CwmsExtentLatestTime: tc.CwmsExtentLatestTime, - }); err != nil { - return err - } - } - return tx.Commit(ctx) -} - -func (s timeseriesCwmsService) UpdateTimeseriesCwms(ctx context.Context, ts model.TimeseriesCwms) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - if _, err := qtx.UpdateTimeseries(ctx, db.UpdateTimeseriesParams{ - ID: ts.ID, - Name: ts.Name, - InstrumentID: &ts.InstrumentID, - ParameterID: ts.ParameterID, - UnitID: ts.UnitID, - }); err != nil { - return err - } - - if err := qtx.UpdateTimeseriesCwms(ctx, db.UpdateTimeseriesCwmsParams{ - TimeseriesID: ts.ID, - CwmsTimeseriesID: ts.CwmsTimeseriesID, - CwmsOfficeID: ts.CwmsOfficeID, - CwmsExtentEarliestTime: ts.CwmsExtentEarliestTime, - CwmsExtentLatestTime: ts.CwmsExtentLatestTime, - }); err != nil { - return err - } - - return tx.Commit(ctx) -} diff --git a/api/internal/servicev2/timeseries_process.go b/api/internal/servicev2/timeseries_process.go deleted file mode 100644 index 7711e95b..00000000 --- a/api/internal/servicev2/timeseries_process.go +++ /dev/null @@ -1,22 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type ProcessTimeseriesService interface { - 
SelectMeasurements(ctx context.Context, f model.ProcessMeasurementFilter) (model.ProcessTimeseriesResponseCollection, error) - SelectInclinometerMeasurements(ctx context.Context, f model.ProcessMeasurementFilter) (model.ProcessInclinometerTimeseriesResponseCollection, error) -} - -type processTimeseriesService struct { - db *Database - *db.Queries -} - -func NewProcessTimeseriesService(db *Database, q *db.Queries) *processTimeseriesService { - return &processTimeseriesService{db, q} -} diff --git a/api/internal/servicev2/unit.go b/api/internal/servicev2/unit.go deleted file mode 100644 index 98f736a2..00000000 --- a/api/internal/servicev2/unit.go +++ /dev/null @@ -1,20 +0,0 @@ -package servicev2 - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/db" -) - -type UnitService interface { - ListUnits(ctx context.Context) ([]db.VUnit, error) -} - -type unitService struct { - db *Database - *db.Queries -} - -func NewUnitService(db *Database, q *db.Queries) *unitService { - return &unitService{db, q} -} diff --git a/api/internal/servicev2/uploader.go b/api/internal/servicev2/uploader.go deleted file mode 100644 index 5faf0c64..00000000 --- a/api/internal/servicev2/uploader.go +++ /dev/null @@ -1,187 +0,0 @@ -package servicev2 - -import ( - "context" - "encoding/csv" - "io" - "math" - "strconv" - "time" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type UploaderService interface { - ListUploaderConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]model.UploaderConfig, error) - ListUploaderConfigMappings(ctx context.Context, ucID uuid.UUID) ([]model.UploaderConfigMapping, error) - CreateUploaderConfig(ctx context.Context, uc model.UploaderConfig) (uuid.UUID, error) - UpdateUploaderConfig(ctx context.Context, uc model.UploaderConfig) error - DeleteUploaderConfig(ctx context.Context, ucID uuid.UUID) error - 
CreateUploaderConfigMapping(ctx context.Context, m model.UploaderConfigMapping) error - CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error - UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error - DeleteAllUploaderConfigMappingsForUploaderConfig(ctx context.Context, ucID uuid.UUID) error - // CreateTimeseriesMeasurementsFromDuxFile(ctx context.Context, r io.Reader) error - // CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error -} - -type uploaderService struct { - db *Database - *db.Queries -} - -func NewUploaderService(db *Database, q *db.Queries) *uploaderService { - return &uploaderService{db, q} -} - -func (s uploaderService) CreateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { - args := make([]db.CreateUploaderConfigMappingsBatchParams, len(mm)) - for idx, m := range mm { - args[idx] = db.CreateUploaderConfigMappingsBatchParams{ - UploaderConfigID: m.UploaderConfigID, - FieldName: m.FieldName, - TimeseriesID: m.TimeseriesID, - } - } - var err error - s.Queries.CreateUploaderConfigMappingsBatch(ctx, args).Exec(batchExecErr(&err)) - return err -} - -func (s uploaderService) UpdateUploaderConfigMappings(ctx context.Context, ucID uuid.UUID, mm []model.UploaderConfigMapping) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - if err := qtx.DeleteAllUploaderConfigMappingsForUploaderConfig(ctx, ucID); err != nil { - return err - } - - args := make([]db.CreateUploaderConfigMappingsBatchParams, len(mm)) - for idx, m := range mm { - args[idx] = db.CreateUploaderConfigMappingsBatchParams{ - UploaderConfigID: m.UploaderConfigID, - FieldName: m.FieldName, - TimeseriesID: m.TimeseriesID, - } - } - qtx.CreateUploaderConfigMappingsBatch(ctx, args).Exec(batchExecErr(&err)) - if err != nil { - return err - } - 
return tx.Commit(ctx) -} - -func (s uploaderService) CreateTimeseriesMeasurementsFromTOA5File(ctx context.Context, r io.Reader) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - reader := csv.NewReader(r) - - envHeader, err := reader.Read() - if err != nil { - return err - } - fieldHeader, err := reader.Read() - if err != nil { - return err - } - unitsHeader, err := reader.Read() - if err != nil { - return err - } - processHeader, err := reader.Read() - if err != nil { - return err - } - - meta := model.Environment{ - StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - OSVersion: envHeader[4], - ProgName: envHeader[5], - TableName: envHeader[6], - } - - dl, err := qtx.GetDataloggerByModelSN(ctx, db.GetDataloggerByModelSNParams{ - Model: &meta.Model, - Sn: meta.SerialNo, - }) - if err != nil { - return err - } - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, db.GetOrCreateDataloggerTableParams{ - DataloggerID: dl.ID, - TableName: meta.TableName, - }) - if err != nil { - return err - } - - // first two columns are timestamp and record number - // we only want to collect the measurement fields here - fields := make([]model.Field, len(fieldHeader)-2) - for i := 2; i < len(fieldHeader); i++ { - fields[i] = model.Field{ - Name: fieldHeader[i], - Units: unitsHeader[i], - Process: processHeader[i], - } - } - - eqt, err := qtx.GetEquivalencyTable(ctx, tableID) - if err != nil { - return err - } - - fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, eqtRow := range eqt.Fields { - fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID - } - - for { - record, err := reader.Read() - if err == io.EOF { - break - } - if err != nil { - return err - } - - t, err := time.Parse(record[0], time.RFC3339) - if err != nil { - return err - } - - for idx, cell := range record[2:] { - fieldName := fields[idx].Name - tsID, ok := 
fieldNameTimeseriesIDMap[fieldName] - if !ok { - continue - } - v, err := strconv.ParseFloat(cell, 64) - if err != nil || math.IsNaN(v) || math.IsInf(v, 0) { - continue - } - if err := qtx.CreateOrUpdateTimeseriesMeasurement(ctx, db.CreateOrUpdateTimeseriesMeasurementParams{ - TimeseriesID: tsID, - Time: t, - Value: v, - }); err != nil { - return err - } - } - } - return nil -} diff --git a/api/internal/model/timewindow.go b/api/internal/util/timewindow.go similarity index 98% rename from api/internal/model/timewindow.go rename to api/internal/util/timewindow.go index 39e37f40..506d938e 100644 --- a/api/internal/model/timewindow.go +++ b/api/internal/util/timewindow.go @@ -1,4 +1,4 @@ -package model +package util import "time" diff --git a/api/migrations/repeat/0040__views_instruments.sql b/api/migrations/repeat/0040__views_instruments.sql index c8cce654..06dfb1be 100644 --- a/api/migrations/repeat/0040__views_instruments.sql +++ b/api/migrations/repeat/0040__views_instruments.sql @@ -160,3 +160,14 @@ CREATE OR REPLACE VIEW v_instrument_group AS ( GROUP BY ig.id, ic.i_count ORDER BY ig.name ); + +CREATE OR REPLACE VIEW v_instrument_status AS ( + SELECT + s1.id, + s1.instrument_id, + s1.status_id, + s2.name status, + s1.time + FROM instrument_status s1 + INNER JOIN status s2 ON s1.status_id = s2.id +); diff --git a/api/migrations/repeat/0050__views_timeseries.sql b/api/migrations/repeat/0050__views_timeseries.sql index d4ae5396..457731f0 100644 --- a/api/migrations/repeat/0050__views_timeseries.sql +++ b/api/migrations/repeat/0050__views_timeseries.sql @@ -103,23 +103,16 @@ CREATE OR REPLACE VIEW v_collection_group_details AS ( ); -select t.*, tm.time as latest_time, tm.value as latest_value -from collection_group_timeseries cgt -inner join collection_group cg on cg.id = cgt.collection_group_id -inner join v_timeseries t on t.id = cgt.timeseries_id -left join timeseries_measurement tm on tm.timeseries_id = t.id and tm.time = ( - select time from 
timeseries_measurement - where timeseries_id = t.id - order by time desc limit 1 -) -inner join project_instrument pi on t.instrument_id = pi.instrument_id -where pi.project_id = $1 -and cgt.collection_group_id = $2; - - --- WHERE t.instrument_id = ANY( --- SELECT instrument_id --- FROM project_instrument --- WHERE project_id = $1 --- ) --- AND cgt.collection_group_id = $2 +CREATE OR REPLACE VIEW v_timeseries_measurement AS ( + SELECT + m.timeseries_id, + m.time, + m.value, + n.masked, + n.validated, + n.annotation + FROM timeseries_measurement m + LEFT JOIN timeseries_notes n ON m.timeseries_id = n.timeseries_id AND m.time = n.time + INNER JOIN timeseries t on t.id = m.timeseries_id + ORDER BY m.time ASC +); diff --git a/api/migrations/schema/V1.17.00__evaluation_del_cascade.sql b/api/migrations/schema/V1.17.00__evaluation_del_cascade.sql new file mode 100644 index 00000000..831e184d --- /dev/null +++ b/api/migrations/schema/V1.17.00__evaluation_del_cascade.sql @@ -0,0 +1,8 @@ +ALTER TABLE evaluation_instrument +DROP CONSTRAINT evaluation_instrument_evaluation_id_fkey; + +ALTER TABLE evaluation_instrument +ADD CONSTRAINT evaluation_instrument_evaluation_id_fkey +FOREIGN KEY (evaluation_id) +REFERENCES evaluation(id) +ON DELETE CASCADE; diff --git a/api/queries/alert.sql b/api/queries/alert.sql index 752d1ba9..f1ceab8e 100644 --- a/api/queries/alert.sql +++ b/api/queries/alert.sql @@ -1,16 +1,16 @@ --- name: CreateAlert :exec +-- name: AlertCreate :exec insert into alert (alert_config_id) values ($1); --- name: CreateAlerts :batchexec +-- name: AlertCreateBatch :batchexec insert into alert (alert_config_id) values ($1); --- name: ListAlertsForProject :many +-- name: AlertListForProject :many select * from v_alert where project_id = $1; --- name: ListAlertsForInstrument :many +-- name: AlertListForInstrument :many select * from v_alert where alert_config_id = any( select id from alert_config_instrument @@ -18,7 +18,7 @@ where alert_config_id = any( ); --- name: 
ListAlertsForProfile :many +-- name: AlertListForProfile :many select a.*, case when r.alert_id is not null then true else false end as read @@ -28,7 +28,7 @@ inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_confi where aps.profile_id = $1; --- name: GetAlert :one +-- name: AlertGet :one select a.*, case when r.alert_id is not null then true else false end as read @@ -39,10 +39,10 @@ where aps.profile_id = $1 and a.id = $2; --- name: CreateAlertRead :exec +-- name: AlertReadCreate :exec insert into alert_read (profile_id, alert_id) values ($1, $2) on conflict do nothing; --- name: DeleteAlertRead :exec +-- name: AlertReadDelete :exec delete from alert_read where profile_id = $1 and alert_id = $2; diff --git a/api/queries/alert_check.sql b/api/queries/alert_check.sql index 69a39dde..f16abcf6 100644 --- a/api/queries/alert_check.sql +++ b/api/queries/alert_check.sql @@ -1,4 +1,4 @@ --- name: ListAndCheckAlertConfigs :many +-- name: AlertConfigListUpdateLastChecked :many update alert_config ac1 set last_checked = now() from ( @@ -9,11 +9,11 @@ where ac1.id = ac2.id returning ac2.*; --- name: UpdateAlertConfigLastReminded :exec +-- name: AlertConfigUpdateLastReminded :exec update alert_config set last_reminded = $2 where id = $1; --- name: UpdateSubmittalCompletionDateOrWarningSent :exec +-- name: SubmittalUpdateCompletionDateOrWarningSent :exec update submittal set submittal_status_id = $2, completion_date = $3, @@ -21,7 +21,7 @@ update submittal set where id = $1; --- name: CreateNextSubmittalFromNewAlertConfigDate :exec +-- name: SubmittalCreateNextFromNewAlertConfigDate :exec insert into submittal (alert_config_id, create_date, due_date) select ac.id, diff --git a/api/queries/alert_config.sql b/api/queries/alert_config.sql index e87e732c..9800882d 100644 --- a/api/queries/alert_config.sql +++ b/api/queries/alert_config.sql @@ -1,11 +1,11 @@ --- name: ListAlertConfigsForProject :many +-- name: AlertConfigListForProject :many select * from 
v_alert_config where project_id = $1 order by name; --- name: ListAlertConfigsForProjectAlertType :many +-- name: AlertConfigListForProjectAlertType :many select * from v_alert_config where project_id = $1 @@ -13,7 +13,7 @@ and alert_type_id = $2 order by name; --- name: ListAlertConfigsForInstrument :many +-- name: AlertConfigListForInstrument :many select t.* from v_alert_config t inner join alert_config_instrument aci on t.id = aci.alert_config_id @@ -21,11 +21,11 @@ where aci.instrument_id = $1 order by t.name; --- name: GetAlertConfig :one +-- name: AlertConfigGet :one select * from v_alert_config where id = $1; --- name: CreateAlertConfig :one +-- name: AlertConfigCreate :one insert into alert_config ( project_id, name, @@ -42,22 +42,22 @@ insert into alert_config ( returning id; --- name: AssignInstrumentToAlertConfig :exec +-- name: AlertConfigInstrumentCreateAssignment :exec insert into alert_config_instrument (alert_config_id, instrument_id) values ($1, $2); --- name: UnassignAllInstrumentsFromAlertConfig :exec +-- name: AlertConfigInstrumentDeleteAssignmentsForAlertConfig :exec delete from alert_config_instrument where alert_config_id = $1; --- name: CreateNextSubmittalFromExistingAlertConfigDate :exec +-- name: SubmittalCreateNextFromExistingAlertConfigDate :exec insert into submittal (alert_config_id, due_date) select ac.id, ac.create_date + ac.schedule_interval from alert_config ac where ac.id = $1; --- name: UpdateAlertConfig :exec +-- name: AlertConfigUpdate :exec update alert_config set name = $3, body = $4, @@ -71,7 +71,7 @@ update alert_config set where id = $1 and project_id = $2; --- name: UpdateFutureSubmittalForAlertConfig :one +-- name: SubmittalUpdateNextForAlertConfig :one update submittal set due_date = sq.new_due_date from ( @@ -90,5 +90,5 @@ and sq.new_due_date > now() returning id; --- name: DeleteAlertConfig :exec +-- name: AlertConfigDelete :exec update alert_config set deleted=true where id = $1; diff --git 
a/api/queries/alert_measurement_check.sql b/api/queries/alert_measurement_check.sql index 87c8b244..62a42cf5 100644 --- a/api/queries/alert_measurement_check.sql +++ b/api/queries/alert_measurement_check.sql @@ -1,4 +1,4 @@ --- name: ListIncompleteEvaluationSubmittals :many +-- name: SubmittalListIncompleteEvaluation :many select * from v_alert_check_evaluation_submittal where submittal_id = any( select id from submittal @@ -6,7 +6,7 @@ where submittal_id = any( ); --- name: ListIncompleteMeasurementSubmittals :many +-- name: SubmittalListIncompleteMeasurement :many select * from v_alert_check_measurement_submittal where submittal_id = any( select id from submittal diff --git a/api/queries/alert_subscription.sql b/api/queries/alert_subscription.sql index ef50c8ce..150b6e3e 100644 --- a/api/queries/alert_subscription.sql +++ b/api/queries/alert_subscription.sql @@ -1,30 +1,30 @@ --- name: CreateAlertProfileSubscriptionOnAnyConflictDoNothing :exec +-- name: AlertProfileSubscriptionCreateOnAnyConflictDoNothing :exec insert into alert_profile_subscription (alert_config_id, profile_id) values ($1, $2) on conflict do nothing; --- name: DeleteAlertProfileSubscription :exec +-- name: AlertProfileSubscriptionDelete :exec delete from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; --- name: GetAlertSubscription :one +-- name: AlertSubscriptionGet :one select * from alert_profile_subscription where id = $1; --- name: GetAlertSubscriptionForAlertConfig :one +-- name: AlertSubscriptionGetForAlertConfigProfile :one select * from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; --- name: ListMyAlertSubscriptions :many +-- name: AlertSubscriptionListForProfile :many select * from alert_profile_subscription where profile_id = $1; --- name: UpdateMyAlertSubscription :exec +-- name: AlertSubscriptionUpdateForProfile :exec update alert_profile_subscription set mute_ui=$1, mute_notify=$2 where alert_config_id=$3 and profile_id=$4; 
--- name: RegisterEmail :one +-- name: EmailGetOrCreate :one with e as ( insert into email (email) values ($1) on conflict on constraint unique_email do nothing @@ -36,27 +36,27 @@ select id from email where email = $1 limit 1; --- name: UnregisterEmail :exec +-- name: EmailDelete :exec delete from email where id = $1; --- name: CreateAlertEmailSubscription :exec +-- name: AlertEmailSubscriptionCreate :exec insert into alert_email_subscription (alert_config_id, email_id) values ($1,$2) on conflict on constraint email_unique_alert_config do nothing; --- name: CreateAlertProfileSubscription :exec +-- name: AlertProfileSubscriptionCreate :exec insert into alert_profile_subscription (alert_config_id, profile_id) values ($1,$2) on conflict on constraint profile_unique_alert_config do nothing; --- name: DeleteAlertEmailSubscription :exec +-- name: AlertEmailSubscriptionDelete :exec delete from alert_email_subscription where alert_config_id = $1 and email_id = $2; --- name: DeleteAllAlertEmailSubscritpionsForAlertConfig :exec +-- name: AlertEmailSubscritpionDeleteForAlertConfig :exec delete from alert_email_subscription where alert_config_id = $1; --- name: DeleteAllAlertProfileSubscritpionsForAlertConfig :exec +-- name: AlertProfileSubscritpionDeleteForAlertConfig :exec delete from alert_profile_subscription where alert_config_id = $1; diff --git a/api/queries/autocomplete.sql b/api/queries/autocomplete.sql index 12bbdfe6..b71be996 100644 --- a/api/queries/autocomplete.sql +++ b/api/queries/autocomplete.sql @@ -1,5 +1,5 @@ --- name: ListEmailAutocomplete :many +-- name: EmailAutocompleteList :many select id, user_type, username, email from v_email_autocomplete -where username_email ilike '%'||$1||'%' -limit $2; +where username_email ilike '%'||sqlc.arg(search_keyword)||'%' +limit sqlc.arg(result_limit); diff --git a/api/queries/aware.sql b/api/queries/aware.sql index c0424375..75659c8c 100644 --- a/api/queries/aware.sql +++ b/api/queries/aware.sql @@ -1,16 +1,16 @@ --- 
name: ListAwareParameters :many +-- name: AwareParameterList :many select id, key, parameter_id, unit_id from aware_parameter; --- name: ListAwarePlatformParameterEnabled :many +-- name: AwarePlatformParameterListEnabled :many select instrument_id, aware_id, aware_parameter_key, timeseries_id from v_aware_platform_parameter_enabled order by aware_id, aware_parameter_key; --- name: CreateAwarePlatform :exec +-- name: AwarePlatformCreate :exec insert into aware_platform (instrument_id, aware_id) values ($1, $2); --- name: CreateAwarePlatformBatch :batchexec +-- name: AwarePlatformCreateBatch :batchexec insert into aware_platform (instrument_id, aware_id) values ($1, $2); diff --git a/api/queries/collection_group.sql b/api/queries/collection_group.sql index 93dcdeb8..b96dbd33 100644 --- a/api/queries/collection_group.sql +++ b/api/queries/collection_group.sql @@ -1,33 +1,36 @@ --- name: ListCollectionGroupsForProject :many -select id, project_id, slug, name, creator, create_date, updater, update_date -from collection_group -where project_id = $1; +-- name: CollectionGroupListForProject :many +select * from collection_group where project_id = $1; --- name: GetCollectionGroupDetails :one +-- name: CollectionGroupDetailsGet :one select * from v_collection_group_details where id = $1; --- name: CreateCollectionGroup :one -insert into collection_group (project_id, name, slug, creator, create_date, updater, update_date) -values ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5, $6) -returning id, project_id, name, slug, creator, create_date, updater, update_date; +-- name: CollectionGroupCreate :one +insert into collection_group (project_id, name, slug, creator, create_date, sort_order) +values (sqlc.arg(project_id), sqlc.arg(name)::varchar, slugify(sqlc.arg(name)::varchar, 'collection_group'), sqlc.arg(creator), sqlc.arg(create_date), sqlc.arg(sort_order)) +returning *; --- name: UpdateCollectionGroup :one -update collection_group set name=$3, 
updater=$4, update_date=$5 +-- name: CollectionGroupUpdate :one +update collection_group set name=$3, updater=$4, update_date=$5, sort_order=$6 where project_id=$1 and id=$2 -returning id, project_id, name, slug, creator, create_date, updater, update_date; +returning *; --- name: DeleteCollectionGroup :exec +-- name: CollectionGroupDelete :exec delete from collection_group where project_id=$1 and id=$2; --- name: AddTimeseriesToCollectionGroup :exec -insert into collection_group_timeseries (collection_group_id, timeseries_id) values ($1, $2) +-- name: CollectionGroupTimeseriesCreate :exec +insert into collection_group_timeseries (collection_group_id, timeseries_id, sort_order) values ($1, $2, $3) on conflict on constraint collection_group_unique_timeseries do nothing; --- name: RemoveTimeseriesFromCollectionGroup :exec +-- name: CollectionGroupTimeseriesUpdateSortOrder :exec +update collection_group_timeseries set sort_order=$3 +where collection_group_id=$1 and timeseries_id=$2; + + +-- name: CollectionGroupTimeseriesDelete :exec delete from collection_group_timeseries where collection_group_id=$1 and timeseries_id = $2; diff --git a/api/queries/datalogger.sql b/api/queries/datalogger.sql index 02189a69..cfa1ac90 100644 --- a/api/queries/datalogger.sql +++ b/api/queries/datalogger.sql @@ -1,75 +1,75 @@ --- name: GetDataloggerModelName :one -select model from datalogger_model where id = $1; +-- name: DataloggerGetModelName :one +select model from datalogger_model where id=$1; --- name: ListDataloggersForProject :many -select * from v_datalogger where project_id = $1; +-- name: DataloggerListForProject :many +select * from v_datalogger where project_id=$1; --- name: ListAllDataloggers :many +-- name: DataloggerList :many select * from v_datalogger; --- name: GetDataloggerIsActive :one -select exists (select * from v_datalogger where model = $1 and sn = $2)::int; +-- name: DataloggerGetActive :one +select exists (select true from v_datalogger where model=$1 and 
sn=$2); --- name: VerifyDataloggerExists :one -select true from v_datalogger where id = $1; +-- name: DataloggerGetExists :one +select true from v_datalogger where id=$1; --- name: CreateDataloggerHash :exec +-- name: DataloggerHashCreate :exec insert into datalogger_hash (datalogger_id, "hash") values ($1, $2); --- name: GetDatalogger :one -select * from v_datalogger where id = $1; +-- name: DataloggerGet :one +select * from v_datalogger where id=$1; --- name: CreateDatalogger :one +-- name: DataloggerCreate :one insert into datalogger (name, sn, project_id, creator, updater, slug, model_id) values ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) returning id; --- name: UpdateDatalogger :exec +-- name: DataloggerUpdate :exec update datalogger set - name = $2, - updater = $3, - update_date = $4 -where id = $1; + name=$2, + updater=$3, + update_date=$4 +where id=$1; --- name: UpdateDataloggerHash :exec -update datalogger_hash set "hash" = $2 where datalogger_id = $1; +-- name: DataloggerHashUpdate :exec +update datalogger_hash set "hash"=$2 where datalogger_id=$1; --- name: UpdateDataloggerUpdater :exec -update datalogger set updater = $2, update_date = $3 where id = $1; +-- name: DataloggerUpdateUpdater :exec +update datalogger set updater=$2, update_date=$3 where id=$1; --- name: DeleteDatalogger :exec -update datalogger set deleted = true, updater = $2, update_date = $3 where id = $1; +-- name: DataloggerDelete :exec +update datalogger set deleted=true, updater=$2, update_date=$3 where id=$1; --- name: GetDataloggerTablePreview :one -select * from v_datalogger_preview where datalogger_table_id = $1 limit 1; +-- name: DataloggerTablePreviewGet :one +select * from v_datalogger_preview where datalogger_table_id=$1 limit 1; --- name: ResetDataloggerTableName :exec -update datalogger_table set table_name = '' where id = $1; +-- name: DataloggerUpdateTableNameBlank :exec +update datalogger_table set table_name='' where id=$1; --- name: RenameEmptyDataloggerTableName 
:exec +-- name: DataloggerTableUpdateNameIfEmpty :exec update datalogger_table dt -set table_name = $2 -where dt.table_name = '' and dt.datalogger_id = $1 +set table_name=$2 +where dt.table_name='' and dt.datalogger_id=$1 and not exists ( - select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 + select 1 from datalogger_table sdt where sdt.datalogger_id=$1 and sdt.table_name=$2 ); --- name: GetOrCreateDataloggerTable :one +-- name: DataloggerTableGetOrCreate :one with new_datalogger_table as ( insert into datalogger_table (datalogger_id, table_name) values ($1, $2) on conflict on constraint datalogger_table_datalogger_id_table_name_key do nothing @@ -77,8 +77,8 @@ with new_datalogger_table as ( ) select ndt.id from new_datalogger_table ndt union -select sdt.id from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2; +select sdt.id from datalogger_table sdt where sdt.datalogger_id=$1 and sdt.table_name=$2; --- name: DeleteDataloggerTable :exec -delete from datalogger_table where id = $1; +-- name: DataloggerTableDelete :exec +delete from datalogger_table where id=$1; diff --git a/api/queries/datalogger_telemetry.sql b/api/queries/datalogger_telemetry.sql index 4c2ec97b..d31a3b4e 100644 --- a/api/queries/datalogger_telemetry.sql +++ b/api/queries/datalogger_telemetry.sql @@ -1,30 +1,39 @@ --- name: GetDataloggerByModelSN :one +-- name: DataloggerGetForModelSn :one select * from v_datalogger where model = $1 and sn = $2 limit 1; --- name: GetDataloggerHashByModelSN :one +-- name: DataloggerHashGetForModelSn :one select "hash" from v_datalogger_hash where model = $1 and sn = $2 limit 1; --- name: CreateDataloggerTablePreview :exec +-- name: DataloggerTablePreviewCreate :exec insert into datalogger_preview (datalogger_table_id, preview, update_date) values ($1, $2, $3); --- name: UpdateDataloggerTablePreview :exec +-- name: DataloggerTablePreviewUpdate :exec update datalogger_preview set preview = $3, update_date 
= $4 where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); --- name: DeleteDataloggerTableError :exec +-- name: DataloggerErrorDelete :exec delete from datalogger_error where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); --- name: CreateDataloggerError :exec +-- name: DataloggerErrorCreate :exec +insert into datalogger_error (datalogger_table_id, error_message) +select dt.id, $3 from datalogger_table dt +where dt.datalogger_id = $1 and dt.table_name = $2 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +); + + +-- name: DataloggerErrorCreateBatch :batchexec insert into datalogger_error (datalogger_table_id, error_message) select dt.id, $3 from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2 diff --git a/api/queries/district_rollup.sql b/api/queries/district_rollup.sql index c9f7e951..6cfae010 100644 --- a/api/queries/district_rollup.sql +++ b/api/queries/district_rollup.sql @@ -1,14 +1,14 @@ --- name: ListEvaluationDistrictRollupsForProjectAlertConfig :many +-- name: DistrictRollupListEvaluationForProjectAlertConfig :many select * from v_district_rollup where alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid -and project_id = $1 -and the_month >= date_trunc('month', $2::timestamptz) -and the_month <= date_trunc('month', $3::timestamptz); +and project_id=sqlc.arg(project_id) +and the_month >= date_trunc('month', sqlc.arg(start_month_time)::timestamptz) +and the_month <= date_trunc('month', sqlc.arg(end_month_time)::timestamptz); --- name: ListMeasurementDistrictRollupsForProjectAlertConfig :many +-- name: DistrictRollupListMeasurementForProjectAlertConfig :many select * from v_district_rollup where alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::uuid -and project_id = $1 -and the_month >= date_trunc('month', $2::timestamptz) -and 
the_month <= date_trunc('month', $3::timestamptz); +and project_id=sqlc.arg(project_id) +and the_month >= date_trunc('month', sqlc.arg(start_month_time)::timestamptz) +and the_month <= date_trunc('month', sqlc.arg(end_month_time)::timestamptz); diff --git a/api/queries/domain.sql b/api/queries/domain.sql new file mode 100644 index 00000000..3a3f781e --- /dev/null +++ b/api/queries/domain.sql @@ -0,0 +1,10 @@ +-- name: PgTimezoneNamesList :many +select name, abbrev, utc_offset::text, is_dst from pg_catalog.pg_timezone_names; + + +-- name: DomainList :many +select * from v_domain; + + +-- name: DomainGroupList :many +select * from v_domain_group; diff --git a/api/queries/domains.sql b/api/queries/domains.sql deleted file mode 100644 index 5514bcf5..00000000 --- a/api/queries/domains.sql +++ /dev/null @@ -1,6 +0,0 @@ --- name: ListDomains :many -select * from v_domain; - - --- name: ListDomainGroups :many -select * from v_domain_group; diff --git a/api/queries/equivalency_table.sql b/api/queries/equivalency_table.sql index 283f57f3..e1a8d0cd 100644 --- a/api/queries/equivalency_table.sql +++ b/api/queries/equivalency_table.sql @@ -1,10 +1,8 @@ --- name: GetIsValidDataloggerTable :one -select not exists ( - select * from datalogger_table where id = $1 and table_name = 'preparse' -); +-- name: DataloggerTableGetIsValid :one +select not exists (select * from datalogger_table where id = $1 and table_name = 'preparse'); --- name: GetIsValidEquivalencyTableTimeseries :one +-- name: EquivalencyTableTimeseriesGetIsValid :one select not exists ( select id from v_timeseries_computed where id = $1 @@ -14,7 +12,7 @@ select not exists ( ); --- name: GetEquivalencyTable :one +-- name: EquivalencyTableGet :one select datalogger_id, datalogger_table_id, @@ -24,7 +22,7 @@ from v_datalogger_equivalency_table where datalogger_table_id = $1; --- name: CreateOrUpdateEquivalencyTableRow :exec +-- name: EquivalencyTableCreateOrUpdate :exec insert into datalogger_equivalency_table 
(datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) values ($1, $2, $3, $4, $5, $6) @@ -32,7 +30,7 @@ on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id; --- name: UpdateEquivalencyTableRow :exec +-- name: EquivalencyTableUpdate :exec update datalogger_equivalency_table set field_name = $2, display_name = $3, @@ -41,9 +39,9 @@ update datalogger_equivalency_table set where id = $1; --- name: DeleteEquivalencyTable :exec +-- name: EquivalencyTableDeleteForDataloggerTable :exec delete from datalogger_equivalency_table where datalogger_table_id = $1; --- name: DeleteEquivalencyTableRow :exec +-- name: EquivalencyTableDelete :exec delete from datalogger_equivalency_table where id = $1; diff --git a/api/queries/evaluation.sql b/api/queries/evaluation.sql index 547c9224..69b9c0d7 100644 --- a/api/queries/evaluation.sql +++ b/api/queries/evaluation.sql @@ -1,30 +1,30 @@ --- name: ListProjectEvaluations :many +-- name: EvaluationListForProject :many select * from v_evaluation -where project_id = $1; +where project_id=$1; --- name: ListProjectEvaluationsByAlertConfig :many +-- name: EvaluationListForProjectAlertConfig :many select * from v_evaluation -where project_id = $1 +where project_id=$1 and alert_config_id is not null -and alert_config_id = $2; +and alert_config_id=$2; --- name: ListInstrumentEvaluations :many +-- name: EvaluationListForInstrument :many select * from v_evaluation where id = any( select evaluation_id from evaluation_instrument - where instrument_id = $1 + where instrument_id=$1 ); --- name: GetEvaluation :one -select * from v_evaluation where id = $1; +-- name: EvaluationGet :one +select * from v_evaluation where id=$1; --- name: CompleteEvaluationSubmittal :one +-- name: SubmittalUpdateCompleteEvaluation :one update submittal sub1 set 
submittal_status_id = sq.submittal_status_id, completion_date = now() @@ -39,7 +39,7 @@ from ( end as submittal_status_id from submittal sub2 inner join alert_config ac on sub2.alert_config_id = ac.id - where sub2.id = $1 + where sub2.id=$1 and sub2.completion_date is null and not sub2.marked_as_missing and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid @@ -48,16 +48,16 @@ where sub1.id = sq.submittal_id returning sub1.*; --- name: CreateNextEvaluationSubmittal :exec +-- name: SubmittalCreateNextEvaluation :exec insert into submittal (alert_config_id, due_date) select ac.id, now() + ac.schedule_interval from alert_config ac -where ac.id in (select sub.alert_config_id from submittal sub where sub.id = $1); +where ac.id in (select sub.alert_config_id from submittal sub where sub.id=$1); --- name: CreateEvaluation :one +-- name: EvaluationCreate :one insert into evaluation ( project_id, submittal_id, @@ -71,15 +71,15 @@ insert into evaluation ( returning id; --- name: CreateEvaluationInstrument :exec +-- name: EvaluationInstrumentCreate :exec insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2); --- name: CreateEvaluationInstrumentsBatch :batchexec +-- name: EvaluationInstrumentCreateBatch :batchexec insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2); --- name: UpdateEvaluation :exec +-- name: EvaluationUpdate :exec update evaluation set name=$3, body=$4, @@ -90,8 +90,8 @@ update evaluation set where id=$1 and project_id=$2; --- name: UnassignAllInstrumentsFromEvaluation :exec -delete from evaluation_instrument where evaluation_id = $1; +-- name: EvaluationInstrumentDeleteForEvaluation :exec +delete from evaluation_instrument where evaluation_id=$1; --- name: DeleteEvaluation :exec -delete from evaluation where id = $1; +-- name: EvaluationDelete :exec +delete from evaluation where id=$1; diff --git a/api/queries/heartbeat.sql b/api/queries/heartbeat.sql index b93a4293..20890d7a 100644 --- 
a/api/queries/heartbeat.sql +++ b/api/queries/heartbeat.sql @@ -1,10 +1,12 @@ --- name: CreateHeartbeat :one +-- name: HeartbeatCreate :one insert into heartbeat (time) values ($1) returning *; --- name: GetLatestHeartbeat :one -select max(time) as time from heartbeat; +-- name: HeartbeatGetLatest :one +select max(time)::timestamptz from heartbeat; --- name: ListHeartbeats :many -select * from heartbeat; +-- name: HeartbeatList :many +select * from heartbeat +order by time desc +limit sqlc.arg(result_limit); diff --git a/api/queries/home.sql b/api/queries/home.sql index ec8ef97f..3632b146 100644 --- a/api/queries/home.sql +++ b/api/queries/home.sql @@ -1,4 +1,4 @@ --- name: GetHome :one +-- name: HomeGet :one select (select count(*) from instrument where not deleted) as instrument_count, (select count(*) from project where not deleted) as project_count, diff --git a/api/queries/instrument.sql b/api/queries/instrument.sql index e7dd2bc0..ff3f749a 100644 --- a/api/queries/instrument.sql +++ b/api/queries/instrument.sql @@ -1,44 +1,51 @@ --- name: ListInstruments :many +-- name: InstrumentList :many select * from v_instrument where not deleted; --- name: ListInstrumentsForProject :many +-- name: InstrumentListForProject :many select i.* from v_instrument i inner join project_instrument pi on pi.instrument_id = i.id where pi.project_id = $1; --- name: GetInstrument :one +-- name: InstrumentListForInstrumentGroup :many +select i.* +from v_instrument i +inner join instrument_group_instruments igi on igi.instrument_id = i.id +where instrument_group_id = $1; + + +-- name: InstrumentGet :one select * from v_instrument where not deleted -and id=$1; +and id = $1; --- name: GetInstrumentCount :one +-- name: InstrumentGetCount :one select count(*) from instrument where not deleted; --- name: CreateInstrument :one +-- name: InstrumentCreate :one insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) 
values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) returning id, slug; --- name: CreateInstrumentsBatch :batchone +-- name: InstrumentCreateBatch :batchone insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) returning id, slug; --- name: ListInstrumentProjects :many +-- name: ProjectInstrumentListProjectIDForInstrument :many select project_id from project_instrument where instrument_id = $1; --- name: ListProjectCountForInstruments :many +-- name: ProjectInstrumentListCountByInstrument :many select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count from project_instrument pi inner join instrument i on pi.instrument_id = i.id @@ -47,18 +54,18 @@ group by pi.instrument_id, i.name order by i.name; --- name: UpdateInstrument :exec +-- name: InstrumentUpdate :exec update instrument set - name = $3, - type_id = $4, - geometry = $5, - updater = $6, - update_date = $7, - station = $8, - station_offset = $9, - nid_id = $10, - usgs_id = $11, - show_cwms_tab = $12 + name=$3, + type_id=$4, + geometry=$5, + updater=$6, + update_date=$7, + station=$8, + station_offset=$9, + nid_id=$10, + usgs_id=$11, + show_cwms_tab=$12 where id = $2 and id in ( select instrument_id @@ -67,11 +74,11 @@ and id in ( ); --- name: UpdateInstrumentGeometry :one +-- name: InstrumentUpdateGeometry :one update instrument set - geometry = $3, - updater = $4, - update_date = now() + geometry=$3, + updater=$4, + update_date=now() where id = $2 and id in ( select instrument_id @@ -81,8 +88,8 @@ and id in ( returning id; --- name: DeleteFlagInstrument :exec -update instrument set deleted = true +-- name: InstrumentDeleteFlag :exec +update instrument set deleted=true where id = any( select instrument_id from project_instrument @@ -91,7 +98,7 @@ where id = any( and id = $2; --- name: 
ListInstrumentIDNamesByIDs :many +-- name: InstrumentIDNameListByIDs :many select id, name from instrument where id in (sqlc.arg(instrument_ids)::uuid[]) diff --git a/api/queries/instrument_assign.sql b/api/queries/instrument_assign.sql index a1ffbfec..abd78a4a 100644 --- a/api/queries/instrument_assign.sql +++ b/api/queries/instrument_assign.sql @@ -1,22 +1,22 @@ --- name: AssignInstrumentToProject :exec +-- name: ProjectInstrumentCreate :exec insert into project_instrument (project_id, instrument_id) values ($1, $2) on conflict on constraint project_instrument_project_id_instrument_id_key do nothing; --- name: AssignInstrumentToProjectBatch :batchexec +-- name: ProjectInstrumentCreateBatch :batchexec insert into project_instrument (project_id, instrument_id) values ($1, $2) on conflict on constraint project_instrument_project_id_instrument_id_key do nothing; --- name: UnassignInstrumentFromProject :exec +-- name: ProjectInstrumentDelete :exec delete from project_instrument where project_id = $1 and instrument_id = $2; --- name: UnassignInstrumentFromProjectBatch :batchexec +-- name: ProjectInstrumentDeleteBatch :batchexec delete from project_instrument where project_id = $1 and instrument_id = $2; --- name: ValidateInstrumentNamesProjectUnique :many +-- name: ProjectInstrumentListForProjectInstrumentNames :many select i.name from project_instrument pi inner join instrument i on pi.instrument_id = i.id @@ -25,7 +25,7 @@ and i.name in (sqlc.arg(instrument_names)::text[]) and not i.deleted; --- name: ValidateProjectsInstrumentNameUnique :many +-- name: ProjectInstrumentListForInstrumentNameProjects :many select i.name instrument_name from project_instrument pi inner join instrument i on pi.instrument_id = i.id @@ -36,7 +36,7 @@ and not i.deleted order by pi.project_id; --- name: ValidateInstrumentsAssignerAuthorized :many +-- name: ProjectInstrumentListForInstrumentsProfileAdmin :many select p.name as project_name, i.name as instrument_name from project_instrument 
pi inner join project p on pi.project_id = p.id @@ -50,7 +50,7 @@ and not exists ( and not i.deleted; --- name: ValidateProjectsAssignerAuthorized :many +-- name: ProjectInstrumentListForInstrumentProjectsProfileAdmin :many select p.name from project_instrument pi inner join project p on pi.project_id = p.id diff --git a/api/queries/instrument_constant.sql b/api/queries/instrument_constant.sql index 75654c4d..58e3b112 100644 --- a/api/queries/instrument_constant.sql +++ b/api/queries/instrument_constant.sql @@ -1,16 +1,16 @@ --- name: ListInstrumentConstants :many +-- name: InstrumentConstantList :many select t.* from v_timeseries t inner join instrument_constants ic on ic.timeseries_id = t.id where ic.instrument_id = $1; --- name: CreateInstrumentConstant :exec +-- name: InstrumentConstantCreate :exec insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2); --- name: CreateInstrumentConstantBatch :batchexec +-- name: InstrumentConstantCreateBatch :batchexec insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2); --- name: DeleteInstrumentConstant :exec +-- name: InstrumentConstantDelete :exec delete from instrument_constants where instrument_id = $1 and timeseries_id = $2; diff --git a/api/queries/instrument_group.sql b/api/queries/instrument_group.sql index c5385219..9b3e8b28 100644 --- a/api/queries/instrument_group.sql +++ b/api/queries/instrument_group.sql @@ -1,60 +1,52 @@ --- name: ListInstrumentGroups :many +-- name: InstrumentGroupList :many select * from v_instrument_group where not deleted; --- name: GetInstrumentGroup :many +-- name: InstrumentGroupGet :many select * from v_instrument_group where not deleted and id=$1; --- name: ListInstrumentGroupsForProject :many +-- name: InstrumentGroupListForProject :many select ig.* from v_instrument_group ig where ig.project_id = $1; --- name: CreateInstrumentGroup :one +-- name: InstrumentGroupCreate :one insert into instrument_group (slug, name, description, 
creator, create_date, project_id) values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) -returning id, slug, name, description, creator, create_date, updater, update_date, project_id; +returning *; --- name: CreateInstrumentGroupsBatch :batchone +-- name: InstrumentGroupCreateBatch :batchone insert into instrument_group (slug, name, description, creator, create_date, project_id) values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) -returning id, slug, name, description, creator, create_date, updater, update_date, project_id; +returning *; --- name: UpdateInstrumentGroup :one +-- name: InstrumentGroupUpdate :one update instrument_group set name = $2, - deleted = $3, - description = $4, - updater = $5, - update_date = $6, - project_id = $7 + description = $3, + updater = $4, + update_date = $5, + project_id = $6 where id = $1 returning *; --- name: DeleteFlagInstrumentGroup :exec +-- name: InstrumentGroupDeleteFlag :exec update instrument_group set deleted = true where id = $1; --- name: ListInstrumentGroupInstruments :many -select inst.* -from instrument_group_instruments igi -inner join v_instrument_group on igi.instrument_id = inst.id -where igi.instrument_group_id = $1 and inst.deleted = false; - - --- name: CreateInstrumentGroupInstruments :exec +-- name: InstrumentGroupInstrumentCreate :exec insert into instrument_group_instruments (instrument_group_id, instrument_id) values ($1, $2); --- name: DeleteInstrumentGroupInstruments :exec +-- name: InstrumentGroupInstrumentDelete :exec delete from instrument_group_instruments where instrument_group_id = $1 and instrument_id = $2; diff --git a/api/queries/instrument_incl.sql b/api/queries/instrument_incl.sql index 63d932ac..0e792127 100644 --- a/api/queries/instrument_incl.sql +++ b/api/queries/instrument_incl.sql @@ -1,32 +1,32 @@ --- name: CreateInclOpts :exec +-- name: InclOptsCreate :exec insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, 
$2, $3, $4); --- name: CreateInclOptsBatch :batchexec +-- name: InclOptsCreateBatch :batchexec insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4); --- name: UpdateInclOpts :exec +-- name: InclOptsUpdate :exec update incl_opts set bottom_elevation_timeseries_id = $2, initial_time = $3 where instrument_id = $1; --- name: UpdateInclOptsBatch :batchexec +-- name: InclOptsUpdateBatch :batchexec update incl_opts set bottom_elevation_timeseries_id = $2, initial_time = $3 where instrument_id = $1; --- name: GetAllInclSegmentsForInstrument :many +-- name: InclSegmentListForInstrument :many select * from v_incl_segment where instrument_id = $1; --- name: CreateInclSegment :exec +-- name: InclSegmentCreate :exec insert into incl_segment ( id, instrument_id, @@ -38,7 +38,7 @@ insert into incl_segment ( ) values ($1, $2, $3, $4, $5, $6, $7); --- name: CreateInclSegmentBatch :batchexec +-- name: InclSegmentCreateBatch :batchexec insert into incl_segment ( id, instrument_id, @@ -50,7 +50,7 @@ insert into incl_segment ( ) values ($1, $2, $3, $4, $5, $6, $7); --- name: UpdateInclSegment :exec +-- name: InclSegmentUpdate :exec update incl_segment set depth_timeseries_id=$3, a0_timeseries_id=$4, @@ -60,7 +60,7 @@ update incl_segment set where id = $1 and instrument_id = $2; --- name: UpdateInclSegmentsBatch :batchexec +-- name: InclSegmentUpdateBatch :batchexec update incl_segment set depth_timeseries_id=$3, a0_timeseries_id=$4, @@ -70,13 +70,13 @@ update incl_segment set where id = $1 and instrument_id = $2; --- name: GetInclMeasurementsForInstrument :many +-- name: InclMeasurementListForInstrumentRange :many select m1.instrument_id, m1.time, m1.measurements from v_incl_measurement m1 -where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +where m1.instrument_id=sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_time) and m1.time <= sqlc.arg(end_time) union select m2.instrument_id, m2.time, 
m2.measurements from v_incl_measurement m2 -where m2.time in (select o.initial_time from incl_opts o where o.instrument_id = $1) -and m2.instrument_id = $1 +where m2.time in (select o.initial_time from incl_opts o where o.instrument_id = sqlc.arg(instrument_id)) +and m2.instrument_id = sqlc.arg(instrument_id) order by time asc; diff --git a/api/queries/instrument_ipi.sql b/api/queries/instrument_ipi.sql index 1de093d5..e0edfab0 100644 --- a/api/queries/instrument_ipi.sql +++ b/api/queries/instrument_ipi.sql @@ -1,32 +1,32 @@ --- name: CreateIpiOpts :exec +-- name: IpiOptsCreate :exec insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4); --- name: CreateIpiOptsBatch :batchexec +-- name: IpiOptsCreateBatch :batchexec insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4); --- name: UpdateIpiOpts :exec +-- name: IpiOptsUpdate :exec update ipi_opts set bottom_elevation_timeseries_id = $2, initial_time = $3 where instrument_id = $1; --- name: UpdateIpiOptsBatch :batchexec +-- name: IpiOptsUpdateBatch :batchexec update ipi_opts set bottom_elevation_timeseries_id = $2, initial_time = $3 where instrument_id = $1; --- name: GetAllIpiSegmentsForInstrument :many +-- name: IpiSegmentListForInstrument :many select * from v_ipi_segment where instrument_id = $1; --- name: CreateIpiSegment :exec +-- name: IpiSegmentCreate :exec insert into ipi_segment ( id, instrument_id, @@ -37,7 +37,7 @@ insert into ipi_segment ( ) values ($1, $2, $3, $4, $5, $6); --- name: CreateIpiSegmentBatch :batchexec +-- name: IpiSegmentCreateBatch :batchexec insert into ipi_segment ( id, instrument_id, @@ -48,7 +48,7 @@ insert into ipi_segment ( ) values ($1, $2, $3, $4, $5, $6); --- name: UpdateIpiSegment :exec +-- name: IpiSegmentUpdate :exec update ipi_segment set length_timeseries_id = $3, tilt_timeseries_id = $4, @@ -57,7 +57,7 @@ update ipi_segment set where id = $1 
and instrument_id = $2; --- name: UpdateIpiSegmentsBatch :batchexec +-- name: IpiSegmentUpdateBatch :batchexec update ipi_segment set length_timeseries_id = $3, tilt_timeseries_id = $4, @@ -66,13 +66,13 @@ update ipi_segment set where id = $1 and instrument_id = $2; --- name: GetIpiMeasurementsForInstrument :many +-- name: IpiMeasurementListForInstrumentRange :many select m1.instrument_id, m1.time, m1.measurements from v_ipi_measurement m1 -where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +where m1.instrument_id=sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_time) and m1.time <= sqlc.arg(end_time) union select m2.instrument_id, m2.time, m2.measurements from v_ipi_measurement m2 -where m2.time in (select o.initial_time from ipi_opts o where o.instrument_id = $1) -and m2.instrument_id = $1 +where m2.time in (select o.initial_time from ipi_opts o where o.instrument_id = sqlc.arg(instrument_id)) +and m2.instrument_id = sqlc.arg(instrument_id) order by time asc; diff --git a/api/queries/instrument_note.sql b/api/queries/instrument_note.sql index 0ef3b8d6..629ed56a 100644 --- a/api/queries/instrument_note.sql +++ b/api/queries/instrument_note.sql @@ -1,41 +1,37 @@ --- name: ListInstrumentNotes :many -select - id, - instrument_id, - title, - body, - time, - creator, - create_date, - updater, - update_date +-- name: InstrumentNoteGet :one +select * from instrument_note -where (sqlc.narg(instrument_id) is null or sqlc.narg(instrument_id) = $1) -and (sqlc.narg(id) is null or sqlc.narg(id) = $1); +where id = $1; --- name: CreateInstrumentNote :one +-- name: InstrumentNoteListForInstrument :many +select * +from instrument_note +where instrument_id = $1; + + +-- name: InstrumentNoteCreate :one insert into instrument_note (instrument_id, title, body, time, creator, create_date) values ($1, $2, $3, $4, $5, $6) returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; --- name: CreateInstrumentNoteBatch :batchone +-- name: 
InstrumentNoteCreateBatch :batchone insert into instrument_note (instrument_id, title, body, time, creator, create_date) values ($1, $2, $3, $4, $5, $6) returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; --- name: UpdateInstrumentNote :one +-- name: InstrumentNoteUpdate :one update instrument_note set - title = $2, - body = $3, - time = $4, - updater = $5, - update_date = $6 + title=$2, + body=$3, + time=$4, + updater=$5, + update_date=$6 where id = $1 -returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; +returning *; --- name: DeleteInstrumentNote :exec +-- name: InstrumentNoteDelete :exec delete from instrument_note where id = $1; diff --git a/api/queries/instrument_saa.sql b/api/queries/instrument_saa.sql index 9575c7df..4d0a0324 100644 --- a/api/queries/instrument_saa.sql +++ b/api/queries/instrument_saa.sql @@ -1,32 +1,32 @@ --- name: CreateSaaOpts :exec +-- name: SaaOptsCreate :exec insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4); --- name: CreateSaaOptsBatch :batchexec +-- name: SaaOptsCreateBatch :batchexec insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4); --- name: UpdateSaaOpts :exec +-- name: SaaOptsUpdate :exec update saa_opts set bottom_elevation_timeseries_id = $2, initial_time = $3 where instrument_id = $1; --- name: UpdateSaaOptsBatch :batchexec +-- name: SaaOptsUpdateBatch :batchexec update saa_opts set bottom_elevation_timeseries_id = $2, initial_time = $3 where instrument_id = $1; --- name: GetAllSaaSegmentsForInstrument :many +-- name: SaaSegmentListForInstrument :many select * from v_saa_segment where instrument_id = $1; --- name: CreateSaaSegment :exec +-- name: SaaSegmentCreate :exec insert into saa_segment ( id, instrument_id, @@ -38,7 +38,7 @@ insert into saa_segment ( ) values ($1, $2, $3, $4, $5, $6, $7); --- name: 
CreateSaaSegmentBatch :batchexec +-- name: SaaSegmentCreateBatch :batchexec insert into saa_segment ( id, instrument_id, @@ -50,7 +50,7 @@ insert into saa_segment ( ) values ($1, $2, $3, $4, $5, $6, $7); --- name: UpdateSaaSegment :exec +-- name: SaaSegmentUpdate :exec update saa_segment set length_timeseries_id = $3, x_timeseries_id = $4, @@ -60,7 +60,7 @@ update saa_segment set where id = $1 and instrument_id = $2; --- name: UpdateSaaSegmentBatch :batchexec +-- name: SaaSegmentUpdateBatch :batchexec update saa_segment set length_timeseries_id = $3, x_timeseries_id = $4, @@ -70,13 +70,13 @@ update saa_segment set where id = $1 and instrument_id = $2; --- name: GetSaaMeasurementsForInstrument :many +-- name: SaaMeasurementListForInstrumentRange :many select m1.instrument_id, m1.time, m1.measurements from v_saa_measurement m1 -where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +where m1.instrument_id = sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_time) and m1.time <= sqlc.arg(end_time) union select m2.instrument_id, m2.time, m2.measurements from v_saa_measurement m2 -where m2.time in (select o.initial_time from saa_opts o where o.instrument_id = $1) -and m2.instrument_id = $1 +where m2.time in (select o.initial_time from saa_opts o where o.instrument_id = sqlc.arg(instrument_id)) +and m2.instrument_id = sqlc.arg(instrument_id) order by time asc; diff --git a/api/queries/instrument_status.sql b/api/queries/instrument_status.sql index 0eb3310f..b12bb175 100644 --- a/api/queries/instrument_status.sql +++ b/api/queries/instrument_status.sql @@ -1,25 +1,23 @@ --- name: ListInstrumentStatuses :many -select - s.id, - s.status_id, - d.name status, - s.time -from instrument_status s -inner join status d on d.id = s.status_id -where (sqlc.narg(instrument_id) is null or sqlc.narg(instrument_id) = s.instrument_id) -and (sqlc.narg(id) is null or sqlc.narg(id) = s.id) +-- name: InstrumentStatusListForInstrument :many +select * from v_instrument_status +where 
instrument_id=$1 order by time desc; --- name: CreateOrUpdateInstrumentStatus :exec +-- name: InstrumentStatusGet :one +select * from v_instrument_status +where id=$1; + + +-- name: InstrumentStatusCreateOrUpdate :exec insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id; --- name: CreateOrUpdateInstrumentStatusBatch :batchexec +-- name: InstrumentStatusCreateOrUpdateBatch :batchexec insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id; --- name: DeleteInstrumentStatus :exec +-- name: InstrumentStatusDelete :exec delete from instrument_status where id = $1; diff --git a/api/queries/measurement.sql b/api/queries/measurement.sql index 34392026..c88979ff 100644 --- a/api/queries/measurement.sql +++ b/api/queries/measurement.sql @@ -1,84 +1,85 @@ --- name: ListTimeseriesMeasurements :many -select - m.timeseries_id, - m.time, - m.value, - n.masked, - n.validated, - n.annotation -from timeseries_measurement m -left join timeseries_notes n on m.timeseries_id = n.timeseries_id and m.time = n.time -inner join timeseries t on t.id = m.timeseries_id -where t.id = $1 and m.time > $2 and m.time < $3 order by m.time asc; - - --- name: DeleteTimeseriesMeasurements :exec -delete from timeseries_measurement where timeseries_id = $1 and time = $2; - - --- name: GetTimeseriesConstantMeasurement :many -select - m.timeseries_id, - m.time, - m.value -from timeseries_measurement m -inner join v_timeseries_stored t on t.id = m.timeseries_id -inner join parameter p on p.id = t.parameter_id -where t.instrument_id in ( - select instrument_id - from v_timeseries_stored t - where t.id= $1 -) -and p.name = $2; - - --- name: CreateTimeseriesMeasurement :exec +-- name: TimeseriesMeasurementListRange :many +select * from 
v_timeseries_measurement +where timeseries_id=sqlc.arg(timeseries_id) +and time > sqlc.arg(after_time) +and time < sqlc.arg(before_time) order by time asc; + + +-- name: TimeseriesMeasurementGetMostRecent :one +select * +from timeseries_measurement +where timeseries_id = sqlc.arg(timeseries_id) +order by time desc +limit 1; + + +-- name: TimeseriesMeasurementCreate :exec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) on conflict on constraint timeseries_unique_time do nothing; --- name: CreateTimeseriesMeasurementsBatch :batchexec +-- name: TimeseriesMeasurementCreateBatch :batchexec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) on conflict on constraint timeseries_unique_time do nothing; --- name: CreateOrUpdateTimeseriesMeasurement :exec +-- name: TimeseriesMeasurementCreateOrUpdate :exec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) on conflict on constraint timeseries_unique_time do update set value = excluded.value; --- name: CreateOrUpdateTimeseriesMeasurementsBatch :batchexec +-- name: TimeseriesMeasurementCreateOrUpdateBatch :batchexec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) on conflict on constraint timeseries_unique_time do update set value = excluded.value; --- name: CreateTimeseriesNote :exec +-- name: TimeseriesNoteCreate :exec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do nothing; --- name: CreateTimeseriesNotesBatch :batchexec +-- name: TimeseriesNoteCreateBatch :batchexec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do nothing; --- name: CreateOrUpdateTimeseriesNote :exec +-- name: TimeseriesNoteCreateOrUpdate :exec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation)
values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; --- name: CreateOrUpdateTimeseriesNoteBatch :batchexec +-- name: TimeseriesNoteCreateOrUpdateBatch :batchexec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; --- name: DeleteTimeseriesMeasurement :exec -delete from timeseries_measurement where timeseries_id = $1 and time = $2; +-- name: TimeseriesMeasurementDelete :exec +delete from timeseries_measurement where timeseries_id=$1 and time=$2; + + +-- name: TimeseriesMeasurementDeleteBatch :batchexec +delete from timeseries_measurement where timeseries_id=$1 and time=$2; + + +-- name: TimeseriesMeasurementDeleteRange :exec +delete from timeseries_measurement where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); --- name: DeleteTimeseriesMeasurementsRange :exec +-- name: TimeseriesMeasurementDeleteRangeBatch :batchexec delete from timeseries_measurement where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); --- name: DeleteTimeseriesNoteRange :exec +-- name: TimeseriesNoteDelete :exec +delete from timeseries_notes where timeseries_id=$1 and time=$2; + + +-- name: TimeseriesNoteDeleteBatch :batchexec +delete from timeseries_notes where timeseries_id=$1 and time=$2; + + +-- name: TimeseriesNoteDeleteRange :exec +delete from timeseries_notes where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); + + +-- name: TimeseriesNoteDeleteRangeBatch :batchexec delete from timeseries_notes where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); diff --git 
a/api/queries/plot_config.sql b/api/queries/plot_config.sql index 51fe31c3..867caa54 100644 --- a/api/queries/plot_config.sql +++ b/api/queries/plot_config.sql @@ -1,32 +1,32 @@ --- name: ListPlotConfigsForProject :many +-- name: PlotConfigListForProject :many select * from v_plot_configuration where project_id = $1; --- name: GetPlotConfig :one +-- name: PlotConfigGet :one select * from v_plot_configuration where id = $1; --- name: CreatePlotConfig :one +-- name: PlotConfigCreate :one insert into plot_configuration (slug, name, project_id, creator, create_date, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) returning id; --- name: CreatePlotConfigSettings :exec +-- name: PlotConfigSettingsCreate :exec insert into plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold) values ($1, $2, $3, $4, $5, $6, $7); --- name: UpdatePlotConfig :exec +-- name: PlotConfigUpdate :exec update plot_configuration set name = $3, updater = $4, update_date = $5 where project_id = $1 and id = $2; --- name: DeletePlotConfig :exec +-- name: PlotConfigDelete :exec delete from plot_configuration where project_id = $1 and id = $2; --- name: DeletePlotConfigSettings :exec +-- name: PlotConfigSettingsDelete :exec delete from plot_configuration_settings where id = $1; diff --git a/api/queries/plot_config_bullseye.sql b/api/queries/plot_config_bullseye.sql index 7324a23a..e9b5264a 100644 --- a/api/queries/plot_config_bullseye.sql +++ b/api/queries/plot_config_bullseye.sql @@ -1,16 +1,16 @@ --- name: CreatePlotBullseyeConfig :exec +-- name: PlotBullseyeConfigCreate :exec insert into plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_timeseries_id) values ($1, $2, $3); --- name: UpdatePlotBullseyeConfig :exec +-- name: PlotBullseyeConfigUpdate :exec UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1; --- name: DeletePlotBullseyeConfig :exec +-- 
name: PlotBullseyeConfigDelete :exec delete from plot_bullseye_config where plot_config_id = $1; --- name: ListPlotConfigMeasurementsBullseyePlot :many +-- name: PlotConfigMeasurementListBullseye :many select t.time, locf(xm.value) as x, @@ -25,8 +25,8 @@ and xm.time = t.time left join timeseries_measurement ym on ym.timeseries_id = pc.y_axis_timeseries_id and ym.time = t.time -where pc.plot_config_id = $1 -and t.time > $2 -and t.time < $3 +where pc.plot_config_id = sqlc.arg(plot_config_id) +and t.time > sqlc.arg(after) +and t.time < sqlc.arg(before) group by t.time order by t.time asc; diff --git a/api/queries/plot_config_contour.sql b/api/queries/plot_config_contour.sql index 6f8993ca..bf44f6dc 100644 --- a/api/queries/plot_config_contour.sql +++ b/api/queries/plot_config_contour.sql @@ -1,42 +1,42 @@ --- name: CreatePlotContourConfig :exec +-- name: PlotContourConfigCreate :exec insert into plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) values ($1, $2, $3, $4, $5, $6); --- name: UpdatePlotContourConfig :exec +-- name: PlotContourConfigUpdate :exec update plot_contour_config set "time"=$2, locf_backfill=$3, gradient_smoothing=$4, contour_smoothing=$5, show_labels=$6 where plot_config_id=$1; --- name: DeletePlotContourConfig :exec +-- name: PlotContourConfigDelete :exec delete from plot_contour_config where plot_config_id = $1; --- name: CreatePlotContourConfigTimeseries :exec +-- name: PlotContourConfigTimeseriesCreate :exec insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) on conflict (plot_contour_config_id, timeseries_id) do nothing; --- name: CreatePlotContourConfigTimeseriesBatch :batchexec +-- name: PlotContourConfigTimeseriesCreateBatch :batchexec insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) on conflict (plot_contour_config_id, timeseries_id) do nothing; --- name: 
DeleteAllPlotContourConfigTimeseries :exec +-- name: PlotContourConfigTimeseriesDeleteForPlotContourConfig :exec delete from plot_contour_config_timeseries where plot_contour_config_id = $1; --- name: ListPlotContourConfigTimes :many +-- name: PlotContourConfigListTimeRange :many select distinct mm.time from plot_contour_config_timeseries pcts inner join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id -where pcts.plot_contour_config_id = $1 -and mm.time > $2 -and mm.time < $3 +where pcts.plot_contour_config_id = sqlc.arg(plot_contour_config_id) +and mm.time > sqlc.arg(after) +and mm.time < sqlc.arg(before) order by time asc; --- name: ListPlotConfigMeasurementsContourPlot :many +-- name: PlotConfigMeasurementListContour :many select oi.x::double precision x, oi.y::double precision y, diff --git a/api/queries/plot_config_profile.sql b/api/queries/plot_config_profile.sql index 73ead8f8..61b3e89e 100644 --- a/api/queries/plot_config_profile.sql +++ b/api/queries/plot_config_profile.sql @@ -1,6 +1,6 @@ --- name: CreatePlotProfileConfig :exec +-- name: PlotProfileConfigCreate :exec insert into plot_profile_config (plot_config_id, instrument_id) values ($1, $2); --- name: UpdatePlotProfileConfig :exec +-- name: PlotProfileConfigUpdate :exec update plot_profile_config set instrument_id=$2 where plot_config_id=$1; diff --git a/api/queries/plot_config_scatter_line.sql b/api/queries/plot_config_scatter_line.sql index ae57e3e4..0467a22a 100644 --- a/api/queries/plot_config_scatter_line.sql +++ b/api/queries/plot_config_scatter_line.sql @@ -1,47 +1,47 @@ --- name: CreatePlotConfigScatterLineLayout :exec +-- name: PlotConfigScatterLineLayoutCreate :exec insert into plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) values ($1, $2, $3); --- name: UpdatePlotConfigScatterLineLayout :exec +-- name: PlotConfigScatterLineLayoutUpdate :exec update plot_scatter_line_config set y_axis_title=$2, y2_axis_title=$3 where plot_config_id=$1; --- name: 
CreatePlotConfigTimeseriesTrace :exec +-- name: PlotConfigTimeseriesTraceCreate :exec insert into plot_configuration_timeseries_trace (plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values ($1, $2, $3, $4, $5, $6, $7, $8); --- name: CreatePlotConfigTimeseriesTracesBatch :batchexec +-- name: PlotConfigTimeseriesTracesCreateBatch :batchexec insert into plot_configuration_timeseries_trace (plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values ($1, $2, $3, $4, $5, $6, $7, $8); --- name: UpdatePlotConfigTimeseriesTrace :exec +-- name: PlotConfigTimeseriesTraceUpdate :exec update plot_configuration_timeseries_trace set trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 where plot_configuration_id=$1 and timeseries_id=$2; --- name: DeleteAllPlotConfigTimeseriesTraces :exec +-- name: PlotConfigTimeseriesTraceDeleteForPlotConfig :exec delete from plot_configuration_timeseries_trace where plot_configuration_id=$1; --- name: CreatePlotConfigCustomShape :exec +-- name: PlotConfigCustomShapeCreate :exec insert into plot_configuration_custom_shape (plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5); --- name: CreatePlotConfigCustomShapesBatch :batchexec +-- name: PlotConfigCustomShapeCreateBatch :batchexec insert into plot_configuration_custom_shape (plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5); --- name: UpdatePlotConfigCustomShape :exec +-- name: PlotConfigCustomShapeUpdate :exec update plot_configuration_custom_shape set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1; --- name: DeleteAllPlotConfigCustomShapes :exec +-- name: PlotConfigCustomShapeDeleteForPlotConfig :exec delete from plot_configuration_custom_shape where plot_configuration_id=$1; diff --git a/api/queries/profile.sql b/api/queries/profile.sql index 3bbbafca..5f443b26 100644 --- 
a/api/queries/profile.sql +++ b/api/queries/profile.sql @@ -1,22 +1,22 @@ --- name: GetProfileForEDIPI :one +-- name: ProfileGetForEDIPI :one select * from v_profile where edipi = $1; --- name: GetProfileForEmail :one +-- name: ProfileGetForEmail :one select * from v_profile where email ilike $1 limit 1; --- name: GetProfileForUsername :one +-- name: ProfileGetForUsername :one select * from v_profile where username = $1 limit 1; --- name: GetIssuedTokens :many +-- name: ProfileTokenList :many select token_id, issued from profile_token where profile_id = $1; --- name: GetProfileForTokenID :one +-- name: ProfileGetForToken :one select p.id, p.edipi, p.username, p.email, p.is_admin from profile_token t left join v_profile p on p.id = t.profile_id @@ -24,29 +24,29 @@ where t.token_id = $1 limit 1; --- name: CreateProfile :exec +-- name: ProfileCreate :one insert into profile (edipi, username, email, display_name) values ($1, $2, $3, $4) returning id, username, email, display_name; --- name: CreateProfileToken :one +-- name: ProfileTokenCreate :one insert into profile_token (token_id, profile_id, hash) values ($1,$2,$3) returning *; --- name: GetTokenInfo :one +-- name: ProfileTokenGet :one select id, token_id, profile_id, issued, hash from profile_token where token_id=$1 limit 1; --- name: UpdateProfileForEDIPI :exec +-- name: ProfileUpdateForEDIPI :exec UPDATE profile SET username=$1, email=$2, display_name=$3 WHERE edipi=$4; --- name: UpdateProfileForEmail :exec +-- name: ProfileUpdateForEmail :exec update profile set username=$1, display_name=$2 where email ilike $3; --- name: UpdateProfileForUsername :exec +-- name: ProfileUpdateForUsername :exec update profile set email=$1, display_name=$2 where username=$3; --- name: DeleteToken :exec +-- name: ProfileTokenDelete :exec delete from profile_token where profile_id=$1 and token_id=$2; diff --git a/api/queries/project.sql b/api/queries/project.sql index 1e9a8e88..75022fd0 100644 --- a/api/queries/project.sql +++ 
b/api/queries/project.sql @@ -1,23 +1,23 @@ --- name: ListProjects :many +-- name: ProjectList :many select * from v_project; --- name: SearchProjects :many +-- name: ProjectListForNameSearch :many select * from v_project where name ilike '%'||sqlc.arg(name)||'%' limit sqlc.arg(result_limit); --- name: ListProjectsForFederalID :many +-- name: ProjectListForFederalID :many select * from v_project -where federal_id = sqc.arg(federal_id); +where federal_id = sqlc.arg(federal_id); --- name: ListDistricts :many +-- name: DistrictList :many select * from v_district; --- name: ListProjectsForProfileRole :many +-- name: ProjectListForProfileRole :many select p.* from v_project p inner join profile_project_roles pr on pr.project_id = p.id @@ -26,34 +26,34 @@ where pr.profile_id = $1 and r.name = $2; --- name: ListAdminProjects :many +-- name: ProjectListForProfileAdmin :many select pr.project_id from profile_project_roles pr inner join role ro on ro.id = pr.role_id where pr.profile_id = $1 and ro.name = 'ADMIN'; --- name: GetProjectCount :one +-- name: ProjectGetCount :one select count(*) from project where not deleted; --- name: GetProject :one +-- name: ProjectGet :one select * from v_project where id = $1; --- name: CreateProjectsBatch :batchone +-- name: ProjectCreateBatch :batchone insert into project (federal_id, slug, name, district_id, creator, create_date) values ($1, slugify($2, 'project'), $2, $3, $4, $5) returning id, slug; --- name: UpdateProject :one +-- name: ProjectUpdate :one update project set name=$2, updater=$3, update_date=$4, district_id=$5, federal_id=$6 where id=$1 returning id; --- name: UpdateProjectImage :exec +-- name: ProjectUpdateImage :exec update project set image = $1 where id = $2; --- name: DeleteFlagProject :exec +-- name: ProjectDeleteFlag :exec update project set deleted=true where id = $1; diff --git a/api/queries/project_role.sql b/api/queries/project_role.sql index 85db6d7e..e3dd9c1a 100644 --- a/api/queries/project_role.sql +++ 
b/api/queries/project_role.sql @@ -1,28 +1,28 @@ --- name: ListProjectMembers :many +-- name: ProfileProjectRoleListForProject :many select id, profile_id, username, email, role_id, role from v_profile_project_roles where project_id = $1 order by email; --- name: GetProjectMembership :one +-- name: ProfileProjectRoleGet :one select id, profile_id, username, email, role_id, role from v_profile_project_roles where id = $1; --- name: CreateProfileProjectRole :one +-- name: ProfileProjectRoleCreate :one insert into profile_project_roles (project_id, profile_id, role_id, granted_by) values ($1, $2, $3, $4) on conflict on constraint unique_profile_project_role do update set project_id = excluded.project_id returning id; --- name: DeleteProfileProjectRole :exec +-- name: ProfileProjectRoleDelete :exec delete from profile_project_roles where project_id = $1 and profile_id = $2 and role_id = $3; --- name: IsProjectAdmin :one +-- name: ProfileProjectRoleGetIsAdmin :one select exists ( select 1 from profile_project_roles pr inner join role r on r.id = pr.role_id @@ -32,7 +32,7 @@ select exists ( ); --- name: IsProjectMember :one +-- name: ProfileProjectRoleGetIsMemberOrAdmin :one select exists ( select 1 from profile_project_roles pr inner join role r on r.id = pr.role_id diff --git a/api/queries/report_config.sql b/api/queries/report_config.sql index 9b7b2834..03e9c83d 100644 --- a/api/queries/report_config.sql +++ b/api/queries/report_config.sql @@ -1,4 +1,4 @@ --- name: CreateReportConfig :one +-- name: ReportConfigCreate :one insert into report_config ( name, slug, project_id, creator, description, date_range, date_range_enabled, show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled @@ -7,57 +7,57 @@ values ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) returning id; --- name: ListProjectReportConfigs :many +-- name: ReportConfigListForProject :many select * from v_report_config where project_id = $1; --- name: 
ListReportConfigPlotConfigs :many +-- name: ReportConfigListForReportConfigWithPlotConfig :many select * from v_plot_configuration where id = any( select plot_config_id from report_config_plot_config where report_config_id = $1 ); --- name: GetReportConfig :one +-- name: ReportConfigGet :one select * from v_report_config where id = $1; --- name: UpdateReportConfig :exec +-- name: ReportConfigUpdate :exec update report_config set name=$2, updater=$3, update_date=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 where id=$1; --- name: DeleteReportConfig :exec +-- name: ReportConfigDelete :exec delete from report_config where id=$1; --- name: AssignReportConfigPlotConfig :exec +-- name: ReportConfigPlotConfigCreate :exec insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2); --- name: AssignReportConfigPlotConfigBatch :batchexec +-- name: ReportConfigPlotConfigCreateBatch :batchexec insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2); --- name: UnassignReportConfigPlotConfig :exec +-- name: ReportConfigPlotConfigDelete :exec delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2; --- name: UnassignReportConfigPlotConfigBatch :batchexec +-- name: ReportConfigPlotConfigDeleteBatch :batchexec delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2; --- name: UnassignAllReportConfigPlotConfig :exec +-- name: ReportConfigPlotConfigDeleteForReportConfig :exec delete from report_config_plot_config where report_config_id=$1; --- name: GetReportDownloadJob :one +-- name: ReportDownloadJobGet :one select * from report_download_job where id=$1 and creator=$2; --- name: CreateReportDownloadJob :one +-- name: ReportDownloadJobCreate :one insert into report_download_job (report_config_id, creator) values ($1, $2) returning *; --- name: 
UpdateReportDownloadJob :exec +-- name: ReportDownloadJobUpdate :exec update report_download_job set status=$2, progress=$3, progress_update_date=$4, file_key=$5, file_expiry=$6 where id=$1; diff --git a/api/queries/submittal.sql b/api/queries/submittal.sql index 786b9cd3..22a797f9 100644 --- a/api/queries/submittal.sql +++ b/api/queries/submittal.sql @@ -1,29 +1,29 @@ --- name: ListProjectSubmittals :many +-- name: SubmittalListForProject :many select * from v_submittal where project_id = sqlc.arg(project_id) -and (sqlc.arg(show_incomplete_missing) = false or (completion_date is null and not marked_as_missing)) +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completion_date is null and not marked_as_missing)) order by due_date desc, alert_type_name asc; --- name: ListInstrumentSubmittals :many +-- name: SubmittalListForInstrument :many select sub.* from v_submittal sub inner join alert_config_instrument aci on aci.alert_config_id = sub.alert_config_id where aci.instrument_id = sqlc.arg(instrument_id) -and (sqlc.arg(show_incomplete_missing) = false or (completion_date is null and not marked_as_missing)) +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completion_date is null and not marked_as_missing)) order by sub.due_date desc; --- name: ListAlertConfigSubmittals :many +-- name: SubmittalListForAlertConfig :many select * from v_submittal -where alert_config_id = $1 -and (sqlc.arg(show_incomplete_missing) = false or (completion_date is null and not marked_as_missing)) +where alert_config_id = sqlc.arg(alert_config_id) +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completion_date is null and not marked_as_missing)) order by due_date desc; --- name: ListUnverifiedMissingSubmittals :many +-- name: SubmittalListUnverifiedMissing :many select * from v_submittal where completion_date is null @@ -31,7 +31,7 @@ and not marked_as_missing order by due_date desc; --- name: UpdateSubmittal :exec +-- name: SubmittalUpdate :exec update 
submittal set submittal_status_id = $2, completion_date = $3, @@ -39,7 +39,7 @@ update submittal set where id = $1; --- name: VerifyMissingSubmittal :exec +-- name: SubmittalUpdateVerifyMissing :exec update submittal set submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, marked_as_missing = true @@ -48,7 +48,7 @@ and completion_date is null and now() > due_date; --- name: VerifyMissingAlertConfigSubmittals :exec +-- name: SubmittalUpdateVerifyMissingForAlertConfig :exec update submittal set submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, marked_as_missing = true diff --git a/api/queries/timeseries.sql b/api/queries/timeseries.sql index bce8558b..680769a7 100644 --- a/api/queries/timeseries.sql +++ b/api/queries/timeseries.sql @@ -1,57 +1,63 @@ --- name: GetStoredTimeseriesExists :one +-- name: TimeseriesGetExistsStored :one select exists (select id from v_timeseries_stored where id = $1); --- name: GetTimeseries :one +-- name: TimeseriesGet :one select * from v_timeseries where id=$1; --- name: GetTimeseriesProjectMap :many -select timeseries_id, project_id -from v_timeseries_project_map -where timeseries_id in (sqlc.arg(timeseries_ids)::uuid[]); +-- name: TimeseriesGetAllBelongToProject :one +select not exists ( + select true + from timeseries ts + where not ts.instrument_id = any ( + select p.instrument_id + from project_instrument p + where p.project_id = sqlc.arg(project_id) + ) + and ts.id = any(sqlc.arg(timeseries_ids)::uuid[]) +); --- name: ListInstrumentTimeseries :many +-- name: TimeseriesListForInstrument :many select * from v_timeseries where instrument_id = $1; --- name: ListPlotConfigTimeseries :many +-- name: TimeseriesListForPlotConfig :many select t.* from v_timeseries t inner join plot_configuration_timeseries_trace pct on pct.timeseries_id = t.id where pct.plot_configuration_id = $1; --- name: ListInstrumentGroupTimeseries :many +-- name: TimeseriesListForInstrumentGroup :many select t.* from v_timeseries t 
inner join instrument_group_instruments gi on gi.instrument_id = t.instrument_id where gi.instrument_group_id = $1; --- name: ListTimeseriesForProject :many +-- name: TimeseriesListForProject :many select t.* from v_timeseries t inner join project_instrument p on p.instrument_id = t.instrument_id where p.project_id = $1; --- name: CreateTimeseries :one +-- name: TimeseriesCreate :one insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) returning id, instrument_id, slug, name, parameter_id, unit_id, type; --- name: CreateTimeseriesBatch :batchone +-- name: TimeseriesCreateBatch :batchone insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) returning id, instrument_id, slug, name, parameter_id, unit_id, type; --- name: UpdateTimeseries :one +-- name: TimeseriesUpdate :exec update timeseries set name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 -where id = $1 -returning id; +where id = $1; --- name: DeleteTimeseries :exec +-- name: TimeseriesDelete :exec delete from timeseries where id = $1; diff --git a/api/queries/timeseries_calculated.sql b/api/queries/timeseries_calculated.sql index f84f8a67..3e0e688d 100644 --- a/api/queries/timeseries_calculated.sql +++ b/api/queries/timeseries_calculated.sql @@ -1,4 +1,4 @@ --- name: GetCalculatedTimeseries :one +-- name: TimeseriesComputedGet :one select id, instrument_id, @@ -8,26 +8,39 @@ select name as formula_name, coalesce(contents, '') as formula from v_timeseries_computed -where id=$1; +where id = $1; --- name: CreateCalculatedTimeseries :one -INSERT INTO timeseries ( +-- name: TimeseriesComputedListForInstrument :many +select + id, + instrument_id, + parameter_id, + unit_id, + slug, + name as formula_name, + coalesce(contents, '') as formula +from v_timeseries_computed +where instrument_id = $1; + + +-- name: TimeseriesComputedCreate :one 
+insert into timeseries ( instrument_id, parameter_id, unit_id, slug, name, type -) VALUES ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') -RETURNING id; +) values ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') +returning id; --- name: CreateCalculation :exec +-- name: CalculationCreate :exec insert into calculation (timeseries_id, contents) values ($1,$2); --- name: CreateOrUpdateCalculation :exec +-- name: CalculationCreateOrUpdate :exec with p as ( select contents from calculation where timeseries_id=$1 ) @@ -35,11 +48,11 @@ insert into calculation (timeseries_id, contents) values ($1, $2) on conflict (timeseries_id) do update set contents=coalesce(excluded.contents, p.contents); --- name: DeleteCalculatedTimeseries :exec +-- name: TimeseriesComputedDelete :exec delete from timeseries where id = $1 and id in (select timeseries_id from calculation); --- name: CreateOrUpdateCalculatedTimeseries :exec +-- name: TimeseriesComputedCreateOrUpdate :exec with p as ( select * from timeseries where id=$1 diff --git a/api/queries/timeseries_cwms.sql b/api/queries/timeseries_cwms.sql index e28bb7f0..b501c5a9 100644 --- a/api/queries/timeseries_cwms.sql +++ b/api/queries/timeseries_cwms.sql @@ -1,24 +1,24 @@ --- name: ListTimeseriesCwms :many +-- name: TimeseriesCwmsList :many select * from v_timeseries_cwms where instrument_id = $1; --- name: GetTimeseriesCwms :one +-- name: TimeseriesCwmsGet :one select * from v_timeseries_cwms where id = $1; --- name: CreateTimeseriesCwms :exec +-- name: TimeseriesCwmsCreate :exec insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values ($1, $2, $3, $4, $5); --- name: CreateTimeseriesCwmsBatch :batchexec +-- name: TimeseriesCwmsCreateBatch :batchexec insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values ($1, $2, $3, $4, $5); --- name: UpdateTimeseriesCwms 
:exec +-- name: TimeseriesCwmsUpdate :exec update timeseries_cwms set cwms_timeseries_id=$2, cwms_office_id=$3, diff --git a/api/queries/unit.sql b/api/queries/unit.sql index aa69dc56..d0b92e41 100644 --- a/api/queries/unit.sql +++ b/api/queries/unit.sql @@ -1,4 +1,4 @@ --- name: ListUnits :many +-- name: UnitsList :many select id, name, abbreviation, unit_family_id, unit_family, measure_id, measure from v_unit order by name; diff --git a/api/queries/uploader.sql b/api/queries/uploader.sql index 86042b7b..1cb173c6 100644 --- a/api/queries/uploader.sql +++ b/api/queries/uploader.sql @@ -1,14 +1,14 @@ --- name: ListUploaderConfigsForProject :many +-- name: UploaderConfigListForProject :many select * from uploader_config where project_id=$1; --- name: CreateUploaderConfig :one +-- name: UploaderConfigCreate :one insert into uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) returning id; --- name: UpdateUploaderConfig :exec +-- name: UploaderConfigUpdate :exec update uploader_config set name=$2, description=$3, @@ -19,17 +19,17 @@ update uploader_config set where id=$1; --- name: DeleteUploaderConfig :exec +-- name: UploaderConfigDelete :exec delete from uploader_config where id=$1; --- name: ListUploaderConfigMappings :many +-- name: UploaderConfigMappingList :many select * from uploader_config_mapping where uploader_config_id=$1; --- name: CreateUploaderConfigMappingsBatch :batchexec +-- name: UploaderConfigMappingCreateBatch :batchexec insert into uploader_config_mapping (uploader_config_id, field_name, timeseries_id) values ($1, $2, $3); --- name: DeleteAllUploaderConfigMappingsForUploaderConfig :exec +-- name: UploaderConfigMappingDeleteForUploaderConfig :exec delete from uploader_config_mapping where uploader_config_id=$1; diff --git a/go.work.sum b/go.work.sum index 36f0dc97..62ca0f7d 100644 --- a/go.work.sum +++ b/go.work.sum @@ -2,13 +2,10 @@ 
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg= github.com/ClickHouse/clickhouse-go/v2 v2.27.1/go.mod h1:XvcaX7ai9T9si83rZ0cB3y2upq9AYMwdj16Trqm+sPg= -github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/Microsoft/hcsshim v0.12.0/go.mod h1:RZV12pcHCXQ42XnlQ3pz6FZfmrC1C+R4gaOHhRNML1g= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/alecthomas/assert/v2 v2.10.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= -github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= @@ -30,16 +27,13 @@ github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ4 github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod 
h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= -github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= -github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0= -github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak= github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= @@ -59,6 +53,7 @@ github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7P github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= From 122cd3807f2901a1fd3393eb3dc69b37d0e18ac5 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 15 Nov 2024 00:27:56 -0500 Subject: [PATCH 18/23] fix: failing regression 
tests from refactored return types --- api/Dockerfile.openapi | 2 +- api/go.mod | 2 - api/go.sum | 12 - api/internal/db/alert.sql_gen.go | 20 +- api/internal/db/alert_check.sql_gen.go | 52 +- api/internal/db/alert_config.sql_gen.go | 130 +- .../db/alert_measurement_check.sql_gen.go | 4 +- api/internal/db/batch.go | 117 +- api/internal/db/collection_group.sql_gen.go | 78 +- api/internal/db/datalogger.sql_gen.go | 106 +- .../db/datalogger_telemetry.sql_gen.go | 30 +- api/internal/db/district_rollup.sql_gen.go | 16 +- api/internal/db/evaluation.sql_gen.go | 146 +- api/internal/db/home.sql_gen.go | 2 +- api/internal/db/instrument.sql_gen.go | 195 +- api/internal/db/instrument_assign.sql_gen.go | 8 +- api/internal/db/instrument_group.sql_gen.go | 154 +- api/internal/db/instrument_incl.sql_gen.go | 2 +- api/internal/db/instrument_ipi.sql_gen.go | 2 +- api/internal/db/instrument_note.sql_gen.go | 70 +- api/internal/db/instrument_saa.sql_gen.go | 2 +- api/internal/db/manual.go | 8 - api/internal/db/measurement.manual.go | 50 + api/internal/db/measurement.sql_gen.go | 12 +- api/internal/db/models.go | 573 +- api/internal/db/overrides.go | 25 +- api/internal/db/plot_config.sql_gen.go | 52 +- api/internal/db/project.sql_gen.go | 85 +- api/internal/db/querier.go | 29 +- api/internal/db/report_config.sql_gen.go | 96 +- api/internal/db/submittal.sql_gen.go | 62 +- api/internal/db/timeseries.sql_gen.go | 26 +- .../db/timeseries_calculated.sql_gen.go | 99 +- api/internal/db/timeseries_cwms.sql_gen.go | 10 +- api/internal/db/timeseries_process.manual.go | 52 +- api/internal/db/uploader.sql_gen.go | 40 +- api/internal/dto/alert.go | 18 +- api/internal/dto/alert_config.go | 38 +- api/internal/dto/alert_evaluation_check.go | 2 +- api/internal/dto/alert_measurement_check.go | 2 +- api/internal/dto/common.go | 12 +- api/internal/dto/datalogger.go | 2 +- api/internal/dto/district_rollup.go | 2 +- api/internal/dto/evaluation.go | 26 +- api/internal/dto/instrument.go | 54 +- 
api/internal/dto/instrument_status.go | 21 + api/internal/dto/measurement_inclinometer.go | 6 +- api/internal/dto/project.go | 4 - api/internal/dto/report_config.go | 36 +- api/internal/dto/submittal.go | 26 +- api/internal/handler/alert.go | 21 +- api/internal/handler/alert_config.go | 20 +- api/internal/handler/alert_config_test.go | 22 +- api/internal/handler/alert_subscription.go | 14 +- api/internal/handler/alert_test.go | 6 +- api/internal/handler/autocomplete.go | 2 +- api/internal/handler/aware.go | 6 +- api/internal/handler/collection_groups.go | 18 +- .../handler/collection_groups_test.go | 20 +- api/internal/handler/datalogger.go | 36 +- api/internal/handler/datalogger_telemetry.go | 4 +- api/internal/handler/datalogger_test.go | 20 +- api/internal/handler/district_rollup.go | 4 +- api/internal/handler/domain.go | 8 +- api/internal/handler/equivalency_table.go | 7 +- api/internal/handler/evaluation.go | 18 +- api/internal/handler/evaluation_test.go | 24 +- api/internal/handler/handler_test.go | 10 +- api/internal/handler/heartbeat.go | 13 +- api/internal/handler/home.go | 3 +- api/internal/handler/instrument.go | 51 +- api/internal/handler/instrument_assign.go | 18 +- api/internal/handler/instrument_constant.go | 4 +- api/internal/handler/instrument_group.go | 27 +- api/internal/handler/instrument_group_test.go | 10 +- api/internal/handler/instrument_incl.go | 10 +- api/internal/handler/instrument_ipi.go | 10 +- api/internal/handler/instrument_note.go | 22 +- api/internal/handler/instrument_note_test.go | 17 +- api/internal/handler/instrument_saa.go | 10 +- api/internal/handler/instrument_status.go | 5 +- .../handler/instrument_status_test.go | 3 +- api/internal/handler/instrument_test.go | 20 +- api/internal/handler/measurement.go | 2 +- api/internal/handler/media.go | 2 +- api/internal/handler/opendcs.go | 26 - api/internal/handler/plot_config.go | 4 +- api/internal/handler/plot_config_bullseye.go | 14 +- api/internal/handler/plot_config_contour.go | 
15 +- api/internal/handler/plot_config_profile.go | 14 +- .../handler/plot_config_scatter_line.go | 14 +- .../handler/plot_config_scatter_line_test.go | 10 +- api/internal/handler/profile.go | 3 +- api/internal/handler/project.go | 30 +- api/internal/handler/project_role.go | 7 +- api/internal/handler/project_test.go | 14 +- api/internal/handler/report_config.go | 34 +- api/internal/handler/report_config_test.go | 22 +- api/internal/handler/submittal.go | 6 +- api/internal/handler/submittal_test.go | 6 +- api/internal/handler/timeseries.go | 15 +- api/internal/handler/timeseries_calculated.go | 9 +- api/internal/handler/timeseries_cwms.go | 7 +- api/internal/handler/timeseries_cwms_test.go | 1 - api/internal/handler/timeseries_process.go | 23 +- api/internal/handler/timeseries_test.go | 4 +- api/internal/handler/unit.go | 3 +- api/internal/handler/uploader.go | 18 +- api/internal/middleware/audit.go | 6 +- api/internal/server/api.go | 12 +- api/internal/server/docs/openapi.json | 4586 ++++++++------ api/internal/server/docs/openapi.yaml | 5337 +++++++++-------- api/internal/service/alert_check.go | 36 +- api/internal/service/alert_config.go | 12 +- api/internal/service/collection_group.go | 20 +- api/internal/service/datalogger.go | 24 +- api/internal/service/datalogger_telemetry.go | 6 +- api/internal/service/db.go | 11 +- api/internal/service/evaluation.go | 24 +- api/internal/service/heartbeat.go | 42 + api/internal/service/instrument.go | 9 +- api/internal/service/instrument_constant.go | 5 +- api/internal/service/instrument_group.go | 14 +- api/internal/service/instrument_incl.go | 10 +- api/internal/service/instrument_ipi.go | 15 +- api/internal/service/instrument_note.go | 16 +- api/internal/service/instrument_saa.go | 15 +- api/internal/service/measurement.go | 8 +- api/internal/service/opendcs.go | 39 - api/internal/service/plot_config.go | 20 +- api/internal/service/project.go | 17 +- api/internal/service/project_role.go | 30 + 
api/internal/service/report_config.go | 18 +- api/internal/service/timeseries.go | 10 +- api/internal/service/timeseries_calculated.go | 24 +- api/internal/service/timeseries_cwms.go | 5 +- api/internal/service/uploader.go | 8 +- .../repeat/0020__views_profiles.sql | 14 +- .../repeat/0030__views_projects.sql | 37 +- .../repeat/0040__views_instruments.sql | 81 +- .../repeat/0050__views_timeseries.sql | 5 +- api/migrations/repeat/0060__views_alerts.sql | 74 +- api/migrations/repeat/0090__views_plots.sql | 8 +- .../repeat/0100__views_datalogger.sql | 34 +- .../repeat/0110__views_evaluations.sql | 20 +- .../repeat/0120__views_alert_check.sql | 42 +- .../repeat/0130__views_district_rollup.sql | 32 +- .../0140__views_depth_based_instruments.sql | 37 +- .../repeat/0160__views_report_config.sql | 16 +- .../repeat/0170__views_uploader.sql | 16 +- .../V1.18.00__timeseries_type_not_null.sql | 2 + .../schema/V1.19.00__audit_into_rename.sql | 74 + ....20.00__fix_missing_profile_references.sql | 107 + api/queries/alert_check.sql | 12 +- api/queries/alert_config.sql | 26 +- api/queries/alert_measurement_check.sql | 4 +- api/queries/collection_group.sql | 6 +- api/queries/datalogger.sql | 12 +- api/queries/datalogger_telemetry.sql | 8 +- api/queries/district_rollup.sql | 8 +- api/queries/evaluation.sql | 26 +- api/queries/home.sql | 2 +- api/queries/instrument.sql | 84 +- api/queries/instrument_assign.sql | 8 +- api/queries/instrument_group.sql | 22 +- api/queries/instrument_incl.sql | 2 +- api/queries/instrument_ipi.sql | 2 +- api/queries/instrument_note.sql | 12 +- api/queries/instrument_saa.sql | 2 +- api/queries/measurement.sql | 6 +- api/queries/plot_config.sql | 4 +- api/queries/project.sql | 4 +- api/queries/report_config.sql | 10 +- api/queries/submittal.sql | 26 +- api/queries/timeseries.sql | 2 +- api/queries/timeseries_calculated.sql | 59 +- api/queries/uploader.sql | 8 +- go.work.sum | 5 + report/generated.d.ts | 4725 +++++++-------- sqlc.yml | 12 +- 180 files changed, 
10233 insertions(+), 9246 deletions(-) delete mode 100644 api/internal/db/manual.go delete mode 100644 api/internal/handler/opendcs.go create mode 100644 api/internal/service/heartbeat.go delete mode 100644 api/internal/service/opendcs.go create mode 100644 api/internal/service/project_role.go create mode 100644 api/migrations/schema/V1.18.00__timeseries_type_not_null.sql create mode 100644 api/migrations/schema/V1.19.00__audit_into_rename.sql create mode 100644 api/migrations/schema/V1.20.00__fix_missing_profile_references.sql diff --git a/api/Dockerfile.openapi b/api/Dockerfile.openapi index 224c921f..77d5a9d0 100644 --- a/api/Dockerfile.openapi +++ b/api/Dockerfile.openapi @@ -9,7 +9,7 @@ RUN go install github.com/swaggo/swag/cmd/swag@latest COPY . . RUN swag init --ot json --pd -g cmd/midas-api/main.go --parseInternal true --dir internal -RUN find ./docs -type f -exec sed -i '' -e 's/github_com_USACE_instrumentation-api_api_internal_dto.//g' {} \; +RUN find ./docs -type f -exec sed -i '' -e 's/github_com_USACE_instrumentation-api_api_internal_//g' {} \; FROM openapitools/openapi-generator-cli:latest-release AS docgen WORKDIR /work diff --git a/api/go.mod b/api/go.mod index dfe595d1..917c7209 100644 --- a/api/go.mod +++ b/api/go.mod @@ -25,8 +25,6 @@ require ( github.com/lib/pq v1.10.9 github.com/stretchr/testify v1.9.0 github.com/tidwall/btree v1.7.0 - github.com/twpayne/go-geom v1.5.7 - github.com/twpayne/pgx-geom v0.0.2 github.com/xeipuuv/gojsonschema v1.2.0 golang.org/x/crypto v0.27.0 golang.org/x/image v0.20.0 diff --git a/api/go.sum b/api/go.sum index 3a54a230..77f8e251 100644 --- a/api/go.sum +++ b/api/go.sum @@ -1,15 +1,9 @@ filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= -github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod 
h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible h1:1G1pk05UrOh0NlF1oeaaix1x8XzrfjIDK47TY0Zehcw= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= -github.com/alecthomas/assert/v2 v2.10.0 h1:jjRCHsj6hBJhkmhznrCzoNpbA3zqy0fYiUcYZP/GkPY= -github.com/alecthomas/assert/v2 v2.10.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= -github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= -github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/aws/aws-lambda-go v1.47.0 h1:0H8s0vumYx/YKs4sE7YM0ktwL2eWse+kfopsRI1sXVI= github.com/aws/aws-lambda-go v1.47.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7RfgJv23DymV8A= github.com/aws/aws-sdk-go-v2 v1.30.5 h1:mWSRTwQAb0aLE17dSzztCVJWI9+cRMgqebndjwDyK0g= @@ -89,8 +83,6 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= -github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= @@ -223,10 +215,6 @@ github.com/stretchr/testify v1.9.0 
h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI= github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= -github.com/twpayne/go-geom v1.5.7 h1:7fdceDUr03/MP7rAKOaTV6x9njMiQdxB/D0PDzMTCDc= -github.com/twpayne/go-geom v1.5.7/go.mod h1:y4fTAQtLedXW8eG2Yo4tYrIGN1yIwwKkmA+K3iSHKBA= -github.com/twpayne/pgx-geom v0.0.2 h1:DZcp66JfCwyfQMH1JNBa0vfF+/hi4WQsfHMqBRXp8WI= -github.com/twpayne/pgx-geom v0.0.2/go.mod h1:rUjv/MgeOmPZqUbLY7Qgq56dAAHE28S7FZMFtXQMRoI= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= diff --git a/api/internal/db/alert.sql_gen.go b/api/internal/db/alert.sql_gen.go index 742218a4..fa2a7875 100644 --- a/api/internal/db/alert.sql_gen.go +++ b/api/internal/db/alert.sql_gen.go @@ -22,7 +22,7 @@ func (q *Queries) AlertCreate(ctx context.Context, alertConfigID uuid.UUID) erro } const alertGet = `-- name: AlertGet :one -select a.id, a.alert_config_id, a.create_date, a.project_id, a.project_name, a.name, a.body, a.instruments, +select a.id, a.alert_config_id, a.created_at, a.project_id, a.project_name, a.name, a.body, a.instruments, case when r.alert_id is not null then true else false end as read from v_alert a @@ -40,7 +40,7 @@ type AlertGetParams struct { type AlertGetRow struct { ID uuid.UUID `json:"id"` AlertConfigID uuid.UUID `json:"alert_config_id"` - CreateDate time.Time `json:"create_date"` + CreatedAt time.Time `json:"created_at"` ProjectID uuid.UUID `json:"project_id"` ProjectName string `json:"project_name"` Name string `json:"name"` @@ -55,7 +55,7 @@ func (q *Queries) AlertGet(ctx context.Context, arg AlertGetParams) (AlertGetRow err := 
row.Scan( &i.ID, &i.AlertConfigID, - &i.CreateDate, + &i.CreatedAt, &i.ProjectID, &i.ProjectName, &i.Name, @@ -67,7 +67,7 @@ func (q *Queries) AlertGet(ctx context.Context, arg AlertGetParams) (AlertGetRow } const alertListForInstrument = `-- name: AlertListForInstrument :many -select id, alert_config_id, create_date, project_id, project_name, name, body, instruments from v_alert +select id, alert_config_id, created_at, project_id, project_name, name, body, instruments from v_alert where alert_config_id = any( select id from alert_config_instrument where instrument_id = $1 @@ -86,7 +86,7 @@ func (q *Queries) AlertListForInstrument(ctx context.Context, instrumentID uuid. if err := rows.Scan( &i.ID, &i.AlertConfigID, - &i.CreateDate, + &i.CreatedAt, &i.ProjectID, &i.ProjectName, &i.Name, @@ -104,7 +104,7 @@ func (q *Queries) AlertListForInstrument(ctx context.Context, instrumentID uuid. } const alertListForProfile = `-- name: AlertListForProfile :many -select a.id, a.alert_config_id, a.create_date, a.project_id, a.project_name, a.name, a.body, a.instruments, +select a.id, a.alert_config_id, a.created_at, a.project_id, a.project_name, a.name, a.body, a.instruments, case when r.alert_id is not null then true else false end as read from v_alert a @@ -116,7 +116,7 @@ where aps.profile_id = $1 type AlertListForProfileRow struct { ID uuid.UUID `json:"id"` AlertConfigID uuid.UUID `json:"alert_config_id"` - CreateDate time.Time `json:"create_date"` + CreatedAt time.Time `json:"created_at"` ProjectID uuid.UUID `json:"project_id"` ProjectName string `json:"project_name"` Name string `json:"name"` @@ -137,7 +137,7 @@ func (q *Queries) AlertListForProfile(ctx context.Context, profileID uuid.UUID) if err := rows.Scan( &i.ID, &i.AlertConfigID, - &i.CreateDate, + &i.CreatedAt, &i.ProjectID, &i.ProjectName, &i.Name, @@ -156,7 +156,7 @@ func (q *Queries) AlertListForProfile(ctx context.Context, profileID uuid.UUID) } const alertListForProject = `-- name: AlertListForProject :many 
-select id, alert_config_id, create_date, project_id, project_name, name, body, instruments from v_alert where project_id = $1 +select id, alert_config_id, created_at, project_id, project_name, name, body, instruments from v_alert where project_id = $1 ` func (q *Queries) AlertListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlert, error) { @@ -171,7 +171,7 @@ func (q *Queries) AlertListForProject(ctx context.Context, projectID uuid.UUID) if err := rows.Scan( &i.ID, &i.AlertConfigID, - &i.CreateDate, + &i.CreatedAt, &i.ProjectID, &i.ProjectName, &i.Name, diff --git a/api/internal/db/alert_check.sql_gen.go b/api/internal/db/alert_check.sql_gen.go index e02f9bce..293f9b73 100644 --- a/api/internal/db/alert_check.sql_gen.go +++ b/api/internal/db/alert_check.sql_gen.go @@ -12,19 +12,19 @@ import ( "github.com/google/uuid" ) -const alertConfigListUpdateLastChecked = `-- name: AlertConfigListUpdateLastChecked :many +const alertConfigListUpdateLastCheckedAt = `-- name: AlertConfigListUpdateLastCheckedAt :many update alert_config ac1 -set last_checked = now() +set last_checked_at = now() from ( - select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions + select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_type_id, alert_type, started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked_at, last_reminded_at, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config ) ac2 where ac1.id = ac2.id -returning ac2.id, ac2.name, ac2.body, ac2.creator, ac2.creator_username, ac2.create_date, ac2.updater, ac2.updater_username, 
ac2.update_date, ac2.project_id, ac2.project_name, ac2.alert_type_id, ac2.alert_type, ac2.start_date, ac2.schedule_interval, ac2.mute_consecutive_alerts, ac2.remind_interval, ac2.warning_interval, ac2.last_checked, ac2.last_reminded, ac2.create_next_submittal_from, ac2.instruments, ac2.alert_email_subscriptions +returning ac2.id, ac2.name, ac2.body, ac2.created_by, ac2.created_by_username, ac2.created_at, ac2.updated_by, ac2.updated_by_username, ac2.updated_at, ac2.project_id, ac2.project_name, ac2.alert_type_id, ac2.alert_type, ac2.started_at, ac2.schedule_interval, ac2.mute_consecutive_alerts, ac2.remind_interval, ac2.warning_interval, ac2.last_checked_at, ac2.last_reminded_at, ac2.create_next_submittal_from, ac2.instruments, ac2.alert_email_subscriptions ` -func (q *Queries) AlertConfigListUpdateLastChecked(ctx context.Context) ([]VAlertConfig, error) { - rows, err := q.db.Query(ctx, alertConfigListUpdateLastChecked) +func (q *Queries) AlertConfigListUpdateLastCheckedAt(ctx context.Context) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListUpdateLastCheckedAt) if err != nil { return nil, err } @@ -36,23 +36,23 @@ func (q *Queries) AlertConfigListUpdateLastChecked(ctx context.Context) ([]VAler &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertTypeID, &i.AlertType, - &i.StartDate, + &i.StartedAt, &i.ScheduleInterval, &i.MuteConsecutiveAlerts, &i.RemindInterval, &i.WarningInterval, - &i.LastChecked, - &i.LastReminded, + &i.LastCheckedAt, + &i.LastRemindedAt, &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, @@ -67,22 +67,22 @@ func (q *Queries) AlertConfigListUpdateLastChecked(ctx context.Context) ([]VAler return items, nil } -const alertConfigUpdateLastReminded = `-- name: 
AlertConfigUpdateLastReminded :exec -update alert_config set last_reminded = $2 where id = $1 +const alertConfigUpdateLastRemindedAt = `-- name: AlertConfigUpdateLastRemindedAt :exec +update alert_config set last_reminded_at = $2 where id = $1 ` -type AlertConfigUpdateLastRemindedParams struct { - ID uuid.UUID `json:"id"` - LastReminded *time.Time `json:"last_reminded"` +type AlertConfigUpdateLastRemindedAtParams struct { + ID uuid.UUID `json:"id"` + LastRemindedAt *time.Time `json:"last_reminded_at"` } -func (q *Queries) AlertConfigUpdateLastReminded(ctx context.Context, arg AlertConfigUpdateLastRemindedParams) error { - _, err := q.db.Exec(ctx, alertConfigUpdateLastReminded, arg.ID, arg.LastReminded) +func (q *Queries) AlertConfigUpdateLastRemindedAt(ctx context.Context, arg AlertConfigUpdateLastRemindedAtParams) error { + _, err := q.db.Exec(ctx, alertConfigUpdateLastRemindedAt, arg.ID, arg.LastRemindedAt) return err } const submittalCreateNextFromNewAlertConfigDate = `-- name: SubmittalCreateNextFromNewAlertConfigDate :exec -insert into submittal (alert_config_id, create_date, due_date) +insert into submittal (alert_config_id, created_at, due_at) select ac.id, $2::timestamptz, @@ -104,7 +104,7 @@ func (q *Queries) SubmittalCreateNextFromNewAlertConfigDate(ctx context.Context, const submittalUpdateCompletionDateOrWarningSent = `-- name: SubmittalUpdateCompletionDateOrWarningSent :exec update submittal set submittal_status_id = $2, - completion_date = $3, + completed_at = $3, warning_sent = $4 where id = $1 ` @@ -112,7 +112,7 @@ where id = $1 type SubmittalUpdateCompletionDateOrWarningSentParams struct { ID uuid.UUID `json:"id"` SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` - CompletionDate *time.Time `json:"completion_date"` + CompletedAt *time.Time `json:"completed_at"` WarningSent bool `json:"warning_sent"` } @@ -120,7 +120,7 @@ func (q *Queries) SubmittalUpdateCompletionDateOrWarningSent(ctx context.Context _, err := q.db.Exec(ctx, 
submittalUpdateCompletionDateOrWarningSent, arg.ID, arg.SubmittalStatusID, - arg.CompletionDate, + arg.CompletedAt, arg.WarningSent, ) return err diff --git a/api/internal/db/alert_config.sql_gen.go b/api/internal/db/alert_config.sql_gen.go index d6ecc891..b31c65da 100644 --- a/api/internal/db/alert_config.sql_gen.go +++ b/api/internal/db/alert_config.sql_gen.go @@ -18,13 +18,13 @@ insert into alert_config ( name, body, alert_type_id, - start_date, + started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, - creator, - create_date + created_by, + created_at ) values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11) returning id ` @@ -34,13 +34,13 @@ type AlertConfigCreateParams struct { Name string `json:"name"` Body string `json:"body"` AlertTypeID uuid.UUID `json:"alert_type_id"` - StartDate time.Time `json:"start_date"` + StartedAt time.Time `json:"started_at"` ScheduleInterval string `json:"schedule_interval"` MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` RemindInterval string `json:"remind_interval"` WarningInterval string `json:"warning_interval"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` } func (q *Queries) AlertConfigCreate(ctx context.Context, arg AlertConfigCreateParams) (uuid.UUID, error) { @@ -49,13 +49,13 @@ func (q *Queries) AlertConfigCreate(ctx context.Context, arg AlertConfigCreatePa arg.Name, arg.Body, arg.AlertTypeID, - arg.StartDate, + arg.StartedAt, arg.ScheduleInterval, arg.MuteConsecutiveAlerts, arg.RemindInterval, arg.WarningInterval, - arg.Creator, - arg.CreateDate, + arg.CreatedBy, + arg.CreatedAt, ) var id uuid.UUID err := row.Scan(&id) @@ -72,7 +72,7 @@ func (q *Queries) AlertConfigDelete(ctx context.Context, id uuid.UUID) error { } const alertConfigGet = `-- name: AlertConfigGet :one -select id, name, body, creator, creator_username, create_date, updater, updater_username, 
update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where id = $1 +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_type_id, alert_type, started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked_at, last_reminded_at, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where id = $1 ` func (q *Queries) AlertConfigGet(ctx context.Context, id uuid.UUID) (VAlertConfig, error) { @@ -82,23 +82,23 @@ func (q *Queries) AlertConfigGet(ctx context.Context, id uuid.UUID) (VAlertConfi &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertTypeID, &i.AlertType, - &i.StartDate, + &i.StartedAt, &i.ScheduleInterval, &i.MuteConsecutiveAlerts, &i.RemindInterval, &i.WarningInterval, - &i.LastChecked, - &i.LastReminded, + &i.LastCheckedAt, + &i.LastRemindedAt, &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, @@ -130,7 +130,7 @@ func (q *Queries) AlertConfigInstrumentDeleteAssignmentsForAlertConfig(ctx conte } const alertConfigListForInstrument = `-- name: AlertConfigListForInstrument :many -select t.id, t.name, t.body, t.creator, t.creator_username, t.create_date, t.updater, t.updater_username, t.update_date, t.project_id, t.project_name, t.alert_type_id, t.alert_type, t.start_date, t.schedule_interval, t.mute_consecutive_alerts, t.remind_interval, t.warning_interval, t.last_checked, t.last_reminded, t.create_next_submittal_from, t.instruments, 
t.alert_email_subscriptions +select t.id, t.name, t.body, t.created_by, t.created_by_username, t.created_at, t.updated_by, t.updated_by_username, t.updated_at, t.project_id, t.project_name, t.alert_type_id, t.alert_type, t.started_at, t.schedule_interval, t.mute_consecutive_alerts, t.remind_interval, t.warning_interval, t.last_checked_at, t.last_reminded_at, t.create_next_submittal_from, t.instruments, t.alert_email_subscriptions from v_alert_config t inner join alert_config_instrument aci on t.id = aci.alert_config_id where aci.instrument_id = $1 @@ -150,23 +150,23 @@ func (q *Queries) AlertConfigListForInstrument(ctx context.Context, instrumentID &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertTypeID, &i.AlertType, - &i.StartDate, + &i.StartedAt, &i.ScheduleInterval, &i.MuteConsecutiveAlerts, &i.RemindInterval, &i.WarningInterval, - &i.LastChecked, - &i.LastReminded, + &i.LastCheckedAt, + &i.LastRemindedAt, &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, @@ -182,7 +182,7 @@ func (q *Queries) AlertConfigListForInstrument(ctx context.Context, instrumentID } const alertConfigListForProject = `-- name: AlertConfigListForProject :many -select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_type_id, alert_type, started_at, schedule_interval, mute_consecutive_alerts, remind_interval, 
warning_interval, last_checked_at, last_reminded_at, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where project_id = $1 order by name @@ -201,23 +201,23 @@ func (q *Queries) AlertConfigListForProject(ctx context.Context, projectID uuid. &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertTypeID, &i.AlertType, - &i.StartDate, + &i.StartedAt, &i.ScheduleInterval, &i.MuteConsecutiveAlerts, &i.RemindInterval, &i.WarningInterval, - &i.LastChecked, - &i.LastReminded, + &i.LastCheckedAt, + &i.LastRemindedAt, &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, @@ -233,7 +233,7 @@ func (q *Queries) AlertConfigListForProject(ctx context.Context, projectID uuid. } const alertConfigListForProjectAlertType = `-- name: AlertConfigListForProjectAlertType :many -select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_type_id, alert_type, start_date, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked, last_reminded, create_next_submittal_from, instruments, alert_email_subscriptions +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_type_id, alert_type, started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked_at, last_reminded_at, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where project_id = $1 and alert_type_id = $2 @@ -258,23 +258,23 @@ func (q *Queries) AlertConfigListForProjectAlertType(ctx context.Context, arg Al &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - 
&i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertTypeID, &i.AlertType, - &i.StartDate, + &i.StartedAt, &i.ScheduleInterval, &i.MuteConsecutiveAlerts, &i.RemindInterval, &i.WarningInterval, - &i.LastChecked, - &i.LastReminded, + &i.LastCheckedAt, + &i.LastRemindedAt, &i.CreateNextSubmittalFrom, &i.Instruments, &i.AlertEmailSubscriptions, @@ -293,13 +293,13 @@ const alertConfigUpdate = `-- name: AlertConfigUpdate :exec update alert_config set name = $3, body = $4, - start_date = $5, + started_at = $5, schedule_interval = $6, mute_consecutive_alerts = $7, remind_interval = $8, warning_interval = $9, - updater = $10, - update_date = $11 + updated_by = $10, + updated_at = $11 where id = $1 and project_id = $2 ` @@ -308,13 +308,13 @@ type AlertConfigUpdateParams struct { ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Body string `json:"body"` - StartDate time.Time `json:"start_date"` + StartedAt time.Time `json:"started_at"` ScheduleInterval string `json:"schedule_interval"` MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` RemindInterval string `json:"remind_interval"` WarningInterval string `json:"warning_interval"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` } func (q *Queries) AlertConfigUpdate(ctx context.Context, arg AlertConfigUpdateParams) error { @@ -323,20 +323,20 @@ func (q *Queries) AlertConfigUpdate(ctx context.Context, arg AlertConfigUpdatePa arg.ProjectID, arg.Name, arg.Body, - arg.StartDate, + arg.StartedAt, arg.ScheduleInterval, arg.MuteConsecutiveAlerts, arg.RemindInterval, arg.WarningInterval, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, ) return err } const submittalCreateNextFromExistingAlertConfigDate = `-- name: 
SubmittalCreateNextFromExistingAlertConfigDate :exec -insert into submittal (alert_config_id, due_date) -select ac.id, ac.create_date + ac.schedule_interval +insert into submittal (alert_config_id, due_at) +select ac.id, ac.created_at + ac.schedule_interval from alert_config ac where ac.id = $1 ` @@ -348,20 +348,20 @@ func (q *Queries) SubmittalCreateNextFromExistingAlertConfigDate(ctx context.Con const submittalUpdateNextForAlertConfig = `-- name: SubmittalUpdateNextForAlertConfig :one update submittal -set due_date = sq.new_due_date +set due_at = sq.new_due_at from ( select sub.id as submittal_id, - sub.create_date + ac.schedule_interval as new_due_date + sub.created_at + ac.schedule_interval as new_due_at from submittal sub inner join alert_config ac on sub.alert_config_id = ac.id where sub.alert_config_id = $1 - and sub.due_date > now() - and sub.completion_date is null + and sub.due_at > now() + and sub.completed_at is null and not sub.marked_as_missing ) sq where id = sq.submittal_id -and sq.new_due_date > now() +and sq.new_due_at > now() returning id ` diff --git a/api/internal/db/alert_measurement_check.sql_gen.go b/api/internal/db/alert_measurement_check.sql_gen.go index d2fe6ebe..12b6f30d 100644 --- a/api/internal/db/alert_measurement_check.sql_gen.go +++ b/api/internal/db/alert_measurement_check.sql_gen.go @@ -13,7 +13,7 @@ const submittalListIncompleteEvaluation = `-- name: SubmittalListIncompleteEvalu select alert_config_id, submittal_id, submittal, should_warn, should_alert, should_remind from v_alert_check_evaluation_submittal where submittal_id = any( select id from submittal - where completion_date is null and not marked_as_missing + where completed_at is null and not marked_as_missing ) ` @@ -48,7 +48,7 @@ const submittalListIncompleteMeasurement = `-- name: SubmittalListIncompleteMeas select alert_config_id, submittal_id, submittal, should_warn, should_alert, should_remind, affected_timeseries from v_alert_check_measurement_submittal where 
submittal_id = any( select id from submittal - where completion_date is null and not marked_as_missing + where completed_at is null and not marked_as_missing ) ` diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go index f2c5eef9..48ad9c28 100644 --- a/api/internal/db/batch.go +++ b/api/internal/db/batch.go @@ -505,8 +505,20 @@ func (b *InstrumentConstantCreateBatchBatchResults) Close() error { } const instrumentCreateBatch = `-- name: InstrumentCreateBatch :batchone -insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) -values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) +insert into instrument (slug, name, type_id, geometry, station, station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab) +values ( + slugify($1, 'instrument'), + $1, + $2, + ST_SetSRID(ST_GeomFromGeoJSON($3::json), 4326), + $4, + $5, + $6, + $7, + $8, + $9, + $10 +) returning id, slug ` @@ -519,11 +531,11 @@ type InstrumentCreateBatchBatchResults struct { type InstrumentCreateBatchParams struct { Name string `json:"name"` TypeID uuid.UUID `json:"type_id"` - Geometry Geometry `json:"geometry"` + Geometry []byte `json:"geometry"` Station *int32 `json:"station"` StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` NidID *string `json:"nid_id"` UsgsID *string `json:"usgs_id"` ShowCwmsTab bool `json:"show_cwms_tab"` @@ -543,8 +555,8 @@ func (q *Queries) InstrumentCreateBatch(ctx context.Context, arg []InstrumentCre a.Geometry, a.Station, a.StationOffset, - a.Creator, - a.CreateDate, + a.CreatedBy, + a.CreatedAt, a.NidID, a.UsgsID, a.ShowCwmsTab, @@ -579,9 +591,9 @@ func (b *InstrumentCreateBatchBatchResults) Close() error { } const instrumentGroupCreateBatch = `-- name: InstrumentGroupCreateBatch :batchone -insert 
into instrument_group (slug, name, description, creator, create_date, project_id) +insert into instrument_group (slug, name, description, created_by, created_at, project_id) values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) -returning id, deleted, slug, name, description, creator, create_date, updater, update_date, project_id +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id ` type InstrumentGroupCreateBatchBatchResults struct { @@ -593,8 +605,20 @@ type InstrumentGroupCreateBatchBatchResults struct { type InstrumentGroupCreateBatchParams struct { Name string `json:"name"` Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +type InstrumentGroupCreateBatchRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` ProjectID *uuid.UUID `json:"project_id"` } @@ -604,8 +628,8 @@ func (q *Queries) InstrumentGroupCreateBatch(ctx context.Context, arg []Instrume vals := []interface{}{ a.Name, a.Description, - a.Creator, - a.CreateDate, + a.CreatedBy, + a.CreatedAt, a.ProjectID, } batch.Queue(instrumentGroupCreateBatch, vals...) 
@@ -614,10 +638,10 @@ func (q *Queries) InstrumentGroupCreateBatch(ctx context.Context, arg []Instrume return &InstrumentGroupCreateBatchBatchResults{br, len(arg), false} } -func (b *InstrumentGroupCreateBatchBatchResults) QueryRow(f func(int, InstrumentGroup, error)) { +func (b *InstrumentGroupCreateBatchBatchResults) QueryRow(f func(int, InstrumentGroupCreateBatchRow, error)) { defer b.br.Close() for t := 0; t < b.tot; t++ { - var i InstrumentGroup + var i InstrumentGroupCreateBatchRow if b.closed { if f != nil { f(t, i, ErrBatchAlreadyClosed) @@ -627,14 +651,13 @@ func (b *InstrumentGroupCreateBatchBatchResults) QueryRow(f func(int, Instrument row := b.br.QueryRow() err := row.Scan( &i.ID, - &i.Deleted, &i.Slug, &i.Name, &i.Description, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.ProjectID, ) if f != nil { @@ -649,9 +672,9 @@ func (b *InstrumentGroupCreateBatchBatchResults) Close() error { } const instrumentNoteCreateBatch = `-- name: InstrumentNoteCreateBatch :batchone -insert into instrument_note (instrument_id, title, body, time, creator, create_date) +insert into instrument_note (instrument_id, title, body, time, created_by, created_at) values ($1, $2, $3, $4, $5, $6) -returning id, instrument_id, title, body, time, creator, create_date, updater, update_date +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at ` type InstrumentNoteCreateBatchBatchResults struct { @@ -665,8 +688,8 @@ type InstrumentNoteCreateBatchParams struct { Title string `json:"title"` Body string `json:"body"` Time time.Time `json:"time"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` } func (q *Queries) InstrumentNoteCreateBatch(ctx context.Context, arg []InstrumentNoteCreateBatchParams) *InstrumentNoteCreateBatchBatchResults { @@ -677,8 +700,8 
@@ func (q *Queries) InstrumentNoteCreateBatch(ctx context.Context, arg []Instrumen a.Title, a.Body, a.Time, - a.Creator, - a.CreateDate, + a.CreatedBy, + a.CreatedAt, } batch.Queue(instrumentNoteCreateBatch, vals...) } @@ -703,10 +726,10 @@ func (b *InstrumentNoteCreateBatchBatchResults) QueryRow(f func(int, InstrumentN &i.Title, &i.Body, &i.Time, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, ) if f != nil { f(t, i, err) @@ -1175,7 +1198,7 @@ func (b *PlotContourConfigTimeseriesCreateBatchBatchResults) Close() error { } const projectCreateBatch = `-- name: ProjectCreateBatch :batchone -insert into project (federal_id, slug, name, district_id, creator, create_date) +insert into project (federal_id, slug, name, district_id, created_by, created_at) values ($1, slugify($2, 'project'), $2, $3, $4, $5) returning id, slug ` @@ -1190,8 +1213,8 @@ type ProjectCreateBatchParams struct { FederalID *string `json:"federal_id"` Name string `json:"name"` DistrictID *uuid.UUID `json:"district_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` } type ProjectCreateBatchRow struct { @@ -1206,8 +1229,8 @@ func (q *Queries) ProjectCreateBatch(ctx context.Context, arg []ProjectCreateBat a.FederalID, a.Name, a.DistrictID, - a.Creator, - a.CreateDate, + a.CreatedBy, + a.CreatedAt, } batch.Queue(projectCreateBatch, vals...) 
} @@ -1688,21 +1711,21 @@ type TimeseriesCreateBatchBatchResults struct { } type TimeseriesCreateBatchParams struct { - InstrumentID *uuid.UUID `json:"instrument_id"` - Name string `json:"name"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Type NullTimeseriesType `json:"type"` + InstrumentID *uuid.UUID `json:"instrument_id"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` } type TimeseriesCreateBatchRow struct { - ID uuid.UUID `json:"id"` - InstrumentID *uuid.UUID `json:"instrument_id"` - Slug string `json:"slug"` - Name string `json:"name"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Type NullTimeseriesType `json:"type"` + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + Slug string `json:"slug"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` } func (q *Queries) TimeseriesCreateBatch(ctx context.Context, arg []TimeseriesCreateBatchParams) *TimeseriesCreateBatchBatchResults { diff --git a/api/internal/db/collection_group.sql_gen.go b/api/internal/db/collection_group.sql_gen.go index 72182660..e7558afd 100644 --- a/api/internal/db/collection_group.sql_gen.go +++ b/api/internal/db/collection_group.sql_gen.go @@ -13,25 +13,25 @@ import ( ) const collectionGroupCreate = `-- name: CollectionGroupCreate :one -insert into collection_group (project_id, name, slug, creator, create_date, sort_order) +insert into collection_group (project_id, name, slug, created_by, created_at, sort_order) values ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5) -returning id, project_id, name, slug, creator, create_date, updater, update_date, sort_order +returning id, project_id, name, slug, created_by, created_at, updated_by, updated_at, sort_order ` type 
CollectionGroupCreateParams struct { - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - SortOrder int32 `json:"sort_order"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + SortOrder int32 `json:"sort_order"` } func (q *Queries) CollectionGroupCreate(ctx context.Context, arg CollectionGroupCreateParams) (CollectionGroup, error) { row := q.db.QueryRow(ctx, collectionGroupCreate, arg.ProjectID, arg.Name, - arg.Creator, - arg.CreateDate, + arg.CreatedBy, + arg.CreatedAt, arg.SortOrder, ) var i CollectionGroup @@ -40,10 +40,10 @@ func (q *Queries) CollectionGroupCreate(ctx context.Context, arg CollectionGroup &i.ProjectID, &i.Name, &i.Slug, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.SortOrder, ) return i, err @@ -64,21 +64,21 @@ func (q *Queries) CollectionGroupDelete(ctx context.Context, arg CollectionGroup } const collectionGroupDetailsGet = `-- name: CollectionGroupDetailsGet :one -select id, project_id, name, slug, creator, create_date, updater, update_date, sort_order, timeseries from v_collection_group_details where id = $1 +select id, project_id, name, slug, created_by, created_at, updated_by, updated_at, sort_order, timeseries from v_collection_group_details where id = $1 ` -func (q *Queries) CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) { +func (q *Queries) CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) (VCollectionGroupDetails, error) { row := q.db.QueryRow(ctx, collectionGroupDetailsGet, id) - var i VCollectionGroupDetail + var i VCollectionGroupDetails err := row.Scan( &i.ID, &i.ProjectID, &i.Name, &i.Slug, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + 
&i.UpdatedBy, + &i.UpdatedAt, &i.SortOrder, &i.Timeseries, ) @@ -86,7 +86,7 @@ func (q *Queries) CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) ( } const collectionGroupListForProject = `-- name: CollectionGroupListForProject :many -select id, project_id, name, slug, creator, create_date, updater, update_date, sort_order from collection_group where project_id = $1 +select id, project_id, name, slug, created_by, created_at, updated_by, updated_at, sort_order from collection_group where project_id = $1 ` func (q *Queries) CollectionGroupListForProject(ctx context.Context, projectID uuid.UUID) ([]CollectionGroup, error) { @@ -103,10 +103,10 @@ func (q *Queries) CollectionGroupListForProject(ctx context.Context, projectID u &i.ProjectID, &i.Name, &i.Slug, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.SortOrder, ); err != nil { return nil, err @@ -166,18 +166,18 @@ func (q *Queries) CollectionGroupTimeseriesUpdateSortOrder(ctx context.Context, } const collectionGroupUpdate = `-- name: CollectionGroupUpdate :one -update collection_group set name=$3, updater=$4, update_date=$5, sort_order=$6 +update collection_group set name=$3, updated_by=$4, updated_at=$5, sort_order=$6 where project_id=$1 and id=$2 -returning id, project_id, name, slug, creator, create_date, updater, update_date, sort_order +returning id, project_id, name, slug, created_by, created_at, updated_by, updated_at, sort_order ` type CollectionGroupUpdateParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` - SortOrder int32 `json:"sort_order"` + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + SortOrder int32 `json:"sort_order"` } func (q 
*Queries) CollectionGroupUpdate(ctx context.Context, arg CollectionGroupUpdateParams) (CollectionGroup, error) { @@ -185,8 +185,8 @@ func (q *Queries) CollectionGroupUpdate(ctx context.Context, arg CollectionGroup arg.ProjectID, arg.ID, arg.Name, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, arg.SortOrder, ) var i CollectionGroup @@ -195,10 +195,10 @@ func (q *Queries) CollectionGroupUpdate(ctx context.Context, arg CollectionGroup &i.ProjectID, &i.Name, &i.Slug, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.SortOrder, ) return i, err diff --git a/api/internal/db/datalogger.sql_gen.go b/api/internal/db/datalogger.sql_gen.go index 940ad75d..1757d9e3 100644 --- a/api/internal/db/datalogger.sql_gen.go +++ b/api/internal/db/datalogger.sql_gen.go @@ -13,7 +13,7 @@ import ( ) const dataloggerCreate = `-- name: DataloggerCreate :one -insert into datalogger (name, sn, project_id, creator, updater, slug, model_id) +insert into datalogger (name, sn, project_id, created_by, updated_by, slug, model_id) values ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) returning id ` @@ -22,7 +22,7 @@ type DataloggerCreateParams struct { Name string `json:"name"` Sn string `json:"sn"` ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` + CreatedBy uuid.UUID `json:"created_by"` ModelID uuid.UUID `json:"model_id"` } @@ -31,7 +31,7 @@ func (q *Queries) DataloggerCreate(ctx context.Context, arg DataloggerCreatePara arg.Name, arg.Sn, arg.ProjectID, - arg.Creator, + arg.CreatedBy, arg.ModelID, ) var id uuid.UUID @@ -40,22 +40,22 @@ func (q *Queries) DataloggerCreate(ctx context.Context, arg DataloggerCreatePara } const dataloggerDelete = `-- name: DataloggerDelete :exec -update datalogger set deleted=true, updater=$2, update_date=$3 where id=$1 +update datalogger set deleted=true, updated_by=$2, updated_at=$3 where id=$1 ` type DataloggerDeleteParams struct { - ID 
uuid.UUID `json:"id"` - Updater uuid.UUID `json:"updater"` - UpdateDate time.Time `json:"update_date"` + ID uuid.UUID `json:"id"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` } func (q *Queries) DataloggerDelete(ctx context.Context, arg DataloggerDeleteParams) error { - _, err := q.db.Exec(ctx, dataloggerDelete, arg.ID, arg.Updater, arg.UpdateDate) + _, err := q.db.Exec(ctx, dataloggerDelete, arg.ID, arg.UpdatedBy, arg.UpdatedAt) return err } const dataloggerGet = `-- name: DataloggerGet :one -select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where id=$1 +select id, sn, project_id, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, name, slug, model_id, model, errors, tables from v_datalogger where id=$1 ` func (q *Queries) DataloggerGet(ctx context.Context, id uuid.UUID) (VDatalogger, error) { @@ -65,12 +65,12 @@ func (q *Queries) DataloggerGet(ctx context.Context, id uuid.UUID) (VDatalogger, &i.ID, &i.Sn, &i.ProjectID, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.Name, &i.Slug, &i.ModelID, @@ -148,7 +148,7 @@ func (q *Queries) DataloggerHashUpdate(ctx context.Context, arg DataloggerHashUp } const dataloggerList = `-- name: DataloggerList :many -select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger +select id, sn, project_id, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, name, slug, model_id, model, errors, tables from v_datalogger ` func (q *Queries) DataloggerList(ctx context.Context) ([]VDatalogger, error) { @@ -164,12 +164,12 @@ func (q 
*Queries) DataloggerList(ctx context.Context) ([]VDatalogger, error) { &i.ID, &i.Sn, &i.ProjectID, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.Name, &i.Slug, &i.ModelID, @@ -188,7 +188,7 @@ func (q *Queries) DataloggerList(ctx context.Context) ([]VDatalogger, error) { } const dataloggerListForProject = `-- name: DataloggerListForProject :many -select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger where project_id=$1 +select id, sn, project_id, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, name, slug, model_id, model, errors, tables from v_datalogger where project_id=$1 ` func (q *Queries) DataloggerListForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) { @@ -204,12 +204,12 @@ func (q *Queries) DataloggerListForProject(ctx context.Context, projectID uuid.U &i.ID, &i.Sn, &i.ProjectID, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.Name, &i.Slug, &i.ModelID, @@ -260,13 +260,13 @@ func (q *Queries) DataloggerTableGetOrCreate(ctx context.Context, arg Datalogger } const dataloggerTablePreviewGet = `-- name: DataloggerTablePreviewGet :one -select datalogger_table_id, preview, update_date from v_datalogger_preview where datalogger_table_id=$1 limit 1 +select datalogger_table_id, preview, updated_at from v_datalogger_preview where datalogger_table_id=$1 limit 1 ` func (q *Queries) DataloggerTablePreviewGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) { row := q.db.QueryRow(ctx, dataloggerTablePreviewGet, 
dataloggerTableID) var i VDataloggerPreview - err := row.Scan(&i.DataloggerTableID, &i.Preview, &i.UpdateDate) + err := row.Scan(&i.DataloggerTableID, &i.Preview, &i.UpdatedAt) return i, err } @@ -292,48 +292,48 @@ func (q *Queries) DataloggerTableUpdateNameIfEmpty(ctx context.Context, arg Data const dataloggerUpdate = `-- name: DataloggerUpdate :exec update datalogger set name=$2, - updater=$3, - update_date=$4 + updated_by=$3, + updated_at=$4 where id=$1 ` type DataloggerUpdateParams struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Updater uuid.UUID `json:"updater"` - UpdateDate time.Time `json:"update_date"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` } func (q *Queries) DataloggerUpdate(ctx context.Context, arg DataloggerUpdateParams) error { _, err := q.db.Exec(ctx, dataloggerUpdate, arg.ID, arg.Name, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, ) return err } -const dataloggerUpdateTableNameBlank = `-- name: DataloggerUpdateTableNameBlank :exec -update datalogger_table set table_name='' where id=$1 +const dataloggerUpdateAuditInfo = `-- name: DataloggerUpdateAuditInfo :exec +update datalogger set updated_by=$2, updated_at=$3 where id=$1 ` -func (q *Queries) DataloggerUpdateTableNameBlank(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, dataloggerUpdateTableNameBlank, id) +type DataloggerUpdateAuditInfoParams struct { + ID uuid.UUID `json:"id"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) DataloggerUpdateAuditInfo(ctx context.Context, arg DataloggerUpdateAuditInfoParams) error { + _, err := q.db.Exec(ctx, dataloggerUpdateAuditInfo, arg.ID, arg.UpdatedBy, arg.UpdatedAt) return err } -const dataloggerUpdateUpdater = `-- name: DataloggerUpdateUpdater :exec -update datalogger set updater=$2, update_date=$3 where id=$1 +const 
dataloggerUpdateTableNameBlank = `-- name: DataloggerUpdateTableNameBlank :exec +update datalogger_table set table_name='' where id=$1 ` -type DataloggerUpdateUpdaterParams struct { - ID uuid.UUID `json:"id"` - Updater uuid.UUID `json:"updater"` - UpdateDate time.Time `json:"update_date"` -} - -func (q *Queries) DataloggerUpdateUpdater(ctx context.Context, arg DataloggerUpdateUpdaterParams) error { - _, err := q.db.Exec(ctx, dataloggerUpdateUpdater, arg.ID, arg.Updater, arg.UpdateDate) +func (q *Queries) DataloggerUpdateTableNameBlank(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, dataloggerUpdateTableNameBlank, id) return err } diff --git a/api/internal/db/datalogger_telemetry.sql_gen.go b/api/internal/db/datalogger_telemetry.sql_gen.go index 8426456e..4d3f7f0a 100644 --- a/api/internal/db/datalogger_telemetry.sql_gen.go +++ b/api/internal/db/datalogger_telemetry.sql_gen.go @@ -34,7 +34,7 @@ func (q *Queries) DataloggerErrorCreate(ctx context.Context, arg DataloggerError const dataloggerErrorDelete = `-- name: DataloggerErrorDelete :exec delete from datalogger_error -where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) +where datalogger_table_id = any(select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) ` type DataloggerErrorDeleteParams struct { @@ -48,7 +48,7 @@ func (q *Queries) DataloggerErrorDelete(ctx context.Context, arg DataloggerError } const dataloggerGetForModelSn = `-- name: DataloggerGetForModelSn :one -select id, sn, project_id, creator, creator_username, create_date, updater, updater_username, update_date, name, slug, model_id, model, errors, tables from v_datalogger +select id, sn, project_id, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, name, slug, model_id, model, errors, tables from v_datalogger where model = $1 and sn = $2 limit 1 ` @@ -65,12 +65,12 @@ func (q *Queries) 
DataloggerGetForModelSn(ctx context.Context, arg DataloggerGet &i.ID, &i.Sn, &i.ProjectID, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.Name, &i.Slug, &i.ModelID, @@ -100,30 +100,30 @@ func (q *Queries) DataloggerHashGetForModelSn(ctx context.Context, arg Datalogge } const dataloggerTablePreviewCreate = `-- name: DataloggerTablePreviewCreate :exec -insert into datalogger_preview (datalogger_table_id, preview, update_date) values ($1, $2, $3) +insert into datalogger_preview (datalogger_table_id, preview, updated_at) values ($1, $2, $3) ` type DataloggerTablePreviewCreateParams struct { DataloggerTableID uuid.UUID `json:"datalogger_table_id"` Preview []byte `json:"preview"` - UpdateDate time.Time `json:"update_date"` + UpdatedAt time.Time `json:"updated_at"` } func (q *Queries) DataloggerTablePreviewCreate(ctx context.Context, arg DataloggerTablePreviewCreateParams) error { - _, err := q.db.Exec(ctx, dataloggerTablePreviewCreate, arg.DataloggerTableID, arg.Preview, arg.UpdateDate) + _, err := q.db.Exec(ctx, dataloggerTablePreviewCreate, arg.DataloggerTableID, arg.Preview, arg.UpdatedAt) return err } const dataloggerTablePreviewUpdate = `-- name: DataloggerTablePreviewUpdate :exec -update datalogger_preview set preview = $3, update_date = $4 -where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) +update datalogger_preview set preview = $3, updated_at = $4 +where datalogger_table_id = any(select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) ` type DataloggerTablePreviewUpdateParams struct { DataloggerID uuid.UUID `json:"datalogger_id"` TableName string `json:"table_name"` Preview []byte `json:"preview"` - UpdateDate time.Time `json:"update_date"` + UpdatedAt time.Time `json:"updated_at"` } func 
(q *Queries) DataloggerTablePreviewUpdate(ctx context.Context, arg DataloggerTablePreviewUpdateParams) error { @@ -131,7 +131,7 @@ func (q *Queries) DataloggerTablePreviewUpdate(ctx context.Context, arg Datalogg arg.DataloggerID, arg.TableName, arg.Preview, - arg.UpdateDate, + arg.UpdatedAt, ) return err } diff --git a/api/internal/db/district_rollup.sql_gen.go b/api/internal/db/district_rollup.sql_gen.go index aab2a70c..a322db89 100644 --- a/api/internal/db/district_rollup.sql_gen.go +++ b/api/internal/db/district_rollup.sql_gen.go @@ -13,11 +13,11 @@ import ( ) const districtRollupListEvaluationForProjectAlertConfig = `-- name: DistrictRollupListEvaluationForProjectAlertConfig :many -select alert_type_id, office_id, district_initials, project_name, project_id, the_month, expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup +select alert_type_id, office_id, district_initials, project_name, project_id, month, expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup where alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid and project_id=$1 -and the_month >= date_trunc('month', $2::timestamptz) -and the_month <= date_trunc('month', $3::timestamptz) +and "month" >= date_trunc('month', $2::timestamptz) +and "month" <= date_trunc('month', $3::timestamptz) ` type DistrictRollupListEvaluationForProjectAlertConfigParams struct { @@ -41,7 +41,7 @@ func (q *Queries) DistrictRollupListEvaluationForProjectAlertConfig(ctx context. &i.DistrictInitials, &i.ProjectName, &i.ProjectID, - &i.TheMonth, + &i.Month, &i.ExpectedTotalSubmittals, &i.ActualTotalSubmittals, &i.RedSubmittals, @@ -59,11 +59,11 @@ func (q *Queries) DistrictRollupListEvaluationForProjectAlertConfig(ctx context. 
} const districtRollupListMeasurementForProjectAlertConfig = `-- name: DistrictRollupListMeasurementForProjectAlertConfig :many -select alert_type_id, office_id, district_initials, project_name, project_id, the_month, expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup +select alert_type_id, office_id, district_initials, project_name, project_id, month, expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup where alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::uuid and project_id=$1 -and the_month >= date_trunc('month', $2::timestamptz) -and the_month <= date_trunc('month', $3::timestamptz) +and "month" >= date_trunc('month', $2::timestamptz) +and "month" <= date_trunc('month', $3::timestamptz) ` type DistrictRollupListMeasurementForProjectAlertConfigParams struct { @@ -87,7 +87,7 @@ func (q *Queries) DistrictRollupListMeasurementForProjectAlertConfig(ctx context &i.DistrictInitials, &i.ProjectName, &i.ProjectID, - &i.TheMonth, + &i.Month, &i.ExpectedTotalSubmittals, &i.ActualTotalSubmittals, &i.RedSubmittals, diff --git a/api/internal/db/evaluation.sql_gen.go b/api/internal/db/evaluation.sql_gen.go index 0e469eb2..c1747d7f 100644 --- a/api/internal/db/evaluation.sql_gen.go +++ b/api/internal/db/evaluation.sql_gen.go @@ -18,10 +18,10 @@ insert into evaluation ( submittal_id, name, body, - start_date, - end_date, - creator, - create_date + started_at, + ended_at, + created_by, + created_at ) values ($1,$2,$3,$4,$5,$6,$7,$8) returning id ` @@ -31,10 +31,10 @@ type EvaluationCreateParams struct { SubmittalID *uuid.UUID `json:"submittal_id"` Name string `json:"name"` Body string `json:"body"` - StartDate time.Time `json:"start_date"` - EndDate time.Time `json:"end_date"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + StartedAt time.Time `json:"started_at"` + EndedAt time.Time 
`json:"ended_at"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` } func (q *Queries) EvaluationCreate(ctx context.Context, arg EvaluationCreateParams) (uuid.UUID, error) { @@ -43,10 +43,10 @@ func (q *Queries) EvaluationCreate(ctx context.Context, arg EvaluationCreatePara arg.SubmittalID, arg.Name, arg.Body, - arg.StartDate, - arg.EndDate, - arg.Creator, - arg.CreateDate, + arg.StartedAt, + arg.EndedAt, + arg.CreatedBy, + arg.CreatedAt, ) var id uuid.UUID err := row.Scan(&id) @@ -63,7 +63,7 @@ func (q *Queries) EvaluationDelete(ctx context.Context, id uuid.UUID) error { } const evaluationGet = `-- name: EvaluationGet :one -select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation where id=$1 +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_config_id, alert_config_name, submittal_id, started_at, ended_at, instruments from v_evaluation where id=$1 ` func (q *Queries) EvaluationGet(ctx context.Context, id uuid.UUID) (VEvaluation, error) { @@ -73,19 +73,19 @@ func (q *Queries) EvaluationGet(ctx context.Context, id uuid.UUID) (VEvaluation, &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertConfigID, &i.AlertConfigName, &i.SubmittalID, - &i.StartDate, - &i.EndDate, + &i.StartedAt, + &i.EndedAt, &i.Instruments, ) return i, err @@ -115,7 +115,7 @@ func (q *Queries) EvaluationInstrumentDeleteForEvaluation(ctx context.Context, e } const evaluationListForInstrument = `-- name: EvaluationListForInstrument :many -select id, name, body, creator, 
creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_config_id, alert_config_name, submittal_id, started_at, ended_at, instruments from v_evaluation where id = any( select evaluation_id from evaluation_instrument @@ -136,19 +136,19 @@ func (q *Queries) EvaluationListForInstrument(ctx context.Context, instrumentID &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertConfigID, &i.AlertConfigName, &i.SubmittalID, - &i.StartDate, - &i.EndDate, + &i.StartedAt, + &i.EndedAt, &i.Instruments, ); err != nil { return nil, err @@ -162,7 +162,7 @@ func (q *Queries) EvaluationListForInstrument(ctx context.Context, instrumentID } const evaluationListForProject = `-- name: EvaluationListForProject :many -select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_config_id, alert_config_name, submittal_id, started_at, ended_at, instruments from v_evaluation where project_id=$1 ` @@ -180,19 +180,19 @@ func (q *Queries) EvaluationListForProject(ctx context.Context, projectID uuid.U &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + 
&i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertConfigID, &i.AlertConfigName, &i.SubmittalID, - &i.StartDate, - &i.EndDate, + &i.StartedAt, + &i.EndedAt, &i.Instruments, ); err != nil { return nil, err @@ -206,7 +206,7 @@ func (q *Queries) EvaluationListForProject(ctx context.Context, projectID uuid.U } const evaluationListForProjectAlertConfig = `-- name: EvaluationListForProjectAlertConfig :many -select id, name, body, creator, creator_username, create_date, updater, updater_username, update_date, project_id, project_name, alert_config_id, alert_config_name, submittal_id, start_date, end_date, instruments from v_evaluation +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_config_id, alert_config_name, submittal_id, started_at, ended_at, instruments from v_evaluation where project_id=$1 and alert_config_id is not null and alert_config_id=$2 @@ -230,19 +230,19 @@ func (q *Queries) EvaluationListForProjectAlertConfig(ctx context.Context, arg E &i.ID, &i.Name, &i.Body, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.ProjectID, &i.ProjectName, &i.AlertConfigID, &i.AlertConfigName, &i.SubmittalID, - &i.StartDate, - &i.EndDate, + &i.StartedAt, + &i.EndedAt, &i.Instruments, ); err != nil { return nil, err @@ -259,22 +259,22 @@ const evaluationUpdate = `-- name: EvaluationUpdate :exec update evaluation set name=$3, body=$4, - start_date=$5, - end_date=$6, - updater=$7, - update_date=$8 + started_at=$5, + ended_at=$6, + updated_by=$7, + updated_at=$8 where id=$1 and project_id=$2 ` type EvaluationUpdateParams struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Body string `json:"body"` - StartDate time.Time 
`json:"start_date"` - EndDate time.Time `json:"end_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartedAt time.Time `json:"started_at"` + EndedAt time.Time `json:"ended_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` } func (q *Queries) EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error { @@ -283,21 +283,21 @@ func (q *Queries) EvaluationUpdate(ctx context.Context, arg EvaluationUpdatePara arg.ProjectID, arg.Name, arg.Body, - arg.StartDate, - arg.EndDate, - arg.Updater, - arg.UpdateDate, + arg.StartedAt, + arg.EndedAt, + arg.UpdatedBy, + arg.UpdatedAt, ) return err } const submittalCreateNextEvaluation = `-- name: SubmittalCreateNextEvaluation :exec -insert into submittal (alert_config_id, due_date) +insert into submittal (alert_config_id, due_at) select ac.id, now() + ac.schedule_interval from alert_config ac -where ac.id in (select sub.alert_config_id from submittal sub where sub.id=$1) +where ac.id = any(select sub.alert_config_id from submittal sub where sub.id=$1) ` func (q *Queries) SubmittalCreateNextEvaluation(ctx context.Context, id uuid.UUID) error { @@ -308,25 +308,25 @@ func (q *Queries) SubmittalCreateNextEvaluation(ctx context.Context, id uuid.UUI const submittalUpdateCompleteEvaluation = `-- name: SubmittalUpdateCompleteEvaluation :one update submittal sub1 set submittal_status_id = sq.submittal_status_id, - completion_date = now() + completed_at = now() from ( select sub2.id as submittal_id, case -- if completed before due date, mark submittal as green id - when now() <= sub2.due_date then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid + when now() <= sub2.due_at then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid -- if completed after due date, mark as yellow else 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::uuid end as 
submittal_status_id from submittal sub2 inner join alert_config ac on sub2.alert_config_id = ac.id where sub2.id=$1 - and sub2.completion_date is null + and sub2.completed_at is null and not sub2.marked_as_missing and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid ) sq where sub1.id = sq.submittal_id -returning sub1.id, sub1.alert_config_id, sub1.submittal_status_id, sub1.completion_date, sub1.create_date, sub1.due_date, sub1.marked_as_missing, sub1.warning_sent +returning sub1.id, sub1.alert_config_id, sub1.submittal_status_id, sub1.completed_at, sub1.created_at, sub1.due_at, sub1.marked_as_missing, sub1.warning_sent ` func (q *Queries) SubmittalUpdateCompleteEvaluation(ctx context.Context, id uuid.UUID) (Submittal, error) { @@ -336,9 +336,9 @@ func (q *Queries) SubmittalUpdateCompleteEvaluation(ctx context.Context, id uuid &i.ID, &i.AlertConfigID, &i.SubmittalStatusID, - &i.CompletionDate, - &i.CreateDate, - &i.DueDate, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, &i.MarkedAsMissing, &i.WarningSent, ) diff --git a/api/internal/db/home.sql_gen.go b/api/internal/db/home.sql_gen.go index 5a592fbd..d6d94bdb 100644 --- a/api/internal/db/home.sql_gen.go +++ b/api/internal/db/home.sql_gen.go @@ -14,7 +14,7 @@ select (select count(*) from instrument where not deleted) as instrument_count, (select count(*) from project where not deleted) as project_count, (select count(*) from instrument_group) as instrument_group_count, - (select count(*) from instrument where not deleted and create_date > now() - '7 days'::interval) as new_instruments_7d, + (select count(*) from instrument where not deleted and created_at > now() - '7 days'::interval) as new_instruments_7d, (select count(*) from timeseries_measurement where time > now() - '2 hours'::interval) as new_measurements_2h ` diff --git a/api/internal/db/instrument.sql_gen.go b/api/internal/db/instrument.sql_gen.go index 9add2526..01c23c09 100644 --- a/api/internal/db/instrument.sql_gen.go +++ 
b/api/internal/db/instrument.sql_gen.go @@ -13,19 +13,30 @@ import ( ) const instrumentCreate = `-- name: InstrumentCreate :one -insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) -values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) -returning id, slug +insert into instrument (slug, name, type_id, geometry, station, station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab) +values ( + slugify($1, 'instrument'), + $1, + $2, + ST_SetSRID(ST_GeomFromGeoJSON($3::json), 4326), + $4, + $5, + $6, + $7, + $8, + $9, + $10 +) returning id, slug ` type InstrumentCreateParams struct { Name string `json:"name"` TypeID uuid.UUID `json:"type_id"` - Geometry Geometry `json:"geometry"` + Geometry []byte `json:"geometry"` Station *int32 `json:"station"` StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` NidID *string `json:"nid_id"` UsgsID *string `json:"usgs_id"` ShowCwmsTab bool `json:"show_cwms_tab"` @@ -43,8 +54,8 @@ func (q *Queries) InstrumentCreate(ctx context.Context, arg InstrumentCreatePara arg.Geometry, arg.Station, arg.StationOffset, - arg.Creator, - arg.CreateDate, + arg.CreatedBy, + arg.CreatedAt, arg.NidID, arg.UsgsID, arg.ShowCwmsTab, @@ -75,10 +86,9 @@ func (q *Queries) InstrumentDeleteFlag(ctx context.Context, arg InstrumentDelete } const instrumentGet = `-- name: InstrumentGet :one -select id, deleted, status_id, status, status_time, slug, name, type_id, show_cwms_tab, type, icon, geometry, station, station_offset, creator, create_date, updater, update_date, nid_id, usgs_id, telemetry, has_cwms, projects, constants, groups, alert_configs, opts +select id, status_id, status, status_time, slug, name, type_id, show_cwms_tab, type, icon, geometry, station, "offset", created_by, created_at, 
updated_by, updated_at, nid_id, usgs_id, telemetry, has_cwms, projects, constants, groups, alert_configs, opts from v_instrument -where not deleted -and id = $1 +where id = $1 ` func (q *Queries) InstrumentGet(ctx context.Context, id uuid.UUID) (VInstrument, error) { @@ -86,7 +96,6 @@ func (q *Queries) InstrumentGet(ctx context.Context, id uuid.UUID) (VInstrument, var i VInstrument err := row.Scan( &i.ID, - &i.Deleted, &i.StatusID, &i.Status, &i.StatusTime, @@ -98,11 +107,11 @@ func (q *Queries) InstrumentGet(ctx context.Context, id uuid.UUID) (VInstrument, &i.Icon, &i.Geometry, &i.Station, - &i.StationOffset, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.Offset, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.NidID, &i.UsgsID, &i.Telemetry, @@ -130,7 +139,7 @@ func (q *Queries) InstrumentGetCount(ctx context.Context) (int64, error) { const instrumentIDNameListByIDs = `-- name: InstrumentIDNameListByIDs :many select id, name from instrument -where id in ($1::uuid[]) +where id = any($1::uuid[]) and not deleted ` @@ -159,62 +168,8 @@ func (q *Queries) InstrumentIDNameListByIDs(ctx context.Context, instrumentIds [ return items, nil } -const instrumentList = `-- name: InstrumentList :many -select id, deleted, status_id, status, status_time, slug, name, type_id, show_cwms_tab, type, icon, geometry, station, station_offset, creator, create_date, updater, update_date, nid_id, usgs_id, telemetry, has_cwms, projects, constants, groups, alert_configs, opts -from v_instrument -where not deleted -` - -func (q *Queries) InstrumentList(ctx context.Context) ([]VInstrument, error) { - rows, err := q.db.Query(ctx, instrumentList) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VInstrument{} - for rows.Next() { - var i VInstrument - if err := rows.Scan( - &i.ID, - &i.Deleted, - &i.StatusID, - &i.Status, - &i.StatusTime, - &i.Slug, - &i.Name, - &i.TypeID, - &i.ShowCwmsTab, - &i.Type, - &i.Icon, - &i.Geometry, - 
&i.Station, - &i.StationOffset, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, - &i.NidID, - &i.UsgsID, - &i.Telemetry, - &i.HasCwms, - &i.Projects, - &i.Constants, - &i.Groups, - &i.AlertConfigs, - &i.Opts, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - const instrumentListForInstrumentGroup = `-- name: InstrumentListForInstrumentGroup :many -select i.id, i.deleted, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i.station_offset, i.creator, i.create_date, i.updater, i.update_date, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts +select i.id, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i."offset", i.created_by, i.created_at, i.updated_by, i.updated_at, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts from v_instrument i inner join instrument_group_instruments igi on igi.instrument_id = i.id where instrument_group_id = $1 @@ -231,7 +186,6 @@ func (q *Queries) InstrumentListForInstrumentGroup(ctx context.Context, instrume var i VInstrument if err := rows.Scan( &i.ID, - &i.Deleted, &i.StatusID, &i.Status, &i.StatusTime, @@ -243,11 +197,11 @@ func (q *Queries) InstrumentListForInstrumentGroup(ctx context.Context, instrume &i.Icon, &i.Geometry, &i.Station, - &i.StationOffset, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.Offset, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.NidID, &i.UsgsID, &i.Telemetry, @@ -269,7 +223,7 @@ func (q *Queries) InstrumentListForInstrumentGroup(ctx context.Context, instrume } const instrumentListForProject = `-- name: InstrumentListForProject :many -select i.id, i.deleted, i.status_id, i.status, i.status_time, i.slug, 
i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i.station_offset, i.creator, i.create_date, i.updater, i.update_date, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts +select i.id, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i."offset", i.created_by, i.created_at, i.updated_by, i.updated_at, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts from v_instrument i inner join project_instrument pi on pi.instrument_id = i.id where pi.project_id = $1 @@ -286,7 +240,6 @@ func (q *Queries) InstrumentListForProject(ctx context.Context, projectID uuid.U var i VInstrument if err := rows.Scan( &i.ID, - &i.Deleted, &i.StatusID, &i.Status, &i.StatusTime, @@ -298,11 +251,11 @@ func (q *Queries) InstrumentListForProject(ctx context.Context, projectID uuid.U &i.Icon, &i.Geometry, &i.Station, - &i.StationOffset, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.Offset, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.NidID, &i.UsgsID, &i.Telemetry, @@ -325,84 +278,84 @@ func (q *Queries) InstrumentListForProject(ctx context.Context, projectID uuid.U const instrumentUpdate = `-- name: InstrumentUpdate :exec update instrument set - name=$3, - type_id=$4, - geometry=$5, - updater=$6, - update_date=$7, - station=$8, - station_offset=$9, - nid_id=$10, - usgs_id=$11, - show_cwms_tab=$12 -where id = $2 -and id in ( + name=$1, + type_id=$2, + geometry=ST_SetSRID(ST_GeomFromGeoJSON($3::json), 4326), + updated_by=$4, + updated_at=$5, + station=$6, + station_offset=$7, + nid_id=$8, + usgs_id=$9, + show_cwms_tab=$10 +where id = $11 +and id = any( select instrument_id from project_instrument - where project_id = $1 + where project_id = $12 ) ` type InstrumentUpdateParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID uuid.UUID `json:"id"` 
Name string `json:"name"` TypeID uuid.UUID `json:"type_id"` - Geometry Geometry `json:"geometry"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + Geometry []byte `json:"geometry"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` Station *int32 `json:"station"` StationOffset *int32 `json:"station_offset"` NidID *string `json:"nid_id"` UsgsID *string `json:"usgs_id"` ShowCwmsTab bool `json:"show_cwms_tab"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` } func (q *Queries) InstrumentUpdate(ctx context.Context, arg InstrumentUpdateParams) error { _, err := q.db.Exec(ctx, instrumentUpdate, - arg.ProjectID, - arg.ID, arg.Name, arg.TypeID, arg.Geometry, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, arg.Station, arg.StationOffset, arg.NidID, arg.UsgsID, arg.ShowCwmsTab, + arg.ID, + arg.ProjectID, ) return err } const instrumentUpdateGeometry = `-- name: InstrumentUpdateGeometry :one update instrument set - geometry=$3, - updater=$4, - update_date=now() -where id = $2 -and id in ( + geometry=ST_SetSRID(ST_GeomFromGeoJSON($1::json), 4326), + updated_by=$2, + updated_at=now() +where id = $3 +and id = any( select instrument_id from project_instrument - where project_id = $1 + where project_id = $4 ) returning id ` type InstrumentUpdateGeometryParams struct { - ProjectID uuid.UUID `json:"project_id"` + Geometry []byte `json:"geometry"` + UpdatedBy *uuid.UUID `json:"updated_by"` ID uuid.UUID `json:"id"` - Geometry Geometry `json:"geometry"` - Updater *uuid.UUID `json:"updater"` + ProjectID uuid.UUID `json:"project_id"` } func (q *Queries) InstrumentUpdateGeometry(ctx context.Context, arg InstrumentUpdateGeometryParams) (uuid.UUID, error) { row := q.db.QueryRow(ctx, instrumentUpdateGeometry, - arg.ProjectID, - arg.ID, arg.Geometry, - arg.Updater, + arg.UpdatedBy, + arg.ID, + arg.ProjectID, ) var id uuid.UUID err := row.Scan(&id) @@ -413,7 +366,7 @@ const 
projectInstrumentListCountByInstrument = `-- name: ProjectInstrumentListCo select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count from project_instrument pi inner join instrument i on pi.instrument_id = i.id -where pi.instrument_id in ($1::uuid[]) +where pi.instrument_id = any($1::uuid[]) group by pi.instrument_id, i.name order by i.name ` diff --git a/api/internal/db/instrument_assign.sql_gen.go b/api/internal/db/instrument_assign.sql_gen.go index 9eb2b3a5..2120ab24 100644 --- a/api/internal/db/instrument_assign.sql_gen.go +++ b/api/internal/db/instrument_assign.sql_gen.go @@ -46,7 +46,7 @@ from project_instrument pi inner join instrument i on pi.instrument_id = i.id inner join project p on pi.project_id = p.id where i.name = $1 -and pi.project_id in ($2::uuid[]) +and pi.project_id = any($2::uuid[]) and not i.deleted order by pi.project_id ` @@ -82,7 +82,7 @@ from project_instrument pi inner join project p on pi.project_id = p.id inner join instrument i on pi.instrument_id = i.id where pi.instrument_id = $1 -and pi.project_id in ($2::uuid[]) +and pi.project_id = any($2::uuid[]) and not exists ( select 1 from v_profile_project_roles ppr where profile_id = $3 @@ -123,7 +123,7 @@ select p.name as project_name, i.name as instrument_name from project_instrument pi inner join project p on pi.project_id = p.id inner join instrument i on pi.instrument_id = i.id -where pi.instrument_id in ($1::uuid[]) +where pi.instrument_id = any($1::uuid[]) and not exists ( select 1 from v_profile_project_roles ppr where ppr.profile_id = $2 @@ -167,7 +167,7 @@ select i.name from project_instrument pi inner join instrument i on pi.instrument_id = i.id where pi.project_id = $1 -and i.name in ($2::text[]) +and i.name = any($2::text[]) and not i.deleted ` diff --git a/api/internal/db/instrument_group.sql_gen.go b/api/internal/db/instrument_group.sql_gen.go index db84436e..b57a4dfc 100644 --- a/api/internal/db/instrument_group.sql_gen.go +++ 
b/api/internal/db/instrument_group.sql_gen.go @@ -13,38 +13,49 @@ import ( ) const instrumentGroupCreate = `-- name: InstrumentGroupCreate :one -insert into instrument_group (slug, name, description, creator, create_date, project_id) +insert into instrument_group (slug, name, description, created_by, created_at, project_id) values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) -returning id, deleted, slug, name, description, creator, create_date, updater, update_date, project_id +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id ` type InstrumentGroupCreateParams struct { Name string `json:"name"` Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` ProjectID *uuid.UUID `json:"project_id"` } -func (q *Queries) InstrumentGroupCreate(ctx context.Context, arg InstrumentGroupCreateParams) (InstrumentGroup, error) { +type InstrumentGroupCreateRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +func (q *Queries) InstrumentGroupCreate(ctx context.Context, arg InstrumentGroupCreateParams) (InstrumentGroupCreateRow, error) { row := q.db.QueryRow(ctx, instrumentGroupCreate, arg.Name, arg.Description, - arg.Creator, - arg.CreateDate, + arg.CreatedBy, + arg.CreatedAt, arg.ProjectID, ) - var i InstrumentGroup + var i InstrumentGroupCreateRow err := row.Scan( &i.ID, - &i.Deleted, &i.Slug, &i.Name, &i.Description, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.ProjectID, ) return i, err @@ 
-59,44 +70,29 @@ func (q *Queries) InstrumentGroupDeleteFlag(ctx context.Context, id uuid.UUID) e return err } -const instrumentGroupGet = `-- name: InstrumentGroupGet :many -select id, slug, name, description, creator, create_date, updater, update_date, project_id, deleted, instrument_count, timeseries_count +const instrumentGroupGet = `-- name: InstrumentGroupGet :one +select id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id, instrument_count, timeseries_count from v_instrument_group -where not deleted -and id=$1 +where id=$1 ` -func (q *Queries) InstrumentGroupGet(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) { - rows, err := q.db.Query(ctx, instrumentGroupGet, id) - if err != nil { - return nil, err - } - defer rows.Close() - items := []VInstrumentGroup{} - for rows.Next() { - var i VInstrumentGroup - if err := rows.Scan( - &i.ID, - &i.Slug, - &i.Name, - &i.Description, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, - &i.ProjectID, - &i.Deleted, - &i.InstrumentCount, - &i.TimeseriesCount, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *Queries) InstrumentGroupGet(ctx context.Context, id uuid.UUID) (VInstrumentGroup, error) { + row := q.db.QueryRow(ctx, instrumentGroupGet, id) + var i VInstrumentGroup + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ProjectID, + &i.InstrumentCount, + &i.TimeseriesCount, + ) + return i, err } const instrumentGroupInstrumentCreate = `-- name: InstrumentGroupInstrumentCreate :exec @@ -128,9 +124,8 @@ func (q *Queries) InstrumentGroupInstrumentDelete(ctx context.Context, arg Instr } const instrumentGroupList = `-- name: InstrumentGroupList :many -select id, slug, name, description, creator, create_date, updater, update_date, project_id, deleted, instrument_count, 
timeseries_count +select id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id, instrument_count, timeseries_count from v_instrument_group -where not deleted ` func (q *Queries) InstrumentGroupList(ctx context.Context) ([]VInstrumentGroup, error) { @@ -147,12 +142,11 @@ func (q *Queries) InstrumentGroupList(ctx context.Context) ([]VInstrumentGroup, &i.Slug, &i.Name, &i.Description, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.ProjectID, - &i.Deleted, &i.InstrumentCount, &i.TimeseriesCount, ); err != nil { @@ -167,7 +161,7 @@ func (q *Queries) InstrumentGroupList(ctx context.Context) ([]VInstrumentGroup, } const instrumentGroupListForProject = `-- name: InstrumentGroupListForProject :many -select ig.id, ig.slug, ig.name, ig.description, ig.creator, ig.create_date, ig.updater, ig.update_date, ig.project_id, ig.deleted, ig.instrument_count, ig.timeseries_count +select ig.id, ig.slug, ig.name, ig.description, ig.created_by, ig.created_at, ig.updated_by, ig.updated_at, ig.project_id, ig.instrument_count, ig.timeseries_count from v_instrument_group ig where ig.project_id = $1 ` @@ -186,12 +180,11 @@ func (q *Queries) InstrumentGroupListForProject(ctx context.Context, projectID * &i.Slug, &i.Name, &i.Description, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.ProjectID, - &i.Deleted, &i.InstrumentCount, &i.TimeseriesCount, ); err != nil { @@ -209,42 +202,53 @@ const instrumentGroupUpdate = `-- name: InstrumentGroupUpdate :one update instrument_group set name = $2, description = $3, - updater = $4, - update_date = $5, + updated_by = $4, + updated_at = $5, project_id = $6 where id = $1 - returning id, deleted, slug, name, description, creator, create_date, updater, update_date, project_id +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, 
project_id ` type InstrumentGroupUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Description *string `json:"description"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +type InstrumentGroupUpdateRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` ProjectID *uuid.UUID `json:"project_id"` } -func (q *Queries) InstrumentGroupUpdate(ctx context.Context, arg InstrumentGroupUpdateParams) (InstrumentGroup, error) { +func (q *Queries) InstrumentGroupUpdate(ctx context.Context, arg InstrumentGroupUpdateParams) (InstrumentGroupUpdateRow, error) { row := q.db.QueryRow(ctx, instrumentGroupUpdate, arg.ID, arg.Name, arg.Description, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, arg.ProjectID, ) - var i InstrumentGroup + var i InstrumentGroupUpdateRow err := row.Scan( &i.ID, - &i.Deleted, &i.Slug, &i.Name, &i.Description, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.ProjectID, ) return i, err diff --git a/api/internal/db/instrument_incl.sql_gen.go b/api/internal/db/instrument_incl.sql_gen.go index 7966917f..69b73193 100644 --- a/api/internal/db/instrument_incl.sql_gen.go +++ b/api/internal/db/instrument_incl.sql_gen.go @@ -19,7 +19,7 @@ where m1.instrument_id=$1 and m1.time >= $2 and m1.time <= $3 union select m2.instrument_id, m2.time, m2.measurements from v_incl_measurement m2 -where m2.time in (select o.initial_time from incl_opts o where o.instrument_id = $1) +where m2.time = any(select o.initial_time from incl_opts o where 
o.instrument_id = $1) and m2.instrument_id = $1 order by time asc ` diff --git a/api/internal/db/instrument_ipi.sql_gen.go b/api/internal/db/instrument_ipi.sql_gen.go index 54a03882..c952d02c 100644 --- a/api/internal/db/instrument_ipi.sql_gen.go +++ b/api/internal/db/instrument_ipi.sql_gen.go @@ -19,7 +19,7 @@ where m1.instrument_id=$1 and m1.time >= $2 and m1.time <= $3 union select m2.instrument_id, m2.time, m2.measurements from v_ipi_measurement m2 -where m2.time in (select o.initial_time from ipi_opts o where o.instrument_id = $1) +where m2.time = any(select o.initial_time from ipi_opts o where o.instrument_id = $1) and m2.instrument_id = $1 order by time asc ` diff --git a/api/internal/db/instrument_note.sql_gen.go b/api/internal/db/instrument_note.sql_gen.go index c257c93b..f9874322 100644 --- a/api/internal/db/instrument_note.sql_gen.go +++ b/api/internal/db/instrument_note.sql_gen.go @@ -13,9 +13,9 @@ import ( ) const instrumentNoteCreate = `-- name: InstrumentNoteCreate :one -insert into instrument_note (instrument_id, title, body, time, creator, create_date) +insert into instrument_note (instrument_id, title, body, time, created_by, created_at) values ($1, $2, $3, $4, $5, $6) -returning id, instrument_id, title, body, time, creator, create_date, updater, update_date +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at ` type InstrumentNoteCreateParams struct { @@ -23,8 +23,8 @@ type InstrumentNoteCreateParams struct { Title string `json:"title"` Body string `json:"body"` Time time.Time `json:"time"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` } func (q *Queries) InstrumentNoteCreate(ctx context.Context, arg InstrumentNoteCreateParams) (InstrumentNote, error) { @@ -33,8 +33,8 @@ func (q *Queries) InstrumentNoteCreate(ctx context.Context, arg InstrumentNoteCr arg.Title, arg.Body, arg.Time, - 
arg.Creator, - arg.CreateDate, + arg.CreatedBy, + arg.CreatedAt, ) var i InstrumentNote err := row.Scan( @@ -43,10 +43,10 @@ func (q *Queries) InstrumentNoteCreate(ctx context.Context, arg InstrumentNoteCr &i.Title, &i.Body, &i.Time, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, ) return i, err } @@ -61,7 +61,7 @@ func (q *Queries) InstrumentNoteDelete(ctx context.Context, id uuid.UUID) error } const instrumentNoteGet = `-- name: InstrumentNoteGet :one -select id, instrument_id, title, body, time, creator, create_date, updater, update_date +select id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at from instrument_note where id = $1 ` @@ -75,16 +75,16 @@ func (q *Queries) InstrumentNoteGet(ctx context.Context, id uuid.UUID) (Instrume &i.Title, &i.Body, &i.Time, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, ) return i, err } const instrumentNoteListForInstrument = `-- name: InstrumentNoteListForInstrument :many -select id, instrument_id, title, body, time, creator, create_date, updater, update_date +select id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at from instrument_note where instrument_id = $1 ` @@ -104,10 +104,10 @@ func (q *Queries) InstrumentNoteListForInstrument(ctx context.Context, instrumen &i.Title, &i.Body, &i.Time, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, ); err != nil { return nil, err } @@ -124,19 +124,19 @@ update instrument_note set title=$2, body=$3, time=$4, - updater=$5, - update_date=$6 + updated_by=$5, + updated_at=$6 where id = $1 -returning id, instrument_id, title, body, time, creator, create_date, updater, update_date +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at ` type 
InstrumentNoteUpdateParams struct { - ID uuid.UUID `json:"id"` - Title string `json:"title"` - Body string `json:"body"` - Time time.Time `json:"time"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + ID uuid.UUID `json:"id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` } func (q *Queries) InstrumentNoteUpdate(ctx context.Context, arg InstrumentNoteUpdateParams) (InstrumentNote, error) { @@ -145,8 +145,8 @@ func (q *Queries) InstrumentNoteUpdate(ctx context.Context, arg InstrumentNoteUp arg.Title, arg.Body, arg.Time, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, ) var i InstrumentNote err := row.Scan( @@ -155,10 +155,10 @@ func (q *Queries) InstrumentNoteUpdate(ctx context.Context, arg InstrumentNoteUp &i.Title, &i.Body, &i.Time, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, ) return i, err } diff --git a/api/internal/db/instrument_saa.sql_gen.go b/api/internal/db/instrument_saa.sql_gen.go index f5a44f33..fd781274 100644 --- a/api/internal/db/instrument_saa.sql_gen.go +++ b/api/internal/db/instrument_saa.sql_gen.go @@ -19,7 +19,7 @@ where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 union select m2.instrument_id, m2.time, m2.measurements from v_saa_measurement m2 -where m2.time in (select o.initial_time from saa_opts o where o.instrument_id = $1) +where m2.time = any(select o.initial_time from saa_opts o where o.instrument_id = $1) and m2.instrument_id = $1 order by time asc ` diff --git a/api/internal/db/manual.go b/api/internal/db/manual.go deleted file mode 100644 index acfa554b..00000000 --- a/api/internal/db/manual.go +++ /dev/null @@ -1,8 +0,0 @@ -package db - -import "github.com/jackc/pgx/v5" - -func CollectRows[T any](rows pgx.Rows) ([]T, error) { - ss, err := pgx.CollectRows(rows, 
pgx.RowToStructByName[T]) - return ss, err -} diff --git a/api/internal/db/measurement.manual.go b/api/internal/db/measurement.manual.go index e45ee041..700e69c0 100644 --- a/api/internal/db/measurement.manual.go +++ b/api/internal/db/measurement.manual.go @@ -1,10 +1,36 @@ package db import ( + "context" "math" "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5" ) +type TimeseriesMeasurementCollectionGetForRangeParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` + Threshold int `json:"threshold"` +} + +func (q *Queries) TimeseriesMeasurementCollectionGetForRange(ctx context.Context, arg TimeseriesMeasurementCollectionGetForRangeParams) (MeasurementCollection, error) { + var mc MeasurementCollection + rows, err := q.db.Query(ctx, timeseriesMeasurementListForRange, arg.TimeseriesID, arg.After, arg.Before) + if err != nil { + return mc, err + } + mm, err := pgx.CollectRows[Measurement](rows, pgx.RowToStructByNameLax) + if err != nil { + return mc, err + } + mc.TimeseriesID = arg.TimeseriesID + mc.Items = LTTB(mm, arg.Threshold) + return mc, nil +} + type MeasurementGetter interface { getTime() time.Time getValue() float64 @@ -18,6 +44,30 @@ func (m VTimeseriesMeasurement) getValue() float64 { return float64(m.Value) } +func (m Measurement) getTime() time.Time { + return m.Time +} + +func (m Measurement) getValue() float64 { + return float64(m.Value) +} + +func (ml MeasurementLean) getTime() time.Time { + var t time.Time + for k := range ml { + t = k + } + return t +} + +func (ml MeasurementLean) getValue() float64 { + var m float64 + for _, v := range ml { + m = v + } + return m +} + // A slightly modified LTTB (Largest-Triange-Three-Buckets) algorithm for downsampling timeseries measurements // https://godoc.org/github.com/dgryski/go-lttb func LTTB[T MeasurementGetter](data []T, threshold int) []T { diff --git a/api/internal/db/measurement.sql_gen.go 
b/api/internal/db/measurement.sql_gen.go index c59264be..446de27b 100644 --- a/api/internal/db/measurement.sql_gen.go +++ b/api/internal/db/measurement.sql_gen.go @@ -88,21 +88,21 @@ func (q *Queries) TimeseriesMeasurementGetMostRecent(ctx context.Context, timese return i, err } -const timeseriesMeasurementListRange = `-- name: TimeseriesMeasurementListRange :many +const timeseriesMeasurementListForRange = `-- name: TimeseriesMeasurementListForRange :many select timeseries_id, time, value, masked, validated, annotation from v_timeseries_measurement where timeseries_id=$1 and time > $2 and time < $3 ` -type TimeseriesMeasurementListRangeParams struct { +type TimeseriesMeasurementListForRangeParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` - AfterTime time.Time `json:"after_time"` - BeforeTime time.Time `json:"before_time"` + After time.Time `json:"after"` + Before time.Time `json:"before"` } -func (q *Queries) TimeseriesMeasurementListRange(ctx context.Context, arg TimeseriesMeasurementListRangeParams) ([]VTimeseriesMeasurement, error) { - rows, err := q.db.Query(ctx, timeseriesMeasurementListRange, arg.TimeseriesID, arg.AfterTime, arg.BeforeTime) +func (q *Queries) TimeseriesMeasurementListForRange(ctx context.Context, arg TimeseriesMeasurementListForRangeParams) ([]VTimeseriesMeasurement, error) { + rows, err := q.db.Query(ctx, timeseriesMeasurementListForRange, arg.TimeseriesID, arg.After, arg.Before) if err != nil { return nil, err } diff --git a/api/internal/db/models.go b/api/internal/db/models.go index 842cc8a2..25979c8e 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -6,11 +6,11 @@ package db import ( "database/sql/driver" + "encoding/json" "fmt" "time" "github.com/google/uuid" - "github.com/jackc/pgx/v5/pgtype" ) type JobStatus string @@ -325,7 +325,7 @@ type Agency struct { type Alert struct { ID uuid.UUID `json:"id"` AlertConfigID uuid.UUID `json:"alert_config_id"` - CreateDate time.Time `json:"create_date"` + 
CreatedAt time.Time `json:"created_at"` } type AlertConfig struct { @@ -333,18 +333,18 @@ type AlertConfig struct { ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Body string `json:"body"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` AlertTypeID uuid.UUID `json:"alert_type_id"` - StartDate time.Time `json:"start_date"` + StartedAt time.Time `json:"started_at"` ScheduleInterval string `json:"schedule_interval"` NMissedBeforeAlert int32 `json:"n_missed_before_alert"` WarningInterval string `json:"warning_interval"` RemindInterval string `json:"remind_interval"` - LastChecked *time.Time `json:"last_checked"` - LastReminded *time.Time `json:"last_reminded"` + LastCheckedAt *time.Time `json:"last_checked_at"` + LastRemindedAt *time.Time `json:"last_reminded_at"` Deleted bool `json:"deleted"` MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` } @@ -405,15 +405,15 @@ type Calculation struct { } type CollectionGroup struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Slug string `json:"slug"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` - SortOrder int32 `json:"sort_order"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + SortOrder int32 `json:"sort_order"` } type CollectionGroupTimeseries struct { @@ -428,17 +428,17 @@ type Config struct { } type 
Datalogger struct { - ID uuid.UUID `json:"id"` - Sn string `json:"sn"` - ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater uuid.UUID `json:"updater"` - UpdateDate time.Time `json:"update_date"` - Name string `json:"name"` - Slug string `json:"slug"` - ModelID uuid.UUID `json:"model_id"` - Deleted bool `json:"deleted"` + ID uuid.UUID `json:"id"` + Sn string `json:"sn"` + ProjectID uuid.UUID `json:"project_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + Name string `json:"name"` + Slug string `json:"slug"` + ModelID uuid.UUID `json:"model_id"` + Deleted bool `json:"deleted"` } type DataloggerEquivalencyTable struct { @@ -470,7 +470,7 @@ type DataloggerModel struct { type DataloggerPreview struct { Preview []byte `json:"preview"` - UpdateDate time.Time `json:"update_date"` + UpdatedAt time.Time `json:"updated_at"` DataloggerTableID uuid.UUID `json:"datalogger_table_id"` } @@ -505,12 +505,12 @@ type Evaluation struct { ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Body string `json:"body"` - StartDate time.Time `json:"start_date"` - EndDate time.Time `json:"end_date"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + StartedAt time.Time `json:"started_at"` + EndedAt time.Time `json:"ended_at"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` SubmittalID *uuid.UUID `json:"submittal_id"` } @@ -523,7 +523,7 @@ type Heartbeat struct { Time time.Time `json:"time"` } -type InclOpt struct { +type InclOpts struct { InstrumentID uuid.UUID `json:"instrument_id"` NumSegments int32 `json:"num_segments"` 
BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` @@ -549,24 +549,24 @@ type InclinometerMeasurement struct { } type Instrument struct { - ID uuid.UUID `json:"id"` - Deleted bool `json:"deleted"` - Slug string `json:"slug"` - Name string `json:"name"` - Geometry Geometry `json:"geometry"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` - TypeID uuid.UUID `json:"type_id"` - NidID *string `json:"nid_id"` - UsgsID *string `json:"usgs_id"` - ShowCwmsTab bool `json:"show_cwms_tab"` -} - -type InstrumentConstant struct { + ID uuid.UUID `json:"id"` + Deleted bool `json:"deleted"` + Slug string `json:"slug"` + Name string `json:"name"` + Geometry interface{} `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + TypeID uuid.UUID `json:"type_id"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` +} + +type InstrumentConstants struct { TimeseriesID uuid.UUID `json:"timeseries_id"` InstrumentID uuid.UUID `json:"instrument_id"` } @@ -577,14 +577,14 @@ type InstrumentGroup struct { Slug string `json:"slug"` Name string `json:"name"` Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` ProjectID *uuid.UUID `json:"project_id"` } -type InstrumentGroupInstrument struct { 
+type InstrumentGroupInstruments struct { InstrumentID uuid.UUID `json:"instrument_id"` InstrumentGroupID uuid.UUID `json:"instrument_group_id"` } @@ -595,10 +595,10 @@ type InstrumentNote struct { Title string `json:"title"` Body string `json:"body"` Time time.Time `json:"time"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` } type InstrumentStatus struct { @@ -621,7 +621,7 @@ type InstrumentType struct { Icon *string `json:"icon"` } -type IpiOpt struct { +type IpiOpts struct { InstrumentID uuid.UUID `json:"instrument_id"` NumSegments int32 `json:"num_segments"` BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` @@ -658,15 +658,15 @@ type PlotBullseyeConfig struct { } type PlotConfiguration struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` - PlotType PlotType `json:"plot_type"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + PlotType PlotType `json:"plot_type"` } type PlotConfigurationCustomShape struct { @@ -677,7 +677,7 @@ type PlotConfigurationCustomShape struct { Color string `json:"color"` } -type PlotConfigurationSetting struct { +type PlotConfigurationSettings struct { ID uuid.UUID `json:"id"` ShowMasked bool `json:"show_masked"` ShowNonvalidated bool 
`json:"show_nonvalidated"` @@ -733,7 +733,7 @@ type Profile struct { DisplayName string `json:"display_name"` } -type ProfileProjectRole struct { +type ProfileProjectRoles struct { ID uuid.UUID `json:"id"` ProfileID uuid.UUID `json:"profile_id"` RoleID uuid.UUID `json:"role_id"` @@ -757,10 +757,10 @@ type Project struct { Deleted bool `json:"deleted"` Slug string `json:"slug"` Name string `json:"name"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` DistrictID *uuid.UUID `json:"district_id"` } @@ -775,10 +775,10 @@ type ReportConfig struct { Slug string `json:"slug"` Name string `json:"name"` Description string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` DateRange *string `json:"date_range"` DateRangeEnabled *bool `json:"date_range_enabled"` ShowMasked *bool `json:"show_masked"` @@ -793,15 +793,15 @@ type ReportConfigPlotConfig struct { } type ReportDownloadJob struct { - ID uuid.UUID `json:"id"` - ReportConfigID *uuid.UUID `json:"report_config_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Status JobStatus `json:"status"` - FileKey *string `json:"file_key"` - FileExpiry *time.Time `json:"file_expiry"` - Progress int32 `json:"progress"` - ProgressUpdateDate time.Time `json:"progress_update_date"` + ID uuid.UUID `json:"id"` + ReportConfigID *uuid.UUID `json:"report_config_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time 
`json:"created_at"` + Status JobStatus `json:"status"` + FileKey *string `json:"file_key"` + FileExpiry *time.Time `json:"file_expiry"` + Progress int32 `json:"progress"` + ProgressUpdatedAt time.Time `json:"progress_updated_at"` } type Role struct { @@ -810,7 +810,7 @@ type Role struct { Deleted bool `json:"deleted"` } -type SaaOpt struct { +type SaaOpts struct { InstrumentID uuid.UUID `json:"instrument_id"` NumSegments int32 `json:"num_segments"` BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` @@ -837,9 +837,9 @@ type Submittal struct { ID uuid.UUID `json:"id"` AlertConfigID *uuid.UUID `json:"alert_config_id"` SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` - CompletionDate *time.Time `json:"completion_date"` - CreateDate time.Time `json:"create_date"` - DueDate time.Time `json:"due_date"` + CompletedAt *time.Time `json:"completed_at"` + CreatedAt time.Time `json:"created_at"` + DueAt time.Time `json:"due_at"` MarkedAsMissing bool `json:"marked_as_missing"` WarningSent bool `json:"warning_sent"` } @@ -849,7 +849,7 @@ type SubmittalStatus struct { Name string `json:"name"` } -type TelemetryGo struct { +type TelemetryGoes struct { ID uuid.UUID `json:"id"` NesdisID string `json:"nesdis_id"` } @@ -866,16 +866,16 @@ type TelemetryType struct { } type Timeseries struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - InstrumentID *uuid.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Type NullTimeseriesType `json:"type"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` } -type TimeseriesCwm struct { +type TimeseriesCwms struct { TimeseriesID uuid.UUID `json:"timeseries_id"` CwmsTimeseriesID string `json:"cwms_timeseries_id"` 
CwmsOfficeID string `json:"cwms_office_id"` @@ -889,7 +889,7 @@ type TimeseriesMeasurement struct { TimeseriesID uuid.UUID `json:"timeseries_id"` } -type TimeseriesNote struct { +type TimeseriesNotes struct { Masked *bool `json:"masked"` Validated *bool `json:"validated"` Annotation *string `json:"annotation"` @@ -916,10 +916,10 @@ type UploaderConfig struct { Slug string `json:"slug"` Name string `json:"name"` Description string `json:"description"` - CreateDate time.Time `json:"create_date"` - Creator uuid.UUID `json:"creator"` - UpdateDate *time.Time `json:"update_date"` - Updater *uuid.UUID `json:"updater"` + CreatedAt time.Time `json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` Type UploaderConfigType `json:"type"` TzName string `json:"tz_name"` TimeField string `json:"time_field"` @@ -940,7 +940,7 @@ type UploaderConfigMapping struct { type VAlert struct { ID uuid.UUID `json:"id"` AlertConfigID uuid.UUID `json:"alert_config_id"` - CreateDate time.Time `json:"create_date"` + CreatedAt time.Time `json:"created_at"` ProjectID uuid.UUID `json:"project_id"` ProjectName string `json:"project_name"` Name string `json:"name"` @@ -971,23 +971,23 @@ type VAlertConfig struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Body string `json:"body"` - Creator *uuid.UUID `json:"creator"` - CreatorUsername string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdaterUsername *string `json:"updater_username"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy *uuid.UUID `json:"created_by"` + CreatedByUsername string `json:"created_by_username"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedByUsername *string `json:"updated_by_username"` + UpdatedAt *time.Time `json:"updated_at"` ProjectID uuid.UUID `json:"project_id"` ProjectName string `json:"project_name"` 
AlertTypeID uuid.UUID `json:"alert_type_id"` AlertType string `json:"alert_type"` - StartDate time.Time `json:"start_date"` + StartedAt time.Time `json:"started_at"` ScheduleInterval string `json:"schedule_interval"` MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` RemindInterval string `json:"remind_interval"` WarningInterval string `json:"warning_interval"` - LastChecked *time.Time `json:"last_checked"` - LastReminded *time.Time `json:"last_reminded"` + LastCheckedAt *time.Time `json:"last_checked_at"` + LastRemindedAt *time.Time `json:"last_reminded_at"` CreateNextSubmittalFrom *time.Time `json:"create_next_submittal_from"` Instruments []InstrumentIDName `json:"instruments"` AlertEmailSubscriptions []EmailAutocompleteResult `json:"alert_email_subscriptions"` @@ -1000,35 +1000,35 @@ type VAwarePlatformParameterEnabled struct { TimeseriesID *uuid.UUID `json:"timeseries_id"` } -type VCollectionGroupDetail struct { +type VCollectionGroupDetails struct { ID uuid.UUID `json:"id"` ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Slug string `json:"slug"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` SortOrder int32 `json:"sort_order"` Timeseries []CollectionGroupDetailsTimeseries `json:"timeseries"` } type VDatalogger struct { - ID uuid.UUID `json:"id"` - Sn string `json:"sn"` - ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` - CreatorUsername string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater uuid.UUID `json:"updater"` - UpdaterUsername string `json:"updater_username"` - UpdateDate time.Time `json:"update_date"` - Name string `json:"name"` - Slug string `json:"slug"` - ModelID uuid.UUID 
`json:"model_id"` - Model *string `json:"model"` - Errors []string `json:"errors"` - Tables []DataloggerTableIDName `json:"tables"` + ID uuid.UUID `json:"id"` + Sn string `json:"sn"` + ProjectID uuid.UUID `json:"project_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedByUsername string `json:"created_by_username"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy uuid.UUID `json:"updated_by"` + UpdatedByUsername string `json:"updated_by_username"` + UpdatedAt *time.Time `json:"updated_at"` + Name string `json:"name"` + Slug string `json:"slug"` + ModelID uuid.UUID `json:"model_id"` + Model *string `json:"model"` + Errors []string `json:"errors"` + Tables []DataloggerTableIDName `json:"tables"` } type VDataloggerEquivalencyTable struct { @@ -1046,9 +1046,9 @@ type VDataloggerHash struct { } type VDataloggerPreview struct { - DataloggerTableID uuid.UUID `json:"datalogger_table_id"` - Preview []byte `json:"preview"` - UpdateDate time.Time `json:"update_date"` + DataloggerTableID uuid.UUID `json:"datalogger_table_id"` + Preview json.RawMessage `json:"preview"` + UpdatedAt time.Time `json:"updated_at"` } type VDistrict struct { @@ -1062,17 +1062,17 @@ type VDistrict struct { } type VDistrictRollup struct { - AlertTypeID uuid.UUID `json:"alert_type_id"` - OfficeID *uuid.UUID `json:"office_id"` - DistrictInitials *string `json:"district_initials"` - ProjectName string `json:"project_name"` - ProjectID uuid.UUID `json:"project_id"` - TheMonth pgtype.Interval `json:"the_month"` - ExpectedTotalSubmittals int64 `json:"expected_total_submittals"` - ActualTotalSubmittals int64 `json:"actual_total_submittals"` - RedSubmittals int64 `json:"red_submittals"` - YellowSubmittals int64 `json:"yellow_submittals"` - GreenSubmittals int64 `json:"green_submittals"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + OfficeID *uuid.UUID `json:"office_id"` + DistrictInitials *string `json:"district_initials"` + ProjectName string `json:"project_name"` + ProjectID uuid.UUID 
`json:"project_id"` + Month time.Time `json:"month"` + ExpectedTotalSubmittals int64 `json:"expected_total_submittals"` + ActualTotalSubmittals int64 `json:"actual_total_submittals"` + RedSubmittals int64 `json:"red_submittals"` + YellowSubmittals int64 `json:"yellow_submittals"` + GreenSubmittals int64 `json:"green_submittals"` } type VDomain struct { @@ -1096,23 +1096,23 @@ type VEmailAutocomplete struct { } type VEvaluation struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Body string `json:"body"` - Creator *uuid.UUID `json:"creator"` - CreatorUsername string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdaterUsername *string `json:"updater_username"` - UpdateDate *time.Time `json:"update_date"` - ProjectID uuid.UUID `json:"project_id"` - ProjectName string `json:"project_name"` - AlertConfigID *uuid.UUID `json:"alert_config_id"` - AlertConfigName *string `json:"alert_config_name"` - SubmittalID *uuid.UUID `json:"submittal_id"` - StartDate time.Time `json:"start_date"` - EndDate time.Time `json:"end_date"` - Instruments []InstrumentIDName `json:"instruments"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Body string `json:"body"` + CreatedBy *uuid.UUID `json:"created_by"` + CreatedByUsername string `json:"created_by_username"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedByUsername *string `json:"updated_by_username"` + UpdatedAt *time.Time `json:"updated_at"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + AlertConfigID *uuid.UUID `json:"alert_config_id"` + AlertConfigName *string `json:"alert_config_name"` + SubmittalID *uuid.UUID `json:"submittal_id"` + StartedAt time.Time `json:"started_at"` + EndedAt time.Time `json:"ended_at"` + Instruments []InstrumentIDName `json:"instruments"` } type VInclMeasurement struct { @@ -1132,33 +1132,32 @@ type VInclSegment struct { } type 
VInstrument struct { - ID uuid.UUID `json:"id"` - Deleted bool `json:"deleted"` - StatusID uuid.UUID `json:"status_id"` - Status string `json:"status"` - StatusTime time.Time `json:"status_time"` - Slug string `json:"slug"` - Name string `json:"name"` - TypeID uuid.UUID `json:"type_id"` - ShowCwmsTab bool `json:"show_cwms_tab"` - Type string `json:"type"` - Icon *string `json:"icon"` - Geometry Geometry `json:"geometry"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"station_offset"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` - NidID *string `json:"nid_id"` - UsgsID *string `json:"usgs_id"` - Telemetry []IDSlugName `json:"telemetry"` - HasCwms bool `json:"has_cwms"` - Projects []IDSlugName `json:"projects"` - Constants []uuid.UUID `json:"constants"` - Groups []uuid.UUID `json:"groups"` - AlertConfigs []uuid.UUID `json:"alert_configs"` - Opts interface{} `json:"opts"` + ID uuid.UUID `json:"id"` + StatusID uuid.UUID `json:"status_id"` + Status string `json:"status"` + StatusTime time.Time `json:"status_time"` + Slug string `json:"slug"` + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` + Type string `json:"type"` + Icon *string `json:"icon"` + Geometry json.RawMessage `json:"geometry"` + Station *int32 `json:"station"` + Offset *int32 `json:"offset"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + Telemetry []IDSlugName `json:"telemetry"` + HasCwms bool `json:"has_cwms"` + Projects []IDSlugName `json:"projects"` + Constants []uuid.UUID `json:"constants"` + Groups []uuid.UUID `json:"groups"` + AlertConfigs []uuid.UUID `json:"alert_configs"` + Opts interface{} `json:"opts"` } type 
VInstrumentGroup struct { @@ -1166,12 +1165,11 @@ type VInstrumentGroup struct { Slug string `json:"slug"` Name string `json:"name"` Description *string `json:"description"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` ProjectID *uuid.UUID `json:"project_id"` - Deleted bool `json:"deleted"` InstrumentCount int64 `json:"instrument_count"` TimeseriesCount interface{} `json:"timeseries_count"` } @@ -1212,10 +1210,10 @@ type VPlotConfiguration struct { Slug string `json:"slug"` Name string `json:"name"` ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` ShowMasked bool `json:"show_masked"` ShowNonvalidated bool `json:"show_nonvalidated"` ShowComments bool `json:"show_comments"` @@ -1238,7 +1236,7 @@ type VProfile struct { Tokens []VProfileToken `json:"tokens"` } -type VProfileProjectRole struct { +type VProfileProjectRoles struct { ID uuid.UUID `json:"id"` ProfileID uuid.UUID `json:"profile_id"` Edipi int64 `json:"edipi"` @@ -1258,35 +1256,34 @@ type VProject struct { Image interface{} `json:"image"` DistrictID *uuid.UUID `json:"district_id"` OfficeID *uuid.UUID `json:"office_id"` - Deleted bool `json:"deleted"` Slug string `json:"slug"` Name string `json:"name"` - Creator uuid.UUID `json:"creator"` - CreatorUsername *string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdaterUsername *string 
`json:"updater_username"` - UpdateDate *time.Time `json:"update_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedByUsername string `json:"created_by_username"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedByUsername *string `json:"updated_by_username"` + UpdatedAt *time.Time `json:"updated_at"` InstrumentCount int64 `json:"instrument_count"` InstrumentGroupCount int64 `json:"instrument_group_count"` } type VReportConfig struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Description string `json:"description"` - ProjectID uuid.UUID `json:"project_id"` - ProjectName string `json:"project_name"` - DistrictName *string `json:"district_name"` - Creator uuid.UUID `json:"creator"` - CreatorUsername string `json:"creator_username"` - CreateDate time.Time `json:"create_date"` - Updater *uuid.UUID `json:"updater"` - UpdaterUsername *string `json:"updater_username"` - UpdateDate *time.Time `json:"update_date"` - PlotConfigs []IDSlugName `json:"plot_configs"` - GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + DistrictName *string `json:"district_name"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedByUsername string `json:"created_by_username"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedByUsername *string `json:"updated_by_username"` + UpdatedAt *time.Time `json:"updated_at"` + PlotConfigs []IDSlugName `json:"plot_configs"` + GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides"` } type VSaaMeasurement struct { @@ -1315,58 +1312,58 @@ type VSubmittal struct { ProjectID uuid.UUID `json:"project_id"` SubmittalStatusID uuid.UUID `json:"submittal_status_id"` SubmittalStatusName string 
`json:"submittal_status_name"` - CompletionDate *time.Time `json:"completion_date"` - CreateDate time.Time `json:"create_date"` - DueDate time.Time `json:"due_date"` + CompletedAt *time.Time `json:"completed_at"` + CreatedAt time.Time `json:"created_at"` + DueAt time.Time `json:"due_at"` MarkedAsMissing bool `json:"marked_as_missing"` WarningSent bool `json:"warning_sent"` } type VTimeseries struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Type NullTimeseriesType `json:"type"` - IsComputed bool `json:"is_computed"` - Variable interface{} `json:"variable"` - InstrumentID uuid.UUID `json:"instrument_id"` - InstrumentSlug string `json:"instrument_slug"` - Instrument string `json:"instrument"` - ParameterID uuid.UUID `json:"parameter_id"` - Parameter string `json:"parameter"` - UnitID uuid.UUID `json:"unit_id"` - Unit string `json:"unit"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Type TimeseriesType `json:"type"` + IsComputed bool `json:"is_computed"` + Variable interface{} `json:"variable"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentSlug string `json:"instrument_slug"` + Instrument string `json:"instrument"` + ParameterID uuid.UUID `json:"parameter_id"` + Parameter string `json:"parameter"` + UnitID uuid.UUID `json:"unit_id"` + Unit string `json:"unit"` } type VTimeseriesComputed struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - InstrumentID *uuid.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Type NullTimeseriesType `json:"type"` - Contents *string `json:"contents"` -} - -type VTimeseriesCwm struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Type NullTimeseriesType `json:"type"` - IsComputed bool `json:"is_computed"` - Variable interface{} `json:"variable"` - InstrumentID uuid.UUID `json:"instrument_id"` - 
InstrumentSlug string `json:"instrument_slug"` - Instrument string `json:"instrument"` - ParameterID uuid.UUID `json:"parameter_id"` - Parameter string `json:"parameter"` - UnitID uuid.UUID `json:"unit_id"` - Unit string `json:"unit"` - CwmsTimeseriesID string `json:"cwms_timeseries_id"` - CwmsOfficeID string `json:"cwms_office_id"` - CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` - CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` + Contents *string `json:"contents"` +} + +type VTimeseriesCwms struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Type TimeseriesType `json:"type"` + IsComputed bool `json:"is_computed"` + Variable interface{} `json:"variable"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentSlug string `json:"instrument_slug"` + Instrument string `json:"instrument"` + ParameterID uuid.UUID `json:"parameter_id"` + Parameter string `json:"parameter"` + UnitID uuid.UUID `json:"unit_id"` + Unit string `json:"unit"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` } type VTimeseriesDependency struct { @@ -1391,13 +1388,13 @@ type VTimeseriesProjectMap struct { } type VTimeseriesStored struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - InstrumentID *uuid.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Type NullTimeseriesType `json:"type"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + InstrumentID 
*uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` } type VUnit struct { @@ -1416,12 +1413,12 @@ type VUploaderConfig struct { Slug string `json:"slug"` Name string `json:"name"` Description string `json:"description"` - CreateDate time.Time `json:"create_date"` - Creator uuid.UUID `json:"creator"` - CreatorUsername string `json:"creator_username"` - UpdateDate *time.Time `json:"update_date"` - Updater *uuid.UUID `json:"updater"` - UpdaterUsername *string `json:"updater_username"` + CreatedAt time.Time `json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedByUsername string `json:"created_by_username"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedByUsername *string `json:"updated_by_username"` Type UploaderConfigType `json:"type"` TzName string `json:"tz_name"` TimeField string `json:"time_field"` diff --git a/api/internal/db/overrides.go b/api/internal/db/overrides.go index 0119424d..fde5d3d9 100644 --- a/api/internal/db/overrides.go +++ b/api/internal/db/overrides.go @@ -9,8 +9,6 @@ import ( "time" "github.com/google/uuid" - "github.com/twpayne/go-geom/encoding/geojson" - "github.com/twpayne/go-geom/encoding/wkb" ) type Opts map[string]interface{} @@ -52,6 +50,7 @@ type CollectionGroupDetailsTimeseries struct { VTimeseries LatestTime *time.Time `json:"latest_time" db:"latest_time"` LatestValue *float32 `json:"latest_value" db:"latest_value"` + SortOrder int32 `json:"sort_order" db:"sort_order"` } type EmailAutocompleteResult struct { @@ -61,18 +60,6 @@ type EmailAutocompleteResult struct { Email string `json:"email"` } -type Geometry struct { - *wkb.Geom -} - -func (g Geometry) MarshalJSON() ([]byte, error) { - return geojson.Marshal(g.T) -} - -func (g *Geometry) UnmarshalJSON(v []byte) error { - return geojson.Unmarshal(v, &g.T) -} - type IDSlugName struct { ID uuid.UUID `json:"id"` Slug string 
`json:"slug"` @@ -93,6 +80,16 @@ type IpiMeasurement struct { Elelvation *float64 `json:"elevation"` } +type Measurement struct { + TimeseriesID uuid.UUID `json:"-"` + Time time.Time `json:"time"` + Value float64 `json:"value"` + Masked *bool `json:"masked,omitempty"` + Validated *bool `json:"validated,omitempty"` + Annotation *string `json:"annotation,omitempty"` + Error string `json:"error,omitempty"` +} + type VProfileToken struct { TokenID string `json:"token_id"` Issued time.Time `json:"issued"` diff --git a/api/internal/db/plot_config.sql_gen.go b/api/internal/db/plot_config.sql_gen.go index cfcef6ec..176d79a5 100644 --- a/api/internal/db/plot_config.sql_gen.go +++ b/api/internal/db/plot_config.sql_gen.go @@ -13,24 +13,24 @@ import ( ) const plotConfigCreate = `-- name: PlotConfigCreate :one -insert into plot_configuration (slug, name, project_id, creator, create_date, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) +insert into plot_configuration (slug, name, project_id, created_by, created_at, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) returning id ` type PlotConfigCreateParams struct { - Name string `json:"name"` - ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date"` - PlotType PlotType `json:"plot_type"` + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + PlotType PlotType `json:"plot_type"` } func (q *Queries) PlotConfigCreate(ctx context.Context, arg PlotConfigCreateParams) (uuid.UUID, error) { row := q.db.QueryRow(ctx, plotConfigCreate, arg.Name, arg.ProjectID, - arg.Creator, - arg.CreateDate, + arg.CreatedBy, + arg.CreatedAt, arg.PlotType, ) var id uuid.UUID @@ -53,7 +53,7 @@ func (q *Queries) PlotConfigDelete(ctx context.Context, arg PlotConfigDeletePara } const plotConfigGet = `-- name: PlotConfigGet :one -select id, slug, 
name, project_id, creator, create_date, updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display +select id, slug, name, project_id, created_by, created_at, updated_by, updated_at, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where id = $1 ` @@ -66,10 +66,10 @@ func (q *Queries) PlotConfigGet(ctx context.Context, id uuid.UUID) (VPlotConfigu &i.Slug, &i.Name, &i.ProjectID, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.ShowMasked, &i.ShowNonvalidated, &i.ShowComments, @@ -84,7 +84,7 @@ func (q *Queries) PlotConfigGet(ctx context.Context, id uuid.UUID) (VPlotConfigu } const plotConfigListForProject = `-- name: PlotConfigListForProject :many -select id, slug, name, project_id, creator, create_date, updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display +select id, slug, name, project_id, created_by, created_at, updated_by, updated_at, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where project_id = $1 ` @@ -103,10 +103,10 @@ func (q *Queries) PlotConfigListForProject(ctx context.Context, projectID uuid.U &i.Slug, &i.Name, &i.ProjectID, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.ShowMasked, &i.ShowNonvalidated, &i.ShowComments, @@ -165,15 +165,15 @@ func (q *Queries) PlotConfigSettingsDelete(ctx context.Context, id uuid.UUID) er } const plotConfigUpdate = `-- name: PlotConfigUpdate :exec -update plot_configuration set name = $3, updater = $4, update_date = $5 where project_id = $1 and id = $2 +update plot_configuration set name = $3, updated_by = $4, 
updated_at = $5 where project_id = $1 and id = $2 ` type PlotConfigUpdateParams struct { - ProjectID uuid.UUID `json:"project_id"` - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` } func (q *Queries) PlotConfigUpdate(ctx context.Context, arg PlotConfigUpdateParams) error { @@ -181,8 +181,8 @@ func (q *Queries) PlotConfigUpdate(ctx context.Context, arg PlotConfigUpdatePara arg.ProjectID, arg.ID, arg.Name, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, ) return err } diff --git a/api/internal/db/project.sql_gen.go b/api/internal/db/project.sql_gen.go index 5afef6f0..98596096 100644 --- a/api/internal/db/project.sql_gen.go +++ b/api/internal/db/project.sql_gen.go @@ -54,7 +54,7 @@ func (q *Queries) ProjectDeleteFlag(ctx context.Context, id uuid.UUID) error { } const projectGet = `-- name: ProjectGet :one -select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project where id = $1 +select id, federal_id, image, district_id, office_id, slug, name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, instrument_count, instrument_group_count from v_project where id = $1 ` func (q *Queries) ProjectGet(ctx context.Context, id uuid.UUID) (VProject, error) { @@ -66,15 +66,14 @@ func (q *Queries) ProjectGet(ctx context.Context, id uuid.UUID) (VProject, error &i.Image, &i.DistrictID, &i.OfficeID, - &i.Deleted, &i.Slug, &i.Name, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + 
&i.UpdatedByUsername, + &i.UpdatedAt, &i.InstrumentCount, &i.InstrumentGroupCount, ) @@ -93,7 +92,7 @@ func (q *Queries) ProjectGetCount(ctx context.Context) (int64, error) { } const projectList = `-- name: ProjectList :many -select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project +select id, federal_id, image, district_id, office_id, slug, name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, instrument_count, instrument_group_count from v_project ` func (q *Queries) ProjectList(ctx context.Context) ([]VProject, error) { @@ -111,15 +110,14 @@ func (q *Queries) ProjectList(ctx context.Context) ([]VProject, error) { &i.Image, &i.DistrictID, &i.OfficeID, - &i.Deleted, &i.Slug, &i.Name, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.InstrumentCount, &i.InstrumentGroupCount, ); err != nil { @@ -134,7 +132,7 @@ func (q *Queries) ProjectList(ctx context.Context) ([]VProject, error) { } const projectListForFederalID = `-- name: ProjectListForFederalID :many -select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project +select id, federal_id, image, district_id, office_id, slug, name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, instrument_count, instrument_group_count from v_project where federal_id = $1 ` @@ -153,15 +151,14 @@ func (q *Queries) ProjectListForFederalID(ctx context.Context, federalID *string &i.Image, &i.DistrictID, &i.OfficeID, - &i.Deleted, &i.Slug, &i.Name, - &i.Creator, - &i.CreatorUsername, - 
&i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.InstrumentCount, &i.InstrumentGroupCount, ); err != nil { @@ -176,7 +173,7 @@ func (q *Queries) ProjectListForFederalID(ctx context.Context, federalID *string } const projectListForNameSearch = `-- name: ProjectListForNameSearch :many -select id, federal_id, image, district_id, office_id, deleted, slug, name, creator, creator_username, create_date, updater, updater_username, update_date, instrument_count, instrument_group_count from v_project +select id, federal_id, image, district_id, office_id, slug, name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, instrument_count, instrument_group_count from v_project where name ilike '%'||$1||'%' limit $2 ` @@ -201,15 +198,14 @@ func (q *Queries) ProjectListForNameSearch(ctx context.Context, arg ProjectListF &i.Image, &i.DistrictID, &i.OfficeID, - &i.Deleted, &i.Slug, &i.Name, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.InstrumentCount, &i.InstrumentGroupCount, ); err != nil { @@ -251,7 +247,7 @@ func (q *Queries) ProjectListForProfileAdmin(ctx context.Context, profileID uuid } const projectListForProfileRole = `-- name: ProjectListForProfileRole :many -select p.id, p.federal_id, p.image, p.district_id, p.office_id, p.deleted, p.slug, p.name, p.creator, p.creator_username, p.create_date, p.updater, p.updater_username, p.update_date, p.instrument_count, p.instrument_group_count +select p.id, p.federal_id, p.image, p.district_id, p.office_id, p.slug, p.name, p.created_by, p.created_by_username, p.created_at, p.updated_by, p.updated_by_username, p.updated_at, p.instrument_count, p.instrument_group_count from v_project p inner join 
profile_project_roles pr on pr.project_id = p.id inner join role r on r.id = pr.role_id @@ -279,15 +275,14 @@ func (q *Queries) ProjectListForProfileRole(ctx context.Context, arg ProjectList &i.Image, &i.DistrictID, &i.OfficeID, - &i.Deleted, &i.Slug, &i.Name, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.InstrumentCount, &i.InstrumentGroupCount, ); err != nil { @@ -302,14 +297,14 @@ func (q *Queries) ProjectListForProfileRole(ctx context.Context, arg ProjectList } const projectUpdate = `-- name: ProjectUpdate :one -update project set name=$2, updater=$3, update_date=$4, district_id=$5, federal_id=$6 where id=$1 returning id +update project set name=$2, updated_by=$3, updated_at=$4, district_id=$5, federal_id=$6 where id=$1 returning id ` type ProjectUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` DistrictID *uuid.UUID `json:"district_id"` FederalID *string `json:"federal_id"` } @@ -318,8 +313,8 @@ func (q *Queries) ProjectUpdate(ctx context.Context, arg ProjectUpdateParams) (u row := q.db.QueryRow(ctx, projectUpdate, arg.ID, arg.Name, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, arg.DistrictID, arg.FederalID, ) diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index c50f19ae..1c276a76 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -20,9 +20,9 @@ type Querier interface { AlertConfigListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) AlertConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) AlertConfigListForProjectAlertType(ctx context.Context, arg 
AlertConfigListForProjectAlertTypeParams) ([]VAlertConfig, error) - AlertConfigListUpdateLastChecked(ctx context.Context) ([]VAlertConfig, error) + AlertConfigListUpdateLastCheckedAt(ctx context.Context) ([]VAlertConfig, error) AlertConfigUpdate(ctx context.Context, arg AlertConfigUpdateParams) error - AlertConfigUpdateLastReminded(ctx context.Context, arg AlertConfigUpdateLastRemindedParams) error + AlertConfigUpdateLastRemindedAt(ctx context.Context, arg AlertConfigUpdateLastRemindedAtParams) error AlertCreate(ctx context.Context, alertConfigID uuid.UUID) error AlertCreateBatch(ctx context.Context, alertConfigID []uuid.UUID) *AlertCreateBatchBatchResults AlertEmailSubscriptionCreate(ctx context.Context, arg AlertEmailSubscriptionCreateParams) error @@ -47,10 +47,10 @@ type Querier interface { AwarePlatformCreateBatch(ctx context.Context, arg []AwarePlatformCreateBatchParams) *AwarePlatformCreateBatchBatchResults AwarePlatformParameterListEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) CalculationCreate(ctx context.Context, arg CalculationCreateParams) error - CalculationCreateOrUpdate(ctx context.Context, arg CalculationCreateOrUpdateParams) error + CalculationUpdate(ctx context.Context, arg CalculationUpdateParams) error CollectionGroupCreate(ctx context.Context, arg CollectionGroupCreateParams) (CollectionGroup, error) CollectionGroupDelete(ctx context.Context, arg CollectionGroupDeleteParams) error - CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) (VCollectionGroupDetail, error) + CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) (VCollectionGroupDetails, error) CollectionGroupListForProject(ctx context.Context, projectID uuid.UUID) ([]CollectionGroup, error) CollectionGroupTimeseriesCreate(ctx context.Context, arg CollectionGroupTimeseriesCreateParams) error CollectionGroupTimeseriesDelete(ctx context.Context, arg CollectionGroupTimeseriesDeleteParams) error @@ -79,8 +79,8 @@ type Querier interface { 
DataloggerTablePreviewUpdate(ctx context.Context, arg DataloggerTablePreviewUpdateParams) error DataloggerTableUpdateNameIfEmpty(ctx context.Context, arg DataloggerTableUpdateNameIfEmptyParams) error DataloggerUpdate(ctx context.Context, arg DataloggerUpdateParams) error + DataloggerUpdateAuditInfo(ctx context.Context, arg DataloggerUpdateAuditInfoParams) error DataloggerUpdateTableNameBlank(ctx context.Context, id uuid.UUID) error - DataloggerUpdateUpdater(ctx context.Context, arg DataloggerUpdateUpdaterParams) error DistrictList(ctx context.Context) ([]VDistrict, error) DistrictRollupListEvaluationForProjectAlertConfig(ctx context.Context, arg DistrictRollupListEvaluationForProjectAlertConfigParams) ([]VDistrictRollup, error) DistrictRollupListMeasurementForProjectAlertConfig(ctx context.Context, arg DistrictRollupListMeasurementForProjectAlertConfigParams) ([]VDistrictRollup, error) @@ -128,17 +128,16 @@ type Querier interface { InstrumentDeleteFlag(ctx context.Context, arg InstrumentDeleteFlagParams) error InstrumentGet(ctx context.Context, id uuid.UUID) (VInstrument, error) InstrumentGetCount(ctx context.Context) (int64, error) - InstrumentGroupCreate(ctx context.Context, arg InstrumentGroupCreateParams) (InstrumentGroup, error) + InstrumentGroupCreate(ctx context.Context, arg InstrumentGroupCreateParams) (InstrumentGroupCreateRow, error) InstrumentGroupCreateBatch(ctx context.Context, arg []InstrumentGroupCreateBatchParams) *InstrumentGroupCreateBatchBatchResults InstrumentGroupDeleteFlag(ctx context.Context, id uuid.UUID) error - InstrumentGroupGet(ctx context.Context, id uuid.UUID) ([]VInstrumentGroup, error) + InstrumentGroupGet(ctx context.Context, id uuid.UUID) (VInstrumentGroup, error) InstrumentGroupInstrumentCreate(ctx context.Context, arg InstrumentGroupInstrumentCreateParams) error InstrumentGroupInstrumentDelete(ctx context.Context, arg InstrumentGroupInstrumentDeleteParams) error InstrumentGroupList(ctx context.Context) ([]VInstrumentGroup, error) 
InstrumentGroupListForProject(ctx context.Context, projectID *uuid.UUID) ([]VInstrumentGroup, error) - InstrumentGroupUpdate(ctx context.Context, arg InstrumentGroupUpdateParams) (InstrumentGroup, error) + InstrumentGroupUpdate(ctx context.Context, arg InstrumentGroupUpdateParams) (InstrumentGroupUpdateRow, error) InstrumentIDNameListByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]InstrumentIDNameListByIDsRow, error) - InstrumentList(ctx context.Context) ([]VInstrument, error) InstrumentListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VInstrument, error) InstrumentListForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) InstrumentNoteCreate(ctx context.Context, arg InstrumentNoteCreateParams) (InstrumentNote, error) @@ -274,17 +273,19 @@ type Querier interface { SubmittalUpdateNextForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) SubmittalUpdateVerifyMissing(ctx context.Context, id uuid.UUID) error SubmittalUpdateVerifyMissingForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) error + // the below queried are needed becuase the slug is currently used as the variable name, it would + // be better if we used a generated column for this on the timeseries table, maybe converted to snake_case TimeseriesComputedCreate(ctx context.Context, arg TimeseriesComputedCreateParams) (uuid.UUID, error) - TimeseriesComputedCreateOrUpdate(ctx context.Context, arg TimeseriesComputedCreateOrUpdateParams) error TimeseriesComputedDelete(ctx context.Context, id uuid.UUID) error TimeseriesComputedGet(ctx context.Context, id uuid.UUID) (TimeseriesComputedGetRow, error) TimeseriesComputedListForInstrument(ctx context.Context, instrumentID *uuid.UUID) ([]TimeseriesComputedListForInstrumentRow, error) + TimeseriesComputedUpdate(ctx context.Context, arg TimeseriesComputedUpdateParams) error TimeseriesCreate(ctx context.Context, arg TimeseriesCreateParams) (TimeseriesCreateRow, error) 
TimeseriesCreateBatch(ctx context.Context, arg []TimeseriesCreateBatchParams) *TimeseriesCreateBatchBatchResults TimeseriesCwmsCreate(ctx context.Context, arg TimeseriesCwmsCreateParams) error TimeseriesCwmsCreateBatch(ctx context.Context, arg []TimeseriesCwmsCreateBatchParams) *TimeseriesCwmsCreateBatchBatchResults - TimeseriesCwmsGet(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) - TimeseriesCwmsList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) + TimeseriesCwmsGet(ctx context.Context, id uuid.UUID) (VTimeseriesCwms, error) + TimeseriesCwmsList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwms, error) TimeseriesCwmsUpdate(ctx context.Context, arg TimeseriesCwmsUpdateParams) error TimeseriesDelete(ctx context.Context, id uuid.UUID) error TimeseriesGet(ctx context.Context, id uuid.UUID) (VTimeseries, error) @@ -303,7 +304,7 @@ type Querier interface { TimeseriesMeasurementDeleteRange(ctx context.Context, arg TimeseriesMeasurementDeleteRangeParams) error TimeseriesMeasurementDeleteRangeBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteRangeBatchParams) *TimeseriesMeasurementDeleteRangeBatchBatchResults TimeseriesMeasurementGetMostRecent(ctx context.Context, timeseriesID uuid.UUID) (TimeseriesMeasurement, error) - TimeseriesMeasurementListRange(ctx context.Context, arg TimeseriesMeasurementListRangeParams) ([]VTimeseriesMeasurement, error) + TimeseriesMeasurementListForRange(ctx context.Context, arg TimeseriesMeasurementListForRangeParams) ([]VTimeseriesMeasurement, error) TimeseriesNoteCreate(ctx context.Context, arg TimeseriesNoteCreateParams) error TimeseriesNoteCreateBatch(ctx context.Context, arg []TimeseriesNoteCreateBatchParams) *TimeseriesNoteCreateBatchBatchResults TimeseriesNoteCreateOrUpdate(ctx context.Context, arg TimeseriesNoteCreateOrUpdateParams) error @@ -316,7 +317,7 @@ type Querier interface { UnitsList(ctx context.Context) ([]VUnit, error) UploaderConfigCreate(ctx context.Context, arg 
UploaderConfigCreateParams) (uuid.UUID, error) UploaderConfigDelete(ctx context.Context, id uuid.UUID) error - UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) + UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VUploaderConfig, error) UploaderConfigMappingCreateBatch(ctx context.Context, arg []UploaderConfigMappingCreateBatchParams) *UploaderConfigMappingCreateBatchBatchResults UploaderConfigMappingDeleteForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error UploaderConfigMappingList(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) diff --git a/api/internal/db/report_config.sql_gen.go b/api/internal/db/report_config.sql_gen.go index a3490fa8..e5d02ed3 100644 --- a/api/internal/db/report_config.sql_gen.go +++ b/api/internal/db/report_config.sql_gen.go @@ -14,7 +14,7 @@ import ( const reportConfigCreate = `-- name: ReportConfigCreate :one insert into report_config ( - name, slug, project_id, creator, description, date_range, date_range_enabled, + name, slug, project_id, created_by, description, date_range, date_range_enabled, show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled ) values ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) @@ -24,7 +24,7 @@ returning id type ReportConfigCreateParams struct { Name string `json:"name"` ProjectID uuid.UUID `json:"project_id"` - Creator uuid.UUID `json:"creator"` + CreatedBy uuid.UUID `json:"created_by"` Description string `json:"description"` DateRange *string `json:"date_range"` DateRangeEnabled *bool `json:"date_range_enabled"` @@ -38,7 +38,7 @@ func (q *Queries) ReportConfigCreate(ctx context.Context, arg ReportConfigCreate row := q.db.QueryRow(ctx, reportConfigCreate, arg.Name, arg.ProjectID, - arg.Creator, + arg.CreatedBy, arg.Description, arg.DateRange, arg.DateRangeEnabled, @@ -62,7 +62,7 @@ func (q *Queries) ReportConfigDelete(ctx context.Context, 
id uuid.UUID) error { } const reportConfigGet = `-- name: ReportConfigGet :one -select id, slug, name, description, project_id, project_name, district_name, creator, creator_username, create_date, updater, updater_username, update_date, plot_configs, global_overrides from v_report_config where id = $1 +select id, slug, name, description, project_id, project_name, district_name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, plot_configs, global_overrides from v_report_config where id = $1 ` func (q *Queries) ReportConfigGet(ctx context.Context, id uuid.UUID) (VReportConfig, error) { @@ -76,12 +76,12 @@ func (q *Queries) ReportConfigGet(ctx context.Context, id uuid.UUID) (VReportCon &i.ProjectID, &i.ProjectName, &i.DistrictName, - &i.Creator, - &i.CreatorUsername, - &i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.PlotConfigs, &i.GlobalOverrides, ) @@ -89,7 +89,7 @@ func (q *Queries) ReportConfigGet(ctx context.Context, id uuid.UUID) (VReportCon } const reportConfigListForProject = `-- name: ReportConfigListForProject :many -select id, slug, name, description, project_id, project_name, district_name, creator, creator_username, create_date, updater, updater_username, update_date, plot_configs, global_overrides from v_report_config where project_id = $1 +select id, slug, name, description, project_id, project_name, district_name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, plot_configs, global_overrides from v_report_config where project_id = $1 ` func (q *Queries) ReportConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) { @@ -109,12 +109,12 @@ func (q *Queries) ReportConfigListForProject(ctx context.Context, projectID uuid &i.ProjectID, &i.ProjectName, &i.DistrictName, - &i.Creator, - &i.CreatorUsername, - 
&i.CreateDate, - &i.Updater, - &i.UpdaterUsername, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, &i.PlotConfigs, &i.GlobalOverrides, ); err != nil { @@ -129,7 +129,7 @@ func (q *Queries) ReportConfigListForProject(ctx context.Context, projectID uuid } const reportConfigListForReportConfigWithPlotConfig = `-- name: ReportConfigListForReportConfigWithPlotConfig :many -select id, slug, name, project_id, creator, create_date, updater, update_date, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where id = any( +select id, slug, name, project_id, created_by, created_at, updated_by, updated_at, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where id = any( select plot_config_id from report_config_plot_config where report_config_id = $1 ) ` @@ -148,10 +148,10 @@ func (q *Queries) ReportConfigListForReportConfigWithPlotConfig(ctx context.Cont &i.Slug, &i.Name, &i.ProjectID, - &i.Creator, - &i.CreateDate, - &i.Updater, - &i.UpdateDate, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, &i.ShowMasked, &i.ShowNonvalidated, &i.ShowComments, @@ -211,15 +211,15 @@ func (q *Queries) ReportConfigPlotConfigDeleteForReportConfig(ctx context.Contex const reportConfigUpdate = `-- name: ReportConfigUpdate :exec update report_config set name=$2, -updater=$3, update_date=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, +updated_by=$3, updated_at=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 where id=$1 ` type ReportConfigUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` - Updater *uuid.UUID `json:"updater"` - UpdateDate *time.Time `json:"update_date"` + 
UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` Description string `json:"description"` DateRange *string `json:"date_range"` DateRangeEnabled *bool `json:"date_range_enabled"` @@ -233,8 +233,8 @@ func (q *Queries) ReportConfigUpdate(ctx context.Context, arg ReportConfigUpdate _, err := q.db.Exec(ctx, reportConfigUpdate, arg.ID, arg.Name, - arg.Updater, - arg.UpdateDate, + arg.UpdatedBy, + arg.UpdatedAt, arg.Description, arg.DateRange, arg.DateRangeEnabled, @@ -247,68 +247,68 @@ func (q *Queries) ReportConfigUpdate(ctx context.Context, arg ReportConfigUpdate } const reportDownloadJobCreate = `-- name: ReportDownloadJobCreate :one -insert into report_download_job (report_config_id, creator) values ($1, $2) returning id, report_config_id, creator, create_date, status, file_key, file_expiry, progress, progress_update_date +insert into report_download_job (report_config_id, created_by) values ($1, $2) returning id, report_config_id, created_by, created_at, status, file_key, file_expiry, progress, progress_updated_at ` type ReportDownloadJobCreateParams struct { ReportConfigID *uuid.UUID `json:"report_config_id"` - Creator uuid.UUID `json:"creator"` + CreatedBy uuid.UUID `json:"created_by"` } func (q *Queries) ReportDownloadJobCreate(ctx context.Context, arg ReportDownloadJobCreateParams) (ReportDownloadJob, error) { - row := q.db.QueryRow(ctx, reportDownloadJobCreate, arg.ReportConfigID, arg.Creator) + row := q.db.QueryRow(ctx, reportDownloadJobCreate, arg.ReportConfigID, arg.CreatedBy) var i ReportDownloadJob err := row.Scan( &i.ID, &i.ReportConfigID, - &i.Creator, - &i.CreateDate, + &i.CreatedBy, + &i.CreatedAt, &i.Status, &i.FileKey, &i.FileExpiry, &i.Progress, - &i.ProgressUpdateDate, + &i.ProgressUpdatedAt, ) return i, err } const reportDownloadJobGet = `-- name: ReportDownloadJobGet :one -select id, report_config_id, creator, create_date, status, file_key, file_expiry, progress, progress_update_date from report_download_job 
where id=$1 and creator=$2 +select id, report_config_id, created_by, created_at, status, file_key, file_expiry, progress, progress_updated_at from report_download_job where id=$1 and created_by=$2 ` type ReportDownloadJobGetParams struct { - ID uuid.UUID `json:"id"` - Creator uuid.UUID `json:"creator"` + ID uuid.UUID `json:"id"` + CreatedBy uuid.UUID `json:"created_by"` } func (q *Queries) ReportDownloadJobGet(ctx context.Context, arg ReportDownloadJobGetParams) (ReportDownloadJob, error) { - row := q.db.QueryRow(ctx, reportDownloadJobGet, arg.ID, arg.Creator) + row := q.db.QueryRow(ctx, reportDownloadJobGet, arg.ID, arg.CreatedBy) var i ReportDownloadJob err := row.Scan( &i.ID, &i.ReportConfigID, - &i.Creator, - &i.CreateDate, + &i.CreatedBy, + &i.CreatedAt, &i.Status, &i.FileKey, &i.FileExpiry, &i.Progress, - &i.ProgressUpdateDate, + &i.ProgressUpdatedAt, ) return i, err } const reportDownloadJobUpdate = `-- name: ReportDownloadJobUpdate :exec -update report_download_job set status=$2, progress=$3, progress_update_date=$4, file_key=$5, file_expiry=$6 where id=$1 +update report_download_job set status=$2, progress=$3, progress_updated_at=$4, file_key=$5, file_expiry=$6 where id=$1 ` type ReportDownloadJobUpdateParams struct { - ID uuid.UUID `json:"id"` - Status JobStatus `json:"status"` - Progress int32 `json:"progress"` - ProgressUpdateDate time.Time `json:"progress_update_date"` - FileKey *string `json:"file_key"` - FileExpiry *time.Time `json:"file_expiry"` + ID uuid.UUID `json:"id"` + Status JobStatus `json:"status"` + Progress int32 `json:"progress"` + ProgressUpdatedAt time.Time `json:"progress_updated_at"` + FileKey *string `json:"file_key"` + FileExpiry *time.Time `json:"file_expiry"` } func (q *Queries) ReportDownloadJobUpdate(ctx context.Context, arg ReportDownloadJobUpdateParams) error { @@ -316,7 +316,7 @@ func (q *Queries) ReportDownloadJobUpdate(ctx context.Context, arg ReportDownloa arg.ID, arg.Status, arg.Progress, - arg.ProgressUpdateDate, + 
arg.ProgressUpdatedAt, arg.FileKey, arg.FileExpiry, ) diff --git a/api/internal/db/submittal.sql_gen.go b/api/internal/db/submittal.sql_gen.go index 7ad72208..f29e74ec 100644 --- a/api/internal/db/submittal.sql_gen.go +++ b/api/internal/db/submittal.sql_gen.go @@ -13,11 +13,11 @@ import ( ) const submittalListForAlertConfig = `-- name: SubmittalListForAlertConfig :many -select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, create_date, due_date, marked_as_missing, warning_sent +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completed_at, created_at, due_at, marked_as_missing, warning_sent from v_submittal where alert_config_id = $1 -and ($2::boolean = false or (completion_date is null and not marked_as_missing)) -order by due_date desc +and ($2::boolean = false or (completed_at is null and not marked_as_missing)) +order by due_at desc ` type SubmittalListForAlertConfigParams struct { @@ -43,9 +43,9 @@ func (q *Queries) SubmittalListForAlertConfig(ctx context.Context, arg Submittal &i.ProjectID, &i.SubmittalStatusID, &i.SubmittalStatusName, - &i.CompletionDate, - &i.CreateDate, - &i.DueDate, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, &i.MarkedAsMissing, &i.WarningSent, ); err != nil { @@ -60,12 +60,12 @@ func (q *Queries) SubmittalListForAlertConfig(ctx context.Context, arg Submittal } const submittalListForInstrument = `-- name: SubmittalListForInstrument :many -select sub.id, sub.alert_config_id, sub.alert_config_name, sub.alert_type_id, sub.alert_type_name, sub.project_id, sub.submittal_status_id, sub.submittal_status_name, sub.completion_date, sub.create_date, sub.due_date, sub.marked_as_missing, sub.warning_sent +select sub.id, sub.alert_config_id, sub.alert_config_name, sub.alert_type_id, sub.alert_type_name, sub.project_id, sub.submittal_status_id, 
sub.submittal_status_name, sub.completed_at, sub.created_at, sub.due_at, sub.marked_as_missing, sub.warning_sent from v_submittal sub inner join alert_config_instrument aci on aci.alert_config_id = sub.alert_config_id where aci.instrument_id = $1 -and ($2::boolean = false or (completion_date is null and not marked_as_missing)) -order by sub.due_date desc +and ($2::boolean = false or (completed_at is null and not marked_as_missing)) +order by sub.due_at desc ` type SubmittalListForInstrumentParams struct { @@ -91,9 +91,9 @@ func (q *Queries) SubmittalListForInstrument(ctx context.Context, arg SubmittalL &i.ProjectID, &i.SubmittalStatusID, &i.SubmittalStatusName, - &i.CompletionDate, - &i.CreateDate, - &i.DueDate, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, &i.MarkedAsMissing, &i.WarningSent, ); err != nil { @@ -108,11 +108,11 @@ func (q *Queries) SubmittalListForInstrument(ctx context.Context, arg SubmittalL } const submittalListForProject = `-- name: SubmittalListForProject :many -select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, create_date, due_date, marked_as_missing, warning_sent +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completed_at, created_at, due_at, marked_as_missing, warning_sent from v_submittal where project_id = $1 -and ($2::boolean = false or (completion_date is null and not marked_as_missing)) -order by due_date desc, alert_type_name asc +and ($2::boolean = false or (completed_at is null and not marked_as_missing)) +order by due_at desc, alert_type_name asc ` type SubmittalListForProjectParams struct { @@ -138,9 +138,9 @@ func (q *Queries) SubmittalListForProject(ctx context.Context, arg SubmittalList &i.ProjectID, &i.SubmittalStatusID, &i.SubmittalStatusName, - &i.CompletionDate, - &i.CreateDate, - &i.DueDate, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, 
&i.MarkedAsMissing, &i.WarningSent, ); err != nil { @@ -155,11 +155,11 @@ func (q *Queries) SubmittalListForProject(ctx context.Context, arg SubmittalList } const submittalListUnverifiedMissing = `-- name: SubmittalListUnverifiedMissing :many -select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completion_date, create_date, due_date, marked_as_missing, warning_sent +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completed_at, created_at, due_at, marked_as_missing, warning_sent from v_submittal -where completion_date is null +where completed_at is null and not marked_as_missing -order by due_date desc +order by due_at desc ` func (q *Queries) SubmittalListUnverifiedMissing(ctx context.Context) ([]VSubmittal, error) { @@ -180,9 +180,9 @@ func (q *Queries) SubmittalListUnverifiedMissing(ctx context.Context) ([]VSubmit &i.ProjectID, &i.SubmittalStatusID, &i.SubmittalStatusName, - &i.CompletionDate, - &i.CreateDate, - &i.DueDate, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, &i.MarkedAsMissing, &i.WarningSent, ); err != nil { @@ -199,7 +199,7 @@ func (q *Queries) SubmittalListUnverifiedMissing(ctx context.Context) ([]VSubmit const submittalUpdate = `-- name: SubmittalUpdate :exec update submittal set submittal_status_id = $2, - completion_date = $3, + completed_at = $3, warning_sent = $4 where id = $1 ` @@ -207,7 +207,7 @@ where id = $1 type SubmittalUpdateParams struct { ID uuid.UUID `json:"id"` SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` - CompletionDate *time.Time `json:"completion_date"` + CompletedAt *time.Time `json:"completed_at"` WarningSent bool `json:"warning_sent"` } @@ -215,7 +215,7 @@ func (q *Queries) SubmittalUpdate(ctx context.Context, arg SubmittalUpdateParams _, err := q.db.Exec(ctx, submittalUpdate, arg.ID, arg.SubmittalStatusID, - arg.CompletionDate, + arg.CompletedAt, 
arg.WarningSent, ) return err @@ -226,8 +226,8 @@ update submittal set submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, marked_as_missing = true where id = $1 -and completion_date is null -and now() > due_date +and completed_at is null +and now() > due_at ` func (q *Queries) SubmittalUpdateVerifyMissing(ctx context.Context, id uuid.UUID) error { @@ -240,8 +240,8 @@ update submittal set submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, marked_as_missing = true where alert_config_id = $1 -and completion_date is null -and now() > due_date +and completed_at is null +and now() > due_at ` func (q *Queries) SubmittalUpdateVerifyMissingForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) error { diff --git a/api/internal/db/timeseries.sql_gen.go b/api/internal/db/timeseries.sql_gen.go index 8779ba05..a876f1bb 100644 --- a/api/internal/db/timeseries.sql_gen.go +++ b/api/internal/db/timeseries.sql_gen.go @@ -18,21 +18,21 @@ returning id, instrument_id, slug, name, parameter_id, unit_id, type ` type TimeseriesCreateParams struct { - InstrumentID *uuid.UUID `json:"instrument_id"` - Name string `json:"name"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Type NullTimeseriesType `json:"type"` + InstrumentID *uuid.UUID `json:"instrument_id"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` } type TimeseriesCreateRow struct { - ID uuid.UUID `json:"id"` - InstrumentID *uuid.UUID `json:"instrument_id"` - Slug string `json:"slug"` - Name string `json:"name"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Type NullTimeseriesType `json:"type"` + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + Slug string `json:"slug"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type 
TimeseriesType `json:"type"` } func (q *Queries) TimeseriesCreate(ctx context.Context, arg TimeseriesCreateParams) (TimeseriesCreateRow, error) { @@ -286,7 +286,7 @@ func (q *Queries) TimeseriesListForProject(ctx context.Context, projectID uuid.U } const timeseriesUpdate = `-- name: TimeseriesUpdate :exec -update timeseries set name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 +update timeseries set name=$2, instrument_id=$3, parameter_id=$4, unit_id=$5 where id = $1 ` diff --git a/api/internal/db/timeseries_calculated.sql_gen.go b/api/internal/db/timeseries_calculated.sql_gen.go index 62311be3..a4ed115d 100644 --- a/api/internal/db/timeseries_calculated.sql_gen.go +++ b/api/internal/db/timeseries_calculated.sql_gen.go @@ -25,33 +25,24 @@ func (q *Queries) CalculationCreate(ctx context.Context, arg CalculationCreatePa return err } -const calculationCreateOrUpdate = `-- name: CalculationCreateOrUpdate :exec -with p as ( - select contents from calculation where timeseries_id=$1 -) -insert into calculation (timeseries_id, contents) values ($1, $2) -on conflict (timeseries_id) do update set contents=coalesce(excluded.contents, p.contents) +const calculationUpdate = `-- name: CalculationUpdate :exec +update calculation set contents=$2 where timeseries_id=$1 ` -type CalculationCreateOrUpdateParams struct { +type CalculationUpdateParams struct { TimeseriesID uuid.UUID `json:"timeseries_id"` Contents *string `json:"contents"` } -func (q *Queries) CalculationCreateOrUpdate(ctx context.Context, arg CalculationCreateOrUpdateParams) error { - _, err := q.db.Exec(ctx, calculationCreateOrUpdate, arg.TimeseriesID, arg.Contents) +func (q *Queries) CalculationUpdate(ctx context.Context, arg CalculationUpdateParams) error { + _, err := q.db.Exec(ctx, calculationUpdate, arg.TimeseriesID, arg.Contents) return err } const timeseriesComputedCreate = `-- name: TimeseriesComputedCreate :one -insert into timeseries ( - instrument_id, - parameter_id, - unit_id, - slug, - name, 
- type -) values ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') + +insert into timeseries (instrument_id, parameter_id, unit_id, slug, name, type) +values ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') returning id ` @@ -62,6 +53,8 @@ type TimeseriesComputedCreateParams struct { Name string `json:"name"` } +// the below queried are needed becuase the slug is currently used as the variable name, it would +// be better if we used a generated column for this on the timeseries table, maybe converted to snake_case func (q *Queries) TimeseriesComputedCreate(ctx context.Context, arg TimeseriesComputedCreateParams) (uuid.UUID, error) { row := q.db.QueryRow(ctx, timeseriesComputedCreate, arg.InstrumentID, @@ -74,50 +67,8 @@ func (q *Queries) TimeseriesComputedCreate(ctx context.Context, arg TimeseriesCo return id, err } -const timeseriesComputedCreateOrUpdate = `-- name: TimeseriesComputedCreateOrUpdate :exec -with p as ( - select id, slug, name, instrument_id, parameter_id, unit_id, type from timeseries - where id=$1 -) -insert into timeseries ( - id, - instrument_id, - parameter_id, - unit_id, - slug, - name, - type -) values ($1, $2, $3, $4, slugify($5, 'timeseries'), $5, 'computed') -on conflict (id) do update set - instrument_id=coalesce(excluded.instrument_id, p.instrument_id), - parameter_id=coalesce(excluded.parameter_id, p.parameter_id), - unit_id=coalesce(excluded.unit_id, p.unit_id), - slug=coalesce(excluded.slug, p.slug), - name=coalesce(excluded.name, p.name), - type='computed' -` - -type TimeseriesComputedCreateOrUpdateParams struct { - ID uuid.UUID `json:"id"` - InstrumentID *uuid.UUID `json:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id"` - UnitID uuid.UUID `json:"unit_id"` - Name string `json:"name"` -} - -func (q *Queries) TimeseriesComputedCreateOrUpdate(ctx context.Context, arg TimeseriesComputedCreateOrUpdateParams) error { - _, err := q.db.Exec(ctx, timeseriesComputedCreateOrUpdate, - arg.ID, - arg.InstrumentID, - 
arg.ParameterID, - arg.UnitID, - arg.Name, - ) - return err -} - const timeseriesComputedDelete = `-- name: TimeseriesComputedDelete :exec -delete from timeseries where id = $1 and id in (select timeseries_id from calculation) +delete from timeseries where id = $1 and id = any(select timeseries_id from calculation) ` func (q *Queries) TimeseriesComputedDelete(ctx context.Context, id uuid.UUID) error { @@ -213,3 +164,31 @@ func (q *Queries) TimeseriesComputedListForInstrument(ctx context.Context, instr } return items, nil } + +const timeseriesComputedUpdate = `-- name: TimeseriesComputedUpdate :exec +update timeseries set + parameter_id=$2, + unit_id=$3, + slug=$4, + name=$5 +where id = $1 +` + +type TimeseriesComputedUpdateParams struct { + ID uuid.UUID `json:"id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Slug string `json:"slug"` + Name string `json:"name"` +} + +func (q *Queries) TimeseriesComputedUpdate(ctx context.Context, arg TimeseriesComputedUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesComputedUpdate, + arg.ID, + arg.ParameterID, + arg.UnitID, + arg.Slug, + arg.Name, + ) + return err +} diff --git a/api/internal/db/timeseries_cwms.sql_gen.go b/api/internal/db/timeseries_cwms.sql_gen.go index b5729e03..1d7d9628 100644 --- a/api/internal/db/timeseries_cwms.sql_gen.go +++ b/api/internal/db/timeseries_cwms.sql_gen.go @@ -41,9 +41,9 @@ select id, slug, name, type, is_computed, variable, instrument_id, instrument_sl where id = $1 ` -func (q *Queries) TimeseriesCwmsGet(ctx context.Context, id uuid.UUID) (VTimeseriesCwm, error) { +func (q *Queries) TimeseriesCwmsGet(ctx context.Context, id uuid.UUID) (VTimeseriesCwms, error) { row := q.db.QueryRow(ctx, timeseriesCwmsGet, id) - var i VTimeseriesCwm + var i VTimeseriesCwms err := row.Scan( &i.ID, &i.Slug, @@ -71,15 +71,15 @@ select id, slug, name, type, is_computed, variable, instrument_id, instrument_sl where instrument_id = $1 ` -func (q *Queries) 
TimeseriesCwmsList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwm, error) { +func (q *Queries) TimeseriesCwmsList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwms, error) { rows, err := q.db.Query(ctx, timeseriesCwmsList, instrumentID) if err != nil { return nil, err } defer rows.Close() - items := []VTimeseriesCwm{} + items := []VTimeseriesCwms{} for rows.Next() { - var i VTimeseriesCwm + var i VTimeseriesCwms if err := rows.Scan( &i.ID, &i.Slug, diff --git a/api/internal/db/timeseries_process.manual.go b/api/internal/db/timeseries_process.manual.go index 02bc3898..97a19ac2 100644 --- a/api/internal/db/timeseries_process.manual.go +++ b/api/internal/db/timeseries_process.manual.go @@ -79,11 +79,6 @@ func (cc *TimeseriesMeasurementCollectionCollection) TimeseriesIDs() map[uuid.UU return dd } -type Measurement struct { - VTimeseriesMeasurement - Error string `json:"error,omitempty"` -} - type FloatNanInf float64 func (j FloatNanInf) MarshalJSON() ([]byte, error) { @@ -129,22 +124,6 @@ type MeasurementCollectionLean struct { type MeasurementLean map[time.Time]float64 -func (ml MeasurementLean) getTime() time.Time { - var t time.Time - for k := range ml { - t = k - } - return t -} - -func (ml MeasurementLean) getValue() float64 { - var m float64 - for _, v := range ml { - m = v - } - return m -} - func (mrc *ProcessTimeseriesResponseCollection) GroupByInstrument(threshold int) (map[uuid.UUID][]MeasurementCollectionLean, error) { if len(*mrc) == 0 { return make(map[uuid.UUID][]MeasurementCollectionLean), nil @@ -195,11 +174,8 @@ func (mrc *ProcessTimeseriesResponseCollection) CollectSingleTimeseries(threshol mmts := make([]Measurement, len(t.Measurements)) for i, m := range t.Measurements { mmts[i] = Measurement{ - VTimeseriesMeasurement: VTimeseriesMeasurement{ - TimeseriesID: t.TimeseriesID, - Time: m.Time, - Value: m.Value, - }, + Time: m.Time, + Value: m.Value, Error: m.Error, } } @@ -386,20 +362,20 @@ func 
queryTimeseriesMeasurements(ctx context.Context, q *Queries, f ProcessMeasu where ` + filterSQL + ` ) ), next_low as ( - select nlm.timeseries_id as timeseries_id, nlm.time, m1.value + select nlm.timeseries_id as timeseries_id, json_build_object('time', nlm.time, 'value', m1.value) measurement from ( - select timeseries_id, max(time) as time + select timeseries_id, max("time") "time" from timeseries_measurement - where timeseries_id in (select id from required_timeseries) and time < $2 + where timeseries_id in (select id from required_timeseries) and "time" < $2 group by timeseries_id ) nlm inner join timeseries_measurement m1 on m1.time = nlm.time and m1.timeseries_id = nlm.timeseries_id ), next_high as ( - select nhm.timeseries_id as timeseries_id, nhm.time, m2.value + select nhm.timeseries_id as timeseries_id, json_build_object('time', nhm.time, 'value', m2.value) measurement from ( - select timeseries_id, min(time) as time + select timeseries_id, min("time") "time" from timeseries_measurement - where timeseries_id in (select id from required_timeseries) and time > $3 + where timeseries_id in (select id from required_timeseries) and "time" > $3 group by timeseries_id ) nhm inner join timeseries_measurement m2 on m2.time = nhm.time and m2.timeseries_id = nhm.timeseries_id @@ -412,12 +388,12 @@ func queryTimeseriesMeasurements(ctx context.Context, q *Queries, f ProcessMeasu false is_computed, null formula, coalesce(( - select array_agg(time, value order by time asc) + select json_agg(json_build_object('time', "time", 'value', value) order by time asc) from timeseries_measurement - where timeseries_id = rt.id and time >= $2 and time <= $3 - ), '{}') measurements, - (select nl.time, nl.value) next_measurement_low, - (select nh.time, nh.value) next_measurement_high + where timeseries_id = rt.id and "time" >= $2 and "time" <= $3 + ), '[]'::json) measurements, + nl.measurement next_measurement_low, + nh.measurement next_measurement_high from required_timeseries rt 
inner join timeseries ts on ts.id = rt.id inner join instrument i on i.id = ts.instrument_id @@ -432,7 +408,7 @@ func queryTimeseriesMeasurements(ctx context.Context, q *Queries, f ProcessMeasu slug variable, true is_computed, contents formula, - array[] measurements, + '[]'::json measurements, null next_measurement_low, null next_measurement_high from v_timeseries_computed diff --git a/api/internal/db/uploader.sql_gen.go b/api/internal/db/uploader.sql_gen.go index 153c0caf..206dadc7 100644 --- a/api/internal/db/uploader.sql_gen.go +++ b/api/internal/db/uploader.sql_gen.go @@ -13,7 +13,7 @@ import ( ) const uploaderConfigCreate = `-- name: UploaderConfigCreate :one -insert into uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) +insert into uploader_config (project_id, name, slug, description, created_at, created_by, type, tz_name) values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) returning id ` @@ -22,8 +22,8 @@ type UploaderConfigCreateParams struct { ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Description string `json:"description"` - CreateDate time.Time `json:"create_date"` - Creator uuid.UUID `json:"creator"` + CreatedAt time.Time `json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` Type UploaderConfigType `json:"type"` TzName string `json:"tz_name"` } @@ -33,8 +33,8 @@ func (q *Queries) UploaderConfigCreate(ctx context.Context, arg UploaderConfigCr arg.ProjectID, arg.Name, arg.Description, - arg.CreateDate, - arg.Creator, + arg.CreatedAt, + arg.CreatedBy, arg.Type, arg.TzName, ) @@ -53,28 +53,30 @@ func (q *Queries) UploaderConfigDelete(ctx context.Context, id uuid.UUID) error } const uploaderConfigListForProject = `-- name: UploaderConfigListForProject :many -select id, project_id, slug, name, description, create_date, creator, update_date, updater, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, 
comment_field_enabled, comment_field from uploader_config where project_id=$1 +select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field from v_uploader_config where project_id=$1 ` -func (q *Queries) UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]UploaderConfig, error) { +func (q *Queries) UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VUploaderConfig, error) { rows, err := q.db.Query(ctx, uploaderConfigListForProject, projectID) if err != nil { return nil, err } defer rows.Close() - items := []UploaderConfig{} + items := []VUploaderConfig{} for rows.Next() { - var i UploaderConfig + var i VUploaderConfig if err := rows.Scan( &i.ID, &i.ProjectID, &i.Slug, &i.Name, &i.Description, - &i.CreateDate, - &i.Creator, - &i.UpdateDate, - &i.Updater, + &i.CreatedAt, + &i.CreatedBy, + &i.CreatedByUsername, + &i.UpdatedBy, + &i.UpdatedAt, + &i.UpdatedByUsername, &i.Type, &i.TzName, &i.TimeField, @@ -132,8 +134,8 @@ const uploaderConfigUpdate = `-- name: UploaderConfigUpdate :exec update uploader_config set name=$2, description=$3, - update_date=$4, - updater=$5, + updated_by=$4, + updated_at=$5, type=$6, tz_name=$7 where id=$1 @@ -143,8 +145,8 @@ type UploaderConfigUpdateParams struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Description string `json:"description"` - UpdateDate *time.Time `json:"update_date"` - Updater *uuid.UUID `json:"updater"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` Type UploaderConfigType `json:"type"` TzName string `json:"tz_name"` } @@ -154,8 +156,8 @@ func (q *Queries) UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUp arg.ID, arg.Name, arg.Description, - arg.UpdateDate, - arg.Updater, + arg.UpdatedBy, + 
arg.UpdatedAt, arg.Type, arg.TzName, ) diff --git a/api/internal/dto/alert.go b/api/internal/dto/alert.go index da83b841..341c73a7 100644 --- a/api/internal/dto/alert.go +++ b/api/internal/dto/alert.go @@ -7,13 +7,13 @@ import ( ) type Alert struct { - Read *bool `json:"read,omitempty"` - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - Name string `json:"name"` - Body string `json:"body"` - CreateDate time.Time `json:"create_date" db:"create_date"` - Instruments dbJSONSlice[AlertConfigInstrument] `json:"instruments" db:"instruments"` + Read *bool `json:"read,omitempty"` + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + CreatedAt time.Time `json:"created_at"` + Instruments []AlertConfigInstrument `json:"instruments"` } diff --git a/api/internal/dto/alert_config.go b/api/internal/dto/alert_config.go index 84c8e9d6..81ba33bf 100644 --- a/api/internal/dto/alert_config.go +++ b/api/internal/dto/alert_config.go @@ -7,29 +7,29 @@ import ( ) type AlertConfig struct { - ID uuid.UUID `json:"id" db:"id"` - Name string `json:"name" db:"name"` - Body string `json:"body" db:"body"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` - AlertType string `json:"alert_type" db:"alert_type"` - StartDate time.Time `json:"start_date" db:"start_date"` - ScheduleInterval string `json:"schedule_interval" db:"schedule_interval"` - RemindInterval string `json:"remind_interval" db:"remind_interval"` - WarningInterval string `json:"warning_interval" db:"warning_interval"` - LastChecked *time.Time `json:"last_checked" 
db:"last_checked"` - LastReminded *time.Time `json:"last_reminded" db:"last_reminded"` - Instruments dbJSONSlice[AlertConfigInstrument] `json:"instruments" db:"instruments"` - AlertEmailSubscriptions dbJSONSlice[EmailAutocompleteResult] `json:"alert_email_subscriptions" db:"alert_email_subscriptions"` - MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts" db:"mute_consecutive_alerts"` - CreateNextSubmittalFrom *time.Time `json:"-" db:"-"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Body string `json:"body"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + AlertType string `json:"alert_type"` + StartedAt time.Time `json:"started_at"` + ScheduleInterval string `json:"schedule_interval"` + RemindInterval string `json:"remind_interval"` + WarningInterval string `json:"warning_interval"` + LastChecked *time.Time `json:"last_checked"` + LastReminded *time.Time `json:"last_reminded"` + Instruments []AlertConfigInstrument `json:"instruments"` + AlertEmailSubscriptions []EmailAutocompleteResult `json:"alert_email_subscriptions"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + CreateNextSubmittalFrom *time.Time `json:"-"` AuditInfo } type AlertConfigInstrument struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` } func (a *AlertConfig) GetToAddresses() []string { diff --git a/api/internal/dto/alert_evaluation_check.go b/api/internal/dto/alert_evaluation_check.go index 09c808ee..4bbcd970 100644 --- a/api/internal/dto/alert_evaluation_check.go +++ b/api/internal/dto/alert_evaluation_check.go @@ -45,7 +45,7 @@ func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg *config.Aler "Description: \"{{.AlertConfig.Body}}\"\r\n" + "Expected Evaluation Submittals:\r\n" 
+ "{{range .AlertChecks}}{{if or .ShouldAlert .ShouldWarn}}" + - "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + + "\t• {{.Submittal.CreatedAt.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueAt.Format \"Jan 02 2006 15:04:05 UTC\"}}" + "{{if .ShouldAlert}} (missing) {{else if .ShouldWarn}} (warning) {{end}}\r\n{{end}}{{end}}", } templContent, err := email.CreateEmailTemplateContent(preformatted) diff --git a/api/internal/dto/alert_measurement_check.go b/api/internal/dto/alert_measurement_check.go index 53a3bbde..27032803 100644 --- a/api/internal/dto/alert_measurement_check.go +++ b/api/internal/dto/alert_measurement_check.go @@ -52,7 +52,7 @@ func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg *config.Aler "Description: \"{{.AlertConfig.Body}}\"\r\n" + "Expected Measurement Submittals:\r\n" + "{{range .AlertChecks}}" + - "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + + "\t• {{.Submittal.CreatedAt.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueAt.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + "{{range .AffectedTimeseries}}" + "\t\t• {{.InstrumentName}}: {{.TimeseriesName}} ({{.Status}})\r\n" + "{{end}}\r\n{{end}}", diff --git a/api/internal/dto/common.go b/api/internal/dto/common.go index 0713990b..11c880aa 100644 --- a/api/internal/dto/common.go +++ b/api/internal/dto/common.go @@ -9,12 +9,12 @@ import ( ) type AuditInfo struct { - CreatorID uuid.UUID `json:"creator_id" db:"creator"` - CreatorUsername *string `json:"creator_username,omitempty" db:"creator_username"` - CreateDate time.Time `json:"create_date" db:"create_date"` - UpdaterID *uuid.UUID `json:"updater_id" db:"updater"` - UpdaterUsername *string `json:"updater_username,omitempty" db:"updater_username"` - UpdateDate *time.Time `json:"update_date" db:"update_date"` + CreatedBy uuid.UUID 
`json:"created_by"` + CreatedByUsername *string `json:"created_by_username,omitempty"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedByUsername *string `json:"updated_by_username,omitempty"` + UpdatedAt *time.Time `json:"updated_at"` } type Opts map[string]interface{} diff --git a/api/internal/dto/datalogger.go b/api/internal/dto/datalogger.go index fbe777b7..ef711b7e 100644 --- a/api/internal/dto/datalogger.go +++ b/api/internal/dto/datalogger.go @@ -41,7 +41,7 @@ type DataloggerTable struct { type DataloggerTablePreview struct { DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` - UpdateDate time.Time `json:"update_date" db:"update_date"` + UpdatedAt time.Time `json:"updated_at"` Preview json.RawMessage `json:"preview" db:"preview"` } diff --git a/api/internal/dto/district_rollup.go b/api/internal/dto/district_rollup.go index b16527ea..7e223289 100644 --- a/api/internal/dto/district_rollup.go +++ b/api/internal/dto/district_rollup.go @@ -12,7 +12,7 @@ type DistrictRollup struct { DistrictInitials *string `json:"district_initials" db:"district_initials"` ProjectName string `json:"project_name" db:"project_name"` ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Month time.Time `json:"month" db:"the_month"` + Month time.Time `json:"month" db:"month"` ExpectedTotalSubmittals int `json:"expected_total_submittals" db:"expected_total_submittals"` ActualTotalSubmittals int `json:"actual_total_submittals" db:"actual_total_submittals"` RedSubmittals int `json:"red_submittals" db:"red_submittals"` diff --git a/api/internal/dto/evaluation.go b/api/internal/dto/evaluation.go index fbbdb25b..a8c4cae1 100644 --- a/api/internal/dto/evaluation.go +++ b/api/internal/dto/evaluation.go @@ -7,21 +7,21 @@ import ( ) type Evaluation struct { - ID uuid.UUID `json:"id" db:"id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - 
AlertConfigID *uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - AlertConfigName *string `json:"alert_config_name" db:"alert_config_name"` - SubmittalID *uuid.UUID `json:"submittal_id" db:"submittal_id"` - Name string `json:"name" db:"name"` - Body string `json:"body" db:"body"` - StartDate time.Time `json:"start_date" db:"start_date"` - EndDate time.Time `json:"end_date" db:"end_date"` - Instruments dbJSONSlice[EvaluationInstrument] `json:"instruments" db:"instruments"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + AlertConfigID *uuid.UUID `json:"alert_config_id"` + AlertConfigName *string `json:"alert_config_name"` + SubmittalID *uuid.UUID `json:"submittal_id"` + Name string `json:"name"` + Body string `json:"body"` + StartedAt time.Time `json:"started_at"` + EndedAt time.Time `json:"ended_at"` + Instruments []EvaluationInstrument `json:"instruments"` AuditInfo } type EvaluationInstrument struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` } diff --git a/api/internal/dto/instrument.go b/api/internal/dto/instrument.go index 80ca4ca4..41e6bfa6 100644 --- a/api/internal/dto/instrument.go +++ b/api/internal/dto/instrument.go @@ -1,36 +1,36 @@ package dto import ( + "encoding/json" "time" - "github.com/USACE/instrumentation-api/api/internal/db" "github.com/google/uuid" ) type Instrument struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - AwareID *uuid.UUID `json:"aware_id,omitempty"` - Groups dbSlice[uuid.UUID] `json:"groups" db:"groups"` - Constants dbSlice[uuid.UUID] `json:"constants" db:"constants"` - AlertConfigs dbSlice[uuid.UUID] `json:"alert_configs" db:"alert_configs"` - StatusID uuid.UUID `json:"status_id" db:"status_id"` - Status string `json:"status"` - 
StatusTime time.Time `json:"status_time" db:"status_time"` - Deleted bool `json:"-"` - TypeID uuid.UUID `json:"type_id" db:"type_id"` - Type string `json:"type"` - Icon *string `json:"icon" db:"icon"` - Geometry db.Geometry `json:"geometry,omitempty"` - Station *int32 `json:"station"` - StationOffset *int32 `json:"offset" db:"station_offset"` - Projects dbJSONSlice[IDSlugName] `json:"projects" db:"projects"` - NIDID *string `json:"nid_id" db:"nid_id"` - USGSID *string `json:"usgs_id" db:"usgs_id"` - HasCwms bool `json:"has_cwms" db:"has_cwms"` - ShowCwmsTab bool `json:"show_cwms_tab" db:"show_cwms_tab"` - Opts Opts `json:"opts" db:"opts"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + AwareID *uuid.UUID `json:"aware_id,omitempty"` + Groups []uuid.UUID `json:"groups"` + Constants []uuid.UUID `json:"constants"` + AlertConfigs []uuid.UUID `json:"alert_configs"` + StatusID uuid.UUID `json:"status_id"` + Status string `json:"status"` + StatusTime time.Time `json:"status_time"` + Deleted bool `json:"-"` + TypeID uuid.UUID `json:"type_id"` + Type string `json:"type"` + Icon *string `json:"icon"` + Geometry json.RawMessage `json:"geometry,omitempty"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"offset"` + Projects []IDSlugName `json:"projects"` + NIDID *string `json:"nid_id"` + USGSID *string `json:"usgs_id"` + HasCwms bool `json:"has_cwms"` + ShowCwmsTab bool `json:"show_cwms_tab"` + Opts Opts `json:"opts"` AuditInfo } @@ -53,7 +53,7 @@ type InstrumentCount struct { } type InstrumentsProjectCount struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` - ProjectCount int `json:"project_count" db:"project_count"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` + ProjectCount int `json:"project_count"` } diff --git a/api/internal/dto/instrument_status.go 
b/api/internal/dto/instrument_status.go index 97e4c2ca..52521ada 100644 --- a/api/internal/dto/instrument_status.go +++ b/api/internal/dto/instrument_status.go @@ -1,8 +1,10 @@ package dto import ( + "encoding/json" "time" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" ) @@ -16,3 +18,22 @@ type InstrumentStatus struct { type InstrumentStatusCollection struct { Items []InstrumentStatus } + +// UnmarshalJSON implements the UnmarshalJSON interface +func (c *InstrumentStatusCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var s InstrumentStatus + if err := json.Unmarshal(b, &s); err != nil { + return err + } + c.Items = []InstrumentStatus{s} + default: + c.Items = make([]InstrumentStatus, 0) + } + return nil +} diff --git a/api/internal/dto/measurement_inclinometer.go b/api/internal/dto/measurement_inclinometer.go index d85180b8..96cafc53 100644 --- a/api/internal/dto/measurement_inclinometer.go +++ b/api/internal/dto/measurement_inclinometer.go @@ -11,11 +11,11 @@ import ( // DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) type InclinometerMeasurement struct { - TimeseriesID uuid.UUID `json:"-" db:"timeseries_id"` + TimeseriesID uuid.UUID `json:"-"` Time time.Time `json:"time"` Values types.JSONText `json:"values"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date" db:"create_date"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` } // DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) diff --git a/api/internal/dto/project.go b/api/internal/dto/project.go index fc6d38cf..aab3a17c 100644 --- a/api/internal/dto/project.go +++ b/api/internal/dto/project.go @@ -28,8 +28,4 @@ type Project struct { AuditInfo } -type ProjectCount 
struct { - ProjectCount int `json:"project_count"` -} - type ProjectCollection []Project diff --git a/api/internal/dto/report_config.go b/api/internal/dto/report_config.go index ecedf26a..f65370fe 100644 --- a/api/internal/dto/report_config.go +++ b/api/internal/dto/report_config.go @@ -9,28 +9,28 @@ import ( ) type ReportConfig struct { - ID uuid.UUID `json:"id" db:"id"` - Slug string `json:"slug" db:"slug"` - Name string `json:"name" db:"name"` - Description string `json:"description" db:"description"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - DistrictName *string `json:"district_name" db:"district_name"` - PlotConfigs dbJSONSlice[IDSlugName] `json:"plot_configs" db:"plot_configs"` - GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides" db:"global_overrides"` + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + DistrictName *string `json:"district_name"` + PlotConfigs []IDSlugName `json:"plot_configs"` + GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides"` AuditInfo } type ReportDownloadJob struct { - ID uuid.UUID `json:"id" db:"id"` - ReportConfigID uuid.UUID `json:"report_config_id" db:"report_config_id"` - Creator uuid.UUID `json:"creator" db:"creator"` - CreateDate time.Time `json:"create_date" db:"create_date"` - Status string `json:"status" db:"status"` - FileKey *string `json:"file_key" db:"file_key"` - FileExpiry *time.Time `json:"file_expiry" db:"file_expiry"` - Progress int `json:"progress" db:"progress"` - ProgressUpdateDate time.Time `json:"progress_update_date" db:"progress_update_date"` + ID uuid.UUID `json:"id"` + ReportConfigID uuid.UUID `json:"report_config_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + Status string `json:"status"` + 
FileKey *string `json:"file_key"` + FileExpiry *time.Time `json:"file_expiry"` + Progress int `json:"progress"` + ProgressUpdatedAt time.Time `json:"progress_updated_at"` } func (o *ReportConfigGlobalOverrides) Scan(src interface{}) error { diff --git a/api/internal/dto/submittal.go b/api/internal/dto/submittal.go index 2a16c1c9..a2538168 100644 --- a/api/internal/dto/submittal.go +++ b/api/internal/dto/submittal.go @@ -7,17 +7,17 @@ import ( ) type Submittal struct { - ID uuid.UUID `json:"id" db:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - AlertConfigName string `json:"alert_config_name" db:"alert_config_name"` - AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` - AlertTypeName string `json:"alert_type_name" db:"alert_type_name"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - SubmittalStatusID uuid.UUID `json:"submittal_status_id" db:"submittal_status_id"` - SubmittalStatusName string `json:"submittal_status_name" db:"submittal_status_name"` - CompletionDate *time.Time `json:"completion_date" db:"completion_date"` - CreateDate time.Time `json:"create_date" db:"create_date"` - DueDate time.Time `json:"due_date" db:"due_date"` - MarkedAsMissing bool `json:"marked_as_missing" db:"marked_as_missing"` - WarningSent bool `json:"warning_sent" db:"warning_sent"` + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + AlertConfigName string `json:"alert_config_name"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + AlertTypeName string `json:"alert_type_name"` + ProjectID uuid.UUID `json:"project_id"` + SubmittalStatusID uuid.UUID `json:"submittal_status_id"` + SubmittalStatusName string `json:"submittal_status_name"` + CompletedAt *time.Time `json:"completed_at"` + CreatedAt time.Time `json:"created_at"` + DueAt time.Time `json:"due_at"` + MarkedAsMissing bool `json:"marked_as_missing"` + WarningSent bool `json:"warning_sent"` } diff --git a/api/internal/handler/alert.go 
b/api/internal/handler/alert.go index aef238e8..90b7e486 100644 --- a/api/internal/handler/alert.go +++ b/api/internal/handler/alert.go @@ -4,7 +4,6 @@ import ( "net/http" "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" @@ -19,7 +18,7 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.Alert +// @Success 200 {array} db.VAlert // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -36,21 +35,21 @@ func (h *ApiHandler) ListAlertsForInstrument(c echo.Context) error { return c.JSON(http.StatusOK, aa) } -// ListMyAlerts godoc +// ListAlertsForProfile godoc // // @Summary lists subscribed alerts for a single user // @Description list all alerts a profile is subscribed to // @Tags alert // @Produce json // @Param key query string false "api key" -// @Success 200 {array} dto.Alert +// @Success 200 {array} db.AlertListForProfileRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts [get] // @Security Bearer -func (h *ApiHandler) ListMyAlerts(c echo.Context) error { - p := c.Get("profile").(dto.Profile) +func (h *ApiHandler) ListAlertsForProfile(c echo.Context) error { + p := c.Get("profile").(db.VProfile) profileID := p.ID aa, err := h.DBService.AlertListForProfile(c.Request().Context(), profileID) if err != nil { @@ -68,14 +67,14 @@ func (h *ApiHandler) ListMyAlerts(c echo.Context) error { // @Produce json // @Param alert_id path string true "alert uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} dto.Alert +// @Success 201 {object} db.AlertGetRow // @Failure 400 {object} echo.HTTPError // 
@Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts/{alert_id}/read [post] // @Security Bearer func (h *ApiHandler) DoAlertRead(c echo.Context) error { - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID alertID, err := uuid.Parse(c.Param("alert_id")) if err != nil { @@ -88,7 +87,7 @@ func (h *ApiHandler) DoAlertRead(c echo.Context) error { if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, a) + return c.JSON(http.StatusCreated, a) } // DoAlertUnread godoc @@ -100,14 +99,14 @@ func (h *ApiHandler) DoAlertRead(c echo.Context) error { // @Produce json // @Param alert_id path string true "alert uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} dto.Alert +// @Success 200 {object} db.AlertGetRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts/{alert_id}/unread [post] // @Security Bearer func (h *ApiHandler) DoAlertUnread(c echo.Context) error { - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID alertID, err := uuid.Parse(c.Param("alert_id")) if err != nil { diff --git a/api/internal/handler/alert_config.go b/api/internal/handler/alert_config.go index c361af23..c5dc51d0 100644 --- a/api/internal/handler/alert_config.go +++ b/api/internal/handler/alert_config.go @@ -18,7 +18,7 @@ import ( // @Tags alert-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} dto.AlertConfig +// @Success 200 {array} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -57,7 +57,7 @@ func (h *ApiHandler) ListAlertConfigsForProject(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param 
instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.AlertConfig +// @Success 200 {array} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -81,7 +81,7 @@ func (h *ApiHandler) ListInstrumentAlertConfigs(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param alert_config_id path string true "alert config uuid" Format(uuid) -// @Success 200 {object} dto.AlertConfig +// @Success 200 {object} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -107,7 +107,7 @@ func (h *ApiHandler) GetAlertConfig(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param alert_config body dto.AlertConfig true "alert config payload" // @Param key query string false "api key" -// @Success 200 {object} dto.AlertConfig +// @Success 200 {object} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -122,8 +122,8 @@ func (h *ApiHandler) CreateAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - profile := c.Get("profile").(dto.Profile) - ac.ProjectID, ac.CreatorID, ac.CreateDate = projectID, profile.ID, time.Now() + profile := c.Get("profile").(db.VProfile) + ac.ProjectID, ac.CreatedBy, ac.CreatedAt = projectID, profile.ID, time.Now() acNew, err := h.DBService.AlertConfigCreate(c.Request().Context(), ac) if err != nil { @@ -142,7 +142,7 @@ func (h *ApiHandler) CreateAlertConfig(c echo.Context) error { // @Param alert_config_id path string true "alert config uuid" Format(uuid) // @Param alert_config body dto.AlertConfig true "alert config payload" // @Param key query string false "api key" -// @Success 200 {array} dto.AlertConfig +// @Success 200 {object} 
db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -157,9 +157,9 @@ func (h *ApiHandler) UpdateAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - ac.UpdaterID, ac.UpdateDate = &p.ID, &t + ac.UpdatedBy, ac.UpdatedAt = &p.ID, &t aUpdated, err := h.DBService.AlertConfigUpdate(c.Request().Context(), acID, ac) if err != nil { return httperr.InternalServerError(err) @@ -175,7 +175,7 @@ func (h *ApiHandler) UpdateAlertConfig(c echo.Context) error { // @Param project_id path string true "Project ID" Format(uuid) // @Param alert_config_id path string true "instrument uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} dto.AlertConfig +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/alert_config_test.go b/api/internal/handler/alert_config_test.go index 8600f082..d8a73af3 100644 --- a/api/internal/handler/alert_config_test.go +++ b/api/internal/handler/alert_config_test.go @@ -36,22 +36,22 @@ var alertConfigSchema = fmt.Sprintf(`{ "project_id": { "type": "string" }, "alert_type_id": { "type": "string" }, "alert_type": { "type": "string" }, - "start_date": { "type": "string" }, + "started_at": { "type": "string" }, "schedule_interval": { "type": "string" }, "mute_consecutive_alerts": { "type": "boolean" }, "remind_interval": { "type": ["string", "null"] }, "warning_interval": { "type": ["string", "null"] }, - "last_checked": { "type": ["string", "null"], "format": "date-time" }, - "last_reminded": { "type": ["string", "null"], "format": "date-time" }, + "last_checked_at": { "type": ["string", "null"], "format": "date-time" }, + "last_reminded_at": { "type": ["string", "null"], 
"format": "date-time" }, "instruments": { "type": "array", "items": %s }, "alert_email_subscriptions": { "type": "array", "items": %s }, "alert_status": { "type": "string" }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" } + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" } }, "additionalProperties": true }`, alertConfigInstrumentSchema, alertConfigEmailSchema) @@ -73,7 +73,7 @@ const createAlertConfigBody = `{ "name": "New Test Alert Config", "body": "New Test Alert Config Description", "alert_type_id": "97e7a25c-d5c7-4ded-b272-1bb6e5914fe3", - "start_date": "2023-05-16T13:19:41.441328Z", + "started_at": "2023-05-16T13:19:41.441328Z", "schedule_interval": "P1D", "mute_consecutive_alerts": true, "warning_interval": "PT1H", @@ -107,7 +107,7 @@ const updateAlertConfigBody = `{ "name": "Updated Test Alert 1", "body": "Updated Alert for demonstration purposes.", "project_id": "5b6f4f37-7755-4cf9-bd02-94f1e9bc5984", - "start_date": "2023-05-16T13:19:41.441328Z", + "started_at": "2023-05-16T13:19:41.441328Z", "schedule_interval": "P3D", "mute_consecutive_alerts": false, "remind_interval": "P1D", diff --git a/api/internal/handler/alert_subscription.go b/api/internal/handler/alert_subscription.go index 1e0a4eae..ad98ec81 100644 --- a/api/internal/handler/alert_subscription.go +++ b/api/internal/handler/alert_subscription.go @@ -21,14 +21,14 @@ import ( // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param 
alert_config_id path string true "alert config uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} dto.AlertSubscription +// @Success 201 {object} db.AlertProfileSubscription // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/instruments/{instrument_id}/alert_configs/{alert_config_id}/subscribe [post] // @Security Bearer func (h *ApiHandler) SubscribeProfileToAlerts(c echo.Context) error { - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID alertConfigID, err := uuid.Parse(c.Param("alert_config_id")) @@ -58,7 +58,7 @@ func (h *ApiHandler) SubscribeProfileToAlerts(c echo.Context) error { // @Router /projects/{project_id}/instruments/{instrument_id}/alert_configs/{alert_config_id}/unsubscribe [post] // @Security Bearer func (h *ApiHandler) UnsubscribeProfileToAlerts(c echo.Context) error { - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID alertConfigID, err := uuid.Parse(c.Param("alert_config_id")) @@ -80,14 +80,14 @@ func (h *ApiHandler) UnsubscribeProfileToAlerts(c echo.Context) error { // @Tags alert-subscription // @Produce json // @Param key query string false "api key" -// @Success 200 {array} dto.AlertSubscription +// @Success 200 {array} db.AlertProfileSubscription // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alert_subscriptions [get] // @Security Bearer func (h *ApiHandler) ListMyAlertSubscriptions(c echo.Context) error { - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID ss, err := h.DBService.AlertSubscriptionListForProfile(c.Request().Context(), profileID) if err != nil { @@ -105,7 +105,7 @@ func (h *ApiHandler) ListMyAlertSubscriptions(c echo.Context) error { // @Param alert_subscription_id path string 
true "alert subscription id" Format(uuid) // @Param alert_subscription body dto.AlertSubscription true "alert subscription payload" // @Param key query string false "api key" -// @Success 200 {array} dto.AlertSubscription +// @Success 200 {array} db.AlertProfileSubscription // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -122,7 +122,7 @@ func (h *ApiHandler) UpdateMyAlertSubscription(c echo.Context) error { } s.ID = sID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t, err := h.DBService.AlertSubscriptionGet(c.Request().Context(), sID) if err != nil { return httperr.InternalServerError(err) diff --git a/api/internal/handler/alert_test.go b/api/internal/handler/alert_test.go index c3dc5758..237e0ec7 100644 --- a/api/internal/handler/alert_test.go +++ b/api/internal/handler/alert_test.go @@ -27,9 +27,9 @@ var alertSchema = fmt.Sprintf(`{ "instruments": { "type": "array", "items": %s }, "name": { "type": "string" }, "body": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" } + "created_at": { "type": "string", "format": "date-time" } }, - "required": ["id", "alert_config_id", "project_id", "project_name", "instruments", "name", "body", "create_date"], + "required": ["id", "alert_config_id", "project_id", "project_name", "instruments", "name", "body", "created_at"], "additionalProperties": true }`, alertSubAlertConfigInstrumentSchema) @@ -65,7 +65,7 @@ func TestAlerts(t *testing.T) { Name: "DoAlertRead", URL: fmt.Sprintf("/my_alerts/%s/read", testAlertSubAlertID), Method: http.MethodPost, - ExpectedStatus: http.StatusOK, + ExpectedStatus: http.StatusCreated, ExpectedSchema: objSchema, }, { diff --git a/api/internal/handler/autocomplete.go b/api/internal/handler/autocomplete.go index 79d804aa..28d0cb3c 100644 --- a/api/internal/handler/autocomplete.go +++ b/api/internal/handler/autocomplete.go @@ -16,7 +16,7 @@ import ( // @Tags 
autocomplete // @Produce json // @Param q query string true "search query string" -// @Success 200 {array} dto.EmailAutocompleteResult +// @Success 200 {array} db.EmailAutocompleteListRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/aware.go b/api/internal/handler/aware.go index 794061e2..2bd0205f 100644 --- a/api/internal/handler/aware.go +++ b/api/internal/handler/aware.go @@ -3,8 +3,10 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/labstack/echo/v4" ) @@ -13,7 +15,7 @@ import ( // @Summary lists alert configs for a project // @Tags aware // @Produce json -// @Success 200 {array} dto.AwareParameter +// @Success 200 {array} db.AwareParameterListRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -31,7 +33,7 @@ func (h *ApiHandler) ListAwareParameters(c echo.Context) error { // @Summary lists alert configs for a project // @Tags aware // @Produce json -// @Success 200 {array} dto.AwarePlatformParameterConfig +// @Success 200 {array} service.AwarePlatformParameterConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/collection_groups.go b/api/internal/handler/collection_groups.go index 6b15966b..9aaf25ed 100644 --- a/api/internal/handler/collection_groups.go +++ b/api/internal/handler/collection_groups.go @@ -20,7 +20,7 @@ import ( // @Tags collection-groups // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} dto.AlertConfig +// @Success 200 {array} db.CollectionGroup // @Failure 400 
{object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -44,7 +44,7 @@ func (h *ApiHandler) ListCollectionGroups(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param collection_group_id path string true "collection group uuid" Format(uuid) -// @Success 200 {object} dto.CollectionGroupDetails +// @Success 200 {object} db.VCollectionGroupDetail // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -74,7 +74,7 @@ func (h *ApiHandler) GetCollectionGroupDetails(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param collection_group body dto.CollectionGroup true "collection group payload" // @Param key query string false "api key" -// @Success 200 {array} dto.CollectionGroup +// @Success 200 {array} db.CollectionGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -92,8 +92,8 @@ func (h *ApiHandler) CreateCollectionGroup(c echo.Context) error { return httperr.MalformedID(err) } cg.ProjectID = pID - p := c.Get("profile").(dto.Profile) - cg.CreatorID, cg.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + cg.CreatedBy, cg.CreatedAt = p.ID, time.Now() cgNew, err := h.DBService.CollectionGroupCreate(c.Request().Context(), cg) if err != nil { @@ -111,7 +111,7 @@ func (h *ApiHandler) CreateCollectionGroup(c echo.Context) error { // @Param collection_group_id path string true "collection group uuid" // @Param collection_group body dto.CollectionGroup true "collection group payload" // @Param key query string false "api key" -// @Success 200 {object} dto.CollectionGroup +// @Success 200 {object} db.CollectionGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -135,9 +135,9 @@ func (h 
*ApiHandler) UpdateCollectionGroup(c echo.Context) error { } cg.ID = cgID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - cg.UpdaterID, cg.UpdateDate = &p.ID, &t + cg.UpdatedBy, cg.UpdatedAt = &p.ID, &t cgUpdated, err := h.DBService.CollectionGroupUpdate(c.Request().Context(), cg) if err != nil { return httperr.InternalServerError(err) @@ -186,7 +186,7 @@ func (h *ApiHandler) DeleteCollectionGroup(c echo.Context) error { // @Param collection_group_id path string true "collection group uuid" Format(uuid) // @Param timeseries_id path string true "timeseries uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} map[string]interface{} +// @Success 201 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/collection_groups_test.go b/api/internal/handler/collection_groups_test.go index bd29efb3..62ececc1 100644 --- a/api/internal/handler/collection_groups_test.go +++ b/api/internal/handler/collection_groups_test.go @@ -16,13 +16,13 @@ const collectionGroupSchema = `{ "project_id": { "type": "string" }, "slug": { "type": "string" }, "name": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "sort_order": { "type": "integer" } }, - "required": ["id", "project_id", "name", "slug", "creator_id", "create_date", "updater_id", "update_date", "sort_order"], + "required": ["id", "project_id", "name", "slug", "created_by", "created_at", "updated_by", "updated_at", 
"sort_order"], "additionalProperties": false }` @@ -40,10 +40,10 @@ const collectionGroupDetailsSchema = `{ "project_id": { "type": "string" }, "slug": { "type": "string" }, "name": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "sort_order": { "type": "integer" }, "timeseries": { "type": "array", @@ -72,7 +72,7 @@ const collectionGroupDetailsSchema = `{ } } }, - "required": ["id", "project_id", "name", "slug", "creator_id", "create_date", "updater_id", "update_date", "timeseries", "sort_order"], + "required": ["id", "project_id", "name", "slug", "created_by", "created_at", "updated_by", "updated_at", "timeseries", "sort_order"], "additionalProperties": false }` diff --git a/api/internal/handler/datalogger.go b/api/internal/handler/datalogger.go index 363d1a06..ed30ec99 100644 --- a/api/internal/handler/datalogger.go +++ b/api/internal/handler/datalogger.go @@ -9,6 +9,7 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -19,7 +20,7 @@ import ( // @Tags datalogger // @Produce json // @Param key query string false "api key" -// @Success 200 {array} dto.Datalogger +// @Success 200 {array} db.VDatalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -58,7 +59,7 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { // @Produce 
json // @Param datalogger body dto.Datalogger true "datalogger payload" // @Param key query string false "api key" -// @Success 200 {array} dto.DataloggerWithKey +// @Success 200 {object} service.DataloggerWithKey // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -71,8 +72,8 @@ func (h *ApiHandler) CreateDatalogger(c echo.Context) error { return httperr.MalformedBody(err) } - p := c.Get("profile").(dto.Profile) - n.CreatorID = p.ID + p := c.Get("profile").(db.VProfile) + n.CreatedBy = p.ID if n.Name == "" { return httperr.BadRequest(errors.New("valid `name` field required")) @@ -111,7 +112,7 @@ func (h *ApiHandler) CreateDatalogger(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} dto.DataloggerWithKey +// @Success 200 {object} service.DataloggerWithKey // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -134,9 +135,9 @@ func (h *ApiHandler) CycleDataloggerKey(c echo.Context) error { return httperr.NotFound(errors.New("datalogger does not exist")) } - profile := c.Get("profile").(dto.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - u.UpdaterID, u.UpdateDate = &profile.ID, &t + u.UpdatedBy, u.UpdatedAt = &profile.ID, &t dl, err := h.DBService.DataloggerHashUpdate(ctx, u) if err != nil { @@ -153,7 +154,7 @@ func (h *ApiHandler) CycleDataloggerKey(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} dto.Datalogger +// @Success 200 {object} db.VDatalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -180,7 +181,7 @@ func (h *ApiHandler) GetDatalogger(c echo.Context) error { 
// @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger body dto.Datalogger true "datalogger payload" // @Param key query string false "api key" -// @Success 200 {object} dto.Datalogger +// @Success 200 {object} db.VDatalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -207,9 +208,9 @@ func (h *ApiHandler) UpdateDatalogger(c echo.Context) error { return httperr.NotFound(errors.New("datalogger does not exist")) } - profile := c.Get("profile").(dto.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - u.UpdaterID, u.UpdateDate = &profile.ID, &t + u.UpdatedBy, u.UpdatedAt = &profile.ID, &t dlUpdated, err := h.DBService.DataloggerUpdate(ctx, u) if err != nil { @@ -246,12 +247,13 @@ func (h *ApiHandler) DeleteDatalogger(c echo.Context) error { if !exists { return httperr.NotFound(errors.New("datalogger does not exist")) } - profile := c.Get("profile").(dto.Profile) + profile := c.Get("profile").(db.VProfile) + t := time.Now() if err := h.DBService.DataloggerDelete(ctx, db.DataloggerDeleteParams{ - ID: dlID, - Updater: profile.ID, - UpdateDate: time.Now(), + ID: dlID, + UpdatedBy: &profile.ID, + UpdatedAt: &t, }); err != nil { return httperr.InternalServerError(err) } @@ -267,7 +269,7 @@ func (h *ApiHandler) DeleteDatalogger(c echo.Context) error { // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} dto.DataloggerTablePreview +// @Success 200 {object} db.VDataloggerPreview // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -297,7 +299,7 @@ func (h *ApiHandler) GetDataloggerTablePreview(c echo.Context) error { // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param 
datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} dto.DataloggerTablePreview +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/datalogger_telemetry.go b/api/internal/handler/datalogger_telemetry.go index 7d880f84..22e6df4f 100644 --- a/api/internal/handler/datalogger_telemetry.go +++ b/api/internal/handler/datalogger_telemetry.go @@ -54,7 +54,7 @@ func (h *TelemetryHandler) CreateOrUpdateDataloggerMeasurements(c echo.Context) var prv dto.DataloggerTablePreview prv.Preview = rawJSON - prv.UpdateDate = time.Now() + prv.UpdatedAt = time.Now() if _, err := h.DBService.DataloggerTablePreviewUpdate(ctx, dl.ID, preparse, prv); err != nil { return httperr.InternalServerError(err) @@ -115,7 +115,7 @@ func getCR6Handler(h *TelemetryHandler, dl db.VDatalogger, rawJSON []byte) echo. 
var prv dto.DataloggerTablePreview prv.Preview = rawJSON - prv.UpdateDate = time.Now() + prv.UpdatedAt = time.Now() tableID, err := h.DBService.DataloggerTablePreviewUpdate(ctx, dl.ID, tn, prv) if err != nil { diff --git a/api/internal/handler/datalogger_test.go b/api/internal/handler/datalogger_test.go index 10160b11..de4fb707 100644 --- a/api/internal/handler/datalogger_test.go +++ b/api/internal/handler/datalogger_test.go @@ -24,12 +24,12 @@ var dataloggerSchema = fmt.Sprintf(`{ "name": { "type": "string" }, "sn": { "type": "string" }, "project_id": { "type": "string" }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": "string" }, - "update_date": { "type": ["string", "null"] }, + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": "string" }, + "updated_at": { "type": ["string", "null"] }, "slug": { "type": "string" }, "model_id": { "type": "string" }, "model": { "type": "string" }, @@ -42,9 +42,9 @@ var dataloggerSchema = fmt.Sprintf(`{ "name", "sn", "project_id", - "creator_id", - "creator_username", - "create_date", + "created_by", + "created_by_username", + "created_at", "slug", "model_id", "model", @@ -63,7 +63,7 @@ const dataloggerPreviewSchema = `{ "type": "object", "properties": { "datalogger_table_id": { "type": "string" }, - "update_date": { "type": "string" }, + "updated_at": { "type": "string" }, "preview": { "type": ["object", "array", "null"] } } }` diff --git a/api/internal/handler/district_rollup.go b/api/internal/handler/district_rollup.go index 8432c7fc..7c4ea11e 100644 --- a/api/internal/handler/district_rollup.go +++ b/api/internal/handler/district_rollup.go @@ -19,7 +19,7 @@ const timeRangeErrMessage = "maximum requested time range exceeded 
(5 years)" // @Tags district-rollup // @Produce json // @Param project_id path string true "project id" Format(uuid) -// @Success 200 {array} dto.DistrictRollup +// @Success 200 {array} db.VDistrictRollup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -56,7 +56,7 @@ func (h *ApiHandler) ListProjectEvaluationDistrictRollup(c echo.Context) error { // @Tags district-rollup // @Produce json // @Param project_id path string true "project id" Format(uuid) -// @Success 200 {array} dto.DistrictRollup +// @Success 200 {array} db.VDistrictRollup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/domain.go b/api/internal/handler/domain.go index 5d2a3dea..ed2977a1 100644 --- a/api/internal/handler/domain.go +++ b/api/internal/handler/domain.go @@ -3,7 +3,9 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/labstack/echo/v4" ) @@ -12,7 +14,7 @@ import ( // @Summary lists all domains // @Tags domain // @Produce json -// @Success 200 {array} dto.Domain +// @Success 200 {array} db.VDomain // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -30,7 +32,7 @@ func (h *ApiHandler) ListDomains(c echo.Context) error { // @Summary Get map with domain group as key // @Tags domain // @Produce json -// @Success 200 {object} dto.DomainMap +// @Success 200 {object} service.DomainMap // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -48,7 +50,7 @@ func (h *ApiHandler) GetDomainMap(c echo.Context) error { // @Summary lists time zone options // @Tags domain // @Produce json -// @Success 200 {array} 
dto.TimezoneOption +// @Success 200 {array} db.PgTimezoneNamesListRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/equivalency_table.go b/api/internal/handler/equivalency_table.go index 9ff947d0..8e03259b 100644 --- a/api/internal/handler/equivalency_table.go +++ b/api/internal/handler/equivalency_table.go @@ -5,6 +5,7 @@ import ( "fmt" "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" @@ -19,7 +20,7 @@ import ( // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} dto.EquivalencyTable +// @Success 200 {array} db.VDataloggerEquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -63,7 +64,7 @@ func (h *ApiHandler) GetEquivalencyTable(c echo.Context) error { // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param equivalency_table body dto.EquivalencyTable true "equivalency table payload" // @Param key query string false "api key" -// @Success 200 {object} dto.EquivalencyTable +// @Success 200 {object} db.VDataloggerEquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -137,7 +138,7 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param equivalency_table body dto.EquivalencyTable true "equivalency table payload" // @Param key query string false "api key" -// @Success 200 {object} dto.EquivalencyTable +// @Success 200 
{object} db.VDataloggerEquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/evaluation.go b/api/internal/handler/evaluation.go index e45de8b1..374e6b2b 100644 --- a/api/internal/handler/evaluation.go +++ b/api/internal/handler/evaluation.go @@ -18,7 +18,7 @@ import ( // @Tags evaluation // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} dto.Evaluation +// @Success 200 {array} db.VEvaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -82,7 +82,7 @@ func (h *ApiHandler) ListInstrumentEvaluations(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation_id path string true "evaluation uuid" Format(uuid) -// @Success 200 {object} dto.Evaluation +// @Success 200 {object} db.VEvaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -107,7 +107,7 @@ func (h *ApiHandler) GetEvaluation(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation body dto.Evaluation true "evaluation payload" // @Param key query string false "api key" -// @Success 200 {object} dto.Evaluation +// @Success 201 {object} db.VEvaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -122,8 +122,8 @@ func (h *ApiHandler) CreateEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - profile := c.Get("profile").(dto.Profile) - ev.ProjectID, ev.CreatorID, ev.CreateDate = projectID, profile.ID, time.Now() + profile := c.Get("profile").(db.VProfile) + ev.ProjectID, ev.CreatedBy, ev.CreatedAt = projectID, profile.ID, time.Now() evNew, err := 
h.DBService.EvaluationCreate(c.Request().Context(), ev) if err != nil { @@ -141,7 +141,7 @@ func (h *ApiHandler) CreateEvaluation(c echo.Context) error { // @Param evaluation_id path string true "evaluation uuid" Format(uuid) // @Param evaluation body dto.Evaluation true "evaluation payload" // @Param key query string false "api key" -// @Success 200 {object} dto.Evaluation +// @Success 200 {object} db.VEvaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -156,9 +156,9 @@ func (h *ApiHandler) UpdateEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - ev.UpdaterID, ev.UpdateDate = &p.ID, &t + ev.UpdatedBy, ev.UpdatedAt = &p.ID, &t evUpdated, err := h.DBService.EvaluationUpdate(c.Request().Context(), evID, ev) if err != nil { return httperr.InternalServerError(err) @@ -174,7 +174,7 @@ func (h *ApiHandler) UpdateEvaluation(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation_id path string true "evaluation uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} dto.AlertConfig +// @Success 200 {array} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/evaluation_test.go b/api/internal/handler/evaluation_test.go index 815bf2c7..3ea29872 100644 --- a/api/internal/handler/evaluation_test.go +++ b/api/internal/handler/evaluation_test.go @@ -28,15 +28,15 @@ var evaluationSchema = fmt.Sprintf(`{ "alert_config_id": { "type": ["string", "null"] }, "submittal_id": { "type": ["string", "null"] }, "alert_config_name": { "type": ["string", "null"] }, - "start_date": { "type": "string", "format": "date-time" }, - "end_date": { "type": "string", "format": "date-time" 
}, + "started_at": { "type": "string", "format": "date-time" }, + "ended_at": { "type": "string", "format": "date-time" }, "instruments": { "type": "array", "items": %s }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" } + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" } }, "additionalProperties": false }`, evaluationInstrumentSchema) @@ -57,8 +57,8 @@ const createEvaluationBody = `{ "project_id": "5b6f4f37-7755-4cf9-bd02-94f1e9bc5984", "name": "New Test Evaluation", "body": "New Test Evaluation Description", - "start_date": "2023-05-16T13:19:41.441328Z", - "end_date": "2023-06-16T13:19:41.441328Z", + "started_at": "2023-05-16T13:19:41.441328Z", + "ended_at": "2023-06-16T13:19:41.441328Z", "submittal_id": "f8189297-f1a6-489d-9ea7-f1a0ffc30153", "instruments": [ {"instrument_id": "a7540f69-c41e-43b3-b655-6e44097edb7e"} @@ -70,8 +70,8 @@ const updateEvaluationBody = `{ "project_id": "5b6f4f37-7755-4cf9-bd02-94f1e9bc5984", "name": "Updated Test Evaluation", "body": "Updated Test Evaluation Description", - "start_date": "2023-07-16T13:19:41.441328Z", - "end_date": "2023-08-16T13:19:41.441328Z", + "started_at": "2023-07-16T13:19:41.441328Z", + "ended_at": "2023-08-16T13:19:41.441328Z", "instruments": [] }` diff --git a/api/internal/handler/handler_test.go b/api/internal/handler/handler_test.go index 4fba2264..ae8fa14e 100644 --- a/api/internal/handler/handler_test.go +++ b/api/internal/handler/handler_test.go @@ -19,6 +19,7 @@ import ( const 
( truncateLinesBody = 30 + maxLines = 50 host = "http://localhost:8080" mockJwt = `Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6Ikw0YXFVRmd6YV9RVjhqc1ZOa281OW5GVzl6bGh1b0JGX3RxdlpkTUZkajQifQ.eyJzdWIiOiJmOGRjYWZlYS0yNDNlLTRiODktOGQ3ZC1mYTAxOTE4MTMwZjQiLCJ0eXAiOiJCZWFyZXIiLCJhbGxvd2VkLW9yaWdpbnMiOlsiaHR0cDovL2xvY2FsaG9zdDozMDAwIl0sIm5hbWUiOiJBbnRob255IExhbWJlcnQiLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJ0ZXN0IiwiZ2l2ZW5fbmFtZSI6IkFudGhvbnkiLCJmYW1pbHlfbmFtZSI6IkxhbWJlcnQiLCJlbWFpbCI6ImFudGhvbnkubS5sYW1iZXJ0QGZha2UudXNhY2UuYXJteS5taWwiLCJzdWJqZWN0RE4iOiJsYW1iZXJ0LmFudGhvbnkubS4yIiwiY2FjVUlEIjoiMiJ9.8CjeifD51ZEZZOx9eeMd7RPanvtgkQQus-R19aU91Rw` mockAppKey = "appkey" @@ -134,7 +135,14 @@ func RunAll(t *testing.T, tests []HTTPTest) { assert.Truef(t, valid, "response body did not match json schema:") if !valid { var errs string - for _, err := range result.Errors() { + for idx, err := range result.Errors() { + if idx >= maxLines { + if idx == maxLines { + errs += "\n" + errs += "..." + } + continue + } errs += "\n" errs += err.String() } diff --git a/api/internal/handler/heartbeat.go b/api/internal/handler/heartbeat.go index 79148db0..5ed4c9a8 100644 --- a/api/internal/handler/heartbeat.go +++ b/api/internal/handler/heartbeat.go @@ -8,6 +8,7 @@ import ( _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" + "github.com/USACE/instrumentation-api/api/internal/service" "github.com/labstack/echo/v4" ) @@ -16,14 +17,14 @@ import ( // @Summary checks the health of the api server // @Tags heartbeat // @Produce json -// @Success 200 {array} map[string]interface{} +// @Success 200 {object} service.Healthcheck // @Router /health [get] func (h *ApiHandler) Healthcheck(c echo.Context) error { - return c.JSON(http.StatusOK, map[string]interface{}{"status": "healthy"}) + return c.JSON(http.StatusOK, service.Healthcheck{Status: "healthy"}) } func (h *TelemetryHandler) Healthcheck(c echo.Context) error { - return 
c.JSON(http.StatusOK, map[string]interface{}{"status": "healthy"}) + return c.JSON(http.StatusOK, service.Healthcheck{Status: "healthy"}) } // DoHeartbeat godoc @@ -32,7 +33,7 @@ func (h *TelemetryHandler) Healthcheck(c echo.Context) error { // @Tags heartbeat // @Produce json // @Param key query string true "api key" -// @Success 200 {object} dto.Heartbeat +// @Success 201 {object} service.Heartbeat // @Router /heartbeat [post] func (h *ApiHandler) DoHeartbeat(c echo.Context) error { hb, err := h.DBService.HeartbeatCreate(c.Request().Context(), time.Now()) @@ -47,7 +48,7 @@ func (h *ApiHandler) DoHeartbeat(c echo.Context) error { // @Summary gets the latest heartbeat // @Tags heartbeat // @Produce json -// @Success 200 {object} dto.Heartbeat +// @Success 200 {object} service.Heartbeat // @Router /heartbeat/latest [get] func (h *ApiHandler) GetLatestHeartbeat(c echo.Context) error { hb, err := h.DBService.HeartbeatGetLatest(c.Request().Context()) @@ -62,7 +63,7 @@ func (h *ApiHandler) GetLatestHeartbeat(c echo.Context) error { // @Summary returns all heartbeats // @Tags heartbeat // @Produce json -// @Success 200 {array} dto.Heartbeat +// @Success 200 {array} service.Heartbeat // @Router /heartbeats [get] func (h *ApiHandler) ListHeartbeats(c echo.Context) error { var limit int32 = 50 diff --git a/api/internal/handler/home.go b/api/internal/handler/home.go index ba1018ed..21082d21 100644 --- a/api/internal/handler/home.go +++ b/api/internal/handler/home.go @@ -3,6 +3,7 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/labstack/echo/v4" @@ -13,7 +14,7 @@ import ( // @Summary gets information for the homepage // @Tags home // @Produce json -// @Success 200 {object} dto.Home +// @Success 200 {object} db.HomeGetRow // @Failure 500 {object} echo.HTTPError // @Router /home [get] func (h 
*ApiHandler) GetHome(c echo.Context) error { diff --git a/api/internal/handler/instrument.go b/api/internal/handler/instrument.go index fa532ceb..0cc6b321 100644 --- a/api/internal/handler/instrument.go +++ b/api/internal/handler/instrument.go @@ -1,6 +1,7 @@ package handler import ( + "encoding/json" "net/http" "strings" "time" @@ -13,30 +14,12 @@ import ( "github.com/labstack/echo/v4" ) -// ListInstruments godoc -// -// @Summary lists all instruments -// @Tags instrument -// @Produce json -// @Success 200 {array} dto.Instrument -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /instruments [get] -func (h *ApiHandler) ListInstruments(c echo.Context) error { - nn, err := h.DBService.InstrumentList(c.Request().Context()) - if err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusOK, nn) -} - // GetInstrumentCount godoc // // @Summary gets the total number of non deleted instruments in the system // @Tags instrument // @Produce json -// @Success 200 {object} dto.InstrumentCount +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -46,7 +29,7 @@ func (h *ApiHandler) GetInstrumentCount(c echo.Context) error { if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, ic) + return c.JSON(http.StatusOK, map[string]interface{}{"instrument_count": ic}) } // GetInstrument godoc @@ -55,7 +38,7 @@ func (h *ApiHandler) GetInstrumentCount(c echo.Context) error { // @Tags instrument // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {object} dto.Instrument +// @Success 200 {object} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -82,7 +65,7 @@ func (h *ApiHandler) 
GetInstrument(c echo.Context) error { // @Param instrument_id path string true "instrument id" Format(uuid) // @Param instrument body dto.InstrumentCollection true "instrument collection payload" // @Param key query string false "api key" -// @Success 200 {array} dto.IDSlugName +// @Success 201 {array} db.InstrumentCreateBatchRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -101,7 +84,7 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { return httperr.MalformedBody(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() instrumentNames := make([]string, len(ic)) @@ -110,8 +93,8 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { var prj dto.IDSlugName prj.ID = projectID ic[idx].Projects = []dto.IDSlugName{prj} - ic[idx].CreatorID = p.ID - ic[idx].CreateDate = t + ic[idx].CreatedBy = p.ID + ic[idx].CreatedAt = t } if strings.ToLower(c.QueryParam("dry_run")) == "true" { @@ -142,7 +125,7 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param instrument body dto.Instrument true "instrument payload" // @Param key query string false "api key" -// @Success 200 {object} dto.Instrument +// @Success 200 {object} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -164,10 +147,10 @@ func (h *ApiHandler) UpdateInstrument(c echo.Context) error { } i.ID = iID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - i.UpdaterID, i.UpdateDate = &p.ID, &t + i.UpdatedBy, i.UpdatedAt = &p.ID, &t iUpdated, err := h.DBService.InstrumentUpdate(c.Request().Context(), pID, i) if err != nil { @@ -185,7 +168,7 @@ func (h *ApiHandler) UpdateInstrument(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" 
Format(uuid) // @Param instrument body dto.Instrument true "instrument payload" // @Param key query string false "api key" -// @Success 200 {object} dto.Instrument +// @Success 200 {object} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -200,17 +183,17 @@ func (h *ApiHandler) UpdateInstrumentGeometry(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var geom db.Geometry - if err := c.Bind(&geom); err != nil { + var j json.RawMessage + if err := c.Bind(&j); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) instrument, err := h.DBService.InstrumentUpdateGeometry(c.Request().Context(), db.InstrumentUpdateGeometryParams{ ProjectID: projectID, ID: instrumentID, - Geometry: geom, - Updater: &p.ID, + Geometry: j, + UpdatedBy: &p.ID, }) if err != nil { return httperr.InternalServerError(err) diff --git a/api/internal/handler/instrument_assign.go b/api/internal/handler/instrument_assign.go index 41dcdc19..b97b1706 100644 --- a/api/internal/handler/instrument_assign.go +++ b/api/internal/handler/instrument_assign.go @@ -4,8 +4,10 @@ import ( "net/http" "strings" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -19,7 +21,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} dto.InstrumentsValidation +// @Success 201 {object} service.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // 
@Failure 500 {object} echo.HTTPError @@ -35,7 +37,7 @@ func (h *ApiHandler) AssignInstrumentToProject(c echo.Context) error { return httperr.MalformedID(err) } dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) v, err := h.DBService.ProjectInstrumentCreateBatchAssignmentInstrumentsToProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) if err != nil { @@ -55,7 +57,7 @@ func (h *ApiHandler) AssignInstrumentToProject(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} dto.InstrumentsValidation +// @Success 200 {object} service.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -71,7 +73,7 @@ func (h *ApiHandler) UnassignInstrumentFromProject(c echo.Context) error { return httperr.MalformedID(err) } dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) v, err := h.DBService.ProjectInstrumentDeleteBatchAssignmentInstrumentsToProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) if err != nil { @@ -92,7 +94,7 @@ func (h *ApiHandler) UnassignInstrumentFromProject(c echo.Context) error { // @Param project_ids body dto.InstrumentProjectAssignments true "project uuids" // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} dto.InstrumentsValidation +// @Success 200 {object} service.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -103,7 
+105,7 @@ func (h *ApiHandler) UpdateInstrumentProjectAssignments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" pl := dto.InstrumentProjectAssignments{ProjectIDs: make([]uuid.UUID, 0)} @@ -140,7 +142,7 @@ func (h *ApiHandler) UpdateInstrumentProjectAssignments(c echo.Context) error { // @Param instrument_ids body dto.ProjectInstrumentAssignments true "instrument uuids" // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} dto.InstrumentsValidation +// @Success 200 {object} service.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -151,7 +153,7 @@ func (h *ApiHandler) UpdateProjectInstrumentAssignments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" pl := dto.ProjectInstrumentAssignments{InstrumentIDs: make([]uuid.UUID, 0)} diff --git a/api/internal/handler/instrument_constant.go b/api/internal/handler/instrument_constant.go index bf5e2a3f..481df1ab 100644 --- a/api/internal/handler/instrument_constant.go +++ b/api/internal/handler/instrument_constant.go @@ -17,7 +17,7 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.Timeseries +// @Success 200 {array} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -43,7 +43,7 @@ func (h *ApiHandler) ListInstrumentConstants(c echo.Context) error { // 
@Param instrument_id path string true "instrument uuid" Format(uuid) // @Param timeseries_collection_items body dto.TimeseriesCollectionItems true "timeseries collection items payload" // @Param key query string false "api key" -// @Success 200 {array} dto.Timeseries +// @Success 200 {array} db.TimeseriesCreateBatchRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/instrument_group.go b/api/internal/handler/instrument_group.go index 5ef3f298..2cab7508 100644 --- a/api/internal/handler/instrument_group.go +++ b/api/internal/handler/instrument_group.go @@ -17,7 +17,7 @@ import ( // @Summary lists all instrument groups // @Tags instrument-group // @Produce json -// @Success 200 {array} dto.InstrumentGroup +// @Success 200 {array} db.VInstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -36,7 +36,7 @@ func (h *ApiHandler) ListInstrumentGroups(c echo.Context) error { // @Tags instrument-group // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {object} dto.InstrumentGroup +// @Success 200 {object} db.VInstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -60,25 +60,24 @@ func (h *ApiHandler) GetInstrumentGroup(c echo.Context) error { // @Produce json // @Param instrument_group body dto.InstrumentGroup true "instrument group payload" // @Param key query string false "api key" -// @Success 201 {object} dto.InstrumentGroup +// @Success 201 {array} db.InstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instrument_groups [post] // @Security Bearer func (h *ApiHandler) CreateInstrumentGroup(c echo.Context) error { - - gc := 
dto.InstrumentGroupCollection{} + var gc dto.InstrumentGroupCollection if err := c.Bind(&gc); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() for idx := range gc.Items { - gc.Items[idx].CreatorID = p.ID - gc.Items[idx].CreateDate = t + gc.Items[idx].CreatedBy = p.ID + gc.Items[idx].CreatedAt = t } gg, err := h.DBService.InstrumentGroupCreateBatch(c.Request().Context(), gc.Items) @@ -96,7 +95,7 @@ func (h *ApiHandler) CreateInstrumentGroup(c echo.Context) error { // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) // @Param instrument_group body dto.InstrumentGroup true "instrument group payload" // @Param key query string false "api key" -// @Success 200 {object} dto.InstrumentGroup +// @Success 200 {object} db.InstrumentGroupUpdateRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -108,16 +107,16 @@ func (h *ApiHandler) UpdateInstrumentGroup(c echo.Context) error { return httperr.MalformedID(err) } - g := dto.InstrumentGroup{ID: gID} + var g dto.InstrumentGroup if err := c.Bind(&g); err != nil { return httperr.MalformedBody(err) } g.ID = gID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - g.UpdaterID, g.UpdateDate = &p.ID, &t + g.UpdatedBy, g.UpdatedAt = &p.ID, &t gUpdated, err := h.DBService.InstrumentGroupUpdate(c.Request().Context(), g) if err != nil { @@ -133,7 +132,7 @@ func (h *ApiHandler) UpdateInstrumentGroup(c echo.Context) error { // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} dto.InstrumentGroup +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -156,7 +155,7 @@ func (h 
*ApiHandler) DeleteFlagInstrumentGroup(c echo.Context) error { // @Tags instrument-group // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {array} dto.Instrument +// @Success 200 {array} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/instrument_group_test.go b/api/internal/handler/instrument_group_test.go index 963bf429..43dff938 100644 --- a/api/internal/handler/instrument_group_test.go +++ b/api/internal/handler/instrument_group_test.go @@ -16,15 +16,15 @@ const instrumentGroupSchema = `{ "slug": { "type": "string" }, "name": { "type": "string" }, "description": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "project_id": { "type": ["string", "null"] }, "instrument_count": { "type": "number" }, "timeseries_count": { "type": "number" } }, - "required": ["id", "slug", "name", "description", "creator_id", "create_date", "updater_id", "update_date", "project_id"], + "required": ["id", "slug", "name", "description", "created_by", "created_at", "updated_by", "updated_at", "project_id"], "additionalProperties": false }` diff --git a/api/internal/handler/instrument_incl.go b/api/internal/handler/instrument_incl.go index 0c7977fd..380b1889 100644 --- a/api/internal/handler/instrument_incl.go +++ b/api/internal/handler/instrument_incl.go @@ -18,7 +18,7 @@ import ( // @Tags instrument-incl // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// 
@Success 200 {array} dto.InclSegment +// @Success 200 {array} db.VInclSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -43,7 +43,7 @@ func (h *ApiHandler) ListInclSegmentsForInstrument(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param after query string false "after time" Format(date-time) // @Param before query string true "before time" Format(date-time) -// @Success 200 {array} dto.InclMeasurements +// @Success 200 {array} db.VInclMeasurement // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -84,11 +84,15 @@ func (h *ApiHandler) GetInclMeasurementsForInstrument(c echo.Context) error { // @Router /instruments/incl/{instrument_id}/segments [put] // @Security Bearer func (h *ApiHandler) UpdateInclSegments(c echo.Context) error { + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } segs := make([]dto.InclSegment, 0) if err := c.Bind(&segs); err != nil { return httperr.MalformedBody(err) } - if err := h.DBService.InclSegmentUpdateBatch(c.Request().Context(), segs); err != nil { + if err := h.DBService.InclSegmentUpdateBatch(c.Request().Context(), iID, segs); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, segs) diff --git a/api/internal/handler/instrument_ipi.go b/api/internal/handler/instrument_ipi.go index 5c51b030..ea394f6d 100644 --- a/api/internal/handler/instrument_ipi.go +++ b/api/internal/handler/instrument_ipi.go @@ -18,7 +18,7 @@ import ( // @Tags instrument-ipi // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.IpiSegment +// @Success 200 {array} db.VIpiSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -43,7 +43,7 @@ 
func (h *ApiHandler) ListIpiSegmentsForInstrument(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param after query string false "after time" Format(date-time) // @Param before query string true "before time" Format(date-time) -// @Success 200 {array} dto.IpiMeasurements +// @Success 200 {array} db.VIpiMeasurement // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -84,11 +84,15 @@ func (h *ApiHandler) GetIpiMeasurementsForInstrument(c echo.Context) error { // @Router /instruments/ipi/{instrument_id}/segments [put] // @Security Bearer func (h *ApiHandler) UpdateIpiSegments(c echo.Context) error { + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } segs := make([]dto.IpiSegment, 0) if err := c.Bind(&segs); err != nil { return httperr.MalformedBody(err) } - if err := h.DBService.IpiSegmentUpdateBatch(c.Request().Context(), segs); err != nil { + if err := h.DBService.IpiSegmentUpdateBatch(c.Request().Context(), iID, segs); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, segs) diff --git a/api/internal/handler/instrument_note.go b/api/internal/handler/instrument_note.go index ac3bc779..0d096b05 100644 --- a/api/internal/handler/instrument_note.go +++ b/api/internal/handler/instrument_note.go @@ -4,6 +4,8 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" @@ -17,7 +19,7 @@ import ( // @Tags instrument-note // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.InstrumentNote +// @Success 200 {array} db.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} 
echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -40,7 +42,7 @@ func (h *ApiHandler) ListInstrumentInstrumentNotes(c echo.Context) error { // @Tags instrument-note // @Produce json // @Param note_id path string true "note uuid" Format(uuid) -// @Success 200 {object} dto.InstrumentNote +// @Success 200 {object} db.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -64,7 +66,7 @@ func (h *ApiHandler) GetInstrumentNote(c echo.Context) error { // @Produce json // @Param instrument_note body dto.InstrumentNoteCollection true "instrument note collection payload" // @Param key query string false "api key" -// @Success 200 {array} dto.InstrumentNote +// @Success 201 {array} db.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -76,12 +78,12 @@ func (h *ApiHandler) CreateInstrumentNote(c echo.Context) error { return httperr.MalformedBody(err) } // profile and timestamp - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() for idx := range nc.Items { - nc.Items[idx].CreatorID = p.ID - nc.Items[idx].CreateDate = t + nc.Items[idx].CreatedBy = p.ID + nc.Items[idx].CreatedAt = t } nn, err := h.DBService.InstrumentNoteCreateBatch(c.Request().Context(), nc.Items) if err != nil { @@ -99,7 +101,7 @@ func (h *ApiHandler) CreateInstrumentNote(c echo.Context) error { // @Param note_id path string true "note uuid" Format(uuid) // @Param instrument_note body dto.InstrumentNote true "instrument note collection payload" // @Param key query string false "api key" -// @Success 200 {array} dto.AlertConfig +// @Success 200 {array} db.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -110,15 +112,15 @@ func (h *ApiHandler) UpdateInstrumentNote(c echo.Context) error { if err != nil { return 
httperr.MalformedID(err) } - n := dto.InstrumentNote{ID: noteID} + var n dto.InstrumentNote if err := c.Bind(&n); err != nil { return httperr.MalformedBody(err) } n.ID = noteID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - n.UpdaterID, n.UpdateDate = &p.ID, &t + n.UpdatedBy, n.UpdatedAt = &p.ID, &t nUpdated, err := h.DBService.InstrumentNoteUpdate(c.Request().Context(), n) if err != nil { diff --git a/api/internal/handler/instrument_note_test.go b/api/internal/handler/instrument_note_test.go index 92bfcc1b..34daaacf 100644 --- a/api/internal/handler/instrument_note_test.go +++ b/api/internal/handler/instrument_note_test.go @@ -17,12 +17,12 @@ const instrumentNoteSchema = `{ "title": { "type": "string" }, "body": { "type": "string" }, "time": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null" ] }, - "update_date": { "type": ["string", "null"], "format": "date-time" } + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null" ] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" } }, - "required": ["id", "instrument_id", "title", "body", "time", "creator_id", "create_date", "updater_id", "update_date" ], + "required": ["id", "instrument_id", "title", "body", "time", "created_by", "created_at", "updated_by", "updated_at" ], "additionalProperties": false }` @@ -88,13 +88,6 @@ func TestInstrumentNotes(t *testing.T) { ExpectedStatus: http.StatusOK, ExpectedSchema: objSchema, }, - { - Name: "ListInstrumentNotes", - URL: "/instruments/notes", - Method: http.MethodGet, - ExpectedStatus: http.StatusOK, - ExpectedSchema: arrSchema, - }, { Name: "ListInstrumentInstrumentNotes", URL: fmt.Sprintf("/instruments/%s/notes", testInstrumentNoteIntrumentID), diff --git a/api/internal/handler/instrument_saa.go 
b/api/internal/handler/instrument_saa.go index 726d73ea..20a6ffb7 100644 --- a/api/internal/handler/instrument_saa.go +++ b/api/internal/handler/instrument_saa.go @@ -18,7 +18,7 @@ import ( // @Tags instrument-saa // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.SaaSegment +// @Success 200 {array} db.VSaaSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -43,7 +43,7 @@ func (h *ApiHandler) ListSaaSegmentsForInstrument(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param after query string false "after time" Format(date-time) // @Param before query string true "before time" Format(date-time) -// @Success 200 {array} dto.SaaMeasurements +// @Success 200 {array} db.VSaaMeasurement // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -84,11 +84,15 @@ func (h *ApiHandler) GetSaaMeasurementsForInstrument(c echo.Context) error { // @Router /instruments/saa/{instrument_id}/segments [put] // @Security Bearer func (h *ApiHandler) UpdateSaaSegments(c echo.Context) error { + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } segs := make([]dto.SaaSegment, 0) if err := c.Bind(&segs); err != nil { return httperr.MalformedBody(err) } - if err := h.DBService.SaaSegmentUpdateBatch(c.Request().Context(), segs); err != nil { + if err := h.DBService.SaaSegmentUpdateBatch(c.Request().Context(), iID, segs); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, segs) diff --git a/api/internal/handler/instrument_status.go b/api/internal/handler/instrument_status.go index 98fadac3..397d59e2 100644 --- a/api/internal/handler/instrument_status.go +++ b/api/internal/handler/instrument_status.go @@ -3,6 +3,7 @@ package handler import ( 
"net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" @@ -16,7 +17,7 @@ import ( // @Tags instrument-status // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.InstrumentStatus +// @Success 200 {array} db.VInstrumentStatus // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -41,7 +42,7 @@ func (h *ApiHandler) ListInstrumentStatus(c echo.Context) error { // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param status_id path string true "status uuid" Format(uuid) -// @Success 200 {array} dto.AlertConfig +// @Success 200 {array} db.VInstrumentStatus // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/instrument_status_test.go b/api/internal/handler/instrument_status_test.go index 5de21b77..07b152e8 100644 --- a/api/internal/handler/instrument_status_test.go +++ b/api/internal/handler/instrument_status_test.go @@ -14,10 +14,11 @@ const instrumentStatusSchema = `{ "properties": { "id": { "type": "string" }, "time": { "type": "string" }, + "instrument_id": { "type": "string" }, "status_id": { "type": "string" }, "status": { "type": "string" } }, - "required": ["id", "time", "status_id", "status"], + "required": ["id", "instrument_id", "time", "status_id", "status"], "additionalProperties": false }` diff --git a/api/internal/handler/instrument_test.go b/api/internal/handler/instrument_test.go index a4db3145..09fe9c97 100644 --- a/api/internal/handler/instrument_test.go +++ b/api/internal/handler/instrument_test.go @@ -51,18 +51,19 @@ var instrumentSchema = fmt.Sprintf(`{ }, "station": { "type": ["number", "null"] }, "offset": { "type": ["number", "null"] }, - 
"creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "projects": %s, "nid_id": { "type": ["string", "null"] }, "usgs_id": { "type": ["string", "null"] }, "has_cwms": { "type": "boolean" }, "show_cwms_tab": { "type": "boolean" }, - "opts": { "type": ["object", "null"] } + "opts": { "type": ["object", "null"] }, + "telemetry": { "type": ["array","null"], "items": { "type": "object" }} }, - "required": ["id", "slug", "name", "type_id", "type", "status_id", "status", "status_time", "geometry", "creator_id", "create_date", "updater_id", "update_date", "projects", "station", "offset", "constants", "has_cwms", "alert_configs", "nid_id", "usgs_id", "show_cwms_tab"], + "required": ["id", "slug", "name", "type_id", "type", "status_id", "status", "status_time", "geometry", "created_by", "created_at", "updated_by", "updated_at", "projects", "station", "offset", "constants", "has_cwms", "alert_configs", "nid_id", "usgs_id", "show_cwms_tab"], "additionalProperties": false }`, IDSlugNameArrSchema) @@ -288,13 +289,6 @@ func TestInstruments(t *testing.T) { ExpectedStatus: http.StatusOK, ExpectedSchema: objSchema, }, - { - Name: "ListInstruments", - URL: "/instruments", - Method: http.MethodGet, - ExpectedStatus: http.StatusOK, - ExpectedSchema: arrSchema, - }, { Name: "ListProjectInstruments", URL: fmt.Sprintf("/projects/%s/instruments", testProjectID), diff --git a/api/internal/handler/measurement.go b/api/internal/handler/measurement.go index 6699cd82..d2425e14 100644 --- a/api/internal/handler/measurement.go +++ b/api/internal/handler/measurement.go @@ -168,7 +168,7 @@ func (h *ApiHandler) 
DeleteTimeserieMeasurements(c echo.Context) error { // @Produce json // @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection false "json array of timeseries measurement collections" // @Param timeseries_measurement_collections formData file false "TOA5 file of timeseries measurement collections" -// @Success 200 {array} dto.MeasurementCollection +// @Success 200 {array} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/media.go b/api/internal/handler/media.go index ecc7485e..7024f416 100644 --- a/api/internal/handler/media.go +++ b/api/internal/handler/media.go @@ -14,7 +14,7 @@ import ( // @Produce jpeg // @Param project_slug path string true "project abbr" // @Param uri_path path string true "uri path of requested resource" -// @Success 200 +// @Success 200 {file} file // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/opendcs.go b/api/internal/handler/opendcs.go deleted file mode 100644 index bce924ac..00000000 --- a/api/internal/handler/opendcs.go +++ /dev/null @@ -1,26 +0,0 @@ -package handler - -import ( - "net/http" - - "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/labstack/echo/v4" -) - -// ListOpendcsSites godoc -// -// @Summary lists all instruments, represented as opendcs sites -// @Tags opendcs -// @Produce xml -// @Success 200 {array} dto.Site -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /opendcs/sites [get] -func (h *ApiHandler) ListOpendcsSites(c echo.Context) error { - ss, err := h.DBService.OpendcsSiteList(c.Request().Context()) - if err != nil { - return httperr.InternalServerError(err) - } - return c.XMLPretty(http.StatusOK, ss, " ") -} diff --git 
a/api/internal/handler/plot_config.go b/api/internal/handler/plot_config.go index e0cefaf4..2fcc990a 100644 --- a/api/internal/handler/plot_config.go +++ b/api/internal/handler/plot_config.go @@ -16,7 +16,7 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} dto.PlotConfig +// @Success 200 {array} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -41,7 +41,7 @@ func (h *ApiHandler) ListPlotConfigs(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Success 200 {object} dto.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/plot_config_bullseye.go b/api/internal/handler/plot_config_bullseye.go index f47e3b7b..80059118 100644 --- a/api/internal/handler/plot_config_bullseye.go +++ b/api/internal/handler/plot_config_bullseye.go @@ -20,7 +20,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_config body dto.PlotConfigBullseyePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} dto.PlotConfig +// @Success 201 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -44,8 +44,8 @@ func (h *ApiHandler) CreatePlotConfigBullseyePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(dto.Profile) - pc.CreatorID, pc.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + pc.CreatedBy, pc.CreatedAt = p.ID, time.Now() pcNew, err := 
h.DBService.PlotConfigCreateBullseye(c.Request().Context(), pc) if err != nil { @@ -63,7 +63,7 @@ func (h *ApiHandler) CreatePlotConfigBullseyePlot(c echo.Context) error { // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) // @Param plot_config body dto.PlotConfigBullseyePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} dto.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -93,9 +93,9 @@ func (h *ApiHandler) UpdatePlotConfigBullseyePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) tNow := time.Now() - pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow + pc.UpdatedBy, pc.UpdatedAt = &p.ID, &tNow pcUpdated, err := h.DBService.PlotConfigUpdateBullseye(c.Request().Context(), pc) if err != nil { @@ -112,7 +112,7 @@ func (h *ApiHandler) UpdatePlotConfigBullseyePlot(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} []dto.PlotConfigMeasurementBullseyePlot +// @Success 200 {array} db.PlotConfigMeasurementListBullseyeRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/plot_config_contour.go b/api/internal/handler/plot_config_contour.go index d73b9a56..476583c2 100644 --- a/api/internal/handler/plot_config_contour.go +++ b/api/internal/handler/plot_config_contour.go @@ -7,6 +7,7 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" + _ 
"github.com/USACE/instrumentation-api/api/internal/service" "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -20,7 +21,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_config body dto.PlotConfigContourPlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} dto.PlotConfig +// @Success 201 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -44,8 +45,8 @@ func (h *ApiHandler) CreatePlotConfigContourPlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(dto.Profile) - pc.CreatorID, pc.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + pc.CreatedBy, pc.CreatedAt = p.ID, time.Now() pcNew, err := h.DBService.PlotConfigCreateContour(c.Request().Context(), pc) if err != nil { @@ -63,7 +64,7 @@ func (h *ApiHandler) CreatePlotConfigContourPlot(c echo.Context) error { // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) // @Param plot_config body dto.PlotConfigContourPlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} dto.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -93,9 +94,9 @@ func (h *ApiHandler) UpdatePlotConfigContourPlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) tNow := time.Now() - pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow + pc.UpdatedBy, pc.UpdatedAt = &p.ID, &tNow pcUpdated, err := h.DBService.PlotConfigUpdateContour(c.Request().Context(), pc) if err != nil { @@ -154,7 +155,7 @@ func (h *ApiHandler) ListPlotConfigTimesContourPlot(c echo.Context) error { // @Param plot_configuration_id path 
string true "plot config uuid" Format(uuid) // @Param time query string true "time" // @Param key query string false "api key" -// @Success 200 {object} dto.AggregatePlotConfigMeasurementsContourPlot +// @Success 200 {object} service.AggregatePlotConfigMeasurementsContourPlot // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/plot_config_profile.go b/api/internal/handler/plot_config_profile.go index 9c54ae06..8f98c217 100644 --- a/api/internal/handler/plot_config_profile.go +++ b/api/internal/handler/plot_config_profile.go @@ -4,6 +4,8 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" @@ -18,7 +20,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_config body dto.PlotConfigProfilePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} dto.PlotConfig +// @Success 201 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -42,8 +44,8 @@ func (h *ApiHandler) CreatePlotConfigProfilePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(dto.Profile) - pc.CreatorID, pc.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + pc.CreatedBy, pc.CreatedAt = p.ID, time.Now() pcNew, err := h.DBService.PlotConfigCreateProfile(c.Request().Context(), pc) if err != nil { @@ -61,7 +63,7 @@ func (h *ApiHandler) CreatePlotConfigProfilePlot(c echo.Context) error { // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) // @Param plot_config body dto.PlotConfigProfilePlot true "plot config payload" // @Param 
key query string false "api key" -// @Success 200 {object} dto.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -91,9 +93,9 @@ func (h *ApiHandler) UpdatePlotConfigProfilePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) tNow := time.Now() - pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow + pc.UpdatedBy, pc.UpdatedAt = &p.ID, &tNow pcUpdated, err := h.DBService.PlotConfigUpdateProfile(c.Request().Context(), pc) if err != nil { diff --git a/api/internal/handler/plot_config_scatter_line.go b/api/internal/handler/plot_config_scatter_line.go index 9a4d84af..b494a1fd 100644 --- a/api/internal/handler/plot_config_scatter_line.go +++ b/api/internal/handler/plot_config_scatter_line.go @@ -4,6 +4,8 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" @@ -18,7 +20,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_config body dto.PlotConfigScatterLinePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} dto.PlotConfig +// @Success 201 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -43,8 +45,8 @@ func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(dto.Profile) - pc.CreatorID, pc.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + pc.CreatedBy, pc.CreatedAt = p.ID, time.Now() pcNew, err := h.DBService.PlotConfigCreateScatterLine(c.Request().Context(), pc) if err != nil { 
@@ -62,7 +64,7 @@ func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) // @Param plot_config body dto.PlotConfigScatterLinePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} dto.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -93,9 +95,9 @@ func (h *ApiHandler) UpdatePlotConfigScatterLinePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) tNow := time.Now() - pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow + pc.UpdatedBy, pc.UpdatedAt = &p.ID, &tNow pcUpdated, err := h.DBService.PlotConfigUpdateScatterLine(c.Request().Context(), pc) if err != nil { diff --git a/api/internal/handler/plot_config_scatter_line_test.go b/api/internal/handler/plot_config_scatter_line_test.go index be18274a..b4012f2b 100644 --- a/api/internal/handler/plot_config_scatter_line_test.go +++ b/api/internal/handler/plot_config_scatter_line_test.go @@ -15,10 +15,10 @@ const plotConfigBaseSchema = `{ "id": { "type": "string" }, "slug": { "type": "string" }, "name": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "project_id": { "type": ["string", "null"] }, "show_masked": { "type": "boolean" }, "show_nonvalidated": { "type": "boolean" }, @@ -31,7 +31,7 @@ const plotConfigBaseSchema = `{ "display": %s }, "required": [ - "id", "slug", "name", 
"creator_id", "create_date", "updater_id", "update_date", "project_id", + "id", "slug", "name", "created_by", "created_at", "updated_by", "updated_at", "project_id", "show_masked", "show_nonvalidated", "show_comments", "auto_range", "date_range", "threshold", "report_configs", "plot_type", "display" ], "additionalProperties": false diff --git a/api/internal/handler/profile.go b/api/internal/handler/profile.go index b3c38dbd..111eed40 100644 --- a/api/internal/handler/profile.go +++ b/api/internal/handler/profile.go @@ -8,6 +8,7 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/labstack/echo/v4" ) @@ -80,7 +81,7 @@ func (h *ApiHandler) GetMyProfile(c echo.Context) error { // @Summary creates token for a profile // @Tags profile // @Produce json -// @Success 200 {object} dto.Token +// @Success 200 {object} service.Token // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/project.go b/api/internal/handler/project.go index a2a4ba97..8442206e 100644 --- a/api/internal/handler/project.go +++ b/api/internal/handler/project.go @@ -18,7 +18,7 @@ import ( // @Summary lists all districts // @Tags project // @Produce json -// @Success 200 {array} dto.District +// @Success 200 {array} db.VDistrict // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -37,7 +37,7 @@ func (h *ApiHandler) ListDistricts(c echo.Context) error { // @Tags project // @Produce json // @Param federal_id query string false "federal id" -// @Success 200 {array} dto.Project +// @Success 200 {array} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ 
-67,7 +67,7 @@ func (h *ApiHandler) ListProjects(c echo.Context) error { // @Tags project // @Produce json // @Param role query string false "role" -// @Success 200 {array} dto.Project +// @Success 200 {array} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -76,7 +76,7 @@ func (h *ApiHandler) ListProjects(c echo.Context) error { func (h *ApiHandler) ListMyProjects(c echo.Context) error { ctx := c.Request().Context() - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) if p.IsAdmin { projects, err := h.DBService.ProjectList(ctx) @@ -115,7 +115,7 @@ func (h *ApiHandler) ListMyProjects(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} dto.Project +// @Success 200 {array} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -138,7 +138,7 @@ func (h *ApiHandler) ListProjectInstruments(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} dto.InstrumentGroup +// @Success 200 {array} db.VInstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -160,7 +160,7 @@ func (h *ApiHandler) ListProjectInstrumentGroups(c echo.Context) error { // @Summary gets the total number of non-deleted projects in the system // @Tags project // @Produce json -// @Success 200 {object} dto.ProjectCount +// @Success 200 {object} service.ProjectCount // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -179,7 +179,7 @@ func (h *ApiHandler) GetProjectCount(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path string true "project uuid" 
Format(uuid) -// @Success 200 {object} dto.Project +// @Success 200 {object} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -203,7 +203,7 @@ func (h *ApiHandler) GetProject(c echo.Context) error { // @Produce json // @Param project_collection body dto.ProjectCollection true "project collection payload" // @Param key query string false "api key" -// @Success 200 {array} dto.IDSlugName +// @Success 201 {array} db.ProjectCreateBatchRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -215,15 +215,15 @@ func (h *ApiHandler) CreateProjectBulk(c echo.Context) error { return httperr.MalformedBody(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() for idx := range pc { if pc[idx].Name == "" { return httperr.Message(http.StatusBadRequest, "project name required") } - pc[idx].CreatorID = p.ID - pc[idx].CreateDate = t + pc[idx].CreatedBy = p.ID + pc[idx].CreatedAt = t } pp, err := h.DBService.ProjectCreateBatch(c.Request().Context(), pc) @@ -241,7 +241,7 @@ func (h *ApiHandler) CreateProjectBulk(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param project body dto.Project true "project payload" // @Param key query string false "api key" -// @Success 200 {object} dto.Project +// @Success 200 {object} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -257,10 +257,10 @@ func (h *ApiHandler) UpdateProject(c echo.Context) error { return httperr.MalformedBody(err) } p.ID = id - profile := c.Get("profile").(dto.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - p.UpdaterID, p.UpdateDate = &profile.ID, &t + p.UpdatedBy, p.UpdatedAt = &profile.ID, &t pUpdated, err := h.DBService.ProjectUpdate(c.Request().Context(), p) if err != 
nil { diff --git a/api/internal/handler/project_role.go b/api/internal/handler/project_role.go index 620ccbf2..5fe27527 100644 --- a/api/internal/handler/project_role.go +++ b/api/internal/handler/project_role.go @@ -4,7 +4,6 @@ import ( "net/http" "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" @@ -18,7 +17,7 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} dto.ProjectMembership +// @Success 200 {array} db.ProfileProjectRoleListForProjectRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -45,7 +44,7 @@ func (h *ApiHandler) ListProjectMembers(c echo.Context) error { // @Param profile_id path string true "profile uuid" Format(uuid) // @Param role_id path string true "role uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} dto.ProjectMembership +// @Success 201 {object} db.ProfileProjectRoleGetRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -64,7 +63,7 @@ func (h *ApiHandler) AddProjectMemberRole(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - grantedBy := c.Get("profile").(dto.Profile) + grantedBy := c.Get("profile").(db.VProfile) r, err := h.DBService.ProfileProjectRoleCreate(c.Request().Context(), db.ProfileProjectRoleCreateParams{ ProjectID: projectID, diff --git a/api/internal/handler/project_test.go b/api/internal/handler/project_test.go index 0fd5bec5..863efb64 100644 --- a/api/internal/handler/project_test.go +++ b/api/internal/handler/project_test.go @@ -38,16 +38,16 @@ const projectSchema = `{ "district_id": { "type": [ "string", "null"] }, "slug": { "type": "string" }, "name": { 
"type": "string" }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "instrument_count": { "type": "number" }, "instrument_group_count": { "type": "number" } }, - "required": ["id", "federal_id", "image", "office_id", "slug", "name", "creator_id", "create_date", "updater_id", "update_date", "instrument_count", "instrument_group_count"], + "required": ["id", "federal_id", "image", "office_id", "slug", "name", "created_by", "created_at", "updated_by", "updated_at", "instrument_count", "instrument_group_count"], "additionalProperties": false }` diff --git a/api/internal/handler/report_config.go b/api/internal/handler/report_config.go index 426401ae..e15fc56d 100644 --- a/api/internal/handler/report_config.go +++ b/api/internal/handler/report_config.go @@ -24,7 +24,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param key query string false "api key" // @Accept application/json -// @Success 200 {object} dto.ReportConfig +// @Success 200 {object} db.VReportConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -51,7 +51,7 @@ func (h *ApiHandler) ListProjectReportConfigs(c echo.Context) error { // @Param report_config body dto.ReportConfig true "report config payload" // @Param key query string false "api key" // @Accept application/json -// @Success 201 {object} dto.ReportConfig 
+// @Success 201 {object} db.VReportConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -68,9 +68,9 @@ func (h *ApiHandler) CreateReportConfig(c echo.Context) error { } rc.ProjectID = pID - profile := c.Get("profile").(dto.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - rc.CreatorID, rc.CreateDate = profile.ID, t + rc.CreatedBy, rc.CreatedAt = profile.ID, t rcNew, err := h.DBService.ReportConfigCreate(c.Request().Context(), rc) if err != nil { @@ -112,9 +112,9 @@ func (h *ApiHandler) UpdateReportConfig(c echo.Context) error { rc.ID = rcID rc.ProjectID = pID - profile := c.Get("profile").(dto.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - rc.UpdaterID, rc.UpdateDate = &profile.ID, &t + rc.UpdatedBy, rc.UpdatedAt = &profile.ID, &t if err := h.DBService.ReportConfigUpdate(c.Request().Context(), rc); err != nil { return httperr.InternalServerError(err) @@ -156,7 +156,7 @@ func (h *ApiHandler) DeleteReportConfig(c echo.Context) error { // @Produce json // @Param report_config_id path string true "report config uuid" Format(uuid) // @Param key query string true "api key" -// @Success 200 {object} dto.ReportConfigWithPlotConfigs +// @Success 200 {object} service.ReportConfigWithPlotConfigs // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -181,7 +181,7 @@ func (h *ApiHandler) GetReportConfigWithPlotConfigs(c echo.Context) error { // @Param report_config_id path string true "report config uuid" Format(uuid) // @Param key query string false "api key" // @Produce application/json -// @Success 201 {object} dto.ReportDownloadJob +// @Success 201 {object} db.ReportDownloadJob // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -193,7 +193,7 @@ func (h *ApiHandler) CreateReportDownloadJob(c echo.Context) error { 
return httperr.MalformedID(err) } isLandscape := strings.ToLower(c.QueryParam("is_landscape")) == "true" - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) j, err := h.DBService.ReportDownloadJobCreate(c.Request().Context(), h.PubsubService, service.ReportDownloadJobCreateOpts{ ReportConfigID: rcID, @@ -217,7 +217,7 @@ func (h *ApiHandler) CreateReportDownloadJob(c echo.Context) error { // @Param job_id path string true "download job uuid" Format(uuid) // @Param key query string false "api key" // @Produce application/json -// @Success 200 {object} dto.ReportDownloadJob +// @Success 200 {object} db.ReportDownloadJob // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -228,11 +228,11 @@ func (h *ApiHandler) GetReportDownloadJob(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) j, err := h.DBService.ReportDownloadJobGet(c.Request().Context(), db.ReportDownloadJobGetParams{ - ID: jobID, - Creator: p.ID, + ID: jobID, + CreatedBy: p.ID, }) if err != nil { return httperr.InternalServerError(err) @@ -265,7 +265,7 @@ func (h *ApiHandler) UpdateReportDownloadJob(c echo.Context) error { return httperr.MalformedBody(err) } j.ID = jobID - j.ProgressUpdateDate = time.Now() + j.ProgressUpdatedAt = time.Now() if err := h.DBService.ReportDownloadJobUpdate(c.Request().Context(), j); err != nil { return httperr.InternalServerError(err) @@ -293,11 +293,11 @@ func (h *ApiHandler) DownloadReport(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(dto.Profile) + p := c.Get("profile").(db.VProfile) j, err := h.DBService.ReportDownloadJobGet(c.Request().Context(), db.ReportDownloadJobGetParams{ - ID: jobID, - Creator: p.ID, + ID: jobID, + CreatedBy: p.ID, }) if err != nil { return httperr.InternalServerError(err) diff --git 
a/api/internal/handler/report_config_test.go b/api/internal/handler/report_config_test.go index 4c6b8676..766ca5fd 100644 --- a/api/internal/handler/report_config_test.go +++ b/api/internal/handler/report_config_test.go @@ -46,19 +46,19 @@ var reportConfigSchema = fmt.Sprintf(`{ "project_id": { "type": "string" }, "project_name": { "type": "string" }, "district_name": { "type": ["string", "null"] }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "global_overrides": %s, "plot_configs": %s }, "additionalProperties": false, "required": [ - "id","slug","name","description","project_id","project_name", "district_name", "creator_id", - "creator_username","create_date","global_overrides","plot_configs" + "id","slug","name","description","project_id","project_name", "district_name", "created_by", + "created_by_username","created_at","global_overrides","plot_configs" ] }`, globalOverridesSchema, IDSlugNameArrSchema) @@ -74,13 +74,13 @@ const reportDownloadJobSchema = `{ "properties": { "id": { "type": "string" }, "report_config_id": { "type": "string" }, - "creator": { "type": "string" }, - "create_date": { "type": "string" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string" }, "status": { "type": "string" }, "file_key": { "type": ["string", "null"] }, "file_expiry": { "type": ["string", "null"] }, "progress": { "type": "number" }, - "progress_update_date": { "type": 
"string" } + "progress_updated_at": { "type": "string" } } }` diff --git a/api/internal/handler/submittal.go b/api/internal/handler/submittal.go index c6156a32..d0cd9ac5 100644 --- a/api/internal/handler/submittal.go +++ b/api/internal/handler/submittal.go @@ -17,7 +17,7 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param missing query bool false "filter by missing projects only" -// @Success 200 {array} dto.Submittal +// @Success 200 {array} db.VSubmittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -51,7 +51,7 @@ func (h *ApiHandler) ListProjectSubmittals(c echo.Context) error { // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param missing query bool false "filter by missing projects only" -// @Success 200 {array} dto.Submittal +// @Success 200 {array} db.VSubmittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -84,7 +84,7 @@ func (h *ApiHandler) ListInstrumentSubmittals(c echo.Context) error { // @Tags submittal // @Produce json // @Param alert_config_id path string true "alert config uuid" Format(uuid) -// @Success 200 {array} dto.Submittal +// @Success 200 {array} db.VSubmittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/submittal_test.go b/api/internal/handler/submittal_test.go index b8c0d58a..03d38072 100644 --- a/api/internal/handler/submittal_test.go +++ b/api/internal/handler/submittal_test.go @@ -20,9 +20,9 @@ const submittalSchema = `{ "project_id": { "type": "string" }, "submittal_status_id": { "type": "string" }, "submittal_status_name": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "due_date": { "type": "string", "format": "date-time" }, - 
"completion_date": { "type": ["string", "null"], "format": "date-time" }, + "created_at": { "type": "string", "format": "date-time" }, + "due_at": { "type": "string", "format": "date-time" }, + "completed_at": { "type": ["string", "null"], "format": "date-time" }, "marked_as_missing": { "type": "boolean" }, "warning_sent": { "type": "boolean" } }, diff --git a/api/internal/handler/timeseries.go b/api/internal/handler/timeseries.go index ff01abbb..a5481243 100644 --- a/api/internal/handler/timeseries.go +++ b/api/internal/handler/timeseries.go @@ -1,6 +1,7 @@ package handler import ( + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" @@ -17,7 +18,7 @@ import ( // @Produce json // @Param timeseries_id path string true "timeseries uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {object} dto.Timeseries +// @Success 200 {object} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -42,7 +43,7 @@ func (h *ApiHandler) GetTimeseries(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.Timeseries +// @Success 200 {array} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -65,7 +66,7 @@ func (h *ApiHandler) ListInstrumentTimeseries(c echo.Context) error { // @Tags timeseries // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {array} dto.Timeseries +// @Success 200 {array} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ 
-88,7 +89,7 @@ func (h *ApiHandler) ListInstrumentGroupTimeseries(c echo.Context) error { // @Tags timeseries // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} dto.Timeseries +// @Success 200 {array} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -126,7 +127,7 @@ func (h *ApiHandler) CreateTimeseries(c echo.Context) error { if err := h.DBService.TimeseriesCreateBatch(c.Request().Context(), tc.Items); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, map[string]interface{}{}) + return c.NoContent(http.StatusCreated) } // UpdateTimeseries godoc @@ -137,7 +138,7 @@ func (h *ApiHandler) CreateTimeseries(c echo.Context) error { // @Param timeseries_id path string true "timeseries uuid" Format(uuid) // @Param timeseries body dto.Timeseries true "timeseries payload" // @Param key query string false "api key" -// @Success 200 {object} map[string]uuid.UUID +// @Success 200 {object} dto.Timeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -180,5 +181,5 @@ func (h *ApiHandler) DeleteTimeseries(c echo.Context) error { if err := h.DBService.TimeseriesDelete(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, make(map[string]interface{})) + return c.NoContent(http.StatusOK) } diff --git a/api/internal/handler/timeseries_calculated.go b/api/internal/handler/timeseries_calculated.go index 15abe0a1..201ef948 100644 --- a/api/internal/handler/timeseries_calculated.go +++ b/api/internal/handler/timeseries_calculated.go @@ -6,6 +6,7 @@ import ( "github.com/google/uuid" "github.com/labstack/echo/v4" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" 
"github.com/USACE/instrumentation-api/api/internal/httperr" ) @@ -15,7 +16,7 @@ import ( // @Summary lists calculations associated with an instrument // @Tags formula // @Produce json -// @Success 200 {array} dto.CalculatedTimeseries +// @Success 200 {array} db.TimeseriesComputedListForInstrumentRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -42,7 +43,7 @@ func (h *ApiHandler) GetInstrumentCalculations(c echo.Context) error { // @Tags formula // @Produce json // @Param key query string false "api key" -// @Success 200 {object} map[string]interface{} +// @Success 201 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -58,7 +59,7 @@ func (h *ApiHandler) CreateCalculation(c echo.Context) error { formula.FormulaName = formula.Formula } - if err := h.DBService.TimeseriesComputedCreateOrUpdate(c.Request().Context(), formula); err != nil { + if err := h.DBService.TimeseriesComputedCreate(c.Request().Context(), formula); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"id": formula.ID}) @@ -93,7 +94,7 @@ func (h *ApiHandler) UpdateCalculation(c echo.Context) error { formula.FormulaName = formula.Formula } - if err := h.DBService.TimeseriesComputedCreateOrUpdate(c.Request().Context(), formula); err != nil { + if err := h.DBService.TimeseriesComputedUpdate(c.Request().Context(), formula); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, formula) diff --git a/api/internal/handler/timeseries_cwms.go b/api/internal/handler/timeseries_cwms.go index 1291f58e..148b5f03 100644 --- a/api/internal/handler/timeseries_cwms.go +++ b/api/internal/handler/timeseries_cwms.go @@ -3,6 +3,7 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" 
"github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" @@ -16,7 +17,7 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} dto.TimeseriesCwms +// @Success 200 {array} db.VTimeseriesCwms // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -47,7 +48,7 @@ func (h *ApiHandler) ListTimeseriesCwms(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param timeseries_cwms_arr body []dto.TimeseriesCwms true "array of cwms timeseries to create" -// @Success 200 {object} map[string]interface{} +// @Success 201 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -83,7 +84,7 @@ func (h *ApiHandler) CreateTimeseriesCwms(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param timeseries_id path string true "timeseries uuid" Format(uuid) // @Param timeseries_cwms body dto.TimeseriesCwms true "cwms timeseries to update" -// @Success 200 {array} dto.TimeseriesCwms +// @Success 200 {array} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/timeseries_cwms_test.go b/api/internal/handler/timeseries_cwms_test.go index fcf3690e..64ff47fb 100644 --- a/api/internal/handler/timeseries_cwms_test.go +++ b/api/internal/handler/timeseries_cwms_test.go @@ -99,7 +99,6 @@ func TestTimeseriesCwms(t *testing.T) { Method: http.MethodPost, Body: createTimeseriesCwmsArrayBody, ExpectedStatus: http.StatusCreated, - 
ExpectedSchema: arrSchema, }, { Name: "UpdateTimeseries", diff --git a/api/internal/handler/timeseries_process.go b/api/internal/handler/timeseries_process.go index fff15114..d6c937ae 100644 --- a/api/internal/handler/timeseries_process.go +++ b/api/internal/handler/timeseries_process.go @@ -21,7 +21,7 @@ const ( explorerRequest ) -// ListTimeseriesMeasurementsByTimeseries godoc +// ListTimeseriesMeasurementsForTimeseries godoc // // @Summary lists timeseries by timeseries uuid // @Tags timeseries @@ -31,13 +31,13 @@ const ( // @Param after query string false "after time" Format(date-time) // @param before query string false "before time" Format(date-time) // @Param threshold query number false "downsample threshold" -// @Success 200 {object} dto.MeasurementCollection +// @Success 200 {array} db.MeasurementCollection // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /timeseries/{timeseries_id}/measurements [get] // @Router /instruments/{instrument_id}/timeseries/{timeseries_id}/measurements [get] -func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) error { +func (h *ApiHandler) ListTimeseriesMeasurementsForTimeseries(c echo.Context) error { tsID, err := uuid.Parse(c.Param("timeseries_id")) if err != nil { return httperr.MalformedID(err) @@ -66,15 +66,16 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) erro threshold = tr } - resBody, err := h.DBService.TimeseriesMeasurementListRange(c.Request().Context(), db.TimeseriesMeasurementListRangeParams{ + mc, err := h.DBService.TimeseriesMeasurementCollectionGetForRange(c.Request().Context(), db.TimeseriesMeasurementCollectionGetForRangeParams{ TimeseriesID: tsID, - AfterTime: tw.After, - BeforeTime: tw.Before, - }, threshold) + After: tw.After, + Before: tw.Before, + Threshold: threshold, + }) if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, resBody) 
+ return c.JSON(http.StatusOK, mc) } f := db.ProcessMeasurementFilter{TimeseriesID: &tsID} @@ -92,7 +93,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) erro // @Param after query string false "after time" Format(date-time) // @Param before query string false "before time" Format(date-time) // @Param threshold query number false "downsample threshold" -// @Success 200 {object} dto.MeasurementCollection +// @Success 200 {object} map[uuid.UUID][]db.MeasurementCollectionLean // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -114,7 +115,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrument(c echo.Context) erro // @Tags timeseries // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {object} dto.MeasurementCollection +// @Success 200 {object} map[uuid.UUID][]db.MeasurementCollectionLean // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -137,7 +138,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrumentGroup(c echo.Context) // @Accept json // @Produce json // @Param instrument_ids body []uuid.UUID true "array of instrument uuids" -// @Success 200 {array} map[uuid.UUID]dto.MeasurementCollectionLean +// @Success 200 {array} map[uuid.UUID][]db.MeasurementCollectionLean // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/timeseries_test.go b/api/internal/handler/timeseries_test.go index 3b4bb791..7d3bbca9 100644 --- a/api/internal/handler/timeseries_test.go +++ b/api/internal/handler/timeseries_test.go @@ -26,7 +26,7 @@ const timeseriesSchema = `{ "is_computed": { "type": "boolean" }, "type": { "type": "string" } }, - "required": ["id", "slug", "name", "variable", "instrument_id", "parameter_id", "unit_id", 
"is_computed", "type"], + "required": ["id", "slug", "name", "instrument_id", "parameter_id", "unit_id", "type"], "additionalProperties": false }` @@ -113,7 +113,6 @@ func TestTimeseries(t *testing.T) { Method: http.MethodPost, Body: createTimeseriesObjectBody, ExpectedStatus: http.StatusCreated, - ExpectedSchema: arrSchema, }, { Name: "CreateTimeseries_Array", @@ -121,7 +120,6 @@ func TestTimeseries(t *testing.T) { Method: http.MethodPost, Body: createTimeseriesArrayBody, ExpectedStatus: http.StatusCreated, - ExpectedSchema: arrSchema, }, { Name: "UpdateTimeseries", diff --git a/api/internal/handler/unit.go b/api/internal/handler/unit.go index ddd3e0f4..f2ca57b0 100644 --- a/api/internal/handler/unit.go +++ b/api/internal/handler/unit.go @@ -3,6 +3,7 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/labstack/echo/v4" ) @@ -12,7 +13,7 @@ import ( // @Summary lists the available units // @Tags unit // @Produce json -// @Success 200 {array} dto.Unit +// @Success 200 {array} db.VUnit // @Failure 400 {object} echo.HTTPError // @Router /units [get] func (h *ApiHandler) ListUnits(c echo.Context) error { diff --git a/api/internal/handler/uploader.go b/api/internal/handler/uploader.go index da163b3f..0453f939 100644 --- a/api/internal/handler/uploader.go +++ b/api/internal/handler/uploader.go @@ -4,6 +4,8 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" @@ -16,7 +18,7 @@ import ( // @Tags uploader // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} dto.UploaderConfig +// @Success 200 {array} db.VUploaderConfig // @Failure 400 {object} echo.HTTPError // @Router 
/projects/{project_id}/uploader_configs [get] func (h *ApiHandler) ListUploaderConfigsForProject(c echo.Context) error { @@ -38,7 +40,7 @@ func (h *ApiHandler) ListUploaderConfigsForProject(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param uploader_config_id path string true "uploader config uuid" Format(uuid) -// @Success 200 {array} dto.UploaderConfigMapping +// @Success 200 {array} db.UploaderConfigMapping // @Failure 400 {object} echo.HTTPError // @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [get] func (h *ApiHandler) ListUploaderConfigMappings(c echo.Context) error { @@ -77,10 +79,10 @@ func (h *ApiHandler) CreateUploaderConfig(c echo.Context) error { return httperr.MalformedBody(err) } - profile := c.Get("profile").(dto.Profile) + profile := c.Get("profile").(db.VProfile) - uc.CreatorID = profile.ID - uc.CreateDate = time.Now() + uc.CreatedBy = profile.ID + uc.CreatedAt = time.Now() uc.ProjectID = projectID newID, err := h.DBService.UploaderConfigCreate(c.Request().Context(), uc) @@ -115,11 +117,11 @@ func (h *ApiHandler) UpdateUploaderConfig(c echo.Context) error { return httperr.MalformedBody(err) } - profile := c.Get("profile").(dto.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - uc.UpdaterID = &profile.ID - uc.UpdateDate = &t + uc.UpdatedBy = &profile.ID + uc.UpdatedAt = &t uc.ProjectID = projectID uc.ID = ucID diff --git a/api/internal/middleware/audit.go b/api/internal/middleware/audit.go index 7642f497..8d85e117 100644 --- a/api/internal/middleware/audit.go +++ b/api/internal/middleware/audit.go @@ -150,7 +150,7 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { // IsApplicationAdmin checks that a profile is an application admin func (m *mw) IsApplicationAdmin(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(dto.Profile) + p, ok := 
c.Get("profile").(db.VProfile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } @@ -165,7 +165,7 @@ func (m *mw) IsApplicationAdmin(next echo.HandlerFunc) echo.HandlerFunc { // ApplicationAdmin has automatic member/admin status for all projects func (m *mw) IsProjectAdmin(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(dto.Profile) + p, ok := c.Get("profile").(db.VProfile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } @@ -191,7 +191,7 @@ func (m *mw) IsProjectAdmin(next echo.HandlerFunc) echo.HandlerFunc { // ApplicationAdmin has automatic member/admin status for all projects func (m *mw) IsProjectMember(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(dto.Profile) + p, ok := c.Get("profile").(db.VProfile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } diff --git a/api/internal/server/api.go b/api/internal/server/api.go index 654c323c..910de006 100644 --- a/api/internal/server/api.go +++ b/api/internal/server/api.go @@ -91,7 +91,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // Alert r.public.GET("/projects/:project_id/instruments/:instrument_id/alerts", h.ListAlertsForInstrument) - r.private.GET("/my_alerts", h.ListMyAlerts) + r.private.GET("/my_alerts", h.ListAlertsForProfile) r.private.POST("/my_alerts/:alert_id/read", h.DoAlertRead) r.private.POST("/my_alerts/:alert_id/unread", h.DoAlertUnread) @@ -174,7 +174,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.public.GET("/home", h.GetHome) // Instrument - r.public.GET("/instruments", h.ListInstruments) r.public.GET("/instruments/count", h.GetInstrumentCount) r.public.GET("/instruments/:instrument_id", h.GetInstrument) r.public.GET("/instruments/:instrument_id/timeseries_measurements", h.ListTimeseriesMeasurementsByInstrument) @@ -242,9 
+241,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // Media r.public.GET("/projects/:project_slug/images/*", h.GetMedia) - // Opendcs - r.public.GET("/opendcs/sites", h.ListOpendcsSites) - // PlotConfig r.public.GET("/projects/:project_id/plot_configs", h.ListPlotConfigs) r.public.GET("/projects/:project_id/plot_configs/:plot_configuration_id", h.GetPlotConfig) @@ -336,8 +332,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // CalculatedTimeseries r.public.GET("/formulas", h.GetInstrumentCalculations) r.private.POST("/formulas", h.CreateCalculation) - // TODO: This PUT should really be a PATCH to conform to the REST spec - // Will need to coordinate this with the web client r.private.PUT("/formulas/:formula_id", h.UpdateCalculation) r.private.DELETE("/formulas/:formula_id", h.DeleteCalculation) @@ -347,8 +341,8 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.PUT("/projects/:project_id/instruments/:instrument_id/timeseries/cwms/:timeseries_id", h.UpdateTimeseriesCwms) // ProcessTimeseries - r.public.GET("/timeseries/:timeseries_id/measurements", h.ListTimeseriesMeasurementsByTimeseries) - r.public.GET("/instruments/:instrument_id/timeseries/:timeseries_id/measurements", h.ListTimeseriesMeasurementsByTimeseries) + r.public.GET("/timeseries/:timeseries_id/measurements", h.ListTimeseriesMeasurementsForTimeseries) + r.public.GET("/instruments/:instrument_id/timeseries/:timeseries_id/measurements", h.ListTimeseriesMeasurementsForTimeseries) // Unit r.public.GET("/units", h.ListUnits) diff --git a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index e699ac20..d5cb9e7d 100644 --- a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -32,7 +32,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Submittal" + "$ref" : "#/components/schemas/db.VSubmittal" }, "type" : "array" } @@ -167,7 +167,7 @@ "content" : { 
"application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertSubscription" + "$ref" : "#/components/schemas/dto.AlertSubscription" } } }, @@ -180,7 +180,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertSubscription" + "$ref" : "#/components/schemas/db.AlertProfileSubscription" }, "type" : "array" } @@ -235,7 +235,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AwarePlatformParameterConfig" + "$ref" : "#/components/schemas/service.AwarePlatformParameterConfig" }, "type" : "array" } @@ -286,7 +286,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AwareParameter" + "$ref" : "#/components/schemas/db.AwareParameterListRow" }, "type" : "array" } @@ -343,7 +343,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/dto.Datalogger" } } }, @@ -355,10 +355,7 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/DataloggerWithKey" - }, - "type" : "array" + "$ref" : "#/components/schemas/service.DataloggerWithKey" } } }, @@ -494,7 +491,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/db.VDatalogger" } } }, @@ -559,7 +556,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/dto.Datalogger" } } }, @@ -571,7 +568,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/db.VDatalogger" } } }, @@ -639,7 +636,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/dto.EquivalencyTable" } } }, @@ -651,7 +648,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : 
"#/components/schemas/db.VDataloggerEquivalencyTable" } } }, @@ -720,7 +717,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/DataloggerWithKey" + "$ref" : "#/components/schemas/service.DataloggerWithKey" } } }, @@ -874,7 +871,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/db.VDataloggerEquivalencyTable" }, "type" : "array" } @@ -950,7 +947,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/dto.EquivalencyTable" } } }, @@ -962,7 +959,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/db.VDataloggerEquivalencyTable" } } }, @@ -1037,7 +1034,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/dto.EquivalencyTable" } } }, @@ -1049,7 +1046,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/db.VDataloggerEquivalencyTable" } } }, @@ -1214,7 +1211,8 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/DataloggerTablePreview" + "additionalProperties" : true, + "type" : "object" } } }, @@ -1291,7 +1289,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/DataloggerTablePreview" + "$ref" : "#/components/schemas/db.VDataloggerPreview" } } }, @@ -1351,7 +1349,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/db.VDatalogger" }, "type" : "array" } @@ -1405,7 +1403,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/District" + "$ref" : "#/components/schemas/db.VDistrict" }, "type" : "array" } @@ -1456,7 +1454,7 @@ "application/json" : { "schema" : { 
"items" : { - "$ref" : "#/components/schemas/TimezoneOption" + "$ref" : "#/components/schemas/db.PgTimezoneNamesListRow" }, "type" : "array" } @@ -1506,7 +1504,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/DomainMap" + "$ref" : "#/components/schemas/service.DomainMap" } } }, @@ -1564,7 +1562,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/EmailAutocompleteResult" + "$ref" : "#/components/schemas/db.EmailAutocompleteListRow" }, "type" : "array" } @@ -1630,7 +1628,10 @@ "schema" : { "items" : { "additionalProperties" : { - "$ref" : "#/components/schemas/MeasurementCollectionLean" + "items" : { + "$ref" : "#/components/schemas/db.MeasurementCollectionLean" + }, + "type" : "array" }, "type" : "object" }, @@ -1684,7 +1685,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/CalculatedTimeseries" + "$ref" : "#/components/schemas/db.TimeseriesComputedListForInstrumentRow" }, "type" : "array" } @@ -1736,7 +1737,7 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { @@ -1745,7 +1746,7 @@ } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -1877,7 +1878,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/CalculatedTimeseries" + "$ref" : "#/components/schemas/dto.CalculatedTimeseries" }, "type" : "array" } @@ -1930,11 +1931,7 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "additionalProperties" : true, - "type" : "object" - }, - "type" : "array" + "$ref" : "#/components/schemas/service.Healthcheck" } } }, @@ -1957,15 +1954,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Heartbeat" + "$ref" : "#/components/schemas/service.Heartbeat" } } }, - "description" : "OK" + "description" : "Created" } }, "summary" : "creates a heartbeat entry at regular 
intervals", @@ -1979,7 +1976,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Heartbeat" + "$ref" : "#/components/schemas/service.Heartbeat" } } }, @@ -1998,7 +1995,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Heartbeat" + "$ref" : "#/components/schemas/service.Heartbeat" }, "type" : "array" } @@ -2018,7 +2015,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Home" + "$ref" : "#/components/schemas/db.HomeGetRow" } } }, @@ -2039,75 +2036,6 @@ "tags" : [ "home" ] } }, - "/inclinometer_explorer" : { - "post" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "type" : "string" - }, - "type" : "array" - } - } - }, - "description" : "array of inclinometer instrument uuids", - "required" : true - }, - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "additionalProperties" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollectionLean" - }, - "type" : "object" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "list inclinometer timeseries measurements for explorer page", - "tags" : [ "explorer" ], - "x-codegen-request-body-name" : "instrument_ids" - } - }, "/instrument_groups" : { "get" : { "responses" : { @@ -2116,7 +2044,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : 
"#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/db.VInstrumentGroup" }, "type" : "array" } @@ -2171,7 +2099,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/dto.InstrumentGroup" } } }, @@ -2183,7 +2111,10 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "items" : { + "$ref" : "#/components/schemas/db.InstrumentGroup" + }, + "type" : "array" } } }, @@ -2252,10 +2183,8 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/InstrumentGroup" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, @@ -2314,7 +2243,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/db.VInstrumentGroup" } } }, @@ -2376,7 +2305,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/dto.InstrumentGroup" } } }, @@ -2388,7 +2317,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/db.InstrumentGroup" } } }, @@ -2451,7 +2380,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/db.VInstrument" }, "type" : "array" } @@ -2657,7 +2586,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" }, "type" : "array" } @@ -2717,7 +2646,13 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "additionalProperties" : { + "items" : { + "$ref" : "#/components/schemas/db.MeasurementCollectionLean" + }, + "type" : "array" + }, + "type" : "object" } } }, @@ -2758,57 +2693,6 @@ "tags" : [ "timeseries" ] } }, - "/instruments" : { 
- "get" : { - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/Instrument" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "lists all instruments", - "tags" : [ "instrument" ] - } - }, "/instruments/count" : { "get" : { "responses" : { @@ -2816,7 +2700,8 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentCount" + "additionalProperties" : true, + "type" : "object" } } }, @@ -2857,7 +2742,7 @@ "tags" : [ "instrument" ] } }, - "/instruments/ipi/{instrument_id}/measurements" : { + "/instruments/incl/{instrument_id}/measurements" : { "get" : { "parameters" : [ { "description" : "instrument uuid", @@ -2892,7 +2777,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IpiMeasurements" + "$ref" : "#/components/schemas/db.VInclMeasurement" }, "type" : "array" } @@ -2932,10 +2817,10 @@ } }, "summary" : "creates instrument notes", - "tags" : [ "instrument-ipi" ] + "tags" : [ "instrument-incl" ] } }, - "/instruments/ipi/{instrument_id}/segments" : { + "/instruments/incl/{instrument_id}/segments" : { "get" : { "parameters" : [ { "description" : "instrument uuid", @@ -2953,7 +2838,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IpiSegment" + "$ref" : "#/components/schemas/db.VInclSegment" }, "type" : "array" } @@ -2992,8 +2877,8 @@ "description" : 
"Internal Server Error" } }, - "summary" : "gets all ipi segments for an instrument", - "tags" : [ "instrument-ipi" ] + "summary" : "gets all incl segments for an instrument", + "tags" : [ "instrument-incl" ] }, "put" : { "parameters" : [ { @@ -3018,13 +2903,13 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IpiSegment" + "$ref" : "#/components/schemas/dto.InclSegment" }, "type" : "array" } } }, - "description" : "ipi instrument segments payload", + "description" : "incl instrument segments payload", "required" : true }, "responses" : { @@ -3033,7 +2918,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IpiSegment" + "$ref" : "#/components/schemas/dto.InclSegment" }, "type" : "array" } @@ -3075,20 +2960,47 @@ "security" : [ { "Bearer" : [ ] } ], - "summary" : "updates multiple segments for an ipi instrument", - "tags" : [ "instrument-ipi" ], + "summary" : "updates multiple segments for an incl instrument", + "tags" : [ "instrument-incl" ], "x-codegen-request-body-name" : "instrument_segments" } }, - "/instruments/notes" : { + "/instruments/ipi/{instrument_id}/measurements" : { "get" : { + "parameters" : [ { + "description" : "instrument uuid", + "in" : "path", + "name" : "instrument_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "after time", + "in" : "query", + "name" : "after", + "schema" : { + "format" : "date-time", + "type" : "string" + } + }, { + "description" : "before time", + "in" : "query", + "name" : "before", + "required" : true, + "schema" : { + "format" : "date-time", + "type" : "string" + } + } ], "responses" : { "200" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/db.VIpiMeasurement" }, "type" : "array" } @@ -3127,36 +3039,29 @@ "description" : "Internal Server Error" } }, - "summary" : "gets all instrument notes", - "tags" : [ 
"instrument-note" ] - }, - "post" : { + "summary" : "creates instrument notes", + "tags" : [ "instrument-ipi" ] + } + }, + "/instruments/ipi/{instrument_id}/segments" : { + "get" : { "parameters" : [ { - "description" : "api key", - "in" : "query", - "name" : "key", + "description" : "instrument uuid", + "in" : "path", + "name" : "instrument_id", + "required" : true, "schema" : { + "format" : "uuid", "type" : "string" } } ], - "requestBody" : { - "content" : { - "*/*" : { - "schema" : { - "$ref" : "#/components/schemas/InstrumentNoteCollection" - } - } - }, - "description" : "instrument note collection payload", - "required" : true - }, "responses" : { "200" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/db.VIpiSegment" }, "type" : "array" } @@ -3195,32 +3100,50 @@ "description" : "Internal Server Error" } }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "creates instrument notes", - "tags" : [ "instrument-note" ], - "x-codegen-request-body-name" : "instrument_note" - } - }, - "/instruments/notes/{note_id}" : { - "get" : { + "summary" : "gets all ipi segments for an instrument", + "tags" : [ "instrument-ipi" ] + }, + "put" : { "parameters" : [ { - "description" : "note uuid", + "description" : "instrument uuid", "in" : "path", - "name" : "note_id", + "name" : "instrument_id", "required" : true, "schema" : { "format" : "uuid", "type" : "string" } + }, { + "description" : "api key", + "in" : "query", + "name" : "key", + "schema" : { + "type" : "string" + } } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/dto.IpiSegment" + }, + "type" : "array" + } + } + }, + "description" : "ipi instrument segments payload", + "required" : true + }, "responses" : { "200" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentNote" + "items" : { + "$ref" : 
"#/components/schemas/dto.IpiSegment" + }, + "type" : "array" } } }, @@ -3257,12 +3180,148 @@ "description" : "Internal Server Error" } }, - "summary" : "gets a single instrument note by id", - "tags" : [ "instrument-note" ] - }, - "put" : { - "parameters" : [ { - "description" : "note uuid", + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "updates multiple segments for an ipi instrument", + "tags" : [ "instrument-ipi" ], + "x-codegen-request-body-name" : "instrument_segments" + } + }, + "/instruments/notes" : { + "post" : { + "parameters" : [ { + "description" : "api key", + "in" : "query", + "name" : "key", + "schema" : { + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "$ref" : "#/components/schemas/dto.InstrumentNoteCollection" + } + } + }, + "description" : "instrument note collection payload", + "required" : true + }, + "responses" : { + "201" : { + "content" : { + "application/json" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/db.InstrumentNote" + }, + "type" : "array" + } + } + }, + "description" : "Created" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "creates instrument notes", + "tags" : [ "instrument-note" ], + "x-codegen-request-body-name" : "instrument_note" + } + }, + "/instruments/notes/{note_id}" : { + "get" : { + "parameters" : [ { + "description" : "note uuid", + "in" : "path", + "name" : "note_id", + "required" : true, + "schema" : { + "format" : 
"uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/db.InstrumentNote" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "summary" : "gets a single instrument note by id", + "tags" : [ "instrument-note" ] + }, + "put" : { + "parameters" : [ { + "description" : "note uuid", "in" : "path", "name" : "note_id", "required" : true, @@ -3282,7 +3341,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/dto.InstrumentNote" } } }, @@ -3295,7 +3354,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.InstrumentNote" }, "type" : "array" } @@ -3377,7 +3436,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SaaMeasurements" + "$ref" : "#/components/schemas/db.VSaaMeasurement" }, "type" : "array" } @@ -3438,7 +3497,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SaaSegment" + "$ref" : "#/components/schemas/db.VSaaSegment" }, "type" : "array" } @@ -3503,7 +3562,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SaaSegment" + "$ref" : "#/components/schemas/dto.SaaSegment" }, "type" : "array" } @@ -3518,7 +3577,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SaaSegment" + "$ref" : 
"#/components/schemas/dto.SaaSegment" }, "type" : "array" } @@ -3582,7 +3641,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/db.VInstrument" } } }, @@ -3641,7 +3700,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/db.InstrumentNote" }, "type" : "array" } @@ -3780,7 +3839,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentStatus" + "$ref" : "#/components/schemas/db.VInstrumentStatus" }, "type" : "array" } @@ -3844,7 +3903,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentStatusCollection" + "$ref" : "#/components/schemas/dto.InstrumentStatusCollection" } } }, @@ -4005,7 +4064,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VInstrumentStatus" }, "type" : "array" } @@ -4073,7 +4132,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Submittal" + "$ref" : "#/components/schemas/db.VSubmittal" }, "type" : "array" } @@ -4142,7 +4201,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" } } }, @@ -4232,7 +4291,10 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "items" : { + "$ref" : "#/components/schemas/db.VTimeseriesMeasurement" + }, + "type" : "array" } } }, @@ -4313,7 +4375,13 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "additionalProperties" : { + "items" : { + "$ref" : "#/components/schemas/db.MeasurementCollectionLean" + }, + "type" : "array" + }, + "type" : "object" } } }, @@ -4370,7 +4438,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : 
"#/components/schemas/AlertSubscription" + "$ref" : "#/components/schemas/db.AlertProfileSubscription" }, "type" : "array" } @@ -4433,7 +4501,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Alert" + "$ref" : "#/components/schemas/db.AlertListForProfileRow" }, "type" : "array" } @@ -4500,15 +4568,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Alert" + "$ref" : "#/components/schemas/db.AlertGetRow" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -4573,7 +4641,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Alert" + "$ref" : "#/components/schemas/db.AlertGetRow" } } }, @@ -4624,7 +4692,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Profile" + "$ref" : "#/components/schemas/db.VProfile" } } }, @@ -4684,7 +4752,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VProject" }, "type" : "array" } @@ -4737,7 +4805,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Token" + "$ref" : "#/components/schemas/service.Token" } } }, @@ -4843,57 +4911,6 @@ "tags" : [ "profile" ] } }, - "/opendcs/sites" : { - "get" : { - "responses" : { - "200" : { - "content" : { - "text/xml" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/Site" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "text/xml" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "text/xml" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "text/xml" : { - "schema" : { - "$ref" : 
"#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "lists all instruments, represented as opendcs sites", - "tags" : [ "opendcs" ] - } - }, "/profiles" : { "post" : { "responses" : { @@ -4901,7 +4918,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Profile" + "$ref" : "#/components/schemas/db.ProfileCreateRow" } } }, @@ -4961,7 +4978,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VProject" }, "type" : "array" } @@ -5017,7 +5034,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/dto.Project" }, "type" : "array" } @@ -5027,18 +5044,18 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/db.ProjectCreateBatchRow" }, "type" : "array" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -5086,7 +5103,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ProjectCount" + "type" : "integer" } } }, @@ -5211,7 +5228,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VProject" } } }, @@ -5273,7 +5290,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/dto.Project" } } }, @@ -5285,7 +5302,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VProject" } } }, @@ -5348,7 +5365,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VAlertConfig" }, "type" : "array" } @@ -5412,7 +5429,7 @@ "content" : { 
"application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/dto.AlertConfig" } } }, @@ -5424,7 +5441,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VAlertConfig" } } }, @@ -5502,10 +5519,8 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/AlertConfig" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, @@ -5573,7 +5588,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VAlertConfig" } } }, @@ -5644,7 +5659,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/dto.AlertConfig" } } }, @@ -5656,10 +5671,7 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/AlertConfig" - }, - "type" : "array" + "$ref" : "#/components/schemas/db.VAlertConfig" } } }, @@ -5722,7 +5734,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.CollectionGroup" }, "type" : "array" } @@ -5787,7 +5799,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/CollectionGroup" + "$ref" : "#/components/schemas/dto.CollectionGroup" } } }, @@ -5800,7 +5812,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/CollectionGroup" + "$ref" : "#/components/schemas/db.CollectionGroup" }, "type" : "array" } @@ -5949,7 +5961,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/CollectionGroupDetails" + "$ref" : "#/components/schemas/db.VCollectionGroupDetail" } } }, @@ -6019,7 +6031,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/CollectionGroup" + "$ref" : 
"#/components/schemas/dto.CollectionGroup" } } }, @@ -6031,7 +6043,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/CollectionGroup" + "$ref" : "#/components/schemas/db.CollectionGroup" } } }, @@ -6199,7 +6211,7 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { @@ -6208,7 +6220,7 @@ } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -6351,7 +6363,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/DistrictRollup" + "$ref" : "#/components/schemas/db.VDistrictRollup" }, "type" : "array" } @@ -6412,7 +6424,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/DistrictRollup" + "$ref" : "#/components/schemas/db.VDistrictRollup" }, "type" : "array" } @@ -6473,7 +6485,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/db.VEvaluation" }, "type" : "array" } @@ -6537,7 +6549,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/dto.Evaluation" } } }, @@ -6545,15 +6557,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/db.VEvaluation" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -6628,7 +6640,8 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "additionalProperties" : true, + "type" : "object" }, "type" : "array" } @@ -6698,7 +6711,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/db.VEvaluation" } } }, @@ -6769,7 +6782,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : 
"#/components/schemas/dto.Evaluation" } } }, @@ -6781,7 +6794,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/db.VEvaluation" } } }, @@ -6895,8 +6908,8 @@ "tags" : [ "project" ] } }, - "/projects/{project_id}/inclinometer_measurements" : { - "post" : { + "/projects/{project_id}/instrument_groups" : { + "get" : { "parameters" : [ { "description" : "project uuid", "in" : "path", @@ -6906,32 +6919,14 @@ "format" : "uuid", "type" : "string" } - }, { - "description" : "api key", - "in" : "query", - "name" : "key", - "schema" : { - "type" : "string" - } } ], - "requestBody" : { - "content" : { - "*/*" : { - "schema" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollectionCollection" - } - } - }, - "description" : "inclinometer measurement collections", - "required" : true - }, "responses" : { "200" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollection" + "$ref" : "#/components/schemas/db.VInstrumentGroup" }, "type" : "array" } @@ -6970,15 +6965,11 @@ "description" : "Internal Server Error" } }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "creates or updates one or more inclinometer measurements", - "tags" : [ "measurement-inclinometer" ], - "x-codegen-request-body-name" : "timeseries_measurement_collections" + "summary" : "lists instrument groups associated with a project", + "tags" : [ "project" ] } }, - "/projects/{project_id}/instrument_groups" : { + "/projects/{project_id}/instruments" : { "get" : { "parameters" : [ { "description" : "project uuid", @@ -6996,7 +6987,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/db.VInstrument" }, "type" : "array" } @@ -7035,73 +7026,12 @@ "description" : "Internal Server Error" } }, - "summary" : "lists instrument groups associated with a project", + 
"summary" : "lists instruments associated with a project", "tags" : [ "project" ] - } - }, - "/projects/{project_id}/instruments" : { - "get" : { + }, + "post" : { "parameters" : [ { - "description" : "project uuid", - "in" : "path", - "name" : "project_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - } ], - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/Project" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "lists instruments associated with a project", - "tags" : [ "project" ] - }, - "post" : { - "parameters" : [ { - "description" : "project id", + "description" : "project id", "in" : "path", "name" : "project_id", "required" : true, @@ -7131,7 +7061,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/dto.Instrument" }, "type" : "array" } @@ -7141,18 +7071,18 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/db.InstrumentCreateBatchRow" }, "type" : "array" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -7225,7 +7155,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/ProjectInstrumentAssignments" + "$ref" : 
"#/components/schemas/dto.ProjectInstrumentAssignments" } } }, @@ -7237,7 +7167,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentsValidation" + "$ref" : "#/components/schemas/service.InstrumentsValidation" } } }, @@ -7390,7 +7320,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/dto.Instrument" } } }, @@ -7402,7 +7332,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/db.VInstrument" } } }, @@ -7474,7 +7404,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VAlertConfig" }, "type" : "array" } @@ -7555,15 +7485,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertSubscription" + "$ref" : "#/components/schemas/db.AlertProfileSubscription" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -7718,7 +7648,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Alert" + "$ref" : "#/components/schemas/db.VAlert" }, "type" : "array" } @@ -7803,7 +7733,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentsValidation" + "$ref" : "#/components/schemas/service.InstrumentsValidation" } } }, @@ -7875,15 +7805,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentsValidation" + "$ref" : "#/components/schemas/service.InstrumentsValidation" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -7962,7 +7892,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentProjectAssignments" + "$ref" : "#/components/schemas/dto.InstrumentProjectAssignments" } } }, 
@@ -7974,7 +7904,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentsValidation" + "$ref" : "#/components/schemas/service.InstrumentsValidation" } } }, @@ -8046,7 +7976,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" }, "type" : "array" } @@ -8119,7 +8049,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesCollectionItems" + "$ref" : "#/components/schemas/dto.TimeseriesCollectionItems" } } }, @@ -8132,7 +8062,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.TimeseriesCreateBatchRow" }, "type" : "array" } @@ -8293,7 +8223,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/dto.Evaluation" }, "type" : "array" } @@ -8368,7 +8298,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/dto.Instrument" } } }, @@ -8380,7 +8310,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/db.VInstrument" } } }, @@ -8452,7 +8382,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" }, "type" : "array" } @@ -8522,7 +8452,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/TimeseriesCwms" + "$ref" : "#/components/schemas/db.VTimeseriesCwm" }, "type" : "array" } @@ -8589,7 +8519,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/TimeseriesCwms" + "$ref" : "#/components/schemas/dto.TimeseriesCwms" }, "type" : "array" } @@ -8599,18 +8529,16 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "items" : 
{ - "$ref" : "#/components/schemas/TimeseriesCwms" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -8682,7 +8610,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesCwms" + "$ref" : "#/components/schemas/dto.TimeseriesCwms" } } }, @@ -8695,7 +8623,8 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/TimeseriesCwms" + "additionalProperties" : true, + "type" : "object" }, "type" : "array" } @@ -8764,7 +8693,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/ProjectMembership" + "$ref" : "#/components/schemas/db.ProfileProjectRoleListForProjectRow" }, "type" : "array" } @@ -8937,7 +8866,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ProjectMembership" + "type" : "string" } } }, @@ -8999,7 +8928,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" }, "type" : "array" } @@ -9065,7 +8994,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigBullseyePlot" + "$ref" : "#/components/schemas/dto.PlotConfigBullseyePlot" } } }, @@ -9073,15 +9002,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -9154,7 +9083,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigBullseyePlot" + "$ref" : "#/components/schemas/dto.PlotConfigBullseyePlot" } } }, @@ -9166,7 +9095,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } 
} }, @@ -9245,7 +9174,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/PlotConfigMeasurementBullseyePlot" + "$ref" : "#/components/schemas/db.PlotConfigMeasurementListBullseyeRow" }, "type" : "array" } @@ -9314,7 +9243,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigContourPlot" + "$ref" : "#/components/schemas/dto.PlotConfigContourPlot" } } }, @@ -9322,15 +9251,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -9403,7 +9332,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigContourPlot" + "$ref" : "#/components/schemas/dto.PlotConfigContourPlot" } } }, @@ -9415,7 +9344,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -9501,7 +9430,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AggregatePlotConfigMeasurementsContourPlot" + "$ref" : "#/components/schemas/service.AggregatePlotConfigMeasurementsContourPlot" } } }, @@ -9662,7 +9591,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigProfilePlot" + "$ref" : "#/components/schemas/dto.PlotConfigProfilePlot" } } }, @@ -9670,15 +9599,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -9751,7 +9680,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigProfilePlot" + "$ref" : 
"#/components/schemas/dto.PlotConfigProfilePlot" } } }, @@ -9763,7 +9692,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -9831,7 +9760,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLinePlot" } } }, @@ -9839,15 +9768,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -9920,7 +9849,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLinePlot" } } }, @@ -9932,7 +9861,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -10079,7 +10008,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -10138,7 +10067,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" }, "type" : "array" } @@ -10202,7 +10131,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLinePlot" } } }, @@ -10210,15 +10139,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { 
"content" : { @@ -10361,7 +10290,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -10432,7 +10361,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLinePlot" } } }, @@ -10444,7 +10373,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -10513,7 +10442,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfig" + "$ref" : "#/components/schemas/db.VReportConfig" } } }, @@ -10578,7 +10507,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfig" + "$ref" : "#/components/schemas/dto.ReportConfig" } } }, @@ -10590,7 +10519,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfig" + "$ref" : "#/components/schemas/db.VReportConfig" } } }, @@ -10743,7 +10672,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfig" + "$ref" : "#/components/schemas/dto.ReportConfig" } } }, @@ -10834,7 +10763,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportDownloadJob" + "$ref" : "#/components/schemas/db.ReportDownloadJob" } } }, @@ -10920,7 +10849,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportDownloadJob" + "$ref" : "#/components/schemas/db.ReportDownloadJob" } } }, @@ -11069,7 +10998,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Submittal" + "$ref" : "#/components/schemas/db.VSubmittal" }, "type" : "array" } @@ -11130,7 +11059,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : 
"#/components/schemas/db.VTimeseries" }, "type" : "array" } @@ -11196,7 +11125,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesMeasurementCollectionCollection" + "$ref" : "#/components/schemas/dto.TimeseriesMeasurementCollectionCollection" } } }, @@ -11208,10 +11137,8 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/MeasurementCollection" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, @@ -11293,7 +11220,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesMeasurementCollectionCollection" + "$ref" : "#/components/schemas/dto.TimeseriesMeasurementCollectionCollection" } } }, @@ -11306,7 +11233,8 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "additionalProperties" : true, + "type" : "object" }, "type" : "array" } @@ -11371,7 +11299,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/UploaderConfig" + "$ref" : "#/components/schemas/db.VUploaderConfig" }, "type" : "array" } @@ -11408,7 +11336,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/UploaderConfig" + "$ref" : "#/components/schemas/dto.UploaderConfig" } } }, @@ -11514,7 +11442,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/UploaderConfig" + "$ref" : "#/components/schemas/dto.UploaderConfig" } } }, @@ -11622,7 +11550,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/UploaderConfigMapping" + "$ref" : "#/components/schemas/db.UploaderConfigMapping" }, "type" : "array" } @@ -11669,7 +11597,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/UploaderConfigMapping" + "$ref" : "#/components/schemas/dto.UploaderConfigMapping" }, "type" : "array" } @@ -11730,7 +11658,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : 
"#/components/schemas/UploaderConfigMapping" + "$ref" : "#/components/schemas/dto.UploaderConfigMapping" }, "type" : "array" } @@ -11788,7 +11716,14 @@ } ], "responses" : { "200" : { - "content" : { }, + "content" : { + "image/jpeg" : { + "schema" : { + "format" : "binary", + "type" : "string" + } + } + }, "description" : "OK" }, "400" : { @@ -11851,7 +11786,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfigWithPlotConfigs" + "$ref" : "#/components/schemas/service.ReportConfigWithPlotConfigs" } } }, @@ -11916,7 +11851,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportDownloadJob" + "$ref" : "#/components/schemas/dto.ReportDownloadJob" } } }, @@ -11971,7 +11906,7 @@ "x-codegen-request-body-name" : "report_download_job" } }, - "/search/{entity}" : { + "/search/projects" : { "get" : { "parameters" : [ { "description" : "entity to search (i.e. projects, etc.)", @@ -11995,7 +11930,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SearchResult" + "$ref" : "#/components/schemas/db.VProject" }, "type" : "array" } @@ -12121,7 +12056,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesCollectionItems" + "$ref" : "#/components/schemas/dto.TimeseriesCollectionItems" } } }, @@ -12133,13 +12068,10 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "additionalProperties" : { - "type" : "string" - }, - "type" : "object" + "additionalProperties" : { + "type" : "string" }, - "type" : "array" + "type" : "object" } } }, @@ -12268,7 +12200,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" } } }, @@ -12330,7 +12262,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/dto.Timeseries" } } }, @@ -12342,10 +12274,7 @@ "content" : { "application/json" 
: { "schema" : { - "additionalProperties" : { - "type" : "string" - }, - "type" : "object" + "$ref" : "#/components/schemas/dto.Timeseries" } } }, @@ -12390,7 +12319,7 @@ "x-codegen-request-body-name" : "timeseries" } }, - "/timeseries/{timeseries_id}/inclinometer_measurements" : { + "/timeseries/{timeseries_id}/measurements" : { "delete" : { "parameters" : [ { "description" : "timeseries uuid", @@ -12464,8 +12393,8 @@ "security" : [ { "Bearer" : [ ] } ], - "summary" : "deletes a single inclinometer measurement by timestamp", - "tags" : [ "measurement-inclinometer" ] + "summary" : "deletes a single timeseries measurement by timestamp", + "tags" : [ "measurement" ] }, "get" : { "parameters" : [ { @@ -12478,7 +12407,7 @@ "type" : "string" } }, { - "description" : "after timestamp", + "description" : "after time", "in" : "query", "name" : "after", "schema" : { @@ -12486,20 +12415,30 @@ "type" : "string" } }, { - "description" : "before timestamp", + "description" : "before time", "in" : "query", "name" : "before", "schema" : { "format" : "date-time", "type" : "string" } + }, { + "description" : "downsample threshold", + "in" : "query", + "name" : "threshold", + "schema" : { + "type" : "number" + } } ], "responses" : { "200" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollection" + "items" : { + "$ref" : "#/components/schemas/db.VTimeseriesMeasurement" + }, + "type" : "array" } } }, @@ -12536,45 +12475,36 @@ "description" : "Internal Server Error" } }, - "summary" : "lists all measurements for an inclinometer", - "tags" : [ "measurement-inclinometer" ] + "summary" : "lists timeseries by timeseries uuid", + "tags" : [ "timeseries" ] } }, - "/timeseries/{timeseries_id}/measurements" : { - "delete" : { - "parameters" : [ { - "description" : "timeseries uuid", - "in" : "path", - "name" : "timeseries_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : 
"timestamp of measurement to delete", - "in" : "query", - "name" : "time", - "required" : true, - "schema" : { - "format" : "date-time", - "type" : "string" - } - }, { - "description" : "api key", - "in" : "query", - "name" : "key", - "schema" : { - "type" : "string" + "/timeseries_measurements" : { + "post" : { + "requestBody" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/_timeseries_measurements_post_request" + } + }, + "multipart/form-data" : { + "schema" : { + "$ref" : "#/components/schemas/_timeseries_measurements_post_request" + } + } } - } ], + }, "responses" : { "200" : { "content" : { "application/json" : { "schema" : { - "additionalProperties" : true, - "type" : "object" + "items" : { + "additionalProperties" : true, + "type" : "object" + }, + "type" : "array" } } }, @@ -12614,169 +12544,22 @@ "security" : [ { "Bearer" : [ ] } ], - "summary" : "deletes a single timeseries measurement by timestamp", - "tags" : [ "measurement" ] - }, + "summary" : "creates one or more timeseries measurements", + "tags" : [ "measurement" ], + "x-codegen-request-body-name" : "timeseries_measurement_collections" + } + }, + "/units" : { "get" : { - "parameters" : [ { - "description" : "timeseries uuid", - "in" : "path", - "name" : "timeseries_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : "after time", - "in" : "query", - "name" : "after", - "schema" : { - "format" : "date-time", - "type" : "string" - } - }, { - "description" : "before time", - "in" : "query", - "name" : "before", - "schema" : { - "format" : "date-time", - "type" : "string" - } - }, { - "description" : "downsample threshold", - "in" : "query", - "name" : "threshold", - "schema" : { - "type" : "number" - } - } ], "responses" : { "200" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/MeasurementCollection" - } - } - }, - "description" : "OK" - }, - "400" : { - 
"content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "lists timeseries by timeseries uuid", - "tags" : [ "timeseries" ] - } - }, - "/timeseries_measurements" : { - "post" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/_timeseries_measurements_post_request" - } - }, - "multipart/form-data" : { - "schema" : { - "$ref" : "#/components/schemas/_timeseries_measurements_post_request" - } - } - } - }, - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/MeasurementCollection" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "creates one or more timeseries measurements", - "tags" : [ "measurement" ], - "x-codegen-request-body-name" : "timeseries_measurement_collections" - } - }, - "/units" : { - "get" : { - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { 
- "$ref" : "#/components/schemas/Unit" - }, - "type" : "array" + "items" : { + "$ref" : "#/components/schemas/db.VUnit" + }, + "type" : "array" } } }, @@ -12808,65 +12591,1823 @@ }, "type" : "object" }, - "geojson.Geometry" : { + "db.AlertGetRow" : { "properties" : { - "coordinates" : { - "type" : "object" + "alert_config_id" : { + "type" : "string" + }, + "body" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" }, - "geometries" : { + "id" : { + "type" : "string" + }, + "instruments" : { "items" : { - "$ref" : "#/components/schemas/geojson.Geometry" + "$ref" : "#/components/schemas/db.InstrumentIDName" }, "type" : "array" }, - "type" : { + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { "type" : "string" + }, + "read" : { + "type" : "boolean" } }, "type" : "object" }, - "AggregatePlotConfigMeasurementsContourPlot" : { + "db.AlertListForProfileRow" : { "properties" : { - "x" : { - "items" : { - "type" : "number" - }, - "type" : "array" + "alert_config_id" : { + "type" : "string" }, - "y" : { - "items" : { - "type" : "number" - }, - "type" : "array" + "body" : { + "type" : "string" }, - "z" : { + "created_at" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instruments" : { "items" : { - "type" : "number" + "$ref" : "#/components/schemas/db.InstrumentIDName" }, "type" : "array" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "read" : { + "type" : "boolean" + } + }, + "type" : "object" + }, + "db.AlertProfileSubscription" : { + "properties" : { + "alert_config_id" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "mute_notify" : { + "type" : "boolean" + }, + "mute_ui" : { + "type" : "boolean" + }, + "profile_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.AwareParameterListRow" : { + "properties" : { + "id" : { + "type" : "string" + }, + "key" : { + "type" 
: "string" + }, + "parameter_id" : { + "type" : "string" + }, + "unit_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.CollectionGroup" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "sort_order" : { + "type" : "integer" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.CollectionGroupDetailsTimeseries" : { + "properties" : { + "id" : { + "type" : "string" + }, + "instrument" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "instrument_slug" : { + "type" : "string" + }, + "is_computed" : { + "type" : "boolean" + }, + "latest_time" : { + "type" : "string" + }, + "latest_value" : { + "type" : "number" + }, + "name" : { + "type" : "string" + }, + "parameter" : { + "type" : "string" + }, + "parameter_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "sort_order" : { + "type" : "integer" + }, + "type" : { + "$ref" : "#/components/schemas/db.TimeseriesType" + }, + "unit" : { + "type" : "string" + }, + "unit_id" : { + "type" : "string" + }, + "variable" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.DataloggerEquivalencyTableField" : { + "properties" : { + "display_name" : { + "type" : "string" + }, + "field_name" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.DataloggerTableIDName" : { + "properties" : { + "id" : { + "type" : "string" + }, + "table_name" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.DomainGroupOpt" : { + "properties" : { + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "value" : 
{ + "type" : "string" + } + }, + "type" : "object" + }, + "db.EmailAutocompleteListRow" : { + "properties" : { + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "user_type" : { + "type" : "string" + }, + "username" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.EmailAutocompleteResult" : { + "properties" : { + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "user_type" : { + "type" : "string" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.HomeGetRow" : { + "properties" : { + "instrument_count" : { + "type" : "integer" + }, + "instrument_group_count" : { + "type" : "integer" + }, + "new_instruments_7d" : { + "type" : "integer" + }, + "new_measurements_2h" : { + "type" : "integer" + }, + "project_count" : { + "type" : "integer" + } + }, + "type" : "object" + }, + "db.IDSlugName" : { + "properties" : { + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentCreateBatchRow" : { + "properties" : { + "id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentGroup" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "deleted" : { + "type" : "boolean" + }, + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentIDName" : { + "properties" : { + "instrument_id" : { + "type" : "string" + }, + "instrument_name" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentNote" : { + "properties" : { + "body" : { + "type" : "string" + }, + "created_at" : { + "type" : 
"string" + }, + "created_by" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "time" : { + "type" : "string" + }, + "title" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.IpiMeasurement" : { + "properties" : { + "cum_dev" : { + "type" : "number" + }, + "elevation" : { + "type" : "number" + }, + "inc_dev" : { + "type" : "number" + }, + "segment_id" : { + "type" : "integer" + }, + "temp" : { + "type" : "number" + }, + "tilt" : { + "type" : "number" + } + }, + "type" : "object" + }, + "db.JobStatus" : { + "enum" : [ "SUCCESS", "FAIL", "INIT" ], + "type" : "string", + "x-enum-varnames" : [ "JobStatusSUCCESS", "JobStatusFAIL", "JobStatusINIT" ] + }, + "db.MeasurementCollectionLean" : { + "properties" : { + "items" : { + "items" : { + "$ref" : "#/components/schemas/db.MeasurementLean" + }, + "type" : "array" + }, + "timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.MeasurementLean" : { + "additionalProperties" : { + "type" : "number" + }, + "type" : "object" + }, + "db.PgTimezoneNamesListRow" : { + "properties" : { + "abbrev" : { + "type" : "string" + }, + "is_dst" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "utc_offset" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.PlotConfigMeasurementListBullseyeRow" : { + "properties" : { + "time" : { + "type" : "string" + }, + "x" : { + "type" : "object" + }, + "y" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.PlotType" : { + "enum" : [ "scatter-line", "profile", "contour", "bullseye" ], + "type" : "string", + "x-enum-varnames" : [ "PlotTypeScatterLine", "PlotTypeProfile", "PlotTypeContour", "PlotTypeBullseye" ] + }, + "db.ProfileCreateRow" : { + "properties" : { + "display_name" : { + "type" : "string" + }, + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" 
+ }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.ProfileProjectRoleListForProjectRow" : { + "properties" : { + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "profile_id" : { + "type" : "string" + }, + "role" : { + "type" : "string" + }, + "role_id" : { + "type" : "string" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.ProjectCreateBatchRow" : { + "properties" : { + "id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.ReportConfigGlobalOverrides" : { + "properties" : { + "date_range" : { + "$ref" : "#/components/schemas/db.TextOption" + }, + "show_masked" : { + "$ref" : "#/components/schemas/db.ToggleOption" + }, + "show_nonvalidated" : { + "$ref" : "#/components/schemas/db.ToggleOption" + } + }, + "type" : "object" + }, + "db.ReportDownloadJob" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "file_expiry" : { + "type" : "string" + }, + "file_key" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "progress" : { + "type" : "integer" + }, + "progress_updated_at" : { + "type" : "string" + }, + "report_config_id" : { + "type" : "string" + }, + "status" : { + "$ref" : "#/components/schemas/db.JobStatus" + } + }, + "type" : "object" + }, + "db.SaaMeasurement" : { + "properties" : { + "elevation" : { + "type" : "number" + }, + "segment_id" : { + "type" : "integer" + }, + "temp" : { + "type" : "number" + }, + "temp_cum_dev" : { + "type" : "number" + }, + "temp_increment" : { + "type" : "number" + }, + "x" : { + "type" : "number" + }, + "x_cum_dev" : { + "type" : "number" + }, + "x_increment" : { + "type" : "number" + }, + "y" : { + "type" : "number" + }, + "y_cum_dev" : { + "type" : "number" + }, + "y_increment" : { + "type" : "number" + }, + "z" : { + "type" : "number" + }, + "z_cum_dev" : { + "type" : "number" + }, + "z_increment" : { + "type" 
: "number" + } + }, + "type" : "object" + }, + "db.TextOption" : { + "properties" : { + "enabled" : { + "type" : "boolean" + }, + "value" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.TimeseriesComputedListForInstrumentRow" : { + "properties" : { + "formula" : { + "type" : "string" + }, + "formula_name" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "parameter_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "unit_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.TimeseriesCreateBatchRow" : { + "properties" : { + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "parameter_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/db.TimeseriesType" + }, + "unit_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.TimeseriesType" : { + "enum" : [ "standard", "constant", "computed", "cwms" ], + "type" : "string", + "x-enum-varnames" : [ "TimeseriesTypeStandard", "TimeseriesTypeConstant", "TimeseriesTypeComputed", "TimeseriesTypeCwms" ] + }, + "db.ToggleOption" : { + "properties" : { + "enabled" : { + "type" : "boolean" + }, + "value" : { + "type" : "boolean" + } + }, + "type" : "object" + }, + "db.UploaderConfigMapping" : { + "properties" : { + "field_name" : { + "type" : "string" + }, + "timeseries_id" : { + "type" : "string" + }, + "uploader_config_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.UploaderConfigType" : { + "enum" : [ "csv", "dux", "toa5" ], + "type" : "string", + "x-enum-varnames" : [ "UploaderConfigTypeCsv", "UploaderConfigTypeDux", "UploaderConfigTypeToa5" ] + }, + "db.VAlert" : { + "properties" : { + "alert_config_id" : { + "type" : "string" + }, + "body" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" + }, + "id" : { + "type" : "string" + 
}, + "instruments" : { + "items" : { + "$ref" : "#/components/schemas/db.InstrumentIDName" + }, + "type" : "array" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VAlertConfig" : { + "properties" : { + "alert_email_subscriptions" : { + "items" : { + "$ref" : "#/components/schemas/db.EmailAutocompleteResult" + }, + "type" : "array" + }, + "alert_type" : { + "type" : "string" + }, + "alert_type_id" : { + "type" : "string" + }, + "body" : { + "type" : "string" + }, + "create_next_submittal_from" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instruments" : { + "items" : { + "$ref" : "#/components/schemas/db.InstrumentIDName" + }, + "type" : "array" + }, + "last_checked_at" : { + "type" : "string" + }, + "last_reminded_at" : { + "type" : "string" + }, + "mute_consecutive_alerts" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "remind_interval" : { + "type" : "string" + }, + "schedule_interval" : { + "type" : "string" + }, + "started_at" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + }, + "warning_interval" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VCollectionGroupDetail" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "sort_order" : { + "type" : "integer" + }, + "timeseries" : { + "items" : { + "$ref" : 
"#/components/schemas/db.CollectionGroupDetailsTimeseries" + }, + "type" : "array" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VDatalogger" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "id" : { + "type" : "string" + }, + "model" : { + "type" : "string" + }, + "model_id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "sn" : { + "type" : "string" + }, + "tables" : { + "items" : { + "$ref" : "#/components/schemas/db.DataloggerTableIDName" + }, + "type" : "array" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VDataloggerEquivalencyTable" : { + "properties" : { + "datalogger_id" : { + "type" : "string" + }, + "datalogger_table_id" : { + "type" : "string" + }, + "datalogger_table_name" : { + "type" : "string" + }, + "fields" : { + "items" : { + "$ref" : "#/components/schemas/db.DataloggerEquivalencyTableField" + }, + "type" : "array" + } + }, + "type" : "object" + }, + "db.VDataloggerPreview" : { + "properties" : { + "datalogger_table_id" : { + "type" : "string" + }, + "preview" : { + "items" : { + "type" : "integer" + }, + "type" : "array" + }, + "updated_at" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VDistrict" : { + "properties" : { + "agency" : { + "type" : "string" + }, + "division_initials" : { + "type" : "string" + }, + "division_name" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "initials" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "office_id" : { + "type" : "string" + } + }, + "type" 
: "object" + }, + "db.VDistrictRollup" : { + "properties" : { + "actual_total_submittals" : { + "type" : "integer" + }, + "alert_type_id" : { + "type" : "string" + }, + "district_initials" : { + "type" : "string" + }, + "expected_total_submittals" : { + "type" : "integer" + }, + "green_submittals" : { + "type" : "integer" + }, + "month" : { + "type" : "string" + }, + "office_id" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "red_submittals" : { + "type" : "integer" + }, + "yellow_submittals" : { + "type" : "integer" + } + }, + "type" : "object" + }, + "db.VDomain" : { + "properties" : { + "description" : { + "type" : "string" + }, + "group" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "value" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VEvaluation" : { + "properties" : { + "alert_config_id" : { + "type" : "string" + }, + "alert_config_name" : { + "type" : "string" + }, + "body" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "ended_at" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instruments" : { + "items" : { + "$ref" : "#/components/schemas/db.InstrumentIDName" + }, + "type" : "array" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "started_at" : { + "type" : "string" + }, + "submittal_id" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInclMeasurement" : { + "properties" : { + "instrument_id" : { + "type" : "string" + }, + "measurements" : { + "type" : "object" + }, + "time" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInclSegment" : { + 
"properties" : { + "a0_timeseries_id" : { + "type" : "string" + }, + "a180_timeseries_id" : { + "type" : "string" + }, + "b0_timeseries_id" : { + "type" : "string" + }, + "b180_timeseries_id" : { + "type" : "string" + }, + "depth_timeseries_id" : { + "type" : "string" + }, + "id" : { + "type" : "integer" + }, + "instrument_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInstrument" : { + "properties" : { + "alert_configs" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "constants" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "geometry" : { + "items" : { + "type" : "integer" + }, + "type" : "array" + }, + "groups" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "has_cwms" : { + "type" : "boolean" + }, + "icon" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "nid_id" : { + "type" : "string" + }, + "offset" : { + "type" : "integer" + }, + "opts" : { + "type" : "object" + }, + "projects" : { + "items" : { + "$ref" : "#/components/schemas/db.IDSlugName" + }, + "type" : "array" + }, + "show_cwms_tab" : { + "type" : "boolean" + }, + "slug" : { + "type" : "string" + }, + "station" : { + "type" : "integer" + }, + "status" : { + "type" : "string" + }, + "status_id" : { + "type" : "string" + }, + "status_time" : { + "type" : "string" + }, + "telemetry" : { + "items" : { + "$ref" : "#/components/schemas/db.IDSlugName" + }, + "type" : "array" + }, + "type" : { + "type" : "string" + }, + "type_id" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "usgs_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInstrumentGroup" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "description" : { + "type" : 
"string" + }, + "id" : { + "type" : "string" + }, + "instrument_count" : { + "type" : "integer" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "timeseries_count" : { + "type" : "object" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInstrumentStatus" : { + "properties" : { + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "status" : { + "type" : "string" + }, + "status_id" : { + "type" : "string" + }, + "time" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VIpiMeasurement" : { + "properties" : { + "instrument_id" : { + "type" : "string" + }, + "measurements" : { + "items" : { + "$ref" : "#/components/schemas/db.IpiMeasurement" + }, + "type" : "array" + }, + "time" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VIpiSegment" : { + "properties" : { + "id" : { + "type" : "integer" + }, + "inc_dev_timeseries_id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "length" : { + "type" : "number" + }, + "length_timeseries_id" : { + "type" : "string" + }, + "tilt_timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VPlotConfiguration" : { + "properties" : { + "auto_range" : { + "type" : "boolean" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "date_range" : { + "type" : "string" + }, + "display" : { + "type" : "object" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "plot_type" : { + "$ref" : "#/components/schemas/db.PlotType" + }, + "project_id" : { + "type" : "string" + }, + "report_configs" : { + "items" : { + "$ref" : "#/components/schemas/db.IDSlugName" + }, + "type" : "array" + }, + "show_comments" : { + "type" : "boolean" + }, + "show_masked" : { + "type" : "boolean" + }, + "show_nonvalidated" : { + "type" : 
"boolean" + }, + "slug" : { + "type" : "string" + }, + "threshold" : { + "type" : "integer" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VProfile" : { + "properties" : { + "display_name" : { + "type" : "string" + }, + "edipi" : { + "type" : "integer" + }, + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "is_admin" : { + "type" : "boolean" + }, + "roles" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "tokens" : { + "items" : { + "$ref" : "#/components/schemas/db.VProfileToken" + }, + "type" : "array" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VProfileToken" : { + "properties" : { + "issued" : { + "type" : "string" + }, + "token_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VProject" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "district_id" : { + "type" : "string" + }, + "federal_id" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "image" : { + "type" : "object" + }, + "instrument_count" : { + "type" : "integer" + }, + "instrument_group_count" : { + "type" : "integer" + }, + "name" : { + "type" : "string" + }, + "office_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VReportConfig" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "district_name" : { + "type" : "string" + }, + "global_overrides" : { + "$ref" : "#/components/schemas/db.ReportConfigGlobalOverrides" + }, + "id" : { + 
"type" : "string" + }, + "name" : { + "type" : "string" + }, + "plot_configs" : { + "items" : { + "$ref" : "#/components/schemas/db.IDSlugName" + }, + "type" : "array" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VSaaMeasurement" : { + "properties" : { + "instrument_id" : { + "type" : "string" + }, + "measurements" : { + "items" : { + "$ref" : "#/components/schemas/db.SaaMeasurement" + }, + "type" : "array" + }, + "time" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VSaaSegment" : { + "properties" : { + "id" : { + "type" : "integer" + }, + "instrument_id" : { + "type" : "string" + }, + "length" : { + "type" : "number" + }, + "length_timeseries_id" : { + "type" : "string" + }, + "temp_timeseries_id" : { + "type" : "string" + }, + "x_timeseries_id" : { + "type" : "string" + }, + "y_timeseries_id" : { + "type" : "string" + }, + "z_timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VSubmittal" : { + "properties" : { + "alert_config_id" : { + "type" : "string" + }, + "alert_config_name" : { + "type" : "string" + }, + "alert_type_id" : { + "type" : "string" + }, + "alert_type_name" : { + "type" : "string" + }, + "completed_at" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" + }, + "due_at" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "marked_as_missing" : { + "type" : "boolean" + }, + "project_id" : { + "type" : "string" + }, + "submittal_status_id" : { + "type" : "string" + }, + "submittal_status_name" : { + "type" : "string" + }, + "warning_sent" : { + "type" : "boolean" + } + }, + "type" : "object" + }, + "db.VTimeseries" : { + "properties" : { + "id" : { + "type" : "string" + }, + "instrument" : { + "type" : "string" + }, 
+ "instrument_id" : { + "type" : "string" + }, + "instrument_slug" : { + "type" : "string" + }, + "is_computed" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "parameter" : { + "type" : "string" + }, + "parameter_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/db.TimeseriesType" + }, + "unit" : { + "type" : "string" + }, + "unit_id" : { + "type" : "string" + }, + "variable" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.VTimeseriesCwm" : { + "properties" : { + "cwms_extent_earliest_time" : { + "type" : "string" + }, + "cwms_extent_latest_time" : { + "type" : "string" + }, + "cwms_office_id" : { + "type" : "string" + }, + "cwms_timeseries_id" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instrument" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "instrument_slug" : { + "type" : "string" + }, + "is_computed" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "parameter" : { + "type" : "string" + }, + "parameter_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/db.TimeseriesType" + }, + "unit" : { + "type" : "string" + }, + "unit_id" : { + "type" : "string" + }, + "variable" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.VTimeseriesMeasurement" : { + "properties" : { + "annotation" : { + "type" : "string" + }, + "masked" : { + "type" : "boolean" + }, + "time" : { + "type" : "string" + }, + "timeseries_id" : { + "type" : "string" + }, + "validated" : { + "type" : "boolean" + }, + "value" : { + "type" : "number" + } + }, + "type" : "object" + }, + "db.VUnit" : { + "properties" : { + "abbreviation" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "measure" : { + "type" : "string" + }, + "measure_id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "unit_family" : { + "type" 
: "string" + }, + "unit_family_id" : { + "type" : "string" } }, "type" : "object" }, - "Alert" : { + "db.VUploaderConfig" : { "properties" : { - "alert_config_id" : { + "comment_field" : { "type" : "string" }, - "body" : { + "comment_field_enabled" : { + "type" : "boolean" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { "type" : "string" }, - "create_date" : { + "description" : { "type" : "string" }, "id" : { "type" : "string" }, - "instruments" : { - "items" : { - "$ref" : "#/components/schemas/AlertConfigInstrument" - }, - "type" : "array" + "masked_field" : { + "type" : "string" + }, + "masked_field_enabled" : { + "type" : "boolean" }, "name" : { "type" : "string" @@ -12874,20 +14415,41 @@ "project_id" : { "type" : "string" }, - "project_name" : { + "slug" : { "type" : "string" }, - "read" : { + "time_field" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/db.UploaderConfigType" + }, + "tz_name" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + }, + "validated_field" : { + "type" : "string" + }, + "validated_field_enabled" : { "type" : "boolean" } }, "type" : "object" }, - "AlertConfig" : { + "dto.AlertConfig" : { "properties" : { "alert_email_subscriptions" : { "items" : { - "$ref" : "#/components/schemas/EmailAutocompleteResult" + "$ref" : "#/components/schemas/dto.EmailAutocompleteResult" }, "type" : "array" }, @@ -12900,13 +14462,13 @@ "body" : { "type" : "string" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "id" : { @@ -12914,7 +14476,7 @@ }, "instruments" : { "items" : { - "$ref" : "#/components/schemas/AlertConfigInstrument" + "$ref" : "#/components/schemas/dto.AlertConfigInstrument" }, "type" : 
"array" }, @@ -12942,16 +14504,16 @@ "schedule_interval" : { "type" : "string" }, - "start_date" : { + "started_at" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" }, "warning_interval" : { @@ -12960,7 +14522,7 @@ }, "type" : "object" }, - "AlertConfigInstrument" : { + "dto.AlertConfigInstrument" : { "properties" : { "instrument_id" : { "type" : "string" @@ -12971,7 +14533,7 @@ }, "type" : "object" }, - "AlertSubscription" : { + "dto.AlertSubscription" : { "properties" : { "alert_config_id" : { "type" : "string" @@ -12991,41 +14553,7 @@ }, "type" : "object" }, - "AwareParameter" : { - "properties" : { - "id" : { - "type" : "string" - }, - "key" : { - "type" : "string" - }, - "parameter_id" : { - "type" : "string" - }, - "unit_id" : { - "type" : "string" - } - }, - "type" : "object" - }, - "AwarePlatformParameterConfig" : { - "properties" : { - "aware_id" : { - "type" : "string" - }, - "aware_parameters" : { - "additionalProperties" : { - "type" : "string" - }, - "type" : "object" - }, - "instrument_id" : { - "type" : "string" - } - }, - "type" : "object" - }, - "CalculatedTimeseries" : { + "dto.CalculatedTimeseries" : { "properties" : { "formula" : { "type" : "string" @@ -13051,53 +14579,15 @@ }, "type" : "object" }, - "CollectionGroup" : { - "properties" : { - "create_date" : { - "type" : "string" - }, - "creator_id" : { - "type" : "string" - }, - "creator_username" : { - "type" : "string" - }, - "id" : { - "type" : "string" - }, - "name" : { - "type" : "string" - }, - "project_id" : { - "type" : "string" - }, - "slug" : { - "type" : "string" - }, - "sort_order" : { - "type" : "integer" - }, - "update_date" : { - "type" : "string" - }, - "updater_id" : { - "type" : "string" - }, - "updater_username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "CollectionGroupDetails" : { + 
"dto.CollectionGroup" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "id" : { @@ -13115,114 +14605,27 @@ "sort_order" : { "type" : "integer" }, - "timeseries" : { - "items" : { - "$ref" : "#/components/schemas/collectionGroupDetailsTimeseries" - }, - "type" : "array" - }, - "update_date" : { - "type" : "string" - }, - "updater_id" : { - "type" : "string" - }, - "updater_username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "Datalogger" : { - "properties" : { - "create_date" : { - "type" : "string" - }, - "creator_id" : { - "type" : "string" - }, - "creator_username" : { - "type" : "string" - }, - "errors" : { - "items" : { - "type" : "string" - }, - "type" : "array" - }, - "id" : { - "type" : "string" - }, - "model" : { - "type" : "string" - }, - "model_id" : { - "type" : "string" - }, - "name" : { - "type" : "string" - }, - "project_id" : { - "type" : "string" - }, - "slug" : { - "type" : "string" - }, - "sn" : { - "type" : "string" - }, - "tables" : { - "items" : { - "$ref" : "#/components/schemas/DataloggerTable" - }, - "type" : "array" - }, - "update_date" : { - "type" : "string" - }, - "updater_id" : { - "type" : "string" - }, - "updater_username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "DataloggerTable" : { - "properties" : { - "id" : { + "updated_by" : { "type" : "string" }, - "table_name" : { - "type" : "string" - } - }, - "type" : "object" - }, - "DataloggerTablePreview" : { - "properties" : { - "datalogger_table_id" : { + "updated_by_username" : { "type" : "string" }, - "preview" : { - "$ref" : "#/components/schemas/pgtype.JSON" - }, - "update_date" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "DataloggerWithKey" : { + "dto.Datalogger" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : 
{ + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "errors" : { @@ -13234,9 +14637,6 @@ "id" : { "type" : "string" }, - "key" : { - "type" : "string" - }, "model" : { "type" : "string" }, @@ -13251,133 +14651,40 @@ }, "slug" : { "type" : "string" - }, - "sn" : { - "type" : "string" - }, - "tables" : { - "items" : { - "$ref" : "#/components/schemas/DataloggerTable" - }, - "type" : "array" - }, - "update_date" : { - "type" : "string" - }, - "updater_id" : { - "type" : "string" - }, - "updater_username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "District" : { - "properties" : { - "agency" : { - "type" : "string" - }, - "division_initials" : { - "type" : "string" - }, - "division_name" : { - "type" : "string" - }, - "id" : { - "type" : "string" - }, - "initials" : { - "type" : "string" - }, - "name" : { - "type" : "string" - }, - "office_id" : { - "type" : "string" - } - }, - "type" : "object" - }, - "DistrictRollup" : { - "properties" : { - "actual_total_submittals" : { - "type" : "integer" - }, - "alert_type_id" : { - "type" : "string" - }, - "district_initials" : { - "type" : "string" - }, - "expected_total_submittals" : { - "type" : "integer" - }, - "green_submittals" : { - "type" : "integer" - }, - "month" : { - "type" : "string" - }, - "office_id" : { - "type" : "string" - }, - "project_id" : { - "type" : "string" - }, - "project_name" : { - "type" : "string" - }, - "red_submittals" : { - "type" : "integer" - }, - "yellow_submittals" : { - "type" : "integer" - } - }, - "type" : "object" - }, - "Domain" : { - "properties" : { - "description" : { + }, + "sn" : { "type" : "string" }, - "group" : { + "tables" : { + "items" : { + "$ref" : "#/components/schemas/dto.DataloggerTable" + }, + "type" : "array" + }, + "updated_by" : { "type" : "string" }, - "id" : { + "updated_by_username" : { "type" : "string" }, - "value" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - 
"DomainGroupOption" : { + "dto.DataloggerTable" : { "properties" : { - "description" : { - "type" : "string" - }, "id" : { "type" : "string" }, - "value" : { + "table_name" : { "type" : "string" } }, "type" : "object" }, - "DomainMap" : { - "additionalProperties" : { - "items" : { - "$ref" : "#/components/schemas/DomainGroupOption" - }, - "type" : "array" - }, - "type" : "object" - }, - "EmailAutocompleteResult" : { + "dto.EmailAutocompleteResult" : { "properties" : { "email" : { "type" : "string" @@ -13394,7 +14701,7 @@ }, "type" : "object" }, - "EquivalencyTable" : { + "dto.EquivalencyTable" : { "properties" : { "datalogger_id" : { "type" : "string" @@ -13407,14 +14714,14 @@ }, "rows" : { "items" : { - "$ref" : "#/components/schemas/EquivalencyTableRow" + "$ref" : "#/components/schemas/dto.EquivalencyTableRow" }, "type" : "array" } }, "type" : "object" }, - "EquivalencyTableRow" : { + "dto.EquivalencyTableRow" : { "properties" : { "display_name" : { "type" : "string" @@ -13434,7 +14741,7 @@ }, "type" : "object" }, - "Evaluation" : { + "dto.Evaluation" : { "properties" : { "alert_config_id" : { "type" : "string" @@ -13445,16 +14752,16 @@ "body" : { "type" : "string" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, - "end_date" : { + "ended_at" : { "type" : "string" }, "id" : { @@ -13462,7 +14769,7 @@ }, "instruments" : { "items" : { - "$ref" : "#/components/schemas/EvaluationInstrument" + "$ref" : "#/components/schemas/dto.EvaluationInstrument" }, "type" : "array" }, @@ -13475,25 +14782,25 @@ "project_name" : { "type" : "string" }, - "start_date" : { + "started_at" : { "type" : "string" }, "submittal_id" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } 
}, "type" : "object" }, - "EvaluationInstrument" : { + "dto.EvaluationInstrument" : { "properties" : { "instrument_id" : { "type" : "string" @@ -13504,52 +14811,7 @@ }, "type" : "object" }, - "Geometry" : { - "properties" : { - "coordinates" : { - "type" : "object" - }, - "geometries" : { - "items" : { - "$ref" : "#/components/schemas/geojson.Geometry" - }, - "type" : "array" - }, - "type" : { - "type" : "string" - } - }, - "type" : "object" - }, - "Heartbeat" : { - "properties" : { - "time" : { - "type" : "string" - } - }, - "type" : "object" - }, - "Home" : { - "properties" : { - "instrument_count" : { - "type" : "integer" - }, - "instrument_group_count" : { - "type" : "integer" - }, - "new_instruments_7d" : { - "type" : "integer" - }, - "new_measurements_2h" : { - "type" : "integer" - }, - "project_count" : { - "type" : "integer" - } - }, - "type" : "object" - }, - "IDSlugName" : { + "dto.IDSlugName" : { "properties" : { "id" : { "type" : "string" @@ -13563,75 +14825,33 @@ }, "type" : "object" }, - "InclinometerMeasurement" : { + "dto.InclSegment" : { "properties" : { - "create_date" : { + "a0_timeseries_id" : { "type" : "string" }, - "creator" : { + "a180_timeseries_id" : { "type" : "string" }, - "time" : { + "b0_timeseries_id" : { "type" : "string" }, - "values" : { - "items" : { - "type" : "integer" - }, - "type" : "array" - } - }, - "type" : "object" - }, - "InclinometerMeasurementCollection" : { - "properties" : { - "inclinometers" : { - "items" : { - "$ref" : "#/components/schemas/InclinometerMeasurement" - }, - "type" : "array" - }, - "timeseries_id" : { + "b180_timeseries_id" : { "type" : "string" - } - }, - "type" : "object" - }, - "InclinometerMeasurementCollectionCollection" : { - "properties" : { - "items" : { - "items" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollection" - }, - "type" : "array" - } - }, - "type" : "object" - }, - "InclinometerMeasurementCollectionLean" : { - "properties" : { - "items" : { - "items" : { - "$ref" : 
"#/components/schemas/InclinometerMeasurementLean" - }, - "type" : "array" }, - "timeseries_id" : { + "depth_timeseries_id" : { "type" : "string" - } - }, - "type" : "object" - }, - "InclinometerMeasurementLean" : { - "additionalProperties" : { - "items" : { + }, + "id" : { "type" : "integer" }, - "type" : "array" + "instrument_id" : { + "type" : "string" + } }, "type" : "object" }, - "Instrument" : { + "dto.Instrument" : { "properties" : { "alert_configs" : { "items" : { @@ -13648,17 +14868,20 @@ }, "type" : "array" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "geometry" : { - "$ref" : "#/components/schemas/Geometry" + "items" : { + "type" : "integer" + }, + "type" : "array" }, "groups" : { "items" : { @@ -13685,11 +14908,11 @@ "type" : "integer" }, "opts" : { - "$ref" : "#/components/schemas/Opts" + "$ref" : "#/components/schemas/dto.Opts" }, "projects" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -13717,13 +14940,13 @@ "type_id" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" }, "usgs_id" : { @@ -13732,23 +14955,15 @@ }, "type" : "object" }, - "InstrumentCount" : { + "dto.InstrumentGroup" : { "properties" : { - "instrument_count" : { - "type" : "integer" - } - }, - "type" : "object" - }, - "InstrumentGroup" : { - "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "description" : { @@ -13772,30 +14987,30 @@ "timeseries_count" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : 
"string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "InstrumentNote" : { + "dto.InstrumentNote" : { "properties" : { "body" : { "type" : "string" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "id" : { @@ -13810,30 +15025,30 @@ "title" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "InstrumentNoteCollection" : { + "dto.InstrumentNoteCollection" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/dto.InstrumentNote" }, "type" : "array" } }, "type" : "object" }, - "InstrumentProjectAssignments" : { + "dto.InstrumentProjectAssignments" : { "properties" : { "project_ids" : { "items" : { @@ -13844,7 +15059,7 @@ }, "type" : "object" }, - "InstrumentStatus" : { + "dto.InstrumentStatus" : { "properties" : { "id" : { "type" : "string" @@ -13861,46 +15076,18 @@ }, "type" : "object" }, - "InstrumentStatusCollection" : { + "dto.InstrumentStatusCollection" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/InstrumentStatus" - }, - "type" : "array" - } - }, - "type" : "object" - }, - "InstrumentsValidation" : { - "properties" : { - "errors" : { - "items" : { - "type" : "string" - }, - "type" : "array" - }, - "is_valid" : { - "type" : "boolean" - } - }, - "type" : "object" - }, - "IpiMeasurements" : { - "properties" : { - "measurements" : { - "items" : { - "$ref" : "#/components/schemas/IpiSegmentMeasurement" + "$ref" : "#/components/schemas/dto.InstrumentStatus" }, "type" : "array" - }, - "time" : { - "type" : "string" } 
}, "type" : "object" }, - "IpiSegment" : { + "dto.IpiSegment" : { "properties" : { "id" : { "type" : "integer" @@ -13926,30 +15113,7 @@ }, "type" : "object" }, - "IpiSegmentMeasurement" : { - "properties" : { - "cum_dev" : { - "type" : "number" - }, - "elevation" : { - "type" : "number" - }, - "inc_dev" : { - "type" : "number" - }, - "segment_id" : { - "type" : "integer" - }, - "temp" : { - "type" : "number" - }, - "tilt" : { - "type" : "number" - } - }, - "type" : "object" - }, - "Measurement" : { + "dto.Measurement" : { "properties" : { "annotation" : { "type" : "string" @@ -13972,25 +15136,11 @@ }, "type" : "object" }, - "MeasurementCollection" : { - "properties" : { - "items" : { - "items" : { - "$ref" : "#/components/schemas/Measurement" - }, - "type" : "array" - }, - "timeseries_id" : { - "type" : "string" - } - }, - "type" : "object" - }, - "MeasurementCollectionLean" : { + "dto.MeasurementCollection" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/MeasurementLean" + "$ref" : "#/components/schemas/dto.Measurement" }, "type" : "array" }, @@ -14000,100 +15150,29 @@ }, "type" : "object" }, - "MeasurementLean" : { - "additionalProperties" : { - "type" : "number" - }, - "type" : "object" - }, - "Opts" : { + "dto.Opts" : { "additionalProperties" : true, "type" : "object" }, - "PlotConfig" : { - "properties" : { - "auto_range" : { - "type" : "boolean" - }, - "create_date" : { - "type" : "string" - }, - "creator_id" : { - "type" : "string" - }, - "creator_username" : { - "type" : "string" - }, - "date_range" : { - "type" : "string" - }, - "display" : { - "$ref" : "#/components/schemas/Opts" - }, - "id" : { - "type" : "string" - }, - "name" : { - "type" : "string" - }, - "plot_type" : { - "type" : "string" - }, - "project_id" : { - "type" : "string" - }, - "report_configs" : { - "items" : { - "$ref" : "#/components/schemas/IDSlugName" - }, - "type" : "array" - }, - "show_comments" : { - "type" : "boolean" - }, - "show_masked" : { - 
"type" : "boolean" - }, - "show_nonvalidated" : { - "type" : "boolean" - }, - "slug" : { - "type" : "string" - }, - "threshold" : { - "type" : "integer" - }, - "update_date" : { - "type" : "string" - }, - "updater_id" : { - "type" : "string" - }, - "updater_username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "PlotConfigBullseyePlot" : { + "dto.PlotConfigBullseyePlot" : { "properties" : { "auto_range" : { "type" : "boolean" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "date_range" : { "type" : "string" }, "display" : { - "$ref" : "#/components/schemas/PlotConfigBullseyePlotDisplay" + "$ref" : "#/components/schemas/dto.PlotConfigBullseyePlotDisplay" }, "id" : { "type" : "string" @@ -14109,7 +15188,7 @@ }, "report_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -14128,19 +15207,19 @@ "threshold" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "PlotConfigBullseyePlotDisplay" : { + "dto.PlotConfigBullseyePlotDisplay" : { "properties" : { "x_axis_timeseries_id" : { "type" : "string" @@ -14151,25 +15230,25 @@ }, "type" : "object" }, - "PlotConfigContourPlot" : { + "dto.PlotConfigContourPlot" : { "properties" : { "auto_range" : { "type" : "boolean" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "date_range" : { "type" : "string" }, "display" : { - "$ref" : "#/components/schemas/PlotConfigContourPlotDisplay" + "$ref" : "#/components/schemas/dto.PlotConfigContourPlotDisplay" 
}, "id" : { "type" : "string" @@ -14185,7 +15264,7 @@ }, "report_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -14204,19 +15283,19 @@ "threshold" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "PlotConfigContourPlotDisplay" : { + "dto.PlotConfigContourPlotDisplay" : { "properties" : { "contour_smoothing" : { "type" : "boolean" @@ -14234,47 +15313,33 @@ "type" : "string" }, "timeseries_ids" : { - "items" : { - "type" : "string" - }, - "type" : "array" - } - }, - "type" : "object" - }, - "PlotConfigMeasurementBullseyePlot" : { - "properties" : { - "time" : { - "type" : "string" - }, - "x" : { - "type" : "number" - }, - "y" : { - "type" : "number" + "items" : { + "type" : "string" + }, + "type" : "array" } }, "type" : "object" }, - "PlotConfigProfilePlot" : { + "dto.PlotConfigProfilePlot" : { "properties" : { "auto_range" : { "type" : "boolean" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "date_range" : { "type" : "string" }, "display" : { - "$ref" : "#/components/schemas/PlotConfigProfilePlotDisplay" + "$ref" : "#/components/schemas/dto.PlotConfigProfilePlotDisplay" }, "id" : { "type" : "string" @@ -14290,7 +15355,7 @@ }, "report_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -14309,19 +15374,19 @@ "threshold" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, 
"type" : "object" }, - "PlotConfigProfilePlotDisplay" : { + "dto.PlotConfigProfilePlotDisplay" : { "properties" : { "instrument_id" : { "type" : "string" @@ -14332,7 +15397,7 @@ }, "type" : "object" }, - "PlotConfigScatterLineCustomShape" : { + "dto.PlotConfigScatterLineCustomShape" : { "properties" : { "color" : { "type" : "string" @@ -14352,25 +15417,25 @@ }, "type" : "object" }, - "PlotConfigScatterLineDisplay" : { + "dto.PlotConfigScatterLineDisplay" : { "properties" : { "layout" : { - "$ref" : "#/components/schemas/PlotConfigScatterLineLayout" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLineLayout" }, "traces" : { "items" : { - "$ref" : "#/components/schemas/PlotConfigScatterLineTimeseriesTrace" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLineTimeseriesTrace" }, "type" : "array" } }, "type" : "object" }, - "PlotConfigScatterLineLayout" : { + "dto.PlotConfigScatterLineLayout" : { "properties" : { "custom_shapes" : { "items" : { - "$ref" : "#/components/schemas/PlotConfigScatterLineCustomShape" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLineCustomShape" }, "type" : "array" }, @@ -14383,25 +15448,25 @@ }, "type" : "object" }, - "PlotConfigScatterLinePlot" : { + "dto.PlotConfigScatterLinePlot" : { "properties" : { "auto_range" : { "type" : "boolean" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "date_range" : { "type" : "string" }, "display" : { - "$ref" : "#/components/schemas/PlotConfigScatterLineDisplay" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLineDisplay" }, "id" : { "type" : "string" @@ -14417,7 +15482,7 @@ }, "report_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -14436,19 +15501,19 @@ "threshold" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : 
"string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "PlotConfigScatterLineTimeseriesTrace" : { + "dto.PlotConfigScatterLineTimeseriesTrace" : { "properties" : { "color" : { "type" : "string" @@ -14489,47 +15554,15 @@ }, "type" : "object" }, - "Profile" : { - "properties" : { - "display_name" : { - "type" : "string" - }, - "email" : { - "type" : "string" - }, - "id" : { - "type" : "string" - }, - "is_admin" : { - "type" : "boolean" - }, - "roles" : { - "items" : { - "type" : "string" - }, - "type" : "array" - }, - "tokens" : { - "items" : { - "$ref" : "#/components/schemas/TokenInfoProfile" - }, - "type" : "array" - }, - "username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "Project" : { + "dto.Project" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "district_id" : { @@ -14559,27 +15592,19 @@ "slug" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "ProjectCount" : { - "properties" : { - "project_count" : { - "type" : "integer" - } - }, - "type" : "object" - }, - "ProjectInstrumentAssignments" : { + "dto.ProjectInstrumentAssignments" : { "properties" : { "instrument_ids" : { "items" : { @@ -14590,105 +15615,15 @@ }, "type" : "object" }, - "ProjectMembership" : { - "properties" : { - "email" : { - "type" : "string" - }, - "id" : { - "type" : "string" - }, - "profile_id" : { - "type" : "string" - }, - "role" : { - "type" : "string" - }, - "role_id" : { - "type" : "string" - }, - "username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "ReportConfig" : { - 
"properties" : { - "create_date" : { - "type" : "string" - }, - "creator_id" : { - "type" : "string" - }, - "creator_username" : { - "type" : "string" - }, - "description" : { - "type" : "string" - }, - "district_name" : { - "type" : "string" - }, - "global_overrides" : { - "$ref" : "#/components/schemas/ReportConfigGlobalOverrides" - }, - "id" : { - "type" : "string" - }, - "name" : { - "type" : "string" - }, - "plot_configs" : { - "items" : { - "$ref" : "#/components/schemas/IDSlugName" - }, - "type" : "array" - }, - "project_id" : { - "type" : "string" - }, - "project_name" : { - "type" : "string" - }, - "slug" : { - "type" : "string" - }, - "update_date" : { - "type" : "string" - }, - "updater_id" : { - "type" : "string" - }, - "updater_username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "ReportConfigGlobalOverrides" : { - "properties" : { - "date_range" : { - "$ref" : "#/components/schemas/TextOption" - }, - "show_masked" : { - "$ref" : "#/components/schemas/ToggleOption" - }, - "show_nonvalidated" : { - "$ref" : "#/components/schemas/ToggleOption" - } - }, - "type" : "object" - }, - "ReportConfigWithPlotConfigs" : { + "dto.ReportConfig" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "description" : { @@ -14698,7 +15633,7 @@ "type" : "string" }, "global_overrides" : { - "$ref" : "#/components/schemas/ReportConfigGlobalOverrides" + "$ref" : "#/components/schemas/dto.ReportConfigGlobalOverrides" }, "id" : { "type" : "string" @@ -14708,7 +15643,7 @@ }, "plot_configs" : { "items" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -14721,227 +15656,94 @@ "slug" : { "type" : "string" }, - "update_date" : { - "type" : "string" - }, - "updater_id" : { - "type" : "string" - }, - "updater_username" : { 
- "type" : "string" - } - }, - "type" : "object" - }, - "ReportDownloadJob" : { - "properties" : { - "create_date" : { - "type" : "string" - }, - "creator" : { - "type" : "string" - }, - "file_expiry" : { - "type" : "string" - }, - "file_key" : { - "type" : "string" - }, - "id" : { - "type" : "string" - }, - "progress" : { - "type" : "integer" - }, - "progress_update_date" : { - "type" : "string" - }, - "report_config_id" : { - "type" : "string" - }, - "status" : { - "type" : "string" - } - }, - "type" : "object" - }, - "SaaMeasurements" : { - "properties" : { - "measurements" : { - "items" : { - "$ref" : "#/components/schemas/SaaSegmentMeasurement" - }, - "type" : "array" - }, - "time" : { - "type" : "string" - } - }, - "type" : "object" - }, - "SaaSegment" : { - "properties" : { - "id" : { - "type" : "integer" - }, - "instrument_id" : { - "type" : "string" - }, - "length" : { - "type" : "number" - }, - "length_timeseries_id" : { - "type" : "string" - }, - "temp_timeseries_id" : { - "type" : "string" - }, - "x_timeseries_id" : { - "type" : "string" - }, - "y_timeseries_id" : { - "type" : "string" - }, - "z_timeseries_id" : { - "type" : "string" - } - }, - "type" : "object" - }, - "SaaSegmentMeasurement" : { - "properties" : { - "elevation" : { - "type" : "number" - }, - "segment_id" : { - "type" : "integer" - }, - "temp" : { - "type" : "number" - }, - "temp_cum_dev" : { - "type" : "number" - }, - "temp_increment" : { - "type" : "number" - }, - "x" : { - "type" : "number" - }, - "x_cum_dev" : { - "type" : "number" - }, - "x_increment" : { - "type" : "number" - }, - "y" : { - "type" : "number" - }, - "y_cum_dev" : { - "type" : "number" - }, - "y_increment" : { - "type" : "number" - }, - "z" : { - "type" : "number" - }, - "z_cum_dev" : { - "type" : "number" - }, - "z_increment" : { - "type" : "number" - } - }, - "type" : "object" - }, - "SearchResult" : { - "properties" : { - "id" : { - "type" : "string" - }, - "item" : { - "type" : "object" - }, - "type" : { - 
"type" : "string" - } - }, - "type" : "object" - }, - "Site" : { - "properties" : { - "description" : { + "updated_by" : { "type" : "string" }, - "elevation" : { + "updated_by_username" : { "type" : "string" }, - "elevationUnits" : { + "updatedd_at" : { "type" : "string" - }, - "siteName" : { - "$ref" : "#/components/schemas/SiteName" } }, "type" : "object" }, - "SiteName" : { + "dto.ReportConfigGlobalOverrides" : { "properties" : { - "id" : { - "type" : "string" + "date_range" : { + "$ref" : "#/components/schemas/dto.TextOption" }, - "nameType" : { - "type" : "string" + "show_masked" : { + "$ref" : "#/components/schemas/dto.ToggleOption" + }, + "show_nonvalidated" : { + "$ref" : "#/components/schemas/dto.ToggleOption" } }, "type" : "object" }, - "Submittal" : { + "dto.ReportDownloadJob" : { "properties" : { - "alert_config_id" : { + "created_at" : { "type" : "string" }, - "alert_config_name" : { + "created_by" : { "type" : "string" }, - "alert_type_id" : { + "file_expiry" : { "type" : "string" }, - "alert_type_name" : { + "file_key" : { "type" : "string" }, - "completion_date" : { + "id" : { "type" : "string" }, - "create_date" : { + "progress" : { + "type" : "integer" + }, + "progress_updated_at" : { "type" : "string" }, - "due_date" : { + "report_config_id" : { "type" : "string" }, + "status" : { + "type" : "string" + } + }, + "type" : "object" + }, + "dto.SaaSegment" : { + "properties" : { "id" : { + "type" : "integer" + }, + "instrument_id" : { "type" : "string" }, - "marked_as_missing" : { - "type" : "boolean" + "length" : { + "type" : "number" }, - "project_id" : { + "length_timeseries_id" : { "type" : "string" }, - "submittal_status_id" : { + "temp_timeseries_id" : { "type" : "string" }, - "submittal_status_name" : { + "x_timeseries_id" : { "type" : "string" }, - "warning_sent" : { - "type" : "boolean" + "y_timeseries_id" : { + "type" : "string" + }, + "z_timeseries_id" : { + "type" : "string" } }, "type" : "object" }, - "TextOption" : { + "dto.TextOption" 
: { "properties" : { "enabled" : { "type" : "boolean" @@ -14952,7 +15754,7 @@ }, "type" : "object" }, - "Timeseries" : { + "dto.Timeseries" : { "properties" : { "id" : { "type" : "string" @@ -14992,7 +15794,7 @@ }, "values" : { "items" : { - "$ref" : "#/components/schemas/Measurement" + "$ref" : "#/components/schemas/dto.Measurement" }, "type" : "array" }, @@ -15002,18 +15804,18 @@ }, "type" : "object" }, - "TimeseriesCollectionItems" : { + "dto.TimeseriesCollectionItems" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/dto.Timeseries" }, "type" : "array" } }, "type" : "object" }, - "TimeseriesCwms" : { + "dto.TimeseriesCwms" : { "properties" : { "cwms_extent_earliest_time" : { "type" : "string" @@ -15065,7 +15867,7 @@ }, "values" : { "items" : { - "$ref" : "#/components/schemas/Measurement" + "$ref" : "#/components/schemas/dto.Measurement" }, "type" : "array" }, @@ -15075,35 +15877,18 @@ }, "type" : "object" }, - "TimeseriesMeasurementCollectionCollection" : { + "dto.TimeseriesMeasurementCollectionCollection" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "$ref" : "#/components/schemas/dto.MeasurementCollection" }, "type" : "array" } }, "type" : "object" }, - "TimezoneOption" : { - "properties" : { - "abbrev" : { - "type" : "string" - }, - "is_dst" : { - "type" : "boolean" - }, - "name" : { - "type" : "string" - }, - "utc_offset" : { - "type" : "string" - } - }, - "type" : "object" - }, - "ToggleOption" : { + "dto.ToggleOption" : { "properties" : { "enabled" : { "type" : "boolean" @@ -15114,195 +15899,280 @@ }, "type" : "object" }, - "Token" : { + "dto.UploaderConfig" : { "properties" : { - "issued" : { + "created_at" : { "type" : "string" }, - "profile_id" : { + "created_by" : { "type" : "string" }, - "secret_token" : { + "created_by_username" : { "type" : "string" }, - "token_id" : { + "description" : { + "type" : "string" + }, + "id" 
: { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/dto.UploaderConfigType" + }, + "tz_name" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + }, + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "TokenInfoProfile" : { + "dto.UploaderConfigMapping" : { "properties" : { - "issued" : { + "field_name" : { "type" : "string" }, - "token_id" : { + "timeseries_id" : { "type" : "string" } }, "type" : "object" }, - "Unit" : { + "dto.UploaderConfigType" : { + "enum" : [ "csv", "dux", "toa5" ], + "type" : "string", + "x-enum-varnames" : [ "CSV", "DUX", "TOA5" ] + }, + "service.AggregatePlotConfigMeasurementsContourPlot" : { "properties" : { - "abbreviation" : { - "type" : "string" - }, - "id" : { - "type" : "string" - }, - "measure" : { - "type" : "string" + "x" : { + "items" : { + "type" : "number" + }, + "type" : "array" }, - "measure_id" : { - "type" : "string" + "y" : { + "items" : { + "type" : "number" + }, + "type" : "array" }, - "name" : { + "z" : { + "items" : { + "type" : "number" + }, + "type" : "array" + } + }, + "type" : "object" + }, + "service.AwarePlatformParameterConfig" : { + "properties" : { + "aware_id" : { "type" : "string" }, - "unit_family" : { - "type" : "string" + "aware_parameters" : { + "additionalProperties" : { + "type" : "string" + }, + "type" : "object" }, - "unit_family_id" : { + "instrument_id" : { "type" : "string" } }, "type" : "object" }, - "UploaderConfig" : { + "service.DataloggerWithKey" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, - "description" : { - "type" : "string" + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" }, 
"id" : { "type" : "string" }, + "key" : { + "type" : "string" + }, + "model" : { + "type" : "string" + }, + "model_id" : { + "type" : "string" + }, "name" : { "type" : "string" }, "project_id" : { "type" : "string" }, - "type" : { - "$ref" : "#/components/schemas/UploaderConfigType" + "slug" : { + "type" : "string" }, - "tz_name" : { + "sn" : { "type" : "string" }, - "update_date" : { + "tables" : { + "items" : { + "$ref" : "#/components/schemas/db.DataloggerTableIDName" + }, + "type" : "array" + }, + "updated_at" : { "type" : "string" }, - "updater_id" : { + "updated_by" : { "type" : "string" }, - "updater_username" : { + "updated_by_username" : { "type" : "string" } }, "type" : "object" }, - "UploaderConfigMapping" : { + "service.DomainMap" : { + "additionalProperties" : { + "items" : { + "$ref" : "#/components/schemas/db.DomainGroupOpt" + }, + "type" : "array" + }, + "type" : "object" + }, + "service.Healthcheck" : { "properties" : { - "field_name" : { + "status" : { "type" : "string" - }, - "timeseries_id" : { + } + }, + "type" : "object" + }, + "service.Heartbeat" : { + "properties" : { + "time" : { "type" : "string" } }, "type" : "object" }, - "UploaderConfigType" : { - "enum" : [ "csv", "dux", "toa5" ], - "type" : "string", - "x-enum-varnames" : [ "CSV", "DUX", "TOA5" ] + "service.InstrumentsValidation" : { + "properties" : { + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "is_valid" : { + "type" : "boolean" + } + }, + "type" : "object" }, - "collectionGroupDetailsTimeseries" : { + "service.ReportConfigWithPlotConfigs" : { "properties" : { - "id" : { + "created_at" : { "type" : "string" }, - "instrument" : { + "created_by" : { "type" : "string" }, - "instrument_id" : { + "created_by_username" : { "type" : "string" }, - "instrument_slug" : { + "description" : { "type" : "string" }, - "is_computed" : { - "type" : "boolean" - }, - "latest_time" : { + "district_name" : { "type" : "string" }, - "latest_value" : { - "type" : 
"number" + "global_overrides" : { + "$ref" : "#/components/schemas/db.ReportConfigGlobalOverrides" }, - "name" : { + "id" : { "type" : "string" }, - "parameter" : { + "name" : { "type" : "string" }, - "parameter_id" : { - "type" : "string" + "plot_configs" : { + "items" : { + "$ref" : "#/components/schemas/db.VPlotConfiguration" + }, + "type" : "array" }, - "slug" : { + "project_id" : { "type" : "string" }, - "sort_order" : { - "type" : "integer" - }, - "type" : { + "project_name" : { "type" : "string" }, - "unit" : { + "slug" : { "type" : "string" }, - "unit_id" : { + "updated_at" : { "type" : "string" }, - "values" : { - "items" : { - "$ref" : "#/components/schemas/Measurement" - }, - "type" : "array" + "updated_by" : { + "type" : "string" }, - "variable" : { + "updated_by_username" : { "type" : "string" } }, "type" : "object" }, - "pgtype.JSON" : { + "service.Token" : { "properties" : { - "bytes" : { - "items" : { - "type" : "integer" - }, - "type" : "array" + "hash" : { + "type" : "string" }, - "status" : { - "$ref" : "#/components/schemas/pgtype.Status" + "id" : { + "type" : "string" + }, + "issued" : { + "type" : "string" + }, + "profile_id" : { + "type" : "string" + }, + "secret_token" : { + "type" : "string" + }, + "token_id" : { + "type" : "string" } }, "type" : "object" }, - "pgtype.Status" : { - "enum" : [ 0, 1, 2 ], - "type" : "integer", - "x-enum-varnames" : [ "Undefined", "Null", "Present" ] - }, "_timeseries_measurements_post_request" : { "properties" : { "timeseries_measurement_collections" : { diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml index c84bd1c8..2ce95707 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -26,7 +26,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Submittal' + $ref: '#/components/schemas/db.VSubmittal' type: array description: OK "400": @@ -116,7 +116,7 @@ paths: content: application/json: schema: - $ref: 
'#/components/schemas/AlertSubscription' + $ref: '#/components/schemas/dto.AlertSubscription' description: alert subscription payload required: true responses: @@ -125,7 +125,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertSubscription' + $ref: '#/components/schemas/db.AlertProfileSubscription' type: array description: OK "400": @@ -160,7 +160,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AwarePlatformParameterConfig' + $ref: '#/components/schemas/service.AwarePlatformParameterConfig' type: array description: OK "400": @@ -192,7 +192,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AwareParameter' + $ref: '#/components/schemas/db.AwareParameterListRow' type: array description: OK "400": @@ -228,7 +228,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/dto.Datalogger' description: datalogger payload required: true responses: @@ -236,9 +236,7 @@ paths: content: application/json: schema: - items: - $ref: '#/components/schemas/DataloggerWithKey' - type: array + $ref: '#/components/schemas/service.DataloggerWithKey' description: OK "400": content: @@ -329,7 +327,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/db.VDatalogger' description: OK "400": content: @@ -372,7 +370,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/dto.Datalogger' description: datalogger payload required: true responses: @@ -380,7 +378,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/db.VDatalogger' description: OK "400": content: @@ -425,7 +423,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/dto.EquivalencyTable' description: equivalency table payload required: true responses: @@ 
-433,7 +431,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/db.VDataloggerEquivalencyTable' description: OK "400": content: @@ -480,7 +478,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DataloggerWithKey' + $ref: '#/components/schemas/service.DataloggerWithKey' description: OK "400": content: @@ -585,7 +583,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/db.VDataloggerEquivalencyTable' type: array description: OK "400": @@ -636,7 +634,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/dto.EquivalencyTable' description: equivalency table payload required: true responses: @@ -644,7 +642,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/db.VDataloggerEquivalencyTable' description: OK "400": content: @@ -696,7 +694,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/dto.EquivalencyTable' description: equivalency table payload required: true responses: @@ -704,7 +702,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/db.VDataloggerEquivalencyTable' description: OK "400": content: @@ -817,7 +815,8 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DataloggerTablePreview' + additionalProperties: true + type: object description: OK "400": content: @@ -869,7 +868,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DataloggerTablePreview' + $ref: '#/components/schemas/db.VDataloggerPreview' description: OK "400": content: @@ -908,7 +907,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/db.VDatalogger' type: array 
description: OK "400": @@ -942,7 +941,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/District' + $ref: '#/components/schemas/db.VDistrict' type: array description: OK "400": @@ -974,7 +973,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/TimezoneOption' + $ref: '#/components/schemas/db.PgTimezoneNamesListRow' type: array description: OK "400": @@ -1005,7 +1004,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DomainMap' + $ref: '#/components/schemas/service.DomainMap' description: OK "400": content: @@ -1043,7 +1042,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/EmailAutocompleteResult' + $ref: '#/components/schemas/db.EmailAutocompleteListRow' type: array description: OK "400": @@ -1085,7 +1084,9 @@ paths: schema: items: additionalProperties: - $ref: '#/components/schemas/MeasurementCollectionLean' + items: + $ref: '#/components/schemas/db.MeasurementCollectionLean' + type: array type: object type: array description: OK @@ -1119,7 +1120,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/CalculatedTimeseries' + $ref: '#/components/schemas/db.TimeseriesComputedListForInstrumentRow' type: array description: OK "400": @@ -1151,13 +1152,13 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: additionalProperties: true type: object - description: OK + description: Created "400": content: application/json: @@ -1247,7 +1248,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/CalculatedTimeseries' + $ref: '#/components/schemas/dto.CalculatedTimeseries' type: array description: OK "400": @@ -1280,10 +1281,7 @@ paths: content: application/json: schema: - items: - additionalProperties: true - type: object - type: array + $ref: '#/components/schemas/service.Healthcheck' description: OK summary: checks the health of the api server tags: @@ -1298,12 +1296,12 @@ paths: schema: type: 
string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/Heartbeat' - description: OK + $ref: '#/components/schemas/service.Heartbeat' + description: Created summary: creates a heartbeat entry at regular intervals tags: - heartbeat @@ -1314,7 +1312,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Heartbeat' + $ref: '#/components/schemas/service.Heartbeat' description: OK summary: gets the latest heartbeat tags: @@ -1327,7 +1325,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Heartbeat' + $ref: '#/components/schemas/service.Heartbeat' type: array description: OK summary: returns all heartbeats @@ -1340,7 +1338,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Home' + $ref: '#/components/schemas/db.HomeGetRow' description: OK "500": content: @@ -1351,50 +1349,6 @@ paths: summary: gets information for the homepage tags: - home - /inclinometer_explorer: - post: - requestBody: - content: - application/json: - schema: - items: - type: string - type: array - description: array of inclinometer instrument uuids - required: true - responses: - "200": - content: - application/json: - schema: - items: - additionalProperties: - $ref: '#/components/schemas/InclinometerMeasurementCollectionLean' - type: object - type: array - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: list inclinometer timeseries measurements for explorer page - tags: - - explorer - x-codegen-request-body-name: instrument_ids /instrument_groups: get: responses: @@ -1403,7 +1357,7 @@ paths: application/json: schema: items: - $ref: 
'#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/db.VInstrumentGroup' type: array description: OK "400": @@ -1438,7 +1392,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/dto.InstrumentGroup' description: instrument group payload required: true responses: @@ -1446,7 +1400,9 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentGroup' + items: + $ref: '#/components/schemas/db.InstrumentGroup' + type: array description: Created "400": content: @@ -1492,9 +1448,8 @@ paths: content: application/json: schema: - items: - $ref: '#/components/schemas/InstrumentGroup' - type: array + additionalProperties: true + type: object description: OK "400": content: @@ -1533,7 +1488,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/db.VInstrumentGroup' description: OK "400": content: @@ -1574,7 +1529,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/dto.InstrumentGroup' description: instrument group payload required: true responses: @@ -1582,7 +1537,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/db.InstrumentGroup' description: OK "400": content: @@ -1624,7 +1579,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInstrument' type: array description: OK "400": @@ -1762,7 +1717,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' type: array description: OK "400": @@ -1801,7 +1756,11 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MeasurementCollection' + additionalProperties: + items: + $ref: '#/components/schemas/db.MeasurementCollectionLean' + type: array + type: object description: OK 
"400": content: @@ -1824,15 +1783,67 @@ paths: summary: lists timeseries measurements by instrument group id tags: - timeseries - /instruments: + /instruments/count: + get: + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + summary: gets the total number of non deleted instruments in the system + tags: + - instrument + /instruments/incl/{instrument_id}/measurements: get: + parameters: + - description: instrument uuid + in: path + name: instrument_id + required: true + schema: + format: uuid + type: string + - description: after time + in: query + name: after + schema: + format: date-time + type: string + - description: before time + in: query + name: before + required: true + schema: + format: date-time + type: string responses: "200": content: application/json: schema: items: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInclMeasurement' type: array description: OK "400": @@ -1853,17 +1864,80 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: lists all instruments + summary: creates instrument notes tags: - - instrument - /instruments/count: + - instrument-incl + /instruments/incl/{instrument_id}/segments: get: + parameters: + - description: instrument uuid + in: path + name: instrument_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/db.VInclSegment' + type: array + description: OK + "400": + content: + application/json: + 
schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + summary: gets all incl segments for an instrument + tags: + - instrument-incl + put: + parameters: + - description: instrument uuid + in: path + name: instrument_id + required: true + schema: + format: uuid + type: string + - description: api key + in: query + name: key + schema: + type: string + requestBody: + content: + '*/*': + schema: + items: + $ref: '#/components/schemas/dto.InclSegment' + type: array + description: incl instrument segments payload + required: true responses: "200": content: application/json: schema: - $ref: '#/components/schemas/InstrumentCount' + items: + $ref: '#/components/schemas/dto.InclSegment' + type: array description: OK "400": content: @@ -1883,9 +1957,12 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: gets the total number of non deleted instruments in the system + security: + - Bearer: [] + summary: updates multiple segments for an incl instrument tags: - - instrument + - instrument-incl + x-codegen-request-body-name: instrument_segments /instruments/ipi/{instrument_id}/measurements: get: parameters: @@ -1915,7 +1992,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/IpiMeasurements' + $ref: '#/components/schemas/db.VIpiMeasurement' type: array description: OK "400": @@ -1955,7 +2032,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/IpiSegment' + $ref: '#/components/schemas/db.VIpiSegment' type: array description: OK "400": @@ -1998,7 +2075,7 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/IpiSegment' + $ref: '#/components/schemas/dto.IpiSegment' type: array description: ipi 
instrument segments payload required: true @@ -2008,7 +2085,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/IpiSegment' + $ref: '#/components/schemas/dto.IpiSegment' type: array description: OK "400": @@ -2036,37 +2113,6 @@ paths: - instrument-ipi x-codegen-request-body-name: instrument_segments /instruments/notes: - get: - responses: - "200": - content: - application/json: - schema: - items: - $ref: '#/components/schemas/InstrumentNote' - type: array - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: gets all instrument notes - tags: - - instrument-note post: parameters: - description: api key @@ -2078,18 +2124,18 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentNoteCollection' + $ref: '#/components/schemas/dto.InstrumentNoteCollection' description: instrument note collection payload required: true responses: - "200": + "201": content: application/json: schema: items: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/db.InstrumentNote' type: array - description: OK + description: Created "400": content: application/json: @@ -2129,7 +2175,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/db.InstrumentNote' description: OK "400": content: @@ -2170,7 +2216,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/dto.InstrumentNote' description: instrument note collection payload required: true responses: @@ -2179,7 +2225,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: 
'#/components/schemas/db.InstrumentNote' type: array description: OK "400": @@ -2235,7 +2281,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/SaaMeasurements' + $ref: '#/components/schemas/db.VSaaMeasurement' type: array description: OK "400": @@ -2275,7 +2321,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/SaaSegment' + $ref: '#/components/schemas/db.VSaaSegment' type: array description: OK "400": @@ -2318,7 +2364,7 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/SaaSegment' + $ref: '#/components/schemas/dto.SaaSegment' type: array description: saa instrument segments payload required: true @@ -2328,7 +2374,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/SaaSegment' + $ref: '#/components/schemas/dto.SaaSegment' type: array description: OK "400": @@ -2370,7 +2416,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInstrument' description: OK "400": content: @@ -2409,7 +2455,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/db.InstrumentNote' type: array description: OK "400": @@ -2502,7 +2548,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/InstrumentStatus' + $ref: '#/components/schemas/db.VInstrumentStatus' type: array description: OK "400": @@ -2544,7 +2590,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentStatusCollection' + $ref: '#/components/schemas/dto.InstrumentStatusCollection' description: instrument status collection paylaod required: true responses: @@ -2654,7 +2700,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VInstrumentStatus' type: array description: OK "400": @@ -2699,7 +2745,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Submittal' + $ref: 
'#/components/schemas/db.VSubmittal' type: array description: OK "400": @@ -2745,7 +2791,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' description: OK "400": content: @@ -2807,7 +2853,9 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MeasurementCollection' + items: + $ref: '#/components/schemas/db.VTimeseriesMeasurement' + type: array description: OK "400": content: @@ -2862,7 +2910,11 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MeasurementCollection' + additionalProperties: + items: + $ref: '#/components/schemas/db.MeasurementCollectionLean' + type: array + type: object description: OK "400": content: @@ -2899,7 +2951,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertSubscription' + $ref: '#/components/schemas/db.AlertProfileSubscription' type: array description: OK "400": @@ -2940,7 +2992,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Alert' + $ref: '#/components/schemas/db.AlertListForProfileRow' type: array description: OK "400": @@ -2985,12 +3037,12 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/Alert' - description: OK + $ref: '#/components/schemas/db.AlertGetRow' + description: Created "400": content: application/json: @@ -3037,7 +3089,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Alert' + $ref: '#/components/schemas/db.AlertGetRow' description: OK "400": content: @@ -3069,7 +3121,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Profile' + $ref: '#/components/schemas/db.VProfile' description: OK "400": content: @@ -3108,7 +3160,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VProject' type: array description: OK "400": @@ -3142,7 +3194,7 @@ paths: 
content: application/json: schema: - $ref: '#/components/schemas/Token' + $ref: '#/components/schemas/service.Token' description: OK "400": content: @@ -3208,38 +3260,6 @@ paths: summary: deletes a token for a profile tags: - profile - /opendcs/sites: - get: - responses: - "200": - content: - text/xml: - schema: - items: - $ref: '#/components/schemas/Site' - type: array - description: OK - "400": - content: - text/xml: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - text/xml: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - text/xml: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: "lists all instruments, represented as opendcs sites" - tags: - - opendcs /profiles: post: responses: @@ -3247,7 +3267,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Profile' + $ref: '#/components/schemas/db.ProfileCreateRow' description: OK "400": content: @@ -3286,7 +3306,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VProject' type: array description: OK "400": @@ -3322,19 +3342,19 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/dto.Project' type: array description: project collection payload required: true responses: - "200": + "201": content: application/json: schema: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/db.ProjectCreateBatchRow' type: array - description: OK + description: Created "400": content: application/json: @@ -3366,7 +3386,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ProjectCount' + type: integer description: OK "400": content: @@ -3449,7 +3469,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VProject' description: OK 
"400": content: @@ -3490,7 +3510,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/dto.Project' description: project payload required: true responses: @@ -3498,7 +3518,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VProject' description: OK "400": content: @@ -3540,7 +3560,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VAlertConfig' type: array description: OK "400": @@ -3582,7 +3602,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/dto.AlertConfig' description: alert config payload required: true responses: @@ -3590,7 +3610,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VAlertConfig' description: OK "400": content: @@ -3643,9 +3663,8 @@ paths: content: application/json: schema: - items: - $ref: '#/components/schemas/AlertConfig' - type: array + additionalProperties: true + type: object description: OK "400": content: @@ -3691,7 +3710,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VAlertConfig' description: OK "400": content: @@ -3739,7 +3758,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/dto.AlertConfig' description: alert config payload required: true responses: @@ -3747,9 +3766,7 @@ paths: content: application/json: schema: - items: - $ref: '#/components/schemas/AlertConfig' - type: array + $ref: '#/components/schemas/db.VAlertConfig' description: OK "400": content: @@ -3791,7 +3808,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.CollectionGroup' type: array description: OK "400": @@ -3835,7 +3852,7 @@ 
paths: content: '*/*': schema: - $ref: '#/components/schemas/CollectionGroup' + $ref: '#/components/schemas/dto.CollectionGroup' description: collection group payload required: true responses: @@ -3844,7 +3861,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/CollectionGroup' + $ref: '#/components/schemas/db.CollectionGroup' type: array description: OK "400": @@ -3945,7 +3962,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/CollectionGroupDetails' + $ref: '#/components/schemas/db.VCollectionGroupDetail' description: OK "400": content: @@ -3992,7 +4009,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/CollectionGroup' + $ref: '#/components/schemas/dto.CollectionGroup' description: collection group payload required: true responses: @@ -4000,7 +4017,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/CollectionGroup' + $ref: '#/components/schemas/db.CollectionGroup' description: OK "400": content: @@ -4115,13 +4132,13 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: additionalProperties: true type: object - description: OK + description: Created "400": content: application/json: @@ -4220,7 +4237,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/DistrictRollup' + $ref: '#/components/schemas/db.VDistrictRollup' type: array description: OK "400": @@ -4260,7 +4277,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/DistrictRollup' + $ref: '#/components/schemas/db.VDistrictRollup' type: array description: OK "400": @@ -4300,7 +4317,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/db.VEvaluation' type: array description: OK "400": @@ -4342,16 +4359,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/dto.Evaluation' description: evaluation payload required: true 
responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/Evaluation' - description: OK + $ref: '#/components/schemas/db.VEvaluation' + description: Created "400": content: application/json: @@ -4404,7 +4421,8 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + additionalProperties: true + type: object type: array description: OK "400": @@ -4451,7 +4469,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/db.VEvaluation' description: OK "400": content: @@ -4499,7 +4517,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/dto.Evaluation' description: evaluation payload required: true responses: @@ -4507,7 +4525,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/db.VEvaluation' description: OK "400": content: @@ -4579,61 +4597,6 @@ paths: summary: uploades a picture for a project tags: - project - /projects/{project_id}/inclinometer_measurements: - post: - parameters: - - description: project uuid - in: path - name: project_id - required: true - schema: - format: uuid - type: string - - description: api key - in: query - name: key - schema: - type: string - requestBody: - content: - '*/*': - schema: - $ref: '#/components/schemas/InclinometerMeasurementCollectionCollection' - description: inclinometer measurement collections - required: true - responses: - "200": - content: - application/json: - schema: - items: - $ref: '#/components/schemas/InclinometerMeasurementCollection' - type: array - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: 
'#/components/schemas/echo.HTTPError' - description: Internal Server Error - security: - - Bearer: [] - summary: creates or updates one or more inclinometer measurements - tags: - - measurement-inclinometer - x-codegen-request-body-name: timeseries_measurement_collections /projects/{project_id}/instrument_groups: get: parameters: @@ -4650,7 +4613,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/db.VInstrumentGroup' type: array description: OK "400": @@ -4690,7 +4653,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VInstrument' type: array description: OK "400": @@ -4740,19 +4703,19 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/dto.Instrument' type: array description: instrument collection payload required: true responses: - "200": + "201": content: application/json: schema: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/db.InstrumentCreateBatchRow' type: array - description: OK + description: Created "400": content: application/json: @@ -4804,7 +4767,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/ProjectInstrumentAssignments' + $ref: '#/components/schemas/dto.ProjectInstrumentAssignments' description: instrument uuids required: true responses: @@ -4812,7 +4775,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentsValidation' + $ref: '#/components/schemas/service.InstrumentsValidation' description: OK "400": content: @@ -4916,7 +4879,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/dto.Instrument' description: instrument payload required: true responses: @@ -4924,7 +4887,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInstrument' description: OK "400": 
content: @@ -4973,7 +4936,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VAlertConfig' type: array description: OK "400": @@ -5027,12 +4990,12 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/AlertSubscription' - description: OK + $ref: '#/components/schemas/db.AlertProfileSubscription' + description: Created "400": content: application/json: @@ -5140,7 +5103,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Alert' + $ref: '#/components/schemas/db.VAlert' type: array description: OK "400": @@ -5198,7 +5161,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentsValidation' + $ref: '#/components/schemas/service.InstrumentsValidation' description: OK "400": content: @@ -5247,12 +5210,12 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/InstrumentsValidation' - description: OK + $ref: '#/components/schemas/service.InstrumentsValidation' + description: Created "400": content: application/json: @@ -5309,7 +5272,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentProjectAssignments' + $ref: '#/components/schemas/dto.InstrumentProjectAssignments' description: project uuids required: true responses: @@ -5317,7 +5280,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentsValidation' + $ref: '#/components/schemas/service.InstrumentsValidation' description: OK "400": content: @@ -5366,7 +5329,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' type: array description: OK "400": @@ -5415,7 +5378,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/TimeseriesCollectionItems' + $ref: '#/components/schemas/dto.TimeseriesCollectionItems' description: 
timeseries collection items payload required: true responses: @@ -5424,7 +5387,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.TimeseriesCreateBatchRow' type: array description: OK "400": @@ -5534,7 +5497,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/dto.Evaluation' type: array description: OK "400": @@ -5584,7 +5547,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/dto.Instrument' description: instrument payload required: true responses: @@ -5592,7 +5555,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInstrument' description: OK "400": content: @@ -5641,7 +5604,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' type: array description: OK "400": @@ -5688,7 +5651,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/TimeseriesCwms' + $ref: '#/components/schemas/db.VTimeseriesCwm' type: array description: OK "400": @@ -5733,19 +5696,18 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/TimeseriesCwms' + $ref: '#/components/schemas/dto.TimeseriesCwms' type: array description: array of cwms timeseries to create required: true responses: - "200": + "201": content: application/json: schema: - items: - $ref: '#/components/schemas/TimeseriesCwms' - type: array - description: OK + additionalProperties: true + type: object + description: Created "400": content: application/json: @@ -5796,7 +5758,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/TimeseriesCwms' + $ref: '#/components/schemas/dto.TimeseriesCwms' description: cwms timeseries to update required: true responses: @@ -5805,7 +5767,8 @@ paths: application/json: schema: items: - $ref: 
'#/components/schemas/TimeseriesCwms' + additionalProperties: true + type: object type: array description: OK "400": @@ -5851,7 +5814,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/ProjectMembership' + $ref: '#/components/schemas/db.ProfileProjectRoleListForProjectRow' type: array description: OK "400": @@ -5970,7 +5933,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ProjectMembership' + type: string description: OK "400": content: @@ -6011,7 +5974,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' type: array description: OK "400": @@ -6054,16 +6017,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigBullseyePlot' + $ref: '#/components/schemas/dto.PlotConfigBullseyePlot' description: plot config payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6114,7 +6077,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigBullseyePlot' + $ref: '#/components/schemas/dto.PlotConfigBullseyePlot' description: plot config payload required: true responses: @@ -6122,7 +6085,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6176,7 +6139,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/PlotConfigMeasurementBullseyePlot' + $ref: '#/components/schemas/db.PlotConfigMeasurementListBullseyeRow' type: array description: OK "400": @@ -6221,16 +6184,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigContourPlot' + $ref: '#/components/schemas/dto.PlotConfigContourPlot' description: plot config payload required: true 
responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6281,7 +6244,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigContourPlot' + $ref: '#/components/schemas/dto.PlotConfigContourPlot' description: plot config payload required: true responses: @@ -6289,7 +6252,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6348,7 +6311,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AggregatePlotConfigMeasurementsContourPlot' + $ref: '#/components/schemas/service.AggregatePlotConfigMeasurementsContourPlot' description: OK "400": content: @@ -6457,16 +6420,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigProfilePlot' + $ref: '#/components/schemas/dto.PlotConfigProfilePlot' description: plot config payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6517,7 +6480,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigProfilePlot' + $ref: '#/components/schemas/dto.PlotConfigProfilePlot' description: plot config payload required: true responses: @@ -6525,7 +6488,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6570,16 +6533,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.PlotConfigScatterLinePlot' description: plot config payload required: true responses: - "200": 
+ "201": content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6630,7 +6593,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.PlotConfigScatterLinePlot' description: plot config payload required: true responses: @@ -6638,7 +6601,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6738,7 +6701,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6777,7 +6740,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' type: array description: OK "400": @@ -6819,16 +6782,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.PlotConfigScatterLinePlot' description: plot config payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6927,7 +6890,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6975,7 +6938,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.PlotConfigScatterLinePlot' description: plot config payload required: true responses: @@ -6983,7 +6946,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' 
+ $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -7029,7 +6992,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfig' + $ref: '#/components/schemas/db.VReportConfig' description: OK "400": content: @@ -7072,7 +7035,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfig' + $ref: '#/components/schemas/dto.ReportConfig' description: report config payload required: true responses: @@ -7080,7 +7043,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfig' + $ref: '#/components/schemas/db.VReportConfig' description: Created "400": content: @@ -7184,7 +7147,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfig' + $ref: '#/components/schemas/dto.ReportConfig' description: report config payload required: true responses: @@ -7246,7 +7209,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportDownloadJob' + $ref: '#/components/schemas/db.ReportDownloadJob' description: Created "400": content: @@ -7305,7 +7268,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportDownloadJob' + $ref: '#/components/schemas/db.ReportDownloadJob' description: OK "400": content: @@ -7406,7 +7369,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Submittal' + $ref: '#/components/schemas/db.VSubmittal' type: array description: OK "400": @@ -7446,7 +7409,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' type: array description: OK "400": @@ -7489,7 +7452,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TimeseriesMeasurementCollectionCollection' + $ref: '#/components/schemas/dto.TimeseriesMeasurementCollectionCollection' description: array of timeseries measurement collections required: true responses: @@ -7497,9 +7460,8 @@ paths: 
content: application/json: schema: - items: - $ref: '#/components/schemas/MeasurementCollection' - type: array + additionalProperties: true + type: object description: OK "400": content: @@ -7555,7 +7517,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/TimeseriesMeasurementCollectionCollection' + $ref: '#/components/schemas/dto.TimeseriesMeasurementCollectionCollection' description: array of timeseries measurement collections required: true responses: @@ -7564,7 +7526,8 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/MeasurementCollection' + additionalProperties: true + type: object type: array description: OK "400": @@ -7607,7 +7570,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/UploaderConfig' + $ref: '#/components/schemas/db.VUploaderConfig' type: array description: OK "400": @@ -7632,7 +7595,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/UploaderConfig' + $ref: '#/components/schemas/dto.UploaderConfig' description: uploader config payload required: true responses: @@ -7707,7 +7670,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/UploaderConfig' + $ref: '#/components/schemas/dto.UploaderConfig' description: uploader config payload required: true responses: @@ -7784,7 +7747,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/UploaderConfigMapping' + $ref: '#/components/schemas/db.UploaderConfigMapping' type: array description: OK "400": @@ -7817,7 +7780,7 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/UploaderConfigMapping' + $ref: '#/components/schemas/dto.UploaderConfigMapping' type: array description: uploader config mappings payload required: true @@ -7860,7 +7823,7 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/UploaderConfigMapping' + $ref: '#/components/schemas/dto.UploaderConfigMapping' type: array description: uploader config mappings payload required: true @@ -7899,7 +7862,11 @@ paths: type: 
string responses: "200": - content: {} + content: + image/jpeg: + schema: + format: binary + type: string description: OK "400": content: @@ -7943,7 +7910,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfigWithPlotConfigs' + $ref: '#/components/schemas/service.ReportConfigWithPlotConfigs' description: OK "400": content: @@ -7986,7 +7953,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportDownloadJob' + $ref: '#/components/schemas/dto.ReportDownloadJob' description: report download job payload required: true responses: @@ -8019,7 +7986,7 @@ paths: tags: - report-config x-codegen-request-body-name: report_download_job - /search/{entity}: + /search/projects: get: parameters: - description: "entity to search (i.e. projects, etc.)" @@ -8039,7 +8006,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/SearchResult' + $ref: '#/components/schemas/db.VProject' type: array description: OK "400": @@ -8121,7 +8088,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/TimeseriesCollectionItems' + $ref: '#/components/schemas/dto.TimeseriesCollectionItems' description: timeseries collection items payload required: true responses: @@ -8129,11 +8096,9 @@ paths: content: application/json: schema: - items: - additionalProperties: - type: string - type: object - type: array + additionalProperties: + type: string + type: object description: OK "400": content: @@ -8219,7 +8184,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' description: OK "400": content: @@ -8260,7 +8225,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/dto.Timeseries' description: timeseries payload required: true responses: @@ -8268,9 +8233,7 @@ paths: content: application/json: schema: - additionalProperties: - type: string - type: object + $ref: 
'#/components/schemas/dto.Timeseries' description: OK "400": content: @@ -8296,108 +8259,6 @@ paths: tags: - timeseries x-codegen-request-body-name: timeseries - /timeseries/{timeseries_id}/inclinometer_measurements: - delete: - parameters: - - description: timeseries uuid - in: path - name: timeseries_id - required: true - schema: - format: uuid - type: string - - description: timestamp of measurement to delete - in: query - name: time - required: true - schema: - format: date-time - type: string - - description: api key - in: query - name: key - schema: - type: string - responses: - "200": - content: - application/json: - schema: - additionalProperties: true - type: object - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - security: - - Bearer: [] - summary: deletes a single inclinometer measurement by timestamp - tags: - - measurement-inclinometer - get: - parameters: - - description: timeseries uuid - in: path - name: timeseries_id - required: true - schema: - format: uuid - type: string - - description: after timestamp - in: query - name: after - schema: - format: date-time - type: string - - description: before timestamp - in: query - name: before - schema: - format: date-time - type: string - responses: - "200": - content: - application/json: - schema: - $ref: '#/components/schemas/InclinometerMeasurementCollection' - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - 
schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: lists all measurements for an inclinometer - tags: - - measurement-inclinometer /timeseries/{timeseries_id}/measurements: delete: parameters: @@ -8482,7 +8343,9 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MeasurementCollection' + items: + $ref: '#/components/schemas/db.VTimeseriesMeasurement' + type: array description: OK "400": content: @@ -8521,7 +8384,8 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/MeasurementCollection' + additionalProperties: true + type: object type: array description: OK "400": @@ -8556,7 +8420,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Unit' + $ref: '#/components/schemas/db.VUnit' type: array description: OK "400": @@ -8577,49 +8441,44 @@ components: message: type: object type: object - geojson.Geometry: + db.AlertGetRow: example: - geometries: - - null - - null - coordinates: "{}" - type: type + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name + instrument_id: instrument_id + read: true + alert_config_id: alert_config_id + project_id: project_id + name: name + created_at: created_at + id: id + body: body + project_name: project_name properties: - coordinates: - type: object - geometries: + alert_config_id: + type: string + body: + type: string + created_at: + type: string + id: + type: string + instruments: items: - $ref: '#/components/schemas/geojson.Geometry' + $ref: '#/components/schemas/db.InstrumentIDName' type: array - type: + name: + type: string + project_id: type: string + project_name: + type: string + read: + type: boolean type: object - AggregatePlotConfigMeasurementsContourPlot: - example: - x: - - 0.8008281904610115 - - 0.8008281904610115 - "y": - - 6.027456183070403 - - 6.027456183070403 - z: - - 1.4658129805029452 - - 1.4658129805029452 - properties: - x: - items: - 
type: number - type: array - "y": - items: - type: number - type: array - z: - items: - type: number - type: array - type: object - Alert: + db.AlertListForProfileRow: example: instruments: - instrument_name: instrument_name @@ -8630,22 +8489,22 @@ components: alert_config_id: alert_config_id project_id: project_id name: name + created_at: created_at id: id body: body - create_date: create_date project_name: project_name properties: alert_config_id: type: string body: type: string - create_date: + created_at: type: string id: type: string instruments: items: - $ref: '#/components/schemas/AlertConfigInstrument' + $ref: '#/components/schemas/db.InstrumentIDName' type: array name: type: string @@ -8656,103 +8515,7 @@ components: read: type: boolean type: object - AlertConfig: - example: - updater_username: updater_username - alert_type_id: alert_type_id - creator_username: creator_username - remind_interval: remind_interval - body: body - project_name: project_name - alert_type: alert_type - update_date: update_date - instruments: - - instrument_name: instrument_name - instrument_id: instrument_id - - instrument_name: instrument_name - instrument_id: instrument_id - project_id: project_id - last_checked: last_checked - mute_consecutive_alerts: true - creator_id: creator_id - last_reminded: last_reminded - name: name - updater_id: updater_id - schedule_interval: schedule_interval - id: id - alert_email_subscriptions: - - user_type: user_type - id: id - email: email - username: username - - user_type: user_type - id: id - email: email - username: username - create_date: create_date - warning_interval: warning_interval - start_date: start_date - properties: - alert_email_subscriptions: - items: - $ref: '#/components/schemas/EmailAutocompleteResult' - type: array - alert_type: - type: string - alert_type_id: - type: string - body: - type: string - create_date: - type: string - creator_id: - type: string - creator_username: - type: string - id: - type: string - 
instruments: - items: - $ref: '#/components/schemas/AlertConfigInstrument' - type: array - last_checked: - type: string - last_reminded: - type: string - mute_consecutive_alerts: - type: boolean - name: - type: string - project_id: - type: string - project_name: - type: string - remind_interval: - type: string - schedule_interval: - type: string - start_date: - type: string - update_date: - type: string - updater_id: - type: string - updater_username: - type: string - warning_interval: - type: string - type: object - AlertConfigInstrument: - example: - instrument_name: instrument_name - instrument_id: instrument_id - properties: - instrument_id: - type: string - instrument_name: - type: string - type: object - AlertSubscription: + db.AlertProfileSubscription: example: alert_config_id: alert_config_id profile_id: profile_id @@ -8771,7 +8534,7 @@ components: profile_id: type: string type: object - AwareParameter: + db.AwareParameterListRow: example: id: id unit_id: unit_id @@ -8787,66 +8550,21 @@ components: unit_id: type: string type: object - AwarePlatformParameterConfig: - example: - aware_parameters: - key: aware_parameters - instrument_id: instrument_id - aware_id: aware_id - properties: - aware_id: - type: string - aware_parameters: - additionalProperties: - type: string - type: object - instrument_id: - type: string - type: object - CalculatedTimeseries: - example: - formula_name: formula_name - formula: formula - id: id - instrument_id: instrument_id - unit_id: unit_id - slug: slug - parameter_id: parameter_id - properties: - formula: - type: string - formula_name: - type: string - id: - type: string - instrument_id: - type: string - parameter_id: - type: string - slug: - type: string - unit_id: - type: string - type: object - CollectionGroup: + db.CollectionGroup: example: - updater_username: updater_username + updated_at: updated_at project_id: project_id - creator_username: creator_username - creator_id: creator_id name: name - updater_id: updater_id + 
updated_by: updated_by + created_at: created_at id: id - create_date: create_date + created_by: created_by sort_order: 0 slug: slug - update_date: update_date properties: - create_date: - type: string - creator_id: + created_at: type: string - creator_username: + created_by: type: string id: type: string @@ -8858,173 +8576,83 @@ components: type: string sort_order: type: integer - update_date: + updated_at: type: string - updater_id: - type: string - updater_username: + updated_by: type: string type: object - CollectionGroupDetails: + db.CollectionGroupDetailsTimeseries: example: - updater_username: updater_username - timeseries: - - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - unit: unit - parameter: parameter - name: name - variable: variable - latest_value: 6.027456183070403 - id: id - instrument_slug: instrument_slug - is_computed: true - latest_time: latest_time - sort_order: 1 - unit_id: unit_id - slug: slug - parameter_id: parameter_id - - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - unit: unit - parameter: parameter - name: name - variable: variable - latest_value: 6.027456183070403 - id: id - instrument_slug: instrument_slug - is_computed: true - latest_time: latest_time - sort_order: 1 - unit_id: unit_id - slug: slug - parameter_id: parameter_id - project_id: project_id - creator_username: creator_username - creator_id: creator_id + instrument: instrument + type: standard + instrument_id: instrument_id + unit: unit + parameter: 
parameter name: name - updater_id: updater_id + variable: "{}" + latest_value: 6.027456183070403 id: id - create_date: create_date - sort_order: 0 + instrument_slug: instrument_slug + is_computed: true + latest_time: latest_time + sort_order: 1 + unit_id: unit_id slug: slug - update_date: update_date + parameter_id: parameter_id properties: - create_date: + id: + type: string + instrument: type: string - creator_id: + instrument_id: type: string - creator_username: + instrument_slug: type: string - id: + is_computed: + type: boolean + latest_time: type: string + latest_value: + type: number name: type: string - project_id: + parameter: + type: string + parameter_id: type: string slug: type: string sort_order: type: integer - timeseries: - items: - $ref: '#/components/schemas/collectionGroupDetailsTimeseries' - type: array - update_date: - type: string - updater_id: + type: + $ref: '#/components/schemas/db.TimeseriesType' + unit: type: string - updater_username: + unit_id: type: string + variable: + type: object type: object - Datalogger: + db.DataloggerEquivalencyTableField: example: - updater_username: updater_username - creator_username: creator_username - model_id: model_id - update_date: update_date - tables: - - id: id - table_name: table_name - - id: id - table_name: table_name - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - model: model + timeseries_id: timeseries_id id: id - sn: sn - create_date: create_date - errors: - - errors - - errors - slug: slug + display_name: display_name + instrument_id: instrument_id + field_name: field_name properties: - create_date: - type: string - creator_id: + display_name: type: string - creator_username: + field_name: type: string - errors: - items: - type: string - type: array id: type: string - model: - type: string - model_id: - type: string - name: - type: string - project_id: - type: string - slug: - type: string - sn: - type: string - tables: - items: - $ref: 
'#/components/schemas/DataloggerTable' - type: array - update_date: - type: string - updater_id: + instrument_id: type: string - updater_username: + timeseries_id: type: string type: object - DataloggerTable: + db.DataloggerTableIDName: example: id: id table_name: table_name @@ -9034,473 +8662,2051 @@ components: table_name: type: string type: object - DataloggerTablePreview: - example: - preview: - bytes: - - 0 - - 0 - status: 6 - datalogger_table_id: datalogger_table_id - update_date: update_date + db.DomainGroupOpt: properties: - datalogger_table_id: + description: type: string - preview: - $ref: '#/components/schemas/pgtype.JSON' - update_date: + id: type: string - type: object - DataloggerWithKey: + value: + type: string + type: object + db.EmailAutocompleteListRow: example: - updater_username: updater_username - creator_username: creator_username - model_id: model_id - update_date: update_date - tables: - - id: id - table_name: table_name - - id: id - table_name: table_name - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - model: model + user_type: user_type id: id - sn: sn - create_date: create_date - errors: - - errors - - errors - key: key - slug: slug + email: email + username: "{}" properties: - create_date: - type: string - creator_id: - type: string - creator_username: + email: type: string - errors: - items: - type: string - type: array id: type: string - key: - type: string - model: - type: string - model_id: - type: string - name: - type: string - project_id: - type: string - slug: - type: string - sn: - type: string - tables: - items: - $ref: '#/components/schemas/DataloggerTable' - type: array - update_date: - type: string - updater_id: - type: string - updater_username: + user_type: type: string + username: + type: object type: object - District: + db.EmailAutocompleteResult: example: - office_id: office_id - agency: agency - initials: initials - division_initials: division_initials - division_name: 
division_name - name: name + user_type: user_type id: id + email: email + username: username properties: - agency: - type: string - division_initials: - type: string - division_name: + email: type: string id: type: string - initials: - type: string - name: + user_type: type: string - office_id: + username: type: string type: object - DistrictRollup: + db.HomeGetRow: example: - expected_total_submittals: 6 - office_id: office_id - alert_type_id: alert_type_id - month: month - project_id: project_id - red_submittals: 5 - green_submittals: 1 - yellow_submittals: 5 - actual_total_submittals: 0 - district_initials: district_initials - project_name: project_name + new_instruments_7d: 1 + project_count: 5 + instrument_group_count: 6 + new_measurements_2h: 5 + instrument_count: 0 properties: - actual_total_submittals: + instrument_count: type: integer - alert_type_id: - type: string - district_initials: - type: string - expected_total_submittals: + instrument_group_count: type: integer - green_submittals: + new_instruments_7d: type: integer - month: - type: string - office_id: - type: string - project_id: - type: string - project_name: - type: string - red_submittals: + new_measurements_2h: type: integer - yellow_submittals: + project_count: type: integer type: object - Domain: + db.IDSlugName: + example: + name: name + id: id + slug: slug properties: - description: - type: string - group: - type: string id: type: string - value: + name: + type: string + slug: type: string type: object - DomainGroupOption: + db.InstrumentCreateBatchRow: + example: + id: id + slug: slug properties: - description: - type: string id: type: string - value: + slug: type: string type: object - DomainMap: - additionalProperties: - items: - $ref: '#/components/schemas/DomainGroupOption' - type: array - type: object - EmailAutocompleteResult: + db.InstrumentGroup: example: - user_type: user_type + deleted: true + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by 
+ created_at: created_at + description: description id: id - email: email - username: username + created_by: created_by + slug: slug properties: - email: + created_at: + type: string + created_by: + type: string + deleted: + type: boolean + description: type: string id: type: string - user_type: + name: type: string - username: + project_id: type: string - type: object - EquivalencyTable: - example: - datalogger_table_id: datalogger_table_id - datalogger_table_name: datalogger_table_name - rows: - - timeseries_id: timeseries_id - id: id - display_name: display_name - instrument_id: instrument_id - field_name: field_name - - timeseries_id: timeseries_id - id: id - display_name: display_name - instrument_id: instrument_id - field_name: field_name - datalogger_id: datalogger_id - properties: - datalogger_id: + slug: type: string - datalogger_table_id: + updated_at: type: string - datalogger_table_name: + updated_by: type: string - rows: - items: - $ref: '#/components/schemas/EquivalencyTableRow' - type: array type: object - EquivalencyTableRow: + db.InstrumentIDName: example: - timeseries_id: timeseries_id - id: id - display_name: display_name + instrument_name: instrument_name instrument_id: instrument_id - field_name: field_name properties: - display_name: - type: string - field_name: - type: string - id: - type: string instrument_id: type: string - timeseries_id: + instrument_name: type: string type: object - Evaluation: + db.InstrumentNote: example: - end_date: end_date - updater_username: updater_username - alert_config_id: alert_config_id - creator_username: creator_username - alert_config_name: alert_config_name - body: body - project_name: project_name - submittal_id: submittal_id - update_date: update_date - instruments: - - instrument_name: instrument_name - instrument_id: instrument_id - - instrument_name: instrument_name - instrument_id: instrument_id - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id + updated_at: 
updated_at + updated_by: updated_by + created_at: created_at id: id - create_date: create_date - start_date: start_date + time: time + body: body + title: title + created_by: created_by + instrument_id: instrument_id properties: - alert_config_id: - type: string - alert_config_name: - type: string body: type: string - create_date: + created_at: type: string - creator_id: - type: string - creator_username: - type: string - end_date: + created_by: type: string id: type: string - instruments: - items: - $ref: '#/components/schemas/EvaluationInstrument' - type: array - name: - type: string - project_id: - type: string - project_name: - type: string - start_date: + instrument_id: type: string - submittal_id: + time: type: string - update_date: + title: type: string - updater_id: + updated_at: type: string - updater_username: + updated_by: type: string type: object - EvaluationInstrument: + db.IpiMeasurement: example: - instrument_name: instrument_name - instrument_id: instrument_id + elevation: 6.027456183070403 + temp: 5.637376656633329 + inc_dev: 1.4658129805029452 + tilt: 2.3021358869347655 + segment_id: 5 + cum_dev: 0.8008281904610115 properties: - instrument_id: - type: string - instrument_name: - type: string + cum_dev: + type: number + elevation: + type: number + inc_dev: + type: number + segment_id: + type: integer + temp: + type: number + tilt: + type: number type: object - Geometry: + db.JobStatus: + enum: + - SUCCESS + - FAIL + - INIT + type: string + x-enum-varnames: + - JobStatusSUCCESS + - JobStatusFAIL + - JobStatusINIT + db.MeasurementCollectionLean: example: - geometries: - - geometries: - - null - - null - coordinates: "{}" - type: type - - geometries: - - null - - null - coordinates: "{}" - type: type - coordinates: "{}" - type: type + timeseries_id: timeseries_id + items: + - null + - null properties: - coordinates: - type: object - geometries: + items: items: - $ref: '#/components/schemas/geojson.Geometry' + $ref: 
'#/components/schemas/db.MeasurementLean' type: array - type: + timeseries_id: type: string type: object - Heartbeat: - example: + db.MeasurementLean: + additionalProperties: + type: number + type: object + db.PgTimezoneNamesListRow: + example: + utc_offset: utc_offset + name: name + abbrev: abbrev + is_dst: true + properties: + abbrev: + type: string + is_dst: + type: boolean + name: + type: string + utc_offset: + type: string + type: object + db.PlotConfigMeasurementListBullseyeRow: + example: + x: "{}" + "y": "{}" time: time properties: time: type: string + x: + type: object + "y": + type: object type: object - Home: + db.PlotType: + enum: + - scatter-line + - profile + - contour + - bullseye + type: string + x-enum-varnames: + - PlotTypeScatterLine + - PlotTypeProfile + - PlotTypeContour + - PlotTypeBullseye + db.ProfileCreateRow: example: - new_instruments_7d: 1 - project_count: 5 - instrument_group_count: 6 - new_measurements_2h: 5 - instrument_count: 0 + id: id + display_name: display_name + email: email + username: username properties: - instrument_count: - type: integer - instrument_group_count: - type: integer - new_instruments_7d: - type: integer - new_measurements_2h: + display_name: + type: string + email: + type: string + id: + type: string + username: + type: string + type: object + db.ProfileProjectRoleListForProjectRow: + example: + role: role + role_id: role_id + profile_id: profile_id + id: id + email: email + username: username + properties: + email: + type: string + id: + type: string + profile_id: + type: string + role: + type: string + role_id: + type: string + username: + type: string + type: object + db.ProjectCreateBatchRow: + example: + id: id + slug: slug + properties: + id: + type: string + slug: + type: string + type: object + db.ReportConfigGlobalOverrides: + example: + date_range: + value: value + enabled: true + show_nonvalidated: + value: true + enabled: true + show_masked: + value: true + enabled: true + properties: + date_range: 
+ $ref: '#/components/schemas/db.TextOption' + show_masked: + $ref: '#/components/schemas/db.ToggleOption' + show_nonvalidated: + $ref: '#/components/schemas/db.ToggleOption' + type: object + db.ReportDownloadJob: + example: + progress_updated_at: progress_updated_at + file_key: file_key + report_config_id: report_config_id + created_at: created_at + progress: 0 + file_expiry: file_expiry + id: id + created_by: created_by + status: SUCCESS + properties: + created_at: + type: string + created_by: + type: string + file_expiry: + type: string + file_key: + type: string + id: + type: string + progress: type: integer - project_count: + progress_updated_at: + type: string + report_config_id: + type: string + status: + $ref: '#/components/schemas/db.JobStatus' + type: object + db.SaaMeasurement: + example: + elevation: 0.8008281904610115 + temp: 1.4658129805029452 + z_cum_dev: 1.2315135367772556 + y_increment: 4.145608029883936 + x_cum_dev: 7.061401241503109 + temp_increment: 5.637376656633329 + z_increment: 1.0246457001441578 + y_cum_dev: 2.027123023002322 + x_increment: 9.301444243932576 + x: 2.3021358869347655 + "y": 3.616076749251911 + z: 7.386281948385884 + segment_id: 6 + temp_cum_dev: 5.962133916683182 + properties: + elevation: + type: number + segment_id: type: integer + temp: + type: number + temp_cum_dev: + type: number + temp_increment: + type: number + x: + type: number + x_cum_dev: + type: number + x_increment: + type: number + "y": + type: number + y_cum_dev: + type: number + y_increment: + type: number + z: + type: number + z_cum_dev: + type: number + z_increment: + type: number type: object - IDSlugName: + db.TextOption: example: - name: name + value: value + enabled: true + properties: + enabled: + type: boolean + value: + type: string + type: object + db.TimeseriesComputedListForInstrumentRow: + example: + formula_name: formula_name + formula: formula id: id + instrument_id: instrument_id + unit_id: unit_id slug: slug + parameter_id: parameter_id 
properties: + formula: + type: string + formula_name: + type: string id: type: string - name: + instrument_id: + type: string + parameter_id: type: string slug: type: string + unit_id: + type: string type: object - InclinometerMeasurement: + db.TimeseriesCreateBatchRow: example: - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date + name: name + id: id + type: standard + instrument_id: instrument_id + unit_id: unit_id + slug: slug + parameter_id: parameter_id properties: - create_date: + id: type: string - creator: + instrument_id: type: string - time: + name: type: string - values: - items: - type: integer - type: array + parameter_id: + type: string + slug: + type: string + type: + $ref: '#/components/schemas/db.TimeseriesType' + unit_id: + type: string + type: object + db.TimeseriesType: + enum: + - standard + - constant + - computed + - cwms + type: string + x-enum-varnames: + - TimeseriesTypeStandard + - TimeseriesTypeConstant + - TimeseriesTypeComputed + - TimeseriesTypeCwms + db.ToggleOption: + example: + value: true + enabled: true + properties: + enabled: + type: boolean + value: + type: boolean type: object - InclinometerMeasurementCollection: + db.UploaderConfigMapping: example: timeseries_id: timeseries_id - inclinometers: - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date + uploader_config_id: uploader_config_id + field_name: field_name properties: - inclinometers: - items: - $ref: '#/components/schemas/InclinometerMeasurement' - type: array + field_name: + type: string timeseries_id: type: string + uploader_config_id: + type: string type: object - InclinometerMeasurementCollectionCollection: + db.UploaderConfigType: + enum: + - csv + - dux + - toa5 + type: string + x-enum-varnames: + - UploaderConfigTypeCsv + - UploaderConfigTypeDux + - UploaderConfigTypeToa5 + db.VAlert: example: - items: - - timeseries_id: 
timeseries_id - inclinometers: - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - - timeseries_id: timeseries_id - inclinometers: - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name + instrument_id: instrument_id + alert_config_id: alert_config_id + project_id: project_id + name: name + created_at: created_at + id: id + body: body + project_name: project_name properties: - items: - items: - $ref: '#/components/schemas/InclinometerMeasurementCollection' - type: array + alert_config_id: + type: string + body: + type: string + created_at: + type: string + id: + type: string + instruments: + items: + $ref: '#/components/schemas/db.InstrumentIDName' + type: array + name: + type: string + project_id: + type: string + project_name: + type: string + type: object + db.VAlertConfig: + example: + alert_type_id: alert_type_id + created_at: created_at + remind_interval: remind_interval + create_next_submittal_from: create_next_submittal_from + body: body + project_name: project_name + created_by: created_by + alert_type: alert_type + last_checked_at: last_checked_at + updated_by_username: updated_by_username + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name + instrument_id: instrument_id + updated_at: updated_at + project_id: project_id + mute_consecutive_alerts: true + name: name + updated_by: updated_by + schedule_interval: schedule_interval + started_at: started_at + created_by_username: created_by_username + id: id + alert_email_subscriptions: + - user_type: user_type + id: id + email: email + username: username + - user_type: user_type + id: id + email: email 
+ username: username + last_reminded_at: last_reminded_at + warning_interval: warning_interval + properties: + alert_email_subscriptions: + items: + $ref: '#/components/schemas/db.EmailAutocompleteResult' + type: array + alert_type: + type: string + alert_type_id: + type: string + body: + type: string + create_next_submittal_from: + type: string + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + id: + type: string + instruments: + items: + $ref: '#/components/schemas/db.InstrumentIDName' + type: array + last_checked_at: + type: string + last_reminded_at: + type: string + mute_consecutive_alerts: + type: boolean + name: + type: string + project_id: + type: string + project_name: + type: string + remind_interval: + type: string + schedule_interval: + type: string + started_at: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + warning_interval: + type: string + type: object + db.VCollectionGroupDetail: + example: + timeseries: + - instrument: instrument + type: standard + instrument_id: instrument_id + unit: unit + parameter: parameter + name: name + variable: "{}" + latest_value: 6.027456183070403 + id: id + instrument_slug: instrument_slug + is_computed: true + latest_time: latest_time + sort_order: 1 + unit_id: unit_id + slug: slug + parameter_id: parameter_id + - instrument: instrument + type: standard + instrument_id: instrument_id + unit: unit + parameter: parameter + name: name + variable: "{}" + latest_value: 6.027456183070403 + id: id + instrument_slug: instrument_slug + is_computed: true + latest_time: latest_time + sort_order: 1 + unit_id: unit_id + slug: slug + parameter_id: parameter_id + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_at: created_at + id: id + created_by: created_by + sort_order: 0 + slug: slug + properties: + created_at: + type: string + created_by: + type: string + id: + type: 
string + name: + type: string + project_id: + type: string + slug: + type: string + sort_order: + type: integer + timeseries: + items: + $ref: '#/components/schemas/db.CollectionGroupDetailsTimeseries' + type: array + updated_at: + type: string + updated_by: + type: string + type: object + db.VDatalogger: + example: + created_at: created_at + model_id: model_id + created_by: created_by + updated_by_username: updated_by_username + tables: + - id: id + table_name: table_name + - id: id + table_name: table_name + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_by_username: created_by_username + model: model + id: id + sn: sn + errors: + - errors + - errors + slug: slug + properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + errors: + items: + type: string + type: array + id: + type: string + model: + type: string + model_id: + type: string + name: + type: string + project_id: + type: string + slug: + type: string + sn: + type: string + tables: + items: + $ref: '#/components/schemas/db.DataloggerTableIDName' + type: array + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + type: object + db.VDataloggerEquivalencyTable: + example: + datalogger_table_id: datalogger_table_id + datalogger_table_name: datalogger_table_name + fields: + - timeseries_id: timeseries_id + id: id + display_name: display_name + instrument_id: instrument_id + field_name: field_name + - timeseries_id: timeseries_id + id: id + display_name: display_name + instrument_id: instrument_id + field_name: field_name + datalogger_id: datalogger_id + properties: + datalogger_id: + type: string + datalogger_table_id: + type: string + datalogger_table_name: + type: string + fields: + items: + $ref: '#/components/schemas/db.DataloggerEquivalencyTableField' + type: array + type: object + db.VDataloggerPreview: + example: + preview: + - 0 + - 0 + updated_at: 
updated_at + datalogger_table_id: datalogger_table_id + properties: + datalogger_table_id: + type: string + preview: + items: + type: integer + type: array + updated_at: + type: string + type: object + db.VDistrict: + example: + office_id: office_id + agency: agency + initials: initials + division_initials: division_initials + division_name: division_name + name: name + id: id + properties: + agency: + type: string + division_initials: + type: string + division_name: + type: string + id: + type: string + initials: + type: string + name: + type: string + office_id: + type: string + type: object + db.VDistrictRollup: + example: + expected_total_submittals: 6 + office_id: office_id + alert_type_id: alert_type_id + month: month + project_id: project_id + red_submittals: 5 + green_submittals: 1 + yellow_submittals: 5 + actual_total_submittals: 0 + district_initials: district_initials + project_name: project_name + properties: + actual_total_submittals: + type: integer + alert_type_id: + type: string + district_initials: + type: string + expected_total_submittals: + type: integer + green_submittals: + type: integer + month: + type: string + office_id: + type: string + project_id: + type: string + project_name: + type: string + red_submittals: + type: integer + yellow_submittals: + type: integer + type: object + db.VDomain: + properties: + description: + type: string + group: + type: string + id: + type: string + value: + type: string + type: object + db.VEvaluation: + example: + alert_config_id: alert_config_id + created_at: created_at + alert_config_name: alert_config_name + body: body + project_name: project_name + created_by: created_by + submittal_id: submittal_id + updated_by_username: updated_by_username + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name + instrument_id: instrument_id + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + started_at: started_at + 
created_by_username: created_by_username + id: id + ended_at: ended_at + properties: + alert_config_id: + type: string + alert_config_name: + type: string + body: + type: string + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + ended_at: + type: string + id: + type: string + instruments: + items: + $ref: '#/components/schemas/db.InstrumentIDName' + type: array + name: + type: string + project_id: + type: string + project_name: + type: string + started_at: + type: string + submittal_id: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + type: object + db.VInclMeasurement: + example: + time: time + instrument_id: instrument_id + measurements: "{}" + properties: + instrument_id: + type: string + measurements: + type: object + time: + type: string + type: object + db.VInclSegment: + example: + depth_timeseries_id: depth_timeseries_id + b180_timeseries_id: b180_timeseries_id + a180_timeseries_id: a180_timeseries_id + id: 0 + instrument_id: instrument_id + a0_timeseries_id: a0_timeseries_id + b0_timeseries_id: b0_timeseries_id + properties: + a0_timeseries_id: + type: string + a180_timeseries_id: + type: string + b0_timeseries_id: + type: string + b180_timeseries_id: + type: string + depth_timeseries_id: + type: string + id: + type: integer + instrument_id: + type: string + type: object + db.VInstrument: + example: + has_cwms: true + projects: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + alert_configs: + - alert_configs + - alert_configs + icon: icon + created_at: created_at + type: type + status_id: status_id + opts: "{}" + updated_at: updated_at + station: 1 + constants: + - constants + - constants + id: id + status_time: status_time + slug: slug + offset: 6 + type_id: type_id + show_cwms_tab: true + usgs_id: usgs_id + groups: + - groups + - groups + created_by: created_by + name: name + updated_by: updated_by + geometry: + - 0 + 
- 0 + nid_id: nid_id + telemetry: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + status: status + properties: + alert_configs: + items: + type: string + type: array + constants: + items: + type: string + type: array + created_at: + type: string + created_by: + type: string + geometry: + items: + type: integer + type: array + groups: + items: + type: string + type: array + has_cwms: + type: boolean + icon: + type: string + id: + type: string + name: + type: string + nid_id: + type: string + offset: + type: integer + opts: + type: object + projects: + items: + $ref: '#/components/schemas/db.IDSlugName' + type: array + show_cwms_tab: + type: boolean + slug: + type: string + station: + type: integer + status: + type: string + status_id: + type: string + status_time: + type: string + telemetry: + items: + $ref: '#/components/schemas/db.IDSlugName' + type: array + type: + type: string + type_id: + type: string + updated_at: + type: string + updated_by: + type: string + usgs_id: + type: string + type: object + db.VInstrumentGroup: + example: + updated_at: updated_at + project_id: project_id + name: name + timeseries_count: "{}" + updated_by: updated_by + created_at: created_at + description: description + id: id + created_by: created_by + instrument_count: 0 + slug: slug + properties: + created_at: + type: string + created_by: + type: string + description: + type: string + id: + type: string + instrument_count: + type: integer + name: + type: string + project_id: + type: string + slug: + type: string + timeseries_count: + type: object + updated_at: + type: string + updated_by: + type: string + type: object + db.VInstrumentStatus: + example: + status_id: status_id + id: id + time: time + instrument_id: instrument_id + status: status + properties: + id: + type: string + instrument_id: + type: string + status: + type: string + status_id: + type: string + time: + type: string + type: object + db.VIpiMeasurement: + example: + time: time + 
instrument_id: instrument_id + measurements: + - elevation: 6.027456183070403 + temp: 5.637376656633329 + inc_dev: 1.4658129805029452 + tilt: 2.3021358869347655 + segment_id: 5 + cum_dev: 0.8008281904610115 + - elevation: 6.027456183070403 + temp: 5.637376656633329 + inc_dev: 1.4658129805029452 + tilt: 2.3021358869347655 + segment_id: 5 + cum_dev: 0.8008281904610115 + properties: + instrument_id: + type: string + measurements: + items: + $ref: '#/components/schemas/db.IpiMeasurement' + type: array + time: + type: string + type: object + db.VIpiSegment: + example: + length: 6.027456183070403 + tilt_timeseries_id: tilt_timeseries_id + id: 0 + inc_dev_timeseries_id: inc_dev_timeseries_id + instrument_id: instrument_id + length_timeseries_id: length_timeseries_id + properties: + id: + type: integer + inc_dev_timeseries_id: + type: string + instrument_id: + type: string + length: + type: number + length_timeseries_id: + type: string + tilt_timeseries_id: + type: string + type: object + db.VPlotConfiguration: + example: + date_range: date_range + display: "{}" + show_comments: true + report_configs: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + created_at: created_at + auto_range: true + show_masked: true + threshold: 0 + created_by: created_by + show_nonvalidated: true + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + id: id + plot_type: scatter-line + slug: slug + properties: + auto_range: + type: boolean + created_at: + type: string + created_by: + type: string + date_range: + type: string + display: + type: object + id: + type: string + name: + type: string + plot_type: + $ref: '#/components/schemas/db.PlotType' + project_id: + type: string + report_configs: + items: + $ref: '#/components/schemas/db.IDSlugName' + type: array + show_comments: + type: boolean + show_masked: + type: boolean + show_nonvalidated: + type: boolean + slug: + type: string + threshold: + type: integer + updated_at: + type: 
string + updated_by: + type: string + type: object + db.VProfile: + example: + is_admin: true + roles: + - roles + - roles + tokens: + - token_id: token_id + issued: issued + - token_id: token_id + issued: issued + id: id + display_name: display_name + edipi: 0 + email: email + username: username + properties: + display_name: + type: string + edipi: + type: integer + email: + type: string + id: + type: string + is_admin: + type: boolean + roles: + items: + type: string + type: array + tokens: + items: + $ref: '#/components/schemas/db.VProfileToken' + type: array + username: + type: string + type: object + db.VProfileToken: + example: + token_id: token_id + issued: issued + properties: + issued: + type: string + token_id: + type: string + type: object + db.VProject: + example: + image: "{}" + federal_id: federal_id + created_at: created_at + created_by: created_by + instrument_count: 0 + office_id: office_id + updated_by_username: updated_by_username + instrument_group_count: 6 + updated_at: updated_at + name: name + updated_by: updated_by + created_by_username: created_by_username + district_id: district_id + id: id + slug: slug + properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + district_id: + type: string + federal_id: + type: string + id: + type: string + image: + type: object + instrument_count: + type: integer + instrument_group_count: + type: integer + name: + type: string + office_id: + type: string + slug: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + type: object + db.VReportConfig: + example: + global_overrides: + date_range: + value: value + enabled: true + show_nonvalidated: + value: true + enabled: true + show_masked: + value: true + enabled: true + created_at: created_at + description: description + project_name: project_name + created_by: created_by + updated_by_username: updated_by_username + district_name: district_name + 
updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_by_username: created_by_username + id: id + plot_configs: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + slug: slug + properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + description: + type: string + district_name: + type: string + global_overrides: + $ref: '#/components/schemas/db.ReportConfigGlobalOverrides' + id: + type: string + name: + type: string + plot_configs: + items: + $ref: '#/components/schemas/db.IDSlugName' + type: array + project_id: + type: string + project_name: + type: string + slug: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + type: object + db.VSaaMeasurement: + example: + time: time + instrument_id: instrument_id + measurements: + - elevation: 0.8008281904610115 + temp: 1.4658129805029452 + z_cum_dev: 1.2315135367772556 + y_increment: 4.145608029883936 + x_cum_dev: 7.061401241503109 + temp_increment: 5.637376656633329 + z_increment: 1.0246457001441578 + y_cum_dev: 2.027123023002322 + x_increment: 9.301444243932576 + x: 2.3021358869347655 + "y": 3.616076749251911 + z: 7.386281948385884 + segment_id: 6 + temp_cum_dev: 5.962133916683182 + - elevation: 0.8008281904610115 + temp: 1.4658129805029452 + z_cum_dev: 1.2315135367772556 + y_increment: 4.145608029883936 + x_cum_dev: 7.061401241503109 + temp_increment: 5.637376656633329 + z_increment: 1.0246457001441578 + y_cum_dev: 2.027123023002322 + x_increment: 9.301444243932576 + x: 2.3021358869347655 + "y": 3.616076749251911 + z: 7.386281948385884 + segment_id: 6 + temp_cum_dev: 5.962133916683182 + properties: + instrument_id: + type: string + measurements: + items: + $ref: '#/components/schemas/db.SaaMeasurement' + type: array + time: + type: string + type: object + db.VSaaSegment: + example: + z_timeseries_id: z_timeseries_id + temp_timeseries_id: 
temp_timeseries_id + y_timeseries_id: y_timeseries_id + x_timeseries_id: x_timeseries_id + length: 6.027456183070403 + id: 0 + instrument_id: instrument_id + length_timeseries_id: length_timeseries_id + properties: + id: + type: integer + instrument_id: + type: string + length: + type: number + length_timeseries_id: + type: string + temp_timeseries_id: + type: string + x_timeseries_id: + type: string + y_timeseries_id: + type: string + z_timeseries_id: + type: string + type: object + db.VSubmittal: + example: + alert_type_id: alert_type_id + alert_config_id: alert_config_id + created_at: created_at + alert_config_name: alert_config_name + submittal_status_id: submittal_status_id + submittal_status_name: submittal_status_name + warning_sent: true + completed_at: completed_at + project_id: project_id + alert_type_name: alert_type_name + marked_as_missing: true + due_at: due_at + id: id + properties: + alert_config_id: + type: string + alert_config_name: + type: string + alert_type_id: + type: string + alert_type_name: + type: string + completed_at: + type: string + created_at: + type: string + due_at: + type: string + id: + type: string + marked_as_missing: + type: boolean + project_id: + type: string + submittal_status_id: + type: string + submittal_status_name: + type: string + warning_sent: + type: boolean + type: object + db.VTimeseries: + example: + instrument: instrument + type: standard + instrument_id: instrument_id + unit: unit + parameter: parameter + name: name + variable: "{}" + id: id + instrument_slug: instrument_slug + is_computed: true + unit_id: unit_id + slug: slug + parameter_id: parameter_id + properties: + id: + type: string + instrument: + type: string + instrument_id: + type: string + instrument_slug: + type: string + is_computed: + type: boolean + name: + type: string + parameter: + type: string + parameter_id: + type: string + slug: + type: string + type: + $ref: '#/components/schemas/db.TimeseriesType' + unit: + type: string + unit_id: + 
type: string + variable: + type: object + type: object + db.VTimeseriesCwm: + example: + cwms_office_id: cwms_office_id + instrument: instrument + cwms_extent_earliest_time: cwms_extent_earliest_time + type: standard + cwms_timeseries_id: cwms_timeseries_id + instrument_id: instrument_id + unit: unit + parameter: parameter + cwms_extent_latest_time: cwms_extent_latest_time + name: name + variable: "{}" + id: id + instrument_slug: instrument_slug + is_computed: true + unit_id: unit_id + slug: slug + parameter_id: parameter_id + properties: + cwms_extent_earliest_time: + type: string + cwms_extent_latest_time: + type: string + cwms_office_id: + type: string + cwms_timeseries_id: + type: string + id: + type: string + instrument: + type: string + instrument_id: + type: string + instrument_slug: + type: string + is_computed: + type: boolean + name: + type: string + parameter: + type: string + parameter_id: + type: string + slug: + type: string + type: + $ref: '#/components/schemas/db.TimeseriesType' + unit: + type: string + unit_id: + type: string + variable: + type: object + type: object + db.VTimeseriesMeasurement: + example: + annotation: annotation + timeseries_id: timeseries_id + validated: true + masked: true + time: time + value: 0.8008281904610115 + properties: + annotation: + type: string + masked: + type: boolean + time: + type: string + timeseries_id: + type: string + validated: + type: boolean + value: + type: number + type: object + db.VUnit: + example: + measure: measure + unit_family_id: unit_family_id + name: name + unit_family: unit_family + id: id + abbreviation: abbreviation + measure_id: measure_id + properties: + abbreviation: + type: string + id: + type: string + measure: + type: string + measure_id: + type: string + name: + type: string + unit_family: + type: string + unit_family_id: + type: string + type: object + db.VUploaderConfig: + example: + validated_field: validated_field + created_at: created_at + description: description + comment_field: 
comment_field + type: csv + created_by: created_by + updated_by_username: updated_by_username + time_field: time_field + masked_field: masked_field + tz_name: tz_name + updated_at: updated_at + comment_field_enabled: true + project_id: project_id + name: name + updated_by: updated_by + masked_field_enabled: true + created_by_username: created_by_username + id: id + validated_field_enabled: true + slug: slug + properties: + comment_field: + type: string + comment_field_enabled: + type: boolean + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + description: + type: string + id: + type: string + masked_field: + type: string + masked_field_enabled: + type: boolean + name: + type: string + project_id: + type: string + slug: + type: string + time_field: + type: string + type: + $ref: '#/components/schemas/db.UploaderConfigType' + tz_name: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + validated_field: + type: string + validated_field_enabled: + type: boolean + type: object + dto.AlertConfig: + properties: + alert_email_subscriptions: + items: + $ref: '#/components/schemas/dto.EmailAutocompleteResult' + type: array + alert_type: + type: string + alert_type_id: + type: string + body: + type: string + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + id: + type: string + instruments: + items: + $ref: '#/components/schemas/dto.AlertConfigInstrument' + type: array + last_checked: + type: string + last_reminded: + type: string + mute_consecutive_alerts: + type: boolean + name: + type: string + project_id: + type: string + project_name: + type: string + remind_interval: + type: string + schedule_interval: + type: string + started_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: + type: string + warning_interval: + type: string + type: object + dto.AlertConfigInstrument: 
+ properties: + instrument_id: + type: string + instrument_name: + type: string + type: object + dto.AlertSubscription: + properties: + alert_config_id: + type: string + id: + type: string + mute_notify: + type: boolean + mute_ui: + type: boolean + profile_id: + type: string + type: object + dto.CalculatedTimeseries: + example: + formula_name: formula_name + formula: formula + id: id + instrument_id: instrument_id + unit_id: unit_id + slug: slug + parameter_id: parameter_id + properties: + formula: + type: string + formula_name: + type: string + id: + type: string + instrument_id: + type: string + parameter_id: + type: string + slug: + type: string + unit_id: + type: string + type: object + dto.CollectionGroup: + properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + id: + type: string + name: + type: string + project_id: + type: string + slug: + type: string + sort_order: + type: integer + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: + type: string + type: object + dto.Datalogger: + properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + errors: + items: + type: string + type: array + id: + type: string + model: + type: string + model_id: + type: string + name: + type: string + project_id: + type: string + slug: + type: string + sn: + type: string + tables: + items: + $ref: '#/components/schemas/dto.DataloggerTable' + type: array + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: + type: string + type: object + dto.DataloggerTable: + properties: + id: + type: string + table_name: + type: string + type: object + dto.EmailAutocompleteResult: + properties: + email: + type: string + id: + type: string + user_type: + type: string + username: + type: string + type: object + dto.EquivalencyTable: + properties: + datalogger_id: + type: string + datalogger_table_id: + type: string + 
datalogger_table_name: + type: string + rows: + items: + $ref: '#/components/schemas/dto.EquivalencyTableRow' + type: array + type: object + dto.EquivalencyTableRow: + properties: + display_name: + type: string + field_name: + type: string + id: + type: string + instrument_id: + type: string + timeseries_id: + type: string + type: object + dto.Evaluation: + example: + alert_config_id: alert_config_id + created_at: created_at + alert_config_name: alert_config_name + body: body + project_name: project_name + created_by: created_by + submittal_id: submittal_id + updated_by_username: updated_by_username + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name + instrument_id: instrument_id + project_id: project_id + name: name + updated_by: updated_by + started_at: started_at + updatedd_at: updatedd_at + created_by_username: created_by_username + id: id + ended_at: ended_at + properties: + alert_config_id: + type: string + alert_config_name: + type: string + body: + type: string + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + ended_at: + type: string + id: + type: string + instruments: + items: + $ref: '#/components/schemas/dto.EvaluationInstrument' + type: array + name: + type: string + project_id: + type: string + project_name: + type: string + started_at: + type: string + submittal_id: + type: string + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: + type: string + type: object + dto.EvaluationInstrument: + example: + instrument_name: instrument_name + instrument_id: instrument_id + properties: + instrument_id: + type: string + instrument_name: + type: string + type: object + dto.IDSlugName: + example: + name: name + id: id + slug: slug + properties: + id: + type: string + name: + type: string + slug: + type: string type: object - InclinometerMeasurementCollectionLean: + dto.InclSegment: example: - timeseries_id: 
timeseries_id - items: - - null - - null + depth_timeseries_id: depth_timeseries_id + b180_timeseries_id: b180_timeseries_id + a180_timeseries_id: a180_timeseries_id + id: 0 + instrument_id: instrument_id + a0_timeseries_id: a0_timeseries_id + b0_timeseries_id: b0_timeseries_id properties: - items: - items: - $ref: '#/components/schemas/InclinometerMeasurementLean' - type: array - timeseries_id: + a0_timeseries_id: type: string - type: object - InclinometerMeasurementLean: - additionalProperties: - items: + a180_timeseries_id: + type: string + b0_timeseries_id: + type: string + b180_timeseries_id: + type: string + depth_timeseries_id: + type: string + id: type: integer - type: array + instrument_id: + type: string type: object - Instrument: + dto.Instrument: example: has_cwms: true projects: @@ -9514,46 +10720,35 @@ components: - alert_configs - alert_configs icon: icon + created_at: created_at type: type aware_id: aware_id + updated_by_username: updated_by_username status_id: status_id opts: key: "" - station: 6 + station: 1 + created_by_username: created_by_username constants: - constants - constants id: id status_time: status_time - create_date: create_date slug: slug - updater_username: updater_username - offset: 0 - creator_username: creator_username + offset: 6 type_id: type_id show_cwms_tab: true usgs_id: usgs_id groups: - groups - groups - update_date: update_date - creator_id: creator_id + created_by: created_by name: name - updater_id: updater_id + updated_by: updated_by + updatedd_at: updatedd_at geometry: - geometries: - - geometries: - - null - - null - coordinates: "{}" - type: type - - geometries: - - null - - null - coordinates: "{}" - type: type - coordinates: "{}" - type: type + - 0 + - 0 nid_id: nid_id status: status properties: @@ -9567,14 +10762,16 @@ components: items: type: string type: array - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string geometry: - 
$ref: '#/components/schemas/Geometry' + items: + type: integer + type: array groups: items: type: string @@ -9596,7 +10793,7 @@ components: type: object projects: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.IDSlugName' type: array show_cwms_tab: type: boolean @@ -9614,43 +10811,22 @@ components: type: string type_id: type: string - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string usgs_id: type: string type: object - InstrumentCount: - example: - instrument_count: 0 - properties: - instrument_count: - type: integer - type: object - InstrumentGroup: - example: - updater_username: updater_username - creator_username: creator_username - description: description - instrument_count: 0 - update_date: update_date - project_id: project_id - creator_id: creator_id - name: name - timeseries_count: 6 - updater_id: updater_id - id: id - create_date: create_date - slug: slug + dto.InstrumentGroup: properties: - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string description: type: string @@ -9666,34 +10842,22 @@ components: type: string timeseries_count: type: integer - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - InstrumentNote: - example: - updater_username: updater_username - creator_username: creator_username - creator_id: creator_id - updater_id: updater_id - id: id - time: time - body: body - create_date: create_date - title: title - instrument_id: instrument_id - update_date: update_date + dto.InstrumentNote: properties: body: type: string - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string id: type: string @@ -9703,61 +10867,28 @@ components: type: string title: type: 
string - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - InstrumentNoteCollection: - example: - items: - - updater_username: updater_username - creator_username: creator_username - creator_id: creator_id - updater_id: updater_id - id: id - time: time - body: body - create_date: create_date - title: title - instrument_id: instrument_id - update_date: update_date - - updater_username: updater_username - creator_username: creator_username - creator_id: creator_id - updater_id: updater_id - id: id - time: time - body: body - create_date: create_date - title: title - instrument_id: instrument_id - update_date: update_date + dto.InstrumentNoteCollection: properties: items: items: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/dto.InstrumentNote' type: array type: object - InstrumentProjectAssignments: - example: - project_ids: - - project_ids - - project_ids + dto.InstrumentProjectAssignments: properties: project_ids: items: type: string type: array type: object - InstrumentStatus: - example: - status_id: status_id - id: id - time: time - status: status + dto.InstrumentStatus: properties: id: type: string @@ -9768,62 +10899,14 @@ components: time: type: string type: object - InstrumentStatusCollection: - example: - items: - - status_id: status_id - id: id - time: time - status: status - - status_id: status_id - id: id - time: time - status: status + dto.InstrumentStatusCollection: properties: items: items: - $ref: '#/components/schemas/InstrumentStatus' - type: array - type: object - InstrumentsValidation: - example: - is_valid: true - errors: - - errors - - errors - properties: - errors: - items: - type: string - type: array - is_valid: - type: boolean - type: object - IpiMeasurements: - example: - time: time - measurements: - - elevation: 6.027456183070403 - temp: 5.637376656633329 - inc_dev: 1.4658129805029452 - tilt: 2.3021358869347655 
- segment_id: 5 - cum_dev: 0.8008281904610115 - - elevation: 6.027456183070403 - temp: 5.637376656633329 - inc_dev: 1.4658129805029452 - tilt: 2.3021358869347655 - segment_id: 5 - cum_dev: 0.8008281904610115 - properties: - measurements: - items: - $ref: '#/components/schemas/IpiSegmentMeasurement' + $ref: '#/components/schemas/dto.InstrumentStatus' type: array - time: - type: string type: object - IpiSegment: + dto.IpiSegment: example: temp_timeseries_id: temp_timeseries_id length: 6.027456183070403 @@ -9848,29 +10931,7 @@ components: tilt_timeseries_id: type: string type: object - IpiSegmentMeasurement: - example: - elevation: 6.027456183070403 - temp: 5.637376656633329 - inc_dev: 1.4658129805029452 - tilt: 2.3021358869347655 - segment_id: 5 - cum_dev: 0.8008281904610115 - properties: - cum_dev: - type: number - elevation: - type: number - inc_dev: - type: number - segment_id: - type: integer - temp: - type: number - tilt: - type: number - type: object - Measurement: + dto.Measurement: example: annotation: annotation validated: true @@ -9892,164 +10953,32 @@ components: value: type: number type: object - MeasurementCollection: - example: - timeseries_id: timeseries_id - items: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - properties: - items: - items: - $ref: '#/components/schemas/Measurement' - type: array - timeseries_id: - type: string - type: object - MeasurementCollectionLean: - example: - timeseries_id: timeseries_id - items: - - null - - null + dto.MeasurementCollection: properties: items: items: - $ref: '#/components/schemas/MeasurementLean' + $ref: '#/components/schemas/dto.Measurement' type: array timeseries_id: type: string type: object - MeasurementLean: - additionalProperties: - type: number - type: object - Opts: + dto.Opts: additionalProperties: true type: object 
- PlotConfig: - example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - key: "" - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 0 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug - properties: - auto_range: - type: boolean - create_date: - type: string - creator_id: - type: string - creator_username: - type: string - date_range: - type: string - display: - additionalProperties: true - type: object - id: - type: string - name: - type: string - plot_type: - type: string - project_id: - type: string - report_configs: - items: - $ref: '#/components/schemas/IDSlugName' - type: array - show_comments: - type: boolean - show_masked: - type: boolean - show_nonvalidated: - type: boolean - slug: - type: string - threshold: - type: integer - update_date: - type: string - updater_id: - type: string - updater_username: - type: string - type: object - PlotConfigBullseyePlot: - example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - y_axis_timeseries_id: y_axis_timeseries_id - x_axis_timeseries_id: x_axis_timeseries_id - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 0 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug + dto.PlotConfigBullseyePlot: properties: auto_range: type: boolean - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + 
created_by_username: type: string date_range: type: string display: - $ref: '#/components/schemas/PlotConfigBullseyePlotDisplay' + $ref: '#/components/schemas/dto.PlotConfigBullseyePlotDisplay' id: type: string name: @@ -10060,7 +10989,7 @@ components: type: string report_configs: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.IDSlugName' type: array show_comments: type: boolean @@ -10072,71 +11001,34 @@ components: type: string threshold: type: integer - update_date: - type: string - updater_id: - type: string - updater_username: - type: string - type: object - PlotConfigBullseyePlotDisplay: - example: - y_axis_timeseries_id: y_axis_timeseries_id - x_axis_timeseries_id: x_axis_timeseries_id - properties: - x_axis_timeseries_id: + updated_by: type: string - y_axis_timeseries_id: + updated_by_username: type: string - type: object - PlotConfigContourPlot: - example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - contour_smoothing: true - gradient_smoothing: true - locf_backfill: locf_backfill - timeseries_ids: - - timeseries_ids - - timeseries_ids - show_labels: true - time: time - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 0 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug + updatedd_at: + type: string + type: object + dto.PlotConfigBullseyePlotDisplay: + properties: + x_axis_timeseries_id: + type: string + y_axis_timeseries_id: + type: string + type: object + dto.PlotConfigContourPlot: properties: auto_range: type: boolean - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string date_range: type: string 
display: - $ref: '#/components/schemas/PlotConfigContourPlotDisplay' + $ref: '#/components/schemas/dto.PlotConfigContourPlotDisplay' id: type: string name: @@ -10147,7 +11039,7 @@ components: type: string report_configs: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.IDSlugName' type: array show_comments: type: boolean @@ -10159,23 +11051,14 @@ components: type: string threshold: type: integer - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - PlotConfigContourPlotDisplay: - example: - contour_smoothing: true - gradient_smoothing: true - locf_backfill: locf_backfill - timeseries_ids: - - timeseries_ids - - timeseries_ids - show_labels: true - time: time + dto.PlotConfigContourPlotDisplay: properties: contour_smoothing: type: boolean @@ -10192,61 +11075,20 @@ components: type: string type: array type: object - PlotConfigMeasurementBullseyePlot: - example: - x: 0.8008281904610115 - "y": 6.027456183070403 - time: time - properties: - time: - type: string - x: - type: number - "y": - type: number - type: object - PlotConfigProfilePlot: - example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - instrument_type: instrument_type - instrument_id: instrument_id - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 0 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug + dto.PlotConfigProfilePlot: properties: auto_range: type: boolean - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string date_range: type: string display: - 
$ref: '#/components/schemas/PlotConfigProfilePlotDisplay' + $ref: '#/components/schemas/dto.PlotConfigProfilePlotDisplay' id: type: string name: @@ -10257,7 +11099,7 @@ components: type: string report_configs: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.IDSlugName' type: array show_comments: type: boolean @@ -10269,30 +11111,21 @@ components: type: string threshold: type: integer - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - PlotConfigProfilePlotDisplay: - example: - instrument_type: instrument_type - instrument_id: instrument_id + dto.PlotConfigProfilePlotDisplay: properties: instrument_id: type: string instrument_type: type: string type: object - PlotConfigScatterLineCustomShape: - example: - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true + dto.PlotConfigScatterLineCustomShape: properties: color: type: string @@ -10305,155 +11138,40 @@ components: plot_configuration_id: type: string type: object - PlotConfigScatterLineDisplay: - example: - layout: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title - traces: - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - 
plot_configuration_id: plot_configuration_id - trace_order: 6 + dto.PlotConfigScatterLineDisplay: properties: layout: - $ref: '#/components/schemas/PlotConfigScatterLineLayout' + $ref: '#/components/schemas/dto.PlotConfigScatterLineLayout' traces: items: - $ref: '#/components/schemas/PlotConfigScatterLineTimeseriesTrace' + $ref: '#/components/schemas/dto.PlotConfigScatterLineTimeseriesTrace' type: array type: object - PlotConfigScatterLineLayout: - example: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title + dto.PlotConfigScatterLineLayout: properties: custom_shapes: items: - $ref: '#/components/schemas/PlotConfigScatterLineCustomShape' + $ref: '#/components/schemas/dto.PlotConfigScatterLineCustomShape' type: array y2_axis_title: type: string y_axis_title: type: string type: object - PlotConfigScatterLinePlot: - example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - layout: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title - traces: - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 
1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 5 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug + dto.PlotConfigScatterLinePlot: properties: auto_range: type: boolean - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string date_range: type: string display: - $ref: '#/components/schemas/PlotConfigScatterLineDisplay' + $ref: '#/components/schemas/dto.PlotConfigScatterLineDisplay' id: type: string name: @@ -10464,7 +11182,7 @@ components: type: string report_configs: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.IDSlugName' type: array show_comments: type: boolean @@ -10476,26 +11194,14 @@ components: type: string threshold: type: integer - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - PlotConfigScatterLineTimeseriesTrace: - example: - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 + dto.PlotConfigScatterLineTimeseriesTrace: properties: color: type: string @@ -10523,64 +11229,29 @@ components: description: "y1 or y2, default y1" type: string type: object - Profile: - example: - is_admin: true - roles: - - roles - - roles - tokens: - - token_id: token_id - issued: issued - - token_id: token_id - issued: issued - id: id - display_name: display_name - 
email: email - username: username - properties: - display_name: - type: string - email: - type: string - id: - type: string - is_admin: - type: boolean - roles: - items: - type: string - type: array - tokens: - items: - $ref: '#/components/schemas/TokenInfoProfile' - type: array - username: - type: string - type: object - Project: + dto.Project: example: image: image - updater_username: updater_username federal_id: federal_id - creator_username: creator_username + created_at: created_at + created_by: created_by instrument_count: 0 - update_date: update_date office_id: office_id + updated_by_username: updated_by_username instrument_group_count: 6 - creator_id: creator_id name: name - updater_id: updater_id + updated_by: updated_by + updatedd_at: updatedd_at + created_by_username: created_by_username district_id: district_id id: id - create_date: create_date slug: slug properties: - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string district_id: type: string @@ -10600,105 +11271,41 @@ components: type: string slug: type: string - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - ProjectCount: - example: - project_count: 0 - properties: - project_count: - type: integer - type: object - ProjectInstrumentAssignments: - example: - instrument_ids: - - instrument_ids - - instrument_ids + dto.ProjectInstrumentAssignments: properties: instrument_ids: items: type: string type: array type: object - ProjectMembership: - example: - role: role - role_id: role_id - profile_id: profile_id - id: id - email: email - username: username - properties: - email: - type: string - id: - type: string - profile_id: - type: string - role: - type: string - role_id: - type: string - username: - type: string - type: object - ReportConfig: - example: - updater_username: updater_username - creator_username: 
creator_username - global_overrides: - date_range: - value: value - enabled: true - show_nonvalidated: - value: true - enabled: true - show_masked: - value: true - enabled: true - description: description - project_name: project_name - update_date: update_date - district_name: district_name - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - slug: slug + dto.ReportConfig: properties: - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string description: type: string district_name: type: string global_overrides: - $ref: '#/components/schemas/ReportConfigGlobalOverrides' + $ref: '#/components/schemas/dto.ReportConfigGlobalOverrides' id: type: string name: type: string plot_configs: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.IDSlugName' type: array project_id: type: string @@ -10706,231 +11313,27 @@ components: type: string slug: type: string - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - ReportConfigGlobalOverrides: - example: - date_range: - value: value - enabled: true - show_nonvalidated: - value: true - enabled: true - show_masked: - value: true - enabled: true + dto.ReportConfigGlobalOverrides: properties: date_range: - $ref: '#/components/schemas/TextOption' + $ref: '#/components/schemas/dto.TextOption' show_masked: - $ref: '#/components/schemas/ToggleOption' + $ref: '#/components/schemas/dto.ToggleOption' show_nonvalidated: - $ref: '#/components/schemas/ToggleOption' - type: object - ReportConfigWithPlotConfigs: - example: - updater_username: updater_username - creator_username: creator_username - global_overrides: - date_range: - value: value - enabled: true - 
show_nonvalidated: - value: true - enabled: true - show_masked: - value: true - enabled: true - description: description - project_name: project_name - update_date: update_date - district_name: district_name - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_configs: - - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - layout: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title - traces: - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 5 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug - - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - layout: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color 
- data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title - traces: - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 5 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug - slug: slug - properties: - create_date: - type: string - creator_id: - type: string - creator_username: - type: string - description: - type: string - district_name: - type: string - global_overrides: - $ref: '#/components/schemas/ReportConfigGlobalOverrides' - id: - type: string - name: - type: string - plot_configs: - items: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' - type: array - project_id: - type: string - project_name: - type: string - slug: - type: string - update_date: - type: string - updater_id: - type: string - updater_username: - type: string + $ref: '#/components/schemas/dto.ToggleOption' type: object - ReportDownloadJob: - example: - file_key: file_key - creator: creator - progress_update_date: progress_update_date - report_config_id: report_config_id - progress: 0 - file_expiry: file_expiry - id: id - create_date: create_date - status: status + 
dto.ReportDownloadJob: properties: - create_date: + created_at: type: string - creator: + created_by: type: string file_expiry: type: string @@ -10940,54 +11343,14 @@ components: type: string progress: type: integer - progress_update_date: + progress_updated_at: type: string report_config_id: type: string status: type: string type: object - SaaMeasurements: - example: - time: time - measurements: - - elevation: 0.8008281904610115 - temp: 1.4658129805029452 - z_cum_dev: 1.2315135367772556 - y_increment: 4.145608029883936 - x_cum_dev: 7.061401241503109 - temp_increment: 5.637376656633329 - z_increment: 1.0246457001441578 - y_cum_dev: 2.027123023002322 - x_increment: 9.301444243932576 - x: 2.3021358869347655 - "y": 3.616076749251911 - z: 7.386281948385884 - segment_id: 6 - temp_cum_dev: 5.962133916683182 - - elevation: 0.8008281904610115 - temp: 1.4658129805029452 - z_cum_dev: 1.2315135367772556 - y_increment: 4.145608029883936 - x_cum_dev: 7.061401241503109 - temp_increment: 5.637376656633329 - z_increment: 1.0246457001441578 - y_cum_dev: 2.027123023002322 - x_increment: 9.301444243932576 - x: 2.3021358869347655 - "y": 3.616076749251911 - z: 7.386281948385884 - segment_id: 6 - temp_cum_dev: 5.962133916683182 - properties: - measurements: - items: - $ref: '#/components/schemas/SaaSegmentMeasurement' - type: array - time: - type: string - type: object - SaaSegment: + dto.SaaSegment: example: z_timeseries_id: z_timeseries_id temp_timeseries_id: temp_timeseries_id @@ -11015,137 +11378,14 @@ components: z_timeseries_id: type: string type: object - SaaSegmentMeasurement: - example: - elevation: 0.8008281904610115 - temp: 1.4658129805029452 - z_cum_dev: 1.2315135367772556 - y_increment: 4.145608029883936 - x_cum_dev: 7.061401241503109 - temp_increment: 5.637376656633329 - z_increment: 1.0246457001441578 - y_cum_dev: 2.027123023002322 - x_increment: 9.301444243932576 - x: 2.3021358869347655 - "y": 3.616076749251911 - z: 7.386281948385884 - segment_id: 6 - temp_cum_dev: 
5.962133916683182 - properties: - elevation: - type: number - segment_id: - type: integer - temp: - type: number - temp_cum_dev: - type: number - temp_increment: - type: number - x: - type: number - x_cum_dev: - type: number - x_increment: - type: number - "y": - type: number - y_cum_dev: - type: number - y_increment: - type: number - z: - type: number - z_cum_dev: - type: number - z_increment: - type: number - type: object - SearchResult: - example: - item: "{}" - id: id - type: type - properties: - id: - type: string - item: - type: object - type: - type: string - type: object - Site: - properties: - description: - type: string - elevation: - type: string - elevationUnits: - type: string - siteName: - $ref: '#/components/schemas/SiteName' - type: object - SiteName: - properties: - id: - type: string - nameType: - type: string - type: object - Submittal: - example: - alert_type_id: alert_type_id - alert_config_id: alert_config_id - due_date: due_date - alert_config_name: alert_config_name - submittal_status_id: submittal_status_id - submittal_status_name: submittal_status_name - warning_sent: true - project_id: project_id - alert_type_name: alert_type_name - marked_as_missing: true - completion_date: completion_date - id: id - create_date: create_date - properties: - alert_config_id: - type: string - alert_config_name: - type: string - alert_type_id: - type: string - alert_type_name: - type: string - completion_date: - type: string - create_date: - type: string - due_date: - type: string - id: - type: string - marked_as_missing: - type: boolean - project_id: - type: string - submittal_status_id: - type: string - submittal_status_name: - type: string - warning_sent: - type: boolean - type: object - TextOption: - example: - value: value - enabled: true + dto.TextOption: properties: enabled: type: boolean value: type: string type: object - Timeseries: + dto.Timeseries: example: values: - annotation: annotation @@ -11200,73 +11440,19 @@ components: type: string 
values: items: - $ref: '#/components/schemas/Measurement' + $ref: '#/components/schemas/dto.Measurement' type: array variable: type: string type: object - TimeseriesCollectionItems: - example: - items: - - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - unit: unit - parameter: parameter - name: name - variable: variable - id: id - instrument_slug: instrument_slug - is_computed: true - unit_id: unit_id - slug: slug - parameter_id: parameter_id - - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - unit: unit - parameter: parameter - name: name - variable: variable - id: id - instrument_slug: instrument_slug - is_computed: true - unit_id: unit_id - slug: slug - parameter_id: parameter_id + dto.TimeseriesCollectionItems: properties: items: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/dto.Timeseries' type: array type: object - TimeseriesCwms: + dto.TimeseriesCwms: example: cwms_office_id: cwms_office_id values: @@ -11333,145 +11519,32 @@ components: type: string values: items: - $ref: '#/components/schemas/Measurement' + $ref: '#/components/schemas/dto.Measurement' type: array variable: type: string type: object - TimeseriesMeasurementCollectionCollection: - example: - items: - - timeseries_id: timeseries_id - items: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - 
value: 0.8008281904610115 - - timeseries_id: timeseries_id - items: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - properties: - items: - items: - $ref: '#/components/schemas/MeasurementCollection' - type: array - type: object - TimezoneOption: - example: - utc_offset: utc_offset - name: name - abbrev: abbrev - is_dst: true - properties: - abbrev: - type: string - is_dst: - type: boolean - name: - type: string - utc_offset: - type: string - type: object - ToggleOption: - example: - value: true - enabled: true - properties: - enabled: - type: boolean - value: - type: boolean - type: object - Token: - example: - token_id: token_id - profile_id: profile_id - issued: issued - secret_token: secret_token - properties: - issued: - type: string - profile_id: - type: string - secret_token: - type: string - token_id: - type: string - type: object - TokenInfoProfile: - example: - token_id: token_id - issued: issued - properties: - issued: - type: string - token_id: - type: string - type: object - Unit: - example: - measure: measure - unit_family_id: unit_family_id - name: name - unit_family: unit_family - id: id - abbreviation: abbreviation - measure_id: measure_id - properties: - abbreviation: - type: string - id: - type: string - measure: - type: string - measure_id: - type: string - name: - type: string - unit_family: - type: string - unit_family_id: - type: string - type: object - UploaderConfig: - example: - updater_username: updater_username - tz_name: tz_name - project_id: project_id - creator_username: creator_username - creator_id: creator_id - name: name - updater_id: updater_id - description: description - id: id - create_date: create_date - type: csv - update_date: update_date + dto.TimeseriesMeasurementCollectionCollection: + properties: + items: + items: + $ref: 
'#/components/schemas/dto.MeasurementCollection' + type: array + type: object + dto.ToggleOption: + properties: + enabled: + type: boolean + value: + type: boolean + type: object + dto.UploaderConfig: properties: - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string description: type: string @@ -11481,18 +11554,20 @@ components: type: string project_id: type: string + slug: + type: string type: - $ref: '#/components/schemas/UploaderConfigType' + $ref: '#/components/schemas/dto.UploaderConfigType' tz_name: type: string - update_date: + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - UploaderConfigMapping: + dto.UploaderConfigMapping: example: timeseries_id: timeseries_id field_name: field_name @@ -11502,7 +11577,7 @@ components: timeseries_id: type: string type: object - UploaderConfigType: + dto.UploaderConfigType: enum: - csv - dux @@ -11512,99 +11587,271 @@ components: - CSV - DUX - TOA5 - collectionGroupDetailsTimeseries: + service.AggregatePlotConfigMeasurementsContourPlot: example: - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type + x: + - 0.8008281904610115 + - 0.8008281904610115 + "y": + - 6.027456183070403 + - 6.027456183070403 + z: + - 1.4658129805029452 + - 1.4658129805029452 + properties: + x: + items: + type: number + type: array + "y": + items: + type: number + type: array + z: + items: + type: number + type: array + type: object + service.AwarePlatformParameterConfig: + example: + aware_parameters: + key: aware_parameters instrument_id: instrument_id - unit: unit - parameter: parameter + aware_id: aware_id + properties: + aware_id: + type: string + 
aware_parameters: + additionalProperties: + type: string + type: object + instrument_id: + type: string + type: object + service.DataloggerWithKey: + example: + created_at: created_at + model_id: model_id + created_by: created_by + updated_by_username: updated_by_username + tables: + - id: id + table_name: table_name + - id: id + table_name: table_name + updated_at: updated_at + project_id: project_id name: name - variable: variable - latest_value: 6.027456183070403 + updated_by: updated_by + created_by_username: created_by_username + model: model id: id - instrument_slug: instrument_slug - is_computed: true - latest_time: latest_time - sort_order: 1 - unit_id: unit_id + sn: sn + errors: + - errors + - errors + key: key slug: slug - parameter_id: parameter_id properties: - id: + created_at: type: string - instrument: + created_by: type: string - instrument_id: + created_by_username: type: string - instrument_slug: + errors: + items: + type: string + type: array + id: type: string - is_computed: - type: boolean - latest_time: + key: type: string - latest_value: - type: number - name: + model: type: string - parameter: + model_id: type: string - parameter_id: + name: + type: string + project_id: type: string slug: type: string - sort_order: - type: integer - type: + sn: type: string - unit: + tables: + items: + $ref: '#/components/schemas/db.DataloggerTableIDName' + type: array + updated_at: type: string - unit_id: + updated_by: type: string - values: + updated_by_username: + type: string + type: object + service.DomainMap: + additionalProperties: + items: + $ref: '#/components/schemas/db.DomainGroupOpt' + type: array + type: object + service.Healthcheck: + example: + status: status + properties: + status: + type: string + type: object + service.Heartbeat: + example: + time: time + properties: + time: + type: string + type: object + service.InstrumentsValidation: + example: + is_valid: true + errors: + - errors + - errors + properties: + errors: items: - $ref: 
'#/components/schemas/Measurement' + type: string type: array - variable: - type: string + is_valid: + type: boolean type: object - pgtype.JSON: + service.ReportConfigWithPlotConfigs: example: - bytes: - - 0 - - 0 - status: 6 + global_overrides: + date_range: + value: value + enabled: true + show_nonvalidated: + value: true + enabled: true + show_masked: + value: true + enabled: true + created_at: created_at + description: description + project_name: project_name + created_by: created_by + updated_by_username: updated_by_username + district_name: district_name + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_by_username: created_by_username + id: id + plot_configs: + - date_range: date_range + display: "{}" + show_comments: true + report_configs: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + created_at: created_at + auto_range: true + show_masked: true + threshold: 0 + created_by: created_by + show_nonvalidated: true + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + id: id + plot_type: scatter-line + slug: slug + - date_range: date_range + display: "{}" + show_comments: true + report_configs: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + created_at: created_at + auto_range: true + show_masked: true + threshold: 0 + created_by: created_by + show_nonvalidated: true + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + id: id + plot_type: scatter-line + slug: slug + slug: slug properties: - bytes: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + description: + type: string + district_name: + type: string + global_overrides: + $ref: '#/components/schemas/db.ReportConfigGlobalOverrides' + id: + type: string + name: + type: string + plot_configs: items: - type: integer + $ref: '#/components/schemas/db.VPlotConfiguration' type: array - 
status: - $ref: '#/components/schemas/pgtype.Status' + project_id: + type: string + project_name: + type: string + slug: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + type: object + service.Token: + example: + token_id: token_id + profile_id: profile_id + id: id + issued: issued + hash: hash + secret_token: secret_token + properties: + hash: + type: string + id: + type: string + issued: + type: string + profile_id: + type: string + secret_token: + type: string + token_id: + type: string type: object - pgtype.Status: - enum: - - 0 - - 1 - - 2 - type: integer - x-enum-varnames: - - Undefined - - "Null" - - Present _timeseries_measurements_post_request: properties: timeseries_measurement_collections: diff --git a/api/internal/service/alert_check.go b/api/internal/service/alert_check.go index 9effd166..80ff7b9e 100644 --- a/api/internal/service/alert_check.go +++ b/api/internal/service/alert_check.go @@ -63,7 +63,7 @@ func (s DBService) DoAlertChecks(ctx context.Context, cfg *config.AlertCheckConf if err != nil { return err } - acs, err := qtx.AlertConfigListUpdateLastChecked(ctx) + acs, err := qtx.AlertConfigListUpdateLastCheckedAt(ctx) if err != nil { return err } @@ -197,9 +197,9 @@ func checkMeasurements(ctx context.Context, q *db.Queries, subMap submittalMap, func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT) error { for _, acc := range accs { ac := acc.GetAlertConfig() - if err := q.AlertConfigUpdateLastReminded(ctx, db.AlertConfigUpdateLastRemindedParams{ - ID: ac.ID, - LastReminded: ac.LastReminded, + if err := q.AlertConfigUpdateLastRemindedAt(ctx, db.AlertConfigUpdateLastRemindedAtParams{ + ID: ac.ID, + LastRemindedAt: ac.LastRemindedAt, }); err != nil { return err } @@ -212,7 +212,7 @@ func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx conte if err := q.SubmittalUpdateCompletionDateOrWarningSent(ctx, 
db.SubmittalUpdateCompletionDateOrWarningSentParams{ ID: sub.ID, SubmittalStatusID: &sub.SubmittalStatusID, - CompletionDate: sub.CompletionDate, + CompletedAt: sub.CompletedAt, WarningSent: sub.WarningSent, }); err != nil { return err @@ -283,22 +283,22 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, // completion_date to current timestamp if sub.SubmittalStatusID == RedSubmittalStatusID { sub.SubmittalStatusID = YellowSubmittalStatusID - sub.CompletionDate = &t + sub.CompletedAt = &t ac.CreateNextSubmittalFrom = &t } else // if submittal status is green and the current time is not before the submittal due date, // complete the submittal at that due date and prepare the next submittal interval - if sub.SubmittalStatusID == GreenSubmittalStatusID && !t.Before(sub.DueDate) { - sub.CompletionDate = &sub.DueDate - ac.CreateNextSubmittalFrom = &sub.DueDate + if sub.SubmittalStatusID == GreenSubmittalStatusID && !t.Before(sub.DueAt) { + sub.CompletedAt = &sub.DueAt + ac.CreateNextSubmittalFrom = &sub.DueAt } } else // if any submittal warning is triggered, immediately send a // warning email, since submittal due dates are unique within alert configs if shouldWarn && !sub.WarningSent { - if !ac.MuteConsecutiveAlerts || ac.LastReminded == nil { + if !ac.MuteConsecutiveAlerts || ac.LastRemindedAt == nil { mu.Lock() if err := acc.DoEmail(warning, cfg); err != nil { errs = append(errs, err) @@ -315,7 +315,7 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, if sub.SubmittalStatusID != RedSubmittalStatusID { sub.SubmittalStatusID = RedSubmittalStatusID acAlert = true - ac.CreateNextSubmittalFrom = &sub.DueDate + ac.CreateNextSubmittalFrom = &sub.DueAt } resetReminders = false } @@ -337,16 +337,16 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, // if there are no alerts, there should also be no reminders sent. 
"last_reminded" is used to determine // if an alert has already been sent for an alert config, and send a reminder if so if resetReminders { - ac.LastReminded = nil + ac.LastRemindedAt = nil } // if there are any reminders within an alert config, they will override the alerts if MuteConsecutiveAlerts is true - if acAlert && ((!acReminder && ac.LastReminded == nil) || !ac.MuteConsecutiveAlerts) { - ac.LastReminded = &t + if acAlert && ((!acReminder && ac.LastRemindedAt == nil) || !ac.MuteConsecutiveAlerts) { + ac.LastRemindedAt = &t sendAlertEmail = true } - if acReminder && ac.LastReminded != nil { - ac.LastReminded = &t + if acReminder && ac.LastRemindedAt != nil { + ac.LastRemindedAt = &t sendReminderEmail = true } @@ -461,7 +461,7 @@ func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg *config.Aler "Description: \"{{.AlertConfig.Body}}\"\r\n" + "Expected Evaluation Submittals:\r\n" + "{{range .AlertChecks}}{{if or .ShouldAlert .ShouldWarn}}" + - "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + + "\t• {{.Submittal.CreatedAt.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + "{{if .ShouldAlert}} (missing) {{else if .ShouldWarn}} (warning) {{end}}\r\n{{end}}{{end}}", } templContent, err := email.CreateEmailTemplateContent(preformatted) @@ -518,7 +518,7 @@ func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg *config.Aler "Description: \"{{.AlertConfig.Body}}\"\r\n" + "Expected Measurement Submittals:\r\n" + "{{range .AlertChecks}}" + - "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + + "\t• {{.Submittal.CreatedAt.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + "{{range .AffectedTimeseries}}" + "\t\t• {{.InstrumentName}}: {{.TimeseriesName}} ({{.Status}})\r\n" + 
"{{end}}\r\n{{end}}", diff --git a/api/internal/service/alert_config.go b/api/internal/service/alert_config.go index 5ed5021c..68d486bc 100644 --- a/api/internal/service/alert_config.go +++ b/api/internal/service/alert_config.go @@ -30,13 +30,13 @@ func (s DBService) AlertConfigCreate(ctx context.Context, ac dto.AlertConfig) (d Name: ac.Name, Body: ac.Body, AlertTypeID: ac.AlertTypeID, - StartDate: ac.StartDate, + StartedAt: ac.StartedAt, ScheduleInterval: ac.ScheduleInterval, MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, RemindInterval: ac.RemindInterval, WarningInterval: ac.WarningInterval, - Creator: ac.CreatorID, - CreateDate: ac.CreateDate, + CreatedBy: ac.CreatedBy, + CreatedAt: ac.CreatedAt, }) if err != nil { return a, err @@ -93,13 +93,13 @@ func (s DBService) AlertConfigUpdate(ctx context.Context, alertConfigID uuid.UUI ProjectID: ac.ProjectID, Name: ac.Name, Body: ac.Body, - StartDate: ac.StartDate, + StartedAt: ac.StartedAt, ScheduleInterval: ac.ScheduleInterval, MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, RemindInterval: ac.RemindInterval, WarningInterval: ac.WarningInterval, - Updater: ac.UpdaterID, - UpdateDate: ac.UpdateDate, + UpdatedBy: ac.UpdatedBy, + UpdatedAt: ac.UpdatedAt, }); err != nil { return a, err } diff --git a/api/internal/service/collection_group.go b/api/internal/service/collection_group.go index a7ef1433..6aa0ea51 100644 --- a/api/internal/service/collection_group.go +++ b/api/internal/service/collection_group.go @@ -9,20 +9,20 @@ import ( func (s DBService) CollectionGroupCreate(ctx context.Context, cg dto.CollectionGroup) (db.CollectionGroup, error) { return s.Queries.CollectionGroupCreate(ctx, db.CollectionGroupCreateParams{ - ProjectID: cg.ProjectID, - Name: cg.Name, - Creator: cg.CreatorID, - CreateDate: cg.CreateDate, - SortOrder: cg.SortOrder, + ProjectID: cg.ProjectID, + Name: cg.Name, + CreatedBy: cg.CreatedBy, + CreatedAt: cg.CreatedAt, + SortOrder: cg.SortOrder, }) } func (s DBService) CollectionGroupUpdate(ctx 
context.Context, cg dto.CollectionGroup) (db.CollectionGroup, error) { return s.Queries.CollectionGroupUpdate(ctx, db.CollectionGroupUpdateParams{ - ID: cg.ID, - ProjectID: cg.ProjectID, - Name: cg.Name, - Updater: cg.UpdaterID, - UpdateDate: cg.UpdateDate, + ID: cg.ID, + ProjectID: cg.ProjectID, + Name: cg.Name, + UpdatedBy: cg.UpdatedBy, + UpdatedAt: cg.UpdatedAt, }) } diff --git a/api/internal/service/datalogger.go b/api/internal/service/datalogger.go index fc9c8776..b826f2a8 100644 --- a/api/internal/service/datalogger.go +++ b/api/internal/service/datalogger.go @@ -30,7 +30,7 @@ func (s DBService) DataloggerCreate(ctx context.Context, n dto.Datalogger) (Data Name: n.Name, Sn: n.SN, ProjectID: n.ProjectID, - Creator: n.CreatorID, + CreatedBy: n.CreatedBy, ModelID: n.ModelID, }) if err != nil { @@ -62,7 +62,7 @@ func (s DBService) DataloggerCreate(ctx context.Context, n dto.Datalogger) (Data func (s DBService) DataloggerHashUpdate(ctx context.Context, arg dto.Datalogger) (DataloggerWithKey, error) { var a DataloggerWithKey - if arg.UpdaterID == nil { + if arg.UpdatedBy == nil { return a, errors.New("must supply updater profile id") } @@ -83,10 +83,11 @@ func (s DBService) DataloggerHashUpdate(ctx context.Context, arg dto.Datalogger) return a, err } - if err := qtx.DataloggerUpdateUpdater(ctx, db.DataloggerUpdateUpdaterParams{ - ID: arg.ID, - Updater: *arg.UpdaterID, - UpdateDate: time.Now(), + t := time.Now() + if err := qtx.DataloggerUpdateAuditInfo(ctx, db.DataloggerUpdateAuditInfoParams{ + ID: arg.ID, + UpdatedBy: arg.UpdatedBy, + UpdatedAt: &t, }); err != nil { return a, err } @@ -118,15 +119,16 @@ func (s DBService) DataloggerUpdate(ctx context.Context, u dto.Datalogger) (db.V qtx := s.WithTx(tx) - if u.UpdaterID == nil { + if u.UpdatedBy == nil { return a, errors.New("must set updater id") } + t := time.Now() if err := qtx.DataloggerUpdate(ctx, db.DataloggerUpdateParams{ - ID: u.ID, - Name: u.Name, - Updater: *u.UpdaterID, - UpdateDate: time.Now(), + ID: 
u.ID, + Name: u.Name, + UpdatedBy: u.UpdatedBy, + UpdatedAt: &t, }); err != nil { return a, err } diff --git a/api/internal/service/datalogger_telemetry.go b/api/internal/service/datalogger_telemetry.go index 907fbb46..eef72bfd 100644 --- a/api/internal/service/datalogger_telemetry.go +++ b/api/internal/service/datalogger_telemetry.go @@ -19,7 +19,7 @@ import ( func (s DBService) DataloggerTablePreviewCreate(ctx context.Context, prv dto.DataloggerTablePreview) error { return s.Queries.DataloggerTablePreviewCreate(ctx, db.DataloggerTablePreviewCreateParams{ DataloggerTableID: prv.DataloggerTableID, - UpdateDate: prv.UpdateDate, + UpdatedAt: prv.UpdatedAt, Preview: prv.Preview, }) } @@ -54,7 +54,7 @@ func (s DBService) DataloggerTablePreviewUpdate(ctx context.Context, dataloggerI DataloggerID: dataloggerID, TableName: tableName, Preview: prv.Preview, - UpdateDate: prv.UpdateDate, + UpdatedAt: prv.UpdatedAt, }); err != nil { if !errors.Is(err, sql.ErrNoRows) { return uuid.Nil, err @@ -63,7 +63,7 @@ func (s DBService) DataloggerTablePreviewUpdate(ctx context.Context, dataloggerI if err := qtx.DataloggerTablePreviewCreate(ctx, db.DataloggerTablePreviewCreateParams{ DataloggerTableID: prv.DataloggerTableID, Preview: prv.Preview, - UpdateDate: prv.UpdateDate, + UpdatedAt: prv.UpdatedAt, }); err != nil { } } diff --git a/api/internal/service/db.go b/api/internal/service/db.go index 96d0a5cc..0e904575 100644 --- a/api/internal/service/db.go +++ b/api/internal/service/db.go @@ -2,7 +2,6 @@ package service import ( "context" - "database/sql" "errors" "fmt" "log" @@ -11,7 +10,6 @@ import ( gen "github.com/USACE/instrumentation-api/api/internal/db" "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgxpool" - pgxgeom "github.com/twpayne/pgx-geom" ) type DBService struct { @@ -34,26 +32,19 @@ func newDatabasePool(cfg config.DBConfig) *DatabasePool { if err != nil { log.Fatal(err.Error()) } - - config.AfterConnect = func(ctx context.Context, conn *pgx.Conn) error { - return 
pgxgeom.Register(ctx, conn) - } - pool, err := pgxpool.NewWithConfig(context.Background(), config) if err != nil { log.Fatal(err.Error()) } - if err := pool.Ping(context.Background()); err != nil { log.Fatal(err.Error()) } - return &DatabasePool{pool} } func txDo(ctx context.Context, rollback func(ctx context.Context) error) { err := rollback(ctx) - if err != nil && !errors.Is(err, sql.ErrTxDone) { + if err != nil && !errors.Is(err, pgx.ErrTxClosed) { log.Print(err.Error()) } } diff --git a/api/internal/service/evaluation.go b/api/internal/service/evaluation.go index 447f1cae..83782d0f 100644 --- a/api/internal/service/evaluation.go +++ b/api/internal/service/evaluation.go @@ -55,10 +55,10 @@ func (s DBService) EvaluationCreate(ctx context.Context, ev dto.Evaluation) (db. SubmittalID: ev.SubmittalID, Name: ev.Name, Body: ev.Body, - StartDate: ev.StartDate, - EndDate: ev.EndDate, - Creator: ev.CreatorID, - CreateDate: ev.CreateDate, + StartedAt: ev.StartedAt, + EndedAt: ev.EndedAt, + CreatedBy: ev.CreatedBy, + CreatedAt: ev.CreatedAt, }) if err != nil { return a, err @@ -95,14 +95,14 @@ func (s DBService) EvaluationUpdate(ctx context.Context, evaluationID uuid.UUID, qtx := s.WithTx(tx) if err := qtx.EvaluationUpdate(ctx, db.EvaluationUpdateParams{ - ID: ev.ID, - ProjectID: ev.ProjectID, - Name: ev.Name, - Body: ev.Body, - StartDate: ev.StartDate, - EndDate: ev.EndDate, - Updater: ev.UpdaterID, - UpdateDate: ev.UpdateDate, + ID: ev.ID, + ProjectID: ev.ProjectID, + Name: ev.Name, + Body: ev.Body, + StartedAt: ev.StartedAt, + EndedAt: ev.EndedAt, + UpdatedBy: ev.UpdatedBy, + UpdatedAt: ev.UpdatedAt, }); err != nil { return a, err } diff --git a/api/internal/service/heartbeat.go b/api/internal/service/heartbeat.go new file mode 100644 index 00000000..025d8423 --- /dev/null +++ b/api/internal/service/heartbeat.go @@ -0,0 +1,42 @@ +package service + +import ( + "context" + "time" +) + +type Healthcheck struct { + Status string +} + +type Heartbeat struct { + Time 
time.Time +} + +func (s DBService) HeartbeatCreate(ctx context.Context, argTime time.Time) (Heartbeat, error) { + hb, err := s.Queries.HeartbeatCreate(ctx, argTime) + if err != nil { + return Heartbeat{}, err + } + return Heartbeat{hb}, nil +} + +func (s DBService) HeartbeatGetLatest(ctx context.Context) (Heartbeat, error) { + hb, err := s.Queries.HeartbeatGetLatest(ctx) + if err != nil { + return Heartbeat{}, err + } + return Heartbeat{hb}, nil +} + +func (s DBService) HeartbeatList(ctx context.Context, resultLimit int32) ([]Heartbeat, error) { + hh, err := s.Queries.HeartbeatList(ctx, resultLimit) + if err != nil { + return nil, err + } + rr := make([]Heartbeat, len(hh)) + for idx := range hh { + rr[idx] = Heartbeat{hh[idx]} + } + return rr, nil +} diff --git a/api/internal/service/instrument.go b/api/internal/service/instrument.go index c7f78799..8c1eba53 100644 --- a/api/internal/service/instrument.go +++ b/api/internal/service/instrument.go @@ -42,8 +42,8 @@ func (s DBService) InstrumentCreateBatch(ctx context.Context, ii []dto.Instrumen Geometry: inst.Geometry, Station: inst.Station, StationOffset: inst.StationOffset, - Creator: inst.CreatorID, - CreateDate: inst.CreateDate, + CreatedBy: inst.CreatedBy, + CreatedAt: inst.CreatedAt, NidID: inst.NIDID, UsgsID: inst.USGSID, ShowCwmsTab: inst.ShowCwmsTab, @@ -74,6 +74,7 @@ func (s DBService) InstrumentCreateBatch(ctx context.Context, ii []dto.Instrumen }) } newInstruments[idx] = r + ii[idx].ID = r.ID }) if err != nil { return nil, err @@ -116,8 +117,8 @@ func (s DBService) InstrumentUpdate(ctx context.Context, projectID uuid.UUID, in Name: inst.Name, TypeID: inst.TypeID, Geometry: inst.Geometry, - Updater: inst.UpdaterID, - UpdateDate: inst.UpdateDate, + UpdatedBy: inst.UpdatedBy, + UpdatedAt: inst.UpdatedAt, Station: inst.Station, StationOffset: inst.StationOffset, NidID: inst.NIDID, diff --git a/api/internal/service/instrument_constant.go b/api/internal/service/instrument_constant.go index 463a9c98..3b98947d 
100644 --- a/api/internal/service/instrument_constant.go +++ b/api/internal/service/instrument_constant.go @@ -23,10 +23,7 @@ func (s DBService) InstrumentConstantCreateBatch(ctx context.Context, tt []dto.T Name: t.Name, ParameterID: t.ParameterID, UnitID: t.UnitID, - Type: db.NullTimeseriesType{ - TimeseriesType: db.TimeseriesTypeConstant, - Valid: true, - }, + Type: db.TimeseriesTypeConstant, } } uu := make([]db.TimeseriesCreateBatchRow, len(createTimeseriesParams)) diff --git a/api/internal/service/instrument_group.go b/api/internal/service/instrument_group.go index 5480a224..3048f2b5 100644 --- a/api/internal/service/instrument_group.go +++ b/api/internal/service/instrument_group.go @@ -7,19 +7,19 @@ import ( "github.com/USACE/instrumentation-api/api/internal/dto" ) -func (s DBService) InstrumentGroupCreateBatch(ctx context.Context, groups []dto.InstrumentGroup) ([]db.InstrumentGroup, error) { +func (s DBService) InstrumentGroupCreateBatch(ctx context.Context, groups []dto.InstrumentGroup) ([]db.InstrumentGroupCreateBatchRow, error) { args := make([]db.InstrumentGroupCreateBatchParams, len(groups)) for idx, g := range groups { args[idx] = db.InstrumentGroupCreateBatchParams{ Name: g.Name, Description: &g.Description, - Creator: g.CreatorID, - CreateDate: g.CreateDate, + CreatedBy: g.CreatedBy, + CreatedAt: g.CreatedAt, ProjectID: g.ProjectID, } } var err error - ggNew := make([]db.InstrumentGroup, len(groups)) + ggNew := make([]db.InstrumentGroupCreateBatchRow, len(groups)) s.Queries.InstrumentGroupCreateBatch(ctx, args).QueryRow(batchQueryRowCollect(ggNew, &err)) if err != nil { return nil, err @@ -27,13 +27,13 @@ func (s DBService) InstrumentGroupCreateBatch(ctx context.Context, groups []dto. 
return ggNew, nil } -func (s DBService) InstrumentGroupUpdate(ctx context.Context, g dto.InstrumentGroup) (db.InstrumentGroup, error) { +func (s DBService) InstrumentGroupUpdate(ctx context.Context, g dto.InstrumentGroup) (db.InstrumentGroupUpdateRow, error) { return s.Queries.InstrumentGroupUpdate(ctx, db.InstrumentGroupUpdateParams{ ID: g.ID, Name: g.Name, Description: &g.Description, ProjectID: g.ProjectID, - Updater: g.UpdaterID, - UpdateDate: g.UpdateDate, + UpdatedBy: g.UpdatedBy, + UpdatedAt: g.UpdatedAt, }) } diff --git a/api/internal/service/instrument_incl.go b/api/internal/service/instrument_incl.go index 0731e442..4b435e3f 100644 --- a/api/internal/service/instrument_incl.go +++ b/api/internal/service/instrument_incl.go @@ -8,9 +8,10 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" ) -func (s DBService) InclSegmentUpdateBatch(ctx context.Context, segs []dto.InclSegment) error { +func (s DBService) InclSegmentUpdateBatch(ctx context.Context, instrumentID uuid.UUID, segs []dto.InclSegment) error { tx, err := s.db.Begin(ctx) if err != nil { return err @@ -23,7 +24,7 @@ func (s DBService) InclSegmentUpdateBatch(ctx context.Context, segs []dto.InclSe for idx, seg := range segs { updateInclArgs[idx] = db.InclSegmentUpdateBatchParams{ ID: int32(seg.ID), - InstrumentID: seg.InstrumentID, + InstrumentID: instrumentID, DepthTimeseriesID: seg.DepthTimeseriesID, A0TimeseriesID: seg.A0TimeseriesID, A180TimeseriesID: seg.A180TimeseriesID, @@ -64,10 +65,7 @@ func inclOptsCreateBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument Name: inst.Slug + "-bottom-elevation", ParameterID: dto.InclParameterID, UnitID: dto.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, + Type: db.TimeseriesTypeConstant, } createInclOptsParams[idx] = db.InclOptsCreateBatchParams{ InstrumentID: inst.ID, diff --git 
a/api/internal/service/instrument_ipi.go b/api/internal/service/instrument_ipi.go index fb91c4fc..cc0e8746 100644 --- a/api/internal/service/instrument_ipi.go +++ b/api/internal/service/instrument_ipi.go @@ -9,9 +9,10 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" ) -func (s DBService) IpiSegmentUpdateBatch(ctx context.Context, segs []dto.IpiSegment) error { +func (s DBService) IpiSegmentUpdateBatch(ctx context.Context, instrumentID uuid.UUID, segs []dto.IpiSegment) error { tx, err := s.db.Begin(ctx) if err != nil { return err @@ -25,7 +26,7 @@ func (s DBService) IpiSegmentUpdateBatch(ctx context.Context, segs []dto.IpiSegm for idx, seg := range segs { updateIpiArgs[idx] = db.IpiSegmentUpdateBatchParams{ ID: int32(seg.ID), - InstrumentID: seg.InstrumentID, + InstrumentID: instrumentID, LengthTimeseriesID: &seg.LengthTimeseriesID, TiltTimeseriesID: seg.TiltTimeseriesID, IncDevTimeseriesID: seg.IncDevTimeseriesID, @@ -73,10 +74,7 @@ func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), ParameterID: dto.IpiParameterID, UnitID: dto.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, + Type: db.TimeseriesTypeConstant, } createIpiSegmentBatchParams[idx][i] = db.IpiSegmentCreateBatchParams{ ID: int32(i + 1), @@ -88,10 +86,7 @@ func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) Name: inst.Slug + "-bottom-elevation", ParameterID: dto.IpiParameterID, UnitID: dto.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, + Type: db.TimeseriesTypeConstant, } createIpiOptsParams[idx] = db.IpiOptsCreateBatchParams{ InstrumentID: inst.ID, diff --git a/api/internal/service/instrument_note.go b/api/internal/service/instrument_note.go index 530f3540..c7072913 100644 
--- a/api/internal/service/instrument_note.go +++ b/api/internal/service/instrument_note.go @@ -22,8 +22,8 @@ func (s DBService) InstrumentNoteCreateBatch(ctx context.Context, notes []dto.In Title: n.Title, Body: n.Body, Time: n.Time, - Creator: n.CreatorID, - CreateDate: n.CreateDate, + CreatedBy: n.CreatedBy, + CreatedAt: n.CreatedAt, } } nn := make([]db.InstrumentNote, len(args)) @@ -40,11 +40,11 @@ func (s DBService) InstrumentNoteCreateBatch(ctx context.Context, notes []dto.In func (s DBService) InstrumentNoteUpdate(ctx context.Context, u dto.InstrumentNote) (db.InstrumentNote, error) { return s.Queries.InstrumentNoteUpdate(ctx, db.InstrumentNoteUpdateParams{ - ID: u.ID, - Title: u.Title, - Body: u.Body, - Time: u.Time, - Updater: u.UpdaterID, - UpdateDate: u.UpdateDate, + ID: u.ID, + Title: u.Title, + Body: u.Body, + Time: u.Time, + UpdatedBy: u.UpdatedBy, + UpdatedAt: u.UpdatedAt, }) } diff --git a/api/internal/service/instrument_saa.go b/api/internal/service/instrument_saa.go index d801e42f..9e5bbbac 100644 --- a/api/internal/service/instrument_saa.go +++ b/api/internal/service/instrument_saa.go @@ -9,9 +9,10 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" ) -func (s DBService) SaaSegmentUpdateBatch(ctx context.Context, segs []dto.SaaSegment) error { +func (s DBService) SaaSegmentUpdateBatch(ctx context.Context, instrumentID uuid.UUID, segs []dto.SaaSegment) error { tx, err := s.db.Begin(ctx) if err != nil { return err @@ -24,7 +25,7 @@ func (s DBService) SaaSegmentUpdateBatch(ctx context.Context, segs []dto.SaaSegm for idx, seg := range segs { updateSaaSegParams[idx] = db.SaaSegmentUpdateBatchParams{ ID: int32(seg.ID), - InstrumentID: seg.InstrumentID, + InstrumentID: instrumentID, LengthTimeseriesID: &seg.LengthTimeseriesID, XTimeseriesID: seg.XTimeseriesID, YTimeseriesID: seg.YTimeseriesID, @@ -73,10 +74,7 @@ func createSaaOptsBatch(ctx context.Context, 
q *db.Queries, ii []dto.Instrument) Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), ParameterID: dto.SaaParameterID, UnitID: dto.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, + Type: db.TimeseriesTypeConstant, } createSaaSegmentBatchParams[idx][i] = db.SaaSegmentCreateBatchParams{ ID: int32(i + 1), @@ -88,10 +86,7 @@ func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) Name: inst.Slug + "-bottom-elevation", ParameterID: dto.SaaParameterID, UnitID: dto.FeetUnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeConstant, - }, + Type: db.TimeseriesTypeConstant, } createSaaOptsParams[idx] = db.SaaOptsCreateBatchParams{ InstrumentID: inst.ID, diff --git a/api/internal/service/measurement.go b/api/internal/service/measurement.go index 3f9e1f6b..0d0ba302 100644 --- a/api/internal/service/measurement.go +++ b/api/internal/service/measurement.go @@ -9,12 +9,12 @@ import ( "github.com/USACE/instrumentation-api/api/internal/util" ) -func (s DBService) TimeseriesMeasurementListRange(ctx context.Context, arg db.TimeseriesMeasurementListRangeParams, threshold int) ([]db.VTimeseriesMeasurement, error) { - mm, err := s.Queries.TimeseriesMeasurementListRange(ctx, arg) +func (s DBService) TimeseriesMeasurementListForRange(ctx context.Context, arg db.TimeseriesMeasurementCollectionGetForRangeParams, threshold int) (db.MeasurementCollection, error) { + mc, err := s.Queries.TimeseriesMeasurementCollectionGetForRange(ctx, arg) if err != nil { - return nil, err + return mc, err } - return db.LTTB(mm, threshold), nil + return mc, nil } func (s DBService) CreateTimeseriesMeasurements(ctx context.Context, mc []dto.MeasurementCollection) error { diff --git a/api/internal/service/opendcs.go b/api/internal/service/opendcs.go deleted file mode 100644 index b82dab61..00000000 --- a/api/internal/service/opendcs.go +++ /dev/null @@ -1,39 +0,0 @@ -package service - 
-import ( - "context" - - "github.com/google/uuid" -) - -type Site struct { - Elevation string `xml:"Elevation"` - ElevationUnits string `xml:"ElevationUnits"` - Description string `xml:"Description"` - SiteName SiteName `xml:"SiteName"` -} - -type SiteName struct { - ID uuid.UUID `xml:",chardata"` - NameType string `xml:",attr"` -} - -func (s DBService) OpendcsSiteList(ctx context.Context) ([]Site, error) { - nn, err := s.Queries.InstrumentList(ctx) - if err != nil { - return make([]Site, 0), err - } - ss := make([]Site, len(nn)) - for idx, n := range nn { - ss[idx] = Site{ - Elevation: "", - ElevationUnits: "", - Description: n.Name, - SiteName: SiteName{ - ID: n.ID, - NameType: "uuid", - }, - } - } - return ss, nil -} diff --git a/api/internal/service/plot_config.go b/api/internal/service/plot_config.go index 100a1075..93607c6c 100644 --- a/api/internal/service/plot_config.go +++ b/api/internal/service/plot_config.go @@ -10,11 +10,11 @@ import ( func createPlotConfigCommon(ctx context.Context, q *db.Queries, pc dto.PlotConfig) (uuid.UUID, error) { pcID, err := q.PlotConfigCreate(ctx, db.PlotConfigCreateParams{ - Name: pc.Name, - ProjectID: pc.ProjectID, - Creator: pc.CreatorID, - CreateDate: pc.CreateDate, - PlotType: db.PlotType(pc.PlotType), + Name: pc.Name, + ProjectID: pc.ProjectID, + CreatedBy: pc.CreatedBy, + CreatedAt: pc.CreatedAt, + PlotType: db.PlotType(pc.PlotType), }) if err != nil { return pcID, err @@ -33,11 +33,11 @@ func createPlotConfigCommon(ctx context.Context, q *db.Queries, pc dto.PlotConfi func updatePlotConfigCommon(ctx context.Context, q *db.Queries, pc dto.PlotConfig) error { if err := q.PlotConfigUpdate(ctx, db.PlotConfigUpdateParams{ - ProjectID: pc.ProjectID, - ID: pc.ID, - Name: pc.Name, - Updater: pc.UpdaterID, - UpdateDate: pc.UpdateDate, + ProjectID: pc.ProjectID, + ID: pc.ID, + Name: pc.Name, + UpdatedBy: pc.UpdatedBy, + UpdatedAt: pc.UpdatedAt, }); err != nil { return err } diff --git a/api/internal/service/project.go 
b/api/internal/service/project.go index 526f48d1..b3cd2831 100644 --- a/api/internal/service/project.go +++ b/api/internal/service/project.go @@ -22,6 +22,15 @@ type ImgUploaderOpts struct { bucketName string } +type ProjectCount struct { + ProjectCount int64 `json:"project_count"` +} + +func (s DBService) ProjectGetCount(ctx context.Context) (ProjectCount, error) { + count, err := s.Queries.ProjectGetCount(ctx) + return ProjectCount{count}, err +} + func (s DBService) ProjectCreateBatch(ctx context.Context, projects []dto.Project) ([]db.ProjectCreateBatchRow, error) { args := make([]db.ProjectCreateBatchParams, len(projects)) for idx, p := range projects { @@ -29,8 +38,8 @@ func (s DBService) ProjectCreateBatch(ctx context.Context, projects []dto.Projec FederalID: p.FederalID, Name: p.Name, DistrictID: p.DistrictID, - Creator: p.CreatorID, - CreateDate: p.CreateDate, + CreatedBy: p.CreatedBy, + CreatedAt: p.CreatedAt, } } var err error @@ -54,8 +63,8 @@ func (s DBService) ProjectUpdate(ctx context.Context, p dto.Project) (db.VProjec if _, err := qtx.ProjectUpdate(ctx, db.ProjectUpdateParams{ ID: p.ID, Name: p.Name, - Updater: p.UpdaterID, - UpdateDate: p.UpdateDate, + UpdatedBy: p.UpdatedBy, + UpdatedAt: p.UpdatedAt, DistrictID: p.DistrictID, FederalID: p.FederalID, }); err != nil { diff --git a/api/internal/service/project_role.go b/api/internal/service/project_role.go new file mode 100644 index 00000000..c10078b5 --- /dev/null +++ b/api/internal/service/project_role.go @@ -0,0 +1,30 @@ +package service + +import ( + "context" + + "github.com/USACE/instrumentation-api/api/internal/db" +) + +func (s DBService) ProfileProjectRoleCreate(ctx context.Context, arg db.ProfileProjectRoleCreateParams) (db.ProfileProjectRoleGetRow, error) { + var a db.ProfileProjectRoleGetRow + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + newID, err := qtx.ProfileProjectRoleCreate(ctx, arg) + if err != nil { + 
return a, err + } + a, err = qtx.ProfileProjectRoleGet(ctx, newID) + if err != nil { + return a, err + } + if err := tx.Commit(ctx); err != nil { + return a, err + } + return a, nil +} diff --git a/api/internal/service/report_config.go b/api/internal/service/report_config.go index b600c666..b0754f31 100644 --- a/api/internal/service/report_config.go +++ b/api/internal/service/report_config.go @@ -22,7 +22,7 @@ func (s DBService) ReportConfigCreate(ctx context.Context, rc dto.ReportConfig) rcID, err := qtx.ReportConfigCreate(ctx, db.ReportConfigCreateParams{ Name: rc.Name, ProjectID: rc.ProjectID, - Creator: rc.CreatorID, + CreatedBy: rc.CreatedBy, Description: rc.Description, DateRange: &rc.GlobalOverrides.DateRange.Value, DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, @@ -66,7 +66,7 @@ func (s DBService) ReportConfigUpdate(ctx context.Context, rc dto.ReportConfig) if err := qtx.ReportConfigUpdate(ctx, db.ReportConfigUpdateParams{ ID: rc.ID, Name: rc.Name, - Updater: rc.UpdaterID, + UpdatedBy: rc.UpdatedBy, Description: rc.Description, DateRange: &rc.GlobalOverrides.DateRange.Value, DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, @@ -135,7 +135,7 @@ func (s DBService) ReportDownloadJobCreate(ctx context.Context, queue cloud.Pubs a, err = qtx.ReportDownloadJobCreate(ctx, db.ReportDownloadJobCreateParams{ ReportConfigID: &arg.ReportConfigID, - Creator: arg.ProfileID, + CreatedBy: arg.ProfileID, }) if err != nil { return a, err @@ -165,11 +165,11 @@ func (s DBService) ReportDownloadJobCreate(ctx context.Context, queue cloud.Pubs func (s DBService) ReportDownloadJobUpdate(ctx context.Context, j dto.ReportDownloadJob) error { return s.Queries.ReportDownloadJobUpdate(ctx, db.ReportDownloadJobUpdateParams{ - ID: j.ID, - Status: db.JobStatus(j.Status), - Progress: int32(j.Progress), - ProgressUpdateDate: j.ProgressUpdateDate, - FileKey: j.FileKey, - FileExpiry: j.FileExpiry, + ID: j.ID, + Status: db.JobStatus(j.Status), + Progress: int32(j.Progress), + 
ProgressUpdatedAt: j.ProgressUpdatedAt, + FileKey: j.FileKey, + FileExpiry: j.FileExpiry, }) } diff --git a/api/internal/service/timeseries.go b/api/internal/service/timeseries.go index 52b23fe9..2e479f9d 100644 --- a/api/internal/service/timeseries.go +++ b/api/internal/service/timeseries.go @@ -22,10 +22,7 @@ func (s DBService) TimeseriesCreateBatch(ctx context.Context, tt []dto.Timeserie Name: ts.Name, ParameterID: ts.ParameterID, UnitID: ts.UnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeStandard, - }, + Type: db.TimeseriesTypeStandard, } } var err error @@ -39,10 +36,7 @@ func (s DBService) TimeseriesCreate(ctx context.Context, ts dto.Timeseries) (uui Name: ts.Name, ParameterID: ts.ParameterID, UnitID: ts.UnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeStandard, - }, + Type: db.TimeseriesTypeStandard, }) if err != nil { return uuid.Nil, err diff --git a/api/internal/service/timeseries_calculated.go b/api/internal/service/timeseries_calculated.go index a1b87ba0..57146c32 100644 --- a/api/internal/service/timeseries_calculated.go +++ b/api/internal/service/timeseries_calculated.go @@ -5,6 +5,7 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" ) func (s DBService) TimeseriesComputedCreate(ctx context.Context, ct dto.CalculatedTimeseries) error { @@ -33,7 +34,7 @@ func (s DBService) TimeseriesComputedCreate(ctx context.Context, ct dto.Calculat return tx.Commit(ctx) } -func (s DBService) TimeseriesComputedCreateOrUpdate(ctx context.Context, ct dto.CalculatedTimeseries) error { +func (s DBService) TimeseriesComputedUpdate(ctx context.Context, ct dto.CalculatedTimeseries) error { tx, err := s.db.Begin(ctx) if err != nil { return err @@ -41,16 +42,23 @@ func (s DBService) TimeseriesComputedCreateOrUpdate(ctx context.Context, ct dto. 
defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.TimeseriesComputedCreateOrUpdate(ctx, db.TimeseriesComputedCreateOrUpdateParams{ - ID: ct.ID, - InstrumentID: &ct.InstrumentID, - ParameterID: ct.ParameterID, - UnitID: ct.UnitID, - Name: ct.FormulaName, + if ct.ParameterID == uuid.Nil { + ct.ParameterID = dto.UnknownParameterID + } + if ct.UnitID == uuid.Nil { + ct.UnitID = dto.UnknownUnitID + } + + if err := qtx.TimeseriesComputedUpdate(ctx, db.TimeseriesComputedUpdateParams{ + ID: ct.ID, + ParameterID: ct.ParameterID, + UnitID: ct.UnitID, + Name: ct.FormulaName, + Slug: ct.Slug, }); err != nil { return err } - if err := qtx.CalculationCreateOrUpdate(ctx, db.CalculationCreateOrUpdateParams{ + if err := qtx.CalculationUpdate(ctx, db.CalculationUpdateParams{ TimeseriesID: ct.ID, Contents: &ct.Formula, }); err != nil { diff --git a/api/internal/service/timeseries_cwms.go b/api/internal/service/timeseries_cwms.go index fbc6b518..477269e0 100644 --- a/api/internal/service/timeseries_cwms.go +++ b/api/internal/service/timeseries_cwms.go @@ -29,10 +29,7 @@ func (s DBService) TimeseriesCwmsCreateBatch(ctx context.Context, instrumentID u Name: tc.Name, ParameterID: tc.ParameterID, UnitID: tc.UnitID, - Type: db.NullTimeseriesType{ - Valid: true, - TimeseriesType: db.TimeseriesTypeCwms, - }, + Type: db.TimeseriesTypeCwms, }) if err != nil { return err diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index cd96294c..a7a21c3c 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -20,8 +20,8 @@ func (s DBService) UploaderConfigCreate(ctx context.Context, uc dto.UploaderConf Description: uc.Description, Type: db.UploaderConfigType(uc.Type), TzName: uc.TzName, - Creator: uc.CreatorID, - CreateDate: uc.CreateDate, + CreatedBy: uc.CreatedBy, + CreatedAt: uc.CreatedAt, }) } @@ -32,8 +32,8 @@ func (s DBService) UploaderConfigUpdate(ctx context.Context, uc dto.UploaderConf Description: uc.Description, 
Type: db.UploaderConfigType(uc.Type), TzName: uc.TzName, - Updater: uc.UpdaterID, - UpdateDate: uc.UpdateDate, + UpdatedBy: uc.UpdatedBy, + UpdatedAt: uc.UpdatedAt, }) } diff --git a/api/migrations/repeat/0020__views_profiles.sql b/api/migrations/repeat/0020__views_profiles.sql index 83a2cd84..6f4e4565 100644 --- a/api/migrations/repeat/0020__views_profiles.sql +++ b/api/migrations/repeat/0020__views_profiles.sql @@ -16,13 +16,17 @@ CREATE OR REPLACE VIEW v_profile AS ( p.email, p.is_admin, COALESCE(r.roles,'{}')::text[] AS roles, - COALESCE(jsonb_agg(jsonb_build_object( - 'token_id', pt.token_id, - 'issued', pt.time - )), '[]'::jsonb) AS tokens + COALESCE(sq.tokens, '[]'::jsonb) AS tokens FROM profile p LEFT JOIN roles_by_profile r ON r.profile_id = p.id - LEFT JOIN profile_token pt ON pt.profile_id = p.id + LEFT JOIN LATERAL ( + SELECT jsonb_agg(jsonb_build_object( + 'token_id', pt.token_id, + 'issued', pt.issued + )) tokens + FROM profile_token pt + WHERE pt.profile_id = p.id + ) sq ON true ); CREATE OR REPLACE VIEW v_profile_project_roles AS ( diff --git a/api/migrations/repeat/0030__views_projects.sql b/api/migrations/repeat/0030__views_projects.sql index 66a865ea..e091f11a 100644 --- a/api/migrations/repeat/0030__views_projects.sql +++ b/api/migrations/repeat/0030__views_projects.sql @@ -5,23 +5,22 @@ CREATE OR REPLACE VIEW v_project AS ( CASE WHEN p.image IS NOT NULL THEN cfg.static_host || '/projects/' || p.slug || '/images/' || p.image ELSE NULL - END AS image, + END image, p.district_id, d.office_id, - p.deleted, p.slug, p.name, - p.creator, - u.username AS creator_username, - p.create_date, - p.updater, - u.username AS updater_username, - p.update_date, - COALESCE(i.count, 0) AS instrument_count, - COALESCE(g.count, 0) AS instrument_group_count + p.created_by, + COALESCE(u.username, 'midas') created_by_username, + p.created_at, + p.updated_by, + u.username updated_by_username, + p.updated_at, + COALESCE(i.count, 0) instrument_count, + COALESCE(g.count, 
0) instrument_group_count FROM project p - LEFT JOIN profile c ON p.creator = c.id - LEFT JOIN profile u ON p.updater = c.id + LEFT JOIN profile c ON p.created_by = c.id + LEFT JOIN profile u ON p.updated_by = c.id LEFT JOIN ( SELECT pi.project_id, COUNT(pi.*) as count FROM project_instrument pi @@ -48,13 +47,13 @@ CREATE OR REPLACE VIEW v_project AS ( CREATE OR REPLACE VIEW v_district AS ( SELECT - ag.name AS agency, - dis.id AS id, - dis.name AS name, - dis.initials AS initials, - div.name AS division_name, - div.initials AS division_initials, - dis.office_id AS office_id + ag.name agency, + dis.id, + dis.name, + dis.initials, + div.name division_name, + div.initials division_initials, + dis.office_id FROM district dis INNER JOIN division div ON dis.division_id = div.id INNER JOIN agency ag ON ag.id = div.agency_id diff --git a/api/migrations/repeat/0040__views_instruments.sql b/api/migrations/repeat/0040__views_instruments.sql index 06dfb1be..3832c27e 100644 --- a/api/migrations/repeat/0040__views_instruments.sql +++ b/api/migrations/repeat/0040__views_instruments.sql @@ -1,9 +1,9 @@ CREATE OR REPLACE VIEW v_instrument_telemetry AS ( SELECT a.id, - a.instrument_id AS instrument_id, - b.id AS telemetry_type_id, - b.slug AS telemetry_type_slug, - b.name AS telemetry_type_name + a.instrument_id, + b.id telemetry_type_id, + b.slug telemetry_type_slug, + b.name telemetry_type_name FROM instrument_telemetry a INNER JOIN telemetry_type b ON b.id = a.telemetry_type_id LEFT JOIN telemetry_goes tg ON a.telemetry_id = tg.id @@ -13,7 +13,6 @@ CREATE OR REPLACE VIEW v_instrument_telemetry AS ( CREATE OR REPLACE VIEW v_instrument AS ( SELECT i.id, - i.deleted, s.status_id, s.status, s.status_time, @@ -21,15 +20,15 @@ CREATE OR REPLACE VIEW v_instrument AS ( i.name, i.type_id, i.show_cwms_tab, - t.name type, + t.name "type", t.icon, - i.geometry, + ST_AsGeoJSON(i.geometry)::json geometry, i.station, - i.station_offset, - i.creator, - i.create_date, - i.updater, - 
i.update_date, + i.station_offset "offset", + i.created_by, + i.created_at, + i.updated_by, + i.updated_at, i.nid_id, i.usgs_id, tel.telemetry, @@ -126,39 +125,39 @@ CREATE OR REPLACE VIEW v_instrument AS ( LIMIT 1 ) b2 ON true ) o ON o.instrument_id = i.id + WHERE NOT i.deleted ); CREATE OR REPLACE VIEW v_instrument_group AS ( WITH instrument_count AS ( - SELECT - igi.instrument_group_id, - count(igi.instrument_group_id) as i_count - FROM instrument_group_instruments igi - JOIN instrument i on igi.instrument_id = i.id and not i.deleted - GROUP BY igi.instrument_group_id - ), - timeseries_instruments as ( - SELECT t.id, t.instrument_id, igi.instrument_group_id from timeseries t - JOIN instrument i on i.id = t.instrument_id and not i.deleted - JOIN instrument_group_instruments igi on igi.instrument_id = i.id - ) - SELECT ig.id, - ig.slug, - ig.name, - ig.description, - ig.creator, - ig.create_date, - ig.updater, - ig.update_date, - ig.project_id, - ig.deleted, - COALESCE(ic.i_count,0) as instrument_count, - COALESCE(count(ti.id),0) as timeseries_count - FROM instrument_group ig - LEFT JOIN instrument_count ic on ic.instrument_group_id = ig.id - LEFT JOIN timeseries_instruments ti on ig.id = ti.instrument_group_id - GROUP BY ig.id, ic.i_count - ORDER BY ig.name + SELECT + igi.instrument_group_id, + count(igi.instrument_group_id) as i_count + FROM instrument_group_instruments igi + JOIN instrument i on igi.instrument_id = i.id and not i.deleted + GROUP BY igi.instrument_group_id + ), + timeseries_instruments as ( + SELECT t.id, t.instrument_id, igi.instrument_group_id from timeseries t + JOIN instrument i on i.id = t.instrument_id and not i.deleted + JOIN instrument_group_instruments igi on igi.instrument_id = i.id + ) + SELECT ig.id, + ig.slug, + ig.name, + ig.description, + ig.created_by, + ig.created_at, + ig.updated_by, + ig.updated_at, + ig.project_id, + COALESCE(ic.i_count,0) as instrument_count, + COALESCE(count(ti.id),0) as timeseries_count + FROM 
instrument_group ig + LEFT JOIN instrument_count ic on ic.instrument_group_id = ig.id + LEFT JOIN timeseries_instruments ti on ig.id = ti.instrument_group_id + GROUP BY ig.id, ic.i_count + ORDER BY ig.name ); CREATE OR REPLACE VIEW v_instrument_status AS ( diff --git a/api/migrations/repeat/0050__views_timeseries.sql b/api/migrations/repeat/0050__views_timeseries.sql index 457731f0..01b4815d 100644 --- a/api/migrations/repeat/0050__views_timeseries.sql +++ b/api/migrations/repeat/0050__views_timeseries.sql @@ -89,8 +89,9 @@ CREATE OR REPLACE VIEW v_collection_group_details AS ( LEFT JOIN LATERAL ( SELECT COALESCE(jsonb_agg(to_jsonb(t.*) || jsonb_build_object( 'latest_time', tm.time, - 'latest_value', tm.value - )), '[]'::jsonb) AS timeseries + 'latest_value', tm.value, + 'sort_order', cgt.sort_order + ) ORDER BY sort_order ASC, t.name ASC), '[]'::jsonb) timeseries FROM collection_group_timeseries cgt LEFT JOIN v_timeseries t on t.id = cgt.timeseries_id LEFT JOIN LATERAL ( diff --git a/api/migrations/repeat/0060__views_alerts.sql b/api/migrations/repeat/0060__views_alerts.sql index 6edd34ce..1a1780a9 100644 --- a/api/migrations/repeat/0060__views_alerts.sql +++ b/api/migrations/repeat/0060__views_alerts.sql @@ -1,7 +1,7 @@ CREATE OR REPLACE VIEW v_alert AS ( SELECT a.id AS id, a.alert_config_id AS alert_config_id, - a.create_date AS create_date, + a.created_at AS created_at, p.id AS project_id, p.name AS project_name, ac.name AS name, @@ -25,26 +25,26 @@ CREATE OR REPLACE VIEW v_alert AS ( CREATE OR REPLACE VIEW v_alert_config AS ( SELECT - ac.id AS id, - ac.name AS name, - ac.body AS body, - prf1.id AS creator, - COALESCE(prf1.username, 'midas') AS creator_username, - ac.create_date AS create_date, - prf2.id AS updater, - prf2.username AS updater_username, - ac.update_date AS update_date, - prj.id AS project_id, - prj.name AS project_name, - atype.id AS alert_type_id, - atype.name AS alert_type, - ac.start_date AS start_date, - ac.schedule_interval::text AS 
schedule_interval, - ac.mute_consecutive_alerts AS mute_consecutive_alerts, - ac.remind_interval::text AS remind_interval, - ac.warning_interval::text AS warning_interval, - ac.last_checked AS last_checked, - ac.last_reminded AS last_reminded, + ac.id, + ac.name, + ac.body, + prf1.id created_by, + COALESCE(prf1.username, 'midas') created_by_username, + ac.created_at, + prf2.id updated_by, + prf2.username updated_by_username, + ac.updated_at, + prj.id project_id, + prj.name project_name, + atype.id alert_type_id, + atype.name alert_type, + ac.started_at, + ac.schedule_interval::text schedule_interval, + ac.mute_consecutive_alerts, + ac.remind_interval::text remind_interval, + ac.warning_interval::text warning_interval, + ac.last_checked_at, + ac.last_reminded_at, null::timestamptz create_next_submittal_from, ( SELECT COALESCE(jsonb_agg(jsonb_build_object( @@ -88,30 +88,30 @@ CREATE OR REPLACE VIEW v_alert_config AS ( WHERE aps.alert_config_id = ac.id ) ) all_emails - ) AS alert_email_subscriptions + ) alert_email_subscriptions FROM alert_config ac INNER JOIN project prj ON ac.project_id = prj.id INNER JOIN alert_type atype ON ac.alert_type_id = atype.id - LEFT JOIN profile prf1 ON ac.creator = prf1.id - LEFT JOIN profile prf2 ON ac.updater = prf2.id + LEFT JOIN profile prf1 ON ac.created_by = prf1.id + LEFT JOIN profile prf2 ON ac.updated_by = prf2.id WHERE NOT ac.deleted ); CREATE OR REPLACE VIEW v_submittal AS ( SELECT - sub.id AS id, - ac.id AS alert_config_id, - ac.name AS alert_config_name, - aty.id AS alert_type_id, - aty.name AS alert_type_name, - ac.project_id AS project_id, - sst.id AS submittal_status_id, - sst.name AS submittal_status_name, - sub.completion_date AS completion_date, - sub.create_date AS create_date, - sub.due_date AS due_date, - sub.marked_as_missing AS marked_as_missing, - sub.warning_sent AS warning_sent + sub.id, + ac.id alert_config_id, + ac.name alert_config_name, + aty.id alert_type_id, + aty.name alert_type_name, + ac.project_id, + 
sst.id submittal_status_id, + sst.name submittal_status_name, + sub.completed_at, + sub.created_at, + sub.due_at, + sub.marked_as_missing, + sub.warning_sent FROM submittal sub INNER JOIN alert_config ac ON sub.alert_config_id = ac.id INNER JOIN submittal_status sst ON sub.submittal_status_id = sst.id diff --git a/api/migrations/repeat/0090__views_plots.sql b/api/migrations/repeat/0090__views_plots.sql index 5f2eb0d2..dc028b56 100644 --- a/api/migrations/repeat/0090__views_plots.sql +++ b/api/migrations/repeat/0090__views_plots.sql @@ -4,10 +4,10 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( pc.slug, pc.name, pc.project_id, - pc.creator, - pc.create_date, - pc.updater, - pc.update_date, + pc.created_by, + pc.created_at, + pc.updated_by, + pc.updated_at, COALESCE(k.show_masked, 'true') AS show_masked, COALESCE(k.show_nonvalidated, 'true') AS show_nonvalidated, COALESCE(k.show_comments, 'true') AS show_comments, diff --git a/api/migrations/repeat/0100__views_datalogger.sql b/api/migrations/repeat/0100__views_datalogger.sql index c31b4f82..0593dd5f 100644 --- a/api/migrations/repeat/0100__views_datalogger.sql +++ b/api/migrations/repeat/0100__views_datalogger.sql @@ -1,23 +1,23 @@ CREATE OR REPLACE VIEW v_datalogger AS ( SELECT - dl.id AS id, - dl.sn AS sn, - dl.project_id AS project_id, - p1.id AS creator, - p1.username AS creator_username, - dl.create_date AS create_date, - p2.id AS updater, - p2.username AS updater_username, - dl.update_date AS update_date, - dl.name AS name, - dl.slug AS slug, - m.id AS model_id, - m.model AS model, + dl.id, + dl.sn, + dl.project_id, + p1.id created_by, + p1.username created_by_username, + dl.created_at, + p2.id updated_by, + p2.username updated_by_username, + dl.updated_at, + dl.name, + dl.slug, + m.id model_id, + m.model, COALESCE(e.errors, '{}')::text[] AS errors, COALESCE(t.tables, '[]'::jsonb) AS tables FROM datalogger dl - INNER JOIN profile p1 ON dl.creator = p1.id - INNER JOIN profile p2 ON dl.updater = p2.id + INNER 
JOIN profile p1 ON dl.created_by = p1.id + INNER JOIN profile p2 ON dl.updated_by = p2.id INNER JOIN datalogger_model m ON dl.model_id = m.id LEFT JOIN ( SELECT @@ -45,7 +45,7 @@ CREATE OR REPLACE VIEW v_datalogger_preview AS ( SELECT p.datalogger_table_id, p.preview, - p.update_date + p.updated_at FROM datalogger_preview p INNER JOIN datalogger_table dt ON dt.id = p.datalogger_table_id INNER JOIN datalogger dl ON dl.id = dt.datalogger_id @@ -57,7 +57,7 @@ CREATE OR REPLACE VIEW v_datalogger_equivalency_table AS ( dt.datalogger_id AS datalogger_id, dt.id AS datalogger_table_id, dt.table_name AS datalogger_table_name, - COALESCE(jsonb_agg(row_to_jsonb(eq)) FILTER (WHERE eq.id IS NOT NULL), '[]'::jsonb) AS fields + COALESCE(jsonb_agg(to_jsonb(eq)) FILTER (WHERE eq.id IS NOT NULL), '[]'::jsonb) AS fields FROM datalogger_table dt INNER JOIN datalogger dl ON dt.datalogger_id = dl.id LEFT JOIN LATERAL ( diff --git a/api/migrations/repeat/0110__views_evaluations.sql b/api/migrations/repeat/0110__views_evaluations.sql index 4cdedd85..34c1d1a0 100644 --- a/api/migrations/repeat/0110__views_evaluations.sql +++ b/api/migrations/repeat/0110__views_evaluations.sql @@ -3,19 +3,19 @@ CREATE OR REPLACE VIEW v_evaluation AS ( ev.id, ev.name, ev.body, - prf1.id creator, - COALESCE(prf1.username, 'midas') creator_username, - ev.create_date, - prf2.id updater, - prf2.username updater_username, - ev.update_date, + prf1.id created_by, + COALESCE(prf1.username, 'midas') created_by_username, + ev.created_at, + prf2.id updated_by, + prf2.username updated_by_username, + ev.updated_at, prj.id project_id, prj.name project_name, ac.id alert_config_id, ac.name alert_config_name, ev.submittal_id, - ev.start_date, - ev.end_date, + ev.started_at, + ev.ended_at, ( SELECT COALESCE(jsonb_agg(jsonb_build_object( 'instrument_id', id, @@ -30,8 +30,8 @@ CREATE OR REPLACE VIEW v_evaluation AS ( ) instruments FROM evaluation ev INNER JOIN project prj ON ev.project_id = prj.id - LEFT JOIN profile prf1 ON 
ev.creator = prf1.id - LEFT JOIN profile prf2 ON ev.updater = prf2.id + LEFT JOIN profile prf1 ON ev.created_by = prf1.id + LEFT JOIN profile prf2 ON ev.updated_by = prf2.id LEFT JOIN submittal sub ON sub.id = ev.submittal_id LEFT JOIN alert_config ac ON ac.id = sub.alert_config_id ); diff --git a/api/migrations/repeat/0120__views_alert_check.sql b/api/migrations/repeat/0120__views_alert_check.sql index 2f7387fb..999ee9b2 100644 --- a/api/migrations/repeat/0120__views_alert_check.sql +++ b/api/migrations/repeat/0120__views_alert_check.sql @@ -5,35 +5,35 @@ CREATE OR REPLACE VIEW v_alert_check_measurement_submittal AS ( null AS submittal, COALESCE( ac.warning_interval != INTERVAL '0' - AND sub.completion_date IS NULL - AND NOW() >= sub.due_date - ac.warning_interval - AND NOW() < sub.due_date + AND sub.completed_at IS NULL + AND NOW() >= sub.due_at - ac.warning_interval + AND NOW() < sub.due_at AND true = ANY(SELECT UNNEST(array_agg(lm.time)) IS NULL), true )::boolean AS should_warn, COALESCE( - sub.completion_date IS NULL + sub.completed_at IS NULL AND NOT sub.marked_as_missing - AND NOW() >= sub.due_date + AND NOW() >= sub.due_at AND true = ANY(SELECT UNNEST(array_agg(lm.time)) IS NULL), true )::boolean AS should_alert, COALESCE( ac.remind_interval != INTERVAL '0' - AND ac.last_reminded IS NOT NULL - AND sub.completion_date IS NULL + AND ac.last_reminded_at IS NOT NULL + AND sub.completed_at IS NULL AND NOT sub.marked_as_missing - AND NOW() >= sub.due_date + AND NOW() >= sub.due_at -- subtract 10 second constant to account for ticker accuracy/execution time - AND NOW() >= ac.last_reminded + ac.remind_interval - INTERVAL '10 seconds', + AND NOW() >= ac.last_reminded_at + ac.remind_interval - INTERVAL '10 seconds', true )::boolean AS should_remind, COALESCE(json_agg(json_build_object( 'instrument_name', inst.name, 'timeseries_name', COALESCE(ts.name, 'No timeseries for instrument'), 'status', CASE - WHEN NOW() >= sub.due_date THEN 'missing' - WHEN NOW() < 
sub.due_date THEN 'warning' + WHEN NOW() >= sub.due_at THEN 'missing' + WHEN NOW() < sub.due_at THEN 'warning' ELSE 'N/A' END )) FILTER (WHERE lm.time IS NULL), '[]')::text AS affected_timeseries @@ -46,7 +46,7 @@ CREATE OR REPLACE VIEW v_alert_check_measurement_submittal AS ( LEFT JOIN LATERAL ( SELECT timeseries_id, - MAX(time) FILTER (WHERE time > sub.create_date AND time <= sub.due_date) AS time + MAX(time) FILTER (WHERE time > sub.created_at AND time <= sub.due_at) AS time FROM timeseries_measurement WHERE timeseries_id = ANY(SELECT id FROM timeseries WHERE instrument_id = inst.id) AND NOT timeseries_id = ANY(SELECT timeseries_id FROM instrument_constants) @@ -65,24 +65,24 @@ CREATE OR REPLACE VIEW v_alert_check_evaluation_submittal AS ( null AS submittal, COALESCE( ac.warning_interval != INTERVAL '0' - AND sub.completion_date IS NULL - AND NOW() >= sub.due_date - ac.warning_interval - AND NOW() < sub.due_date, + AND sub.completed_at IS NULL + AND NOW() >= sub.due_at - ac.warning_interval + AND NOW() < sub.due_at, true )::boolean AS should_warn, COALESCE( - sub.completion_date IS NULL - AND NOW() >= sub.due_date + sub.completed_at IS NULL + AND NOW() >= sub.due_at AND NOT sub.marked_as_missing, true )::boolean AS should_alert, COALESCE( ac.remind_interval != INTERVAL '0' - AND ac.last_reminded IS NOT NULL - AND sub.completion_date IS NULL - AND NOW() >= sub.due_date + AND ac.last_reminded_at IS NOT NULL + AND sub.completed_at IS NULL + AND NOW() >= sub.due_at -- subtract 10 second constant to account for ticker accuracy/execution time - AND NOW() >= ac.last_reminded + ac.remind_interval - INTERVAL '10 seconds' + AND NOW() >= ac.last_reminded_at + ac.remind_interval - INTERVAL '10 seconds' AND NOT sub.marked_as_missing, true )::boolean AS should_remind diff --git a/api/migrations/repeat/0130__views_district_rollup.sql b/api/migrations/repeat/0130__views_district_rollup.sql index 768e99f2..8fbe6a56 100644 --- 
a/api/migrations/repeat/0130__views_district_rollup.sql +++ b/api/migrations/repeat/0130__views_district_rollup.sql @@ -1,29 +1,29 @@ CREATE OR REPLACE VIEW v_district_rollup AS ( SELECT - ac.alert_type_id AS alert_type_id, - dt.office_id AS office_id, - dt.initials AS district_initials, - prj.name AS project_name, - prj.id AS project_id, - DATE_TRUNC('month', sub.due_date) AS the_month, - COUNT(sub.*) AS expected_total_submittals, - COUNT(sub.completion_date) FILTER ( - WHERE sub.completion_date IS NOT NULL - ) AS actual_total_submittals, + ac.alert_type_id, + dt.office_id, + dt.initials district_initials, + prj.name project_name, + prj.id project_id, + DATE_TRUNC('month', sub.due_at)::timestamptz "month", + COUNT(sub.*) expected_total_submittals, + COUNT(sub.completed_at) FILTER ( + WHERE sub.completed_at IS NOT NULL + ) actual_total_submittals, COUNT(sub.*) FILTER ( WHERE sub.submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b' - ) AS red_submittals, + ) red_submittals, COUNT(sub.*) FILTER ( WHERE sub.submittal_status_id = 'ef9a3235-f6e2-4e6c-92f6-760684308f7f' - ) AS yellow_submittals, + ) yellow_submittals, COUNT(sub.*) FILTER ( WHERE sub.submittal_status_id = '0c0d6487-3f71-4121-8575-19514c7b9f03' - ) AS green_submittals + ) green_submittals FROM alert_config ac INNER JOIN project prj ON ac.project_id = prj.id LEFT JOIN district dt ON dt.id = prj.district_id LEFT JOIN submittal sub ON sub.alert_config_id = ac.id - WHERE sub.due_date <= NOW() - GROUP BY ac.alert_type_id, dt.office_id, dt.initials, prj.id, prj.name, DATE_TRUNC('month', sub.due_date) - ORDER BY DATE_TRUNC('month', sub.due_date), ac.alert_type_id + WHERE sub.due_at <= NOW() + GROUP BY ac.alert_type_id, dt.office_id, dt.initials, prj.id, prj.name, DATE_TRUNC('month', sub.due_at) + ORDER BY DATE_TRUNC('month', sub.due_at), ac.alert_type_id ); diff --git a/api/migrations/repeat/0140__views_depth_based_instruments.sql b/api/migrations/repeat/0140__views_depth_based_instruments.sql index 
01e7a6ec..2720b001 100644 --- a/api/migrations/repeat/0140__views_depth_based_instruments.sql +++ b/api/migrations/repeat/0140__views_depth_based_instruments.sql @@ -200,21 +200,21 @@ CREATE OR REPLACE VIEW v_incl_measurement AS ( ) ORDER BY r.segment_id), '[]'::jsonb) AS measurements FROM (SELECT DISTINCT seg.instrument_id, - seg.id AS segment_id, + seg.id segment_id, q.time, q.depth, q.a0, q.a180, q.b0, q.b180, - q.a_checksum, - q.a_comb, - COALESCE(q.a_inc, 0) a_inc, - COALESCE(q.a_cum_dev, 0) a_cum_dev, - q.b_checksum, - q.b_comb, - COALESCE(q.b_inc, 0) b_inc, - COALESCE(q.b_cum_dev, 0) b_cum_dev + (q.a0 + q.a180) a_checksum, + (q.a0 - q.a180) / 2 a_comb, + (q.a0 - q.a180) / 2 / NULLIF(q.bottom, 0) * 24 a_inc, + sum((q.a0 - q.a180) / 2 / NULLIF(q.bottom, 0) * 24) OVER (ORDER BY q.depth desc) a_cum_dev, + (q.b0 + q.b180) b_checksum, + (q.b0 - q.b180) / 2 b_comb, + (q.b0 - q.b180) / 2 / NULLIF(q.bottom, 0) * 24 b_inc, + sum((q.b0 - q.b180) / 2 / NULLIF(q.bottom, 0) * 24) OVER (ORDER BY q.depth desc) b_cum_dev FROM incl_segment seg INNER JOIN incl_opts opts ON opts.instrument_id = seg.instrument_id LEFT JOIN LATERAL ( @@ -225,25 +225,18 @@ CREATE OR REPLACE VIEW v_incl_measurement AS ( a180.value a180, b0.value b0, b180.value b180, - (a0.value + a180.value) a_checksum, - (a0.value - a180.value) / 2 a_comb, - (a0.value - a180.value) / 2 / NULLIF(a.bottom, 0) * 24 a_inc, - sum((a0.value - a180.value) / 2 / NULLIF(a.bottom, 0) * 24) OVER (ORDER BY d.value desc) a_cum_dev, - (b0.value + b180.value) b_checksum, - (b0.value - b180.value) / 2 b_comb, - (b0.value - b180.value) / 2 / NULLIF(a.bottom, 0) * 24 b_inc, - sum((items.b0 - items.b180) / 2 / NULLIF(a.bottom, 0) * 24) OVER (ORDER BY d.value desc) b_cum_dev + locf(b.value) OVER (ORDER BY a.time ASC) AS bottom FROM ( SELECT DISTINCT time FROM timeseries_measurement WHERE timeseries_id IN (SELECT id FROM timeseries WHERE instrument_id = seg.instrument_id) UNION SELECT time FROM timeseries_measurement WHERE time = 
opts.initial_time ) a - LEFT JOIN timeseries_measurement d ON d.timeseries_id = opts.depth_timeseries_id AND d.time = a.time - LEFT JOIN timeseries_measurement a0 ON a0.timeseries_id = opts.a0_timeseries_id AND a0.time = a.time - LEFT JOIN timeseries_measurement a180 ON a180.timeseries_id = opts.a180_timeseries_id AND a180.time = a.time - LEFT JOIN timeseries_measurement b0 ON b0.timeseries_id = opts.b0_timeseries_id AND b0.time = a.time - LEFT JOIN timeseries_measurement b180 ON b180.timeseries_id = opts.b180_timeseries_id AND b180.time = a.time + LEFT JOIN timeseries_measurement d ON d.timeseries_id = seg.depth_timeseries_id AND d.time = a.time + LEFT JOIN timeseries_measurement a0 ON a0.timeseries_id = seg.a0_timeseries_id AND a0.time = a.time + LEFT JOIN timeseries_measurement a180 ON a180.timeseries_id = seg.a180_timeseries_id AND a180.time = a.time + LEFT JOIN timeseries_measurement b0 ON b0.timeseries_id = seg.b0_timeseries_id AND b0.time = a.time + LEFT JOIN timeseries_measurement b180 ON b180.timeseries_id = seg.b180_timeseries_id AND b180.time = a.time LEFT JOIN timeseries_measurement b ON b.timeseries_id = opts.bottom_elevation_timeseries_id AND b.time = a.time ) q ON true) r GROUP BY r.instrument_id, r.time diff --git a/api/migrations/repeat/0160__views_report_config.sql b/api/migrations/repeat/0160__views_report_config.sql index 4c5ff9c3..f0ec56eb 100644 --- a/api/migrations/repeat/0160__views_report_config.sql +++ b/api/migrations/repeat/0160__views_report_config.sql @@ -7,12 +7,12 @@ CREATE OR REPLACE VIEW v_report_config AS ( rc.project_id, p.name AS project_name, dt.name AS district_name, - rc.creator, - cp.username AS creator_username, - rc.create_date, - rc.updater, - up.username AS updater_username, - rc.update_date, + rc.created_by, + cp.username AS created_by_username, + rc.created_at, + rc.updated_by, + up.username AS updated_by_username, + rc.updated_at, COALESCE(pc.configs, '[]'::jsonb) AS plot_configs, jsonb_build_object( 'date_range', 
jsonb_build_object( @@ -31,8 +31,8 @@ CREATE OR REPLACE VIEW v_report_config AS ( FROM report_config rc INNER JOIN project p ON rc.project_id = p.id LEFT JOIN district dt ON p.district_id = dt.id - INNER JOIN profile cp ON cp.id = rc.creator - LEFT JOIN profile up ON up.id = rc.updater + INNER JOIN profile cp ON cp.id = rc.created_by + LEFT JOIN profile up ON up.id = rc.updated_by LEFT JOIN LATERAL ( SELECT jsonb_agg(jsonb_build_object( 'id', pc.id, diff --git a/api/migrations/repeat/0170__views_uploader.sql b/api/migrations/repeat/0170__views_uploader.sql index 32253bf5..50145f8b 100644 --- a/api/migrations/repeat/0170__views_uploader.sql +++ b/api/migrations/repeat/0170__views_uploader.sql @@ -5,12 +5,12 @@ CREATE VIEW v_uploader_config AS ( u.slug, u.name, u.description, - u.create_date, - u.creator, - pc.username creator_username, - u.update_date, - u.updater, - pu.username updater_username, + u.created_at, + u.created_by, + pc.username created_by_username, + u.updated_by, + u.updated_at, + pu.username updated_by_username, u.type, u.tz_name, u.time_field, @@ -21,6 +21,6 @@ CREATE VIEW v_uploader_config AS ( u.comment_field_enabled, u.comment_field FROM uploader_config u - INNER JOIN profile pc ON u.creator = pc.id - LEFT JOIN profile pu ON u.updater = pu.id + INNER JOIN profile pc ON u.created_by = pc.id + LEFT JOIN profile pu ON u.updated_by = pu.id ); diff --git a/api/migrations/schema/V1.18.00__timeseries_type_not_null.sql b/api/migrations/schema/V1.18.00__timeseries_type_not_null.sql new file mode 100644 index 00000000..da270224 --- /dev/null +++ b/api/migrations/schema/V1.18.00__timeseries_type_not_null.sql @@ -0,0 +1,2 @@ +UPDATE timeseries SET type='standard' WHERE type IS NULL; +ALTER TABLE timeseries ALTER COLUMN type SET NOT NULL; diff --git a/api/migrations/schema/V1.19.00__audit_into_rename.sql b/api/migrations/schema/V1.19.00__audit_into_rename.sql new file mode 100644 index 00000000..2671b1f7 --- /dev/null +++ 
b/api/migrations/schema/V1.19.00__audit_into_rename.sql @@ -0,0 +1,74 @@ +ALTER TABLE collection_group RENAME creator TO created_by; +ALTER TABLE collection_group RENAME create_date TO created_at; +ALTER TABLE collection_group RENAME updater TO updated_by; +ALTER TABLE collection_group RENAME update_date TO updated_at; + +ALTER TABLE report_config RENAME creator TO created_by; +ALTER TABLE report_config RENAME create_date TO created_at; +ALTER TABLE report_config RENAME updater TO updated_by; +ALTER TABLE report_config RENAME update_date TO updated_at; + +ALTER TABLE report_download_job RENAME creator TO created_by; +ALTER TABLE report_download_job RENAME create_date TO created_at; +ALTER TABLE report_download_job RENAME progress_update_date TO progress_updated_at; + +ALTER TABLE datalogger RENAME creator TO created_by; +ALTER TABLE datalogger RENAME create_date TO created_at; +ALTER TABLE datalogger RENAME updater TO updated_by; +ALTER TABLE datalogger RENAME update_date TO updated_at; + +ALTER TABLE project RENAME creator TO created_by; +ALTER TABLE project RENAME create_date TO created_at; +ALTER TABLE project RENAME updater TO updated_by; +ALTER TABLE project RENAME update_date TO updated_at; + +ALTER TABLE instrument_group RENAME creator TO created_by; +ALTER TABLE instrument_group RENAME create_date TO created_at; +ALTER TABLE instrument_group RENAME updater TO updated_by; +ALTER TABLE instrument_group RENAME update_date TO updated_at; + +ALTER TABLE instrument RENAME creator TO created_by; +ALTER TABLE instrument RENAME create_date TO created_at; +ALTER TABLE instrument RENAME updater TO updated_by; +ALTER TABLE instrument RENAME update_date TO updated_at; + +ALTER TABLE alert RENAME create_date TO created_at; + +ALTER TABLE alert_config RENAME creator TO created_by; +ALTER TABLE alert_config RENAME create_date TO created_at; +ALTER TABLE alert_config RENAME updater TO updated_by; +ALTER TABLE alert_config RENAME update_date TO updated_at; +ALTER TABLE 
alert_config RENAME last_checked TO last_checked_at; +ALTER TABLE alert_config RENAME last_reminded TO last_reminded_at; +ALTER TABLE alert_config RENAME start_date TO started_at; + +ALTER TABLE instrument_note RENAME creator TO created_by; +ALTER TABLE instrument_note RENAME create_date TO created_at; +ALTER TABLE instrument_note RENAME updater TO updated_by; +ALTER TABLE instrument_note RENAME update_date TO updated_at; + +ALTER TABLE plot_configuration RENAME creator TO created_by; +ALTER TABLE plot_configuration RENAME create_date TO created_at; +ALTER TABLE plot_configuration RENAME updater TO updated_by; +ALTER TABLE plot_configuration RENAME update_date TO updated_at; + +ALTER TABLE uploader_config RENAME creator TO created_by; +ALTER TABLE uploader_config RENAME create_date TO created_at; +ALTER TABLE uploader_config RENAME updater TO updated_by; +ALTER TABLE uploader_config RENAME update_date TO updated_at; + +ALTER TABLE evaluation RENAME creator TO created_by; +ALTER TABLE evaluation RENAME create_date TO created_at; +ALTER TABLE evaluation RENAME updater TO updated_by; +ALTER TABLE evaluation RENAME update_date TO updated_at; +ALTER TABLE evaluation RENAME start_date TO started_at; +ALTER TABLE evaluation RENAME end_date TO ended_at; + +ALTER TABLE submittal RENAME completion_date TO completed_at; +ALTER TABLE submittal RENAME due_date TO due_at; +ALTER TABLE submittal RENAME create_date TO created_at; + +ALTER TABLE datalogger_preview RENAME update_date TO updated_at; + +ALTER TABLE datalogger ALTER COLUMN updated_by DROP NOT NULL; +ALTER TABLE datalogger ALTER COLUMN updated_at DROP NOT NULL; diff --git a/api/migrations/schema/V1.20.00__fix_missing_profile_references.sql b/api/migrations/schema/V1.20.00__fix_missing_profile_references.sql new file mode 100644 index 00000000..3d4ed59c --- /dev/null +++ b/api/migrations/schema/V1.20.00__fix_missing_profile_references.sql @@ -0,0 +1,107 @@ +-- To clean up tables where the create/update profiles are null, 
+-- we can create a user with a zero-value UUID, then update +-- those column to reference this profile + +INSERT INTO profile (id, edipi, username, display_name, email, is_admin) +VALUES ('00000000-0000-0000-0000-000000000000', 0, 'MIDAS', 'MIDAS', 'MIDAS', false); + +UPDATE project +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE datalogger +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE instrument_group +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE instrument +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE alert_config +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE instrument_note +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE collection_group +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE plot_configuration +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE evaluation +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != 
'00000000-0000-0000-0000-000000000000'::uuid); + +UPDATE project +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE datalogger +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE instrument_group +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE instrument +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE alert_config +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE instrument_note +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE collection_group +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE plot_configuration +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE evaluation +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != 
'00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; + +ALTER TABLE project +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE datalogger +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE instrument_group +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE instrument +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE alert_config +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE instrument_note +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE collection_group +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE plot_configuration +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE evaluation +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; 
diff --git a/api/queries/alert_check.sql b/api/queries/alert_check.sql index f16abcf6..ee6e5ec1 100644 --- a/api/queries/alert_check.sql +++ b/api/queries/alert_check.sql @@ -1,6 +1,6 @@ --- name: AlertConfigListUpdateLastChecked :many +-- name: AlertConfigListUpdateLastCheckedAt :many update alert_config ac1 -set last_checked = now() +set last_checked_at = now() from ( select * from v_alert_config @@ -9,20 +9,20 @@ where ac1.id = ac2.id returning ac2.*; --- name: AlertConfigUpdateLastReminded :exec -update alert_config set last_reminded = $2 where id = $1; +-- name: AlertConfigUpdateLastRemindedAt :exec +update alert_config set last_reminded_at = $2 where id = $1; -- name: SubmittalUpdateCompletionDateOrWarningSent :exec update submittal set submittal_status_id = $2, - completion_date = $3, + completed_at = $3, warning_sent = $4 where id = $1; -- name: SubmittalCreateNextFromNewAlertConfigDate :exec -insert into submittal (alert_config_id, create_date, due_date) +insert into submittal (alert_config_id, created_at, due_at) select ac.id, sqlc.arg(date)::timestamptz, diff --git a/api/queries/alert_config.sql b/api/queries/alert_config.sql index 9800882d..35d47cd5 100644 --- a/api/queries/alert_config.sql +++ b/api/queries/alert_config.sql @@ -31,13 +31,13 @@ insert into alert_config ( name, body, alert_type_id, - start_date, + started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, - creator, - create_date + created_by, + created_at ) values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11) returning id; @@ -51,8 +51,8 @@ delete from alert_config_instrument where alert_config_id = $1; -- name: SubmittalCreateNextFromExistingAlertConfigDate :exec -insert into submittal (alert_config_id, due_date) -select ac.id, ac.create_date + ac.schedule_interval +insert into submittal (alert_config_id, due_at) +select ac.id, ac.created_at + ac.schedule_interval from alert_config ac where ac.id = $1; @@ -61,32 +61,32 @@ where ac.id = $1; update alert_config set 
name = $3, body = $4, - start_date = $5, + started_at = $5, schedule_interval = $6, mute_consecutive_alerts = $7, remind_interval = $8, warning_interval = $9, - updater = $10, - update_date = $11 + updated_by = $10, + updated_at = $11 where id = $1 and project_id = $2; -- name: SubmittalUpdateNextForAlertConfig :one update submittal -set due_date = sq.new_due_date +set due_at = sq.new_due_at from ( select sub.id as submittal_id, - sub.create_date + ac.schedule_interval as new_due_date + sub.created_at + ac.schedule_interval as new_due_at from submittal sub inner join alert_config ac on sub.alert_config_id = ac.id where sub.alert_config_id = $1 - and sub.due_date > now() - and sub.completion_date is null + and sub.due_at > now() + and sub.completed_at is null and not sub.marked_as_missing ) sq where id = sq.submittal_id -and sq.new_due_date > now() +and sq.new_due_at > now() returning id; diff --git a/api/queries/alert_measurement_check.sql b/api/queries/alert_measurement_check.sql index 62a42cf5..b4dbc803 100644 --- a/api/queries/alert_measurement_check.sql +++ b/api/queries/alert_measurement_check.sql @@ -2,7 +2,7 @@ select * from v_alert_check_evaluation_submittal where submittal_id = any( select id from submittal - where completion_date is null and not marked_as_missing + where completed_at is null and not marked_as_missing ); @@ -10,5 +10,5 @@ where submittal_id = any( select * from v_alert_check_measurement_submittal where submittal_id = any( select id from submittal - where completion_date is null and not marked_as_missing + where completed_at is null and not marked_as_missing ); diff --git a/api/queries/collection_group.sql b/api/queries/collection_group.sql index b96dbd33..24cc4d78 100644 --- a/api/queries/collection_group.sql +++ b/api/queries/collection_group.sql @@ -7,13 +7,13 @@ select * from v_collection_group_details where id = $1; -- name: CollectionGroupCreate :one -insert into collection_group (project_id, name, slug, creator, create_date, 
sort_order) -values (sqlc.arg(project_id), sqlc.arg(name)::varchar, slugify(sqlc.arg(name)::varchar, 'collection_group'), sqlc.arg(creator), sqlc.arg(create_date), sqlc.arg(sort_order)) +insert into collection_group (project_id, name, slug, created_by, created_at, sort_order) +values (sqlc.arg(project_id), sqlc.arg(name)::varchar, slugify(sqlc.arg(name)::varchar, 'collection_group'), sqlc.arg(created_by), sqlc.arg(created_at), sqlc.arg(sort_order)) returning *; -- name: CollectionGroupUpdate :one -update collection_group set name=$3, updater=$4, update_date=$5, sort_order=$6 +update collection_group set name=$3, updated_by=$4, updated_at=$5, sort_order=$6 where project_id=$1 and id=$2 returning *; diff --git a/api/queries/datalogger.sql b/api/queries/datalogger.sql index cfa1ac90..00fb8cff 100644 --- a/api/queries/datalogger.sql +++ b/api/queries/datalogger.sql @@ -27,7 +27,7 @@ select * from v_datalogger where id=$1; -- name: DataloggerCreate :one -insert into datalogger (name, sn, project_id, creator, updater, slug, model_id) +insert into datalogger (name, sn, project_id, created_by, updated_by, slug, model_id) values ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) returning id; @@ -35,8 +35,8 @@ returning id; -- name: DataloggerUpdate :exec update datalogger set name=$2, - updater=$3, - update_date=$4 + updated_by=$3, + updated_at=$4 where id=$1; @@ -44,12 +44,12 @@ where id=$1; update datalogger_hash set "hash"=$2 where datalogger_id=$1; --- name: DataloggerUpdateUpdater :exec -update datalogger set updater=$2, update_date=$3 where id=$1; +-- name: DataloggerUpdateAuditInfo :exec +update datalogger set updated_by=$2, updated_at=$3 where id=$1; -- name: DataloggerDelete :exec -update datalogger set deleted=true, updater=$2, update_date=$3 where id=$1; +update datalogger set deleted=true, updated_by=$2, updated_at=$3 where id=$1; -- name: DataloggerTablePreviewGet :one diff --git a/api/queries/datalogger_telemetry.sql b/api/queries/datalogger_telemetry.sql 
index d31a3b4e..837395ff 100644 --- a/api/queries/datalogger_telemetry.sql +++ b/api/queries/datalogger_telemetry.sql @@ -11,17 +11,17 @@ limit 1; -- name: DataloggerTablePreviewCreate :exec -insert into datalogger_preview (datalogger_table_id, preview, update_date) values ($1, $2, $3); +insert into datalogger_preview (datalogger_table_id, preview, updated_at) values ($1, $2, $3); -- name: DataloggerTablePreviewUpdate :exec -update datalogger_preview set preview = $3, update_date = $4 -where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); +update datalogger_preview set preview = $3, updated_at = $4 +where datalogger_table_id = any(select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); -- name: DataloggerErrorDelete :exec delete from datalogger_error -where datalogger_table_id in (select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); +where datalogger_table_id = any(select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); -- name: DataloggerErrorCreate :exec diff --git a/api/queries/district_rollup.sql b/api/queries/district_rollup.sql index 6cfae010..6bb93396 100644 --- a/api/queries/district_rollup.sql +++ b/api/queries/district_rollup.sql @@ -2,13 +2,13 @@ select * from v_district_rollup where alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid and project_id=sqlc.arg(project_id) -and the_month >= date_trunc('month', sqlc.arg(start_month_time)::timestamptz) -and the_month <= date_trunc('month', sqlc.arg(end_month_time)::timestamptz); +and "month" >= date_trunc('month', sqlc.arg(start_month_time)::timestamptz) +and "month" <= date_trunc('month', sqlc.arg(end_month_time)::timestamptz); -- name: DistrictRollupListMeasurementForProjectAlertConfig :many select * from v_district_rollup where alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::uuid and project_id=sqlc.arg(project_id) 
-and the_month >= date_trunc('month', sqlc.arg(start_month_time)::timestamptz) -and the_month <= date_trunc('month', sqlc.arg(end_month_time)::timestamptz); +and "month" >= date_trunc('month', sqlc.arg(start_month_time)::timestamptz) +and "month" <= date_trunc('month', sqlc.arg(end_month_time)::timestamptz); diff --git a/api/queries/evaluation.sql b/api/queries/evaluation.sql index 69b9c0d7..cb4d8cb4 100644 --- a/api/queries/evaluation.sql +++ b/api/queries/evaluation.sql @@ -27,20 +27,20 @@ select * from v_evaluation where id=$1; -- name: SubmittalUpdateCompleteEvaluation :one update submittal sub1 set submittal_status_id = sq.submittal_status_id, - completion_date = now() + completed_at = now() from ( select sub2.id as submittal_id, case -- if completed before due date, mark submittal as green id - when now() <= sub2.due_date then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid + when now() <= sub2.due_at then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid -- if completed after due date, mark as yellow else 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::uuid end as submittal_status_id from submittal sub2 inner join alert_config ac on sub2.alert_config_id = ac.id where sub2.id=$1 - and sub2.completion_date is null + and sub2.completed_at is null and not sub2.marked_as_missing and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid ) sq @@ -49,12 +49,12 @@ returning sub1.*; -- name: SubmittalCreateNextEvaluation :exec -insert into submittal (alert_config_id, due_date) +insert into submittal (alert_config_id, due_at) select ac.id, now() + ac.schedule_interval from alert_config ac -where ac.id in (select sub.alert_config_id from submittal sub where sub.id=$1); +where ac.id = any(select sub.alert_config_id from submittal sub where sub.id=$1); -- name: EvaluationCreate :one @@ -63,10 +63,10 @@ insert into evaluation ( submittal_id, name, body, - start_date, - end_date, - creator, - create_date + started_at, + ended_at, + created_by, + created_at ) values 
($1,$2,$3,$4,$5,$6,$7,$8) returning id; @@ -83,10 +83,10 @@ insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2); update evaluation set name=$3, body=$4, - start_date=$5, - end_date=$6, - updater=$7, - update_date=$8 + started_at=$5, + ended_at=$6, + updated_by=$7, + updated_at=$8 where id=$1 and project_id=$2; diff --git a/api/queries/home.sql b/api/queries/home.sql index 3632b146..0a03db7b 100644 --- a/api/queries/home.sql +++ b/api/queries/home.sql @@ -3,5 +3,5 @@ select (select count(*) from instrument where not deleted) as instrument_count, (select count(*) from project where not deleted) as project_count, (select count(*) from instrument_group) as instrument_group_count, - (select count(*) from instrument where not deleted and create_date > now() - '7 days'::interval) as new_instruments_7d, + (select count(*) from instrument where not deleted and created_at > now() - '7 days'::interval) as new_instruments_7d, (select count(*) from timeseries_measurement where time > now() - '2 hours'::interval) as new_measurements_2h; diff --git a/api/queries/instrument.sql b/api/queries/instrument.sql index ff3f749a..783e4046 100644 --- a/api/queries/instrument.sql +++ b/api/queries/instrument.sql @@ -1,9 +1,3 @@ --- name: InstrumentList :many -select * -from v_instrument -where not deleted; - - -- name: InstrumentListForProject :many select i.* from v_instrument i @@ -21,8 +15,7 @@ where instrument_group_id = $1; -- name: InstrumentGet :one select * from v_instrument -where not deleted -and id = $1; +where id = $1; -- name: InstrumentGetCount :one @@ -30,14 +23,37 @@ select count(*) from instrument where not deleted; -- name: InstrumentCreate :one -insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) -values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) -returning id, slug; +insert into instrument (slug, name, type_id, geometry, station, 
station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab) +values ( + slugify(sqlc.arg(name), 'instrument'), + sqlc.arg(name), + sqlc.arg(type_id), + ST_SetSRID(ST_GeomFromGeoJSON(sqlc.arg(geometry)::json), 4326), + sqlc.arg(station), + sqlc.arg(station_offset), + sqlc.arg(created_by), + sqlc.arg(created_at), + sqlc.arg(nid_id), + sqlc.arg(usgs_id), + sqlc.arg(show_cwms_tab) +) returning id, slug; -- name: InstrumentCreateBatch :batchone -insert into instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) -values (slugify($1, 'instrument'), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10) +insert into instrument (slug, name, type_id, geometry, station, station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab) +values ( + slugify(sqlc.arg(name), 'instrument'), + sqlc.arg(name), + sqlc.arg(type_id), + ST_SetSRID(ST_GeomFromGeoJSON(sqlc.arg(geometry)::json), 4326), + sqlc.arg(station), + sqlc.arg(station_offset), + sqlc.arg(created_by), + sqlc.arg(created_at), + sqlc.arg(nid_id), + sqlc.arg(usgs_id), + sqlc.arg(show_cwms_tab) +) returning id, slug; @@ -49,41 +65,41 @@ select project_id from project_instrument where instrument_id = $1; select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count from project_instrument pi inner join instrument i on pi.instrument_id = i.id -where pi.instrument_id in (sqlc.arg(instrument_ids)::uuid[]) +where pi.instrument_id = any(sqlc.arg(instrument_ids)::uuid[]) group by pi.instrument_id, i.name order by i.name; -- name: InstrumentUpdate :exec update instrument set - name=$3, - type_id=$4, - geometry=$5, - updater=$6, - update_date=$7, - station=$8, - station_offset=$9, - nid_id=$10, - usgs_id=$11, - show_cwms_tab=$12 -where id = $2 -and id in ( + name=sqlc.arg(name), + type_id=sqlc.arg(type_id), + geometry=ST_SetSRID(ST_GeomFromGeoJSON(sqlc.arg(geometry)::json), 4326), + updated_by=sqlc.arg(updated_by), + 
updated_at=sqlc.arg(updated_at), + station=sqlc.arg(station), + station_offset=sqlc.arg(station_offset), + nid_id=sqlc.arg(nid_id), + usgs_id=sqlc.arg(usgs_id), + show_cwms_tab=sqlc.arg(show_cwms_tab) +where id = sqlc.arg(id) +and id = any( select instrument_id from project_instrument - where project_id = $1 + where project_id = sqlc.arg(project_id) ); -- name: InstrumentUpdateGeometry :one update instrument set - geometry=$3, - updater=$4, - update_date=now() -where id = $2 -and id in ( + geometry=ST_SetSRID(ST_GeomFromGeoJSON(sqlc.arg(geometry)::json), 4326), + updated_by=sqlc.arg(updated_by), + updated_at=now() +where id = sqlc.arg(id) +and id = any( select instrument_id from project_instrument - where project_id = $1 + where project_id = sqlc.arg(project_id) ) returning id; @@ -101,5 +117,5 @@ and id = $2; -- name: InstrumentIDNameListByIDs :many select id, name from instrument -where id in (sqlc.arg(instrument_ids)::uuid[]) +where id = any(sqlc.arg(instrument_ids)::uuid[]) and not deleted; diff --git a/api/queries/instrument_assign.sql b/api/queries/instrument_assign.sql index abd78a4a..1ef0c5fb 100644 --- a/api/queries/instrument_assign.sql +++ b/api/queries/instrument_assign.sql @@ -21,7 +21,7 @@ select i.name from project_instrument pi inner join instrument i on pi.instrument_id = i.id where pi.project_id = sqlc.arg(project_id) -and i.name in (sqlc.arg(instrument_names)::text[]) +and i.name = any(sqlc.arg(instrument_names)::text[]) and not i.deleted; @@ -31,7 +31,7 @@ from project_instrument pi inner join instrument i on pi.instrument_id = i.id inner join project p on pi.project_id = p.id where i.name = sqlc.arg(instrument_name) -and pi.project_id in (sqlc.arg(project_ids)::uuid[]) +and pi.project_id = any(sqlc.arg(project_ids)::uuid[]) and not i.deleted order by pi.project_id; @@ -41,7 +41,7 @@ select p.name as project_name, i.name as instrument_name from project_instrument pi inner join project p on pi.project_id = p.id inner join instrument i on 
pi.instrument_id = i.id -where pi.instrument_id in (sqlc.arg(instrument_ids)::uuid[]) +where pi.instrument_id = any(sqlc.arg(instrument_ids)::uuid[]) and not exists ( select 1 from v_profile_project_roles ppr where ppr.profile_id = sqlc.arg(profile_id) @@ -56,7 +56,7 @@ from project_instrument pi inner join project p on pi.project_id = p.id inner join instrument i on pi.instrument_id = i.id where pi.instrument_id = sqlc.arg(instrument_id) -and pi.project_id in (sqlc.arg(project_ids)::uuid[]) +and pi.project_id = any(sqlc.arg(project_ids)::uuid[]) and not exists ( select 1 from v_profile_project_roles ppr where profile_id = sqlc.arg(profile_id) diff --git a/api/queries/instrument_group.sql b/api/queries/instrument_group.sql index 9b3e8b28..9c426e71 100644 --- a/api/queries/instrument_group.sql +++ b/api/queries/instrument_group.sql @@ -1,14 +1,12 @@ -- name: InstrumentGroupList :many select * -from v_instrument_group -where not deleted; +from v_instrument_group; --- name: InstrumentGroupGet :many +-- name: InstrumentGroupGet :one select * from v_instrument_group -where not deleted -and id=$1; +where id=$1; -- name: InstrumentGroupListForProject :many @@ -18,26 +16,26 @@ where ig.project_id = $1; -- name: InstrumentGroupCreate :one -insert into instrument_group (slug, name, description, creator, create_date, project_id) +insert into instrument_group (slug, name, description, created_by, created_at, project_id) values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) -returning *; +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id; -- name: InstrumentGroupCreateBatch :batchone -insert into instrument_group (slug, name, description, creator, create_date, project_id) +insert into instrument_group (slug, name, description, created_by, created_at, project_id) values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) -returning *; +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, 
project_id; -- name: InstrumentGroupUpdate :one update instrument_group set name = $2, description = $3, - updater = $4, - update_date = $5, + updated_by = $4, + updated_at = $5, project_id = $6 where id = $1 - returning *; +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id; -- name: InstrumentGroupDeleteFlag :exec diff --git a/api/queries/instrument_incl.sql b/api/queries/instrument_incl.sql index 0e792127..0d783dc0 100644 --- a/api/queries/instrument_incl.sql +++ b/api/queries/instrument_incl.sql @@ -77,6 +77,6 @@ where m1.instrument_id=sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_tim union select m2.instrument_id, m2.time, m2.measurements from v_incl_measurement m2 -where m2.time in (select o.initial_time from incl_opts o where o.instrument_id = sqlc.arg(instrument_id)) +where m2.time = any(select o.initial_time from incl_opts o where o.instrument_id = sqlc.arg(instrument_id)) and m2.instrument_id = sqlc.arg(instrument_id) order by time asc; diff --git a/api/queries/instrument_ipi.sql b/api/queries/instrument_ipi.sql index e0edfab0..dadd858b 100644 --- a/api/queries/instrument_ipi.sql +++ b/api/queries/instrument_ipi.sql @@ -73,6 +73,6 @@ where m1.instrument_id=sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_tim union select m2.instrument_id, m2.time, m2.measurements from v_ipi_measurement m2 -where m2.time in (select o.initial_time from ipi_opts o where o.instrument_id = sqlc.arg(instrument_id)) +where m2.time = any(select o.initial_time from ipi_opts o where o.instrument_id = sqlc.arg(instrument_id)) and m2.instrument_id = sqlc.arg(instrument_id) order by time asc; diff --git a/api/queries/instrument_note.sql b/api/queries/instrument_note.sql index 629ed56a..16681a33 100644 --- a/api/queries/instrument_note.sql +++ b/api/queries/instrument_note.sql @@ -11,15 +11,15 @@ where instrument_id = $1; -- name: InstrumentNoteCreate :one -insert into instrument_note (instrument_id, title, body, time, 
creator, create_date) +insert into instrument_note (instrument_id, title, body, time, created_by, created_at) values ($1, $2, $3, $4, $5, $6) -returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at; -- name: InstrumentNoteCreateBatch :batchone -insert into instrument_note (instrument_id, title, body, time, creator, create_date) +insert into instrument_note (instrument_id, title, body, time, created_by, created_at) values ($1, $2, $3, $4, $5, $6) -returning id, instrument_id, title, body, time, creator, create_date, updater, update_date; +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at; -- name: InstrumentNoteUpdate :one @@ -27,8 +27,8 @@ update instrument_note set title=$2, body=$3, time=$4, - updater=$5, - update_date=$6 + updated_by=$5, + updated_at=$6 where id = $1 returning *; diff --git a/api/queries/instrument_saa.sql b/api/queries/instrument_saa.sql index 4d0a0324..708344e8 100644 --- a/api/queries/instrument_saa.sql +++ b/api/queries/instrument_saa.sql @@ -77,6 +77,6 @@ where m1.instrument_id = sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_t union select m2.instrument_id, m2.time, m2.measurements from v_saa_measurement m2 -where m2.time in (select o.initial_time from saa_opts o where o.instrument_id = sqlc.arg(instrument_id)) +where m2.time = any(select o.initial_time from saa_opts o where o.instrument_id = sqlc.arg(instrument_id)) and m2.instrument_id = sqlc.arg(instrument_id) order by time asc; diff --git a/api/queries/measurement.sql b/api/queries/measurement.sql index c88979ff..994fa1ad 100644 --- a/api/queries/measurement.sql +++ b/api/queries/measurement.sql @@ -1,8 +1,8 @@ --- name: TimeseriesMeasurementListRange :many +-- name: TimeseriesMeasurementListForRange :many select * from v_timeseries_measurement where timeseries_id=sqlc.arg(timeseries_id) -and time > 
sqlc.arg(after_time) -and time < sqlc.arg(before_time); +and time > sqlc.arg(after) +and time < sqlc.arg(before); -- name: TimeseriesMeasurementGetMostRecent :one diff --git a/api/queries/plot_config.sql b/api/queries/plot_config.sql index 867caa54..ac4fffd7 100644 --- a/api/queries/plot_config.sql +++ b/api/queries/plot_config.sql @@ -11,7 +11,7 @@ where id = $1; -- name: PlotConfigCreate :one -insert into plot_configuration (slug, name, project_id, creator, create_date, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) +insert into plot_configuration (slug, name, project_id, created_by, created_at, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) returning id; @@ -21,7 +21,7 @@ values ($1, $2, $3, $4, $5, $6, $7); -- name: PlotConfigUpdate :exec -update plot_configuration set name = $3, updater = $4, update_date = $5 where project_id = $1 and id = $2; +update plot_configuration set name = $3, updated_by = $4, updated_at = $5 where project_id = $1 and id = $2; -- name: PlotConfigDelete :exec diff --git a/api/queries/project.sql b/api/queries/project.sql index 75022fd0..4f33d93a 100644 --- a/api/queries/project.sql +++ b/api/queries/project.sql @@ -42,13 +42,13 @@ select * from v_project where id = $1; -- name: ProjectCreateBatch :batchone -insert into project (federal_id, slug, name, district_id, creator, create_date) +insert into project (federal_id, slug, name, district_id, created_by, created_at) values ($1, slugify($2, 'project'), $2, $3, $4, $5) returning id, slug; -- name: ProjectUpdate :one -update project set name=$2, updater=$3, update_date=$4, district_id=$5, federal_id=$6 where id=$1 returning id; +update project set name=$2, updated_by=$3, updated_at=$4, district_id=$5, federal_id=$6 where id=$1 returning id; -- name: ProjectUpdateImage :exec diff --git a/api/queries/report_config.sql b/api/queries/report_config.sql index 03e9c83d..c6a60f22 100644 --- a/api/queries/report_config.sql +++ 
b/api/queries/report_config.sql @@ -1,6 +1,6 @@ -- name: ReportConfigCreate :one insert into report_config ( - name, slug, project_id, creator, description, date_range, date_range_enabled, + name, slug, project_id, created_by, description, date_range, date_range_enabled, show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled ) values ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) @@ -23,7 +23,7 @@ select * from v_report_config where id = $1; -- name: ReportConfigUpdate :exec update report_config set name=$2, -updater=$3, update_date=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, +updated_by=$3, updated_at=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 where id=$1; @@ -52,12 +52,12 @@ delete from report_config_plot_config where report_config_id=$1; -- name: ReportDownloadJobGet :one -select * from report_download_job where id=$1 and creator=$2; +select * from report_download_job where id=$1 and created_by=$2; -- name: ReportDownloadJobCreate :one -insert into report_download_job (report_config_id, creator) values ($1, $2) returning *; +insert into report_download_job (report_config_id, created_by) values ($1, $2) returning *; -- name: ReportDownloadJobUpdate :exec -update report_download_job set status=$2, progress=$3, progress_update_date=$4, file_key=$5, file_expiry=$6 where id=$1; +update report_download_job set status=$2, progress=$3, progress_updated_at=$4, file_key=$5, file_expiry=$6 where id=$1; diff --git a/api/queries/submittal.sql b/api/queries/submittal.sql index 22a797f9..37d4cab5 100644 --- a/api/queries/submittal.sql +++ b/api/queries/submittal.sql @@ -2,8 +2,8 @@ select * from v_submittal where project_id = sqlc.arg(project_id) -and (sqlc.arg(show_incomplete_missing)::boolean = false or (completion_date is null and not marked_as_missing)) -order by due_date desc, 
alert_type_name asc; +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completed_at is null and not marked_as_missing)) +order by due_at desc, alert_type_name asc; -- name: SubmittalListForInstrument :many @@ -11,30 +11,30 @@ select sub.* from v_submittal sub inner join alert_config_instrument aci on aci.alert_config_id = sub.alert_config_id where aci.instrument_id = sqlc.arg(instrument_id) -and (sqlc.arg(show_incomplete_missing)::boolean = false or (completion_date is null and not marked_as_missing)) -order by sub.due_date desc; +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completed_at is null and not marked_as_missing)) +order by sub.due_at desc; -- name: SubmittalListForAlertConfig :many select * from v_submittal where alert_config_id = sqlc.arg(alert_config_id) -and (sqlc.arg(show_incomplete_missing)::boolean = false or (completion_date is null and not marked_as_missing)) -order by due_date desc; +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completed_at is null and not marked_as_missing)) +order by due_at desc; -- name: SubmittalListUnverifiedMissing :many select * from v_submittal -where completion_date is null +where completed_at is null and not marked_as_missing -order by due_date desc; +order by due_at desc; -- name: SubmittalUpdate :exec update submittal set submittal_status_id = $2, - completion_date = $3, + completed_at = $3, warning_sent = $4 where id = $1; @@ -44,8 +44,8 @@ update submittal set submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, marked_as_missing = true where id = $1 -and completion_date is null -and now() > due_date; +and completed_at is null +and now() > due_at; -- name: SubmittalUpdateVerifyMissingForAlertConfig :exec @@ -53,5 +53,5 @@ update submittal set submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, marked_as_missing = true where alert_config_id = $1 -and completion_date is null -and now() > due_date; +and completed_at is null +and now() > due_at; 
diff --git a/api/queries/timeseries.sql b/api/queries/timeseries.sql index 680769a7..c03e5f74 100644 --- a/api/queries/timeseries.sql +++ b/api/queries/timeseries.sql @@ -55,7 +55,7 @@ returning id, instrument_id, slug, name, parameter_id, unit_id, type; -- name: TimeseriesUpdate :exec -update timeseries set name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 +update timeseries set name=$2, instrument_id=$3, parameter_id=$4, unit_id=$5 where id = $1; diff --git a/api/queries/timeseries_calculated.sql b/api/queries/timeseries_calculated.sql index 3e0e688d..4b2f39c3 100644 --- a/api/queries/timeseries_calculated.sql +++ b/api/queries/timeseries_calculated.sql @@ -24,52 +24,31 @@ from v_timeseries_computed where instrument_id = $1; --- name: TimeseriesComputedCreate :one -insert into timeseries ( - instrument_id, - parameter_id, - unit_id, - slug, - name, - type -) values ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') -returning id; - - -- name: CalculationCreate :exec insert into calculation (timeseries_id, contents) values ($1,$2); --- name: CalculationCreateOrUpdate :exec -with p as ( - select contents from calculation where timeseries_id=$1 -) -insert into calculation (timeseries_id, contents) values ($1, $2) -on conflict (timeseries_id) do update set contents=coalesce(excluded.contents, p.contents); +-- name: CalculationUpdate :exec +update calculation set contents=$2 where timeseries_id=$1; -- name: TimeseriesComputedDelete :exec -delete from timeseries where id = $1 and id in (select timeseries_id from calculation); +delete from timeseries where id = $1 and id = any(select timeseries_id from calculation); --- name: TimeseriesComputedCreateOrUpdate :exec -with p as ( - select * from timeseries - where id=$1 -) -insert into timeseries ( - id, - instrument_id, - parameter_id, - unit_id, - slug, - name, - type -) values ($1, $2, $3, $4, slugify($5, 'timeseries'), $5, 'computed') -on conflict (id) do update set - 
instrument_id=coalesce(excluded.instrument_id, p.instrument_id), - parameter_id=coalesce(excluded.parameter_id, p.parameter_id), - unit_id=coalesce(excluded.unit_id, p.unit_id), - slug=coalesce(excluded.slug, p.slug), - name=coalesce(excluded.name, p.name), - type='computed'; +-- the below queried are needed becuase the slug is currently used as the variable name, it would +-- be better if we used a generated column for this on the timeseries table, maybe converted to snake_case + +-- name: TimeseriesComputedCreate :one +insert into timeseries (instrument_id, parameter_id, unit_id, slug, name, type) +values ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') +returning id; + + +-- name: TimeseriesComputedUpdate :exec +update timeseries set + parameter_id=$2, + unit_id=$3, + slug=$4, + name=$5 +where id = $1; diff --git a/api/queries/uploader.sql b/api/queries/uploader.sql index 1cb173c6..c3b105bd 100644 --- a/api/queries/uploader.sql +++ b/api/queries/uploader.sql @@ -1,9 +1,9 @@ -- name: UploaderConfigListForProject :many -select * from uploader_config where project_id=$1; +select * from v_uploader_config where project_id=$1; -- name: UploaderConfigCreate :one -insert into uploader_config (project_id, name, slug, description, create_date, creator, type, tz_name) +insert into uploader_config (project_id, name, slug, description, created_at, created_by, type, tz_name) values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) returning id; @@ -12,8 +12,8 @@ returning id; update uploader_config set name=$2, description=$3, - update_date=$4, - updater=$5, + updated_by=$4, + updated_at=$5, type=$6, tz_name=$7 where id=$1; diff --git a/go.work.sum b/go.work.sum index 62ca0f7d..38e3ed0a 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1,4 +1,5 @@ dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= github.com/Azure/go-ansiterm 
v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg= github.com/ClickHouse/clickhouse-go/v2 v2.27.1/go.mod h1:XvcaX7ai9T9si83rZ0cB3y2upq9AYMwdj16Trqm+sPg= @@ -26,6 +27,7 @@ github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7F github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= @@ -33,6 +35,7 @@ github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2V github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak= @@ -41,6 +44,7 @@ github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ib github.com/lucasjones/reggen v0.0.0-20200904144131-37ba4fa293bb/go.mod h1:5ELEyG+X8f+meRWHuqUOewBOhvHkl7M76pdGEansxW4= 
github.com/lufia/plan9stats v0.0.0-20240226150601-1dcf7310316a/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= github.com/mfridman/xflag v0.0.0-20240825232106-efb77353e578/go.mod h1:/483ywM5ZO5SuMVjrIGquYNE5CzLrj5Ux/LxWWnjRaE= github.com/microsoft/go-mssqldb v1.7.2/go.mod h1:kOvZKUdrhhFQmxLZqbwUV0rHkNkZpthMITIb2Ko1IoA= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= @@ -78,6 +82,7 @@ github.com/ydb-platform/ydb-go-genproto v0.0.0-20240528144234-5d5a685e41f7/go.mo github.com/ydb-platform/ydb-go-sdk/v3 v3.76.5/go.mod h1:IHwuXyolaAmGK2Dp7+dlhsnXphG1pwCoaP/OITT3+tU= github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= +go.mongodb.org/mongo-driver v1.11.4 h1:4ayjakA013OdpGyL2K3ZqylTac/rMjrJOMZ1EHizXas= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= go.opentelemetry.io/otel v1.26.0/go.mod h1:UmLkJHUAidDval2EICqBMbnAd0/m2vmpf/dAM+fvFs4= diff --git a/report/generated.d.ts b/report/generated.d.ts index 6fd296d3..9c5ebfb9 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -18,7 +18,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Submittal"][]; + "application/json": components["schemas"]["db.VSubmittal"][]; }; }; /** @description Bad Request */ @@ -101,14 +101,14 @@ export interface paths { /** @description alert subscription payload */ requestBody: { content: { - "application/json": components["schemas"]["AlertSubscription"]; + "application/json": 
components["schemas"]["dto.AlertSubscription"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertSubscription"][]; + "application/json": components["schemas"]["db.AlertProfileSubscription"][]; }; }; /** @description Bad Request */ @@ -139,7 +139,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AwarePlatformParameterConfig"][]; + "application/json": components["schemas"]["service.AwarePlatformParameterConfig"][]; }; }; /** @description Bad Request */ @@ -170,7 +170,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AwareParameter"][]; + "application/json": components["schemas"]["db.AwareParameterListRow"][]; }; }; /** @description Bad Request */ @@ -206,14 +206,14 @@ export interface paths { /** @description datalogger payload */ requestBody: { content: { - "application/json": components["schemas"]["Datalogger"]; + "application/json": components["schemas"]["dto.Datalogger"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DataloggerWithKey"][]; + "application/json": components["schemas"]["service.DataloggerWithKey"]; }; }; /** @description Bad Request */ @@ -254,7 +254,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Datalogger"]; + "application/json": components["schemas"]["db.VDatalogger"]; }; }; /** @description Bad Request */ @@ -292,14 +292,14 @@ export interface paths { /** @description datalogger payload */ requestBody: { content: { - "*/*": components["schemas"]["Datalogger"]; + "*/*": components["schemas"]["dto.Datalogger"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Datalogger"]; + "application/json": components["schemas"]["db.VDatalogger"]; }; }; /** @description Bad Request */ @@ 
-380,14 +380,14 @@ export interface paths { /** @description equivalency table payload */ requestBody: { content: { - "*/*": components["schemas"]["EquivalencyTable"]; + "*/*": components["schemas"]["dto.EquivalencyTable"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EquivalencyTable"]; + "application/json": components["schemas"]["db.VDataloggerEquivalencyTable"]; }; }; /** @description Bad Request */ @@ -428,7 +428,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DataloggerWithKey"]; + "application/json": components["schemas"]["service.DataloggerWithKey"]; }; }; /** @description Bad Request */ @@ -471,7 +471,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EquivalencyTable"][]; + "application/json": components["schemas"]["db.VDataloggerEquivalencyTable"][]; }; }; /** @description Bad Request */ @@ -511,14 +511,14 @@ export interface paths { /** @description equivalency table payload */ requestBody: { content: { - "*/*": components["schemas"]["EquivalencyTable"]; + "*/*": components["schemas"]["dto.EquivalencyTable"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EquivalencyTable"]; + "application/json": components["schemas"]["db.VDataloggerEquivalencyTable"]; }; }; /** @description Bad Request */ @@ -558,14 +558,14 @@ export interface paths { /** @description equivalency table payload */ requestBody: { content: { - "*/*": components["schemas"]["EquivalencyTable"]; + "*/*": components["schemas"]["dto.EquivalencyTable"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EquivalencyTable"]; + "application/json": components["schemas"]["db.VDataloggerEquivalencyTable"]; }; }; /** @description Bad Request */ @@ -698,7 +698,9 @@ export interface paths { /** 
@description OK */ 200: { content: { - "application/json": components["schemas"]["DataloggerTablePreview"]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -741,7 +743,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DataloggerTablePreview"]; + "application/json": components["schemas"]["db.VDataloggerPreview"]; }; }; /** @description Bad Request */ @@ -778,7 +780,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Datalogger"][]; + "application/json": components["schemas"]["db.VDatalogger"][]; }; }; /** @description Bad Request */ @@ -809,7 +811,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["District"][]; + "application/json": components["schemas"]["db.VDistrict"][]; }; }; /** @description Bad Request */ @@ -840,7 +842,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["TimezoneOption"][]; + "application/json": components["schemas"]["db.PgTimezoneNamesListRow"][]; }; }; /** @description Bad Request */ @@ -871,7 +873,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DomainMap"]; + "application/json": components["schemas"]["service.DomainMap"]; }; }; /** @description Bad Request */ @@ -908,7 +910,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EmailAutocompleteResult"][]; + "application/json": components["schemas"]["db.EmailAutocompleteListRow"][]; }; }; /** @description Bad Request */ @@ -946,7 +948,7 @@ export interface paths { 200: { content: { "application/json": { - [key: string]: components["schemas"]["MeasurementCollectionLean"]; + [key: string]: components["schemas"]["db.MeasurementCollectionLean"][]; }[]; }; }; @@ -978,7 
+980,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CalculatedTimeseries"][]; + "application/json": components["schemas"]["db.TimeseriesComputedListForInstrumentRow"][]; }; }; /** @description Bad Request */ @@ -1010,8 +1012,8 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { "application/json": { [key: string]: unknown; @@ -1056,7 +1058,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CalculatedTimeseries"][]; + "application/json": components["schemas"]["dto.CalculatedTimeseries"][]; }; }; /** @description Bad Request */ @@ -1128,9 +1130,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": { - [key: string]: unknown; - }[]; + "application/json": components["schemas"]["service.Healthcheck"]; }; }; }; @@ -1146,10 +1146,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["Heartbeat"]; + "application/json": components["schemas"]["service.Heartbeat"]; }; }; }; @@ -1162,7 +1162,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Heartbeat"]; + "application/json": components["schemas"]["service.Heartbeat"]; }; }; }; @@ -1175,7 +1175,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Heartbeat"][]; + "application/json": components["schemas"]["service.Heartbeat"][]; }; }; }; @@ -1188,46 +1188,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Home"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; 
- }; - "/inclinometer_explorer": { - /** list inclinometer timeseries measurements for explorer page */ - post: { - /** @description array of inclinometer instrument uuids */ - requestBody: { - content: { - "application/json": string[]; - }; - }; - responses: { - /** @description OK */ - 200: { - content: { - "application/json": { - [key: string]: components["schemas"]["InclinometerMeasurementCollectionLean"]; - }[]; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; + "application/json": components["schemas"]["db.HomeGetRow"]; }; }; /** @description Internal Server Error */ @@ -1246,7 +1207,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"][]; + "application/json": components["schemas"]["db.VInstrumentGroup"][]; }; }; /** @description Bad Request */ @@ -1280,14 +1241,14 @@ export interface paths { /** @description instrument group payload */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentGroup"]; + "*/*": components["schemas"]["dto.InstrumentGroup"]; }; }; responses: { /** @description Created */ 201: { content: { - "application/json": components["schemas"]["InstrumentGroup"]; + "application/json": components["schemas"]["db.InstrumentGroup"][]; }; }; /** @description Bad Request */ @@ -1324,7 +1285,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"]; + "application/json": components["schemas"]["db.VInstrumentGroup"]; }; }; /** @description Bad Request */ @@ -1362,14 +1323,14 @@ export interface paths { /** @description instrument group payload */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentGroup"]; + "*/*": components["schemas"]["dto.InstrumentGroup"]; }; 
}; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"]; + "application/json": components["schemas"]["db.InstrumentGroup"]; }; }; /** @description Bad Request */ @@ -1408,7 +1369,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -1445,7 +1408,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"][]; + "application/json": components["schemas"]["db.VInstrument"][]; }; }; /** @description Bad Request */ @@ -1568,7 +1531,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.VTimeseries"][]; }; }; /** @description Bad Request */ @@ -1605,7 +1568,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"]; + "application/json": { + [key: string]: components["schemas"]["db.MeasurementCollectionLean"][]; + }; }; }; /** @description Bad Request */ @@ -1629,14 +1594,16 @@ export interface paths { }; }; }; - "/instruments": { - /** lists all instruments */ + "/instruments/count": { + /** gets the total number of non deleted instruments in the system */ get: { responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -1660,14 +1627,26 @@ export interface paths { }; }; }; - "/instruments/count": { - /** gets the total number of non deleted instruments in the system */ + "/instruments/incl/{instrument_id}/measurements": { + /** creates instrument notes */ get: { + parameters: { + query: { + /** @description 
after time */ + after?: string; + /** @description before time */ + before: string; + }; + path: { + /** @description instrument uuid */ + instrument_id: string; + }; + }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentCount"]; + "application/json": components["schemas"]["db.VInclMeasurement"][]; }; }; /** @description Bad Request */ @@ -1691,16 +1670,10 @@ export interface paths { }; }; }; - "/instruments/ipi/{instrument_id}/measurements": { - /** creates instrument notes */ + "/instruments/incl/{instrument_id}/segments": { + /** gets all incl segments for an instrument */ get: { parameters: { - query: { - /** @description after time */ - after?: string; - /** @description before time */ - before: string; - }; path: { /** @description instrument uuid */ instrument_id: string; @@ -1710,7 +1683,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["IpiMeasurements"][]; + "application/json": components["schemas"]["db.VInclSegment"][]; }; }; /** @description Bad Request */ @@ -1733,21 +1706,29 @@ export interface paths { }; }; }; - }; - "/instruments/ipi/{instrument_id}/segments": { - /** gets all ipi segments for an instrument */ - get: { + /** updates multiple segments for an incl instrument */ + put: { parameters: { + query?: { + /** @description api key */ + key?: string; + }; path: { /** @description instrument uuid */ instrument_id: string; }; }; + /** @description incl instrument segments payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.InclSegment"][]; + }; + }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["IpiSegment"][]; + "application/json": components["schemas"]["dto.InclSegment"][]; }; }; /** @description Bad Request */ @@ -1770,29 +1751,27 @@ export interface paths { }; }; }; - /** updates multiple segments for an ipi instrument */ - put: { + }; + 
"/instruments/ipi/{instrument_id}/measurements": { + /** creates instrument notes */ + get: { parameters: { - query?: { - /** @description api key */ - key?: string; + query: { + /** @description after time */ + after?: string; + /** @description before time */ + before: string; }; path: { /** @description instrument uuid */ instrument_id: string; }; }; - /** @description ipi instrument segments payload */ - requestBody: { - content: { - "*/*": components["schemas"]["IpiSegment"][]; - }; - }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["IpiSegment"][]; + "application/json": components["schemas"]["db.VIpiMeasurement"][]; }; }; /** @description Bad Request */ @@ -1816,14 +1795,65 @@ export interface paths { }; }; }; - "/instruments/notes": { - /** gets all instrument notes */ + "/instruments/ipi/{instrument_id}/segments": { + /** gets all ipi segments for an instrument */ get: { + parameters: { + path: { + /** @description instrument uuid */ + instrument_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["db.VIpiSegment"][]; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** updates multiple segments for an ipi instrument */ + put: { + parameters: { + query?: { + /** @description api key */ + key?: string; + }; + path: { + /** @description instrument uuid */ + instrument_id: string; + }; + }; + /** @description ipi instrument segments payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.IpiSegment"][]; + }; + }; responses: { /** @description OK */ 200: 
{ content: { - "application/json": components["schemas"]["InstrumentNote"][]; + "application/json": components["schemas"]["dto.IpiSegment"][]; }; }; /** @description Bad Request */ @@ -1846,6 +1876,8 @@ export interface paths { }; }; }; + }; + "/instruments/notes": { /** creates instrument notes */ post: { parameters: { @@ -1857,14 +1889,14 @@ export interface paths { /** @description instrument note collection payload */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentNoteCollection"]; + "*/*": components["schemas"]["dto.InstrumentNoteCollection"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["InstrumentNote"][]; + "application/json": components["schemas"]["db.InstrumentNote"][]; }; }; /** @description Bad Request */ @@ -1901,7 +1933,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentNote"]; + "application/json": components["schemas"]["db.InstrumentNote"]; }; }; /** @description Bad Request */ @@ -1939,14 +1971,14 @@ export interface paths { /** @description instrument note collection payload */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentNote"]; + "*/*": components["schemas"]["dto.InstrumentNote"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.InstrumentNote"][]; }; }; /** @description Bad Request */ @@ -1989,7 +2021,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["SaaMeasurements"][]; + "application/json": components["schemas"]["db.VSaaMeasurement"][]; }; }; /** @description Bad Request */ @@ -2026,7 +2058,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["SaaSegment"][]; + "application/json": 
components["schemas"]["db.VSaaSegment"][]; }; }; /** @description Bad Request */ @@ -2064,14 +2096,14 @@ export interface paths { /** @description saa instrument segments payload */ requestBody: { content: { - "*/*": components["schemas"]["SaaSegment"][]; + "*/*": components["schemas"]["dto.SaaSegment"][]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["SaaSegment"][]; + "application/json": components["schemas"]["dto.SaaSegment"][]; }; }; /** @description Bad Request */ @@ -2108,7 +2140,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"]; + "application/json": components["schemas"]["db.VInstrument"]; }; }; /** @description Bad Request */ @@ -2145,7 +2177,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentNote"][]; + "application/json": components["schemas"]["db.InstrumentNote"][]; }; }; /** @description Bad Request */ @@ -2227,7 +2259,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentStatus"][]; + "application/json": components["schemas"]["db.VInstrumentStatus"][]; }; }; /** @description Bad Request */ @@ -2265,7 +2297,7 @@ export interface paths { /** @description instrument status collection paylaod */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentStatusCollection"]; + "*/*": components["schemas"]["dto.InstrumentStatusCollection"]; }; }; responses: { @@ -2313,7 +2345,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VInstrumentStatus"][]; }; }; /** @description Bad Request */ @@ -2397,7 +2429,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Submittal"][]; + 
"application/json": components["schemas"]["db.VSubmittal"][]; }; }; /** @description Bad Request */ @@ -2436,7 +2468,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"]; + "application/json": components["schemas"]["db.VTimeseries"]; }; }; /** @description Bad Request */ @@ -2483,7 +2515,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"]; + "application/json": components["schemas"]["db.VTimeseriesMeasurement"][]; }; }; /** @description Bad Request */ @@ -2528,7 +2560,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"]; + "application/json": { + [key: string]: components["schemas"]["db.MeasurementCollectionLean"][]; + }; }; }; /** @description Bad Request */ @@ -2565,7 +2599,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertSubscription"][]; + "application/json": components["schemas"]["db.AlertProfileSubscription"][]; }; }; /** @description Bad Request */ @@ -2605,7 +2639,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Alert"][]; + "application/json": components["schemas"]["db.AlertListForProfileRow"][]; }; }; /** @description Bad Request */ @@ -2647,10 +2681,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["Alert"]; + "application/json": components["schemas"]["db.AlertGetRow"]; }; }; /** @description Bad Request */ @@ -2695,7 +2729,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Alert"]; + "application/json": components["schemas"]["db.AlertGetRow"]; }; }; /** @description Bad Request */ 
@@ -2726,7 +2760,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Profile"]; + "application/json": components["schemas"]["db.VProfile"]; }; }; /** @description Bad Request */ @@ -2763,7 +2797,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"][]; + "application/json": components["schemas"]["db.VProject"][]; }; }; /** @description Bad Request */ @@ -2794,7 +2828,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Token"]; + "application/json": components["schemas"]["service.Token"]; }; }; /** @description Bad Request */ @@ -2857,37 +2891,6 @@ export interface paths { }; }; }; - "/opendcs/sites": { - /** lists all instruments, represented as opendcs sites */ - get: { - responses: { - /** @description OK */ - 200: { - content: { - "text/xml": components["schemas"]["Site"][]; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "text/xml": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "text/xml": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "text/xml": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - }; "/profiles": { /** creates a user profile */ post: { @@ -2895,7 +2898,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Profile"]; + "application/json": components["schemas"]["db.ProfileCreateRow"]; }; }; /** @description Bad Request */ @@ -2932,7 +2935,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"][]; + "application/json": components["schemas"]["db.VProject"][]; }; }; /** @description Bad Request */ @@ -2966,14 +2969,14 @@ export interface paths { /** @description 
project collection payload */ requestBody: { content: { - "*/*": components["schemas"]["Project"][]; + "*/*": components["schemas"]["dto.Project"][]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["IDSlugName"][]; + "application/json": components["schemas"]["db.ProjectCreateBatchRow"][]; }; }; /** @description Bad Request */ @@ -3004,7 +3007,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ProjectCount"]; + "application/json": number; }; }; /** @description Bad Request */ @@ -3041,7 +3044,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"]; + "application/json": components["schemas"]["db.VProject"]; }; }; /** @description Bad Request */ @@ -3079,14 +3082,14 @@ export interface paths { /** @description project payload */ requestBody: { content: { - "*/*": components["schemas"]["Project"]; + "*/*": components["schemas"]["dto.Project"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"]; + "application/json": components["schemas"]["db.VProject"]; }; }; /** @description Bad Request */ @@ -3164,7 +3167,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VAlertConfig"][]; }; }; /** @description Bad Request */ @@ -3202,14 +3205,14 @@ export interface paths { /** @description alert config payload */ requestBody: { content: { - "application/json": components["schemas"]["AlertConfig"]; + "application/json": components["schemas"]["dto.AlertConfig"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"]; + "application/json": components["schemas"]["db.VAlertConfig"]; }; }; /** 
@description Bad Request */ @@ -3248,7 +3251,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"]; + "application/json": components["schemas"]["db.VAlertConfig"]; }; }; /** @description Bad Request */ @@ -3288,14 +3291,14 @@ export interface paths { /** @description alert config payload */ requestBody: { content: { - "application/json": components["schemas"]["AlertConfig"]; + "application/json": components["schemas"]["dto.AlertConfig"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VAlertConfig"]; }; }; /** @description Bad Request */ @@ -3336,7 +3339,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -3373,7 +3378,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.CollectionGroup"][]; }; }; /** @description Bad Request */ @@ -3414,14 +3419,14 @@ export interface paths { /** @description collection group payload */ requestBody: { content: { - "*/*": components["schemas"]["CollectionGroup"]; + "*/*": components["schemas"]["dto.CollectionGroup"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CollectionGroup"][]; + "application/json": components["schemas"]["db.CollectionGroup"][]; }; }; /** @description Bad Request */ @@ -3460,7 +3465,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CollectionGroupDetails"]; + "application/json": components["schemas"]["db.VCollectionGroupDetail"]; }; }; /** @description Bad Request */ @@ -3500,14 +3505,14 @@ 
export interface paths { /** @description collection group payload */ requestBody: { content: { - "*/*": components["schemas"]["CollectionGroup"]; + "*/*": components["schemas"]["dto.CollectionGroup"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CollectionGroup"]; + "application/json": components["schemas"]["db.CollectionGroup"]; }; }; /** @description Bad Request */ @@ -3637,8 +3642,8 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { "application/json": { [key: string]: unknown; @@ -3724,7 +3729,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DistrictRollup"][]; + "application/json": components["schemas"]["db.VDistrictRollup"][]; }; }; /** @description Bad Request */ @@ -3761,7 +3766,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DistrictRollup"][]; + "application/json": components["schemas"]["db.VDistrictRollup"][]; }; }; /** @description Bad Request */ @@ -3798,7 +3803,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Evaluation"][]; + "application/json": components["schemas"]["db.VEvaluation"][]; }; }; /** @description Bad Request */ @@ -3836,14 +3841,14 @@ export interface paths { /** @description evaluation payload */ requestBody: { content: { - "*/*": components["schemas"]["Evaluation"]; + "*/*": components["schemas"]["dto.Evaluation"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["Evaluation"]; + "application/json": components["schemas"]["db.VEvaluation"]; }; }; /** @description Bad Request */ @@ -3882,7 +3887,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": 
components["schemas"]["Evaluation"]; + "application/json": components["schemas"]["db.VEvaluation"]; }; }; /** @description Bad Request */ @@ -3922,14 +3927,14 @@ export interface paths { /** @description evaluation payload */ requestBody: { content: { - "*/*": components["schemas"]["Evaluation"]; + "*/*": components["schemas"]["dto.Evaluation"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Evaluation"]; + "application/json": components["schemas"]["db.VEvaluation"]; }; }; /** @description Bad Request */ @@ -3970,7 +3975,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": { + [key: string]: unknown; + }[]; }; }; /** @description Bad Request */ @@ -4037,30 +4044,20 @@ export interface paths { }; }; }; - "/projects/{project_id}/inclinometer_measurements": { - /** creates or updates one or more inclinometer measurements */ - post: { + "/projects/{project_id}/instrument_groups": { + /** lists instrument groups associated with a project */ + get: { parameters: { - query?: { - /** @description api key */ - key?: string; - }; path: { /** @description project uuid */ project_id: string; }; }; - /** @description inclinometer measurement collections */ - requestBody: { - content: { - "*/*": components["schemas"]["InclinometerMeasurementCollectionCollection"]; - }; - }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InclinometerMeasurementCollection"][]; + "application/json": components["schemas"]["db.VInstrumentGroup"][]; }; }; /** @description Bad Request */ @@ -4084,8 +4081,8 @@ export interface paths { }; }; }; - "/projects/{project_id}/instrument_groups": { - /** lists instrument groups associated with a project */ + "/projects/{project_id}/instruments": { + /** lists instruments associated with a project */ get: { parameters: { path: { @@ -4097,7 +4094,7 @@ 
export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"][]; + "application/json": components["schemas"]["db.VInstrument"][]; }; }; /** @description Bad Request */ @@ -4120,68 +4117,31 @@ export interface paths { }; }; }; - }; - "/projects/{project_id}/instruments": { - /** lists instruments associated with a project */ - get: { + /** accepts an array of instruments for bulk upload to the database */ + post: { parameters: { + query?: { + /** @description api key */ + key?: string; + }; path: { - /** @description project uuid */ + /** @description project id */ project_id: string; + /** @description instrument id */ + instrument_id: string; + }; + }; + /** @description instrument collection payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.Instrument"][]; }; }; responses: { - /** @description OK */ - 200: { - content: { - "application/json": components["schemas"]["Project"][]; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - /** accepts an array of instruments for bulk upload to the database */ - post: { - parameters: { - query?: { - /** @description api key */ - key?: string; - }; - path: { - /** @description project id */ - project_id: string; - /** @description instrument id */ - instrument_id: string; - }; - }; - /** @description instrument collection payload */ - requestBody: { - content: { - "*/*": components["schemas"]["Instrument"][]; - }; - }; - responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["IDSlugName"][]; 
+ "application/json": components["schemas"]["db.InstrumentCreateBatchRow"][]; }; }; /** @description Bad Request */ @@ -4226,14 +4186,14 @@ export interface paths { /** @description instrument uuids */ requestBody: { content: { - "*/*": components["schemas"]["ProjectInstrumentAssignments"]; + "*/*": components["schemas"]["dto.ProjectInstrumentAssignments"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentsValidation"]; + "application/json": components["schemas"]["service.InstrumentsValidation"]; }; }; /** @description Bad Request */ @@ -4275,14 +4235,14 @@ export interface paths { /** @description instrument payload */ requestBody: { content: { - "*/*": components["schemas"]["Instrument"]; + "*/*": components["schemas"]["dto.Instrument"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"]; + "application/json": components["schemas"]["db.VInstrument"]; }; }; /** @description Bad Request */ @@ -4364,7 +4324,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VAlertConfig"][]; }; }; /** @description Bad Request */ @@ -4406,10 +4366,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["AlertSubscription"]; + "application/json": components["schemas"]["db.AlertProfileSubscription"]; }; }; /** @description Bad Request */ @@ -4498,7 +4458,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Alert"][]; + "application/json": components["schemas"]["db.VAlert"][]; }; }; /** @description Bad Request */ @@ -4545,14 +4505,14 @@ export interface paths { /** @description project uuids */ requestBody: { content: { - "*/*": 
components["schemas"]["InstrumentProjectAssignments"]; + "*/*": components["schemas"]["dto.InstrumentProjectAssignments"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentsValidation"]; + "application/json": components["schemas"]["service.InstrumentsValidation"]; }; }; /** @description Bad Request */ @@ -4593,10 +4553,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["InstrumentsValidation"]; + "application/json": components["schemas"]["service.InstrumentsValidation"]; }; }; /** @description Bad Request */ @@ -4642,7 +4602,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentsValidation"]; + "application/json": components["schemas"]["service.InstrumentsValidation"]; }; }; /** @description Bad Request */ @@ -4681,7 +4641,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.VTimeseries"][]; }; }; /** @description Bad Request */ @@ -4721,14 +4681,14 @@ export interface paths { /** @description timeseries collection items payload */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesCollectionItems"]; + "*/*": components["schemas"]["dto.TimeseriesCollectionItems"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.TimeseriesCreateBatchRow"][]; }; }; /** @description Bad Request */ @@ -4814,7 +4774,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Evaluation"][]; + "application/json": components["schemas"]["dto.Evaluation"][]; }; }; /** @description Bad Request */ @@ -4856,14 +4816,14 
@@ export interface paths { /** @description instrument payload */ requestBody: { content: { - "*/*": components["schemas"]["Instrument"]; + "*/*": components["schemas"]["dto.Instrument"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"]; + "application/json": components["schemas"]["db.VInstrument"]; }; }; /** @description Bad Request */ @@ -4902,7 +4862,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.VTimeseries"][]; }; }; /** @description Bad Request */ @@ -4941,7 +4901,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["TimeseriesCwms"][]; + "application/json": components["schemas"]["db.VTimeseriesCwm"][]; }; }; /** @description Bad Request */ @@ -4977,14 +4937,16 @@ export interface paths { /** @description array of cwms timeseries to create */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesCwms"][]; + "*/*": components["schemas"]["dto.TimeseriesCwms"][]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["TimeseriesCwms"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -5024,14 +4986,16 @@ export interface paths { /** @description cwms timeseries to update */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesCwms"]; + "*/*": components["schemas"]["dto.TimeseriesCwms"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["TimeseriesCwms"][]; + "application/json": { + [key: string]: unknown; + }[]; }; }; /** @description Bad Request */ @@ -5072,7 +5036,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": 
components["schemas"]["ProjectMembership"][]; + "application/json": components["schemas"]["db.ProfileProjectRoleListForProjectRow"][]; }; }; /** @description Bad Request */ @@ -5117,7 +5081,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ProjectMembership"]; + "application/json": string; }; }; /** @description Bad Request */ @@ -5199,7 +5163,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"][]; + "application/json": components["schemas"]["db.VPlotConfiguration"][]; }; }; /** @description Bad Request */ @@ -5239,14 +5203,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigBullseyePlot"]; + "*/*": components["schemas"]["dto.PlotConfigBullseyePlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5288,14 +5252,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigBullseyePlot"]; + "*/*": components["schemas"]["dto.PlotConfigBullseyePlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5338,7 +5302,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfigMeasurementBullseyePlot"][]; + "application/json": components["schemas"]["db.PlotConfigMeasurementListBullseyeRow"][]; }; }; /** @description Bad Request */ @@ -5378,14 +5342,14 @@ export interface paths { /** @description plot config 
payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigContourPlot"]; + "*/*": components["schemas"]["dto.PlotConfigContourPlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5427,14 +5391,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigContourPlot"]; + "*/*": components["schemas"]["dto.PlotConfigContourPlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5479,7 +5443,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AggregatePlotConfigMeasurementsContourPlot"]; + "application/json": components["schemas"]["service.AggregatePlotConfigMeasurementsContourPlot"]; }; }; /** @description Bad Request */ @@ -5566,14 +5530,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigProfilePlot"]; + "*/*": components["schemas"]["dto.PlotConfigProfilePlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5615,14 +5579,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigProfilePlot"]; + "*/*": components["schemas"]["dto.PlotConfigProfilePlot"]; }; }; responses: { /** @description OK */ 200: { content: { - 
"application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5662,14 +5626,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigScatterLinePlot"]; + "*/*": components["schemas"]["dto.PlotConfigScatterLinePlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5711,14 +5675,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigScatterLinePlot"]; + "*/*": components["schemas"]["dto.PlotConfigScatterLinePlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5757,7 +5721,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5837,7 +5801,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"][]; + "application/json": components["schemas"]["db.VPlotConfiguration"][]; }; }; /** @description Bad Request */ @@ -5875,14 +5839,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigScatterLinePlot"]; + "*/*": components["schemas"]["dto.PlotConfigScatterLinePlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: 
{ - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5921,7 +5885,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5961,14 +5925,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigScatterLinePlot"]; + "*/*": components["schemas"]["dto.PlotConfigScatterLinePlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -6052,7 +6016,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ReportConfig"]; + "application/json": components["schemas"]["db.VReportConfig"]; }; }; /** @description Bad Request */ @@ -6090,14 +6054,14 @@ export interface paths { /** @description report config payload */ requestBody: { content: { - "application/json": components["schemas"]["ReportConfig"]; + "application/json": components["schemas"]["dto.ReportConfig"]; }; }; responses: { /** @description Created */ 201: { content: { - "application/json": components["schemas"]["ReportConfig"]; + "application/json": components["schemas"]["db.VReportConfig"]; }; }; /** @description Bad Request */ @@ -6139,7 +6103,7 @@ export interface paths { /** @description report config payload */ requestBody: { content: { - "application/json": components["schemas"]["ReportConfig"]; + "application/json": components["schemas"]["dto.ReportConfig"]; }; }; responses: { @@ -6234,7 +6198,7 @@ export interface paths { /** @description Created */ 201: { content: { - "application/json": 
components["schemas"]["ReportDownloadJob"]; + "application/json": components["schemas"]["db.ReportDownloadJob"]; }; }; /** @description Bad Request */ @@ -6279,7 +6243,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ReportDownloadJob"]; + "application/json": components["schemas"]["db.ReportDownloadJob"]; }; }; /** @description Bad Request */ @@ -6361,7 +6325,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Submittal"][]; + "application/json": components["schemas"]["db.VSubmittal"][]; }; }; /** @description Bad Request */ @@ -6398,7 +6362,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.VTimeseries"][]; }; }; /** @description Bad Request */ @@ -6442,14 +6406,16 @@ export interface paths { /** @description array of timeseries measurement collections */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesMeasurementCollectionCollection"]; + "*/*": components["schemas"]["dto.TimeseriesMeasurementCollectionCollection"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"][]; + "application/json": { + [key: string]: unknown; + }[]; }; }; /** @description Bad Request */ @@ -6487,14 +6453,16 @@ export interface paths { /** @description array of timeseries measurement collections */ requestBody: { content: { - "application/json": components["schemas"]["TimeseriesMeasurementCollectionCollection"]; + "application/json": components["schemas"]["dto.TimeseriesMeasurementCollectionCollection"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -6531,7 
+6499,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["UploaderConfig"][]; + "application/json": components["schemas"]["db.VUploaderConfig"][]; }; }; /** @description Bad Request */ @@ -6553,7 +6521,7 @@ export interface paths { /** @description uploader config payload */ requestBody: { content: { - "*/*": components["schemas"]["UploaderConfig"]; + "*/*": components["schemas"]["dto.UploaderConfig"]; }; }; responses: { @@ -6588,7 +6556,7 @@ export interface paths { /** @description uploader config payload */ requestBody: { content: { - "*/*": components["schemas"]["UploaderConfig"]; + "*/*": components["schemas"]["dto.UploaderConfig"]; }; }; responses: { @@ -6651,7 +6619,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["UploaderConfigMapping"][]; + "application/json": components["schemas"]["db.UploaderConfigMapping"][]; }; }; /** @description Bad Request */ @@ -6675,7 +6643,7 @@ export interface paths { /** @description uploader config mappings payload */ requestBody: { content: { - "*/*": components["schemas"]["UploaderConfigMapping"][]; + "*/*": components["schemas"]["dto.UploaderConfigMapping"][]; }; }; responses: { @@ -6708,7 +6676,7 @@ export interface paths { /** @description uploader config mappings payload */ requestBody: { content: { - "*/*": components["schemas"]["UploaderConfigMapping"][]; + "*/*": components["schemas"]["dto.UploaderConfigMapping"][]; }; }; responses: { @@ -6771,6 +6739,7 @@ export interface paths { /** @description OK */ 200: { content: { + "image/jpeg": string; }; }; /** @description Bad Request */ @@ -6811,7 +6780,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ReportConfigWithPlotConfigs"]; + "application/json": components["schemas"]["service.ReportConfigWithPlotConfigs"]; }; }; /** @description Bad Request */ @@ -6851,7 +6820,7 @@ 
export interface paths { /** @description report download job payload */ requestBody: { content: { - "application/json": components["schemas"]["ReportDownloadJob"]; + "application/json": components["schemas"]["dto.ReportDownloadJob"]; }; }; responses: { @@ -6884,7 +6853,7 @@ export interface paths { }; }; }; - "/search/{entity}": { + "/search/projects": { /** allows searching using a string on different entities */ get: { parameters: { @@ -6901,7 +6870,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["SearchResult"][]; + "application/json": components["schemas"]["db.VProject"][]; }; }; /** @description Bad Request */ @@ -6980,7 +6949,7 @@ export interface paths { /** @description timeseries collection items payload */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesCollectionItems"]; + "*/*": components["schemas"]["dto.TimeseriesCollectionItems"]; }; }; responses: { @@ -6988,8 +6957,8 @@ export interface paths { 200: { content: { "application/json": { - [key: string]: string; - }[]; + [key: string]: string; + }; }; }; /** @description Bad Request */ @@ -7026,7 +6995,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"]; + "application/json": components["schemas"]["db.VTimeseries"]; }; }; /** @description Bad Request */ @@ -7064,16 +7033,14 @@ export interface paths { /** @description timeseries payload */ requestBody: { content: { - "*/*": components["schemas"]["Timeseries"]; + "*/*": components["schemas"]["dto.Timeseries"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": { - [key: string]: string; - }; + "application/json": components["schemas"]["dto.Timeseries"]; }; }; /** @description Bad Request */ @@ -7138,92 +7105,6 @@ export interface paths { }; }; }; - "/timeseries/{timeseries_id}/inclinometer_measurements": { - /** lists all measurements for an inclinometer */ - 
get: { - parameters: { - query?: { - /** @description after timestamp */ - after?: string; - /** @description before timestamp */ - before?: string; - }; - path: { - /** @description timeseries uuid */ - timeseries_id: string; - }; - }; - responses: { - /** @description OK */ - 200: { - content: { - "application/json": components["schemas"]["InclinometerMeasurementCollection"]; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - /** deletes a single inclinometer measurement by timestamp */ - delete: { - parameters: { - query: { - /** @description timestamp of measurement to delete */ - time: string; - /** @description api key */ - key?: string; - }; - path: { - /** @description timeseries uuid */ - timeseries_id: string; - }; - }; - responses: { - /** @description OK */ - 200: { - content: { - "application/json": { - [key: string]: unknown; - }; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - }; "/timeseries/{timeseries_id}/measurements": { /** lists timeseries by timeseries uuid */ get: { @@ -7245,7 +7126,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"]; + "application/json": components["schemas"]["db.VTimeseriesMeasurement"][]; }; }; /** @description Bad 
Request */ @@ -7325,7 +7206,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"][]; + "application/json": { + [key: string]: unknown; + }[]; }; }; /** @description Bad Request */ @@ -7356,7 +7239,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Unit"][]; + "application/json": components["schemas"]["db.VUnit"][]; }; }; /** @description Bad Request */ @@ -7382,42 +7265,6 @@ export interface components { "echo.HTTPError": { message?: Record; }; - /** - * @example { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * } - */ - "geojson.Geometry": { - coordinates?: Record; - geometries?: components["schemas"]["geojson.Geometry"][]; - type?: string; - }; - /** - * @example { - * "x": [ - * 0.8008281904610115, - * 0.8008281904610115 - * ], - * "y": [ - * 6.027456183070403, - * 6.027456183070403 - * ], - * "z": [ - * 1.4658129805029452, - * 1.4658129805029452 - * ] - * } - */ - AggregatePlotConfigMeasurementsContourPlot: { - x?: number[]; - y?: number[]; - z?: number[]; - }; /** * @example { * "instruments": [ @@ -7434,18 +7281,18 @@ export interface components { * "alert_config_id": "alert_config_id", * "project_id": "project_id", * "name": "name", + * "created_at": "created_at", * "id": "id", * "body": "body", - * "create_date": "create_date", * "project_name": "project_name" * } */ - Alert: { + "db.AlertGetRow": { alert_config_id?: string; body?: string; - create_date?: string; + created_at?: string; id?: string; - instruments?: components["schemas"]["AlertConfigInstrument"][]; + instruments?: components["schemas"]["db.InstrumentIDName"][]; name?: string; project_id?: string; project_name?: string; @@ -7453,14 +7300,6 @@ export interface components { }; /** * @example { - * "updater_username": "updater_username", - * "alert_type_id": "alert_type_id", - * "creator_username": 
"creator_username", - * "remind_interval": "remind_interval", - * "body": "body", - * "project_name": "project_name", - * "alert_type": "alert_type", - * "update_date": "update_date", * "instruments": [ * { * "instrument_name": "instrument_name", @@ -7471,78 +7310,37 @@ export interface components { * "instrument_id": "instrument_id" * } * ], + * "read": true, + * "alert_config_id": "alert_config_id", * "project_id": "project_id", - * "last_checked": "last_checked", - * "mute_consecutive_alerts": true, - * "creator_id": "creator_id", - * "last_reminded": "last_reminded", * "name": "name", - * "updater_id": "updater_id", - * "schedule_interval": "schedule_interval", + * "created_at": "created_at", * "id": "id", - * "alert_email_subscriptions": [ - * { - * "user_type": "user_type", - * "id": "id", - * "email": "email", - * "username": "username" - * }, - * { - * "user_type": "user_type", - * "id": "id", - * "email": "email", - * "username": "username" - * } - * ], - * "create_date": "create_date", - * "warning_interval": "warning_interval", - * "start_date": "start_date" + * "body": "body", + * "project_name": "project_name" * } */ - AlertConfig: { - alert_email_subscriptions?: components["schemas"]["EmailAutocompleteResult"][]; - alert_type?: string; - alert_type_id?: string; + "db.AlertListForProfileRow": { + alert_config_id?: string; body?: string; - create_date?: string; - creator_id?: string; - creator_username?: string; + created_at?: string; id?: string; - instruments?: components["schemas"]["AlertConfigInstrument"][]; - last_checked?: string; - last_reminded?: string; - mute_consecutive_alerts?: boolean; + instruments?: components["schemas"]["db.InstrumentIDName"][]; name?: string; project_id?: string; project_name?: string; - remind_interval?: string; - schedule_interval?: string; - start_date?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - warning_interval?: string; + read?: boolean; }; /** * @example { - * 
"instrument_name": "instrument_name", - * "instrument_id": "instrument_id" + * "alert_config_id": "alert_config_id", + * "profile_id": "profile_id", + * "mute_ui": true, + * "id": "id", + * "mute_notify": true * } */ - AlertConfigInstrument: { - instrument_id?: string; - instrument_name?: string; - }; - /** - * @example { - * "alert_config_id": "alert_config_id", - * "profile_id": "profile_id", - * "mute_ui": true, - * "id": "id", - * "mute_notify": true - * } - */ - AlertSubscription: { + "db.AlertProfileSubscription": { alert_config_id?: string; id?: string; mute_notify?: boolean; @@ -7557,7 +7355,7 @@ export interface components { * "parameter_id": "parameter_id" * } */ - AwareParameter: { + "db.AwareParameterListRow": { id?: string; key?: string; parameter_id?: string; @@ -7565,1067 +7363,883 @@ export interface components { }; /** * @example { - * "aware_parameters": { - * "key": "aware_parameters" - * }, - * "instrument_id": "instrument_id", - * "aware_id": "aware_id" + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "id": "id", + * "created_by": "created_by", + * "sort_order": 0, + * "slug": "slug" * } */ - AwarePlatformParameterConfig: { - aware_id?: string; - aware_parameters?: { - [key: string]: string; - }; - instrument_id?: string; + "db.CollectionGroup": { + created_at?: string; + created_by?: string; + id?: string; + name?: string; + project_id?: string; + slug?: string; + sort_order?: number; + updated_at?: string; + updated_by?: string; }; /** * @example { - * "formula_name": "formula_name", - * "formula": "formula", - * "id": "id", + * "instrument": "instrument", + * "type": "standard", * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", + * "name": "name", + * "variable": "{}", + * "latest_value": 6.027456183070403, + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * 
"latest_time": "latest_time", + * "sort_order": 1, * "unit_id": "unit_id", * "slug": "slug", * "parameter_id": "parameter_id" * } */ - CalculatedTimeseries: { - formula?: string; - formula_name?: string; + "db.CollectionGroupDetailsTimeseries": { id?: string; + instrument?: string; instrument_id?: string; + instrument_slug?: string; + is_computed?: boolean; + latest_time?: string; + latest_value?: number; + name?: string; + parameter?: string; parameter_id?: string; slug?: string; + sort_order?: number; + type?: components["schemas"]["db.TimeseriesType"]; + unit?: string; unit_id?: string; + variable?: Record; }; /** * @example { - * "updater_username": "updater_username", - * "project_id": "project_id", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", + * "timeseries_id": "timeseries_id", * "id": "id", - * "create_date": "create_date", - * "sort_order": 0, - * "slug": "slug", - * "update_date": "update_date" + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" * } */ - CollectionGroup: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.DataloggerEquivalencyTableField": { + display_name?: string; + field_name?: string; id?: string; - name?: string; - project_id?: string; - slug?: string; - sort_order?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; + instrument_id?: string; + timeseries_id?: string; }; /** * @example { - * "updater_username": "updater_username", - * "timeseries": [ - * { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], - * "instrument": "instrument", - * 
"type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "name": "name", - * "variable": "variable", - * "latest_value": 6.027456183070403, - * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "latest_time": "latest_time", - * "sort_order": 1, - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" - * }, - * { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], - * "instrument": "instrument", - * "type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "name": "name", - * "variable": "variable", - * "latest_value": 6.027456183070403, - * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "latest_time": "latest_time", - * "sort_order": 1, - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" - * } - * ], - * "project_id": "project_id", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", * "id": "id", - * "create_date": "create_date", - * "sort_order": 0, - * "slug": "slug", - * "update_date": "update_date" + * "table_name": "table_name" * } */ - CollectionGroupDetails: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.DataloggerTableIDName": { id?: string; - name?: string; - project_id?: string; - slug?: string; - sort_order?: number; - timeseries?: components["schemas"]["collectionGroupDetailsTimeseries"][]; - update_date?: string; - updater_id?: string; - updater_username?: string; + table_name?: string; + }; + "db.DomainGroupOpt": { + description?: 
string; + id?: string; + value?: string; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "model_id": "model_id", - * "update_date": "update_date", - * "tables": [ - * { - * "id": "id", - * "table_name": "table_name" - * }, - * { - * "id": "id", - * "table_name": "table_name" - * } - * ], - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "model": "model", + * "user_type": "user_type", * "id": "id", - * "sn": "sn", - * "create_date": "create_date", - * "errors": [ - * "errors", - * "errors" - * ], - * "slug": "slug" + * "email": "email", + * "username": "{}" * } */ - Datalogger: { - create_date?: string; - creator_id?: string; - creator_username?: string; - errors?: string[]; + "db.EmailAutocompleteListRow": { + email?: string; id?: string; - model?: string; - model_id?: string; - name?: string; - project_id?: string; - slug?: string; - sn?: string; - tables?: components["schemas"]["DataloggerTable"][]; - update_date?: string; - updater_id?: string; - updater_username?: string; + user_type?: string; + username?: Record; }; /** * @example { + * "user_type": "user_type", * "id": "id", - * "table_name": "table_name" + * "email": "email", + * "username": "username" * } */ - DataloggerTable: { + "db.EmailAutocompleteResult": { + email?: string; id?: string; - table_name?: string; + user_type?: string; + username?: string; }; /** * @example { - * "preview": { - * "bytes": [ - * 0, - * 0 - * ], - * "status": 6 - * }, - * "datalogger_table_id": "datalogger_table_id", - * "update_date": "update_date" + * "new_instruments_7d": 1, + * "project_count": 5, + * "instrument_group_count": 6, + * "new_measurements_2h": 5, + * "instrument_count": 0 * } */ - DataloggerTablePreview: { - datalogger_table_id?: string; - preview?: components["schemas"]["pgtype.JSON"]; - update_date?: string; + "db.HomeGetRow": { + instrument_count?: number; + 
instrument_group_count?: number; + new_instruments_7d?: number; + new_measurements_2h?: number; + project_count?: number; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "model_id": "model_id", - * "update_date": "update_date", - * "tables": [ - * { - * "id": "id", - * "table_name": "table_name" - * }, - * { - * "id": "id", - * "table_name": "table_name" - * } - * ], - * "project_id": "project_id", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", - * "model": "model", * "id": "id", - * "sn": "sn", - * "create_date": "create_date", - * "errors": [ - * "errors", - * "errors" - * ], - * "key": "key", * "slug": "slug" * } */ - DataloggerWithKey: { - create_date?: string; - creator_id?: string; - creator_username?: string; - errors?: string[]; + "db.IDSlugName": { id?: string; - key?: string; - model?: string; - model_id?: string; name?: string; - project_id?: string; slug?: string; - sn?: string; - tables?: components["schemas"]["DataloggerTable"][]; - update_date?: string; - updater_id?: string; - updater_username?: string; }; /** * @example { - * "office_id": "office_id", - * "agency": "agency", - * "initials": "initials", - * "division_initials": "division_initials", - * "division_name": "division_name", - * "name": "name", - * "id": "id" + * "id": "id", + * "slug": "slug" * } */ - District: { - agency?: string; - division_initials?: string; - division_name?: string; + "db.InstrumentCreateBatchRow": { id?: string; - initials?: string; - name?: string; - office_id?: string; + slug?: string; }; /** * @example { - * "expected_total_submittals": 6, - * "office_id": "office_id", - * "alert_type_id": "alert_type_id", - * "month": "month", + * "deleted": true, + * "updated_at": "updated_at", * "project_id": "project_id", - * "red_submittals": 5, - * "green_submittals": 1, - * "yellow_submittals": 5, - * "actual_total_submittals": 0, - * "district_initials": "district_initials", 
- * "project_name": "project_name" + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "description": "description", + * "id": "id", + * "created_by": "created_by", + * "slug": "slug" * } */ - DistrictRollup: { - actual_total_submittals?: number; - alert_type_id?: string; - district_initials?: string; - expected_total_submittals?: number; - green_submittals?: number; - month?: string; - office_id?: string; - project_id?: string; - project_name?: string; - red_submittals?: number; - yellow_submittals?: number; - }; - Domain: { + "db.InstrumentGroup": { + created_at?: string; + created_by?: string; + deleted?: boolean; description?: string; - group?: string; id?: string; - value?: string; + name?: string; + project_id?: string; + slug?: string; + updated_at?: string; + updated_by?: string; }; - DomainGroupOption: { - description?: string; - id?: string; - value?: string; - }; - DomainMap: { - [key: string]: components["schemas"]["DomainGroupOption"][]; + /** + * @example { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * } + */ + "db.InstrumentIDName": { + instrument_id?: string; + instrument_name?: string; }; /** * @example { - * "user_type": "user_type", + * "updated_at": "updated_at", + * "updated_by": "updated_by", + * "created_at": "created_at", * "id": "id", - * "email": "email", - * "username": "username" + * "time": "time", + * "body": "body", + * "title": "title", + * "created_by": "created_by", + * "instrument_id": "instrument_id" * } */ - EmailAutocompleteResult: { - email?: string; + "db.InstrumentNote": { + body?: string; + created_at?: string; + created_by?: string; id?: string; - user_type?: string; - username?: string; + instrument_id?: string; + time?: string; + title?: string; + updated_at?: string; + updated_by?: string; }; /** * @example { - * "datalogger_table_id": "datalogger_table_id", - * "datalogger_table_name": "datalogger_table_name", - * "rows": [ - * { - * 
"timeseries_id": "timeseries_id", - * "id": "id", - * "display_name": "display_name", - * "instrument_id": "instrument_id", - * "field_name": "field_name" - * }, - * { - * "timeseries_id": "timeseries_id", - * "id": "id", - * "display_name": "display_name", - * "instrument_id": "instrument_id", - * "field_name": "field_name" - * } - * ], - * "datalogger_id": "datalogger_id" + * "elevation": 6.027456183070403, + * "temp": 5.637376656633329, + * "inc_dev": 1.4658129805029452, + * "tilt": 2.3021358869347655, + * "segment_id": 5, + * "cum_dev": 0.8008281904610115 * } */ - EquivalencyTable: { - datalogger_id?: string; - datalogger_table_id?: string; - datalogger_table_name?: string; - rows?: components["schemas"]["EquivalencyTableRow"][]; + "db.IpiMeasurement": { + cum_dev?: number; + elevation?: number; + inc_dev?: number; + segment_id?: number; + temp?: number; + tilt?: number; }; + /** @enum {string} */ + "db.JobStatus": "SUCCESS" | "FAIL" | "INIT"; /** * @example { * "timeseries_id": "timeseries_id", - * "id": "id", - * "display_name": "display_name", - * "instrument_id": "instrument_id", - * "field_name": "field_name" + * "items": [ + * null, + * null + * ] * } */ - EquivalencyTableRow: { - display_name?: string; - field_name?: string; - id?: string; - instrument_id?: string; + "db.MeasurementCollectionLean": { + items?: components["schemas"]["db.MeasurementLean"][]; timeseries_id?: string; }; + "db.MeasurementLean": { + [key: string]: number; + }; /** * @example { - * "end_date": "end_date", - * "updater_username": "updater_username", - * "alert_config_id": "alert_config_id", - * "creator_username": "creator_username", - * "alert_config_name": "alert_config_name", - * "body": "body", - * "project_name": "project_name", - * "submittal_id": "submittal_id", - * "update_date": "update_date", - * "instruments": [ - * { - * "instrument_name": "instrument_name", - * "instrument_id": "instrument_id" - * }, - * { - * "instrument_name": "instrument_name", - * 
"instrument_id": "instrument_id" - * } - * ], - * "project_id": "project_id", - * "creator_id": "creator_id", + * "utc_offset": "utc_offset", * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "start_date": "start_date" + * "abbrev": "abbrev", + * "is_dst": true * } */ - Evaluation: { - alert_config_id?: string; - alert_config_name?: string; - body?: string; - create_date?: string; - creator_id?: string; - creator_username?: string; - end_date?: string; - id?: string; - instruments?: components["schemas"]["EvaluationInstrument"][]; + "db.PgTimezoneNamesListRow": { + abbrev?: string; + is_dst?: boolean; name?: string; - project_id?: string; - project_name?: string; - start_date?: string; - submittal_id?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "instrument_name": "instrument_name", - * "instrument_id": "instrument_id" - * } - */ - EvaluationInstrument: { - instrument_id?: string; - instrument_name?: string; + utc_offset?: string; }; /** * @example { - * "geometries": [ - * { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * }, - * { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * } - * ], - * "coordinates": "{}", - * "type": "type" + * "x": "{}", + * "y": "{}", + * "time": "time" * } */ - Geometry: { - coordinates?: Record; - geometries?: components["schemas"]["geojson.Geometry"][]; - type?: string; + "db.PlotConfigMeasurementListBullseyeRow": { + time?: string; + x?: Record; + y?: Record; }; + /** @enum {string} */ + "db.PlotType": "scatter-line" | "profile" | "contour" | "bullseye"; /** * @example { - * "time": "time" + * "id": "id", + * "display_name": "display_name", + * "email": "email", + * "username": "username" * } */ - Heartbeat: { - time?: string; + "db.ProfileCreateRow": { + display_name?: string; + email?: string; + id?: string; + username?: 
string; }; /** * @example { - * "new_instruments_7d": 1, - * "project_count": 5, - * "instrument_group_count": 6, - * "new_measurements_2h": 5, - * "instrument_count": 0 + * "role": "role", + * "role_id": "role_id", + * "profile_id": "profile_id", + * "id": "id", + * "email": "email", + * "username": "username" * } */ - Home: { - instrument_count?: number; - instrument_group_count?: number; - new_instruments_7d?: number; - new_measurements_2h?: number; - project_count?: number; + "db.ProfileProjectRoleListForProjectRow": { + email?: string; + id?: string; + profile_id?: string; + role?: string; + role_id?: string; + username?: string; }; /** * @example { - * "name": "name", * "id": "id", * "slug": "slug" * } */ - IDSlugName: { + "db.ProjectCreateBatchRow": { id?: string; - name?: string; slug?: string; }; /** * @example { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" + * "date_range": { + * "value": "value", + * "enabled": true + * }, + * "show_nonvalidated": { + * "value": true, + * "enabled": true + * }, + * "show_masked": { + * "value": true, + * "enabled": true + * } * } */ - InclinometerMeasurement: { - create_date?: string; - creator?: string; - time?: string; - values?: number[]; + "db.ReportConfigGlobalOverrides": { + date_range?: components["schemas"]["db.TextOption"]; + show_masked?: components["schemas"]["db.ToggleOption"]; + show_nonvalidated?: components["schemas"]["db.ToggleOption"]; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "inclinometers": [ - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * }, - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * } - * ] + * "progress_updated_at": "progress_updated_at", + * "file_key": "file_key", + * "report_config_id": "report_config_id", + * "created_at": "created_at", + * 
"progress": 0, + * "file_expiry": "file_expiry", + * "id": "id", + * "created_by": "created_by", + * "status": "SUCCESS" * } */ - InclinometerMeasurementCollection: { - inclinometers?: components["schemas"]["InclinometerMeasurement"][]; - timeseries_id?: string; + "db.ReportDownloadJob": { + created_at?: string; + created_by?: string; + file_expiry?: string; + file_key?: string; + id?: string; + progress?: number; + progress_updated_at?: string; + report_config_id?: string; + status?: components["schemas"]["db.JobStatus"]; }; /** * @example { - * "items": [ - * { - * "timeseries_id": "timeseries_id", - * "inclinometers": [ - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * }, - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * } - * ] - * }, - * { - * "timeseries_id": "timeseries_id", - * "inclinometers": [ - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * }, - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * } - * ] - * } - * ] + * "elevation": 0.8008281904610115, + * "temp": 1.4658129805029452, + * "z_cum_dev": 1.2315135367772556, + * "y_increment": 4.145608029883936, + * "x_cum_dev": 7.061401241503109, + * "temp_increment": 5.637376656633329, + * "z_increment": 1.0246457001441578, + * "y_cum_dev": 2.027123023002322, + * "x_increment": 9.301444243932576, + * "x": 2.3021358869347655, + * "y": 3.616076749251911, + * "z": 7.386281948385884, + * "segment_id": 6, + * "temp_cum_dev": 5.962133916683182 * } */ - InclinometerMeasurementCollectionCollection: { - items?: components["schemas"]["InclinometerMeasurementCollection"][]; + "db.SaaMeasurement": { + elevation?: number; + segment_id?: number; + temp?: number; + temp_cum_dev?: number; + temp_increment?: number; + x?: number; + 
x_cum_dev?: number; + x_increment?: number; + y?: number; + y_cum_dev?: number; + y_increment?: number; + z?: number; + z_cum_dev?: number; + z_increment?: number; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "items": [ - * null, - * null - * ] + * "value": "value", + * "enabled": true * } */ - InclinometerMeasurementCollectionLean: { - items?: components["schemas"]["InclinometerMeasurementLean"][]; - timeseries_id?: string; - }; - InclinometerMeasurementLean: { - [key: string]: number[]; + "db.TextOption": { + enabled?: boolean; + value?: string; }; /** * @example { - * "has_cwms": true, - * "projects": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "alert_configs": [ - * "alert_configs", - * "alert_configs" - * ], - * "icon": "icon", - * "type": "type", - * "aware_id": "aware_id", - * "status_id": "status_id", - * "opts": { - * "key": "" - * }, - * "station": 6, - * "constants": [ - * "constants", - * "constants" - * ], + * "formula_name": "formula_name", + * "formula": "formula", * "id": "id", - * "status_time": "status_time", - * "create_date": "create_date", + * "instrument_id": "instrument_id", + * "unit_id": "unit_id", * "slug": "slug", - * "updater_username": "updater_username", - * "offset": 0, - * "creator_username": "creator_username", - * "type_id": "type_id", - * "show_cwms_tab": true, - * "usgs_id": "usgs_id", - * "groups": [ - * "groups", - * "groups" - * ], - * "update_date": "update_date", - * "creator_id": "creator_id", + * "parameter_id": "parameter_id" + * } + */ + "db.TimeseriesComputedListForInstrumentRow": { + formula?: string; + formula_name?: string; + id?: string; + instrument_id?: string; + parameter_id?: string; + slug?: string; + unit_id?: string; + }; + /** + * @example { * "name": "name", - * "updater_id": "updater_id", - * "geometry": { - * "geometries": [ - * { - * "geometries": [ - * null, - * null - * ], - * 
"coordinates": "{}", - * "type": "type" - * }, - * { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * } - * ], - * "coordinates": "{}", - * "type": "type" - * }, - * "nid_id": "nid_id", - * "status": "status" + * "id": "id", + * "type": "standard", + * "instrument_id": "instrument_id", + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" * } */ - Instrument: { - alert_configs?: string[]; - aware_id?: string; - constants?: string[]; - create_date?: string; - creator_id?: string; - creator_username?: string; - geometry?: components["schemas"]["Geometry"]; - groups?: string[]; - has_cwms?: boolean; - icon?: string; + "db.TimeseriesCreateBatchRow": { id?: string; + instrument_id?: string; name?: string; - nid_id?: string; - offset?: number; - opts?: { - [key: string]: unknown; - }; - projects?: components["schemas"]["IDSlugName"][]; - show_cwms_tab?: boolean; + parameter_id?: string; slug?: string; - station?: number; - status?: string; - status_id?: string; - status_time?: string; - type?: string; - type_id?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - usgs_id?: string; + type?: components["schemas"]["db.TimeseriesType"]; + unit_id?: string; }; + /** @enum {string} */ + "db.TimeseriesType": "standard" | "constant" | "computed" | "cwms"; /** * @example { - * "instrument_count": 0 + * "value": true, + * "enabled": true * } */ - InstrumentCount: { - instrument_count?: number; + "db.ToggleOption": { + enabled?: boolean; + value?: boolean; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "description": "description", - * "instrument_count": 0, - * "update_date": "update_date", - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "timeseries_count": 6, - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "slug": "slug" + * 
"timeseries_id": "timeseries_id", + * "uploader_config_id": "uploader_config_id", + * "field_name": "field_name" * } */ - InstrumentGroup: { - create_date?: string; - creator_id?: string; - creator_username?: string; - description?: string; - id?: string; - instrument_count?: number; - name?: string; - project_id?: string; - slug?: string; - timeseries_count?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; + "db.UploaderConfigMapping": { + field_name?: string; + timeseries_id?: string; + uploader_config_id?: string; }; + /** @enum {string} */ + "db.UploaderConfigType": "csv" | "dux" | "toa5"; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "updater_id": "updater_id", + * "instruments": [ + * { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * }, + * { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * } + * ], + * "alert_config_id": "alert_config_id", + * "project_id": "project_id", + * "name": "name", + * "created_at": "created_at", * "id": "id", - * "time": "time", * "body": "body", - * "create_date": "create_date", - * "title": "title", - * "instrument_id": "instrument_id", - * "update_date": "update_date" + * "project_name": "project_name" * } */ - InstrumentNote: { + "db.VAlert": { + alert_config_id?: string; body?: string; - create_date?: string; - creator_id?: string; - creator_username?: string; + created_at?: string; id?: string; - instrument_id?: string; - time?: string; - title?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; + instruments?: components["schemas"]["db.InstrumentIDName"][]; + name?: string; + project_id?: string; + project_name?: string; }; /** * @example { - * "items": [ + * "alert_type_id": "alert_type_id", + * "created_at": "created_at", + * "remind_interval": "remind_interval", + * 
"create_next_submittal_from": "create_next_submittal_from", + * "body": "body", + * "project_name": "project_name", + * "created_by": "created_by", + * "alert_type": "alert_type", + * "last_checked_at": "last_checked_at", + * "updated_by_username": "updated_by_username", + * "instruments": [ + * { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * }, + * { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * } + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "mute_consecutive_alerts": true, + * "name": "name", + * "updated_by": "updated_by", + * "schedule_interval": "schedule_interval", + * "started_at": "started_at", + * "created_by_username": "created_by_username", + * "id": "id", + * "alert_email_subscriptions": [ * { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "updater_id": "updater_id", + * "user_type": "user_type", * "id": "id", - * "time": "time", - * "body": "body", - * "create_date": "create_date", - * "title": "title", - * "instrument_id": "instrument_id", - * "update_date": "update_date" + * "email": "email", + * "username": "username" * }, * { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "updater_id": "updater_id", + * "user_type": "user_type", * "id": "id", - * "time": "time", - * "body": "body", - * "create_date": "create_date", - * "title": "title", - * "instrument_id": "instrument_id", - * "update_date": "update_date" + * "email": "email", + * "username": "username" * } - * ] - * } - */ - InstrumentNoteCollection: { - items?: components["schemas"]["InstrumentNote"][]; - }; - /** - * @example { - * "project_ids": [ - * "project_ids", - * "project_ids" - * ] + * ], + * "last_reminded_at": "last_reminded_at", + * "warning_interval": "warning_interval" * } */ - InstrumentProjectAssignments: { - project_ids?: 
string[]; + "db.VAlertConfig": { + alert_email_subscriptions?: components["schemas"]["db.EmailAutocompleteResult"][]; + alert_type?: string; + alert_type_id?: string; + body?: string; + create_next_submittal_from?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + id?: string; + instruments?: components["schemas"]["db.InstrumentIDName"][]; + last_checked_at?: string; + last_reminded_at?: string; + mute_consecutive_alerts?: boolean; + name?: string; + project_id?: string; + project_name?: string; + remind_interval?: string; + schedule_interval?: string; + started_at?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; + warning_interval?: string; }; /** * @example { - * "status_id": "status_id", + * "timeseries": [ + * { + * "instrument": "instrument", + * "type": "standard", + * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", + * "name": "name", + * "variable": "{}", + * "latest_value": 6.027456183070403, + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "latest_time": "latest_time", + * "sort_order": 1, + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" + * }, + * { + * "instrument": "instrument", + * "type": "standard", + * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", + * "name": "name", + * "variable": "{}", + * "latest_value": 6.027456183070403, + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "latest_time": "latest_time", + * "sort_order": 1, + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" + * } + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", * "id": "id", - * "time": "time", - * "status": "status" + * "created_by": "created_by", + * "sort_order": 0, + * "slug": "slug" * } 
*/ - InstrumentStatus: { + "db.VCollectionGroupDetail": { + created_at?: string; + created_by?: string; id?: string; - status?: string; - status_id?: string; - time?: string; + name?: string; + project_id?: string; + slug?: string; + sort_order?: number; + timeseries?: components["schemas"]["db.CollectionGroupDetailsTimeseries"][]; + updated_at?: string; + updated_by?: string; }; /** * @example { - * "items": [ + * "created_at": "created_at", + * "model_id": "model_id", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", + * "tables": [ * { - * "status_id": "status_id", * "id": "id", - * "time": "time", - * "status": "status" + * "table_name": "table_name" * }, * { - * "status_id": "status_id", * "id": "id", - * "time": "time", - * "status": "status" + * "table_name": "table_name" * } - * ] - * } - */ - InstrumentStatusCollection: { - items?: components["schemas"]["InstrumentStatus"][]; - }; - /** - * @example { - * "is_valid": true, + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", + * "model": "model", + * "id": "id", + * "sn": "sn", * "errors": [ * "errors", * "errors" - * ] + * ], + * "slug": "slug" * } */ - InstrumentsValidation: { + "db.VDatalogger": { + created_at?: string; + created_by?: string; + created_by_username?: string; errors?: string[]; - is_valid?: boolean; + id?: string; + model?: string; + model_id?: string; + name?: string; + project_id?: string; + slug?: string; + sn?: string; + tables?: components["schemas"]["db.DataloggerTableIDName"][]; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; }; /** * @example { - * "time": "time", - * "measurements": [ + * "datalogger_table_id": "datalogger_table_id", + * "datalogger_table_name": "datalogger_table_name", + * "fields": [ * { - * "elevation": 6.027456183070403, - * "temp": 5.637376656633329, - * "inc_dev": 
1.4658129805029452, - * "tilt": 2.3021358869347655, - * "segment_id": 5, - * "cum_dev": 0.8008281904610115 + * "timeseries_id": "timeseries_id", + * "id": "id", + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" * }, * { - * "elevation": 6.027456183070403, - * "temp": 5.637376656633329, - * "inc_dev": 1.4658129805029452, - * "tilt": 2.3021358869347655, - * "segment_id": 5, - * "cum_dev": 0.8008281904610115 + * "timeseries_id": "timeseries_id", + * "id": "id", + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" * } - * ] + * ], + * "datalogger_id": "datalogger_id" * } */ - IpiMeasurements: { - measurements?: components["schemas"]["IpiSegmentMeasurement"][]; - time?: string; + "db.VDataloggerEquivalencyTable": { + datalogger_id?: string; + datalogger_table_id?: string; + datalogger_table_name?: string; + fields?: components["schemas"]["db.DataloggerEquivalencyTableField"][]; }; /** * @example { - * "temp_timeseries_id": "temp_timeseries_id", - * "length": 6.027456183070403, - * "tilt_timeseries_id": "tilt_timeseries_id", - * "id": 0, - * "inc_dev_timeseries_id": "inc_dev_timeseries_id", - * "instrument_id": "instrument_id", - * "length_timeseries_id": "length_timeseries_id" + * "preview": [ + * 0, + * 0 + * ], + * "updated_at": "updated_at", + * "datalogger_table_id": "datalogger_table_id" * } */ - IpiSegment: { - id?: number; - inc_dev_timeseries_id?: string; - instrument_id?: string; - length?: number; - length_timeseries_id?: string; - temp_timeseries_id?: string; - tilt_timeseries_id?: string; + "db.VDataloggerPreview": { + datalogger_table_id?: string; + preview?: number[]; + updated_at?: string; }; /** * @example { - * "elevation": 6.027456183070403, - * "temp": 5.637376656633329, - * "inc_dev": 1.4658129805029452, - * "tilt": 2.3021358869347655, - * "segment_id": 5, - * "cum_dev": 0.8008281904610115 + * "office_id": "office_id", + * "agency": "agency", + 
* "initials": "initials", + * "division_initials": "division_initials", + * "division_name": "division_name", + * "name": "name", + * "id": "id" * } */ - IpiSegmentMeasurement: { - cum_dev?: number; - elevation?: number; - inc_dev?: number; - segment_id?: number; - temp?: number; - tilt?: number; + "db.VDistrict": { + agency?: string; + division_initials?: string; + division_name?: string; + id?: string; + initials?: string; + name?: string; + office_id?: string; }; /** * @example { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "expected_total_submittals": 6, + * "office_id": "office_id", + * "alert_type_id": "alert_type_id", + * "month": "month", + * "project_id": "project_id", + * "red_submittals": 5, + * "green_submittals": 1, + * "yellow_submittals": 5, + * "actual_total_submittals": 0, + * "district_initials": "district_initials", + * "project_name": "project_name" * } */ - Measurement: { - annotation?: string; - error?: string; - masked?: boolean; - time?: string; - validated?: boolean; - value?: number; + "db.VDistrictRollup": { + actual_total_submittals?: number; + alert_type_id?: string; + district_initials?: string; + expected_total_submittals?: number; + green_submittals?: number; + month?: string; + office_id?: string; + project_id?: string; + project_name?: string; + red_submittals?: number; + yellow_submittals?: number; + }; + "db.VDomain": { + description?: string; + group?: string; + id?: string; + value?: string; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "items": [ + * "alert_config_id": "alert_config_id", + * "created_at": "created_at", + * "alert_config_name": "alert_config_name", + * "body": "body", + * "project_name": "project_name", + * "created_by": "created_by", + * "submittal_id": "submittal_id", + * "updated_by_username": "updated_by_username", + * "instruments": [ * { - * "annotation": "annotation", - * 
"validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" * }, * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" * } - * ] + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "started_at": "started_at", + * "created_by_username": "created_by_username", + * "id": "id", + * "ended_at": "ended_at" * } */ - MeasurementCollection: { - items?: components["schemas"]["Measurement"][]; - timeseries_id?: string; + "db.VEvaluation": { + alert_config_id?: string; + alert_config_name?: string; + body?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + ended_at?: string; + id?: string; + instruments?: components["schemas"]["db.InstrumentIDName"][]; + name?: string; + project_id?: string; + project_name?: string; + started_at?: string; + submittal_id?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "items": [ - * null, - * null - * ] + * "time": "time", + * "instrument_id": "instrument_id", + * "measurements": "{}" * } */ - MeasurementCollectionLean: { - items?: components["schemas"]["MeasurementLean"][]; - timeseries_id?: string; - }; - MeasurementLean: { - [key: string]: number; + "db.VInclMeasurement": { + instrument_id?: string; + measurements?: Record; + time?: string; }; - Opts: { - [key: string]: unknown; + /** + * @example { + * "depth_timeseries_id": "depth_timeseries_id", + * "b180_timeseries_id": "b180_timeseries_id", + * "a180_timeseries_id": "a180_timeseries_id", + * "id": 0, + * "instrument_id": "instrument_id", + * "a0_timeseries_id": 
"a0_timeseries_id", + * "b0_timeseries_id": "b0_timeseries_id" + * } + */ + "db.VInclSegment": { + a0_timeseries_id?: string; + a180_timeseries_id?: string; + b0_timeseries_id?: string; + b180_timeseries_id?: string; + depth_timeseries_id?: string; + id?: number; + instrument_id?: string; }; /** * @example { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "key": "" - * }, - * "show_comments": true, - * "report_configs": [ + * "has_cwms": true, + * "projects": [ * { * "name": "name", * "id": "id", @@ -8637,55 +8251,41 @@ export interface components { * "slug": "slug" * } * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 0, - * "update_date": "update_date", - * "show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", + * "alert_configs": [ + * "alert_configs", + * "alert_configs" + * ], + * "icon": "icon", + * "created_at": "created_at", + * "type": "type", + * "status_id": "status_id", + * "opts": "{}", + * "updated_at": "updated_at", + * "station": 1, + * "constants": [ + * "constants", + * "constants" + * ], * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" - * } - */ - PlotConfig: { - auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; - date_range?: string; - display?: { - [key: string]: unknown; - }; - id?: string; - name?: string; - plot_type?: string; - project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; - show_comments?: boolean; - show_masked?: boolean; - show_nonvalidated?: boolean; - slug?: string; - threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * 
"display": { - * "y_axis_timeseries_id": "y_axis_timeseries_id", - * "x_axis_timeseries_id": "x_axis_timeseries_id" - * }, - * "show_comments": true, - * "report_configs": [ + * "status_time": "status_time", + * "slug": "slug", + * "offset": 6, + * "type_id": "type_id", + * "show_cwms_tab": true, + * "usgs_id": "usgs_id", + * "groups": [ + * "groups", + * "groups" + * ], + * "created_by": "created_by", + * "name": "name", + * "updated_by": "updated_by", + * "geometry": [ + * 0, + * 0 + * ], + * "nid_id": "nid_id", + * "telemetry": [ * { * "name": "name", * "id": "id", @@ -8697,372 +8297,132 @@ export interface components { * "slug": "slug" * } * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 0, - * "update_date": "update_date", - * "show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" + * "status": "status" * } */ - PlotConfigBullseyePlot: { - auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; - date_range?: string; - display?: components["schemas"]["PlotConfigBullseyePlotDisplay"]; + "db.VInstrument": { + alert_configs?: string[]; + constants?: string[]; + created_at?: string; + created_by?: string; + geometry?: number[]; + groups?: string[]; + has_cwms?: boolean; + icon?: string; id?: string; name?: string; - plot_type?: string; - project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; - show_comments?: boolean; - show_masked?: boolean; - show_nonvalidated?: boolean; + nid_id?: string; + offset?: number; + opts?: Record; + projects?: components["schemas"]["db.IDSlugName"][]; + show_cwms_tab?: boolean; slug?: string; - threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "y_axis_timeseries_id": "y_axis_timeseries_id", 
- * "x_axis_timeseries_id": "x_axis_timeseries_id" - * } - */ - PlotConfigBullseyePlotDisplay: { - x_axis_timeseries_id?: string; - y_axis_timeseries_id?: string; + station?: number; + status?: string; + status_id?: string; + status_time?: string; + telemetry?: components["schemas"]["db.IDSlugName"][]; + type?: string; + type_id?: string; + updated_at?: string; + updated_by?: string; + usgs_id?: string; }; /** * @example { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "contour_smoothing": true, - * "gradient_smoothing": true, - * "locf_backfill": "locf_backfill", - * "timeseries_ids": [ - * "timeseries_ids", - * "timeseries_ids" - * ], - * "show_labels": true, - * "time": "time" - * }, - * "show_comments": true, - * "report_configs": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 0, - * "update_date": "update_date", - * "show_nonvalidated": true, + * "updated_at": "updated_at", * "project_id": "project_id", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", + * "timeseries_count": "{}", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "description": "description", * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", + * "created_by": "created_by", + * "instrument_count": 0, * "slug": "slug" * } */ - PlotConfigContourPlot: { - auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; - date_range?: string; - display?: components["schemas"]["PlotConfigContourPlotDisplay"]; + "db.VInstrumentGroup": { + created_at?: string; + created_by?: string; + description?: string; id?: string; + instrument_count?: number; name?: string; - plot_type?: string; project_id?: string; - report_configs?: 
components["schemas"]["IDSlugName"][]; - show_comments?: boolean; - show_masked?: boolean; - show_nonvalidated?: boolean; slug?: string; - threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "contour_smoothing": true, - * "gradient_smoothing": true, - * "locf_backfill": "locf_backfill", - * "timeseries_ids": [ - * "timeseries_ids", - * "timeseries_ids" - * ], - * "show_labels": true, - * "time": "time" - * } - */ - PlotConfigContourPlotDisplay: { - contour_smoothing?: boolean; - gradient_smoothing?: boolean; - locf_backfill?: string; - show_labels?: boolean; - time?: string; - timeseries_ids?: string[]; - }; - /** - * @example { - * "x": 0.8008281904610115, - * "y": 6.027456183070403, - * "time": "time" - * } - */ - PlotConfigMeasurementBullseyePlot: { - time?: string; - x?: number; - y?: number; + timeseries_count?: Record; + updated_at?: string; + updated_by?: string; }; /** * @example { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "instrument_type": "instrument_type", - * "instrument_id": "instrument_id" - * }, - * "show_comments": true, - * "report_configs": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 0, - * "update_date": "update_date", - * "show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", + * "status_id": "status_id", * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" + * "time": "time", + * "instrument_id": "instrument_id", + * "status": "status" * } */ - PlotConfigProfilePlot: { - auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; - date_range?: 
string; - display?: components["schemas"]["PlotConfigProfilePlotDisplay"]; + "db.VInstrumentStatus": { id?: string; - name?: string; - plot_type?: string; - project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; - show_comments?: boolean; - show_masked?: boolean; - show_nonvalidated?: boolean; - slug?: string; - threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "instrument_type": "instrument_type", - * "instrument_id": "instrument_id" - * } - */ - PlotConfigProfilePlotDisplay: { instrument_id?: string; - instrument_type?: string; - }; - /** - * @example { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - */ - PlotConfigScatterLineCustomShape: { - color?: string; - data_point?: number; - enabled?: boolean; - name?: string; - plot_configuration_id?: string; + status?: string; + status_id?: string; + time?: string; }; /** * @example { - * "layout": { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" - * }, - * "traces": [ + * "time": "time", + * "instrument_id": "instrument_id", + * "measurements": [ * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 + * "elevation": 6.027456183070403, + * "temp": 
5.637376656633329, + * "inc_dev": 1.4658129805029452, + * "tilt": 2.3021358869347655, + * "segment_id": 5, + * "cum_dev": 0.8008281904610115 * }, * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 + * "elevation": 6.027456183070403, + * "temp": 5.637376656633329, + * "inc_dev": 1.4658129805029452, + * "tilt": 2.3021358869347655, + * "segment_id": 5, + * "cum_dev": 0.8008281904610115 * } * ] * } */ - PlotConfigScatterLineDisplay: { - layout?: components["schemas"]["PlotConfigScatterLineLayout"]; - traces?: components["schemas"]["PlotConfigScatterLineTimeseriesTrace"][]; + "db.VIpiMeasurement": { + instrument_id?: string; + measurements?: components["schemas"]["db.IpiMeasurement"][]; + time?: string; }; /** * @example { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" + * "length": 6.027456183070403, + * "tilt_timeseries_id": "tilt_timeseries_id", + * "id": 0, + * "inc_dev_timeseries_id": "inc_dev_timeseries_id", + * "instrument_id": "instrument_id", + * "length_timeseries_id": "length_timeseries_id" * } */ - PlotConfigScatterLineLayout: { - custom_shapes?: components["schemas"]["PlotConfigScatterLineCustomShape"][]; - y2_axis_title?: string; - y_axis_title?: string; + "db.VIpiSegment": { + id?: number; + inc_dev_timeseries_id?: string; + instrument_id?: string; + length?: number; + length_timeseries_id?: 
string; + tilt_timeseries_id?: string; }; /** * @example { * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "layout": { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" - * }, - * "traces": [ - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * }, - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * } - * ] - * }, + * "display": "{}", * "show_comments": true, * "report_configs": [ * { @@ -9076,72 +8436,39 @@ export interface components { * "slug": "slug" * } * ], + * "created_at": "created_at", * "auto_range": true, * "show_masked": true, - * "threshold": 5, - * "update_date": "update_date", + * "threshold": 0, + * "created_by": "created_by", * "show_nonvalidated": true, + * "updated_at": "updated_at", * "project_id": "project_id", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", + * "updated_by": "updated_by", * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", + * 
"plot_type": "scatter-line", * "slug": "slug" * } */ - PlotConfigScatterLinePlot: { + "db.VPlotConfiguration": { auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; + created_at?: string; + created_by?: string; date_range?: string; - display?: components["schemas"]["PlotConfigScatterLineDisplay"]; + display?: Record; id?: string; name?: string; - plot_type?: string; + plot_type?: components["schemas"]["db.PlotType"]; project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; + report_configs?: components["schemas"]["db.IDSlugName"][]; show_comments?: boolean; show_masked?: boolean; show_nonvalidated?: boolean; slug?: string; threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * } - */ - PlotConfigScatterLineTimeseriesTrace: { - color?: string; - line_style?: string; - /** @description read-only */ - name?: string; - /** @description read-only */ - parameter?: string; - plot_configuration_id?: string; - show_markers?: boolean; - timeseries_id?: string; - trace_order?: number; - trace_type?: string; - width?: number; - /** @description y1 or y2, default y1 */ - y_axis?: string; + updated_at?: string; + updated_by?: string; }; /** * @example { @@ -9162,96 +8489,69 @@ export interface components { * ], * "id": "id", * "display_name": "display_name", + * "edipi": 0, * "email": "email", * "username": "username" * } */ - Profile: { + "db.VProfile": { display_name?: string; + edipi?: number; email?: string; id?: string; is_admin?: boolean; roles?: string[]; - tokens?: components["schemas"]["TokenInfoProfile"][]; 
+ tokens?: components["schemas"]["db.VProfileToken"][]; username?: string; }; /** * @example { - * "image": "image", - * "updater_username": "updater_username", + * "token_id": "token_id", + * "issued": "issued" + * } + */ + "db.VProfileToken": { + issued?: string; + token_id?: string; + }; + /** + * @example { + * "image": "{}", * "federal_id": "federal_id", - * "creator_username": "creator_username", + * "created_at": "created_at", + * "created_by": "created_by", * "instrument_count": 0, - * "update_date": "update_date", * "office_id": "office_id", + * "updated_by_username": "updated_by_username", * "instrument_group_count": 6, - * "creator_id": "creator_id", + * "updated_at": "updated_at", * "name": "name", - * "updater_id": "updater_id", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", * "district_id": "district_id", * "id": "id", - * "create_date": "create_date", * "slug": "slug" * } */ - Project: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.VProject": { + created_at?: string; + created_by?: string; + created_by_username?: string; district_id?: string; federal_id?: string; id?: string; - image?: string; + image?: Record; instrument_count?: number; instrument_group_count?: number; name?: string; office_id?: string; slug?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "project_count": 0 - * } - */ - ProjectCount: { - project_count?: number; - }; - /** - * @example { - * "instrument_ids": [ - * "instrument_ids", - * "instrument_ids" - * ] - * } - */ - ProjectInstrumentAssignments: { - instrument_ids?: string[]; - }; - /** - * @example { - * "role": "role", - * "role_id": "role_id", - * "profile_id": "profile_id", - * "id": "id", - * "email": "email", - * "username": "username" - * } - */ - ProjectMembership: { - email?: string; - id?: string; - profile_id?: string; - role?: string; - role_id?: string; - username?: string; + 
updated_at?: string; + updated_by?: string; + updated_by_username?: string; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", * "global_overrides": { * "date_range": { * "value": "value", @@ -9266,16 +8566,18 @@ export interface components { * "enabled": true * } * }, + * "created_at": "created_at", * "description": "description", * "project_name": "project_name", - * "update_date": "update_date", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", * "district_name": "district_name", + * "updated_at": "updated_at", * "project_id": "project_id", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", * "id": "id", - * "create_date": "create_date", * "plot_configs": [ * { * "name": "name", @@ -9291,283 +8593,27 @@ export interface components { * "slug": "slug" * } */ - ReportConfig: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.VReportConfig": { + created_at?: string; + created_by?: string; + created_by_username?: string; description?: string; district_name?: string; - global_overrides?: components["schemas"]["ReportConfigGlobalOverrides"]; + global_overrides?: components["schemas"]["db.ReportConfigGlobalOverrides"]; id?: string; name?: string; - plot_configs?: components["schemas"]["IDSlugName"][]; + plot_configs?: components["schemas"]["db.IDSlugName"][]; project_id?: string; project_name?: string; slug?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "date_range": { - * "value": "value", - * "enabled": true - * }, - * "show_nonvalidated": { - * "value": true, - * "enabled": true - * }, - * "show_masked": { - * "value": true, - * "enabled": true - * } - * } - */ - ReportConfigGlobalOverrides: { - date_range?: components["schemas"]["TextOption"]; - show_masked?: 
components["schemas"]["ToggleOption"]; - show_nonvalidated?: components["schemas"]["ToggleOption"]; - }; - /** - * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "global_overrides": { - * "date_range": { - * "value": "value", - * "enabled": true - * }, - * "show_nonvalidated": { - * "value": true, - * "enabled": true - * }, - * "show_masked": { - * "value": true, - * "enabled": true - * } - * }, - * "description": "description", - * "project_name": "project_name", - * "update_date": "update_date", - * "district_name": "district_name", - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_configs": [ - * { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "layout": { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" - * }, - * "traces": [ - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * }, - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": 
"line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * } - * ] - * }, - * "show_comments": true, - * "report_configs": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 5, - * "update_date": "update_date", - * "show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" - * }, - * { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "layout": { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" - * }, - * "traces": [ - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * }, - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * } - * ] - * }, - * "show_comments": 
true, - * "report_configs": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 5, - * "update_date": "update_date", - * "show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" - * } - * ], - * "slug": "slug" - * } - */ - ReportConfigWithPlotConfigs: { - create_date?: string; - creator_id?: string; - creator_username?: string; - description?: string; - district_name?: string; - global_overrides?: components["schemas"]["ReportConfigGlobalOverrides"]; - id?: string; - name?: string; - plot_configs?: components["schemas"]["PlotConfigScatterLinePlot"][]; - project_id?: string; - project_name?: string; - slug?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "file_key": "file_key", - * "creator": "creator", - * "progress_update_date": "progress_update_date", - * "report_config_id": "report_config_id", - * "progress": 0, - * "file_expiry": "file_expiry", - * "id": "id", - * "create_date": "create_date", - * "status": "status" - * } - */ - ReportDownloadJob: { - create_date?: string; - creator?: string; - file_expiry?: string; - file_key?: string; - id?: string; - progress?: number; - progress_update_date?: string; - report_config_id?: string; - status?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; }; /** * @example { * "time": "time", + * "instrument_id": "instrument_id", * "measurements": [ * { * "elevation": 0.8008281904610115, @@ -9604,8 +8650,9 @@ export interface components { * ] * } */ - SaaMeasurements: { - measurements?: components["schemas"]["SaaSegmentMeasurement"][]; + "db.VSaaMeasurement": { + instrument_id?: string; 
+ measurements?: components["schemas"]["db.SaaMeasurement"][]; time?: string; }; /** @@ -9620,7 +8667,7 @@ export interface components { * "length_timeseries_id": "length_timeseries_id" * } */ - SaaSegment: { + "db.VSaaSegment": { id?: number; instrument_id?: string; length?: number; @@ -9630,87 +8677,31 @@ export interface components { y_timeseries_id?: string; z_timeseries_id?: string; }; - /** - * @example { - * "elevation": 0.8008281904610115, - * "temp": 1.4658129805029452, - * "z_cum_dev": 1.2315135367772556, - * "y_increment": 4.145608029883936, - * "x_cum_dev": 7.061401241503109, - * "temp_increment": 5.637376656633329, - * "z_increment": 1.0246457001441578, - * "y_cum_dev": 2.027123023002322, - * "x_increment": 9.301444243932576, - * "x": 2.3021358869347655, - * "y": 3.616076749251911, - * "z": 7.386281948385884, - * "segment_id": 6, - * "temp_cum_dev": 5.962133916683182 - * } - */ - SaaSegmentMeasurement: { - elevation?: number; - segment_id?: number; - temp?: number; - temp_cum_dev?: number; - temp_increment?: number; - x?: number; - x_cum_dev?: number; - x_increment?: number; - y?: number; - y_cum_dev?: number; - y_increment?: number; - z?: number; - z_cum_dev?: number; - z_increment?: number; - }; - /** - * @example { - * "item": "{}", - * "id": "id", - * "type": "type" - * } - */ - SearchResult: { - id?: string; - item?: Record; - type?: string; - }; - Site: { - description?: string; - elevation?: string; - elevationUnits?: string; - siteName?: components["schemas"]["SiteName"]; - }; - SiteName: { - id?: string; - nameType?: string; - }; /** * @example { * "alert_type_id": "alert_type_id", * "alert_config_id": "alert_config_id", - * "due_date": "due_date", + * "created_at": "created_at", * "alert_config_name": "alert_config_name", * "submittal_status_id": "submittal_status_id", * "submittal_status_name": "submittal_status_name", * "warning_sent": true, + * "completed_at": "completed_at", * "project_id": "project_id", * "alert_type_name": 
"alert_type_name", * "marked_as_missing": true, - * "completion_date": "completion_date", - * "id": "id", - * "create_date": "create_date" + * "due_at": "due_at", + * "id": "id" * } */ - Submittal: { + "db.VSubmittal": { alert_config_id?: string; alert_config_name?: string; alert_type_id?: string; alert_type_name?: string; - completion_date?: string; - create_date?: string; - due_date?: string; + completed_at?: string; + created_at?: string; + due_at?: string; id?: string; marked_as_missing?: boolean; project_id?: string; @@ -9720,41 +8711,13 @@ export interface components { }; /** * @example { - * "value": "value", - * "enabled": true - * } - */ - TextOption: { - enabled?: boolean; - value?: string; - }; - /** - * @example { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], * "instrument": "instrument", - * "type": "type", + * "type": "standard", * "instrument_id": "instrument_id", * "unit": "unit", * "parameter": "parameter", * "name": "name", - * "variable": "variable", + * "variable": "{}", * "id": "id", * "instrument_slug": "instrument_slug", * "is_computed": true, @@ -9763,7 +8726,7 @@ export interface components { * "parameter_id": "parameter_id" * } */ - Timeseries: { + "db.VTimeseries": { id?: string; instrument?: string; instrument_id?: string; @@ -9773,118 +8736,24 @@ export interface components { parameter?: string; parameter_id?: string; slug?: string; - type?: string; + type?: components["schemas"]["db.TimeseriesType"]; unit?: string; unit_id?: string; - values?: components["schemas"]["Measurement"][]; - variable?: string; - }; - /** - * @example { - * "items": [ - * { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": 
true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], - * "instrument": "instrument", - * "type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "name": "name", - * "variable": "variable", - * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" - * }, - * { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], - * "instrument": "instrument", - * "type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "name": "name", - * "variable": "variable", - * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" - * } - * ] - * } - */ - TimeseriesCollectionItems: { - items?: components["schemas"]["Timeseries"][]; + variable?: Record; }; /** * @example { * "cwms_office_id": "cwms_office_id", - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], * "instrument": "instrument", * "cwms_extent_earliest_time": "cwms_extent_earliest_time", - * "type": "type", + * "type": "standard", * 
"cwms_timeseries_id": "cwms_timeseries_id", * "instrument_id": "instrument_id", * "unit": "unit", * "parameter": "parameter", * "cwms_extent_latest_time": "cwms_extent_latest_time", * "name": "name", - * "variable": "variable", + * "variable": "{}", * "id": "id", * "instrument_slug": "instrument_slug", * "is_computed": true, @@ -9893,7 +8762,7 @@ export interface components { * "parameter_id": "parameter_id" * } */ - TimeseriesCwms: { + "db.VTimeseriesCwm": { cwms_extent_earliest_time?: string; cwms_extent_latest_time?: string; cwms_office_id?: string; @@ -9907,110 +8776,28 @@ export interface components { parameter?: string; parameter_id?: string; slug?: string; - type?: string; + type?: components["schemas"]["db.TimeseriesType"]; unit?: string; unit_id?: string; - values?: components["schemas"]["Measurement"][]; - variable?: string; + variable?: Record; }; /** * @example { - * "items": [ - * { - * "timeseries_id": "timeseries_id", - * "items": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ] - * }, - * { - * "timeseries_id": "timeseries_id", - * "items": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ] - * } - * ] - * } - */ - TimeseriesMeasurementCollectionCollection: { - items?: components["schemas"]["MeasurementCollection"][]; - }; - /** - * @example { - * "utc_offset": "utc_offset", - * "name": "name", - * "abbrev": "abbrev", - * "is_dst": true - * } - */ - TimezoneOption: { - abbrev?: string; - is_dst?: 
boolean; - name?: string; - utc_offset?: string; - }; - /** - * @example { - * "value": true, - * "enabled": true - * } - */ - ToggleOption: { - enabled?: boolean; - value?: boolean; - }; - /** - * @example { - * "token_id": "token_id", - * "profile_id": "profile_id", - * "issued": "issued", - * "secret_token": "secret_token" - * } - */ - Token: { - issued?: string; - profile_id?: string; - secret_token?: string; - token_id?: string; - }; - /** - * @example { - * "token_id": "token_id", - * "issued": "issued" + * "annotation": "annotation", + * "timeseries_id": "timeseries_id", + * "validated": true, + * "masked": true, + * "time": "time", + * "value": 0.8008281904610115 * } */ - TokenInfoProfile: { - issued?: string; - token_id?: string; + "db.VTimeseriesMeasurement": { + annotation?: string; + masked?: boolean; + time?: string; + timeseries_id?: string; + validated?: boolean; + value?: number; }; /** * @example { @@ -10023,7 +8810,7 @@ export interface components { * "measure_id": "measure_id" * } */ - Unit: { + "db.VUnit": { abbreviation?: string; id?: string; measure?: string; @@ -10034,118 +8821,1050 @@ export interface components { }; /** * @example { - * "updater_username": "updater_username", + * "validated_field": "validated_field", + * "created_at": "created_at", + * "description": "description", + * "comment_field": "comment_field", + * "type": "csv", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", + * "time_field": "time_field", + * "masked_field": "masked_field", * "tz_name": "tz_name", + * "updated_at": "updated_at", + * "comment_field_enabled": true, * "project_id": "project_id", - * "creator_username": "creator_username", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", - * "description": "description", + * "updated_by": "updated_by", + * "masked_field_enabled": true, + * "created_by_username": "created_by_username", * "id": "id", - * "create_date": "create_date", - * "type": "csv", 
- * "update_date": "update_date" + * "validated_field_enabled": true, + * "slug": "slug" * } */ - UploaderConfig: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.VUploaderConfig": { + comment_field?: string; + comment_field_enabled?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; description?: string; id?: string; + masked_field?: string; + masked_field_enabled?: boolean; name?: string; project_id?: string; - type?: components["schemas"]["UploaderConfigType"]; + slug?: string; + time_field?: string; + type?: components["schemas"]["db.UploaderConfigType"]; tz_name?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; + validated_field?: string; + validated_field_enabled?: boolean; + }; + "dto.AlertConfig": { + alert_email_subscriptions?: components["schemas"]["dto.EmailAutocompleteResult"][]; + alert_type?: string; + alert_type_id?: string; + body?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + id?: string; + instruments?: components["schemas"]["dto.AlertConfigInstrument"][]; + last_checked?: string; + last_reminded?: string; + mute_consecutive_alerts?: boolean; + name?: string; + project_id?: string; + project_name?: string; + remind_interval?: string; + schedule_interval?: string; + started_at?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + warning_interval?: string; + }; + "dto.AlertConfigInstrument": { + instrument_id?: string; + instrument_name?: string; + }; + "dto.AlertSubscription": { + alert_config_id?: string; + id?: string; + mute_notify?: boolean; + mute_ui?: boolean; + profile_id?: string; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "field_name": "field_name" + * "formula_name": "formula_name", + * "formula": "formula", + * "id": "id", + * "instrument_id": 
"instrument_id", + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" * } */ - UploaderConfigMapping: { + "dto.CalculatedTimeseries": { + formula?: string; + formula_name?: string; + id?: string; + instrument_id?: string; + parameter_id?: string; + slug?: string; + unit_id?: string; + }; + "dto.CollectionGroup": { + created_at?: string; + created_by?: string; + created_by_username?: string; + id?: string; + name?: string; + project_id?: string; + slug?: string; + sort_order?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.Datalogger": { + created_at?: string; + created_by?: string; + created_by_username?: string; + errors?: string[]; + id?: string; + model?: string; + model_id?: string; + name?: string; + project_id?: string; + slug?: string; + sn?: string; + tables?: components["schemas"]["dto.DataloggerTable"][]; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.DataloggerTable": { + id?: string; + table_name?: string; + }; + "dto.EmailAutocompleteResult": { + email?: string; + id?: string; + user_type?: string; + username?: string; + }; + "dto.EquivalencyTable": { + datalogger_id?: string; + datalogger_table_id?: string; + datalogger_table_name?: string; + rows?: components["schemas"]["dto.EquivalencyTableRow"][]; + }; + "dto.EquivalencyTableRow": { + display_name?: string; field_name?: string; + id?: string; + instrument_id?: string; timeseries_id?: string; }; - /** @enum {string} */ - UploaderConfigType: "csv" | "dux" | "toa5"; /** * @example { - * "values": [ + * "alert_config_id": "alert_config_id", + * "created_at": "created_at", + * "alert_config_name": "alert_config_name", + * "body": "body", + * "project_name": "project_name", + * "created_by": "created_by", + * "submittal_id": "submittal_id", + * "updated_by_username": "updated_by_username", + * "instruments": [ * { - * "annotation": "annotation", - * "validated": true, - * "masked": 
true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" * }, * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" * } * ], - * "instrument": "instrument", - * "type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", + * "project_id": "project_id", * "name": "name", - * "variable": "variable", - * "latest_value": 6.027456183070403, + * "updated_by": "updated_by", + * "started_at": "started_at", + * "updatedd_at": "updatedd_at", + * "created_by_username": "created_by_username", * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "latest_time": "latest_time", - * "sort_order": 1, - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" + * "ended_at": "ended_at" * } */ - collectionGroupDetailsTimeseries: { + "dto.Evaluation": { + alert_config_id?: string; + alert_config_name?: string; + body?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + ended_at?: string; id?: string; - instrument?: string; + instruments?: components["schemas"]["dto.EvaluationInstrument"][]; + name?: string; + project_id?: string; + project_name?: string; + started_at?: string; + submittal_id?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + /** + * @example { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * } + */ + "dto.EvaluationInstrument": { instrument_id?: string; - instrument_slug?: string; - is_computed?: boolean; - latest_time?: string; - latest_value?: number; + instrument_name?: string; + }; + /** + * @example { + * "name": "name", + * "id": "id", + * "slug": "slug" + * } + */ + 
"dto.IDSlugName": { + id?: string; name?: string; - parameter?: string; - parameter_id?: string; slug?: string; - sort_order?: number; - type?: string; - unit?: string; - unit_id?: string; - values?: components["schemas"]["Measurement"][]; - variable?: string; }; /** * @example { - * "bytes": [ - * 0, - * 0 - * ], - * "status": 6 + * "depth_timeseries_id": "depth_timeseries_id", + * "b180_timeseries_id": "b180_timeseries_id", + * "a180_timeseries_id": "a180_timeseries_id", + * "id": 0, + * "instrument_id": "instrument_id", + * "a0_timeseries_id": "a0_timeseries_id", + * "b0_timeseries_id": "b0_timeseries_id" * } */ - "pgtype.JSON": { - bytes?: number[]; - status?: components["schemas"]["pgtype.Status"]; + "dto.InclSegment": { + a0_timeseries_id?: string; + a180_timeseries_id?: string; + b0_timeseries_id?: string; + b180_timeseries_id?: string; + depth_timeseries_id?: string; + id?: number; + instrument_id?: string; + }; + /** + * @example { + * "has_cwms": true, + * "projects": [ + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * }, + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * } + * ], + * "alert_configs": [ + * "alert_configs", + * "alert_configs" + * ], + * "icon": "icon", + * "created_at": "created_at", + * "type": "type", + * "aware_id": "aware_id", + * "updated_by_username": "updated_by_username", + * "status_id": "status_id", + * "opts": { + * "key": "" + * }, + * "station": 1, + * "created_by_username": "created_by_username", + * "constants": [ + * "constants", + * "constants" + * ], + * "id": "id", + * "status_time": "status_time", + * "slug": "slug", + * "offset": 6, + * "type_id": "type_id", + * "show_cwms_tab": true, + * "usgs_id": "usgs_id", + * "groups": [ + * "groups", + * "groups" + * ], + * "created_by": "created_by", + * "name": "name", + * "updated_by": "updated_by", + * "updatedd_at": "updatedd_at", + * "geometry": [ + * 0, + * 0 + * ], + * "nid_id": "nid_id", + * "status": "status" + * } + */ + "dto.Instrument": 
{ + alert_configs?: string[]; + aware_id?: string; + constants?: string[]; + created_at?: string; + created_by?: string; + created_by_username?: string; + geometry?: number[]; + groups?: string[]; + has_cwms?: boolean; + icon?: string; + id?: string; + name?: string; + nid_id?: string; + offset?: number; + opts?: { + [key: string]: unknown; + }; + projects?: components["schemas"]["dto.IDSlugName"][]; + show_cwms_tab?: boolean; + slug?: string; + station?: number; + status?: string; + status_id?: string; + status_time?: string; + type?: string; + type_id?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + usgs_id?: string; + }; + "dto.InstrumentGroup": { + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + id?: string; + instrument_count?: number; + name?: string; + project_id?: string; + slug?: string; + timeseries_count?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.InstrumentNote": { + body?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + id?: string; + instrument_id?: string; + time?: string; + title?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.InstrumentNoteCollection": { + items?: components["schemas"]["dto.InstrumentNote"][]; + }; + "dto.InstrumentProjectAssignments": { + project_ids?: string[]; + }; + "dto.InstrumentStatus": { + id?: string; + status?: string; + status_id?: string; + time?: string; + }; + "dto.InstrumentStatusCollection": { + items?: components["schemas"]["dto.InstrumentStatus"][]; + }; + /** + * @example { + * "temp_timeseries_id": "temp_timeseries_id", + * "length": 6.027456183070403, + * "tilt_timeseries_id": "tilt_timeseries_id", + * "id": 0, + * "inc_dev_timeseries_id": "inc_dev_timeseries_id", + * "instrument_id": "instrument_id", + * "length_timeseries_id": "length_timeseries_id" + * } + */ + 
"dto.IpiSegment": { + id?: number; + inc_dev_timeseries_id?: string; + instrument_id?: string; + length?: number; + length_timeseries_id?: string; + temp_timeseries_id?: string; + tilt_timeseries_id?: string; + }; + /** + * @example { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * } + */ + "dto.Measurement": { + annotation?: string; + error?: string; + masked?: boolean; + time?: string; + validated?: boolean; + value?: number; + }; + "dto.MeasurementCollection": { + items?: components["schemas"]["dto.Measurement"][]; + timeseries_id?: string; + }; + "dto.Opts": { + [key: string]: unknown; + }; + "dto.PlotConfigBullseyePlot": { + auto_range?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + date_range?: string; + display?: components["schemas"]["dto.PlotConfigBullseyePlotDisplay"]; + id?: string; + name?: string; + plot_type?: string; + project_id?: string; + report_configs?: components["schemas"]["dto.IDSlugName"][]; + show_comments?: boolean; + show_masked?: boolean; + show_nonvalidated?: boolean; + slug?: string; + threshold?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.PlotConfigBullseyePlotDisplay": { + x_axis_timeseries_id?: string; + y_axis_timeseries_id?: string; + }; + "dto.PlotConfigContourPlot": { + auto_range?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + date_range?: string; + display?: components["schemas"]["dto.PlotConfigContourPlotDisplay"]; + id?: string; + name?: string; + plot_type?: string; + project_id?: string; + report_configs?: components["schemas"]["dto.IDSlugName"][]; + show_comments?: boolean; + show_masked?: boolean; + show_nonvalidated?: boolean; + slug?: string; + threshold?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + 
"dto.PlotConfigContourPlotDisplay": { + contour_smoothing?: boolean; + gradient_smoothing?: boolean; + locf_backfill?: string; + show_labels?: boolean; + time?: string; + timeseries_ids?: string[]; + }; + "dto.PlotConfigProfilePlot": { + auto_range?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + date_range?: string; + display?: components["schemas"]["dto.PlotConfigProfilePlotDisplay"]; + id?: string; + name?: string; + plot_type?: string; + project_id?: string; + report_configs?: components["schemas"]["dto.IDSlugName"][]; + show_comments?: boolean; + show_masked?: boolean; + show_nonvalidated?: boolean; + slug?: string; + threshold?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.PlotConfigProfilePlotDisplay": { + instrument_id?: string; + instrument_type?: string; + }; + "dto.PlotConfigScatterLineCustomShape": { + color?: string; + data_point?: number; + enabled?: boolean; + name?: string; + plot_configuration_id?: string; + }; + "dto.PlotConfigScatterLineDisplay": { + layout?: components["schemas"]["dto.PlotConfigScatterLineLayout"]; + traces?: components["schemas"]["dto.PlotConfigScatterLineTimeseriesTrace"][]; + }; + "dto.PlotConfigScatterLineLayout": { + custom_shapes?: components["schemas"]["dto.PlotConfigScatterLineCustomShape"][]; + y2_axis_title?: string; + y_axis_title?: string; + }; + "dto.PlotConfigScatterLinePlot": { + auto_range?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + date_range?: string; + display?: components["schemas"]["dto.PlotConfigScatterLineDisplay"]; + id?: string; + name?: string; + plot_type?: string; + project_id?: string; + report_configs?: components["schemas"]["dto.IDSlugName"][]; + show_comments?: boolean; + show_masked?: boolean; + show_nonvalidated?: boolean; + slug?: string; + threshold?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + 
"dto.PlotConfigScatterLineTimeseriesTrace": { + color?: string; + line_style?: string; + /** @description read-only */ + name?: string; + /** @description read-only */ + parameter?: string; + plot_configuration_id?: string; + show_markers?: boolean; + timeseries_id?: string; + trace_order?: number; + trace_type?: string; + width?: number; + /** @description y1 or y2, default y1 */ + y_axis?: string; + }; + /** + * @example { + * "image": "image", + * "federal_id": "federal_id", + * "created_at": "created_at", + * "created_by": "created_by", + * "instrument_count": 0, + * "office_id": "office_id", + * "updated_by_username": "updated_by_username", + * "instrument_group_count": 6, + * "name": "name", + * "updated_by": "updated_by", + * "updatedd_at": "updatedd_at", + * "created_by_username": "created_by_username", + * "district_id": "district_id", + * "id": "id", + * "slug": "slug" + * } + */ + "dto.Project": { + created_at?: string; + created_by?: string; + created_by_username?: string; + district_id?: string; + federal_id?: string; + id?: string; + image?: string; + instrument_count?: number; + instrument_group_count?: number; + name?: string; + office_id?: string; + slug?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.ProjectInstrumentAssignments": { + instrument_ids?: string[]; + }; + "dto.ReportConfig": { + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + district_name?: string; + global_overrides?: components["schemas"]["dto.ReportConfigGlobalOverrides"]; + id?: string; + name?: string; + plot_configs?: components["schemas"]["dto.IDSlugName"][]; + project_id?: string; + project_name?: string; + slug?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.ReportConfigGlobalOverrides": { + date_range?: components["schemas"]["dto.TextOption"]; + show_masked?: components["schemas"]["dto.ToggleOption"]; + 
show_nonvalidated?: components["schemas"]["dto.ToggleOption"]; + }; + "dto.ReportDownloadJob": { + created_at?: string; + created_by?: string; + file_expiry?: string; + file_key?: string; + id?: string; + progress?: number; + progress_updated_at?: string; + report_config_id?: string; + status?: string; + }; + /** + * @example { + * "z_timeseries_id": "z_timeseries_id", + * "temp_timeseries_id": "temp_timeseries_id", + * "y_timeseries_id": "y_timeseries_id", + * "x_timeseries_id": "x_timeseries_id", + * "length": 6.027456183070403, + * "id": 0, + * "instrument_id": "instrument_id", + * "length_timeseries_id": "length_timeseries_id" + * } + */ + "dto.SaaSegment": { + id?: number; + instrument_id?: string; + length?: number; + length_timeseries_id?: string; + temp_timeseries_id?: string; + x_timeseries_id?: string; + y_timeseries_id?: string; + z_timeseries_id?: string; + }; + "dto.TextOption": { + enabled?: boolean; + value?: string; + }; + /** + * @example { + * "values": [ + * { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * }, + * { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * } + * ], + * "instrument": "instrument", + * "type": "type", + * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", + * "name": "name", + * "variable": "variable", + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" + * } + */ + "dto.Timeseries": { + id?: string; + instrument?: string; + instrument_id?: string; + instrument_slug?: string; + is_computed?: boolean; + name?: string; + parameter?: string; + parameter_id?: string; + slug?: string; + type?: string; + unit?: string; + unit_id?: string; + values?: 
components["schemas"]["dto.Measurement"][]; + variable?: string; + }; + "dto.TimeseriesCollectionItems": { + items?: components["schemas"]["dto.Timeseries"][]; + }; + /** + * @example { + * "cwms_office_id": "cwms_office_id", + * "values": [ + * { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * }, + * { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * } + * ], + * "instrument": "instrument", + * "cwms_extent_earliest_time": "cwms_extent_earliest_time", + * "type": "type", + * "cwms_timeseries_id": "cwms_timeseries_id", + * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", + * "cwms_extent_latest_time": "cwms_extent_latest_time", + * "name": "name", + * "variable": "variable", + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" + * } + */ + "dto.TimeseriesCwms": { + cwms_extent_earliest_time?: string; + cwms_extent_latest_time?: string; + cwms_office_id?: string; + cwms_timeseries_id?: string; + id?: string; + instrument?: string; + instrument_id?: string; + instrument_slug?: string; + is_computed?: boolean; + name?: string; + parameter?: string; + parameter_id?: string; + slug?: string; + type?: string; + unit?: string; + unit_id?: string; + values?: components["schemas"]["dto.Measurement"][]; + variable?: string; + }; + "dto.TimeseriesMeasurementCollectionCollection": { + items?: components["schemas"]["dto.MeasurementCollection"][]; + }; + "dto.ToggleOption": { + enabled?: boolean; + value?: boolean; + }; + "dto.UploaderConfig": { + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + id?: string; + name?: string; + project_id?: string; + slug?: string; + type?: 
components["schemas"]["dto.UploaderConfigType"]; + tz_name?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + /** + * @example { + * "timeseries_id": "timeseries_id", + * "field_name": "field_name" + * } + */ + "dto.UploaderConfigMapping": { + field_name?: string; + timeseries_id?: string; + }; + /** @enum {string} */ + "dto.UploaderConfigType": "csv" | "dux" | "toa5"; + /** + * @example { + * "x": [ + * 0.8008281904610115, + * 0.8008281904610115 + * ], + * "y": [ + * 6.027456183070403, + * 6.027456183070403 + * ], + * "z": [ + * 1.4658129805029452, + * 1.4658129805029452 + * ] + * } + */ + "service.AggregatePlotConfigMeasurementsContourPlot": { + x?: number[]; + y?: number[]; + z?: number[]; + }; + /** + * @example { + * "aware_parameters": { + * "key": "aware_parameters" + * }, + * "instrument_id": "instrument_id", + * "aware_id": "aware_id" + * } + */ + "service.AwarePlatformParameterConfig": { + aware_id?: string; + aware_parameters?: { + [key: string]: string; + }; + instrument_id?: string; + }; + /** + * @example { + * "created_at": "created_at", + * "model_id": "model_id", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", + * "tables": [ + * { + * "id": "id", + * "table_name": "table_name" + * }, + * { + * "id": "id", + * "table_name": "table_name" + * } + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", + * "model": "model", + * "id": "id", + * "sn": "sn", + * "errors": [ + * "errors", + * "errors" + * ], + * "key": "key", + * "slug": "slug" + * } + */ + "service.DataloggerWithKey": { + created_at?: string; + created_by?: string; + created_by_username?: string; + errors?: string[]; + id?: string; + key?: string; + model?: string; + model_id?: string; + name?: string; + project_id?: string; + slug?: string; + sn?: string; + tables?: 
components["schemas"]["db.DataloggerTableIDName"][]; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; + }; + "service.DomainMap": { + [key: string]: components["schemas"]["db.DomainGroupOpt"][]; + }; + /** + * @example { + * "status": "status" + * } + */ + "service.Healthcheck": { + status?: string; + }; + /** + * @example { + * "time": "time" + * } + */ + "service.Heartbeat": { + time?: string; + }; + /** + * @example { + * "is_valid": true, + * "errors": [ + * "errors", + * "errors" + * ] + * } + */ + "service.InstrumentsValidation": { + errors?: string[]; + is_valid?: boolean; + }; + /** + * @example { + * "global_overrides": { + * "date_range": { + * "value": "value", + * "enabled": true + * }, + * "show_nonvalidated": { + * "value": true, + * "enabled": true + * }, + * "show_masked": { + * "value": true, + * "enabled": true + * } + * }, + * "created_at": "created_at", + * "description": "description", + * "project_name": "project_name", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", + * "district_name": "district_name", + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", + * "id": "id", + * "plot_configs": [ + * { + * "date_range": "date_range", + * "display": "{}", + * "show_comments": true, + * "report_configs": [ + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * }, + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * } + * ], + * "created_at": "created_at", + * "auto_range": true, + * "show_masked": true, + * "threshold": 0, + * "created_by": "created_by", + * "show_nonvalidated": true, + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "id": "id", + * "plot_type": "scatter-line", + * "slug": "slug" + * }, + * { + * "date_range": "date_range", + * "display": "{}", + * "show_comments": 
true, + * "report_configs": [ + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * }, + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * } + * ], + * "created_at": "created_at", + * "auto_range": true, + * "show_masked": true, + * "threshold": 0, + * "created_by": "created_by", + * "show_nonvalidated": true, + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "id": "id", + * "plot_type": "scatter-line", + * "slug": "slug" + * } + * ], + * "slug": "slug" + * } + */ + "service.ReportConfigWithPlotConfigs": { + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + district_name?: string; + global_overrides?: components["schemas"]["db.ReportConfigGlobalOverrides"]; + id?: string; + name?: string; + plot_configs?: components["schemas"]["db.VPlotConfiguration"][]; + project_id?: string; + project_name?: string; + slug?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; + }; + /** + * @example { + * "token_id": "token_id", + * "profile_id": "profile_id", + * "id": "id", + * "issued": "issued", + * "hash": "hash", + * "secret_token": "secret_token" + * } + */ + "service.Token": { + hash?: string; + id?: string; + issued?: string; + profile_id?: string; + secret_token?: string; + token_id?: string; }; - /** @enum {integer} */ - "pgtype.Status": 0 | 1 | 2; _timeseries_measurements_post_request: { /** * Format: binary diff --git a/sqlc.yml b/sqlc.yml index 0a76415b..41618cfb 100644 --- a/sqlc.yml +++ b/sqlc.yml @@ -14,6 +14,7 @@ sql: emit_json_tags: true emit_interface: true emit_empty_slices: true + emit_exact_table_names: true emit_pointers_for_null_types: true overrides: # uuid @@ -25,11 +26,6 @@ sql: type: uuid.UUID pointer: true - # geometry - - db_type: geometry - go_type: - type: Geometry - # timestamptz - db_type: timestamptz go_type: time.Time @@ -102,6 +98,10 @@ sql: type: DomainGroupOpt 
slice: true + # v_datalogger_preview + - column: v_datalogger_preview.preview + go_type: encoding/json.RawMessage + # v_evaluation - column: v_evaluation.instruments go_type: @@ -109,6 +109,8 @@ sql: slice: true # v_instrument + - column: v_instrument.geometry + go_type: encoding/json.RawMessage - column: v_instrument.projects go_type: type: IDSlugName From 6026aee2f703b8d892d2ebedba1a58b00f92177f Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 15 Nov 2024 00:33:57 -0500 Subject: [PATCH 19/23] add updated incl opts to instrument uploader --- api/internal/service/instrument.go | 12 ++++++++++++ api/internal/service/instrument_incl.go | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/api/internal/service/instrument.go b/api/internal/service/instrument.go index 8c1eba53..c9446e54 100644 --- a/api/internal/service/instrument.go +++ b/api/internal/service/instrument.go @@ -209,5 +209,17 @@ func handleOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument, rt return err } } + if len(incl) != 0 { + var err error + switch rt { + case create: + err = createInclOptsBatch(ctx, q, incl) + case update: + err = updateInclOptsBatch(ctx, q, incl) + } + if err != nil { + return err + } + } return nil } diff --git a/api/internal/service/instrument_incl.go b/api/internal/service/instrument_incl.go index 4b435e3f..71ad5404 100644 --- a/api/internal/service/instrument_incl.go +++ b/api/internal/service/instrument_incl.go @@ -39,7 +39,7 @@ func (s DBService) InclSegmentUpdateBatch(ctx context.Context, instrumentID uuid return tx.Commit(ctx) } -func inclOptsCreateBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { +func createInclOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { createTimeseriesBatchParams := make([][]db.TimeseriesCreateBatchParams, len(ii)) createInclSegmentBatchParams := make([][]db.InclSegmentCreateBatchParams, len(ii)) From 56e6e936e826f3d467e2d14fcaac74f80ce6abb9 Mon Sep 17 00:00:00 
2001 From: Dennis Smith Date: Fri, 15 Nov 2024 09:57:13 -0500 Subject: [PATCH 20/23] chore: move uploader parsers to separate file --- api/internal/service/uploader.go | 158 +------------- api/internal/service/uploader_parser.go | 277 ++++++++++++++++++++++++ 2 files changed, 279 insertions(+), 156 deletions(-) create mode 100644 api/internal/service/uploader_parser.go diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index a7a21c3c..3d3d4c87 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -2,11 +2,6 @@ package service import ( "context" - "encoding/csv" - "io" - "math" - "strconv" - "time" "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" @@ -41,7 +36,7 @@ func (s DBService) UploaderConfigMappingCreateBatch(ctx context.Context, ucID uu args := make([]db.UploaderConfigMappingCreateBatchParams, len(mm)) for idx, m := range mm { args[idx] = db.UploaderConfigMappingCreateBatchParams{ - UploaderConfigID: m.UploaderConfigID, + UploaderConfigID: ucID, FieldName: m.FieldName, TimeseriesID: m.TimeseriesID, } @@ -66,7 +61,7 @@ func (s DBService) UploaderConfigMappingUpdateBatch(ctx context.Context, ucID uu args := make([]db.UploaderConfigMappingCreateBatchParams, len(mm)) for idx, m := range mm { args[idx] = db.UploaderConfigMappingCreateBatchParams{ - UploaderConfigID: m.UploaderConfigID, + UploaderConfigID: ucID, FieldName: m.FieldName, TimeseriesID: m.TimeseriesID, } @@ -77,152 +72,3 @@ func (s DBService) UploaderConfigMappingUpdateBatch(ctx context.Context, ucID uu } return tx.Commit(ctx) } - -func (s DBService) TimeseriesMeasurementsCreateFromTOA5File(ctx context.Context, r io.Reader) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - - qtx := s.WithTx(tx) - - reader := csv.NewReader(r) - - envHeader, err := reader.Read() - if err != nil { - return err - } - fieldHeader, err := 
reader.Read() - if err != nil { - return err - } - unitsHeader, err := reader.Read() - if err != nil { - return err - } - processHeader, err := reader.Read() - if err != nil { - return err - } - - meta := dto.Environment{ - // StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - // OSVersion: envHeader[4], - // ProgName: envHeader[5], - TableName: envHeader[6], - } - - dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ - Model: &meta.Model, - Sn: meta.SerialNo, - }) - if err != nil { - return err - } - tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ - DataloggerID: dl.ID, - TableName: meta.TableName, - }) - if err != nil { - return err - } - - // first two columns are timestamp and record number - // we only want to collect the measurement fields here - fields := make([]dto.Field, len(fieldHeader)-2) - for i := 2; i < len(fieldHeader); i++ { - fields[i] = dto.Field{ - Name: fieldHeader[i], - Units: unitsHeader[i], - Process: processHeader[i], - } - } - - eqt, err := qtx.EquivalencyTableGet(ctx, tableID) - if err != nil { - return err - } - - fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, eqtRow := range eqt.Fields { - fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID - } - - chunkSize := 1_000 - createMmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, chunkSize) - createNoteParams := make([]db.TimeseriesNoteCreateOrUpdateBatchParams, chunkSize) - var mmtIdx, noteIdx int - for { - record, err := reader.Read() - if err == io.EOF { - break - } - if err != nil { - return err - } - - t, err := time.Parse(record[0], time.RFC3339) - if err != nil { - return err - } - - for idx, cell := range record[2:] { - fieldName := fields[idx].Name - tsID, ok := fieldNameTimeseriesIDMap[fieldName] - if !ok { - continue - } - v, err := strconv.ParseFloat(cell, 64) - if err != nil { - continue - } - createMmtParams[mmtIdx] = 
db.TimeseriesMeasurementCreateOrUpdateBatchParams{ - TimeseriesID: tsID, - Time: t, - Value: v, - } - mmtIdx++ - if mmtIdx == chunkSize { - var err error - qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - mmtIdx = 0 - } - if math.IsNaN(v) || math.IsInf(v, 0) { - masked := true - createNoteParams[noteIdx] = db.TimeseriesNoteCreateOrUpdateBatchParams{ - TimeseriesID: tsID, - Time: t, - Masked: &masked, - } - noteIdx++ - if noteIdx == chunkSize { - var err error - qtx.TimeseriesNoteCreateOrUpdateBatch(ctx, createNoteParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - noteIdx = 0 - } - } - } - } - if mmtIdx != 0 { - qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, createMmtParams[:mmtIdx]).Exec(batchExecErr(&err)) - if err != nil { - return err - } - } - if noteIdx != 0 { - qtx.TimeseriesNoteCreateOrUpdateBatch(ctx, createNoteParams[:noteIdx]).Exec(batchExecErr(&err)) - if err != nil { - return err - } - } - return nil -} diff --git a/api/internal/service/uploader_parser.go b/api/internal/service/uploader_parser.go new file mode 100644 index 00000000..f74ceaab --- /dev/null +++ b/api/internal/service/uploader_parser.go @@ -0,0 +1,277 @@ +package service + +import ( + "context" + "encoding/csv" + "io" + "math" + "strconv" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" +) + +// datalogger toa5/dat parser +func (s DBService) TimeseriesMeasurementCreateBatchForDataloggerFromTOA5File(ctx context.Context, r io.Reader) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + envHeader, err := reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + // skip units header + _, err = reader.Read() + if err != nil 
{ + return err + } + // skip process header + _, err = reader.Read() + if err != nil { + return err + } + + meta := dto.Environment{ + // StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + // OSVersion: envHeader[4], + // ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ + Model: &meta.Model, + Sn: meta.SerialNo, + }) + if err != nil { + return err + } + tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dl.ID, + TableName: meta.TableName, + }) + if err != nil { + return err + } + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]string, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = fieldHeader[i] + } + + eqt, err := qtx.EquivalencyTableGet(ctx, tableID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, eqtRow := range eqt.Fields { + if eqtRow.TimeseriesID == nil { + continue + } + fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID + } + + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, fields, fieldNameTimeseriesIDMap); err != nil { + return err + } + + return tx.Commit(ctx) +} + +// non-datalogger toa5/dat parser (use uploader config) +func (s DBService) TimeseriesMeasurementCreateBatchFromTOA5File(ctx context.Context, r io.Reader, ucID uuid.UUID) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + _, err = reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + _, err = reader.Read() + if err != nil { + return err + } + _, err = reader.Read() + if err != nil { + return err + } + + // first two columns are timestamp and record 
number + // we only want to collect the measurement fields here + fields := make([]string, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = fieldHeader[i] + } + + ucm, err := qtx.UploaderConfigMappingList(ctx, ucID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, mapping := range ucm { + if mapping.TimeseriesID == nil { + continue + } + fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID + } + + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, fields, fieldNameTimeseriesIDMap); err != nil { + return err + } + + return tx.Commit(ctx) +} + +// cusom csv parser +func (s DBService) TimeseriesMeasurementCreateBatchFromCSVFile(ctx context.Context, r io.Reader, ucID uuid.UUID) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + fieldHeader, err := reader.Read() + if err != nil { + return err + } + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]string, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = fieldHeader[i] + } + + ucm, err := qtx.UploaderConfigMappingList(ctx, ucID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, mapping := range ucm { + if mapping.TimeseriesID == nil { + continue + } + fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID + } + + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, fields, fieldNameTimeseriesIDMap); err != nil { + return err + } + + return tx.Commit(ctx) +} + +// dux file parser +// TODO + +func timeseriesMeasurementNoteCreateOrUpdateBatch(ctx context.Context, q *db.Queries, csvReader *csv.Reader, fields []string, fieldNameTimeseriesIDMap map[string]uuid.UUID) error { + chunkSize := 1_000 + createMmtParams 
:= make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, chunkSize) + createNoteParams := make([]db.TimeseriesNoteCreateOrUpdateBatchParams, chunkSize) + var mmtIdx, noteIdx int + for { + record, err := csvReader.Read() + if err == io.EOF { + break + } + if err != nil { + return err + } + + t, err := time.Parse(record[0], time.RFC3339) + if err != nil { + return err + } + + // TODO: if the time isn't in UTC, we need to cast it as local time for both timeseries mmt and notes sql + for idx, cell := range record[2:] { + fieldName := fields[idx] + tsID, ok := fieldNameTimeseriesIDMap[fieldName] + if !ok { + continue + } + v, err := strconv.ParseFloat(cell, 64) + if err != nil { + continue + } + createMmtParams[mmtIdx] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Value: v, + } + mmtIdx++ + if mmtIdx == chunkSize { + var err error + q.TimeseriesMeasurementCreateOrUpdateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + mmtIdx = 0 + } + // TODO: also do a check if any fields are present and valid for notes (masked/annotated/validated) + if math.IsNaN(v) || math.IsInf(v, 0) { + masked := true + createNoteParams[noteIdx] = db.TimeseriesNoteCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Masked: &masked, + } + noteIdx++ + if noteIdx == chunkSize { + var err error + q.TimeseriesNoteCreateOrUpdateBatch(ctx, createNoteParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + noteIdx = 0 + } + } + } + } + if mmtIdx != 0 { + var err error + q.TimeseriesMeasurementCreateOrUpdateBatch(ctx, createMmtParams[:mmtIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + if noteIdx != 0 { + var err error + q.TimeseriesNoteCreateOrUpdateBatch(ctx, createNoteParams[:noteIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + return nil +} From 660025504c4f08e6d4142609097a1d6ac453e7dd Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 15 Nov 2024 
15:54:10 -0500 Subject: [PATCH 21/23] add time/notes options to config --- api/internal/db/batch.go | 115 +++++++++++ api/internal/db/models.go | 4 + api/internal/db/querier.go | 3 + api/internal/db/uploader.sql_gen.go | 38 +++- api/internal/dto/uploader.go | 23 ++- api/internal/handler/collection_groups.go | 2 +- api/internal/handler/project.go | 1 + api/internal/server/docs/openapi.json | 178 ++++++++++++++---- api/internal/server/docs/openapi.yaml | 174 +++++++++++++---- api/internal/service/uploader_parser.go | 122 ++++++++---- .../repeat/0170__views_uploader.sql | 4 +- .../V1.21.00__uploader_config_offset.sql | 3 + api/queries/measurement.sql | 12 ++ api/queries/uploader.sql | 4 + report/generated.d.ts | 148 ++++++++++++--- 15 files changed, 688 insertions(+), 143 deletions(-) create mode 100644 api/migrations/schema/V1.21.00__uploader_config_offset.sql diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go index 48ad9c28..2cc43e7f 100644 --- a/api/internal/db/batch.go +++ b/api/internal/db/batch.go @@ -12,6 +12,7 @@ import ( "github.com/google/uuid" "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" ) var ( @@ -1883,6 +1884,61 @@ func (b *TimeseriesMeasurementCreateBatchBatchResults) Close() error { return b.br.Close() } +const timeseriesMeasurementCreateOrUpdateAtTimezoneBatch = `-- name: TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) +values ($1, (($3::timestamp at time zone $2::text) at time zone 'UTC')::timestamptz, $4) +on conflict on constraint timeseries_unique_time do update set value = excluded.value +` + +type TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Timezone string `json:"timezone"` + LocalTime pgtype.Timestamp `json:"local_time"` + Value float64 
`json:"value"` +} + +func (q *Queries) TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams) *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Timezone, + a.LocalTime, + a.Value, + } + batch.Queue(timeseriesMeasurementCreateOrUpdateAtTimezoneBatch, vals...) + } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const timeseriesMeasurementCreateOrUpdateBatch = `-- name: TimeseriesMeasurementCreateOrUpdateBatch :batchexec insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) on conflict on constraint timeseries_unique_time do update set value = excluded.value @@ -2091,6 +2147,65 @@ func (b *TimeseriesNoteCreateBatchBatchResults) Close() error { return b.br.Close() } +const timeseriesNoteCreateOrUpdateAtTimezoneBatch = `-- name: TimeseriesNoteCreateOrUpdateAtTimezoneBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) +values ($1, (($3::timestamp at time zone $2::text) at time zone 'UTC')::timestamptz, $4, $5, $6) +on conflict on constraint notes_unique_time do nothing +` + +type TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams struct { + TimeseriesID uuid.UUID 
`json:"timeseries_id"` + Timezone string `json:"timezone"` + LocalTime pgtype.Timestamp `json:"local_time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams) *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Timezone, + a.LocalTime, + a.Masked, + a.Validated, + a.Annotation, + } + batch.Queue(timeseriesNoteCreateOrUpdateAtTimezoneBatch, vals...) + } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const timeseriesNoteCreateOrUpdateBatch = `-- name: TimeseriesNoteCreateOrUpdateBatch :batchexec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation diff --git a/api/internal/db/models.go b/api/internal/db/models.go index 25979c8e..b7c180ec 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -929,6 +929,8 @@ type UploaderConfig struct { MaskedField *string `json:"masked_field"` CommentFieldEnabled bool `json:"comment_field_enabled"` CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 
`json:"row_offset"` } type UploaderConfigMapping struct { @@ -1428,4 +1430,6 @@ type VUploaderConfig struct { MaskedField *string `json:"masked_field"` CommentFieldEnabled bool `json:"comment_field_enabled"` CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` } diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index 1c276a76..fea94f1e 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -298,6 +298,7 @@ type Querier interface { TimeseriesMeasurementCreate(ctx context.Context, arg TimeseriesMeasurementCreateParams) error TimeseriesMeasurementCreateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateBatchParams) *TimeseriesMeasurementCreateBatchBatchResults TimeseriesMeasurementCreateOrUpdate(ctx context.Context, arg TimeseriesMeasurementCreateOrUpdateParams) error + TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams) *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults TimeseriesMeasurementCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateBatchParams) *TimeseriesMeasurementCreateOrUpdateBatchBatchResults TimeseriesMeasurementDelete(ctx context.Context, arg TimeseriesMeasurementDeleteParams) error TimeseriesMeasurementDeleteBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteBatchParams) *TimeseriesMeasurementDeleteBatchBatchResults @@ -308,6 +309,7 @@ type Querier interface { TimeseriesNoteCreate(ctx context.Context, arg TimeseriesNoteCreateParams) error TimeseriesNoteCreateBatch(ctx context.Context, arg []TimeseriesNoteCreateBatchParams) *TimeseriesNoteCreateBatchBatchResults TimeseriesNoteCreateOrUpdate(ctx context.Context, arg TimeseriesNoteCreateOrUpdateParams) error + TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams) 
*TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults TimeseriesNoteCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateBatchParams) *TimeseriesNoteCreateOrUpdateBatchBatchResults TimeseriesNoteDelete(ctx context.Context, arg TimeseriesNoteDeleteParams) error TimeseriesNoteDeleteBatch(ctx context.Context, arg []TimeseriesNoteDeleteBatchParams) *TimeseriesNoteDeleteBatchBatchResults @@ -317,6 +319,7 @@ type Querier interface { UnitsList(ctx context.Context) ([]VUnit, error) UploaderConfigCreate(ctx context.Context, arg UploaderConfigCreateParams) (uuid.UUID, error) UploaderConfigDelete(ctx context.Context, id uuid.UUID) error + UploaderConfigGet(ctx context.Context, id uuid.UUID) (VUploaderConfig, error) UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VUploaderConfig, error) UploaderConfigMappingCreateBatch(ctx context.Context, arg []UploaderConfigMappingCreateBatchParams) *UploaderConfigMappingCreateBatchBatchResults UploaderConfigMappingDeleteForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error diff --git a/api/internal/db/uploader.sql_gen.go b/api/internal/db/uploader.sql_gen.go index 206dadc7..ca218c1a 100644 --- a/api/internal/db/uploader.sql_gen.go +++ b/api/internal/db/uploader.sql_gen.go @@ -52,8 +52,42 @@ func (q *Queries) UploaderConfigDelete(ctx context.Context, id uuid.UUID) error return err } +const uploaderConfigGet = `-- name: UploaderConfigGet :one +select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset from v_uploader_config where id=$1 +` + +func (q *Queries) UploaderConfigGet(ctx context.Context, id uuid.UUID) (VUploaderConfig, error) { + row := q.db.QueryRow(ctx, uploaderConfigGet, id) + var i VUploaderConfig + err := row.Scan( 
+ &i.ID, + &i.ProjectID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedAt, + &i.CreatedBy, + &i.CreatedByUsername, + &i.UpdatedBy, + &i.UpdatedAt, + &i.UpdatedByUsername, + &i.Type, + &i.TzName, + &i.TimeField, + &i.ValidatedFieldEnabled, + &i.ValidatedField, + &i.MaskedFieldEnabled, + &i.MaskedField, + &i.CommentFieldEnabled, + &i.CommentField, + &i.ColumnOffset, + &i.RowOffset, + ) + return i, err +} + const uploaderConfigListForProject = `-- name: UploaderConfigListForProject :many -select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field from v_uploader_config where project_id=$1 +select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset from v_uploader_config where project_id=$1 ` func (q *Queries) UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VUploaderConfig, error) { @@ -86,6 +120,8 @@ func (q *Queries) UploaderConfigListForProject(ctx context.Context, projectID uu &i.MaskedField, &i.CommentFieldEnabled, &i.CommentField, + &i.ColumnOffset, + &i.RowOffset, ); err != nil { return nil, err } diff --git a/api/internal/dto/uploader.go b/api/internal/dto/uploader.go index f9ed85c1..f54ceb00 100644 --- a/api/internal/dto/uploader.go +++ b/api/internal/dto/uploader.go @@ -11,13 +11,22 @@ const ( ) type UploaderConfig struct { - ID uuid.UUID `json:"id" db:"id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Name string `json:"name" db:"name"` - Slug string `json:"slug" db:"slug"` - Description string `json:"description" db:"description"` - Type 
UploaderConfigType `json:"type" db:"type"` - TzName string `json:"tz_name" db:"tz_name"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` AuditInfo } diff --git a/api/internal/handler/collection_groups.go b/api/internal/handler/collection_groups.go index 9aaf25ed..ef7781ec 100644 --- a/api/internal/handler/collection_groups.go +++ b/api/internal/handler/collection_groups.go @@ -44,7 +44,7 @@ func (h *ApiHandler) ListCollectionGroups(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param collection_group_id path string true "collection group uuid" Format(uuid) -// @Success 200 {object} db.VCollectionGroupDetail +// @Success 200 {object} db.VCollectionGroupDetails // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/project.go b/api/internal/handler/project.go index 8442206e..7cea35c4 100644 --- a/api/internal/handler/project.go +++ b/api/internal/handler/project.go @@ -8,6 +8,7 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/google/uuid" "github.com/labstack/echo/v4" diff --git 
a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index d5cb9e7d..216a12b1 100644 --- a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -2317,7 +2317,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/db.InstrumentGroup" + "$ref" : "#/components/schemas/db.InstrumentGroupUpdateRow" } } }, @@ -4292,7 +4292,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/db.VTimeseriesMeasurement" + "$ref" : "#/components/schemas/db.MeasurementCollection" }, "type" : "array" } @@ -5103,7 +5103,7 @@ "content" : { "application/json" : { "schema" : { - "type" : "integer" + "$ref" : "#/components/schemas/service.ProjectCount" } } }, @@ -5961,7 +5961,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/db.VCollectionGroupDetail" + "$ref" : "#/components/schemas/db.VCollectionGroupDetails" } } }, @@ -8452,7 +8452,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/db.VTimeseriesCwm" + "$ref" : "#/components/schemas/db.VTimeseriesCwms" }, "type" : "array" } @@ -8862,15 +8862,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/db.ProfileProjectRoleGetRow" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -12436,7 +12436,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/db.VTimeseriesMeasurement" + "$ref" : "#/components/schemas/db.MeasurementCollection" }, "type" : "array" } @@ -12942,6 +12942,38 @@ }, "type" : "object" }, + "db.InstrumentGroupUpdateRow" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : 
"string" + }, + "slug" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, "db.InstrumentIDName" : { "properties" : { "instrument_id" : { @@ -13013,6 +13045,43 @@ "type" : "string", "x-enum-varnames" : [ "JobStatusSUCCESS", "JobStatusFAIL", "JobStatusINIT" ] }, + "db.Measurement" : { + "properties" : { + "annotation" : { + "type" : "string" + }, + "error" : { + "type" : "string" + }, + "masked" : { + "type" : "boolean" + }, + "time" : { + "type" : "string" + }, + "validated" : { + "type" : "boolean" + }, + "value" : { + "type" : "number" + } + }, + "type" : "object" + }, + "db.MeasurementCollection" : { + "properties" : { + "items" : { + "items" : { + "$ref" : "#/components/schemas/db.Measurement" + }, + "type" : "array" + }, + "timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, "db.MeasurementCollectionLean" : { "properties" : { "items" : { @@ -13086,6 +13155,29 @@ }, "type" : "object" }, + "db.ProfileProjectRoleGetRow" : { + "properties" : { + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "profile_id" : { + "type" : "string" + }, + "role" : { + "type" : "string" + }, + "role_id" : { + "type" : "string" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, "db.ProfileProjectRoleListForProjectRow" : { "properties" : { "email" : { @@ -13423,7 +13515,7 @@ }, "type" : "object" }, - "db.VCollectionGroupDetail" : { + "db.VCollectionGroupDetails" : { "properties" : { "created_at" : { "type" : "string" @@ -14275,7 +14367,7 @@ }, "type" : "object" }, - "db.VTimeseriesCwm" : { + "db.VTimeseriesCwms" : { "properties" : { "cwms_extent_earliest_time" : { "type" : "string" @@ -14331,29 +14423,6 @@ }, "type" : "object" }, - "db.VTimeseriesMeasurement" : { - "properties" : { - "annotation" : { - "type" : "string" - }, - "masked" : { - "type" : "boolean" - }, - "time" : { - "type" : "string" - }, - "timeseries_id" : 
{ - "type" : "string" - }, - "validated" : { - "type" : "boolean" - }, - "value" : { - "type" : "number" - } - }, - "type" : "object" - }, "db.VUnit" : { "properties" : { "abbreviation" : { @@ -14382,6 +14451,9 @@ }, "db.VUploaderConfig" : { "properties" : { + "column_offset" : { + "type" : "integer" + }, "comment_field" : { "type" : "string" }, @@ -14415,6 +14487,9 @@ "project_id" : { "type" : "string" }, + "row_offset" : { + "type" : "integer" + }, "slug" : { "type" : "string" }, @@ -15901,6 +15976,15 @@ }, "dto.UploaderConfig" : { "properties" : { + "column_offset" : { + "type" : "integer" + }, + "comment_field" : { + "type" : "string" + }, + "comment_field_enabled" : { + "type" : "boolean" + }, "created_at" : { "type" : "string" }, @@ -15916,15 +16000,27 @@ "id" : { "type" : "string" }, + "masked_field" : { + "type" : "string" + }, + "masked_field_enabled" : { + "type" : "boolean" + }, "name" : { "type" : "string" }, "project_id" : { "type" : "string" }, + "row_offset" : { + "type" : "integer" + }, "slug" : { "type" : "string" }, + "time_field" : { + "type" : "string" + }, "type" : { "$ref" : "#/components/schemas/dto.UploaderConfigType" }, @@ -15939,6 +16035,12 @@ }, "updatedd_at" : { "type" : "string" + }, + "validated_field" : { + "type" : "string" + }, + "validated_field_enabled" : { + "type" : "boolean" } }, "type" : "object" @@ -16097,6 +16199,14 @@ }, "type" : "object" }, + "service.ProjectCount" : { + "properties" : { + "project_count" : { + "type" : "integer" + } + }, + "type" : "object" + }, "service.ReportConfigWithPlotConfigs" : { "properties" : { "created_at" : { diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml index 2ce95707..2e342ba3 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -1537,7 +1537,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/db.InstrumentGroup' + $ref: '#/components/schemas/db.InstrumentGroupUpdateRow' 
description: OK "400": content: @@ -2854,7 +2854,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/db.VTimeseriesMeasurement' + $ref: '#/components/schemas/db.MeasurementCollection' type: array description: OK "400": @@ -3386,7 +3386,7 @@ paths: content: application/json: schema: - type: integer + $ref: '#/components/schemas/service.ProjectCount' description: OK "400": content: @@ -3962,7 +3962,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/db.VCollectionGroupDetail' + $ref: '#/components/schemas/db.VCollectionGroupDetails' description: OK "400": content: @@ -5651,7 +5651,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/db.VTimeseriesCwm' + $ref: '#/components/schemas/db.VTimeseriesCwms' type: array description: OK "400": @@ -5929,12 +5929,12 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: - type: string - description: OK + $ref: '#/components/schemas/db.ProfileProjectRoleGetRow' + description: Created "400": content: application/json: @@ -8344,7 +8344,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/db.VTimeseriesMeasurement' + $ref: '#/components/schemas/db.MeasurementCollection' type: array description: OK "400": @@ -8779,6 +8779,37 @@ components: updated_by: type: string type: object + db.InstrumentGroupUpdateRow: + example: + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_at: created_at + description: description + id: id + created_by: created_by + slug: slug + properties: + created_at: + type: string + created_by: + type: string + description: + type: string + id: + type: string + name: + type: string + project_id: + type: string + slug: + type: string + updated_at: + type: string + updated_by: + type: string + type: object db.InstrumentIDName: example: instrument_name: instrument_name @@ -8852,6 +8883,52 @@ components: - JobStatusSUCCESS - JobStatusFAIL - 
JobStatusINIT + db.Measurement: + example: + annotation: annotation + validated: true + masked: true + time: time + error: error + value: 0.8008281904610115 + properties: + annotation: + type: string + error: + type: string + masked: + type: boolean + time: + type: string + validated: + type: boolean + value: + type: number + type: object + db.MeasurementCollection: + example: + timeseries_id: timeseries_id + items: + - annotation: annotation + validated: true + masked: true + time: time + error: error + value: 0.8008281904610115 + - annotation: annotation + validated: true + masked: true + time: time + error: error + value: 0.8008281904610115 + properties: + items: + items: + $ref: '#/components/schemas/db.Measurement' + type: array + timeseries_id: + type: string + type: object db.MeasurementCollectionLean: example: timeseries_id: timeseries_id @@ -8927,6 +9004,28 @@ components: username: type: string type: object + db.ProfileProjectRoleGetRow: + example: + role: role + role_id: role_id + profile_id: profile_id + id: id + email: email + username: username + properties: + email: + type: string + id: + type: string + profile_id: + type: string + role: + type: string + role_id: + type: string + username: + type: string + type: object db.ProfileProjectRoleListForProjectRow: example: role: role @@ -9283,7 +9382,7 @@ components: warning_interval: type: string type: object - db.VCollectionGroupDetail: + db.VCollectionGroupDetails: example: timeseries: - instrument: instrument @@ -10229,7 +10328,7 @@ components: variable: type: object type: object - db.VTimeseriesCwm: + db.VTimeseriesCwms: example: cwms_office_id: cwms_office_id instrument: instrument @@ -10284,28 +10383,6 @@ components: variable: type: object type: object - db.VTimeseriesMeasurement: - example: - annotation: annotation - timeseries_id: timeseries_id - validated: true - masked: true - time: time - value: 0.8008281904610115 - properties: - annotation: - type: string - masked: - type: boolean - time: - 
type: string - timeseries_id: - type: string - validated: - type: boolean - value: - type: number - type: object db.VUnit: example: measure: measure @@ -10336,6 +10413,7 @@ components: validated_field: validated_field created_at: created_at description: description + row_offset: 6 comment_field: comment_field type: csv created_by: created_by @@ -10346,6 +10424,7 @@ components: updated_at: updated_at comment_field_enabled: true project_id: project_id + column_offset: 0 name: name updated_by: updated_by masked_field_enabled: true @@ -10354,6 +10433,8 @@ components: validated_field_enabled: true slug: slug properties: + column_offset: + type: integer comment_field: type: string comment_field_enabled: @@ -10376,6 +10457,8 @@ components: type: string project_id: type: string + row_offset: + type: integer slug: type: string time_field: @@ -11540,6 +11623,12 @@ components: type: object dto.UploaderConfig: properties: + column_offset: + type: integer + comment_field: + type: string + comment_field_enabled: + type: boolean created_at: type: string created_by: @@ -11550,12 +11639,20 @@ components: type: string id: type: string + masked_field: + type: string + masked_field_enabled: + type: boolean name: type: string project_id: type: string + row_offset: + type: integer slug: type: string + time_field: + type: string type: $ref: '#/components/schemas/dto.UploaderConfigType' tz_name: @@ -11566,6 +11663,10 @@ components: type: string updatedd_at: type: string + validated_field: + type: string + validated_field_enabled: + type: boolean type: object dto.UploaderConfigMapping: example: @@ -11724,6 +11825,13 @@ components: is_valid: type: boolean type: object + service.ProjectCount: + example: + project_count: 0 + properties: + project_count: + type: integer + type: object service.ReportConfigWithPlotConfigs: example: global_overrides: diff --git a/api/internal/service/uploader_parser.go b/api/internal/service/uploader_parser.go index f74ceaab..987d9538 100644 --- 
a/api/internal/service/uploader_parser.go +++ b/api/internal/service/uploader_parser.go @@ -3,6 +3,7 @@ package service import ( "context" "encoding/csv" + "errors" "io" "math" "strconv" @@ -11,6 +12,7 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" ) // datalogger toa5/dat parser @@ -87,7 +89,11 @@ func (s DBService) TimeseriesMeasurementCreateBatchForDataloggerFromTOA5File(ctx fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID } - if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, fields, fieldNameTimeseriesIDMap); err != nil { + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + fields: fields, + fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, + timezone: "UTC", + }); err != nil { return err } @@ -103,6 +109,16 @@ func (s DBService) TimeseriesMeasurementCreateBatchFromTOA5File(ctx context.Cont defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) + uc, err := qtx.UploaderConfigGet(ctx, ucID) + if err != nil { + return err + } + + ucm, err := qtx.UploaderConfigMappingList(ctx, ucID) + if err != nil { + return err + } + reader := csv.NewReader(r) _, err = reader.Read() @@ -129,11 +145,6 @@ func (s DBService) TimeseriesMeasurementCreateBatchFromTOA5File(ctx context.Cont fields[i] = fieldHeader[i] } - ucm, err := qtx.UploaderConfigMappingList(ctx, ucID) - if err != nil { - return err - } - fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) for _, mapping := range ucm { if mapping.TimeseriesID == nil { @@ -142,7 +153,11 @@ func (s DBService) TimeseriesMeasurementCreateBatchFromTOA5File(ctx context.Cont fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID } - if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, fields, fieldNameTimeseriesIDMap); err != nil { + if err := 
timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + fields: fields, + fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, + timezone: uc.TzName, + }); err != nil { return err } @@ -158,25 +173,39 @@ func (s DBService) TimeseriesMeasurementCreateBatchFromCSVFile(ctx context.Conte defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - reader := csv.NewReader(r) - - fieldHeader, err := reader.Read() + uc, err := qtx.UploaderConfigGet(ctx, ucID) if err != nil { return err } - // first two columns are timestamp and record number - // we only want to collect the measurement fields here - fields := make([]string, len(fieldHeader)-2) - for i := 2; i < len(fieldHeader); i++ { - fields[i] = fieldHeader[i] + ucm, err := qtx.UploaderConfigMappingList(ctx, ucID) + if err != nil { + return err } - ucm, err := qtx.UploaderConfigMappingList(ctx, ucID) + reader := csv.NewReader(r) + fieldHeader, err := reader.Read() if err != nil { return err } + timeFieldIdx := -1 + + fields := make([]string, len(fieldHeader)) + for idx := range fieldHeader { + header := fieldHeader[idx] + switch { + case uc.TimeField == header: + timeFieldIdx = idx + default: + fields[idx] = header + } + } + + if timeFieldIdx == -1 { + return errors.New("time field specified in uploader config does not exist") + } + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) for _, mapping := range ucm { if mapping.TimeseriesID == nil { @@ -185,7 +214,11 @@ func (s DBService) TimeseriesMeasurementCreateBatchFromCSVFile(ctx context.Conte fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID } - if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, fields, fieldNameTimeseriesIDMap); err != nil { + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + fields: fields, + fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, + timezone: uc.TzName, + }); err != 
nil { return err } @@ -195,10 +228,20 @@ func (s DBService) TimeseriesMeasurementCreateBatchFromCSVFile(ctx context.Conte // dux file parser // TODO -func timeseriesMeasurementNoteCreateOrUpdateBatch(ctx context.Context, q *db.Queries, csvReader *csv.Reader, fields []string, fieldNameTimeseriesIDMap map[string]uuid.UUID) error { +type timeseriesMeasurementNoteCreateOrUpdateBatchParams struct { + fields []string + fieldNameTimeseriesIDMap map[string]uuid.UUID + fieldNameNotesMap map[string]struct{} + timezone string + timeFieldIdx int + colOffset int + rowOffset int +} + +func timeseriesMeasurementNoteCreateOrUpdateBatch(ctx context.Context, q *db.Queries, csvReader *csv.Reader, arg timeseriesMeasurementNoteCreateOrUpdateBatchParams) error { chunkSize := 1_000 - createMmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, chunkSize) - createNoteParams := make([]db.TimeseriesNoteCreateOrUpdateBatchParams, chunkSize) + createMmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams, chunkSize) + createNoteParams := make([]db.TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams, chunkSize) var mmtIdx, noteIdx int for { record, err := csvReader.Read() @@ -209,48 +252,53 @@ func timeseriesMeasurementNoteCreateOrUpdateBatch(ctx context.Context, q *db.Que return err } - t, err := time.Parse(record[0], time.RFC3339) + // TODO: do we need to accept other time formats? 
+ t, err := time.Parse(record[arg.timeFieldIdx], time.RFC3339) if err != nil { return err } - // TODO: if the time isn't in UTC, we need to cast it as local time for both timeseries mmt and notes sql - for idx, cell := range record[2:] { - fieldName := fields[idx] - tsID, ok := fieldNameTimeseriesIDMap[fieldName] - if !ok { + // get notes content and pply to all timeseries in loop + hasNotes := len(arg.fieldNameNotesMap) != 0 + if hasNotes { + } + + for idx, cell := range record[arg.colOffset:] { + fieldName := arg.fields[idx] + tsID, ok := arg.fieldNameTimeseriesIDMap[fieldName] + if ok { continue } v, err := strconv.ParseFloat(cell, 64) if err != nil { continue } - createMmtParams[mmtIdx] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{ + createMmtParams[mmtIdx] = db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams{ TimeseriesID: tsID, - Time: t, + LocalTime: pgtype.Timestamp{Time: t, Valid: true}, + Timezone: arg.timezone, Value: v, } mmtIdx++ if mmtIdx == chunkSize { var err error - q.TimeseriesMeasurementCreateOrUpdateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + q.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) if err != nil { return err } mmtIdx = 0 } - // TODO: also do a check if any fields are present and valid for notes (masked/annotated/validated) - if math.IsNaN(v) || math.IsInf(v, 0) { - masked := true - createNoteParams[noteIdx] = db.TimeseriesNoteCreateOrUpdateBatchParams{ + + if masked := math.IsNaN(v) || math.IsInf(v, 0); masked { + createNoteParams[noteIdx] = db.TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams{ TimeseriesID: tsID, - Time: t, + LocalTime: pgtype.Timestamp{Time: t, Valid: true}, Masked: &masked, } noteIdx++ if noteIdx == chunkSize { var err error - q.TimeseriesNoteCreateOrUpdateBatch(ctx, createNoteParams).Exec(batchExecErr(&err)) + q.TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx, createNoteParams).Exec(batchExecErr(&err)) if err != nil { return err } @@ 
-261,14 +309,14 @@ func timeseriesMeasurementNoteCreateOrUpdateBatch(ctx context.Context, q *db.Que } if mmtIdx != 0 { var err error - q.TimeseriesMeasurementCreateOrUpdateBatch(ctx, createMmtParams[:mmtIdx]).Exec(batchExecErr(&err)) + q.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx, createMmtParams[:mmtIdx]).Exec(batchExecErr(&err)) if err != nil { return err } } if noteIdx != 0 { var err error - q.TimeseriesNoteCreateOrUpdateBatch(ctx, createNoteParams[:noteIdx]).Exec(batchExecErr(&err)) + q.TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx, createNoteParams[:noteIdx]).Exec(batchExecErr(&err)) if err != nil { return err } diff --git a/api/migrations/repeat/0170__views_uploader.sql b/api/migrations/repeat/0170__views_uploader.sql index 50145f8b..647d79e9 100644 --- a/api/migrations/repeat/0170__views_uploader.sql +++ b/api/migrations/repeat/0170__views_uploader.sql @@ -19,7 +19,9 @@ CREATE VIEW v_uploader_config AS ( u.masked_field_enabled, u.masked_field, u.comment_field_enabled, - u.comment_field + u.comment_field, + u.column_offset, + u.row_offset FROM uploader_config u INNER JOIN profile pc ON u.created_by = pc.id LEFT JOIN profile pu ON u.updated_by = pu.id diff --git a/api/migrations/schema/V1.21.00__uploader_config_offset.sql b/api/migrations/schema/V1.21.00__uploader_config_offset.sql new file mode 100644 index 00000000..6c7c3f4e --- /dev/null +++ b/api/migrations/schema/V1.21.00__uploader_config_offset.sql @@ -0,0 +1,3 @@ +ALTER TABLE uploader_config +ADD COLUMN column_offset integer not null default 0, +ADD COLUMN row_offset integer not null default 0; diff --git a/api/queries/measurement.sql b/api/queries/measurement.sql index 994fa1ad..d84070bc 100644 --- a/api/queries/measurement.sql +++ b/api/queries/measurement.sql @@ -33,6 +33,12 @@ insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, on conflict on constraint timeseries_unique_time do update set value = excluded.value; +-- name: 
TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) +values (sqlc.arg(timeseries_id), ((sqlc.arg(local_time)::timestamp at time zone sqlc.arg(timezone)::text) at time zone 'UTC')::timestamptz, sqlc.arg(value)) +on conflict on constraint timeseries_unique_time do update set value = excluded.value; + + -- name: TimeseriesNoteCreate :exec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do nothing; @@ -43,6 +49,12 @@ insert into timeseries_notes (timeseries_id, time, masked, validated, annotation on conflict on constraint notes_unique_time do nothing; +-- name: TimeseriesNoteCreateOrUpdateAtTimezoneBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) +values (sqlc.arg(timeseries_id), ((sqlc.arg(local_time)::timestamp at time zone sqlc.arg(timezone)::text) at time zone 'UTC')::timestamptz, sqlc.arg(masked), sqlc.arg(validated), sqlc.arg(annotation)) +on conflict on constraint notes_unique_time do nothing; + + -- name: TimeseriesNoteCreateOrUpdate :exec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; diff --git a/api/queries/uploader.sql b/api/queries/uploader.sql index c3b105bd..246e2894 100644 --- a/api/queries/uploader.sql +++ b/api/queries/uploader.sql @@ -2,6 +2,10 @@ select * from v_uploader_config where project_id=$1; +-- name: UploaderConfigGet :one +select * from v_uploader_config where id=$1; + + -- name: UploaderConfigCreate :one insert into uploader_config (project_id, name, slug, description, created_at, created_by, type, tz_name) values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) diff --git 
a/report/generated.d.ts b/report/generated.d.ts index 9c5ebfb9..90517a63 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -1330,7 +1330,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["db.InstrumentGroup"]; + "application/json": components["schemas"]["db.InstrumentGroupUpdateRow"]; }; }; /** @description Bad Request */ @@ -2515,7 +2515,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["db.VTimeseriesMeasurement"][]; + "application/json": components["schemas"]["db.MeasurementCollection"][]; }; }; /** @description Bad Request */ @@ -3007,7 +3007,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": number; + "application/json": components["schemas"]["service.ProjectCount"]; }; }; /** @description Bad Request */ @@ -3465,7 +3465,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["db.VCollectionGroupDetail"]; + "application/json": components["schemas"]["db.VCollectionGroupDetails"]; }; }; /** @description Bad Request */ @@ -4901,7 +4901,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["db.VTimeseriesCwm"][]; + "application/json": components["schemas"]["db.VTimeseriesCwms"][]; }; }; /** @description Bad Request */ @@ -5078,10 +5078,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": string; + "application/json": components["schemas"]["db.ProfileProjectRoleGetRow"]; }; }; /** @description Bad Request */ @@ -7126,7 +7126,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["db.VTimeseriesMeasurement"][]; + "application/json": components["schemas"]["db.MeasurementCollection"][]; }; }; /** 
@description Bad Request */ @@ -7546,6 +7546,30 @@ export interface components { updated_at?: string; updated_by?: string; }; + /** + * @example { + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "description": "description", + * "id": "id", + * "created_by": "created_by", + * "slug": "slug" + * } + */ + "db.InstrumentGroupUpdateRow": { + created_at?: string; + created_by?: string; + description?: string; + id?: string; + name?: string; + project_id?: string; + slug?: string; + updated_at?: string; + updated_by?: string; + }; /** * @example { * "instrument_name": "instrument_name", @@ -7600,6 +7624,51 @@ export interface components { }; /** @enum {string} */ "db.JobStatus": "SUCCESS" | "FAIL" | "INIT"; + /** + * @example { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * } + */ + "db.Measurement": { + annotation?: string; + error?: string; + masked?: boolean; + time?: string; + validated?: boolean; + value?: number; + }; + /** + * @example { + * "timeseries_id": "timeseries_id", + * "items": [ + * { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * }, + * { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * } + * ] + * } + */ + "db.MeasurementCollection": { + items?: components["schemas"]["db.Measurement"][]; + timeseries_id?: string; + }; /** * @example { * "timeseries_id": "timeseries_id", @@ -7658,6 +7727,24 @@ export interface components { id?: string; username?: string; }; + /** + * @example { + * "role": "role", + * "role_id": "role_id", + * "profile_id": "profile_id", + * "id": "id", + * "email": "email", + * "username": "username" + * } + */ + 
"db.ProfileProjectRoleGetRow": { + email?: string; + id?: string; + profile_id?: string; + role?: string; + role_id?: string; + username?: string; + }; /** * @example { * "role": "role", @@ -7997,7 +8084,7 @@ export interface components { * "slug": "slug" * } */ - "db.VCollectionGroupDetail": { + "db.VCollectionGroupDetails": { created_at?: string; created_by?: string; id?: string; @@ -8762,7 +8849,7 @@ export interface components { * "parameter_id": "parameter_id" * } */ - "db.VTimeseriesCwm": { + "db.VTimeseriesCwms": { cwms_extent_earliest_time?: string; cwms_extent_latest_time?: string; cwms_office_id?: string; @@ -8781,24 +8868,6 @@ export interface components { unit_id?: string; variable?: Record; }; - /** - * @example { - * "annotation": "annotation", - * "timeseries_id": "timeseries_id", - * "validated": true, - * "masked": true, - * "time": "time", - * "value": 0.8008281904610115 - * } - */ - "db.VTimeseriesMeasurement": { - annotation?: string; - masked?: boolean; - time?: string; - timeseries_id?: string; - validated?: boolean; - value?: number; - }; /** * @example { * "measure": "measure", @@ -8824,6 +8893,7 @@ export interface components { * "validated_field": "validated_field", * "created_at": "created_at", * "description": "description", + * "row_offset": 6, * "comment_field": "comment_field", * "type": "csv", * "created_by": "created_by", @@ -8834,6 +8904,7 @@ export interface components { * "updated_at": "updated_at", * "comment_field_enabled": true, * "project_id": "project_id", + * "column_offset": 0, * "name": "name", * "updated_by": "updated_by", * "masked_field_enabled": true, @@ -8844,6 +8915,7 @@ export interface components { * } */ "db.VUploaderConfig": { + column_offset?: number; comment_field?: string; comment_field_enabled?: boolean; created_at?: string; @@ -8855,6 +8927,7 @@ export interface components { masked_field_enabled?: boolean; name?: string; project_id?: string; + row_offset?: number; slug?: string; time_field?: string; type?: 
components["schemas"]["db.UploaderConfigType"]; @@ -9592,19 +9665,28 @@ export interface components { value?: boolean; }; "dto.UploaderConfig": { + column_offset?: number; + comment_field?: string; + comment_field_enabled?: boolean; created_at?: string; created_by?: string; created_by_username?: string; description?: string; id?: string; + masked_field?: string; + masked_field_enabled?: boolean; name?: string; project_id?: string; + row_offset?: number; slug?: string; + time_field?: string; type?: components["schemas"]["dto.UploaderConfigType"]; tz_name?: string; updated_by?: string; updated_by_username?: string; updatedd_at?: string; + validated_field?: string; + validated_field_enabled?: boolean; }; /** * @example { @@ -9737,6 +9819,14 @@ export interface components { errors?: string[]; is_valid?: boolean; }; + /** + * @example { + * "project_count": 0 + * } + */ + "service.ProjectCount": { + project_count?: number; + }; /** * @example { * "global_overrides": { From fca10c62724e3473dd6acb631e54a5c681460c19 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 15 Nov 2024 16:12:39 -0500 Subject: [PATCH 22/23] fix: missing null fields for insert and update queries --- api/internal/db/uploader.sql_gen.go | 117 ++++++++++++++++++++++++---- api/internal/service/uploader.go | 46 +++++++---- api/queries/uploader.sql | 51 +++++++++++- 3 files changed, 180 insertions(+), 34 deletions(-) diff --git a/api/internal/db/uploader.sql_gen.go b/api/internal/db/uploader.sql_gen.go index ca218c1a..7b6861cb 100644 --- a/api/internal/db/uploader.sql_gen.go +++ b/api/internal/db/uploader.sql_gen.go @@ -13,30 +13,86 @@ import ( ) const uploaderConfigCreate = `-- name: UploaderConfigCreate :one -insert into uploader_config (project_id, name, slug, description, created_at, created_by, type, tz_name) -values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) +insert into uploader_config ( + project_id, + name, + slug, + description, + created_at, + created_by, + type, + 
tz_name, + time_field, + validated_field_enabled, + validated_field, + masked_field_enabled, + masked_field, + comment_field_enabled, + comment_field, + column_offset, + row_offset +) +values ( + $1, + $2, + slugify($3, 'uploader_config'), + $4, + $5, + $6, + $7, + $8, + $9, + $10, + $11, + $12, + $13, + $14, + $15, + $16, + $17 +) returning id ` type UploaderConfigCreateParams struct { - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Description string `json:"description"` - CreatedAt time.Time `json:"created_at"` - CreatedBy uuid.UUID `json:"created_by"` - Type UploaderConfigType `json:"type"` - TzName string `json:"tz_name"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` + CreatedAt time.Time `json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` } func (q *Queries) UploaderConfigCreate(ctx context.Context, arg UploaderConfigCreateParams) (uuid.UUID, error) { row := q.db.QueryRow(ctx, uploaderConfigCreate, arg.ProjectID, arg.Name, + arg.Slug, arg.Description, arg.CreatedAt, arg.CreatedBy, arg.Type, arg.TzName, + arg.TimeField, + arg.ValidatedFieldEnabled, + arg.ValidatedField, + arg.MaskedFieldEnabled, + arg.MaskedField, + arg.CommentFieldEnabled, + arg.CommentField, + arg.ColumnOffset, + arg.RowOffset, ) var id uuid.UUID err := row.Scan(&id) @@ -173,18 +229,36 @@ update uploader_config set updated_by=$4, updated_at=$5, type=$6, - tz_name=$7 
+ tz_name=$7, + time_field=$8, + validated_field_enabled=$9, + validated_field=$10, + masked_field_enabled=$11, + masked_field=$12, + comment_field_enabled=$13, + comment_field=$14, + column_offset=$15, + row_offset=$16 where id=$1 ` type UploaderConfigUpdateParams struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Description string `json:"description"` - UpdatedBy *uuid.UUID `json:"updated_by"` - UpdatedAt *time.Time `json:"updated_at"` - Type UploaderConfigType `json:"type"` - TzName string `json:"tz_name"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` } func (q *Queries) UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUpdateParams) error { @@ -196,6 +270,15 @@ func (q *Queries) UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUp arg.UpdatedAt, arg.Type, arg.TzName, + arg.TimeField, + arg.ValidatedFieldEnabled, + arg.ValidatedField, + arg.MaskedFieldEnabled, + arg.MaskedField, + arg.CommentFieldEnabled, + arg.CommentField, + arg.ColumnOffset, + arg.RowOffset, ) return err } diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index 3d3d4c87..4069ab05 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -10,25 +10,43 @@ import ( func (s DBService) UploaderConfigCreate(ctx context.Context, uc dto.UploaderConfig) (uuid.UUID, error) 
{ return s.Queries.UploaderConfigCreate(ctx, db.UploaderConfigCreateParams{ - ProjectID: uc.ProjectID, - Name: uc.Name, - Description: uc.Description, - Type: db.UploaderConfigType(uc.Type), - TzName: uc.TzName, - CreatedBy: uc.CreatedBy, - CreatedAt: uc.CreatedAt, + ProjectID: uc.ProjectID, + Name: uc.Name, + Description: uc.Description, + Type: db.UploaderConfigType(uc.Type), + TzName: uc.TzName, + CreatedBy: uc.CreatedBy, + CreatedAt: uc.CreatedAt, + TimeField: uc.TimeField, + ValidatedFieldEnabled: uc.ValidatedFieldEnabled, + ValidatedField: uc.ValidatedField, + MaskedFieldEnabled: uc.MaskedFieldEnabled, + MaskedField: uc.MaskedField, + CommentFieldEnabled: uc.CommentFieldEnabled, + CommentField: uc.CommentField, + ColumnOffset: uc.ColumnOffset, + RowOffset: uc.RowOffset, }) } func (s DBService) UploaderConfigUpdate(ctx context.Context, uc dto.UploaderConfig) error { return s.Queries.UploaderConfigUpdate(ctx, db.UploaderConfigUpdateParams{ - ID: uc.ID, - Name: uc.Name, - Description: uc.Description, - Type: db.UploaderConfigType(uc.Type), - TzName: uc.TzName, - UpdatedBy: uc.UpdatedBy, - UpdatedAt: uc.UpdatedAt, + ID: uc.ID, + Name: uc.Name, + Description: uc.Description, + Type: db.UploaderConfigType(uc.Type), + TzName: uc.TzName, + UpdatedBy: uc.UpdatedBy, + UpdatedAt: uc.UpdatedAt, + TimeField: uc.TimeField, + ValidatedFieldEnabled: uc.ValidatedFieldEnabled, + ValidatedField: uc.ValidatedField, + MaskedFieldEnabled: uc.MaskedFieldEnabled, + MaskedField: uc.MaskedField, + CommentFieldEnabled: uc.CommentFieldEnabled, + CommentField: uc.CommentField, + ColumnOffset: uc.ColumnOffset, + RowOffset: uc.RowOffset, }) } diff --git a/api/queries/uploader.sql b/api/queries/uploader.sql index 246e2894..f395a4a1 100644 --- a/api/queries/uploader.sql +++ b/api/queries/uploader.sql @@ -7,8 +7,44 @@ select * from v_uploader_config where id=$1; -- name: UploaderConfigCreate :one -insert into uploader_config (project_id, name, slug, description, created_at, created_by, type, 
tz_name) -values ($1, $2, slugify($2, 'uploader_config'), $3, $4, $5, $6, $7) +insert into uploader_config ( + project_id, + name, + slug, + description, + created_at, + created_by, + type, + tz_name, + time_field, + validated_field_enabled, + validated_field, + masked_field_enabled, + masked_field, + comment_field_enabled, + comment_field, + column_offset, + row_offset +) +values ( + sqlc.arg(project_id), + sqlc.arg(name), + slugify(sqlc.arg(slug), 'uploader_config'), + sqlc.arg(description), + sqlc.arg(created_at), + sqlc.arg(created_by), + sqlc.arg(type), + sqlc.arg(tz_name), + sqlc.arg(time_field), + sqlc.arg(validated_field_enabled), + sqlc.arg(validated_field), + sqlc.arg(masked_field_enabled), + sqlc.arg(masked_field), + sqlc.arg(comment_field_enabled), + sqlc.arg(comment_field), + sqlc.arg(column_offset), + sqlc.arg(row_offset) +) returning id; @@ -19,7 +55,16 @@ update uploader_config set updated_by=$4, updated_at=$5, type=$6, - tz_name=$7 + tz_name=$7, + time_field=$8, + validated_field_enabled=$9, + validated_field=$10, + masked_field_enabled=$11, + masked_field=$12, + comment_field_enabled=$13, + comment_field=$14, + column_offset=$15, + row_offset=$16 where id=$1; From 324448cdaf7553a337089a490e1c2486ee5c8d74 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 15 Nov 2024 17:51:23 -0500 Subject: [PATCH 23/23] feat: implement parser logic --- api/internal/handler/uploader.go | 36 ++++ api/internal/server/api.go | 1 + api/internal/service/datalogger_parser.go | 96 +++++++++++ api/internal/service/uploader.go | 32 ++++ api/internal/service/uploader_parser.go | 195 ++++++---------------- 5 files changed, 214 insertions(+), 146 deletions(-) create mode 100644 api/internal/service/datalogger_parser.go diff --git a/api/internal/handler/uploader.go b/api/internal/handler/uploader.go index 0453f939..b7668072 100644 --- a/api/internal/handler/uploader.go +++ b/api/internal/handler/uploader.go @@ -225,6 +225,7 @@ func (h *ApiHandler) 
UpdateUploaderConfigMappings(c echo.Context) error { // @Param uploader_config_id path string true "uploader config uuid" Format(uuid) // @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [delete] func (h *ApiHandler) DeleteAllUploaderConfigMappingsForUploaderConfig(c echo.Context) error { _, err := uuid.Parse(c.Param("project_id")) @@ -240,3 +241,38 @@ func (h *ApiHandler) DeleteAllUploaderConfigMappingsForUploaderConfig(c echo.Con } return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) } + +// UploadFileForUploaderConfig godoc +// +// @Summary uploads a file for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Success 201 created +// @Failure 400 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [delete] +func (h *ApiHandler) UploadFileForUploaderConfig(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + file, err := c.FormFile("file") + if err != nil { + return err + } + src, err := file.Open() + if err != nil { + return err + } + defer src.Close() + if err := h.DBService.UploaderConfigUploadFile(c.Request().Context(), projectID, ucID, src); err != nil { + return httperr.InternalServerError(err) + } + return c.NoContent(http.StatusCreated) +} diff --git a/api/internal/server/api.go b/api/internal/server/api.go index 910de006..09445892 100644 --- a/api/internal/server/api.go +++ b/api/internal/server/api.go @@ -356,4 +356,5 @@ func (r *ApiServer) 
RegisterRoutes(h *handler.ApiHandler) { r.private.POST("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.CreateUploaderConfigMappings) r.private.PUT("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.UpdateUploaderConfigMappings) r.private.DELETE("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.DeleteAllUploaderConfigMappingsForUploaderConfig) + r.private.POST("/projects/:project_id/uploader_configs/:uploader_config_id/uploads", h.UploadFileForUploaderConfig) } diff --git a/api/internal/service/datalogger_parser.go b/api/internal/service/datalogger_parser.go new file mode 100644 index 00000000..7387707d --- /dev/null +++ b/api/internal/service/datalogger_parser.go @@ -0,0 +1,96 @@ +package service + +import ( + "context" + "encoding/csv" + "io" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" +) + +// datalogger toa5/dat parser +func (s DBService) TimeseriesMeasurementCreateBatchForDataloggerFromTOA5File(ctx context.Context, r io.Reader) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + envHeader, err := reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + // skip units header + _, err = reader.Read() + if err != nil { + return err + } + // skip process header + _, err = reader.Read() + if err != nil { + return err + } + + meta := dto.Environment{ + // StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + // OSVersion: envHeader[4], + // ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ + Model: &meta.Model, + Sn: meta.SerialNo, + }) + if err != nil { + return err + } + tableID, err := 
qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dl.ID, + TableName: meta.TableName, + }) + if err != nil { + return err + } + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]string, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = fieldHeader[i] + } + + eqt, err := qtx.EquivalencyTableGet(ctx, tableID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, eqtRow := range eqt.Fields { + if eqtRow.TimeseriesID == nil { + continue + } + fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID + } + + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + fields: fields, + fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, + timezone: "UTC", + }); err != nil { + return err + } + + return tx.Commit(ctx) +} diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go index 4069ab05..1cf45c81 100644 --- a/api/internal/service/uploader.go +++ b/api/internal/service/uploader.go @@ -2,6 +2,8 @@ package service import ( "context" + "errors" + "io" "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" @@ -90,3 +92,33 @@ func (s DBService) UploaderConfigMappingUpdateBatch(ctx context.Context, ucID uu } return tx.Commit(ctx) } + +func (s DBService) UploaderConfigUploadFile(ctx context.Context, projectID, ucID uuid.UUID, r io.Reader) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + uc, err := qtx.UploaderConfigGet(ctx, ucID) + if err != nil { + return err + } + ucm, err := qtx.UploaderConfigMappingList(ctx, uc.ID) + if err != nil { + return err + } + switch uc.Type { + case db.UploaderConfigTypeCsv: + err = 
s.TimeseriesMeasurementCreateBatchFromCSVFile(ctx, r, uc, ucm) + case db.UploaderConfigTypeToa5: + err = s.TimeseriesMeasurementCreateBatchFromTOA5File(ctx, r, uc, ucm) + case db.UploaderConfigTypeDux: + err = s.TimeseriesMeasurementCreateBatchFromDuxFile(ctx, r, uc, ucm) + default: + return errors.New("not implemented") + } + + return tx.Commit(ctx) +} diff --git a/api/internal/service/uploader_parser.go b/api/internal/service/uploader_parser.go index 987d9538..c0325863 100644 --- a/api/internal/service/uploader_parser.go +++ b/api/internal/service/uploader_parser.go @@ -7,121 +7,30 @@ import ( "io" "math" "strconv" + "strings" "time" "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" "github.com/jackc/pgx/v5/pgtype" ) -// datalogger toa5/dat parser -func (s DBService) TimeseriesMeasurementCreateBatchForDataloggerFromTOA5File(ctx context.Context, r io.Reader) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - reader := csv.NewReader(r) - - envHeader, err := reader.Read() - if err != nil { - return err - } - fieldHeader, err := reader.Read() - if err != nil { - return err - } - // skip units header - _, err = reader.Read() - if err != nil { - return err - } - // skip process header - _, err = reader.Read() - if err != nil { - return err - } - - meta := dto.Environment{ - // StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - // OSVersion: envHeader[4], - // ProgName: envHeader[5], - TableName: envHeader[6], - } - - dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ - Model: &meta.Model, - Sn: meta.SerialNo, - }) - if err != nil { - return err - } - tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ - DataloggerID: dl.ID, - TableName: meta.TableName, - }) - if err != nil { - return err - } - - // first two columns 
// timeseriesMeasurementNoteCreateOrUpdateBatchParams configures the shared
// row-batch parser timeseriesMeasurementNoteCreateOrUpdateBatch.
//
// NOTE(review): the consumer treats -1 as "column not present" for the
// *ColIdx fields, but the Go zero value is 0 — a valid column index.
// Callers that omit these fields silently point them at column 0 (the
// timestamp column). Either always set them to -1 explicitly, or change
// the sentinel handling in the consumer.
type timeseriesMeasurementNoteCreateOrUpdateBatchParams struct {
	// field (column) names of the measurement columns, excluding any
	// leading timestamp/record-number columns (see colOffset)
	fields []string
	// maps a field name to the timeseries it uploads into; fields with
	// no mapping are skipped
	fieldNameTimeseriesIDMap map[string]uuid.UUID
	// column index holding the masked flag, or -1 when absent
	maskedColIdx int
	// column index holding the validated flag, or -1 when absent
	validatedColIdx int
	// column index holding the note comment, or -1 when absent.
	// NOTE(review): the consumer builds the comment value from
	// record[arg.validatedColIdx] and lowercases it — looks like a
	// copy-paste bug; it should read record[arg.commentColIdx] and
	// preserve the comment's case. Confirm and fix in the consumer.
	commentColIdx int
	// timezone name the measurement timestamps are local to
	timezone string
	// column index of the timestamp within each record (default 0).
	// NOTE(review): the consumer calls
	// time.Parse(record[arg.timeFieldIdx], time.RFC3339) — layout and
	// value are swapped; time.Parse takes the layout first.
	timeFieldIdx int
	// number of leading columns (e.g. timestamp, record number) to skip
	// before the measurement columns begin
	colOffset int
	// number of leading data rows to skip
	rowOffset int
}
timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, s.Queries, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ fields: fields, fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, timezone: uc.TzName, + colOffset: 2, }); err != nil { return err } - - return tx.Commit(ctx) + return nil } // cusom csv parser -func (s DBService) TimeseriesMeasurementCreateBatchFromCSVFile(ctx context.Context, r io.Reader, ucID uuid.UUID) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - uc, err := qtx.UploaderConfigGet(ctx, ucID) - if err != nil { - return err - } - - ucm, err := qtx.UploaderConfigMappingList(ctx, ucID) - if err != nil { - return err - } - +func (s DBService) TimeseriesMeasurementCreateBatchFromCSVFile(ctx context.Context, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error { reader := csv.NewReader(r) fieldHeader, err := reader.Read() if err != nil { @@ -214,28 +106,18 @@ func (s DBService) TimeseriesMeasurementCreateBatchFromCSVFile(ctx context.Conte fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID } - if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, s.Queries, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ fields: fields, fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, timezone: uc.TzName, }); err != nil { return err } - - return tx.Commit(ctx) + return nil } -// dux file parser -// TODO - -type timeseriesMeasurementNoteCreateOrUpdateBatchParams struct { - fields []string - fieldNameTimeseriesIDMap map[string]uuid.UUID - fieldNameNotesMap map[string]struct{} - timezone string - timeFieldIdx int - colOffset int - rowOffset int +func (s DBService) 
// TimeseriesMeasurementCreateBatchFromDuxFile parses a dux measurement file
// for an uploader config. Placeholder — parsing is not implemented yet, and
// every call returns a non-nil error.
// NOTE(review): "errors" does not appear in this file's visible import
// block — confirm it is imported, otherwise this will not compile.
func (s DBService) TimeseriesMeasurementCreateBatchFromDuxFile(ctx context.Context, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error {
	return errors.New("not implemented")
}
pgtype.Timestamp{Time: t, Valid: true}, - Masked: &masked, + Timezone: arg.timezone, + Masked: masked, + Validated: validated, + Annotation: comment, } noteIdx++ if noteIdx == chunkSize {