diff --git a/.gitignore b/.gitignore
index 443ace390c..abbad6e072 100644
--- a/.gitignore
+++ b/.gitignore
@@ -77,3 +77,4 @@ tutorial/mimic_iii_demo/data/**
 /node_modules
 cypress/screenshots/
 cypress/videos/
+/cypress/support/test_data/table.cqpp
diff --git a/autodoc/src/main/java/com/bakdata/conquery/Constants.java b/autodoc/src/main/java/com/bakdata/conquery/Constants.java
index 405dc6d19c..30fc07cd0a 100644
--- a/autodoc/src/main/java/com/bakdata/conquery/Constants.java
+++ b/autodoc/src/main/java/com/bakdata/conquery/Constants.java
@@ -31,10 +31,6 @@
 import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue;
 import com.bakdata.conquery.apiv1.query.concept.filter.ValidityDateContainer;
 import com.bakdata.conquery.io.cps.CPSType;
-import com.bakdata.conquery.io.jackson.serializer.MetaIdRef;
-import com.bakdata.conquery.io.jackson.serializer.MetaIdRefCollection;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRef;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection;
 import com.bakdata.conquery.model.Base;
 import com.bakdata.conquery.model.Group;
 import com.bakdata.conquery.models.common.Range;
@@ -182,8 +178,6 @@ public class Constants {
 public static final String JSON_CREATOR = JsonCreator.class.getName();
 public static final String CPS_TYPE = CPSType.class.getName();
- public static final Set ID_REF = Set.of(NsIdRef.class.getName(), MetaIdRef.class.getName());
- public static final Set ID_REF_COL = Set.of(NsIdRefCollection.class.getName(), MetaIdRefCollection.class.getName());
 public static final String JSON_IGNORE = JsonIgnore.class.getName();
 public static final String JSON_BACK_REFERENCE = JsonBackReference.class.getName();
 public static final String PATH = Path.class.getName();
diff --git a/autodoc/src/main/java/com/bakdata/conquery/handler/GroupHandler.java b/autodoc/src/main/java/com/bakdata/conquery/handler/GroupHandler.java
index ea99912a73..7a36c1973b 100644
--- a/autodoc/src/main/java/com/bakdata/conquery/handler/GroupHandler.java
+++ b/autodoc/src/main/java/com/bakdata/conquery/handler/GroupHandler.java
@@ -267,16 +267,7 @@ private void handleField(ClassInfo currentType, FieldInfo field) throws IOExcept
 final TypeSignature typeSignature = field.getTypeSignatureOrTypeDescriptor();
 final Ctx ctx = new Ctx().withField(field);
- final String type;
- if (ID_REF.stream().anyMatch(field::hasAnnotation)) {
- type = ID_OF + printType(ctx.withIdOf(true), typeSignature);
- }
- else if (ID_REF_COL.stream().anyMatch(field::hasAnnotation)) {
- type = LIST_OF + ID_OF + StringUtils.removeStart(printType(ctx.withIdOf(true), typeSignature), LIST_OF);
- }
- else {
- type = printType(ctx, typeSignature);
- }
+ final String type = printType(ctx, typeSignature);
 out.table(
 editLink(introspec),
diff --git a/backend/src/main/java/com/bakdata/conquery/ResultHeaders.java b/backend/src/main/java/com/bakdata/conquery/ResultHeaders.java
index 9d3eb527e3..e42347476b 100644
--- a/backend/src/main/java/com/bakdata/conquery/ResultHeaders.java
+++ b/backend/src/main/java/com/bakdata/conquery/ResultHeaders.java
@@ -9,64 +9,105 @@
 import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.resultinfo.FixedLabelResultInfo;
 import com.bakdata.conquery.models.query.resultinfo.ResultInfo;
-import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters;
+import com.bakdata.conquery.models.query.resultinfo.printers.Printer;
+import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory;
+import com.bakdata.conquery.models.query.resultinfo.printers.common.LocalizedEnumPrinter;
 import com.bakdata.conquery.models.types.ResultType;
 import com.bakdata.conquery.models.types.SemanticType;
 import lombok.experimental.UtilityClass;
 @UtilityClass
 public class ResultHeaders {
- public static ResultInfo datesInfo(PrintSettings settings) {
- final String label = C10nCache.getLocalized(ResultHeadersC10n.class, settings.getLocale()).dates();
+ public static ResultInfo datesInfo() {
 final ResultType.ListT type = new ResultType.ListT<>(ResultType.Primitive.DATE_RANGE);
- return new FixedLabelResultInfo(label, label, type, Set.of(new SemanticType.EventDateT()), settings, ResultPrinters.printerFor(type, settings));
+ return new FixedLabelResultInfo(type, Set.of(new SemanticType.EventDateT())){
+ @Override
+ public String userColumnName(PrintSettings printSettings) {
+ return C10nCache.getLocalized(ResultHeadersC10n.class, printSettings.getLocale()).dates();
+ }
+ };
 }
- public static ResultInfo historyDatesInfo(PrintSettings settings) {
- final String label = C10nCache.getLocalized(ResultHeadersC10n.class, settings.getLocale()).dates();
+ public static ResultInfo historyDatesInfo() {
 final ResultType.ListT type = new ResultType.ListT<>(ResultType.Primitive.DATE_RANGE);
- return new FixedLabelResultInfo(label, label, type, Set.of(new SemanticType.EventDateT(), new SemanticType.GroupT()), settings, ResultPrinters.printerFor(type, settings));
+ return new FixedLabelResultInfo(type, Set.of(new SemanticType.EventDateT(), new SemanticType.GroupT())) {
+ @Override
+ public String userColumnName(PrintSettings printSettings) {
+ return C10nCache.getLocalized(ResultHeadersC10n.class, printSettings.getLocale()).dates();
+ }
+ };
 }
- public static ResultInfo sourceInfo(PrintSettings settings) {
- final String label = C10nCache.getLocalized(ResultHeadersC10n.class, settings.getLocale()).source();
-
- return new FixedLabelResultInfo(label, label, ResultType.Primitive.STRING, Set.of(new SemanticType.SourcesT(), new SemanticType.CategoricalT(), new SemanticType.GroupT()), settings, ResultPrinters.printerFor(ResultType.Primitive.STRING, settings));
+ public static ResultInfo sourceInfo() {
+ return new FixedLabelResultInfo(ResultType.Primitive.STRING, Set.of(new SemanticType.SourcesT(), new SemanticType.CategoricalT(), new SemanticType.GroupT())) {
+ @Override
+ public String userColumnName(PrintSettings printSettings) {
+ return C10nCache.getLocalized(ResultHeadersC10n.class, printSettings.getLocale()).source();
+ }
+ };
 }
- public static ResultInfo formContextInfo(PrintSettings settings) {
- final String label = C10nCache.getLocalized(ResultHeadersC10n.class, settings.getLocale()).index();
+ public static ResultInfo formContextInfo() {
- return new FixedLabelResultInfo(label, label, ResultType.Primitive.INTEGER, Set.of(), settings, ResultPrinters.printerFor(ResultType.Primitive.INTEGER, settings));
+ return new FixedLabelResultInfo(ResultType.Primitive.INTEGER, Set.of()) {
+ @Override
+ public String userColumnName(PrintSettings printSettings) {
+ return C10nCache.getLocalized(ResultHeadersC10n.class, printSettings.getLocale()).index();
+ }
+ };
 }
- public static ResultInfo formDateRangeInfo(PrintSettings settings) {
- final String label = C10nCache.getLocalized(ResultHeadersC10n.class, settings.getLocale())
- .dateRange();
+ public static ResultInfo formDateRangeInfo() {
- return new FixedLabelResultInfo(label, label, ResultType.Primitive.DATE_RANGE, Set.of(), settings, ResultPrinters.printerFor(ResultType.Primitive.DATE_RANGE, settings));
+ return new FixedLabelResultInfo(ResultType.Primitive.DATE_RANGE, Set.of()) {
+ @Override
+ public String userColumnName(PrintSettings printSettings) {
+ return C10nCache.getLocalized(ResultHeadersC10n.class, printSettings.getLocale()).dateRange();
+ }
+ };
 }
- public static ResultInfo formResolutionInfo(PrintSettings settings) {
- final String label = C10nCache.getLocalized(ResultHeadersC10n.class, settings.getLocale()).resolution();
+ public static ResultInfo formResolutionInfo() {
+
+ return new FixedLabelResultInfo(ResultType.Primitive.STRING, Set.of()) {
+ @Override
+ public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) {
+ return new LocalizedEnumPrinter<>(printSettings, Resolution.class);
+ }
- return new FixedLabelResultInfo(label, label, ResultType.Primitive.STRING, Set.of(), settings, new ResultPrinters.LocalizedEnumPrinter<>(settings, Resolution.class));
+ @Override
+ public String userColumnName(PrintSettings printSettings) {
+ return C10nCache.getLocalized(ResultHeadersC10n.class, printSettings.getLocale()).resolution();
+ }
+ };
 }
- public static ResultInfo formEventDateInfo(PrintSettings settings) {
- final String label = C10nCache.getLocalized(ResultHeadersC10n.class, settings.getLocale())
- .eventDate();
+ public static ResultInfo formEventDateInfo() {
- return new FixedLabelResultInfo(label, label, ResultType.Primitive.DATE, Set.of(), settings, ResultPrinters.printerFor(ResultType.Primitive.DATE, settings));
+ return new FixedLabelResultInfo(ResultType.Primitive.DATE, Set.of()) {
+ @Override
+ public String userColumnName(PrintSettings printSettings) {
+ return C10nCache.getLocalized(ResultHeadersC10n.class, printSettings.getLocale()).eventDate();
+ }
+ };
 }
- public static ResultInfo formObservationScopeInfo(PrintSettings settings) {
- final String label = C10nCache.getLocalized(ResultHeadersC10n.class, settings.getLocale()).observationScope();
+ public static ResultInfo formObservationScopeInfo() {
+
+ return new FixedLabelResultInfo(ResultType.Primitive.STRING, Set.of()) {
+ @Override
+ public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) {
+ return new LocalizedEnumPrinter<>(printSettings, FeatureGroup.class);
+ }
- return new FixedLabelResultInfo(label, label, ResultType.Primitive.STRING, Set.of(), settings, new ResultPrinters.LocalizedEnumPrinter<>(settings, FeatureGroup.class));
+ @Override
+ public String userColumnName(PrintSettings printSettings) {
+ return C10nCache.getLocalized(ResultHeadersC10n.class, printSettings.getLocale()).observationScope();
+ }
+ };
 }
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/FilterTemplate.java b/backend/src/main/java/com/bakdata/conquery/apiv1/FilterTemplate.java
index 96e6205e84..3c034c2d1e 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/FilterTemplate.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/FilterTemplate.java
@@ -6,11 +6,10 @@
 import com.bakdata.conquery.apiv1.frontend.FrontendValue;
 import com.bakdata.conquery.io.cps.CPSType;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRef;
 import com.bakdata.conquery.models.config.IndexConfig;
-import com.bakdata.conquery.models.datasets.Dataset;
 import com.bakdata.conquery.models.datasets.concepts.Searchable;
 import com.bakdata.conquery.models.identifiable.IdentifiableImpl;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
 import com.bakdata.conquery.models.identifiable.ids.specific.SearchIndexId;
 import com.bakdata.conquery.models.index.FrontendValueIndex;
 import com.bakdata.conquery.models.index.FrontendValueIndexKey;
@@ -41,9 +40,10 @@
 @CPSType(id = "CSV_TEMPLATE", base = SearchIndex.class)
 public class FilterTemplate extends IdentifiableImpl implements Searchable, SearchIndex {
+ private static final long serialVersionUID = 1L;
+
 @NotNull
- @NsIdRef
- private Dataset dataset;
+ private DatasetId dataset;
 @NotEmpty
 private final String name;
@@ -106,6 +106,6 @@ public TrieSearch createTrieSearch(IndexConfig config) throws Ind
 @Override
 public SearchIndexId createId() {
- return new SearchIndexId(dataset.getId(), name);
+ return new SearchIndexId(dataset, name);
 }
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java
index e2c8448df5..52a007fd55 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java
@@ -19,6 +19,12 @@
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.Validator;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.UriBuilder;
 import com.bakdata.conquery.apiv1.execution.ExecutionStatus;
 import com.bakdata.conquery.apiv1.execution.FullExecutionStatus;
@@ -52,16 +58,16 @@
 import com.bakdata.conquery.models.config.ConqueryConfig;
 import com.bakdata.conquery.models.datasets.Dataset;
 import com.bakdata.conquery.models.datasets.PreviewConfig;
-import com.bakdata.conquery.models.datasets.SecondaryIdDescription;
-import com.bakdata.conquery.models.datasets.concepts.Connector;
 import com.bakdata.conquery.models.error.ConqueryError;
 import com.bakdata.conquery.models.exceptions.ValidatorHelper;
 import com.bakdata.conquery.models.execution.ExecutionState;
 import com.bakdata.conquery.models.execution.ManagedExecution;
 import com.bakdata.conquery.models.i18n.I18n;
+import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId;
 import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
 import com.bakdata.conquery.models.identifiable.ids.specific.GroupId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId;
 import com.bakdata.conquery.models.identifiable.mapping.IdPrinter;
 import com.bakdata.conquery.models.query.ExecutionManager;
 import com.bakdata.conquery.models.query.ManagedQuery;
@@ -73,6 +79,7 @@
 import com.bakdata.conquery.models.query.queryplan.DateAggregationAction;
 import com.bakdata.conquery.models.query.resultinfo.ResultInfo;
 import com.bakdata.conquery.models.query.resultinfo.UniqueNamer;
+import com.bakdata.conquery.models.query.resultinfo.printers.JavaResultPrinters;
 import com.bakdata.conquery.models.query.statistics.ResultStatistics;
 import com.bakdata.conquery.models.query.visitor.QueryVisitor;
 import com.bakdata.conquery.models.types.SemanticType;
@@ -81,12 +88,6 @@
 import com.bakdata.conquery.util.QueryUtils;
 import com.bakdata.conquery.util.QueryUtils.NamespacedIdentifiableCollector;
 import com.bakdata.conquery.util.io.IdColumnUtil;
-import jakarta.inject.Inject;
-import jakarta.servlet.http.HttpServletRequest;
-import jakarta.validation.Validator;
-import jakarta.ws.rs.BadRequestException;
-import jakarta.ws.rs.core.Response;
-import jakarta.ws.rs.core.UriBuilder;
 import lombok.AllArgsConstructor;
 import lombok.NoArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
@@ -107,17 +108,17 @@ public class QueryProcessor {
 public Stream getAllQueries(Dataset dataset, HttpServletRequest req, Subject subject, boolean allProviders) {
- final Collection allQueries = storage.getAllExecutions();
+ final Stream allQueries = storage.getAllExecutions();
 return getQueriesFiltered(dataset.getId(), RequestAwareUriBuilder.fromRequest(req), subject, allQueries, allProviders);
 }
- public Stream getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Collection allQueries, boolean allProviders) {
+ public Stream getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Stream allQueries, boolean allProviders) {
- return allQueries.stream()
+ return allQueries
 // The following only checks the dataset, under which the query was submitted, but a query can target more that
 // one dataset.
- .filter(q -> q.getDataset().getId().equals(datasetId))
+ .filter(q -> q.getDataset().equals(datasetId))
 // to exclude subtypes from somewhere else
 .filter(QueryProcessor::canFrontendRender)
 .filter(Predicate.not(ManagedExecution::isSystem))
@@ -128,7 +129,7 @@ public Stream getQueriesFiltered(DatasetId datasetId, UriBuilde
 )
 .filter(q -> subject.isPermitted(q, Ability.READ))
 .map(mq -> {
- final OverviewExecutionStatus status = mq.buildStatusOverview(uriBuilder.clone(), subject);
+ final OverviewExecutionStatus status = mq.buildStatusOverview(subject);
 if (mq.isReadyToDownload()) {
 status.setResultUrls(getResultAssets(config.getResultProviders(), mq, uriBuilder, allProviders));
@@ -199,10 +200,10 @@ public static boolean isFrontendStructure(CQElement root) {
 /**
 * Cancel a running query: Sending cancellation to shards, which will cause them to stop executing them, results are not sent back, and incoming results will be discarded.
 */
- public void cancel(Subject subject, Dataset dataset, ManagedExecution query) {
+ public void cancel(Subject subject, ManagedExecution query) {
 // Does not make sense to cancel a query that isn't running.
- ExecutionManager executionManager = datasetRegistry.get(dataset.getId()).getExecutionManager();
+ ExecutionManager executionManager = datasetRegistry.get(query.getDataset()).getExecutionManager();
 if (!query.getState().equals(ExecutionState.RUNNING)) {
 return;
 }
@@ -220,20 +221,19 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatc
 if (patch.getGroups() != null && !patch.getGroups().isEmpty()) {
- for (ManagedExecutionId managedExecutionId : execution.getSubmitted().collectRequiredQueries()) {
- final ManagedExecution subQuery = storage.getExecution(managedExecutionId);
+ for (ManagedExecutionId subExecutionId : execution.getSubmitted().collectRequiredQueries()) {
- if (!subject.isPermitted(subQuery, Ability.READ)) {
- log.warn("Not sharing {} as User {} is not allowed to see it themselves.", subQuery.getId(), subject);
+ if (!subject.isPermitted(subExecutionId, Ability.READ)) {
+ log.warn("Not sharing {} as User {} is not allowed to see it themselves.", subExecutionId, subject);
 continue;
 }
- final ConqueryPermission canReadQuery = subQuery.createPermission(Set.of(Ability.READ));
+ final ConqueryPermission canReadQuery = subExecutionId.createPermission(Set.of(Ability.READ));
 final Set groupsToShareWith = new HashSet<>(patch.getGroups());
 // Find all groups the query is already shared with, so we do not remove them, as patch is absolute
- for (Group group : storage.getAllGroups()) {
+ for (Group group : storage.getAllGroups().toList()) {
 if (groupsToShareWith.contains(group.getId())) {
 continue;
 }
@@ -249,7 +249,7 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatc
 .groups(new ArrayList<>(groupsToShareWith))
 .build();
- patchQuery(subject, subQuery, sharePatch);
+ patchQuery(subject, subExecutionId.resolve(), sharePatch);
 }
 }
@@ -260,18 +260,17 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatc
 public void reexecute(Subject subject, ManagedExecution query) {
 log.info("User[{}] reexecuted Query[{}]", subject.getId(), query);
- ExecutionManager executionManager = datasetRegistry.get(query.getDataset().getId()).getExecutionManager();
 if (!query.getState().equals(ExecutionState.RUNNING)) {
 final Namespace namespace = query.getNamespace();
- namespace.getExecutionManager().execute(namespace, query, config);
+ namespace.getExecutionManager().execute(query, config);
 }
 }
 public void deleteQuery(Subject subject, ManagedExecution execution) {
 log.info("User[{}] deleted Query[{}]", subject.getId(), execution.getId());
- datasetRegistry.get(execution.getDataset().getId())
+ datasetRegistry.get(execution.getDataset())
 .getExecutionManager() // Don't go over execution#getExecutionManager() as that's only set when query is initialized
 .clearQueryResults(execution);
@@ -279,12 +278,12 @@ public void deleteQuery(Subject subject, ManagedExecution execution) {
 }
 public ExecutionState awaitDone(ManagedExecution query, int time, TimeUnit unit) {
- final Namespace namespace = datasetRegistry.get(query.getDataset().getId());
+ final Namespace namespace = datasetRegistry.get(query.getDataset());
 return namespace.getExecutionManager().awaitDone(query, time, unit);
 }
 public FullExecutionStatus getQueryFullStatus(ManagedExecution query, Subject subject, UriBuilder url, Boolean allProviders) {
- final Namespace namespace = datasetRegistry.get(query.getDataset().getId());
+ final Namespace namespace = datasetRegistry.get(query.getDataset());
 query.initExecutable(config);
@@ -325,7 +324,7 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext
 execution = ((ManagedQuery) namespace
 .getExecutionManager()
- .createExecution(query, subject.getUser(), namespace, false));
+ .createExecution(query, subject.getId(), namespace, false));
 execution.setLastResultCount((long) statistic.getResolved().size());
@@ -341,7 +340,7 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext
 /**
 * Create and submit {@link EntityPreviewForm} for subject on to extract sources for entity, and extract some additional infos to be used as infocard.
 */
- public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uriBuilder, String idKind, String entity, List sources, Dataset dataset, Range dateRange) {
+ public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uriBuilder, String idKind, String entity, List sources, Dataset dataset, Range dateRange) {
 subject.authorize(dataset, Ability.ENTITY_PREVIEW);
 subject.authorize(dataset, Ability.PRESERVE_ID);
@@ -437,7 +436,7 @@ public ManagedExecution postQuery(Dataset dataset, QueryDescription query, Subje
 }
 // Execute the query
- return executionManager.runQuery(namespace, query, subject.getUser(), config, system);
+ return executionManager.runQuery(namespace, query, subject.getId(), config, system);
 }
 /**
@@ -459,8 +458,8 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe
 // If SecondaryIds differ from selected and prior, we cannot reuse them.
 if (query instanceof SecondaryIdQuery) {
- final SecondaryIdDescription selectedSecondaryId = ((SecondaryIdQuery) query).getSecondaryId();
- final SecondaryIdDescription reusedSecondaryId = ((SecondaryIdQuery) execution.getSubmitted()).getSecondaryId();
+ final SecondaryIdDescriptionId selectedSecondaryId = ((SecondaryIdQuery) query).getSecondaryId();
+ final SecondaryIdDescriptionId reusedSecondaryId = ((SecondaryIdQuery) execution.getSubmitted()).getSecondaryId();
 if (!selectedSecondaryId.equals(reusedSecondaryId)) {
 return null;
@@ -471,7 +470,7 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe
 if (!user.isOwner(execution)) {
 final ManagedExecution newExecution =
- executionManager.createExecution(execution.getSubmitted(), user, namespace, false);
+ executionManager.createExecution(execution.getSubmitted(), user.getId(), namespace, false);
 newExecution.setLabel(execution.getLabel());
 newExecution.setTags(execution.getTags().clone());
 storage.updateExecution(newExecution);
@@ -486,7 +485,7 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe
 log.trace("Re-executing Query {}", execution);
- executionManager.execute(namespace, execution, config);
+ executionManager.execute(execution, config);
 return execution;
@@ -585,7 +584,7 @@ public ResultStatistics getResultStatistics(SingleTableResult managedQuery) {
 new PrintSettings(true, locale, managedQuery.getNamespace(), config, null, null, decimalFormat, integerFormat);
 final UniqueNamer uniqueNamer = new UniqueNamer(printSettings);
- final List resultInfos = managedQuery.getResultInfos(printSettings);
+ final List resultInfos = managedQuery.getResultInfos();
 final Optional dateInfo =
@@ -593,7 +592,15 @@ public ResultStatistics getResultStatistics(SingleTableResult managedQuery) {
 final Optional dateIndex = dateInfo.map(resultInfos::indexOf);
- return ResultStatistics.collectResultStatistics(managedQuery, resultInfos, dateInfo, dateIndex, printSettings, uniqueNamer, config);
+ return ResultStatistics.collectResultStatistics(managedQuery,
+ resultInfos,
+ dateInfo,
+ dateIndex,
+ printSettings,
+ uniqueNamer,
+ config,
+ new JavaResultPrinters()
+ );
 }
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java
index 2984e904e0..bcdcfa1b1b 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java
@@ -3,14 +3,13 @@
 import java.util.Collection;
 import java.util.List;
 import java.util.Set;
-
 import javax.annotation.Nullable;
 import com.bakdata.conquery.apiv1.query.QueryDescription;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection;
 import com.bakdata.conquery.models.datasets.SecondaryIdDescription;
 import com.bakdata.conquery.models.error.ConqueryErrorInfo;
 import com.bakdata.conquery.models.identifiable.ids.specific.GroupId;
+import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId;
 import com.bakdata.conquery.models.query.ColumnDescriptor;
 import lombok.Data;
 import lombok.EqualsAndHashCode;
@@ -65,6 +64,5 @@ public class FullExecutionStatus extends ExecutionStatus {
 /**
 * Possible {@link SecondaryIdDescription}s available, of {@link com.bakdata.conquery.models.datasets.concepts.Concept}s used in this Query.
 */
- @NsIdRefCollection
- private Set availableSecondaryIds;
+ private Set availableSecondaryIds;
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/ExternalForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/ExternalForm.java
index e2e0141abf..13232326a8 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/ExternalForm.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/ExternalForm.java
@@ -16,14 +16,14 @@
 import com.bakdata.conquery.io.cps.CPSTypeIdResolver;
 import com.bakdata.conquery.io.cps.SubTyped;
 import com.bakdata.conquery.io.storage.MetaStorage;
-import com.bakdata.conquery.models.auth.entities.User;
-import com.bakdata.conquery.models.datasets.Dataset;
 import com.bakdata.conquery.models.execution.ManagedExecution;
 import com.bakdata.conquery.models.forms.frontendconfiguration.FormScanner;
 import com.bakdata.conquery.models.forms.frontendconfiguration.FormType;
 import com.bakdata.conquery.models.forms.managed.ExternalExecution;
 import com.bakdata.conquery.models.i18n.I18n;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
 import com.bakdata.conquery.models.query.QueryResolveContext;
 import com.bakdata.conquery.models.query.Visitable;
 import com.bakdata.conquery.models.worker.DatasetRegistry;
@@ -133,7 +133,7 @@ public String getFormType() {
 }
 @Override
- public ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) {
+ public ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) {
 return new ExternalExecution(this, user, submittedDataset, storage, datasetRegistry);
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/FormConfigAPI.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/FormConfigAPI.java
index 71fbff1ada..d82687fc10 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/FormConfigAPI.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/FormConfigAPI.java
@@ -5,9 +5,9 @@
 import jakarta.validation.constraints.NotEmpty;
 import jakarta.validation.constraints.NotNull;
-import com.bakdata.conquery.models.auth.entities.User;
 import com.bakdata.conquery.models.forms.configs.FormConfig;
 import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
 import com.bakdata.conquery.util.VariableDefaultValue;
 import com.fasterxml.jackson.databind.JsonNode;
 import lombok.AllArgsConstructor;
@@ -37,7 +37,7 @@ public class FormConfigAPI {
 @VariableDefaultValue
 @Builder.Default
 private LocalDateTime creationTime = LocalDateTime.now();
- public FormConfig intern(User owner, DatasetId dataset) {
+ public FormConfig intern(UserId owner, DatasetId dataset) {
 FormConfig intern = new FormConfig();
 intern.setFormId(formId);
 intern.setFormType(formType);
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java
index 4187562315..22b42a922c 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java
@@ -24,15 +24,14 @@
 import com.bakdata.conquery.internationalization.ExportFormC10n;
 import com.bakdata.conquery.io.cps.CPSType;
 import com.bakdata.conquery.io.storage.MetaStorage;
-import com.bakdata.conquery.models.auth.entities.User;
-import com.bakdata.conquery.models.datasets.Dataset;
-import com.bakdata.conquery.models.forms.managed.ManagedForm;
 import com.bakdata.conquery.models.forms.managed.ManagedInternalForm;
 import com.bakdata.conquery.models.forms.util.Alignment;
 import com.bakdata.conquery.models.forms.util.Resolution;
 import com.bakdata.conquery.models.forms.util.ResolutionShortNames;
 import com.bakdata.conquery.models.i18n.I18n;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
 import com.bakdata.conquery.models.query.ManagedQuery;
 import com.bakdata.conquery.models.query.QueryResolveContext;
 import com.bakdata.conquery.models.query.Visitable;
@@ -92,6 +91,30 @@ public class ExportForm extends Form implements InternalForm {
 @JsonIgnore
 @EqualsAndHashCode.Exclude
 private List resolvedResolutions;
+
+ /**
+ * Maps the given resolution to a fitting alignment. It tries to use the alignment which was given as a hint.
+ * If the alignment does not fit to a resolution (resolution is finer than the alignment), the first alignment that
+ * this resolution supports is chosen (see the alignment order in {@link Resolution})
+ * @param resolutions The temporal resolutions for which sub queries should be generated per entity
+ * @param alignmentHint The preferred calendar alignment on which the sub queries of each resolution should be aligned.
+ * Note that this alignment is chosen when a resolution is equal or coarser.
+ * @return The given resolutions mapped to a fitting calendar alignment.
+ */
+ public static List getResolutionAlignmentMap(List resolutions, Alignment alignmentHint) {
+
+ return resolutions.stream()
+ .map(r -> ResolutionAndAlignment.of(r, getFittingAlignment(alignmentHint, r)))
+ .collect(Collectors.toList());
+ }
+
+ private static Alignment getFittingAlignment(Alignment alignmentHint, Resolution resolution) {
+ if(resolution.isAlignmentSupported(alignmentHint) ) {
+ return alignmentHint;
+ }
+ return resolution.getDefaultAlignment();
+ }
+
 @Override
 public void visit(Consumer visitor) {
 visitor.accept(this);
@@ -99,7 +122,6 @@ public void visit(Consumer visitor) {
 features.forEach(visitor);
 }
-
 @Override
 public Map createSubQueries() {
 return Map.of(
@@ -108,6 +130,16 @@ public Map createSubQueries() {
 );
 }
+ @Override
+ public String getLocalizedTypeLabel() {
+ return C10N.get(ExportFormC10n.class, I18n.LOCALE.get()).getType();
+ }
+
+ @Override
+ public ManagedInternalForm toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) {
+ return new ManagedInternalForm<>(this, user, submittedDataset, storage, datasetRegistry);
+ }
+
 @Override
 public Set collectRequiredQueries() {
 if (queryGroupId == null) {
@@ -120,7 +152,7 @@ public Set collectRequiredQueries() {
 @Override
 public void resolve(QueryResolveContext context) {
 if(queryGroupId != null) {
- queryGroup = (ManagedQuery) context.getStorage().getExecution(queryGroupId);
+ queryGroup = (ManagedQuery) queryGroupId.resolve();
 prerequisite = queryGroup.getQuery();
 }
 else {
@@ -150,33 +182,19 @@ public void resolve(QueryResolveContext context) {
 }
 }
- @Override
- public String getLocalizedTypeLabel() {
- return C10N.get(ExportFormC10n.class, I18n.LOCALE.get()).getType();
- }
-
 /**
- * Maps the given resolution to a fitting alignment. It tries to use the alignment which was given as a hint.
- * If the alignment does not fit to a resolution (resolution is finer than the alignment), the first alignment that
- * this resolution supports is chosen (see the alignment order in {@link Resolution})
- * @param resolutions The temporal resolutions for which sub queries should be generated per entity
- * @param alignmentHint The preferred calendar alignment on which the sub queries of each resolution should be aligned.
- * Note that this alignment is chosen when a resolution is equal or coarser.
- * @return The given resolutions mapped to a fitting calendar alignment.
+ * Classes that can be used as Features in ExportForm, having default-exists, are triggered this way.
 */
- public static List getResolutionAlignmentMap(List resolutions, Alignment alignmentHint) {
-
- return resolutions.stream()
- .map(r -> ResolutionAndAlignment.of(r, getFittingAlignment(alignmentHint, r)))
- .collect(Collectors.toList());
- }
-
- private static Alignment getFittingAlignment(Alignment alignmentHint, Resolution resolution) {
- if(resolution.isAlignmentSupported(alignmentHint) ) {
- return alignmentHint;
+ public static interface DefaultSelectSettable {
+ public static void enable(List features) {
+ for (CQElement feature : features) {
+ if(feature instanceof DefaultSelectSettable){
+ ((DefaultSelectSettable) feature).setDefaultExists();
+ }
+ }
 }
- return resolution.getDefaultAlignment();
+
+ void setDefaultExists();
 }
 /**
@@ -197,25 +215,4 @@ public static ResolutionAndAlignment of(Resolution resolution, Alignment alignme
 return new ResolutionAndAlignment(resolution, alignment);
 }
 }
-
- /**
- * Classes that can be used as Features in ExportForm, having default-exists, are triggered this way.
- */
- public static interface DefaultSelectSettable {
- public static void enable(List features) {
- for (CQElement feature : features) {
- if(feature instanceof DefaultSelectSettable){
- ((DefaultSelectSettable) feature).setDefaultExists();
- }
- }
- }
-
- void setDefaultExists();
- }
-
-
- @Override
- public ManagedForm toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) {
- return new ManagedInternalForm<>(this, user, submittedDataset, storage, datasetRegistry);
- }
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java
index 168087f4b3..83a033cc72 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java
@@ -23,12 +23,12 @@
 import com.bakdata.conquery.internationalization.ExportFormC10n;
 import com.bakdata.conquery.io.cps.CPSType;
 import com.bakdata.conquery.io.storage.MetaStorage;
-import com.bakdata.conquery.models.auth.entities.User;
 import com.bakdata.conquery.models.common.Range;
-import com.bakdata.conquery.models.datasets.Dataset;
 import com.bakdata.conquery.models.forms.managed.ManagedInternalForm;
 import com.bakdata.conquery.models.i18n.I18n;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
 import com.bakdata.conquery.models.query.ManagedQuery;
 import com.bakdata.conquery.models.query.QueryResolveContext;
 import com.bakdata.conquery.models.query.Visitable;
@@ -126,7 +126,7 @@ public String getLocalizedTypeLabel() {
 @Override
- public ManagedInternalForm toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) {
+ public ManagedInternalForm toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) {
 return new ManagedInternalForm<>(this, user, submittedDataset, storage, datasetRegistry);
 }
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java b/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java
index 9769140747..0ee8f35b54 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java
@@ -3,10 +3,8 @@
 import java.util.Collection;
 import java.util.List;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRef;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection;
-import com.bakdata.conquery.models.datasets.concepts.Concept;
-import com.bakdata.conquery.models.datasets.concepts.filters.Filter;
+import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId;
+import com.bakdata.conquery.models.identifiable.ids.specific.FilterId;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Data;
@@ -22,12 +20,10 @@ public static class Labelled {
 @JsonProperty("default")
 private final Collection defaultConnectors;
- @NsIdRefCollection
- private final List> searchFilters;
+ private final List searchFilters;
 /**
 * Search concept needs to be parent of searchFilters, so frontend can resolve the filters.
 */
- @NsIdRef
- private final Concept searchConcept;
+ private final ConceptId searchConcept;
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/ArrayConceptQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/ArrayConceptQuery.java
index 32e7871b23..057b7ac86c 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/ArrayConceptQuery.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/ArrayConceptQuery.java
@@ -11,7 +11,6 @@
 import com.bakdata.conquery.io.jackson.View;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
 import com.bakdata.conquery.models.query.DateAggregationMode;
-import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.QueryPlanContext;
 import com.bakdata.conquery.models.query.QueryResolveContext;
 import com.bakdata.conquery.models.query.Visitable;
@@ -93,9 +92,9 @@ public void collectRequiredQueries(Set requiredQueries) {
 }
 @Override
- public List getResultInfos(PrintSettings printSettings) {
+ public List getResultInfos() {
 final List resultInfos = new ArrayList<>();
- ResultInfo dateInfo = ResultHeaders.datesInfo(printSettings);
+ ResultInfo dateInfo = ResultHeaders.datesInfo();
 if(getResolvedDateAggregationMode() != DateAggregationMode.NONE){
 // Add one DateInfo for the whole Query
@@ -103,7 +102,7 @@ public List getResultInfos(PrintSettings printSettings) {
 }
 int lastIndex = resultInfos.size();
- childQueries.forEach(q -> resultInfos.addAll(q.getResultInfos(printSettings)));
+ childQueries.forEach(q -> resultInfos.addAll(q.getResultInfos()));
 if(!resultInfos.isEmpty()) {
 // Remove DateInfo from each childQuery
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/CQElement.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/CQElement.java
index cf309dcf90..61806adfb4 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/CQElement.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/CQElement.java
@@ -11,7 +11,6 @@
 import com.bakdata.conquery.io.cps.CPSBase;
 import com.bakdata.conquery.io.cps.CPSType;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
-import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.QueryExecutionContext;
 import com.bakdata.conquery.models.query.QueryPlanContext;
 import com.bakdata.conquery.models.query.QueryResolveContext;
@@ -79,7 +78,7 @@ public final Set collectRequiredQueries() {
 public abstract void collectRequiredQueries(Set requiredQueries) ;
 @JsonIgnore
- public abstract List getResultInfos(PrintSettings settings);
+ public abstract List getResultInfos();
 public void visit(Consumer visitor) {
 visitor.accept(this);
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/CQYes.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/CQYes.java
index 2e3e34a727..8bdc6a00d6 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/CQYes.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/CQYes.java
@@ -6,7 +6,6 @@
 import com.bakdata.conquery.io.cps.CPSType;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
-import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.QueryPlanContext;
 import com.bakdata.conquery.models.query.QueryResolveContext;
 import com.bakdata.conquery.models.query.queryplan.ConceptQueryPlan;
@@ -33,7 +32,7 @@ public void collectRequiredQueries(Set requiredQueries) {
 }
 @Override
- public List getResultInfos(PrintSettings settings) {
+ public List getResultInfos() {
 return Collections.emptyList();
 }
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/ConceptQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/ConceptQuery.java
index 6f028f3684..0f3393f006 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/ConceptQuery.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/ConceptQuery.java
@@ -10,7 +10,6 @@
 import com.bakdata.conquery.io.jackson.View;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
 import com.bakdata.conquery.models.query.DateAggregationMode;
-import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.QueryExecutionContext;
 import com.bakdata.conquery.models.query.QueryPlanContext;
 import com.bakdata.conquery.models.query.QueryResolveContext;
@@ -80,16 +79,16 @@ public void resolve(QueryResolveContext context) {
 }
 @Override
- public List getResultInfos(PrintSettings printSettings) {
+ public List getResultInfos() {
 Preconditions.checkNotNull(resolvedDateAggregationMode);
 final List resultInfos = new ArrayList<>();
 if (resolvedDateAggregationMode != DateAggregationMode.NONE) {
- resultInfos.add(ResultHeaders.datesInfo(printSettings));
+ resultInfos.add(ResultHeaders.datesInfo());
 }
- resultInfos.addAll(root.getResultInfos(printSettings));
+ resultInfos.addAll(root.getResultInfos());
 return resultInfos;
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java
index 59d80a904c..7255213f29 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java
@@ -6,13 +6,13 @@
 import java.util.stream.Stream;
 import com.bakdata.conquery.io.storage.MetaStorage;
-import com.bakdata.conquery.models.auth.entities.User;
-import com.bakdata.conquery.models.datasets.Dataset;
 import com.bakdata.conquery.models.execution.ExecutionState;
 import com.bakdata.conquery.models.execution.ManagedExecution;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
+import com.bakdata.conquery.models.query.ExecutionManager;
 import com.bakdata.conquery.models.query.ManagedQuery;
-import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.QueryPlanContext;
 import com.bakdata.conquery.models.query.QueryResolveContext;
 import com.bakdata.conquery.models.query.queryplan.QueryPlan;
@@ -39,10 +39,10 @@
 public Set collectRequiredQueries() {
 }
 @JsonIgnore
- public abstract List getResultInfos(PrintSettings printSettings);
+ public abstract List getResultInfos();
 @Override
- public ManagedQuery toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) {
+ public ManagedQuery toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) {
 return new ManagedQuery(this, user, submittedDataset, storage, datasetRegistry);
 }
@@ -59,7 +59,7 @@ public CQElement getReusableComponents() {
 *
 * @param results
 * @return the number of results in the result List.
- * @see ManagedExecution#finish(ExecutionState) for how it's used.
+ * @see ManagedExecution#finish(ExecutionState, ExecutionManager) for how it's used.
 */
 public long countResults(Stream results) {
 return results.map(EntityResult::listResultLines)
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java
index 3255482131..c457bde3da 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java
@@ -9,14 +9,16 @@
 import com.bakdata.conquery.io.cps.CPSBase;
 import com.bakdata.conquery.io.storage.MetaStorage;
 import com.bakdata.conquery.models.auth.entities.Subject;
-import com.bakdata.conquery.models.auth.entities.User;
 import com.bakdata.conquery.models.auth.permissions.Ability;
 import com.bakdata.conquery.models.datasets.Dataset;
 import com.bakdata.conquery.models.datasets.concepts.Concept;
 import com.bakdata.conquery.models.datasets.concepts.ConceptElement;
 import com.bakdata.conquery.models.execution.ManagedExecution;
+import com.bakdata.conquery.models.identifiable.ids.Id;
 import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
 import com.bakdata.conquery.models.query.QueryExecutionContext;
 import com.bakdata.conquery.models.query.QueryResolveContext;
 import com.bakdata.conquery.models.query.RequiredEntities;
@@ -37,7 +39,7 @@ public interface QueryDescription extends Visitable {
 * Transforms the submitted query to an {@link ManagedExecution}.
 * In this step some external dependencies are resolve (such as {@link CQExternal}).
 * However, steps that require add or manipulates queries programmatically based on the submitted query
- * should be done in an extra init procedure (see {@link ManagedExecution#doInitExecutable()}.
+ * should be done in an extra init procedure (see {@link ManagedExecution#doInitExecutable(Namespace)}).
 * These steps are executed right before the execution of the query and not necessary in this creation phase.
 *
 * @param user
 * @param submittedDataset
 * @param storage
 * @return
 */
- ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry);
+ ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry);
 Set collectRequiredQueries();
@@ -81,15 +83,16 @@ static void authorizeQuery(QueryDescription queryDescription, Subject subject, D
 // Generate DatasetPermissions
 final Set datasets = nsIdCollector.getIdentifiables().stream()
 .map(NamespacedIdentifiable::getDataset)
+ .map(Id::resolve)
 .collect(Collectors.toSet());
 subject.authorize(datasets, Ability.READ);
 // Generate ConceptPermissions
- final Set concepts = nsIdCollector.getIdentifiables().stream()
+ final Set> concepts = nsIdCollector.getIdentifiables().stream()
 .filter(ConceptElement.class::isInstance)
 .map(ConceptElement.class::cast)
- .map(ConceptElement::getConcept)
+ .>map(ConceptElement::getConcept)
 .collect(Collectors.toSet());
 subject.authorize(concepts, Ability.READ);
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java
index 85ba3ddc9a..ed18c4234c 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java
@@ -5,20 +5,22 @@
 import java.util.List;
 import java.util.Set;
 import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import jakarta.validation.constraints.NotNull;
 import com.bakdata.conquery.apiv1.query.concept.filter.CQTable;
 import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept;
 import com.bakdata.conquery.io.cps.CPSType;
 import com.bakdata.conquery.io.jackson.View;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRef;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection;
 import com.bakdata.conquery.models.datasets.Column;
 import com.bakdata.conquery.models.datasets.SecondaryIdDescription;
 import com.bakdata.conquery.models.datasets.Table;
 import com.bakdata.conquery.models.error.ConqueryError;
+import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.TableId;
 import com.bakdata.conquery.models.query.DateAggregationMode;
-import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.QueryExecutionContext;
 import com.bakdata.conquery.models.query.QueryPlanContext;
 import com.bakdata.conquery.models.query.QueryResolveContext;
@@ -27,9 +29,8 @@
 import com.bakdata.conquery.models.query.queryplan.ConceptQueryPlan;
 import com.bakdata.conquery.models.query.queryplan.SecondaryIdQueryPlan;
 import com.bakdata.conquery.models.query.resultinfo.ResultInfo;
-import com.bakdata.conquery.models.query.resultinfo.printers.SecondaryIdResultInfo;
+import com.bakdata.conquery.models.query.resultinfo.SecondaryIdResultInfo;
 import com.fasterxml.jackson.annotation.JsonView;
-import jakarta.validation.constraints.NotNull;
 import lombok.Getter;
 import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
@@ -44,29 +45,31 @@ public class SecondaryIdQuery extends Query {
 protected DateAggregationMode dateAggregationMode = DateAggregationMode.MERGE;
 @NotNull
 private CQElement root;
- @NsIdRef
 @NotNull
- private SecondaryIdDescription secondaryId;
+ private SecondaryIdDescriptionId secondaryId;
 /**
 * @apiNote not using {@link ConceptQuery} directly in the API-spec simplifies the API.
 */
 @JsonView(View.InternalCommunication.class)
 private ConceptQuery query;
- @NsIdRefCollection
 @JsonView(View.InternalCommunication.class)
- private Set withSecondaryId;
+ private Set withSecondaryId;
- @NsIdRefCollection
 @JsonView(View.InternalCommunication.class)
- private Set withoutSecondaryId;
+ private Set withoutSecondaryId;
 @Override
 public SecondaryIdQueryPlan createQueryPlan(QueryPlanContext context) {
- final ConceptQueryPlan queryPlan = query.createQueryPlan(context.withSelectedSecondaryId(secondaryId));
-
- return new SecondaryIdQueryPlan(query, context, secondaryId, withSecondaryId, withoutSecondaryId, queryPlan, context.getSecondaryIdSubPlanRetention());
+ final SecondaryIdDescription secondaryIdDescription = secondaryId.resolve();
+ final ConceptQueryPlan queryPlan = query.createQueryPlan(context.withSelectedSecondaryId(secondaryIdDescription));
+
+ return new SecondaryIdQueryPlan(query, context, secondaryIdDescription, withSecondaryId.stream()
+ .map(ColumnId::resolve)
+ .collect(Collectors.toSet()), withoutSecondaryId.stream()
+ .map(TableId::resolve)
+ .collect(Collectors.toSet()), queryPlan, context.getSecondaryIdSubPlanRetention());
 }
 @Override
@@ -99,21 +102,19 @@ public void resolve(final QueryResolveContext context) {
 query.visit(queryElement -> {
 // We cannot check for CQExternal here and add the ALL_IDS Table because it is not serializable at the moment
- if (!(queryElement instanceof CQConcept)) {
+ if (!(queryElement instanceof CQConcept concept)) {
 return;
 }
- final CQConcept concept = (CQConcept) queryElement;
-
 for (CQTable connector : concept.getTables()) {
- final Table table = connector.getConnector().getTable();
+ final Table table = connector.getConnector().resolve().getResolvedTable();
 final Column secondaryIdColumn = table.findSecondaryIdColumn(secondaryId);
 if (secondaryIdColumn != null && !concept.isExcludeFromSecondaryId()) {
- withSecondaryId.add(secondaryIdColumn);
+ withSecondaryId.add(secondaryIdColumn.getId());
 }
 else {
- withoutSecondaryId.add(table);
+ withoutSecondaryId.add(table.getId());
 }
 }
 });
@@ -125,12 +126,12 @@ public void resolve(final QueryResolveContext context) {
 }
 @Override
- public List getResultInfos(PrintSettings printSettings) {
+ public List getResultInfos() {
 final List resultInfos = new ArrayList<>();
- resultInfos.add(new SecondaryIdResultInfo(secondaryId, printSettings));
+ resultInfos.add(new SecondaryIdResultInfo(secondaryId.resolve()));
- resultInfos.addAll(query.getResultInfos(printSettings));
+ resultInfos.addAll(query.getResultInfos());
 return resultInfos;
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java
index c8f88d8dbb..115fb6b28c 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java
@@ -5,14 +5,17 @@
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Consumer;
+import java.util.function.Predicate;
 import java.util.stream.Collectors;
+import jakarta.validation.Valid;
+import jakarta.validation.constraints.NotEmpty;
+import jakarta.validation.constraints.NotNull;
 import com.bakdata.conquery.ResultHeaders;
 import com.bakdata.conquery.apiv1.execution.FullExecutionStatus;
@@ -20,7 +23,6 @@
 import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept;
 import com.bakdata.conquery.io.cps.CPSType;
 import com.bakdata.conquery.io.jackson.View;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRefKeys;
 import com.bakdata.conquery.models.common.CDateSet;
 import com.bakdata.conquery.models.common.Range;
 import com.bakdata.conquery.models.common.daterange.CDateRange;
@@ -29,9 +31,11 @@
 import com.bakdata.conquery.models.datasets.concepts.Concept;
 import com.bakdata.conquery.models.datasets.concepts.Connector;
 import com.bakdata.conquery.models.datasets.concepts.ValidityDate;
+import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId;
+import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId;
 import com.bakdata.conquery.models.query.DateAggregationMode;
-import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.QueryExecutionContext;
 import com.bakdata.conquery.models.query.QueryPlanContext;
 import com.bakdata.conquery.models.query.QueryResolveContext;
@@ -41,20 +45,17 @@
 import com.bakdata.conquery.models.query.queryplan.TableExportQueryPlan;
 import com.bakdata.conquery.models.query.resultinfo.ColumnResultInfo;
 import com.bakdata.conquery.models.query.resultinfo.ResultInfo;
-import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters;
-import com.bakdata.conquery.models.query.resultinfo.printers.SecondaryIdResultInfo;
+import com.bakdata.conquery.models.query.resultinfo.SecondaryIdResultInfo;
 import com.bakdata.conquery.models.types.ResultType;
 import com.bakdata.conquery.models.types.SemanticType;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonView;
-import jakarta.validation.Valid;
-import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.NotNull;
 import lombok.Getter;
 import lombok.NonNull;
 import lombok.RequiredArgsConstructor;
 import lombok.Setter;
+import lombok.ToString;
 import lombok.extern.slf4j.Slf4j;
@@ -72,6 +73,7 @@
 @Slf4j
 @Getter
 @Setter
+@ToString(onlyExplicitlyIncluded = false)
 @CPSType(id = "TABLE_EXPORT", base = QueryDescription.class)
 @RequiredArgsConstructor(onConstructor_ = {@JsonCreator})
 public class TableExportQuery extends Query {
@@ -79,17 +81,22 @@ public class TableExportQuery extends Query {
 @Valid
 @NotNull
 @NonNull
+ @ToString.Include
 protected final Query query;
+ @NotNull
+ @ToString.Include
 private Range dateRange = Range.all();
 @NotEmpty
 @Valid
+ @ToString.Include
 private List tables;
 /**
 * @see TableExportQueryPlan#isRawConceptValues()
 */
+ @ToString.Include
 private boolean rawConceptValues = true;
 /**
@@ -99,14 +106,13 @@ public class TableExportQuery extends Query {
 * - SecondaryIds are collected into a Column per SecondaryId
 * - The remaining columns are arbitrarily ordered, but usually grouped by their source table.
*/ - @NsIdRefKeys @JsonView(View.InternalCommunication.class) - private Map positions; + private Map positions; @JsonIgnore - private Set conceptColumns; + private Set conceptColumns; @JsonIgnore - private Map secondaryIdPositions; + private Map secondaryIdPositions; @Override @@ -122,13 +128,7 @@ public TableExportQueryPlan createQueryPlan(QueryPlanContext context) { } } - return new TableExportQueryPlan( - query.createQueryPlan(context), - CDateSet.create(CDateRange.of(dateRange)), - filterQueryNodes, - positions, - rawConceptValues - ); + return new TableExportQueryPlan(query.createQueryPlan(context), CDateSet.create(CDateRange.of(dateRange)), filterQueryNodes, positions, rawConceptValues); } @Override @@ -144,77 +144,78 @@ public void resolve(QueryResolveContext context) { // First is dates, second is source id final AtomicInteger currentPosition = new AtomicInteger(2); - secondaryIdPositions = calculateSecondaryIdPositions(currentPosition); - - final Set validityDates = tables.stream() - .map(CQConcept::getTables) - .flatMap(Collection::stream) - .map(CQTable::findValidityDate) - .filter(Objects::nonNull) - .collect(Collectors.toSet()); - - // We need to know if a column is a concept column so we can prioritize it if it is also a SecondaryId + // We need to know if a column is a concept column, so we can prioritize it, if it is also a SecondaryId conceptColumns = tables.stream() - .map(CQConcept::getTables) - .flatMap(Collection::stream) - .map(CQTable::getConnector) - .map(Connector::getColumn) - .filter(Objects::nonNull) - .collect(Collectors.toSet()); + .map(CQConcept::getTables) + .flatMap(Collection::stream) + .map(CQTable::getConnector) + .map(ConnectorId::resolve) + .map(Connector::getColumn) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); - positions = calculateColumnPositions(currentPosition, tables, secondaryIdPositions, conceptColumns, validityDates); + secondaryIdPositions = calculateSecondaryIdPositions(currentPosition, conceptColumns, tables); + final Set validityDates = + tables.stream().map(CQConcept::getTables).flatMap(Collection::stream).map(CQTable::findValidityDate).filter(Objects::nonNull).collect(Collectors.toSet()); - } - @Override - public List getResultInfos(PrintSettings printSettings) { - return createResultInfos(conceptColumns, printSettings); + positions = calculateColumnPositions(currentPosition, tables, secondaryIdPositions, conceptColumns, validityDates); } - private Map calculateSecondaryIdPositions(AtomicInteger currentPosition) { - final Map secondaryIdPositions = new HashMap<>(); + private static Map calculateSecondaryIdPositions( + AtomicInteger currentPosition, Set conceptColumns, List tables) { + final Map secondaryIdPositions = new HashMap<>(); // SecondaryIds are pulled to the front and grouped over all tables tables.stream() .flatMap(con -> con.getTables().stream()) - .flatMap(table -> Arrays.stream(table.getConnector().getTable().getColumns())) + .flatMap(table -> Arrays.stream(table.getConnector().resolve().getResolvedTable().getColumns())) + // Concept Columns are placed separately so they won't provide a secondaryId + .filter(Predicate.not(conceptColumns::contains)) .map(Column::getSecondaryId) .filter(Objects::nonNull) + .map(SecondaryIdDescriptionId::resolve) .distinct() .sorted(Comparator.comparing(SecondaryIdDescription::getLabel)) // Using for each and not a collector allows us to guarantee sorted insertion. 
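The position bookkeeping in TableExportQuery pulls SecondaryIds to the front (sorted by label, inserted in a fixed order) and numbers the remaining columns afterwards, with both maps now keyed by ids instead of resolved objects. A minimal, self-contained sketch of that numbering idea, using hypothetical stand-in record types (SecondaryId, ColumnId) rather than the real Conquery classes:

import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;

public class PositionSketch {

	// Hypothetical stand-ins; the real code uses SecondaryIdDescriptionId and ColumnId.
	record SecondaryId(String label) {}
	record ColumnId(String name, SecondaryId secondaryId) {}

	public static void main(String[] args) {
		// Positions 0 and 1 are reserved (dates and source), so numbering starts at 2.
		AtomicInteger next = new AtomicInteger(2);

		SecondaryId caseId = new SecondaryId("case");
		SecondaryId siteId = new SecondaryId("site");

		List<ColumnId> columns = List.of(
				new ColumnId("table1.case", caseId),
				new ColumnId("table1.value", null),
				new ColumnId("table2.site", siteId),
				new ColumnId("table2.case", caseId) // same secondary id, shares a slot
		);

		// Secondary ids first, sorted by label; forEach keeps insertion order deterministic.
		Map<SecondaryId, Integer> secondaryIdPositions = new HashMap<>();
		columns.stream()
			   .map(ColumnId::secondaryId)
			   .filter(Objects::nonNull)
			   .distinct()
			   .sorted(Comparator.comparing(SecondaryId::label))
			   .forEach(sid -> secondaryIdPositions.put(sid, next.getAndIncrement()));

		// Remaining columns: secondary-id columns reuse the shared slot, others get a fresh one.
		Map<ColumnId, Integer> positions = new HashMap<>();
		for (ColumnId column : columns) {
			if (positions.containsKey(column)) {
				continue;
			}
			if (column.secondaryId() != null) {
				positions.put(column, secondaryIdPositions.get(column.secondaryId()));
				continue;
			}
			positions.put(column, next.getAndIncrement());
		}

		System.out.println(secondaryIdPositions); // case -> 2, site -> 3
		System.out.println(positions);            // both *.case columns share slot 2
	}
}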
- .forEach(secondaryId -> secondaryIdPositions.put(secondaryId, currentPosition.getAndIncrement())); + .forEach(secondaryId -> secondaryIdPositions.put(secondaryId.getId(), currentPosition.getAndIncrement())); return secondaryIdPositions; } - private static Map calculateColumnPositions(AtomicInteger currentPosition, List tables, Map secondaryIdPositions, Set conceptColumns, Set validityDates) { - final Map positions = new HashMap<>(); + private static Map calculateColumnPositions( + AtomicInteger currentPosition, + List tables, + Map secondaryIdPositions, + Collection conceptColumns, + Collection validityDates) { + final Map positions = new HashMap<>(); for (CQConcept concept : tables) { for (CQTable table : concept.getTables()) { // Set column positions, set SecondaryId positions to precomputed ones. - for (Column column : table.getConnector().getTable().getColumns()) { + for (Column column : table.getConnector().resolve().getResolvedTable().getColumns()) { // ValidityDates are handled separately in column=0 if (validityDates.stream().anyMatch(vd -> vd.containsColumn(column))) { continue; } - if (positions.containsKey(column)) { + final ColumnId columnId = column.getId(); + if (positions.containsKey(columnId)) { continue; } // We want to have ConceptColumns separate here. - if (column.getSecondaryId() != null && !conceptColumns.contains(column)) { - positions.putIfAbsent(column, secondaryIdPositions.get(column.getSecondaryId())); + if (column.getSecondaryId() != null && !conceptColumns.contains(column.getId())) { + positions.putIfAbsent(columnId, secondaryIdPositions.get(column.getSecondaryId())); continue; } - positions.put(column, currentPosition.getAndIncrement()); + positions.put(columnId, currentPosition.getAndIncrement()); } } } @@ -222,76 +223,85 @@ private static Map calculateColumnPositions(AtomicInteger curre return positions; } - private List createResultInfos(Set conceptColumns, PrintSettings printSettings) { + @Override + public List getResultInfos() { + return createResultInfos(conceptColumns); + } + + private List createResultInfos(Set conceptColumns) { - final int size = positions.values().stream().mapToInt(i -> i).max().getAsInt() + 1; + final int size = calculateWidth(positions); + ; final ResultInfo[] infos = new ResultInfo[size]; - infos[0] = ResultHeaders.historyDatesInfo(printSettings); - infos[1] = ResultHeaders.sourceInfo(printSettings); + infos[0] = ResultHeaders.historyDatesInfo(); + infos[1] = ResultHeaders.sourceInfo(); - for (Map.Entry e : secondaryIdPositions.entrySet()) { - final SecondaryIdDescription desc = e.getKey(); + for (Map.Entry e : secondaryIdPositions.entrySet()) { + final SecondaryIdDescriptionId desc = e.getKey(); final Integer pos = e.getValue(); - infos[pos] = new SecondaryIdResultInfo(desc, printSettings); + infos[pos] = new SecondaryIdResultInfo(desc.resolve()); } - final Map> connectorColumns = - tables.stream() - .flatMap(con -> con.getTables().stream()) - .filter(tbl -> tbl.getConnector().getColumn() != null) - .collect(Collectors.toMap(tbl -> tbl.getConnector().getColumn(), tbl -> tbl.getConnector().getConcept())); + final Map> connectorColumns = tables.stream() + .flatMap(con -> con.getTables().stream()) + .map(CQTable::getConnector) + .map(ConnectorId::resolve) + .filter(con -> con.getColumn() != null) + .collect(Collectors.toMap(con -> con.getColumn().resolve(), Connector::getConcept)); - for (Map.Entry entry : positions.entrySet()) { + for (Map.Entry entry : positions.entrySet()) { final int position = entry.getValue(); - final 
Column column = entry.getKey(); + ColumnId columnId = entry.getKey(); + final Column column = columnId.resolve(); - if(position == 0) { + if (position == 0) { continue; } // SecondaryIds and date columns are pulled to the front, thus already covered. - if (column.getSecondaryId() != null && !conceptColumns.contains(column)) { - infos[secondaryIdPositions.get(column.getSecondaryId())].addSemantics(new SemanticType.ColumnT(column)); + if (column.getSecondaryId() != null && !conceptColumns.contains(columnId)) { + infos[secondaryIdPositions.get(column.getSecondaryId())].addSemantics(new SemanticType.ColumnT(columnId)); continue; } - final Set semantics = new HashSet<>(); - - ResultType resultType = ResultType.resolveResultType(column.getType()); - ResultPrinters.Printer printer = ResultPrinters.printerFor(resultType, printSettings); - + final ResultInfo columnResultInfo; if (connectorColumns.containsKey(column)) { - // Additionally, Concept Columns are returned as ConceptElementId, when rawConceptColumns is not set. - final Concept concept = connectorColumns.get(column).getConcept(); + // Additionally, Concept Columns are returned as ConceptElementId, when rawConceptColumns is not set. + columnResultInfo = new ColumnResultInfo(column, ResultType.Primitive.STRING, column.getDescription(), isRawConceptValues() ? null : concept); + // Columns that are used to build concepts are marked as ConceptColumn. - semantics.add(new SemanticType.ConceptColumnT(concept)); + columnResultInfo.addSemantics(new SemanticType.ConceptColumnT(concept.getId())); - if (!isRawConceptValues()) { - resultType = ResultType.Primitive.STRING; - printer = new ResultPrinters.ConceptIdPrinter(concept, printSettings); - } + infos[position] = columnResultInfo; } else { // If it's not a connector column, we just link to the source column. 
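The result header is sized from the largest assigned position (calculateWidth) and then filled slot by slot, with slots 0 and 1 reserved for the date and source columns. A compact, self-contained sketch of that arithmetic, using a hypothetical String[] header in place of the ResultInfo[]:

import java.util.HashMap;
import java.util.Map;

public class WidthSketch {

	// Same arithmetic as the calculateWidth helper: widest assigned position + 1.
	static int calculateWidth(Map<String, Integer> positions) {
		return positions.values().stream().max(Integer::compareTo).orElse(0) + 1;
	}

	public static void main(String[] args) {
		Map<String, Integer> positions = new HashMap<>();
		positions.put("case", 2);
		positions.put("value", 4);
		positions.put("site", 3);

		int width = calculateWidth(positions); // 4 + 1 = 5

		// Hypothetical header row: slots 0/1 are reserved for dates and source,
		// the rest are filled from the positions map.
		String[] header = new String[width];
		header[0] = "dates";
		header[1] = "source";
		positions.forEach((name, pos) -> header[pos] = name);

		System.out.println(String.join(" | ", header));
		// dates | source | case | site | value
	}
}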
- semantics.add(new SemanticType.ColumnT(column)); + columnResultInfo = new ColumnResultInfo(column, ResultType.resolveResultType(column.getType()), column.getDescription(), null); + columnResultInfo.addSemantics(new SemanticType.ColumnT(column.getId())); } - infos[position] = new ColumnResultInfo(column, resultType, semantics, printer, column.getDescription(), printSettings); + infos[position] = columnResultInfo; + + } return List.of(infos); } + public static int calculateWidth(Map positions) { + return positions.values().stream().max(Integer::compareTo).orElse(0) + 1; + } + @Override public void visit(Consumer visitor) { visitor.accept(this); diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java index f426a8c8e8..8da877e7ad 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java @@ -4,24 +4,22 @@ import java.util.Collections; import java.util.List; import java.util.Objects; - import javax.annotation.CheckForNull; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorSelectId; import com.bakdata.conquery.models.query.QueryResolveContext; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotNull; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; @@ -37,23 +35,21 @@ public class CQTable { private List> filters = Collections.emptyList(); @NotNull - @NsIdRefCollection - private List selects = new ArrayList<>(); + private List selects = new ArrayList<>(); private boolean excludeFromTimeAggregation; @@ -91,15 +93,16 @@ public class CQConcept extends CQElement implements NamespacedIdentifiableHoldin public static CQConcept forSelect(Select select) { final CQConcept cqConcept = new CQConcept(); - cqConcept.setElements(List.of(select.getHolder().findConcept())); + // TODO transform to use only ids here + cqConcept.setElements(List.of(select.getHolder().findConcept().getId())); if (select.getHolder() instanceof Connector) { final CQTable table = new CQTable(); cqConcept.setTables(List.of(table)); - table.setConnector(((Connector) select.getHolder())); + table.setConnector(((Connector) select.getHolder()).getId()); - table.setSelects(List.of(select)); + table.setSelects(List.of((ConnectorSelectId) select.getId())); table.setConcept(cqConcept); } else { @@ -107,11 +110,11 @@ public static CQConcept forSelect(Select select) { .getConnectors().stream() .map(conn -> { final CQTable table = new CQTable(); - 
table.setConnector(conn); + table.setConnector(conn.getId()); return table; }).toList()); - cqConcept.setSelects(List.of(select)); + cqConcept.setSelects(List.of(select.getId())); } return cqConcept; @@ -119,10 +122,11 @@ public static CQConcept forSelect(Select select) { public static CQConcept forConnector(Connector source) { final CQConcept cqConcept = new CQConcept(); - cqConcept.setElements(List.of(source.getConcept())); + // TODO transform to use only ids here + cqConcept.setElements(List.of(source.getConcept().getId())); final CQTable cqTable = new CQTable(); cqTable.setConcept(cqConcept); - cqTable.setConnector(source); + cqTable.setConnector(source.getId()); cqConcept.setTables(List.of(cqTable)); return cqConcept; @@ -134,7 +138,7 @@ public String defaultLabel(Locale locale) { return null; } - if (elements.size() == 1 && elements.get(0).equals(getConcept())) { + if (elements.size() == 1 && elements.get(0).equals(getConceptId())) { return getConcept().getLabel(); } @@ -143,11 +147,12 @@ public String defaultLabel(Locale locale) { builder.append(getConcept().getLabel()); builder.append(" "); - for (ConceptElement id : elements) { - if (id.equals(getConcept())) { + for (ConceptElementId id : elements) { + ConceptElement conceptElement = id.resolve(); + if (conceptElement.equals(getConcept())) { continue; } - builder.append(id.getLabel()).append("+"); + builder.append(conceptElement.getLabel()).append("+"); } builder.deleteCharAt(builder.length() - 1); @@ -156,34 +161,19 @@ public String defaultLabel(Locale locale) { } @JsonIgnore - public Concept getConcept() { - return elements.get(0).getConcept(); + public ConceptId getConceptId() { + return elements.get(0).findConcept(); } @JsonIgnore - @ValidationMethod(message = "Not all Selects belong to the Concept.") - public boolean isAllSelectsForConcept() { - final Concept concept = getConcept(); - - if (!getSelects().stream().map(Select::getHolder).allMatch(concept::equals)) { - log.error("Not all selects belong to Concept[{}]", concept); - return false; - } - - return true; + public Concept getConcept() { + return getConceptId().resolve(); } - @JsonIgnore - @ValidationMethod(message = "Not all elements belong to the same Concept.") - public boolean isAllElementsForConcept() { - final Concept concept = getConcept(); - - if (!getElements().stream().map(ConceptElement::getConcept).allMatch(concept::equals)) { - log.error("Not all elements belong to Concept[{}]", concept); - return false; - } - - return true; + @Override + public void resolve(QueryResolveContext context) { + aggregateEventDates = !(excludeFromTimeAggregation || DateAggregationMode.NONE.equals(context.getDateAggregationMode())); + tables.forEach(t -> t.resolve(context)); } @Override @@ -199,9 +189,8 @@ public QPNode createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { .collect(Collectors.toList()); //add filter to children - final List> aggregators = new ArrayList<>(); - aggregators.addAll(conceptAggregators); + final List> aggregators = new ArrayList<>(conceptAggregators); final List> connectorAggregators = createAggregators(plan, table.getSelects()); @@ -219,7 +208,7 @@ public QPNode createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { final List> eventDateUnionAggregators = - aggregateEventDates ? List.of(new EventDateUnionAggregator(Set.of(table.getConnector().getTable()))) + aggregateEventDates ? 
List.of(new EventDateUnionAggregator(Set.of(table.getConnector().resolve().getResolvedTable()))) : Collections.emptyList(); aggregators.addAll(eventDateUnionAggregators); @@ -236,7 +225,7 @@ public QPNode createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { final ConceptNode node = new ConceptNode( conceptSpecificNode, - elements, + elements.stream().>map(ConceptElementId::resolve).toList(), table, // if the node is excluded, don't pass it into the Node. !excludeFromSecondaryId && hasSelectedSecondaryId ? context.getSelectedSecondaryId() : null @@ -261,12 +250,43 @@ public void collectRequiredQueries(Set requiredQueries) { } + @Override + public List getResultInfos() { + final List resultInfos = new ArrayList<>(); + + for (SelectId select : selects) { + Select resolved = select.resolve(); + resultInfos.add(resolved.getResultInfo(this)); + } + + for (CQTable table : tables) { + for (SelectId sel : table.getSelects()) { + Select resolved = sel.resolve(); + resultInfos.add(resolved.getResultInfo(this)); + } + } + + return resultInfos; + } + + @Override + public RequiredEntities collectRequiredEntities(QueryExecutionContext context) { + final Set connectors = getTables().stream().map(CQTable::getConnector).collect(Collectors.toSet()); + + return new RequiredEntities(context.getBucketManager() + .getEntitiesWithConcepts(getElements().stream() + .>map(ConceptElementId::resolve) + .toList(), + connectors, context.getDateRestriction())); + } + /** * Generates Aggregators from Selects. These are collected and also appended to the list of aggregators in the * query plan that contribute to columns the result. */ - private static List> createAggregators(ConceptQueryPlan plan, Listresolve) .map(Select::createAggregator) .peek(plan::registerAggregator) .collect(Collectors.toList()); @@ -274,45 +294,51 @@ private static List> createAggregators(ConceptQueryPlan plan, List private ValidityDate selectValidityDate(CQTable table) { if (table.getDateColumn() != null) { - return table.getDateColumn().getValue(); + return table.getDateColumn().getValue().resolve(); } //else use this first defined validity date column - if (!table.getConnector().getValidityDates().isEmpty()) { - return table.getConnector().getValidityDates().get(0); + final Connector connector = table.getConnector().resolve(); + if (!connector.getValidityDates().isEmpty()) { + return connector.getValidityDates().get(0); } return null; } - @Override - public List getResultInfos(PrintSettings settings) { - final List resultInfos = new ArrayList<>(); - - for (Select select : selects) { - resultInfos.add(select.getResultInfo(this, settings)); - } + @JsonIgnore + @ValidationMethod(message = "Not all Selects belong to the Concept.") + public boolean isAllSelectsForConcept() { + final ConceptId concept = getConceptId(); - for (CQTable table : tables) { - for (Select sel : table.getSelects()) { - resultInfos.add(sel.getResultInfo(this, settings)); - } + if (!getSelects().stream().map(SelectId::findConcept).allMatch(concept::equals)) { + log.error("Not all selects belong to Concept[{}]", concept); + return false; } - return resultInfos; + return true; } - @Override - public void collectNamespacedObjects(Set> identifiables) { - identifiables.addAll(elements); - identifiables.addAll(selects); - tables.forEach(table -> identifiables.add(table.getConnector())); + @JsonIgnore + @ValidationMethod(message = "Not all elements belong to the same Concept.") + public boolean isAllElementsForConcept() { + + final ConceptId concept = getConceptId(); + + 
if (!getElements().stream().map(ConceptElementId::findConcept).allMatch(concept::equals)) { + log.error("Not all elements belong to Concept[{}]", concept); + return false; + } + + return true; } @Override - public void resolve(QueryResolveContext context) { - aggregateEventDates = !(excludeFromTimeAggregation || DateAggregationMode.NONE.equals(context.getDateAggregationMode())); - tables.forEach(t -> t.resolve(context)); + public void collectNamespacedObjects(Set> identifiables) { + final List> list = elements.stream().>map(ConceptElementId::resolve).toList(); + identifiables.addAll(list); + identifiables.addAll(selects.stream().map(Id::resolve).toList()); + tables.forEach(table -> identifiables.add(table.getConnector().resolve())); } @Override @@ -326,22 +352,15 @@ public void setDefaultExists() { return; } - final List conSelects = new ArrayList<>(t.getSelects()); - conSelects.addAll(t.getConnector().getDefaultSelects()); + final List conSelects = new ArrayList<>(t.getSelects()); + conSelects.addAll(t.getConnector().resolve().getDefaultSelects().stream().map(Select::getId).map(ConnectorSelectId.class::cast).toList()); t.setSelects(conSelects); } } - - @Override - public RequiredEntities collectRequiredEntities(QueryExecutionContext context) { - final Set connectors = getTables().stream().map(CQTable::getConnector).collect(Collectors.toSet()); - - return new RequiredEntities(context.getBucketManager().getEntitiesWithConcepts(getElements(), connectors, context.getDateRestriction())); - } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQDateRestriction.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQDateRestriction.java index 251f5c779b..6f78e0be8f 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQDateRestriction.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQDateRestriction.java @@ -12,7 +12,6 @@ import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; @@ -60,8 +59,8 @@ public void resolve(QueryResolveContext context) { } @Override - public List getResultInfos(PrintSettings settings) { - return child.getResultInfos(settings); + public List getResultInfos() { + return child.getResultInfos(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQNegation.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQNegation.java index 2ae44af793..57fbef50f4 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQNegation.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQNegation.java @@ -8,7 +8,6 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; @@ -67,8 +66,8 @@ private DateAggregationAction 
determineDateAction(QueryResolveContext context) { } @Override - public List getResultInfos(PrintSettings settings) { - return child.getResultInfos(settings); + public List getResultInfos() { + return child.getResultInfos(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQOr.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQOr.java index 6037200bea..882ead8a2b 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQOr.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQOr.java @@ -28,7 +28,6 @@ import com.bakdata.conquery.models.query.queryplan.specific.OrNode; import com.bakdata.conquery.models.query.resultinfo.FixedLabelResultInfo; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.util.QueryUtils; import com.fasterxml.jackson.annotation.JsonView; @@ -118,18 +117,24 @@ private DateAggregationAction determineDateAction(QueryResolveContext context) { } @Override - public List getResultInfos(PrintSettings settings) { + public List getResultInfos() { List resultInfos = new ArrayList<>(); for (CQElement c : children) { - resultInfos.addAll(c.getResultInfos(settings)); + resultInfos.addAll(c.getResultInfos()); } if (createExists()) { - final ResultPrinters.BooleanPrinter printer = new ResultPrinters.BooleanPrinter(settings); - final String userOrDefaultLabel = getUserOrDefaultLabel(settings.getLocale()); - final String defaultLabel = defaultLabel(settings.getLocale()); - - resultInfos.add(new FixedLabelResultInfo(userOrDefaultLabel, defaultLabel, ResultType.Primitive.BOOLEAN, Set.of(), settings, printer)); + resultInfos.add(new FixedLabelResultInfo(ResultType.Primitive.BOOLEAN, Set.of()) { + @Override + public String userColumnName(PrintSettings printSettings) { + return getUserOrDefaultLabel(printSettings.getLocale()); + } + + @Override + public String defaultColumnName(PrintSettings printSettings) { + return defaultLabel(printSettings.getLocale()); + } + }); } return resultInfos; diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java index 7162f8f4fb..496fa51120 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java @@ -13,7 +13,6 @@ import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.ManagedQuery; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; @@ -93,8 +92,8 @@ public void visit(Consumer visitor) { } @Override - public List getResultInfos(PrintSettings settings) { - return resolvedQuery.getReusableComponents().getResultInfos(settings); + public List getResultInfos() { + return resolvedQuery.getReusableComponents().getResultInfos(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/external/CQExternal.java 
b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/external/CQExternal.java index d6d23de78d..7815d7fe03 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/external/CQExternal.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/external/CQExternal.java @@ -19,7 +19,6 @@ import com.bakdata.conquery.models.config.IdColumnConfig; import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; @@ -215,7 +214,7 @@ public RequiredEntities collectRequiredEntities(QueryExecutionContext context) { } @Override - public List getResultInfos(PrintSettings settings) { + public List getResultInfos() { if (extra == null) { return Collections.emptyList(); } @@ -228,7 +227,7 @@ public List getResultInfos(PrintSettings settings) { final String column = headers[col]; final ResultType type = onlySingles ? ResultType.Primitive.STRING : new ResultType.ListT<>(ResultType.Primitive.STRING); - resultInfos.add(new ExternalResultInfo(column, type, settings)); + resultInfos.add(new ExternalResultInfo(column, type)); } return resultInfos; diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/temporal/CQAbstractTemporalQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/temporal/CQAbstractTemporalQuery.java index 33a720c5ea..63e33a0616 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/temporal/CQAbstractTemporalQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/temporal/CQAbstractTemporalQuery.java @@ -8,7 +8,6 @@ import com.bakdata.conquery.apiv1.query.CQElement; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.DateAggregationMode; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; @@ -81,10 +80,10 @@ public void resolve(QueryResolveContext context) { } @Override - public List getResultInfos(PrintSettings settings) { + public List getResultInfos() { List resultInfos = new ArrayList<>(); - resultInfos.addAll(index.getChild().getResultInfos(settings)); - resultInfos.addAll(preceding.getChild().getResultInfos(settings)); + resultInfos.addAll(index.getChild().getResultInfos()); + resultInfos.addAll(preceding.getChild().getResultInfos()); return resultInfos; } diff --git a/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java b/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java index 3ddcb57894..f4cb95a37c 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java @@ -32,7 +32,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Throwables; import io.dropwizard.core.setup.Environment; -import io.dropwizard.jersey.DropwizardResourceConfig; import io.dropwizard.lifecycle.Managed; import lombok.Getter; import lombok.NonNull; @@ -40,6 +39,7 @@ import 
lombok.experimental.Delegate; import lombok.extern.slf4j.Slf4j; import org.glassfish.jersey.internal.inject.AbstractBinder; +import org.glassfish.jersey.server.ResourceConfig; /** * Central node of Conquery. Hosts the frontend, api, metadata and takes care of query distribution to @@ -94,7 +94,7 @@ public void run(Manager manager) throws InterruptedException { // Initialization of internationalization I18n.init(); - configureApiServlet(config, environment.jersey().getResourceConfig()); + configureApiServlet(config, environment); maintenanceService = environment.lifecycle() .scheduledExecutorService("Maintenance Service") @@ -155,7 +155,8 @@ private void registerTasks(Manager manager, Environment environment, ConqueryCon environment.lifecycle().addServerLifecycleListener(shutdown); } - private void configureApiServlet(ConqueryConfig config, DropwizardResourceConfig jerseyConfig) { + private void configureApiServlet(ConqueryConfig config, Environment environment) { + ResourceConfig jerseyConfig = environment.jersey().getResourceConfig(); RESTServer.configure(config, jerseyConfig); jerseyConfig.register(new AbstractBinder() { @Override @@ -165,12 +166,14 @@ protected void configure() { } }); + getInternalMapperFactory().customizeApiObjectMapper(environment.getObjectMapper(), getDatasetRegistry(), getMetaStorage()); + jerseyConfig.register(PathParamInjector.class); } private void loadMetaStorage() { log.info("Opening MetaStorage"); - getMetaStorage().openStores(getInternalMapperFactory().createManagerPersistenceMapper(getDatasetRegistry(), getMetaStorage())); + getMetaStorage().openStores(getInternalMapperFactory().createManagerPersistenceMapper(getDatasetRegistry(), getMetaStorage()), getEnvironment().metrics()); log.info("Loading MetaStorage"); getMetaStorage().loadData(); log.info("MetaStorage loaded {}", getMetaStorage()); diff --git a/backend/src/main/java/com/bakdata/conquery/commands/ShardNode.java b/backend/src/main/java/com/bakdata/conquery/commands/ShardNode.java index a1e084264b..a69e598fc0 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/ShardNode.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/ShardNode.java @@ -65,7 +65,7 @@ public void run(ConqueryConfig config, Environment environment) throws Exception lifecycle.manage(clusterConnection); - final Collection workerStorages = config.getStorage().discoverWorkerStorages(); + final Collection workerStorages = config.getStorage().discoverWorkerStorages(); ExecutorService loaders = config.getQueries().getExecutionPool().createService("Worker loader"); @@ -74,7 +74,7 @@ public void run(ConqueryConfig config, Environment environment) throws Exception for (WorkerStorage workerStorage : workerStorages) { loaders.submit(() -> { try { - workersDone.add(workers.createWorker(workerStorage, config.isFailOnError())); + workersDone.add(workers.createWorker(workerStorage, config.isFailOnError(), environment)); } catch (Exception e) { log.error("Failed reading Storage", e); diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/IdRefPathParamConverterProvider.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/IdRefPathParamConverterProvider.java index c1d942f3ee..0925b0efe9 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/IdRefPathParamConverterProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/IdRefPathParamConverterProvider.java @@ -2,13 +2,11 @@ import java.lang.annotation.Annotation; import java.lang.reflect.Type; - import jakarta.inject.Inject; 
import jakarta.ws.rs.ext.ParamConverter; import jakarta.ws.rs.ext.ParamConverterProvider; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdUtil; @@ -16,7 +14,6 @@ import com.bakdata.conquery.models.worker.DatasetRegistry; import lombok.AllArgsConstructor; import lombok.NoArgsConstructor; -import lombok.RequiredArgsConstructor; @NoArgsConstructor @AllArgsConstructor @@ -46,6 +43,6 @@ public ParamConverter getConverter(Class rawType, Type genericType, An return new NamespacedIdRefParamConverter(parser, datasetRegistry); } - return new MetaIdRefParamConverter(parser, metaStorage.getCentralRegistry()); + return new MetaIdRefParamConverter(parser, metaStorage); } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/Initializing.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/Initializing.java index e70ceda844..ff01332b7a 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/Initializing.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/Initializing.java @@ -1,6 +1,7 @@ package com.bakdata.conquery.io.jackson; import com.fasterxml.jackson.databind.util.StdConverter; +import com.google.common.base.Throwables; /** * Interface for class instances that need initialization after deserialization and value injection. @@ -16,14 +17,20 @@ */ public interface Initializing { - void init(); + void init() throws Exception; class Converter extends StdConverter { @Override public T convert(T value) { - value.init(); - return value; + try { + value.init(); + return value; + } + catch (Exception e) { + Throwables.throwIfUnchecked(e); + throw new IllegalStateException("Could not initialize %s".formatted(value), e); + } } } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/MetaIdRefParamConverter.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/MetaIdRefParamConverter.java index eb6e7a7c95..72c1d231a6 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/MetaIdRefParamConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/MetaIdRefParamConverter.java @@ -2,25 +2,26 @@ import jakarta.ws.rs.ext.ParamConverter; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import lombok.NonNull; import lombok.RequiredArgsConstructor; @RequiredArgsConstructor -public class MetaIdRefParamConverter, VALUE extends Identifiable> implements ParamConverter { +public class MetaIdRefParamConverter & MetaId, VALUE extends Identifiable> implements ParamConverter { private final IdUtil.Parser idParser; @NonNull - private final CentralRegistry registry; + private final MetaStorage storage; @Override public VALUE fromString(String value) { final ID id = idParser.parse(value); - return registry.resolve(id); + return (VALUE) id.get(storage); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/MutableInjectableValues.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/MutableInjectableValues.java index c0c8fd7be8..5b849d36bc 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/io/jackson/MutableInjectableValues.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/MutableInjectableValues.java @@ -11,8 +11,8 @@ public class MutableInjectableValues extends InjectableValues { private final ConcurrentHashMap values = new ConcurrentHashMap<>(); - - public MutableInjectableValues add(Class type, T value) { + + public MutableInjectableValues add(Class type, T value) { if(!type.isInstance(value)) { throw new IllegalArgumentException(value+" is not of type "+type); } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/NamespacedIdRefParamConverter.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/NamespacedIdRefParamConverter.java index 2d07631c49..92530d5f9e 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/NamespacedIdRefParamConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/NamespacedIdRefParamConverter.java @@ -21,7 +21,7 @@ public class NamespacedIdRefParamConverter & NamespacedId, public VALUE fromString(String value) { final ID id = idParser.parse(value); - return registry.resolve(id); + return (VALUE) id.get(registry.getStorage(id.getDataset())); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/CBlockDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/CBlockDeserializer.java index 8e06f2954c..5cd563227a 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/CBlockDeserializer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/CBlockDeserializer.java @@ -26,13 +26,31 @@ public class CBlockDeserializer extends JsonDeserializer implements Cont private JsonDeserializer beanDeserializer; + @SuppressWarnings({ "rawtypes", "unchecked" }) + @Override + public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { + JavaType type = Optional + .ofNullable(ctxt.getContextualType()) + .orElseGet(Optional.ofNullable(property).map(BeanProperty::getType)::get); + + while(type.isContainerType()) { + type = type.getContentType(); + } + BeanDescription descr = ctxt.getConfig().introspect(type); + JsonDeserializer deser = ctxt.getFactory().createBeanDeserializer(ctxt, type, descr); + if(deser instanceof ResolvableDeserializer) { + ((ResolvableDeserializer) deser).resolve(ctxt); + } + return new CBlockDeserializer((JsonDeserializer)deser); + } + @Override public CBlock deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { CBlock block = beanDeserializer.deserialize(p, ctxt); - TreeConcept concept = block.getConnector().getConcept(); if(block.getMostSpecificChildren() != null) { + TreeConcept concept = (TreeConcept) block.getConnector().getConcept().resolve(); // deduplicate concrete paths after loading from disk. 
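The Initializing hunk above lets init() throw checked exceptions, and its StdConverter wrapper now rethrows them as unchecked. A self-contained sketch of that wrapping pattern, assuming jackson-databind and Guava on the classpath and a hypothetical stand-in for the Initializing interface:

import com.fasterxml.jackson.databind.util.StdConverter;
import com.google.common.base.Throwables;

public class InitializingSketch {

	// Hypothetical stand-in: init() may now declare checked exceptions.
	interface Initializing {
		void init() throws Exception;
	}

	// Runs init() on a freshly deserialized value; checked exceptions are wrapped unchecked.
	static class Converter<T extends Initializing> extends StdConverter<T, T> {
		@Override
		public T convert(T value) {
			try {
				value.init();
				return value;
			}
			catch (Exception e) {
				Throwables.throwIfUnchecked(e);
				throw new IllegalStateException("Could not initialize %s".formatted(value), e);
			}
		}
	}

	public static void main(String[] args) {
		Initializing ok = () -> System.out.println("initialized");
		new Converter<Initializing>().convert(ok);

		Initializing failing = () -> { throw new Exception("boom"); };
		try {
			new Converter<Initializing>().convert(failing);
		}
		catch (IllegalStateException e) {
			System.out.println("wrapped: " + e.getCause().getMessage());
		}
	}
}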
for (int event = 0; event < block.getMostSpecificChildren().length; event++) { @@ -55,21 +73,5 @@ public CBlock deserializeWithType(JsonParser p, DeserializationContext ctxt, Typ return this.deserialize(p, ctxt); } - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Override - public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - JavaType type = Optional - .ofNullable(ctxt.getContextualType()) - .orElseGet(Optional.ofNullable(property).map(BeanProperty::getType)::get); - while(type.isContainerType()) { - type = type.getContentType(); - } - BeanDescription descr = ctxt.getConfig().introspect(type); - JsonDeserializer deser = ctxt.getFactory().createBeanDeserializer(ctxt, type, descr); - if(deser instanceof ResolvableDeserializer) { - ((ResolvableDeserializer) deser).resolve(ctxt); - } - return new CBlockDeserializer((JsonDeserializer)deser); - } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/IdDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/IdDeserializer.java index 7307d1e0a7..f7604b1d79 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/IdDeserializer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/IdDeserializer.java @@ -1,13 +1,19 @@ package com.bakdata.conquery.io.jackson.serializer; import java.io.IOException; +import java.util.HashSet; +import java.util.List; import java.util.Optional; import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.IIdInterner; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.fasterxml.jackson.core.JsonParser; @@ -28,35 +34,99 @@ public class IdDeserializer> extends JsonDeserializer imple private Class idClass; private IdUtil.Parser idParser; - private boolean checkForInjectedPrefix; + private boolean isNamespacedId; + + @SuppressWarnings({"rawtypes", "unchecked"}) + @Override + public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) { + JavaType type = Optional.ofNullable(ctxt.getContextualType()) + .orElseGet(Optional.ofNullable(property).map(BeanProperty::getType)::get); + + while (type.isContainerType()) { + type = type.getContentType(); + } + Class> idClass = (Class>) type.getRawClass(); + IdUtil.Parser>> parser = IdUtil.createParser((Class) idClass); + + return new IdDeserializer( + idClass, + parser, + //we only need to check for the dataset prefix if the id requires it + NamespacedId.class.isAssignableFrom(idClass) + ); + } @SuppressWarnings("unchecked") @Override public ID deserialize(JsonParser parser, DeserializationContext ctxt) throws IOException { - if (parser.getCurrentToken() != JsonToken.VALUE_STRING) { - return (ID) ctxt.handleUnexpectedToken(Id.class, parser.getCurrentToken(), parser, "name references should be strings"); + JsonToken currentToken = parser.getCurrentToken(); + if (currentToken != JsonToken.VALUE_STRING) { + return (ID) 
ctxt.handleUnexpectedToken(Id.class, currentToken, parser, "name references should be strings. Was: " + currentToken); } String text = parser.getText(); + // We need to assign resolvers for namespaced and meta ids because meta-objects might reference namespaced objects (e.g. ExecutionsId) + NamespacedStorageProvider namespacedStorageProvider = NamespacedStorageProvider.getResolver(ctxt); + MetaStorage metaStorage = MetaStorage.get(ctxt); + try { - return deserializeId(text, idParser, checkForInjectedPrefix, ctxt); + final ID id = deserializeId(text, idParser, isNamespacedId, ctxt); + + setResolver(id, metaStorage, namespacedStorageProvider); + + return id; } catch (Exception e) { return (ID) ctxt.handleWeirdStringValue(idClass, text, "Could not parse `" + idClass.getSimpleName() + "` from `" + text + "`: " + e.getMessage()); } } + public static void setResolver(Id id, MetaStorage metaStorage, NamespacedStorageProvider namespacedStorageProvider) { + // Set resolvers in this id and subIds + final HashSet> ids = new HashSet<>(); + id.collectIds(ids); + for (Id subId : ids) { + if (subId.getNamespacedStorageProvider() != null || subId.getMetaStorage() != null) { + // Ids are constructed of other ids that might already have a resolver set + continue; + } + if (subId instanceof NamespacedId) { + subId.setNamespacedStorageProvider(namespacedStorageProvider); + } + else if (subId instanceof MetaId) { + subId.setMetaStorage(metaStorage); + } + } + } + public static > ID deserializeId(String text, IdUtil.Parser idParser, boolean checkForInjectedPrefix, DeserializationContext ctx) throws JsonMappingException { - if (checkForInjectedPrefix) { - //check if there was a dataset injected and if it is already a prefix - String datasetName = findDatasetName(ctx); - if (datasetName != null) { - return idParser.parsePrefixed(datasetName, text); - } + List components = checkForInjectedPrefix ? 
+ IdUtil.Parser.asComponents(findDatasetName(ctx), text) : + IdUtil.Parser.asComponents(text); + + + IIdInterner iIdInterner = IIdInterner.get(ctx); + + if (iIdInterner == null) { + // Parse directly, as no interner is available + return idParser.parse(components); + } + + IIdInterner.ParserIIdInterner idParserIIdInterner = iIdInterner.forParser(idParser); + ID id = idParserIIdInterner.get(components); + + if (id != null) { + // Return cached id + return id; } - return idParser.parse(text); + + // Parse and cache + id = idParser.parse(components); + idParserIIdInterner.putIfAbsent(components, id); + + return id; } private static String findDatasetName(DeserializationContext ctx) throws JsonMappingException { @@ -81,23 +151,5 @@ public ID deserializeWithType(JsonParser p, DeserializationContext ctxt, TypeDes return this.deserialize(p, ctxt); } - @SuppressWarnings({"rawtypes", "unchecked"}) - @Override - public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - JavaType type = Optional.ofNullable(ctxt.getContextualType()) - .orElseGet(Optional.ofNullable(property).map(BeanProperty::getType)::get); - while (type.isContainerType()) { - type = type.getContentType(); - } - Class> idClass = (Class>) type.getRawClass(); - IdUtil.Parser>> parser = IdUtil.createParser((Class) idClass); - - return new IdDeserializer( - idClass, - parser, - //we only need to check for the dataset prefix if the id requires it - NamespacedId.class.isAssignableFrom(idClass) - ); - } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRef.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRef.java deleted file mode 100644 index ae70528480..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRef.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * An annotation that guides Jackson to serialize/deserialize the field as a {@link Id} instead of the object content itself. - * - * @implNote You cannot expect MetaIdRefs to work beyond the ManagerNode! So resolve the content you need on the Manager (Or implement the necessary logic). 
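The reworked deserializeId above splits the id string into components, consults a per-parser interner, and only parses (and caches) on a miss. A self-contained sketch of that cache-then-parse flow, with a hypothetical SimpleId record and a ConcurrentHashMap standing in for IIdInterner:

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class IdInterningSketch {

	// Hypothetical id type; the real code handles Id<?> subclasses such as DatasetId or ConceptId.
	record SimpleId(List<String> components) {}

	// Stand-in for a per-parser interner: component list -> previously parsed id.
	private static final ConcurrentMap<List<String>, SimpleId> INTERNER = new ConcurrentHashMap<>();

	static SimpleId deserializeId(String text) {
		List<String> components = List.of(text.split("\\."));

		SimpleId cached = INTERNER.get(components);
		if (cached != null) {
			// Cache hit: reuse the interned instance instead of re-parsing.
			return cached;
		}

		// Cache miss: "parse" and remember the result for the next occurrence.
		SimpleId id = new SimpleId(components);
		INTERNER.putIfAbsent(components, id);
		return id;
	}

	public static void main(String[] args) {
		SimpleId first = deserializeId("dataset1.concept.connector");
		SimpleId second = deserializeId("dataset1.concept.connector");

		System.out.println(first.equals(second)); // true: equal components
		System.out.println(first == second);      // true: same interned instance
	}
}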
- */ -@Retention(RetentionPolicy.RUNTIME) -@JacksonAnnotationsInside -@JsonProperty -@JsonSerialize(using = IdReferenceSerializer.class) -@JsonDeserialize(using = MetaIdReferenceDeserializer.class) -@Target({ElementType.FIELD, ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) -public @interface MetaIdRef { -} \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRefCollection.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRefCollection.java deleted file mode 100644 index 6711fab58a..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRefCollection.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * An annotation that guides Jackson to serialize/deserialize the field as a {@link Id} instead of the object content itself. - */ -@Retention(RetentionPolicy.RUNTIME) -@JacksonAnnotationsInside -@JsonProperty -@JsonSerialize(contentUsing=IdReferenceSerializer.class) -@JsonDeserialize(contentUsing=MetaIdReferenceDeserializer.class) -@Target({ElementType.FIELD, ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) -public @interface MetaIdRefCollection {} \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdReferenceDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdReferenceDeserializer.java deleted file mode 100644 index e08b026eea..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdReferenceDeserializer.java +++ /dev/null @@ -1,93 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.io.IOException; -import java.util.InputMismatchException; -import java.util.Optional; - -import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonToken; -import com.fasterxml.jackson.databind.*; -import com.fasterxml.jackson.databind.deser.ContextualDeserializer; -import com.fasterxml.jackson.databind.deser.SettableBeanProperty; -import com.fasterxml.jackson.databind.jsontype.TypeDeserializer; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -@AllArgsConstructor -@NoArgsConstructor -public class MetaIdReferenceDeserializer, T extends Identifiable> extends JsonDeserializer implements ContextualDeserializer { - - private Class type; - private JsonDeserializer beanDeserializer; - private Class idClass; - - @SuppressWarnings("unchecked") - @Override - public T deserialize(JsonParser parser, DeserializationContext ctxt) throws IOException { - if (parser.getCurrentToken() != JsonToken.VALUE_STRING) { - return (T) 
ctxt.handleUnexpectedToken(type, parser.getCurrentToken(), parser, "name references should be strings"); - } - - ID id = ctxt.readValue(parser, idClass); - - try { - final CentralRegistry centralRegistry = MetaStorage.get(ctxt).getCentralRegistry(); - - // Not all Components have registries, we leave it up to the validator to be angry. - if (centralRegistry == null) { - return null; - } - - Optional result = centralRegistry.getOptional(id); - - if (result.isEmpty()) { - throw new IdReferenceResolvingException(parser, "Could not find entry `" + id + "` of type " + type.getName(), id.toString(), type); - } - - if (!type.isAssignableFrom(result.get().getClass())) { - throw new InputMismatchException(String.format("Cannot assign type %s to %s ", result.get().getClass(), type)); - } - - return result.get(); - } - catch (Exception e) { - log.error("Error while resolving entry {} of type {}", id, type, e); - throw e; - } - } - - @Override - public T deserializeWithType(JsonParser p, DeserializationContext ctxt, TypeDeserializer typeDeserializer) throws IOException { - return this.deserialize(p, ctxt); - } - - @Override - public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - - JavaType type = Optional.ofNullable(ctxt.getContextualType()) - .orElseGet(property::getType); - - BeanDescription descr = ctxt.getConfig().introspect(type); - - while (type.isContainerType()) { - type = type.getContentType(); - } - - Class cl = type.getRawClass(); - Class idClass = IdUtil.findIdClass(cl); - - return new MetaIdReferenceDeserializer<>(cl, ctxt.getFactory().createBeanDeserializer(ctxt, type, descr), idClass); - } - - @Override - public SettableBeanProperty findBackReference(String refName) { - return beanDeserializer.findBackReference(refName); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRef.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRef.java deleted file mode 100644 index 155d5224fc..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRef.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * An annotation that guides Jackson to serialize/deserialize the field as a {@link NamespacedId} instead of the object content itself. 
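The deleted @NsIdRef-style annotations and their reference deserializers used to make Jackson resolve the referenced object at parse time; across this diff they give way to plain id-typed fields that are resolved only where the object is actually needed. A minimal sketch of that shape, with hypothetical TableId/Table stand-ins and a map-backed resolve():

import java.util.Map;

public class IdFieldSketch {

	record Table(String name, int columnCount) {}

	// Hypothetical id that can look itself up; the real ids resolve through their storage.
	record TableId(String name) {
		Table resolve(Map<TableId, Table> storage) {
			Table table = storage.get(this);
			if (table == null) {
				throw new IllegalStateException("Unknown table: " + name);
			}
			return table;
		}
	}

	// Query-like holder: stores only the id (cheap to (de)serialize), resolves lazily.
	record ExportQuery(TableId table) {
		String describe(Map<TableId, Table> storage) {
			Table resolved = table.resolve(storage);
			return resolved.name() + " with " + resolved.columnCount() + " columns";
		}
	}

	public static void main(String[] args) {
		Map<TableId, Table> storage = Map.of(new TableId("cases"), new Table("cases", 12));

		ExportQuery query = new ExportQuery(new TableId("cases"));
		System.out.println(query.describe(storage)); // cases with 12 columns
	}
}

The apparent intent, judging from hunks like SecondaryIdQuery.createQueryPlan above, is to defer registry lookups from deserialization time to query-planning time while keeping the serialized form a plain id string.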
- */ -@Retention(RetentionPolicy.RUNTIME) -@JacksonAnnotationsInside -@JsonProperty -@JsonSerialize(using = IdReferenceSerializer.class) -@JsonDeserialize(using = NsIdReferenceDeserializer.class) -@Target({ElementType.FIELD, ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) -public @interface NsIdRef { -} \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefCollection.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefCollection.java deleted file mode 100644 index cb3f0a3d15..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefCollection.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * An annotation that guides Jackson to serialize/deserialize the field as a {@link NamespacedId} instead of the object content itself. - * - */ -@Retention(RetentionPolicy.RUNTIME) -@JacksonAnnotationsInside -@JsonProperty -@JsonSerialize(contentUsing=IdReferenceSerializer.class) -@JsonDeserialize(contentUsing=NsIdReferenceDeserializer.class) -@Target({ElementType.FIELD, ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) -public @interface NsIdRefCollection {} \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefKeys.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefKeys.java deleted file mode 100644 index 08bb4ddc2a..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefKeys.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - - -/** - * Annotation for Maps where the keys are supposed to be NsIdRefs - */ -@JacksonAnnotationsInside -@JsonDeserialize(keyUsing = NsIdReferenceKeyDeserializer.class) -@JsonSerialize(keyUsing = IdReferenceKeySerializer.class) -@Target({ElementType.FIELD, ElementType.PARAMETER}) -@Retention(RetentionPolicy.RUNTIME) -public @interface NsIdRefKeys { -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceDeserializer.java deleted file mode 100644 index 59414d51b8..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceDeserializer.java +++ /dev/null @@ -1,95 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.io.IOException; -import java.util.InputMismatchException; -import java.util.Optional; - -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; 
-import com.bakdata.conquery.models.identifiable.ids.IdUtil; -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.worker.IdResolveContext; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonToken; -import com.fasterxml.jackson.databind.BeanDescription; -import com.fasterxml.jackson.databind.BeanProperty; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JavaType; -import com.fasterxml.jackson.databind.JsonDeserializer; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.deser.ContextualDeserializer; -import com.fasterxml.jackson.databind.deser.SettableBeanProperty; -import com.fasterxml.jackson.databind.jsontype.TypeDeserializer; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -@AllArgsConstructor -@NoArgsConstructor -public class NsIdReferenceDeserializer & NamespacedId, T extends Identifiable> extends JsonDeserializer implements ContextualDeserializer { - - private Class type; - private JsonDeserializer beanDeserializer; - private Class idClass; - - @Override - public T deserializeWithType(JsonParser p, DeserializationContext ctxt, TypeDeserializer typeDeserializer) throws IOException { - return this.deserialize(p, ctxt); - } - - @SuppressWarnings("unchecked") - @Override - public T deserialize(JsonParser parser, DeserializationContext ctxt) throws IOException { - if (parser.getCurrentToken() != JsonToken.VALUE_STRING) { - return (T) ctxt.handleUnexpectedToken(type, parser.getCurrentToken(), parser, "name references should be strings"); - } - - ID id = ctxt.readValue(parser, idClass); - - try { - - final IdResolveContext idResolveContext = IdResolveContext.get(ctxt); - Optional result = idResolveContext.getOptional(id); - - if (result.isEmpty()) { - throw new IdReferenceResolvingException(parser, "Could not find entry `" + id + "` of type " + type.getName(), id.toString(), type); - } - - if (!type.isAssignableFrom(result.get().getClass())) { - throw new InputMismatchException(String.format("Cannot assign %s of type %s to %s ", id, result.get().getClass(), type)); - } - - return result.get(); - } - catch (Exception e) { - throw new RuntimeException("Error while resolving entry " + id + " of type " + type, e); - } - } - - @Override - public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - JavaType type = Optional - .ofNullable(ctxt.getContextualType()) - .orElseGet(property::getType); - - BeanDescription descr = ctxt.getConfig().introspect(type); - - while (type.isContainerType()) { - type = type.getContentType(); - } - Class cl = (Class) type.getRawClass(); - Class idClass = IdUtil.findIdClass(cl); - - return new NsIdReferenceDeserializer<>( - cl, - ctxt.getFactory().createBeanDeserializer(ctxt, type, descr), - idClass - ); - } - - @Override - public SettableBeanProperty findBackReference(String refName) { - return beanDeserializer.findBackReference(refName); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceKeyDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceKeyDeserializer.java deleted file mode 100644 index 1fd0fde25e..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceKeyDeserializer.java +++ /dev/null @@ -1,43 +0,0 @@ -package 
com.bakdata.conquery.io.jackson.serializer; - -import java.io.IOException; - -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; -import com.bakdata.conquery.models.worker.IdResolveContext; -import com.fasterxml.jackson.databind.BeanProperty; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.KeyDeserializer; -import com.fasterxml.jackson.databind.deser.ContextualKeyDeserializer; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; - -/** - * Deserializer for Map keys. See {@link NsIdRef} for details. - */ -@AllArgsConstructor -@NoArgsConstructor -public class NsIdReferenceKeyDeserializer & NamespacedId, VALUE extends NamespacedIdentifiable> extends KeyDeserializer implements ContextualKeyDeserializer { - - private IdUtil.Parser parser; - - @Override - public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException { - final ID id = parser.parse(key); - - return IdResolveContext.get(ctxt).resolve(id); - } - - @Override - public KeyDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - - final Class idClass = IdUtil.findIdClass(property.getType().getKeyType().getRawClass()); - final IdUtil.Parser parser = IdUtil.createParser(idClass); - - - return new NsIdReferenceKeyDeserializer<>(parser); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java b/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java index 2bfdb36dce..a60d742f96 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java +++ b/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java @@ -1,14 +1,15 @@ package com.bakdata.conquery.io.mina; +import jakarta.validation.Validator; + import com.bakdata.conquery.models.exceptions.ValidatorHelper; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.messages.network.NetworkMessage; -import com.bakdata.conquery.models.worker.IdResolveContext; import com.bakdata.conquery.util.io.EndCheckableInputStream; import com.fasterxml.jackson.core.JsonParser.Feature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; -import jakarta.validation.Validator; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -18,17 +19,10 @@ public class BinaryJacksonCoder implements CQCoder> { private final ObjectWriter writer; private final ObjectReader reader; - public BinaryJacksonCoder(IdResolveContext datasets, Validator validator, ObjectMapper objectMapper) { + public BinaryJacksonCoder(NamespacedStorageProvider namespacedStorageProvider, Validator validator, ObjectMapper objectMapper) { this.validator = validator; writer = objectMapper.writerFor(NetworkMessage.class); - reader = datasets.injectIntoNew(objectMapper.readerFor(NetworkMessage.class)).without(Feature.AUTO_CLOSE_SOURCE); - } - - @Override - public Chunkable encode(NetworkMessage message) throws Exception { - ValidatorHelper.failOnError(log, validator.validate(message)); - - return new Chunkable(message.getMessageId(), writer, message); + reader = 
namespacedStorageProvider.injectIntoNew(objectMapper.readerFor(NetworkMessage.class)).without(Feature.AUTO_CLOSE_SOURCE); } @Override @@ -42,4 +36,11 @@ public NetworkMessage decode(ChunkedMessage message) throws Exception { return (NetworkMessage) obj; } } + + @Override + public Chunkable encode(NetworkMessage message) throws Exception { + ValidatorHelper.failOnError(log, validator.validate(message)); + + return new Chunkable(message.getMessageId(), writer, message); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java b/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java index e49a9554ba..53a354b7ba 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java @@ -5,18 +5,18 @@ import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Locale; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.auth.permissions.Ability; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.util.io.FileUtil; import com.google.common.base.Strings; -import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.core.HttpHeaders; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -77,9 +77,9 @@ public static void checkSingleTableResult(ManagedExecution exec) { public static void authorizeExecutable(Subject subject, ManagedExecution exec) { - final Dataset dataset = exec.getDataset(); - subject.authorize(dataset, Ability.READ); - subject.authorize(dataset, Ability.DOWNLOAD); + final DatasetId datasetId = exec.getDataset(); + subject.authorize(datasetId, Ability.READ); + subject.authorize(datasetId, Ability.DOWNLOAD); subject.authorize(exec, Ability.READ); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ArrowRenderer.java b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ArrowRenderer.java index fde3a9ad80..d1f84adf31 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ArrowRenderer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ArrowRenderer.java @@ -9,19 +9,21 @@ import java.util.stream.Stream; import com.bakdata.conquery.models.common.CDate; +import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.config.ArrowConfig; import com.bakdata.conquery.models.identifiable.mapping.PrintIdMapper; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.query.results.EntityResult; +import com.bakdata.conquery.models.types.ResultType; import lombok.extern.slf4j.Slf4j; import org.apache.arrow.util.Preconditions; import org.apache.arrow.vector.BitVector; import 
org.apache.arrow.vector.DateDayVector; import org.apache.arrow.vector.FieldVector; -import org.apache.arrow.vector.Float4Vector; import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.ValueVector; @@ -33,277 +35,288 @@ import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.Schema; import org.apache.arrow.vector.util.Text; +import org.jetbrains.annotations.NotNull; @Slf4j public class ArrowRenderer { - public static void renderToStream( + public static void renderToStream( Function writerProducer, PrintSettings printSettings, ArrowConfig arrowConfig, List idHeaders, List resultInfo, - Stream results) throws IOException { + Stream results, PrinterFactory printerFactory) throws IOException { - List fields = ArrowUtil.generateFields(idHeaders, resultInfo, new UniqueNamer(printSettings)); - VectorSchemaRoot root = VectorSchemaRoot.create(new Schema(fields, null), ROOT_ALLOCATOR); + final List fields = ArrowUtil.generateFields(idHeaders, resultInfo, new UniqueNamer(printSettings), printSettings); + final VectorSchemaRoot root = VectorSchemaRoot.create(new Schema(fields, null), ROOT_ALLOCATOR); // Build separate pipelines for id and value, as they have different sources but the same target - RowConsumer[] idWriters = generateWriterPipeline(root, 0, idHeaders.size(), printSettings, idHeaders); - RowConsumer[] valueWriter = generateWriterPipeline(root, idHeaders.size(), resultInfo.size(), printSettings, resultInfo); + final RowConsumer[] idWriters = generateWriterPipeline(root, 0, idHeaders.size(), idHeaders); + final RowConsumer[] valueWriter = generateWriterPipeline(root, idHeaders.size(), resultInfo.size(), resultInfo); + + final List printers = + Stream.concat(idHeaders.stream(), resultInfo.stream()) + .map(info -> info.createPrinter(printerFactory, printSettings)) + .toList(); // Write the data try (ArrowWriter writer = writerProducer.apply(root)) { - write(writer, root, idWriters, valueWriter, printSettings.getIdMapper(), results, arrowConfig.getBatchSize()); + write(writer, root, idWriters, valueWriter, printSettings.getIdMapper(), printers, results, arrowConfig.getBatchSize()); } - } + } public static void write( ArrowWriter writer, VectorSchemaRoot root, - RowConsumer[] idWriter, - RowConsumer[] valueWriter, + RowConsumer[] idWriters, + RowConsumer[] valueWriters, PrintIdMapper idMapper, + List printers, Stream results, int batchSize) throws IOException { Preconditions.checkArgument(batchSize > 0, "Batch size needs be larger than 0."); // TODO add time metric for writing - log.trace("Starting result write"); - writer.start(); - int batchCount = 0; - int batchLineCount = 0; - Iterator resultIterator = results.iterator(); - while (resultIterator.hasNext()) { - EntityResult cer = resultIterator.next(); - for (Object[] line : cer.listResultLines()) { - if(line.length != valueWriter.length) { - throw new IllegalStateException("The number of value writers and values in a result line differs. 
Writers: " + valueWriter.length + " Line: " + line.length); - } - for (RowConsumer rowConsumer : idWriter) { - // Write id information - rowConsumer.accept(batchLineCount, idMapper.map(cer).getExternalId()); - } - for (RowConsumer rowConsumer : valueWriter) { - // Write values - rowConsumer.accept(batchLineCount, line); - } - batchLineCount++; - - if (batchLineCount >= batchSize) { - root.setRowCount(batchLineCount); - writer.writeBatch(); - root.clear(); - batchLineCount = 0; - } - } - } - if (batchLineCount > 0) { - root.setRowCount(batchLineCount); - writer.writeBatch(); - root.clear(); - batchCount++; - } - log.trace("Wrote {} batches of size {} (last batch might be smaller)", batchCount, batchSize); - writer.end(); - } - - private static RowConsumer intVectorFiller(IntVector vector, Function resultExtractor) { - return (rowNumber, line) -> { - Integer value = resultExtractor.apply(line); - if (value == null) { - vector.setNull(rowNumber); - return; - } - vector.setSafe(rowNumber, value); - }; - } - - private static RowConsumer bitVectorFiller(BitVector vector, Function resultExtractor) { - return (rowNumber, line) -> { - Boolean value = resultExtractor.apply(line); - if (value == null) { - vector.setNull(rowNumber); - return; - } - vector.setSafe(rowNumber, value ? 1 : 0); - }; - } - - private static RowConsumer float8VectorFiller(Float8Vector vector, Function resultExtractor) { - return (rowNumber, line) -> { - Number value = resultExtractor.apply(line); - if (value == null) { - vector.setNull(rowNumber); - return; - } - vector.setSafe(rowNumber, value.doubleValue()); - }; - } - - private static RowConsumer float4VectorFiller(Float4Vector vector, Function resultExtractor) { - return (rowNumber, line) -> { - Number value = resultExtractor.apply(line); - if (value == null) { - vector.setNull(rowNumber); - return; - } - vector.setSafe(rowNumber, value.floatValue()); - }; - } - - private static RowConsumer varCharVectorFiller(VarCharVector vector, Function resultExtractor) { - return (rowNumber, line) -> { - String value = resultExtractor.apply(line); - if (value == null) { - vector.setNull(rowNumber); - return; - } - vector.setSafe(rowNumber, new Text(value)); - }; - } - - private static RowConsumer dateDayVectorFiller(DateDayVector vector, Function resultExtractor) { - return (rowNumber, line) -> { - Number value = resultExtractor.apply(line); - if (value == null) { - vector.setNull(rowNumber); - return; - } - - // Treat our internal infinity dates (Interger.MIN and Integer.MAX) also as null - final int epochDay = value.intValue(); - if (CDate.isNegativeInfinity(epochDay) || CDate.isPositiveInfinity(epochDay)) { - vector.setNull(rowNumber); - return; - } - - vector.setSafe(rowNumber, epochDay); - }; - } - - private static RowConsumer structVectorFiller(StructVector vector, RowConsumer [] nestedConsumers, Function> resultExtractor) { - return (rowNumber, line) -> { - // Values is a horizontal list - List values = resultExtractor.apply(line); - if (values == null) { - vector.setNull(rowNumber); - return; - } - if(values.size() != nestedConsumers.length) { - throw new IllegalStateException("The number of the provided nested value differs from the number of consumer for the generated vectors. 
Provided values: " + values + "\t Available consumers: " + nestedConsumers.length); - } - for (RowConsumer nestedConsumer : nestedConsumers) { - nestedConsumer.accept(rowNumber, values.toArray()); - } - - // Finally mark that we populated the nested vectors - vector.setIndexDefined(rowNumber); - }; - } - - private static RowConsumer listVectorFiller(ListVector vector, RowConsumer nestedConsumer, Function> resultExtractor){ - return (rowNumber, line) -> { - // Values is a vertical list - List values = resultExtractor.apply(line); - if (values == null) { - vector.setNull(rowNumber); - return; - } - - int start = vector.startNewValue(rowNumber); - - for (int i = 0; i < values.size(); i++) { - // These short lived one value arrays are a workaround at the moment - nestedConsumer.accept(Math.addExact(start, i), new Object[] {values.get(i)}); - } - - vector.endValue(rowNumber, values.size()); - }; - } - - - public static RowConsumer[] generateWriterPipeline(VectorSchemaRoot root, int vectorOffset, int numVectors, final PrintSettings settings, List resultInfos) { - Preconditions.checkArgument(vectorOffset >= 0, "Offset was negative: %s", vectorOffset); - Preconditions.checkArgument(numVectors >= 0, "Number of vectors was negative: %s", numVectors); - - RowConsumer[] builder = new RowConsumer[numVectors]; - - for ( - int vecI = vectorOffset; - (vecI < root.getFieldVectors().size()) && (vecI < vectorOffset + numVectors); - vecI++ - ) { - final int pos = vecI - vectorOffset; - final FieldVector vector = root.getVector(vecI); - final ResultInfo resultInfo = resultInfos.get(pos); - builder[pos] = generateVectorFiller(pos, vector, settings, resultInfo.getPrinter()); + log.trace("Starting result write"); + + writer.start(); + int batchCount = 0; + int batchLineCount = 0; + final Iterator resultIterator = results.iterator(); + + while (resultIterator.hasNext()) { + final EntityResult cer = resultIterator.next(); + + final Object[] printedExternalId = getPrintedExternalId(idWriters, idMapper, printers, cer); + + for (Object[] line : cer.listResultLines()) { + Preconditions.checkState( + line.length == valueWriters.length, + "The number of value writers and values in a result line differs. Writers: %d Line: %d".formatted(valueWriters.length, line.length) + ); + + for (int index = 0; index < idWriters.length; index++) { + if (printedExternalId[index] == null) { + continue; + } + + idWriters[index].accept(batchLineCount, printedExternalId[index]); + } + + for (int index = 0; index < valueWriters.length; index++) { + final int colId = index + idWriters.length; + // In this case, the printer normalizes and adjusts values. 
+ + final Object value = line[index]; + + Object printed = null; + + if (value != null) { + Printer printer = printers.get(colId); + printed = printer.apply(value); + } + + valueWriters[index].accept(batchLineCount, printed); + } + batchLineCount++; + + if (batchLineCount >= batchSize) { + root.setRowCount(batchLineCount); + writer.writeBatch(); + root.clear(); + batchLineCount = 0; + } + } + } + if (batchLineCount > 0) { + root.setRowCount(batchLineCount); + writer.writeBatch(); + root.clear(); + batchCount++; } - return builder; + log.trace("Wrote {} batches of size {} (last batch might be smaller)", batchCount, batchSize); + writer.end(); + } - } + @NotNull + private static Object[] getPrintedExternalId(RowConsumer[] idWriters, PrintIdMapper idMapper, List printers, EntityResult cer) { + final String[] externalId = idMapper.map(cer).getExternalId(); - private static RowConsumer generateVectorFiller(int pos, ValueVector vector, final PrintSettings settings, ResultPrinters.Printer printer) { - if (vector instanceof IntVector intVector) { - return intVectorFiller(intVector, (line) -> (Integer) line[pos]); + final Object[] printedExternalId = new String[externalId.length]; + + for (int index = 0; index < idWriters.length; index++) { + Printer printer = printers.get(index); + printedExternalId[index] = printer.apply(externalId[index]); } + return printedExternalId; + } + + private static RowConsumer intVectorFiller(IntVector vector) { + return (rowNumber, valueRaw) -> { + if (valueRaw == null) { + vector.setNull(rowNumber); + return; + } + + final Integer value = (Integer) valueRaw; + + vector.setSafe(rowNumber, value); + }; + } + + private static RowConsumer bitVectorFiller(BitVector vector) { + return (rowNumber, valueRaw) -> { + if (valueRaw == null) { + vector.setNull(rowNumber); + return; + } + + final Boolean value = (Boolean) valueRaw; + + vector.setSafe(rowNumber, value ? 
1 : 0); }; } + + private static RowConsumer moneyVectorFiller(IntVector vector) { + return (rowNumber, valueRaw) -> { + if (valueRaw == null) { + vector.setNull(rowNumber); + return; + } + + final int value = (int) valueRaw; + + vector.setSafe(rowNumber, value); + }; + } + + private static RowConsumer float8VectorFiller(Float8Vector vector) { + return (rowNumber, valueRaw) -> { + if (valueRaw == null) { + vector.setNull(rowNumber); + return; + } + + final Number value = (Number) valueRaw; + + vector.setSafe(rowNumber, value.doubleValue()); + }; + } + + private static RowConsumer varCharVectorFiller(VarCharVector vector) { + return (rowNumber, valueRaw) -> { + if (valueRaw == null) { + vector.setNull(rowNumber); + return; + } + final String value = (String) valueRaw; + vector.setSafe(rowNumber, new Text(value)); + }; + } + + private static RowConsumer dateDayVectorFiller(DateDayVector vector) { + return (rowNumber, valueRaw) -> { + if (valueRaw == null) { + vector.setNull(rowNumber); + return; + } + + final Number value = (Number) valueRaw; + + // Treat our internal infinity dates (Integer.MIN and Integer.MAX) also as null + final int epochDay = value.intValue(); + + if (CDate.isNegativeInfinity(epochDay) || CDate.isPositiveInfinity(epochDay)) { + vector.setNull(rowNumber); + return; + } + + vector.setSafe(rowNumber, epochDay); + }; + } + + private static RowConsumer dateRangeVectorFiller(StructVector vector) { + final List nestedVectors = vector.getPrimitiveVectors(); + final RowConsumer minConsumer = generateVectorFiller(nestedVectors.get(0), ResultType.Primitive.DATE); + final RowConsumer maxConsumer = generateVectorFiller(nestedVectors.get(1), ResultType.Primitive.DATE); + + return ((rowNumber, valueRaw) -> { + if (valueRaw == null) { + vector.setNull(rowNumber); + return; + } + + final CDateRange value = (CDateRange) valueRaw; + + minConsumer.accept(rowNumber, value.getMinValue()); + maxConsumer.accept(rowNumber, value.getMaxValue()); + + // Finally mark that we populated the nested vectors + vector.setIndexDefined(rowNumber); + }); + } + + private static RowConsumer listVectorFiller(ListVector vector, RowConsumer nestedConsumer) { + return (rowNumber, valueRaw) -> { + + if (valueRaw == null) { + vector.setNull(rowNumber); + return; + } + + final List values = (List) valueRaw; - if (vector instanceof VarCharVector varCharVector) { - return varCharVectorFiller( - varCharVector, - (line) -> { - // This is a bit clunky at the moment, since this lambda is executed for each textual value - // in the result, but it should be okay for now. This code moves as soon shards deliver themselves - // arrow as a result. 
+ final int start = vector.startNewValue(rowNumber); - if (line[pos] == null) { - // If there is no value, we don't want to have it displayed as an empty string (see next if) - return null; - } - // We reference the printer directly, - return printer.print(line[pos]); - }); - } + for (int i = 0; i < values.size(); i++) { + nestedConsumer.accept(Math.addExact(start, i), values.get(i)); + } - if (vector instanceof BitVector bitVector) { - return bitVectorFiller(bitVector, (line) -> (Boolean) line[pos]); - } + vector.endValue(rowNumber, values.size()); + }; + } - if (vector instanceof Float4Vector float4Vector) { - return float4VectorFiller(float4Vector, (line) -> (Number) line[pos]); - } - if (vector instanceof Float8Vector float8Vector) { - return float8VectorFiller(float8Vector, (line) -> (Number) line[pos]); - } + public static RowConsumer[] generateWriterPipeline(VectorSchemaRoot root, int vectorOffset, int numVectors, List resultInfos) { + Preconditions.checkArgument(vectorOffset >= 0, "Offset was negative: %s", vectorOffset); + Preconditions.checkArgument(numVectors >= 0, "Number of vectors was negative: %s", numVectors); - if (vector instanceof DateDayVector dateDayVector) { - return dateDayVectorFiller(dateDayVector, (line) -> (Number) line[pos]); - } + final RowConsumer[] builder = new RowConsumer[numVectors]; - if (vector instanceof StructVector structVector) { + for (int vecI = vectorOffset; (vecI < root.getFieldVectors().size()) && (vecI < vectorOffset + numVectors); vecI++) { + final int pos = vecI - vectorOffset; + final FieldVector vector = root.getVector(vecI); + final ResultInfo resultInfo = resultInfos.get(pos); + builder[pos] = generateVectorFiller(vector, resultInfo.getType()); - List nestedVectors = structVector.getPrimitiveVectors(); - RowConsumer [] nestedConsumers = new RowConsumer[nestedVectors.size()]; + } + return builder; - for (int i = 0; i < nestedVectors.size(); i++) { - nestedConsumers[i] = generateVectorFiller(i, nestedVectors.get(i), settings, printer); - } - return structVectorFiller(structVector, nestedConsumers, (line) -> (List) line[pos]); - } + } + + private static RowConsumer generateVectorFiller(ValueVector vector, ResultType type) { + if (type instanceof ResultType.ListT listT) { + final ValueVector nestedVector = ((ListVector) vector).getDataVector(); + + return listVectorFiller(((ListVector) vector), generateVectorFiller(nestedVector, listT.getElementType())); + } - if (vector instanceof ListVector listVector) { + return switch (((ResultType.Primitive) type)) { + case BOOLEAN -> bitVectorFiller(((BitVector) vector)); + case INTEGER -> intVectorFiller(((IntVector) vector)); + case MONEY -> moneyVectorFiller(((IntVector) vector)); + case DATE -> dateDayVectorFiller(((DateDayVector) vector)); + case NUMERIC -> float8VectorFiller((Float8Vector) vector); + case STRING -> varCharVectorFiller(((VarCharVector) vector)); + case DATE_RANGE -> dateRangeVectorFiller((StructVector) vector); - ValueVector nestedVector = listVector.getDataVector(); + }; - // pos = 0 is a workaround for now - return listVectorFiller(listVector, generateVectorFiller(0, nestedVector, settings, ((ResultPrinters.ListPrinter) printer).elementPrinter()), (line) -> (List) line[pos]); - } - throw new IllegalArgumentException("Unsupported vector type " + vector); - } + } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ArrowUtil.java b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ArrowUtil.java index 7d2bc1ffcd..741885e961 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ArrowUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ArrowUtil.java @@ -2,9 +2,8 @@ import java.util.ArrayList; import java.util.List; -import java.util.function.BiFunction; -import java.util.stream.Collectors; +import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; import com.bakdata.conquery.models.types.ResultType; @@ -23,93 +22,85 @@ public class ArrowUtil { public static final RootAllocator ROOT_ALLOCATOR = new RootAllocator(); - private BiFunction fieldFor(ResultType type) { + private Field fieldFor(ResultType type, String name) { if (type instanceof ResultType.ListT) { - return ArrowUtil::listField; + return ArrowUtil.listField(name, type); } return switch (((ResultType.Primitive) type)) { - case BOOLEAN -> ArrowUtil::boolField; - case INTEGER, MONEY -> ArrowUtil::integerField; - case NUMERIC -> ArrowUtil::floatField; - case DATE -> ArrowUtil::dateField; - case DATE_RANGE -> ArrowUtil::dateRangeField; - case STRING -> ArrowUtil::stringField; + case BOOLEAN -> ArrowUtil.boolField(name); + case INTEGER -> ArrowUtil.integerField(name); + case MONEY -> ArrowUtil.moneyField(name); + case NUMERIC -> ArrowUtil.floatField(name); + case DATE -> ArrowUtil.dateField(name); + case DATE_RANGE -> ArrowUtil.dateRangeField(name); + case STRING -> ArrowUtil.stringField(name); }; } - private static Field stringField(ResultInfo info, @NonNull String uniqueName) { + private static Field stringField(@NonNull String uniqueName) { return new Field(uniqueName, FieldType.nullable(new ArrowType.Utf8()), null); } - private static Field boolField(ResultInfo info, @NonNull String uniqueName) { + private static Field boolField(@NonNull String uniqueName) { return new Field(uniqueName, FieldType.nullable(ArrowType.Bool.INSTANCE), null); } - private static Field integerField(ResultInfo info, @NonNull String uniqueName) { + private static Field integerField(@NonNull String uniqueName) { return new Field(uniqueName, FieldType.nullable(new ArrowType.Int(32, true)), null); } - private static Field floatField(ResultInfo info, @NonNull String uniqueName) { + private static Field moneyField(@NonNull String uniqueName) { + /*TODO FK + use decimal: new Field(uniqueName, FieldType.nullable(new ArrowType.Decimal(38 - scale, scale, 128)), null); + This will also impact Frontend preview, and ExternalFormBackends, needs planning. 
+ Note: I suspect jsArrow has a bug, where it reads Decimal as BigInt + */ + return new Field(uniqueName, FieldType.nullable(new ArrowType.Int(32, true)), null); + + } + + private static Field floatField(@NonNull String uniqueName) { return new Field(uniqueName, FieldType.nullable(new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)), null); } - private static Field dateField(ResultInfo info, @NonNull String uniqueName) { + private static Field dateField(@NonNull String uniqueName) { return new Field(uniqueName, FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null); } - private static Field dateRangeField(ResultInfo info, @NonNull String uniqueName) { + private static Field dateRangeField(@NonNull String uniqueName) { return new Field( uniqueName, FieldType.nullable(ArrowType.Struct.INSTANCE), List.of( - dateField(info, "min"), - dateField(info, "max") + dateField("min"), + dateField("max") )); } - private static Field listField(ResultInfo info, @NonNull String uniqueName) { - if (!(info.getType() instanceof ResultType.ListT)) { - throw new IllegalStateException("Expected result type of " + ResultType.ListT.class.getName() + " but got " + info.getType().getClass().getName()); - } - - final ResultType elementType = ((ResultType.ListT) info.getType()).getElementType(); - BiFunction nestedFieldCreator = fieldFor(elementType); - final Field nestedField = nestedFieldCreator.apply(info, uniqueName); - return new Field( - uniqueName, - FieldType.nullable(ArrowType.List.INSTANCE), - List.of(nestedField) - ); - } - - /** - * Creates an arrow field vector (a column) corresponding to the internal conquery type and initializes the column with - * a localized header. - * @param info internal meta data for the result column - * @param collector to create unique names across the columns - * @return a Field (the arrow representation of the column) - */ - public Field createField(ResultInfo info, UniqueNamer collector) { - // Fallback to string field if type is not explicitly registered - BiFunction fieldCreator = fieldFor(info.getType()); - return fieldCreator.apply(info, collector.getUniqueName(info)); + private static Field listField(@NonNull String uniqueName, ResultType type) { + final ResultType elementType = ((ResultType.ListT) type).getElementType(); + final Field nestedField = fieldFor(elementType, uniqueName); + + return new Field(uniqueName, FieldType.nullable(ArrowType.List.INSTANCE), List.of(nestedField)); } - public static List generateFields(@NonNull List info, UniqueNamer collector) { + public static List generateFields(@NonNull List info, UniqueNamer collector, PrintSettings printSettings) { return info.stream() - .map(i -> createField(i, collector)) - .collect(Collectors.toUnmodifiableList()); + .map(i -> fieldFor(i.getType(), collector.getUniqueName(i, printSettings))) + .toList(); } @NotNull - public static List generateFields(List idHeaders, List resultInfo, UniqueNamer uniqueNamer) { + public static List generateFields(List idHeaders, List resultInfo, UniqueNamer uniqueNamer, PrintSettings printSettings) { // Combine id and value Fields to one vector to build a schema - final List idFields = generateFields(idHeaders, uniqueNamer); - List fields = new ArrayList<>(idFields); - fields.addAll(generateFields(resultInfo, uniqueNamer)); + List fields = new ArrayList<>(); + + fields.addAll(generateFields(idHeaders, uniqueNamer, printSettings)); + fields.addAll(generateFields(resultInfo, uniqueNamer, printSettings)); + return fields; } } diff --git 
a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java index 7e2c02ada1..68d0f4b245 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java @@ -11,26 +11,27 @@ import java.util.Locale; import java.util.OptionalLong; import java.util.function.Function; +import jakarta.inject.Inject; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.config.ArrowConfig; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; +import com.bakdata.conquery.models.query.resultinfo.printers.ArrowResultPrinters; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.arrow.vector.VectorSchemaRoot; @@ -83,15 +84,15 @@ public static Response getArrow ConqueryMDC.setLocation(subject.getName()); - final Dataset dataset = exec.getDataset(); + final DatasetId datasetId = exec.getDataset(); - log.info("Downloading results for {}", exec.getId()); + log.info("Downloading results for {}", datasetId); ResultUtil.authorizeExecutable(subject, exec); // Get the locale extracted by the LocaleFilter - final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Namespace namespace = datasetRegistry.get(datasetId); IdPrinter idPrinter = IdColumnUtil.getIdPrinter(subject, exec, namespace, config.getIdColumns().getIds()); final Locale locale = I18n.LOCALE.get(); @@ -99,8 +100,8 @@ public static Response getArrow // Collect ResultInfos for id columns and result columns - final List resultInfosId = config.getIdColumns().getIdResultInfos(settings); - final List resultInfosExec = exec.getResultInfos(settings); + final List resultInfosId = config.getIdColumns().getIdResultInfos(); + final List resultInfosExec = exec.getResultInfos(); StreamingOutput out = output -> { try { @@ -110,7 +111,7 @@ public static Response getArrow arrowConfig, resultInfosId, resultInfosExec, - exec.streamResults(limit) + exec.streamResults(limit), new ArrowResultPrinters() ); } finally { diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/RowConsumer.java b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/RowConsumer.java index 9c9d27a5af..057a98306b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/RowConsumer.java +++ 
b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/RowConsumer.java @@ -1,7 +1,5 @@ package com.bakdata.conquery.io.result.arrow; -import java.util.Objects; - @FunctionalInterface public interface RowConsumer { @@ -10,23 +8,6 @@ public interface RowConsumer { * * @param t the input argument */ - void accept(int rowNumber, Object[] row); - - /** - * Returns a composed {@code Consumer} that performs, in sequence, this - * operation followed by the {@code after} operation. If performing either - * operation throws an exception, it is relayed to the caller of the - * composed operation. If performing this operation throws an exception, - * the {@code after} operation will not be performed. - * - * @param after the operation to perform after this operation - * @return a composed {@code Consumer} that performs in sequence this - * operation followed by the {@code after} operation - * @throws NullPointerException if {@code after} is null - */ - default RowConsumer andThen(RowConsumer after) { - Objects.requireNonNull(after); - return (n, r) -> { accept(n, r); after.accept(n, r); }; - } + void accept(int rowNumber, Object value); } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/csv/CsvRenderer.java b/backend/src/main/java/com/bakdata/conquery/io/result/csv/CsvRenderer.java index 295df2b14a..a9eac207ff 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/csv/CsvRenderer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/csv/CsvRenderer.java @@ -9,6 +9,9 @@ import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; +import com.bakdata.conquery.models.query.resultinfo.printers.StringResultPrinters; import com.bakdata.conquery.models.query.results.EntityResult; import com.univocity.parsers.csv.CsvWriter; import lombok.RequiredArgsConstructor; @@ -22,36 +25,46 @@ public class CsvRenderer { private final CsvWriter writer; private final PrintSettings cfg; - public void toCSV(List idHeaders, List infos, Stream resultStream) { + public void toCSV(List idHeaders, List infos, Stream resultStream, PrintSettings printSettings) { UniqueNamer uniqNamer = new UniqueNamer(cfg); - final String[] headers = Stream.concat(idHeaders.stream(), infos.stream()).map(uniqNamer::getUniqueName).toArray(String[]::new); + final String[] headers = Stream.concat(idHeaders.stream(), infos.stream()).map(info -> uniqNamer.getUniqueName(info, printSettings)).toArray(String[]::new); writer.writeHeaders(headers); - createCSVBody(cfg, infos, resultStream); + createCSVBody(cfg, infos, resultStream, printSettings, new StringResultPrinters()); } - private void createCSVBody(PrintSettings cfg, List infos, Stream results) { + private void createCSVBody(PrintSettings cfg, List infos, Stream results, PrintSettings printSettings, + PrinterFactory printerFactory) { + final Printer[] printers = infos.stream().map(info -> info.createPrinter(printerFactory, printSettings)).toArray(Printer[]::new); + results.map(result -> Pair.of(cfg.getIdMapper().map(result), result)) .sorted(Map.Entry.comparingByKey()) .forEach(res -> res .getValue() .streamValues() - .forEach(result -> printLine(res.getKey(), infos, result))); + .forEach(result -> printLine(res.getKey(), printers, result))); } - public void printLine(EntityPrintId entity, 
List infos, Object[] value) { + public void printLine(EntityPrintId entity, Printer[] printers, Object[] values) { // Cast here to Object[] so it is clear to intellij that the varargs call is intended writer.addValues((Object[]) entity.getExternalId()); try { - for (int i = 0; i < infos.size(); i++) { - writer.addValue(infos.get(i).printNullable(value[i])); + for (int i = 0; i < printers.length; i++) { + final Object value = values[i]; + + if (value == null) { + writer.addValue(""); + continue; + } + + writer.addValue(printers[i].apply(value)); } } catch (Exception e) { - throw new IllegalStateException("Unable to print line " + Arrays.deepToString(value), e); + throw new IllegalStateException("Unable to print line " + Arrays.deepToString(values), e); } writer.writeValuesToRow(); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java index 0eb59cca95..3492cee595 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java @@ -8,13 +8,18 @@ import java.nio.charset.Charset; import java.util.Locale; import java.util.OptionalLong; +import jakarta.inject.Inject; +import jakarta.ws.rs.WebApplicationException; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; @@ -23,11 +28,6 @@ import com.bakdata.conquery.resources.ResourceConstants; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.ws.rs.WebApplicationException; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.eclipse.jetty.io.EofException; @@ -41,9 +41,9 @@ public class ResultCsvProcessor { public Response createResult(Subject subject, E exec, boolean pretty, Charset charset, OptionalLong limit) { - final Dataset dataset = exec.getDataset(); + final DatasetId datasetId = exec.getDataset(); - final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Namespace namespace = datasetRegistry.get(datasetId); ConqueryMDC.setLocation(subject.getName()); log.info("Downloading results for {}", exec.getId()); @@ -62,7 +62,7 @@ public Response createResult(Su final StreamingOutput out = os -> { try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, charset))) { final CsvRenderer renderer = new CsvRenderer(config.getCsv().createWriter(writer), settings); - renderer.toCSV(config.getIdColumns().getIdResultInfos(settings), exec.getResultInfos(settings), exec.streamResults(limit)); + renderer.toCSV(config.getIdColumns().getIdResultInfos(), exec.getResultInfos(), exec.streamResults(limit), settings); } 
catch (EofException e) { log.trace("User canceled download"); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java index 71e10f3ae1..3169c321be 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java @@ -3,6 +3,7 @@ import java.io.IOException; import java.io.OutputStream; import java.math.BigDecimal; +import java.time.LocalDate; import java.util.List; import java.util.Map; import java.util.OptionalLong; @@ -11,15 +12,20 @@ import c10n.C10N; import com.bakdata.conquery.internationalization.ExcelSheetNameC10n; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.CDate; +import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ExcelConfig; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.mapping.PrintIdMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; +import com.bakdata.conquery.models.query.resultinfo.printers.ExcelResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.types.ResultType; import com.google.common.collect.ImmutableMap; @@ -42,60 +48,35 @@ public class ExcelRenderer { public static final int MAX_LINES = 1_048_576; - - private static TypeWriter writer(ResultType type) { - if(!(type instanceof ResultType.Primitive)){ - //Excel cannot handle complex types so we just toString them. 
- return (info, settings, cell, value, styles) -> writeStringCell(info, cell, value, styles); - } - - return switch (((ResultType.Primitive) type)) { - case BOOLEAN -> (info, settings, cell, value, styles) -> writeBooleanCell(info, cell, value, styles); - case INTEGER -> ExcelRenderer::writeIntegerCell; - case MONEY -> ExcelRenderer::writeMoneyCell; - case NUMERIC -> ExcelRenderer::writeNumericCell; - case DATE -> ExcelRenderer::writeDateCell; - default -> (info, settings, cell, value, styles) -> writeStringCell(info, cell, value, styles); - }; - } - public static final int CHARACTER_WIDTH_DIVISOR = 256; public static final int AUTOFILTER_SPACE_WIDTH = 3; - private final SXSSFWorkbook workbook; private final ExcelConfig config; - private final PrintSettings cfg; + private final PrintSettings settings; private final ImmutableMap styles; - - - public ExcelRenderer(ExcelConfig config, PrintSettings cfg) { + public ExcelRenderer(ExcelConfig config, PrintSettings settings) { workbook = new SXSSFWorkbook(); this.config = config; - styles = config.generateStyles(workbook, cfg); - this.cfg = cfg; - } - - @FunctionalInterface - private interface TypeWriter { - void writeCell(ResultInfo info, PrintSettings settings, Cell cell, Object value, Map styles); + styles = config.generateStyles(workbook, settings); + this.settings = settings; } public void renderToStream(List idHeaders, E exec, OutputStream outputStream, OptionalLong limit, PrintSettings printSettings) throws IOException { - final List resultInfosExec = exec.getResultInfos(printSettings); + final List resultInfosExec = exec.getResultInfos(); setMetaData(exec); - SXSSFSheet sheet = workbook.createSheet(C10N.get(ExcelSheetNameC10n.class, I18n.LOCALE.get()).result()); + final SXSSFSheet sheet = workbook.createSheet(C10N.get(ExcelSheetNameC10n.class, I18n.LOCALE.get()).result()); try { sheet.setDefaultColumnWidth(config.getDefaultColumnWidth()); // Create a table environment inside the excel sheet - XSSFTable table = createTableEnvironment(exec, sheet); + final XSSFTable table = createTableEnvironment(exec, sheet); - writeHeader(sheet, idHeaders, resultInfosExec, table); + writeHeader(sheet, idHeaders, resultInfosExec, table, printSettings); - int writtenLines = writeBody(sheet, resultInfosExec, exec.streamResults(OptionalLong.of(limit.orElse(MAX_LINES)))); + final int writtenLines = writeBody(sheet, resultInfosExec, exec.streamResults(OptionalLong.of(limit.orElse(MAX_LINES))), new ExcelResultPrinters()); postProcessTable(sheet, table, writtenLines, idHeaders.size()); @@ -114,47 +95,25 @@ private void setMetaData(E exec final POIXMLProperties.CoreProperties coreProperties = workbook.getXSSFWorkbook().getProperties().getCoreProperties(); coreProperties.setTitle(exec.getLabelWithoutAutoLabelSuffix()); - final User owner = exec.getOwner(); - coreProperties.setCreator(owner != null ? owner.getLabel() : config.getApplicationName()); + final UserId owner = exec.getOwner(); + coreProperties.setCreator(owner != null ? 
owner.resolve().getLabel() : config.getApplicationName()); coreProperties.setKeywords(String.join(" ", exec.getTags())); final POIXMLProperties.ExtendedProperties extendedProperties = workbook.getXSSFWorkbook().getProperties().getExtendedProperties(); extendedProperties.setApplication(config.getApplicationName()); } - /** - * Do postprocessing on the result to improve the visuals: - * - Set the area of the table environment - * - Freeze the id columns - * - Add autofilters (not for now) - */ - private void postProcessTable(SXSSFSheet sheet, XSSFTable table, int writtenLines, int size) { - // Extend the table area to the added data - CellReference topLeft = new CellReference(0, 0); - - // The area must be at least a header row and a data row. If no line was written we include an empty data row so POI is happy - CellReference bottomRight = new CellReference(Math.max(1, writtenLines), table.getColumnCount() - 1); - AreaReference newArea = new AreaReference(topLeft, bottomRight, workbook.getSpreadsheetVersion()); - table.setArea(newArea); - - // Add auto filters. This must be done on the lower level CTTable. Using SXSSFSheet::setAutoFilter will corrupt the table - table.getCTTable().addNewAutoFilter(); - - // Freeze Header and id columns - sheet.createFreezePane(size, 1); - } - /** * Create a table environment, which improves mainly the visuals of the produced table. */ @NotNull private XSSFTable createTableEnvironment(ManagedExecution exec, SXSSFSheet sheet) { - XSSFTable table = sheet.getWorkbook().getXSSFWorkbook().getSheet(sheet.getSheetName()).createTable(null); + final XSSFTable table = sheet.getWorkbook().getXSSFWorkbook().getSheet(sheet.getSheetName()).createTable(null); - CTTable cttable = table.getCTTable(); + final CTTable cttable = table.getCTTable(); table.setName(exec.getLabelWithoutAutoLabelSuffix()); cttable.setTotalsRowShown(false); - CTTableStyleInfo styleInfo = cttable.addNewTableStyleInfo(); + final CTTableStyleInfo styleInfo = cttable.addNewTableStyleInfo(); // Not sure how important this name is styleInfo.setName("TableStyleMedium2"); styleInfo.setShowColumnStripes(false); @@ -170,24 +129,24 @@ private void writeHeader( SXSSFSheet sheet, List idHeaders, List infos, - XSSFTable table) { + XSSFTable table, PrintSettings printSettings) { - CTTableColumns columns = table.getCTTable().addNewTableColumns(); + final CTTableColumns columns = table.getCTTable().addNewTableColumns(); columns.setCount(idHeaders.size() + infos.size()); - UniqueNamer uniqueNamer = new UniqueNamer(cfg); + final UniqueNamer uniqueNamer = new UniqueNamer(settings); { - Row header = sheet.createRow(0); + final Row header = sheet.createRow(0); // First to create the columns and track them for auto size before the first row is written int currentColumn = 0; for (ResultInfo idHeader : idHeaders) { - CTTableColumn column = columns.addNewTableColumn(); + final CTTableColumn column = columns.addNewTableColumn(); // Table column ids MUST be set and MUST start at 1, excel will fail otherwise column.setId(currentColumn + 1); - final String uniqueName = uniqueNamer.getUniqueName(idHeader); + final String uniqueName = uniqueNamer.getUniqueName(idHeader, printSettings); column.setName(uniqueName); - Cell headerCell = header.createCell(currentColumn); + final Cell headerCell = header.createCell(currentColumn); headerCell.setCellValue(uniqueName); // Track column explicitly, because sheet.trackAllColumnsForAutoSizing() does not work with @@ -198,12 +157,12 @@ private void writeHeader( } for (ResultInfo info : infos) { 
- final String columnName = uniqueNamer.getUniqueName(info); - CTTableColumn column = columns.addNewTableColumn(); + final String columnName = uniqueNamer.getUniqueName(info, printSettings); + final CTTableColumn column = columns.addNewTableColumn(); column.setId(currentColumn + 1); column.setName(columnName); - Cell headerCell = header.createCell(currentColumn); + final Cell headerCell = header.createCell(currentColumn); headerCell.setCellValue(columnName); sheet.trackColumnForAutoSizing(currentColumn); @@ -216,11 +175,15 @@ private void writeHeader( private int writeBody( SXSSFSheet sheet, List infos, - Stream resultLines) { + Stream resultLines, PrinterFactory printerFactory) { // Row 0 is the Header the data starts at 1 final AtomicInteger currentRow = new AtomicInteger(1); - final int writtenLines = resultLines.mapToInt(l -> this.writeRowsForEntity(infos, l, currentRow, cfg, sheet)).sum(); + + final TypeWriter[] writers = infos.stream().map(info -> writer(info.getType(), info.createPrinter(printerFactory, settings), settings)).toArray(TypeWriter[]::new); + final PrintIdMapper idMapper = settings.getIdMapper(); + + final int writtenLines = resultLines.mapToInt(l -> writeRowsForEntity(infos, l, currentRow, sheet, writers, idMapper)).sum(); // The result was shorter than the number of rows to track, so we auto size here explicitly if (writtenLines < config.getLastRowToAutosize()) { @@ -230,44 +193,65 @@ private int writeBody( return writtenLines; } + /** + * Do postprocessing on the result to improve the visuals: + * - Set the area of the table environment + * - Freeze the id columns + * - Add autofilters (not for now) + */ + private void postProcessTable(SXSSFSheet sheet, XSSFTable table, int writtenLines, int size) { + // Extend the table area to the added data + final CellReference topLeft = new CellReference(0, 0); + + // The area must be at least a header row and a data row. If no line was written we include an empty data row so POI is happy + final CellReference bottomRight = new CellReference(Math.max(1, writtenLines), table.getColumnCount() - 1); + final AreaReference newArea = new AreaReference(topLeft, bottomRight, workbook.getSpreadsheetVersion()); + table.setArea(newArea); + + // Add auto filters. This must be done on the lower level CTTable. Using SXSSFSheet::setAutoFilter will corrupt the table + table.getCTTable().addNewAutoFilter(); + + // Freeze Header and id columns + sheet.createFreezePane(size, 1); + } + /** * Writes the result lines for each entity. 
*/ - private int writeRowsForEntity( - List infos, - EntityResult internalRow, - final AtomicInteger currentRow, - PrintSettings settings, - SXSSFSheet sheet) { - String[] ids = settings.getIdMapper().map(internalRow).getExternalId(); + private int writeRowsForEntity(List infos, EntityResult internalRow, final AtomicInteger currentRow, SXSSFSheet sheet, TypeWriter[] writers, PrintIdMapper idMapper) { + + final String[] ids = idMapper.map(internalRow).getExternalId(); int writtenLines = 0; - for (Object[] resultValues : internalRow.listResultLines()) { + for (Object[] line : internalRow.listResultLines()) { final int thisRow = currentRow.getAndIncrement(); - Row row = sheet.createRow(thisRow); + final Row row = sheet.createRow(thisRow); + // Write id cells int currentColumn = 0; + for (String id : ids) { - Cell idCell = row.createCell(currentColumn); + final Cell idCell = row.createCell(currentColumn); idCell.setCellValue(id); currentColumn++; } // Write data cells - for (int i = 0; i < infos.size(); i++) { - ResultInfo resultInfo = infos.get(i); - Object resultValue = resultValues[i]; - Cell dataCell = row.createCell(currentColumn); + for (int index = 0; index < infos.size(); index++) { + final Object value = line[index]; + final Cell dataCell = row.createCell(currentColumn); currentColumn++; - if (resultValue == null) { + + if (value == null) { continue; } + // Fallback to string if type is not explicitly registered - TypeWriter typeWriter = writer(resultInfo.getType()); + final TypeWriter typeWriter = writers[index]; - typeWriter.writeCell(resultInfo, settings, dataCell, resultValue, styles); + typeWriter.writeCell(value, dataCell, styles); } if (thisRow == config.getLastRowToAutosize()) { @@ -302,57 +286,66 @@ private void setColumnWidthsAndUntrack(SXSSFSheet sheet) { } } + private static TypeWriter writer(ResultType type, Printer printer, PrintSettings settings) { + if (type instanceof ResultType.ListT) { + //Excel cannot handle LIST types so we just toString them. 
+ return (value, cell, styles) -> writeStringCell(cell, value, printer); + } + + return switch (((ResultType.Primitive) type)) { + case BOOLEAN -> (value, cell, styles) -> writeBooleanCell(value, cell, printer); + case INTEGER -> (value, cell, styles) -> writeIntegerCell(value, cell, printer, styles); + case MONEY -> (value, cell, styles) -> writeMoneyCell(value, cell, printer, settings, styles); + case NUMERIC -> (value, cell, styles) -> writeNumericCell(value, cell, printer, styles); + case DATE -> (value, cell, styles) -> writeDateCell(value, cell, printer, styles); + default -> (value, cell, styles) -> writeStringCell(cell, value, printer); + }; + } + // Type specific cell writers - private static void writeStringCell(ResultInfo info, Cell cell, Object value, Map styles) { - cell.setCellValue( - info.printNullable( - value - )); + private static void writeStringCell(Cell cell, Object value, Printer printer) { + cell.setCellValue((String) printer.apply(value)); } /** * This writer is only used on Columns with the result type {@link ResultType.Primitive#BOOLEAN}, not on complex types such as `LIST[BOOLEAN]`, * because MS Excel can only represent those as strings */ - private static void writeBooleanCell(ResultInfo info, Cell cell, Object value, Map styles) { - if (value instanceof Boolean aBoolean) { - cell.setCellValue(aBoolean); - return; - } - cell.setCellValue(info.printNullable(value)); + private static void writeBooleanCell(Object value, Cell cell, Printer printer) { + cell.setCellValue((Boolean) printer.apply(value)); } - private static void writeDateCell(ResultInfo info, PrintSettings settings, Cell cell, Object value, Map styles) { - if (!(value instanceof Number)) { - throw new IllegalStateException(String.format("`%s` Expected an Number but got an '%s' with the value: %s", - info, - value != null ? 
value.getClass().getName() : "no type", value - )); - } - cell.setCellValue(CDate.toLocalDate(((Number) value).intValue())); - cell.setCellStyle(styles.get(ExcelConfig.DATE_STYLE)); - } - - public static void writeIntegerCell(ResultInfo info, PrintSettings settings, Cell cell, Object value, Map styles) { - cell.setCellValue(((Number) value).longValue()); + public static void writeIntegerCell(Object value, Cell cell, Printer printer, Map styles) { + cell.setCellValue(((Number) printer.apply(value)).longValue()); cell.setCellStyle(styles.get(ExcelConfig.INTEGER_STYLE)); } - public static void writeNumericCell(ResultInfo info, PrintSettings settings, Cell cell, Object value, Map styles) { - cell.setCellValue(((Number) value).doubleValue()); - cell.setCellStyle(styles.get(ExcelConfig.NUMERIC_STYLE)); - } + public static void writeMoneyCell(Object valueRaw, Cell cell, Printer printer, PrintSettings settings, Map styles) { + + final BigDecimal value = (BigDecimal) printer.apply(valueRaw); - public static void writeMoneyCell(ResultInfo info, PrintSettings settings, Cell cell, Object value, Map styles) { - CellStyle currencyStyle = styles.get(ExcelConfig.CURRENCY_STYLE_PREFIX + settings.getCurrency().getCurrencyCode()); + final CellStyle currencyStyle = styles.get(ExcelConfig.CURRENCY_STYLE_PREFIX + settings.getCurrency().getCurrencyCode()); if (currencyStyle == null) { - // Print as cents or what ever the minor currency unit is - cell.setCellValue(value.toString()); + // Print as cents or whatever the minor currency unit is + cell.setCellValue(value.movePointRight(settings.getCurrency().getDefaultFractionDigits()).intValue()); return; } cell.setCellStyle(currencyStyle); - cell.setCellValue( - new BigDecimal(((Number) value).longValue()).movePointLeft(settings.getCurrency().getDefaultFractionDigits()).doubleValue() - ); + cell.setCellValue(value.doubleValue()); + } + + public static void writeNumericCell(Object value, Cell cell, Printer printer, Map styles) { + cell.setCellValue(((Number) printer.apply(value)).doubleValue()); + cell.setCellStyle(styles.get(ExcelConfig.NUMERIC_STYLE)); + } + + private static void writeDateCell(Object value, Cell cell, Printer printer, Map styles) { + cell.setCellValue((LocalDate) printer.apply(value)); + cell.setCellStyle(styles.get(ExcelConfig.DATE_STYLE)); + } + + @FunctionalInterface + private interface TypeWriter { + void writeCell(Object value, Cell cell, Map styles); } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java index 89194e75c8..18a0205c2b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java @@ -4,14 +4,18 @@ import java.util.Locale; import java.util.OptionalLong; +import jakarta.inject.Inject; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.ExcelConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import 
com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; @@ -20,10 +24,6 @@ import com.bakdata.conquery.resources.ResourceConstants; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -42,13 +42,13 @@ public Response createResult(Su ConqueryMDC.setLocation(subject.getName()); - final Dataset dataset = exec.getDataset(); + final DatasetId datasetId = exec.getDataset(); log.info("Downloading results for {}", exec.getId()); ResultUtil.authorizeExecutable(subject, exec); - final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Namespace namespace = datasetRegistry.get(datasetId); final IdPrinter idPrinter = IdColumnUtil.getIdPrinter(subject, exec, namespace, conqueryConfig.getIdColumns().getIds()); final Locale locale = I18n.LOCALE.get(); @@ -57,7 +57,7 @@ public Response createResult(Su final ExcelRenderer excelRenderer = new ExcelRenderer(excelConfig, settings); final StreamingOutput out = output -> { - excelRenderer.renderToStream(conqueryConfig.getIdColumns().getIdResultInfos(settings), exec, output, limit, settings); + excelRenderer.renderToStream(conqueryConfig.getIdColumns().getIdResultInfos(), exec, output, limit, settings); log.trace("FINISHED downloading {}", exec.getId()); }; diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/external/ExternalResultProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/external/ExternalResultProcessor.java index c8ed12696c..ef14d4f10b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/external/ExternalResultProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/external/ExternalResultProcessor.java @@ -20,7 +20,7 @@ public Response getResult(Subject subject, ExternalExecution execution, String f ResultUtil.authorizeExecutable(subject, execution); - ExecutionManager executionManager = datasetRegistry.get(execution.getDataset().getId()).getExecutionManager(); + ExecutionManager executionManager = datasetRegistry.get(execution.getDataset()).getExecutionManager(); ExternalState externalResult = executionManager.getResult(execution.getId()); return externalResult.fetchExternalResult(fileName); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/EntityResultWriteSupport.java b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/EntityResultWriteSupport.java index 8dcad2291d..387eadbb09 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/EntityResultWriteSupport.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/EntityResultWriteSupport.java @@ -1,19 +1,20 @@ package com.bakdata.conquery.io.result.parquet; -import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.stream.Stream; import com.bakdata.conquery.io.result.arrow.ArrowUtil; -import com.bakdata.conquery.models.common.CDate; +import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; -import 
com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.ArrowResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.query.results.MultilineEntityResult; import com.bakdata.conquery.models.types.ResultType; -import lombok.Data; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.arrow.vector.types.pojo.Schema; @@ -24,6 +25,7 @@ import org.apache.parquet.io.api.Binary; import org.apache.parquet.io.api.RecordConsumer; import org.apache.parquet.schema.MessageType; +import org.jetbrains.annotations.NotNull; /** * {@link WriteSupport} for Conquery's {@link EntityResult} type. @@ -40,14 +42,69 @@ public class EntityResultWriteSupport extends WriteSupport { private final PrintSettings printSettings; private MessageType schema; - private List columnConsumer; + private List columnConsumers; + private List columnPrinters; private RecordConsumer recordConsumer; + /** + * Generates the parquet schema format from the {@link ResultInfo}s of a query + * + * @param idHeaders {@link ResultInfo} for the Ids + * @param resultValueInfos {@link ResultInfo} for the result values + * @param uniqueNamer A column namer for the fields in the schema + * @param printSettings1 + * @return the parquet schema + */ + public static MessageType generateSchema(List idHeaders, List resultValueInfos, UniqueNamer uniqueNamer, PrintSettings printSettings1) { + /* + Because Parquet Schemas rely on primitive types with logical annotations + which are tedious to configure, we take the detour over the arrow schema. 
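As the comment above explains, the Parquet MessageType is not assembled by hand but derived from an Arrow schema via parquet-arrow's SchemaConverter. A small stand-alone sketch of that detour, assuming the arrow-vector and parquet-arrow modules are on the classpath; the field names are invented for illustration.

import java.util.List;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.Schema;
import org.apache.parquet.arrow.schema.SchemaConverter;
import org.apache.parquet.schema.MessageType;

public class ArrowToParquetSchemaSketch {

	public static void main(String[] args) {
		// Build a small Arrow schema: one id column and one integer result column
		final Schema arrowSchema = new Schema(List.of(
				Field.nullable("result_id", new ArrowType.Utf8()),
				Field.nullable("some_value", new ArrowType.Int(32, true))
		));

		// parquet-arrow translates the Arrow types into Parquet primitives plus logical annotations
		final MessageType parquetSchema = new SchemaConverter().fromArrow(arrowSchema).getParquetSchema();

		System.out.println(parquetSchema);
	}
}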
+ */ + final SchemaMapping schemaMapping = + new SchemaConverter().fromArrow(new Schema(ArrowUtil.generateFields(idHeaders, resultValueInfos, uniqueNamer, printSettings1))); + + return schemaMapping.getParquetSchema(); + + } + + private static List generateColumnConsumers(List idHeaders, List resultInfos) { + return Stream.concat(idHeaders.stream(), resultInfos.stream()) + .map(ResultInfo::getType) + .map(EntityResultWriteSupport::columnConsumerForType) + .toList(); + + } + + private static List generateColumnPrinters(List idHeaders, List resultInfos, PrintSettings printSettings, PrinterFactory printerFactory) { + + return Stream.concat(idHeaders.stream(), resultInfos.stream()) + .map(info -> info.createPrinter(printerFactory, printSettings)) + .toList(); + + } + + private static ColumnConsumer columnConsumerForType(ResultType resultType) { + + if (resultType instanceof ResultType.ListT listT) { + return new ListColumnConsumer(columnConsumerForType(listT.getElementType())); + } + + return switch (((ResultType.Primitive) resultType)) { + case BOOLEAN -> new BooleanColumnConsumer(); + case INTEGER, DATE -> new IntegerColumnConsumer(); + case NUMERIC -> new NumericColumnConsumer(); + case MONEY -> new MoneyColumnConsumer(); + case DATE_RANGE -> new DateRangeColumnConsumer(); + case STRING -> new StringColumnConsumer(); + }; + } + @Override public WriteContext init(Configuration configuration) { - schema = generateSchema(idHeaders, resultInfo, new UniqueNamer(printSettings)); - columnConsumer = generateColumnConsumers(idHeaders, resultInfo, printSettings); + schema = generateSchema(idHeaders, resultInfo, new UniqueNamer(printSettings), printSettings); + columnConsumers = generateColumnConsumers(idHeaders, resultInfo); + columnPrinters = generateColumnPrinters(idHeaders, resultInfo, printSettings, new ArrowResultPrinters()); return new WriteContext(schema, Map.of()); } @@ -65,32 +122,45 @@ public void write(EntityResult record) { // This should not happen because of the workaround in ParquetRenderer log.warn("Processing a MultilineEntityResult is not working properly. Only the first line will be output"); } + + // Write ID fields + final Object[] printedExternalId = getPrintedExternalId(record); + for (Object[] listResultLine : listResultLines) { recordConsumer.startMessage(); - // Write ID fields - final String[] externalId = printSettings.getIdMapper().map(record).getExternalId(); - int cellIdx = 0; - for (int i = 0; i < externalId.length; i++, cellIdx++) { - final String subId = externalId[i]; - if (subId == null) { + + for (int index = 0; index < printedExternalId.length; index++) { + final Object printed = printedExternalId[index]; + if (printed == null) { continue; } - final String fieldName = schema.getFieldName(cellIdx); - recordConsumer.startField(fieldName, cellIdx); - columnConsumer.get(cellIdx).accept(recordConsumer, subId); - recordConsumer.endField(fieldName, cellIdx); + + final String fieldName = schema.getFieldName(index); + + recordConsumer.startField(fieldName, index); + columnConsumers.get(index).accept(recordConsumer, printed); + recordConsumer.endField(fieldName, index); } // Write Result fields - for (int i = 0; i < resultInfo.size(); i++, cellIdx++) { - final Object resultValue = listResultLine[i]; - if (resultValue == null) { + for (int index = 0; index < listResultLine.length; index++) { + final int colId = index + printedExternalId.length; + + final Object value = listResultLine[index]; + + if (value == null) { + // Parquet consumers cannot handle null? 
continue; } - final String fieldName = schema.getFieldName(cellIdx); - recordConsumer.startField(fieldName, cellIdx); - columnConsumer.get(cellIdx).accept(recordConsumer, resultValue); - recordConsumer.endField(fieldName, cellIdx); + + Printer printer = columnPrinters.get(colId); + final Object printed = printer.apply(value); + + final String fieldName = schema.getFieldName(colId); + + recordConsumer.startField(fieldName, colId); + columnConsumers.get(colId).accept(recordConsumer, printed); + recordConsumer.endField(fieldName, colId); } recordConsumer.endMessage(); @@ -98,102 +168,86 @@ public void write(EntityResult record) { } - /** - * Generates the parquet schema format from the {@link ResultInfo}s of a query - * - * @param idHeaders {@link ResultInfo} for the Ids - * @param resultValueInfos {@link ResultInfo} for the result values - * @param uniqueNamer A column namer for the fields in the schema - * @return the parquet schema - */ - public static MessageType generateSchema( - List idHeaders, - List resultValueInfos, UniqueNamer uniqueNamer) { + @NotNull + private Object[] getPrintedExternalId(EntityResult record) { + final String[] externalId = printSettings.getIdMapper().map(record).getExternalId(); - /* - Because Parquet Schemas rely on primitive types with logical annotations - which are tedious to configure, we take the detour over the arrow schema. - */ - final SchemaMapping schemaMapping = new SchemaConverter().fromArrow(new Schema(ArrowUtil.generateFields(idHeaders, resultValueInfos, uniqueNamer))); - - return schemaMapping.getParquetSchema(); + final Object[] printedExternalId = new String[externalId.length]; + for (int index = 0; index < externalId.length; index++) { + Printer printer = columnPrinters.get(index); + printedExternalId[index] = printer.apply(externalId[index]); + } + return printedExternalId; } - @Data - private static class StringTColumnConsumer implements ColumnConsumer { - - private final ResultPrinters.Printer printer; - private final PrintSettings printSettings; + private record StringColumnConsumer() implements ColumnConsumer { @Override public void accept(RecordConsumer recordConsumer, Object o) { - final String printValue = getPrinter().print(o); - recordConsumer.addBinary(Binary.fromString(printValue)); + recordConsumer.addBinary(Binary.fromString((String) o)); } } - @RequiredArgsConstructor - private static class BooleanTColumnConsumer implements ColumnConsumer { - + private record BooleanColumnConsumer() implements ColumnConsumer { @Override public void accept(RecordConsumer recordConsumer, Object o) { recordConsumer.addBoolean((boolean) o); } } - @RequiredArgsConstructor - private static class IntegerTColumnConsumer implements ColumnConsumer { + private record IntegerColumnConsumer() implements ColumnConsumer { @Override public void accept(RecordConsumer recordConsumer, Object o) { - recordConsumer.addInteger((int) o); + recordConsumer.addInteger(((Number) o).intValue()); } } - @RequiredArgsConstructor - private static class NumericTColumnConsumer implements ColumnConsumer { + + private record NumericColumnConsumer() implements ColumnConsumer { @Override public void accept(RecordConsumer recordConsumer, Object o) { - recordConsumer.addDouble((double) o); + recordConsumer.addDouble(((Number) o).doubleValue()); } } - @RequiredArgsConstructor - private static class DateRangeTColumnConsumer implements ColumnConsumer { - private final static String MIN_FIELD_NAME = "min"; - private final static String MAX_FIELD_NAME = "max"; - + private record 
MoneyColumnConsumer() implements ColumnConsumer { @Override public void accept(RecordConsumer recordConsumer, Object o) { - List dateRange = (List) o; - recordConsumer.startGroup(); - Integer min = dateRange.get(0); + recordConsumer.addInteger(((Integer) o)); + } + } + + + private record DateRangeColumnConsumer() implements ColumnConsumer { + private static final String MIN_FIELD_NAME = "min"; + private static final String MAX_FIELD_NAME = "max"; + @Override + public void accept(RecordConsumer recordConsumer, Object o) { + final CDateRange dateRange = (CDateRange) o; + recordConsumer.startGroup(); - if (min != null && !(CDate.isNegativeInfinity(min))) { + if (dateRange.hasLowerBound()) { recordConsumer.startField(MIN_FIELD_NAME, 0); - recordConsumer.addInteger(min); + recordConsumer.addInteger(dateRange.getMinValue()); recordConsumer.endField(MIN_FIELD_NAME, 0); } - Integer max = dateRange.get(1); - if (max != null && !(CDate.isPositiveInfinity(max))) { + if (dateRange.hasUpperBound()) { recordConsumer.startField(MAX_FIELD_NAME, 1); - recordConsumer.addInteger(max); + recordConsumer.addInteger(dateRange.getMaxValue()); recordConsumer.endField(MAX_FIELD_NAME, 1); } + recordConsumer.endGroup(); } } - @RequiredArgsConstructor - private static class ListTColumnConsumer implements ColumnConsumer { - - private final ColumnConsumer elementConsumer; - private final PrintSettings printSettings; + private record ListColumnConsumer(ColumnConsumer elementConsumer) implements ColumnConsumer { @Override public void accept(RecordConsumer recordConsumer, Object o) { @@ -207,7 +261,7 @@ public void accept(RecordConsumer recordConsumer, Object o) { return; } - // This nesting is wierd but documented https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#lists + // This nesting is weird but documented https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#lists recordConsumer.startGroup(); recordConsumer.startField("list", 0); for (Object elem : list) { @@ -221,31 +275,4 @@ public void accept(RecordConsumer recordConsumer, Object o) { recordConsumer.endGroup(); } } - - private static List generateColumnConsumers(List idHeaders, List resultInfos, PrintSettings printSettings) { - final List consumers = new ArrayList<>(); - for (ResultInfo idHeader : idHeaders) { - consumers.add(getForResultType(idHeader.getType(), idHeader.getPrinter(), printSettings)); - } - - for (ResultInfo resultInfo : resultInfos) { - consumers.add(getForResultType(resultInfo.getType(), resultInfo.getPrinter(), printSettings)); - } - return consumers; - } - - private static ColumnConsumer getForResultType(ResultType resultType, ResultPrinters.Printer printer, PrintSettings printSettings) { - - if (resultType instanceof ResultType.ListT listT) { - return new ListTColumnConsumer(getForResultType(listT.getElementType(), ((ResultPrinters.ListPrinter) printer).elementPrinter(), printSettings), printSettings); - } - - return switch (((ResultType.Primitive) resultType)) { - case BOOLEAN -> new BooleanTColumnConsumer(); - case INTEGER, DATE, MONEY -> new IntegerTColumnConsumer(); - case NUMERIC -> new NumericTColumnConsumer(); - case DATE_RANGE -> new DateRangeTColumnConsumer(); - case STRING -> new StringTColumnConsumer(printer, printSettings); - }; - } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ParquetRenderer.java b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ParquetRenderer.java index 12dcb2bbe2..a903133ff6 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ParquetRenderer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ParquetRenderer.java @@ -24,6 +24,50 @@ @UtilityClass public class ParquetRenderer { + public static void writeToStream( + OutputStream outputStream, + List idHeaders, + List resultInfo, + PrintSettings printSettings, + Stream results) throws IOException { + + // Wrap the request output stream in an output file, so the parquet writer can consume it + final OutputFile outputFile = new StreamOutputFile(new PositionTrackingOutputStream(new CountingOutputStream(outputStream))); + + final ConqueryParquetWriterBuilder conqueryParquetWriterBuilder = + new ConqueryParquetWriterBuilder(outputFile) + .setIdHeaders(idHeaders) + .setResultInfo(resultInfo) + .setPrintSettings(printSettings); + + try (final ParquetWriter parquetWriter = conqueryParquetWriterBuilder.build()) { + + /* + WORKAROUND: We need the conversion to SinglelineEntityResult here because a RecordConsumer only produces one line/record + even if multiple messages are started. + */ + Iterator resultIterator = results.flatMap(ParquetRenderer::convertToSingleLine).iterator(); + while (resultIterator.hasNext()) { + final EntityResult entityResult = resultIterator.next(); + + parquetWriter.write(entityResult); + } + } + } + + /** + * Converts a possible {@link MultilineEntityResult} to a stream of {@link SinglelineEntityResult}s. + * + * @param entityResult the result to convert. + * @return the stream of {@link SinglelineEntityResult}s + */ + private static Stream convertToSingleLine(EntityResult entityResult) { + if (entityResult instanceof SinglelineEntityResult) { + return Stream.of((SinglelineEntityResult) entityResult); + } + return entityResult.streamValues().map(line -> new SinglelineEntityResult(entityResult.getEntityId(), line)); + } + @RequiredArgsConstructor public static class StreamOutputFile implements OutputFile { @@ -35,7 +79,7 @@ public PositionOutputStream create(long blockSizeHint) throws IOException { } @Override - public PositionOutputStream createOrOverwrite(long blockSizeHint) throws IOException { + public PositionOutputStream createOrOverwrite(long blockSizeHint) { return outputStream; } @@ -53,10 +97,10 @@ public long defaultBlockSize() { @RequiredArgsConstructor public static class PositionTrackingOutputStream extends PositionOutputStream { - final private CountingOutputStream stream; + private final CountingOutputStream stream; @Override - public long getPos() throws IOException { + public long getPos() { return stream.getCount(); } @@ -66,51 +110,5 @@ public void write(int b) throws IOException { } } - public static void writeToStream( - OutputStream outputStream, - - List idHeaders, - List resultInfo, - PrintSettings printSettings, - Stream results) throws IOException { - - // Wrap the request output stream in an output file, so the parquet writer can consume it - final OutputFile outputFile = new StreamOutputFile( - new PositionTrackingOutputStream( - new CountingOutputStream(outputStream))); - - final ConqueryParquetWriterBuilder conqueryParquetWriterBuilder = new ConqueryParquetWriterBuilder(outputFile) - .setIdHeaders(idHeaders) - .setResultInfo(resultInfo) - .setPrintSettings(printSettings); - - try (final ParquetWriter parquetWriter = conqueryParquetWriterBuilder.build()) { - - /* - WORKAROUND: We need the conversion to SinglelineEntityResult here because a RecordConsumer only produces one line/record - even if multiple messages are started. 
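convertToSingleLine (added above) works around the one-record-per-write limitation by flattening each multi-line result into a stream of single-line results before they reach the ParquetWriter. A simplified, self-contained sketch of that flattening step; the record types are illustrative stand-ins for Conquery's EntityResult classes.

import java.util.List;
import java.util.stream.Stream;

public class FlattenResultsSketch {

	// Illustrative stand-ins for MultilineEntityResult / SinglelineEntityResult
	record MultiLineResult(int entityId, List<Object[]> lines) {}
	record SingleLineResult(int entityId, Object[] values) {}

	static Stream<SingleLineResult> convertToSingleLine(MultiLineResult result) {
		// One output record per result line, so the writer only ever sees single-line records
		return result.lines().stream().map(line -> new SingleLineResult(result.entityId(), line));
	}

	public static void main(String[] args) {
		final Stream<MultiLineResult> results = Stream.of(
				new MultiLineResult(1, List.of(new Object[]{"a"}, new Object[]{"b"})),
				new MultiLineResult(2, List.of(new Object[]{"c"}))
		);

		final long written = results.flatMap(FlattenResultsSketch::convertToSingleLine).count();
		System.out.println(written + " single-line records"); // 3
	}
}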
- */ - Iterator resultIterator = results.flatMap(ParquetRenderer::convertToSingleLine).iterator(); - while (resultIterator.hasNext()) { - final EntityResult entityResult = resultIterator.next(); - - parquetWriter.write(entityResult); - } - } - } - - /** - * Converts a possible {@link MultilineEntityResult} to a stream of {@link SinglelineEntityResult}s. - * - * @param entityResult the result to convert. - * @return the stream of {@link SinglelineEntityResult}s - */ - private static Stream convertToSingleLine(EntityResult entityResult) { - if (entityResult instanceof SinglelineEntityResult) { - return Stream.of((SinglelineEntityResult) entityResult); - } - return entityResult.streamValues().map(line -> new SinglelineEntityResult(entityResult.getEntityId(), line)); - } - } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java index 6dfb0e61e0..f6c75099c1 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java @@ -4,13 +4,17 @@ import java.util.Locale; import java.util.OptionalLong; +import jakarta.inject.Inject; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; @@ -20,10 +24,6 @@ import com.bakdata.conquery.resources.api.ResultParquetResource; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -32,14 +32,14 @@ public class ResultParquetProcessor { public static final MediaType PARQUET_MEDIA_TYPE = MediaType.valueOf(ResultParquetResource.PARQUET_MEDIA_TYPE_STRING); - private final DatasetRegistry datasetRegistry; + private final DatasetRegistry datasetRegistry; private final ConqueryConfig config; public Response createResultFile(Subject subject, ManagedExecution exec, boolean pretty, OptionalLong limit) { ConqueryMDC.setLocation(subject.getName()); - final Dataset dataset = exec.getDataset(); + final DatasetId datasetId = exec.getDataset(); log.info("Downloading results for {}", exec.getId()); @@ -47,7 +47,7 @@ public Response createResultFile(Subject subject, ManagedExecution exec, boolean ResultUtil.checkSingleTableResult(exec); - final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Namespace namespace = datasetRegistry.get(datasetId); final IdPrinter idPrinter = IdColumnUtil.getIdPrinter(subject, exec, namespace, config.getIdColumns().getIds()); @@ -59,8 +59,8 @@ public Response createResultFile(Subject subject, ManagedExecution exec, boolean final SingleTableResult singleTableResult 
= (SingleTableResult) exec; ParquetRenderer.writeToStream( output, - config.getIdColumns().getIdResultInfos(settings), - singleTableResult.getResultInfos(settings), + config.getIdColumns().getIdResultInfos(), + singleTableResult.getResultInfos(), settings, singleTableResult.streamResults(limit) ); diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/ConqueryStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/ConqueryStorage.java index b9e9ba69ff..d23ef17c6f 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/ConqueryStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/ConqueryStorage.java @@ -3,7 +3,7 @@ import java.io.Closeable; import java.io.IOException; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import lombok.extern.slf4j.Slf4j; @@ -11,16 +11,21 @@ @Slf4j public abstract class ConqueryStorage implements Closeable { - public abstract CentralRegistry getCentralRegistry(); - /** * @implSpec The order defines the order of loading. Dependencies should be modeled here. * @implNote If you implement this method, please do it always from scratch and not using calls to super, it can be quite annoying. */ public abstract ImmutableList getStores(); - public abstract void openStores(ObjectMapper objectMapper); - + /** + * Initializes the internal stores. + * Injects this storage into the provided object mapper. + * + * @param objectMapper (optional) needed when the {@link com.bakdata.conquery.models.config.StoreFactory} deserializes objects + * @param metricRegistry + */ + public abstract void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry); + public final void loadData(){ for (ManagedStore store : getStores()) { store.loadData(); diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/DirectIdentifiableStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/DirectIdentifiableStore.java deleted file mode 100644 index 45d30b56b6..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/DirectIdentifiableStore.java +++ /dev/null @@ -1,72 +0,0 @@ -package com.bakdata.conquery.io.storage; - -import java.util.Optional; - -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; - -/** - * Registered items are directly referenced. 
Compare to {@link IdentifiableCachedStore} - */ -public class DirectIdentifiableStore> extends IdentifiableStore { - - public DirectIdentifiableStore(CentralRegistry centralRegistry, Store, VALUE> store) { - super(store, centralRegistry); - } - - @Override - protected Id extractKey(VALUE value) { - return (Id) value.getId(); - } - - @Override - protected void removed(VALUE value) { - try { - if (value == null) { - return; - } - - onRemove.accept(value); - centralRegistry.remove(value); - } - catch (Exception e) { - throw new RuntimeException("Failed to remove " + value, e); - } - } - - @Override - protected void added(VALUE value) { - try { - if (value == null) { - return; - } - - centralRegistry.register(value); - onAdd.accept(value); - } - catch (Exception e) { - throw new RuntimeException("Failed to add " + value, e); - } - } - - @Override - protected void updated(VALUE value) { - try { - if (value == null) { - return; - } - final Optional> old = centralRegistry.getOptional(value.getId()); - - if (old.isPresent()) { - onRemove.accept((VALUE) old.get()); - } - - centralRegistry.update(value); - onAdd.accept(value); - } - catch (Exception e) { - throw new RuntimeException("Failed to add " + value, e); - } - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/FailingProvider.java b/backend/src/main/java/com/bakdata/conquery/io/storage/FailingProvider.java new file mode 100644 index 0000000000..a7a6d30b07 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/FailingProvider.java @@ -0,0 +1,24 @@ +package com.bakdata.conquery.io.storage; + +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class FailingProvider implements NamespacedStorageProvider { + + public final static FailingProvider INSTANCE = new FailingProvider(); + public static final String ERROR_MSG = "Cannot be used in this environment. This id '%s' cannot be resolved on this node."; + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(NamespacedStorageProvider.class, this); + } + + @Override + public NamespacedStorage getStorage(DatasetId datasetId) { + throw new UnsupportedOperationException(ERROR_MSG.formatted(datasetId)); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableCachedStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableCachedStore.java deleted file mode 100644 index a53be0d30e..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableCachedStore.java +++ /dev/null @@ -1,77 +0,0 @@ -package com.bakdata.conquery.io.storage; - -import java.util.Optional; - -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import lombok.Getter; -import lombok.Setter; -import lombok.experimental.Accessors; - -/** - * Registers accessors of values instead of the value itself to the central registry. - * Might be useful if the object are very large and should only be loaded on demand. 
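The removed IdentifiableCachedStore's javadoc above describes its core idea: register an accessor instead of the value itself, so large objects are only materialized when they are actually resolved. A tiny plain-Java illustration of that idea, not the Conquery API.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

public class LazyRegistrySketch {

	// Registry that stores accessors (Suppliers) instead of values, so large objects
	// are only loaded when they are actually resolved.
	private final Map<String, Supplier<?>> accessors = new HashMap<>();

	void registerCacheable(String key, Supplier<?> accessor) {
		accessors.put(key, accessor);
	}

	Object resolve(String key) {
		final Supplier<?> accessor = accessors.get(key);
		return accessor == null ? null : accessor.get(); // loaded on demand
	}

	public static void main(String[] args) {
		final LazyRegistrySketch registry = new LazyRegistrySketch();
		registry.registerCacheable("bigConcept", () -> {
			System.out.println("loading now"); // only runs when resolved
			return "the expensive value";
		});
		System.out.println(registry.resolve("bigConcept"));
	}
}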
- */ -@Accessors(fluent=true) @Setter @Getter -public class IdentifiableCachedStore> extends IdentifiableStore { - - public IdentifiableCachedStore(CentralRegistry centralRegistry, Store, VALUE> store) { - super(store, centralRegistry); - } - - @Override - protected Id extractKey(VALUE value) { - return (Id) value.getId(); - } - - @Override - protected void removed(VALUE value) { - try { - if(value != null) { - onRemove.accept(value); - centralRegistry.remove(value); - } - } catch(Exception e) { - throw new RuntimeException("Failed to remove "+value, e); - } - } - - @Override - protected void added(VALUE value) { - try { - if(value != null) { - final Id key = extractKey(value); - centralRegistry.registerCacheable(key, this::get); - onAdd.accept(value); - } - } catch(Exception e) { - throw new RuntimeException("Failed to add "+value, e); - } - } - - @Override - protected void updated(VALUE value) { - try { - if(value != null) { - final Id key = extractKey(value); - final Optional oldOpt = centralRegistry.updateCacheable(key, this::get); - if (oldOpt.isPresent()) { - final VALUE old = (VALUE) oldOpt.get(); - onRemove.accept(old); - } - onAdd.accept(value); - } - } catch(Exception e) { - throw new RuntimeException("Failed to add "+value, e); - } - } - - @Override - public void loadData() { - store.loadData(); - for (Id key : getAllKeys()) { - centralRegistry.registerCacheable(key, this::get); - } - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableStore.java index 7fe75b7766..08076fef95 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableStore.java @@ -1,7 +1,6 @@ package com.bakdata.conquery.io.storage; import com.bakdata.conquery.io.storage.xodus.stores.KeyIncludingStore; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.util.functions.ThrowingConsumer; @@ -11,16 +10,14 @@ import lombok.experimental.Accessors; /** - * Store for Identifiable values. Directly delegates all changes to the supplied {@link CentralRegistry}. + * Store for Identifiable values. *

- * The {@link ThrowingConsumer}s can be used to reflect/model dependencies of the identifiable values inside the store. For example {@link com.bakdata.conquery.models.datasets.concepts.Concept} holds multiple {@link com.bakdata.conquery.models.datasets.concepts.Connector}s where a deletion of a concept requires the deletion of the Conncetors as well. {@link NamespacedStorage} is the main user of those two methods and should be looked at if desired. + * The {@link ThrowingConsumer}s can be used to reflect/model dependencies of the identifiable values inside the store. For example {@link com.bakdata.conquery.models.datasets.concepts.Concept} holds multiple {@link com.bakdata.conquery.models.datasets.concepts.Connector}s where a deletion of a concept requires the deletion of the Conncetors as well. {@link NamespacedStorageImpl} is the main user of those two methods and should be looked at if desired. */ @Accessors(fluent = true) @Setter @Getter -public abstract class IdentifiableStore> extends KeyIncludingStore, VALUE> { - - protected final CentralRegistry centralRegistry; +public class IdentifiableStore> extends KeyIncludingStore, VALUE> { // TODO: 09.01.2020 fk: Consider making these part of a class that is passed on creation instead so they are less loosely bound. @NonNull @@ -31,17 +28,60 @@ public abstract class IdentifiableStore> extends K protected ThrowingConsumer onRemove = (v) -> { }; - public IdentifiableStore(Store, VALUE> store, CentralRegistry centralRegistry) { + public IdentifiableStore(Store, VALUE> store) { super(store); - this.centralRegistry = centralRegistry; + } + + + @Override + protected Id extractKey(VALUE value) { + return (Id) value.getId(); + } + + @Override + protected void removed(VALUE value) { + try { + if (value == null) { + return; + } + + onRemove.accept(value); + } + catch (Exception e) { + throw new RuntimeException("Failed to remove " + value, e); + } } @Override - protected abstract Id extractKey(VALUE value); + protected void added(VALUE value) { + try { + if (value == null) { + return; + } - @Override - protected abstract void removed(VALUE value); + onAdd.accept(value); + } + catch (Exception e) { + throw new RuntimeException("Failed to add " + value, e); + } + } - @Override - protected abstract void added(VALUE value); + @Override + protected void updated(VALUE value) { + try { + if (value == null) { + return; + } + final VALUE old = store.get((Id) value.getId()); + + if (old != null) { + onRemove.accept(old); + } + + onAdd.accept(value); + } + catch (Exception e) { + throw new RuntimeException("Failed to add " + value, e); + } + } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java index d0cf222cef..a7e4285a53 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java @@ -1,6 +1,6 @@ package com.bakdata.conquery.io.storage; -import java.util.Collection; +import java.util.stream.Stream; import com.bakdata.conquery.io.jackson.Injectable; import com.bakdata.conquery.io.jackson.MutableInjectableValues; @@ -10,40 +10,42 @@ import com.bakdata.conquery.models.config.StoreFactory; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.*; +import 
com.bakdata.conquery.models.identifiable.IdResolvingException; +import com.bakdata.conquery.models.identifiable.ids.Id; +import com.bakdata.conquery.models.identifiable.ids.MetaId; +import com.bakdata.conquery.models.identifiable.ids.specific.FormConfigId; +import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; +import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.RoleId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +/** + * Access to persisted entities that are not namespace/dataset crucial (see {@link NamespacedStorageImpl}). + * All entities are loaded through a cache. The cache can be configured through the StoreFactory. + */ @Slf4j @RequiredArgsConstructor public class MetaStorage extends ConqueryStorage implements Injectable { - @Getter - protected final CentralRegistry centralRegistry = new CentralRegistry(); private final StoreFactory storageFactory; - private IdentifiableStore executions; private IdentifiableStore formConfigs; private IdentifiableStore authUser; private IdentifiableStore authRole; private IdentifiableStore authGroup; - public void openStores(ObjectMapper mapper) { - authUser = storageFactory.createUserStore(centralRegistry, "meta", this, mapper); - authRole = storageFactory.createRoleStore(centralRegistry, "meta", this, mapper); - authGroup = storageFactory.createGroupStore(centralRegistry, "meta", this, mapper); - // Executions depend on users - executions = storageFactory.createExecutionsStore(centralRegistry, "meta", mapper); - formConfigs = storageFactory.createFormConfigStore(centralRegistry, "meta", mapper); - + public static MetaStorage get(DeserializationContext ctxt) throws JsonMappingException { + return (MetaStorage) ctxt + .findInjectableValue(MetaStorage.class.getName(), null, null); } @Override @@ -63,12 +65,21 @@ public ImmutableList getStores() { ); } - @Override - public void clear() { - super.clear(); - centralRegistry.clear(); + public void openStores(ObjectMapper mapper, MetricRegistry metricRegistry) { + if (mapper != null) { + this.injectInto(mapper); + } + authUser = storageFactory.createUserStore("meta", mapper); + authRole = storageFactory.createRoleStore("meta", mapper); + authGroup = storageFactory.createGroupStore("meta", mapper); + // Executions depend on users + executions = storageFactory.createExecutionsStore("meta", mapper); + formConfigs = storageFactory.createFormConfigStore("meta", mapper); + } + // Executions + public void addExecution(ManagedExecution query) { executions.add(query); } @@ -77,19 +88,21 @@ public ManagedExecution getExecution(ManagedExecutionId id) { return executions.get(id); } - public Collection getAllExecutions() { - return executions.getAll(); + public Stream getAllExecutions() { + return executions.getAllKeys().map(executions::get); } - public void updateExecution(ManagedExecution query) { + public synchronized void updateExecution(ManagedExecution query) { executions.update(query); } - public void removeExecution(ManagedExecutionId id) { + public 
synchronized void removeExecution(ManagedExecutionId id) { executions.remove(id); } - public void addGroup(Group group) { + // Groups + + public synchronized void addGroup(Group group) { log.info("Adding group = {}", group.getId()); authGroup.add(group); } @@ -100,8 +113,8 @@ public Group getGroup(GroupId groupId) { return group; } - public Collection getAllGroups() { - return authGroup.getAll(); + public Stream getAllGroups() { + return authGroup.getAllKeys().map(authGroup::get); } public void removeGroup(GroupId id) { @@ -109,12 +122,14 @@ public void removeGroup(GroupId id) { authGroup.remove(id); } - public void updateGroup(Group group) { + public synchronized void updateGroup(Group group) { log.info("Updating group = {}", group.getId()); authGroup.update(group); } - public void addUser(User user) { + // User + + public synchronized void addUser(User user) { log.info("Adding user = {}", user.getId()); authUser.add(user); } @@ -125,21 +140,23 @@ public User getUser(UserId userId) { return user; } - public Collection getAllUsers() { - return authUser.getAll(); + public Stream getAllUsers() { + return authUser.getAllKeys().map(authUser::get); } - public void removeUser(UserId userId) { + public synchronized void removeUser(UserId userId) { log.info("Removing user = {}", userId); authUser.remove(userId); } - public void updateUser(User user) { + public synchronized void updateUser(User user) { log.info("Updating user = {}", user.getId()); authUser.update(user); } - public void addRole(Role role) { + // Roles + + public synchronized void addRole(Role role) { authRole.add(role); } @@ -149,49 +166,72 @@ public Role getRole(RoleId roleId) { return role; } - public Collection getAllRoles() { - return authRole.getAll(); + public Stream getAllRoles() { + return authRole.getAllKeys().map(authRole::get); } - public void removeRole(RoleId roleId) { + public synchronized void removeRole(RoleId roleId) { log.info("Removing role = {}", roleId); authRole.remove(roleId); } - public void updateRole(Role role) { + public synchronized void updateRole(Role role) { log.info("Updating role = {}", role.getId()); authRole.update(role); } + // FormConfigs + public FormConfig getFormConfig(FormConfigId id) { return formConfigs.get(id); } - public Collection getAllFormConfigs() { - return formConfigs.getAll(); + public Stream getAllFormConfigs() { + return formConfigs.getAllKeys().map(formConfigs::get); } - public void removeFormConfig(FormConfigId id) { + public synchronized void removeFormConfig(FormConfigId id) { formConfigs.remove(id); } @SneakyThrows - public void updateFormConfig(FormConfig formConfig) { + public synchronized void updateFormConfig(FormConfig formConfig) { formConfigs.update(formConfig); } @SneakyThrows - public void addFormConfig(FormConfig formConfig) { + public synchronized void addFormConfig(FormConfig formConfig) { formConfigs.add(formConfig); } + // Utility @Override public MutableInjectableValues inject(MutableInjectableValues values) { return values.add(MetaStorage.class, this); } - public static MetaStorage get(DeserializationContext ctxt) throws JsonMappingException { - return (MetaStorage) ctxt.findInjectableValue(MetaStorage.class.getName(), null, null); + /** + * Almost identical to {@link MetaStorage#get(Id)}, but throws an IdResolvingException if no object could be resolved. + * @return the object or throws an {@link IdResolvingException} if the Object could not be resolved. 
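The new MetaStorage#resolve documented above adds fail-fast semantics on top of get: get may return null, while resolve turns a missing or failing lookup into an IdResolvingException. A hedged usage sketch of that contract; ExecutionId, Execution and MissingIdException are placeholders, not Conquery types.

import java.util.HashMap;
import java.util.Map;

public class ResolveVsGetSketch {

	// Placeholder types standing in for MetaId-backed ids and their values
	record ExecutionId(String name) {}
	record Execution(ExecutionId id) {}

	static class MissingIdException extends RuntimeException {
		MissingIdException(ExecutionId id) {
			super("Could not resolve id " + id);
		}
	}

	private final Map<ExecutionId, Execution> store = new HashMap<>();

	Execution get(ExecutionId id) {
		return store.get(id); // may be null
	}

	Execution resolve(ExecutionId id) {
		final Execution value = get(id);
		if (value == null) {
			throw new MissingIdException(id); // fail fast instead of returning null
		}
		return value;
	}

	public static void main(String[] args) {
		final ResolveVsGetSketch storage = new ResolveVsGetSketch();
		final ExecutionId id = new ExecutionId("query-1");

		System.out.println(storage.get(id)); // null: caller must handle absence
		try {
			storage.resolve(id); // throws: absence is an error here
		}
		catch (MissingIdException e) {
			System.out.println(e.getMessage());
		}
	}
}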
+ */ + public & MetaId, VALUE> VALUE resolve(ID id) { + try { + VALUE o = get(id); + if (o == null) { + throw new IdResolvingException(id); + } + return o; + } + catch (IdResolvingException e) { + throw e; + } + catch (Exception e) { + throw new IdResolvingException(id, e); + } + } + + public & MetaId, VALUE> VALUE get(ID id) { + return (VALUE) id.get(this); } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/ModificationShieldedWorkerStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/ModificationShieldedWorkerStorage.java index 4cd1c80f4a..a761782331 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/ModificationShieldedWorkerStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/ModificationShieldedWorkerStorage.java @@ -1,74 +1,135 @@ package com.bakdata.conquery.io.storage; -import java.util.Collection; - +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import com.bakdata.conquery.models.worker.WorkerInformation; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; import lombok.RequiredArgsConstructor; import lombok.ToString; +import lombok.experimental.Delegate; /** * Provides a view on the storage that does not allow modification of the storage (update, delete). 
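ModificationShieldedWorkerStorage below implements this by delegating reads through Lombok's @Delegate while overriding every mutator to throw UnsupportedOperationException. A reduced sketch of the same read-only-view pattern with illustrative types.

import java.util.HashMap;
import java.util.Map;

public class ReadOnlyViewSketch {

	// Illustrative storage interface with read and write operations
	interface Storage {
		String get(String key);
		void put(String key, String value);
	}

	// Read-only view: reads are delegated, writes fail loudly
	record ShieldedStorage(Storage delegate) implements Storage {
		@Override
		public String get(String key) {
			return delegate.get(key);
		}

		@Override
		public void put(String key, String value) {
			throw new UnsupportedOperationException("This view does not allow modification");
		}
	}

	public static void main(String[] args) {
		final Map<String, String> backing = new HashMap<>(Map.of("k", "v"));
		final Storage shielded = new ShieldedStorage(new Storage() {
			public String get(String key) { return backing.get(key); }
			public void put(String key, String value) { backing.put(key, value); }
		});

		System.out.println(shielded.get("k")); // reads pass through
		try {
			shielded.put("k", "x"); // writes are rejected
		}
		catch (UnsupportedOperationException e) {
			System.out.println("rejected: " + e.getMessage());
		}
	}
}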
*/ @RequiredArgsConstructor @ToString(of = "delegate") -public class ModificationShieldedWorkerStorage { +public class ModificationShieldedWorkerStorage implements WorkerStorage, Injectable { + @Delegate private final WorkerStorage delegate; - public CentralRegistry getCentralRegistry() { - return delegate.getCentralRegistry(); + @Override + public void addCBlock(CBlock cBlock) { + throw new UnsupportedOperationException(); } + @Override + public void removeCBlock(CBlockId id) { + throw new UnsupportedOperationException(); + } - public Import getImport(ImportId id) { - return delegate.getImport(id); + @Override + public void addBucket(Bucket bucket) { + throw new UnsupportedOperationException(); } - public Collection getAllImports() { - return delegate.getAllImports(); + @Override + public void removeBucket(BucketId id) { + throw new UnsupportedOperationException(); } + @Override + public void setWorker(WorkerInformation worker) { + throw new UnsupportedOperationException(); + } + @Override + public void updateWorker(WorkerInformation worker) { + throw new UnsupportedOperationException(); + } - public Dataset getDataset() { - return delegate.getDataset(); + @Override + public void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry) { + throw new UnsupportedOperationException(); } + @Override + public void removeStorage() { + throw new UnsupportedOperationException(); + } - public Collection> getAllConcepts() { - return delegate.getAllConcepts(); + @Override + public void addImport(Import imp) { + throw new UnsupportedOperationException(); } + @Override + public void updateImport(Import imp) { + throw new UnsupportedOperationException(); + } - public Bucket getBucket(BucketId id) { - return delegate.getBucket(id); + @Override + public void removeImport(ImportId id) { + throw new UnsupportedOperationException(); } + @Override + public void updateDataset(Dataset dataset) { + throw new UnsupportedOperationException(); + } - public Collection getAllBuckets() { - return delegate.getAllBuckets(); + @Override + public void addTable(Table table) { + throw new UnsupportedOperationException(); } + @Override + public void removeTable(TableId table) { + throw new UnsupportedOperationException(); + } - public Collection getAllCBlocks() { - return delegate.getAllCBlocks(); + @Override + public void addSecondaryId(SecondaryIdDescription secondaryIdDescription) { + throw new UnsupportedOperationException(); } - public Table getTable(TableId tableId){ - return delegate.getTable(tableId); + @Override + public void removeSecondaryId(SecondaryIdDescriptionId secondaryIdDescriptionId) { + throw new UnsupportedOperationException(); } - public Concept getConcept(ConceptId conceptId) { - return delegate.getConcept(conceptId); + @Override + public void updateConcept(Concept concept) { + throw new UnsupportedOperationException(); } + + @Override + public void removeConcept(ConceptId id) { + throw new UnsupportedOperationException(); + } + + @Override + public void close() { + throw new UnsupportedOperationException(); + } + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(WorkerStorageImpl.class, this); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespaceStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespaceStorage.java index f22cc675e9..7a1b52173a 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespaceStorage.java +++ 
b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespaceStorage.java @@ -1,11 +1,9 @@ package com.bakdata.conquery.io.storage; -import java.util.Collection; import java.util.Objects; +import java.util.stream.Stream; -import com.bakdata.conquery.io.jackson.Injectable; import com.bakdata.conquery.io.jackson.MutableInjectableValues; -import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; import com.bakdata.conquery.models.config.StoreFactory; import com.bakdata.conquery.models.datasets.PreviewConfig; @@ -16,14 +14,13 @@ import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; import com.bakdata.conquery.models.worker.WorkerToBucketsMap; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import lombok.ToString; import lombok.extern.slf4j.Slf4j; @Slf4j -@ToString -public class NamespaceStorage extends NamespacedStorage implements Injectable { +public class NamespaceStorage extends NamespacedStorageImpl { protected IdentifiableStore internToExternMappers; protected IdentifiableStore searchIndexes; @@ -32,7 +29,7 @@ public class NamespaceStorage extends NamespacedStorage implements Injectable { protected SingletonStore preview; protected SingletonStore workerToBuckets; - protected CachedStore entity2Bucket; + protected Store entity2Bucket; public NamespaceStorage(StoreFactory storageFactory, String pathName) { super(storageFactory, pathName); @@ -44,18 +41,24 @@ private void decorateIdMapping(SingletonStore idMapping) { .onAdd(mapping -> mapping.setStorage(this)); } + private void decorateInternToExternMappingStore(IdentifiableStore store) { + // We don't call internToExternMapper::init this is done by the first select that needs the mapping + } + + @Override - public void openStores(ObjectMapper objectMapper) { - super.openStores(objectMapper); + public void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry) { + super.openStores(objectMapper, metricRegistry); - internToExternMappers = getStorageFactory().createInternToExternMappingStore(super.getPathName(), getCentralRegistry(), objectMapper); - searchIndexes = getStorageFactory().createSearchIndexStore(super.getPathName(), getCentralRegistry(), objectMapper); + internToExternMappers = getStorageFactory().createInternToExternMappingStore(super.getPathName(), objectMapper); + searchIndexes = getStorageFactory().createSearchIndexStore(super.getPathName(), objectMapper); idMapping = getStorageFactory().createIdMappingStore(super.getPathName(), objectMapper); - structure = getStorageFactory().createStructureStore(super.getPathName(), getCentralRegistry(), objectMapper); + structure = getStorageFactory().createStructureStore(super.getPathName(), objectMapper); workerToBuckets = getStorageFactory().createWorkerToBucketsStore(super.getPathName(), objectMapper); - preview = getStorageFactory().createPreviewStore(super.getPathName(), getCentralRegistry(), objectMapper); + preview = getStorageFactory().createPreviewStore(super.getPathName(), objectMapper); entity2Bucket = getStorageFactory().createEntity2BucketStore(super.getPathName(), objectMapper); + decorateInternToExternMappingStore(internToExternMappers); decorateIdMapping(idMapping); } @@ -83,17 +86,18 @@ public ImmutableList getStores() { } - + // IdMapping public EntityIdMap getIdMapping() { return idMapping.get(); } - public void 
updateIdMapping(EntityIdMap idMapping) { this.idMapping.update(idMapping); } + // Bucket to Worker Assignment + public void setWorkerToBucketsMap(WorkerToBucketsMap map) { workerToBuckets.update(map); } @@ -115,6 +119,7 @@ public void registerEntity(String entity, int bucket) { entity2Bucket.update(entity, bucket); } + // Structure public StructureNode[] getStructure() { return Objects.requireNonNullElseGet(structure.get(), () -> new StructureNode[0]); @@ -124,7 +129,13 @@ public void updateStructure(StructureNode[] structure) { this.structure.update(structure); } + // InternToExternMappers + public InternToExternMapper getInternToExternMapper(InternToExternMapperId id) { + return getInternToExternMapperFromStorage(id); + } + + private InternToExternMapper getInternToExternMapperFromStorage(InternToExternMapperId id) { return internToExternMappers.get(id); } @@ -136,26 +147,34 @@ public void removeInternToExternMapper(InternToExternMapperId id) { internToExternMappers.remove(id); } - public Collection getInternToExternMappers() { + public Stream getInternToExternMappers() { return internToExternMappers.getAll(); } - public void removeSearchIndex(SearchIndexId id) { - searchIndexes.remove(id); - } + // SearchIndices public SearchIndex getSearchIndex(SearchIndexId id) { + return getSearchIndexFromStorage(id); + } + + private SearchIndex getSearchIndexFromStorage(SearchIndexId id) { return searchIndexes.get(id); } + public void removeSearchIndex(SearchIndexId id) { + searchIndexes.remove(id); + } + public void addSearchIndex(SearchIndex searchIndex) { searchIndexes.add(searchIndex); } - public Collection getSearchIndices() { + public Stream getSearchIndices() { return searchIndexes.getAll(); } + // PreviewConfig + public void setPreviewConfig(PreviewConfig previewConfig){ preview.update(previewConfig); } @@ -168,6 +187,7 @@ public void removePreviewConfig() { preview.remove(); } + // Utilities @Override public MutableInjectableValues inject(MutableInjectableValues values) { diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorage.java index 25baf5df05..e934f53621 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorage.java @@ -1,231 +1,68 @@ -package com.bakdata.conquery.io.storage; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import com.bakdata.conquery.io.jackson.Injectable; -import com.bakdata.conquery.io.jackson.MutableInjectableValues; -import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; -import com.bakdata.conquery.models.config.StoreFactory; -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Import; -import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.Connector; -import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; -import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; -import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; 
-import com.bakdata.conquery.models.identifiable.ids.specific.TableId; -import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; -import lombok.Getter; -import lombok.SneakyThrows; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; - -/** - * Overlapping storage structure for {@link WorkerStorage} and {@link NamespaceStorage}. - * The reason for the overlap ist primarily that all this stored members are necessary in the - * SerDes communication between the manager and the shards/worker for the resolving of ids included in - * messages (see also {@link com.bakdata.conquery.io.jackson.serializer.NsIdRef}). - */ -@Slf4j -@ToString(onlyExplicitlyIncluded = true) -public abstract class NamespacedStorage extends ConqueryStorage implements Injectable { - - @Getter - protected final CentralRegistry centralRegistry = new CentralRegistry(); - @Getter - @ToString.Include - private final String pathName; - @Getter - private final StoreFactory storageFactory; - - protected SingletonStore dataset; - protected IdentifiableStore secondaryIds; - protected IdentifiableStore
<Table>
tables; - protected IdentifiableStore imports; - protected IdentifiableStore> concepts; - - public NamespacedStorage(StoreFactory storageFactory, String pathName) { - this.pathName = pathName; - this.storageFactory = storageFactory; - } - - public void openStores(ObjectMapper objectMapper) { - // Before we start to parse the stores we need to replace the injected value for the IdResolveContext (from DatasetRegistry to this centralRegistry) - new SingletonNamespaceCollection(centralRegistry).injectInto(objectMapper); - this.injectInto(objectMapper); - - dataset = storageFactory.createDatasetStore(pathName, objectMapper); - secondaryIds = storageFactory.createSecondaryIdDescriptionStore(centralRegistry, pathName, objectMapper); - tables = storageFactory.createTableStore(centralRegistry, pathName, objectMapper); - imports = storageFactory.createImportStore(centralRegistry, pathName, objectMapper); - concepts = storageFactory.createConceptStore(centralRegistry, pathName, objectMapper); - - decorateDatasetStore(dataset); - decorateTableStore(tables); - decorateConceptStore(concepts); - } - - @Override - public ImmutableList getStores() { - return ImmutableList.of(dataset, secondaryIds, tables, imports, concepts); - } - - @Override - public void clear() { - super.clear(); - centralRegistry.clear(); - } - - private void decorateDatasetStore(SingletonStore store) { - store.onAdd(centralRegistry::register).onRemove(centralRegistry::remove); - } - - private void decorateTableStore(IdentifiableStore
store) { - store.onAdd(table -> { - for (Column column : table.getColumns()) { - column.init(); - getCentralRegistry().register(column); - } - }) - .onRemove(table -> { - for (Column c : table.getColumns()) { - getCentralRegistry().remove(c); - } - }); - } - - private void decorateConceptStore(IdentifiableStore> store) { - store.onAdd(concept -> { - - if (concept.getDataset() == null) { - throw new IllegalStateException("Concept had no dataset set"); - } - - if (!concept.getDataset().equals(dataset.get())) { - throw new IllegalStateException("Concept is not for this dataset."); - } - - concept.getSelects().forEach(centralRegistry::register); - for (Connector connector : concept.getConnectors()) { - centralRegistry.register(connector); - connector.collectAllFilters().forEach(centralRegistry::register); - connector.getSelects().forEach(centralRegistry::register); - connector.getValidityDates().forEach(centralRegistry::register); - } - - - if (concept instanceof TreeConcept) { - ((TreeConcept) concept).getAllChildren().forEach(centralRegistry::register); - } - }).onRemove(concept -> { - concept.getSelects().forEach(centralRegistry::remove); - //see #146 remove from Dataset.concepts - for (Connector connector : concept.getConnectors()) { - connector.getSelects().forEach(centralRegistry::remove); - connector.collectAllFilters().forEach(centralRegistry::remove); - connector.getValidityDates().forEach(centralRegistry::remove); - centralRegistry.remove(connector); - } - - if (concept instanceof TreeConcept) { - ((TreeConcept) concept).getAllChildren().forEach(centralRegistry::remove); - } - }); - } - - public void addImport(Import imp) { - imports.add(imp); - } - - public Import getImport(ImportId id) { - return imports.get(id); - } - - public Collection getAllImports() { - return imports.getAll(); - } - - public void updateImport(Import imp) { - imports.update(imp); - } - - public void removeImport(ImportId id) { - imports.remove(id); - } - - public Dataset getDataset() { - return dataset.get(); - } - - public void updateDataset(Dataset dataset) { - this.dataset.update(dataset); - } - - public List
getTables() { - return new ArrayList<>(tables.getAll()); - } - - public Table getTable(TableId tableId) { - return tables.get(tableId); - } - - public void addTable(Table table) { - tables.add(table); - } - - public void removeTable(TableId table) { - tables.remove(table); - } - - public List getSecondaryIds() { - return new ArrayList<>(secondaryIds.getAll()); - } - - public SecondaryIdDescription getSecondaryId(SecondaryIdDescriptionId descriptionId) { - return secondaryIds.get(descriptionId); - } - - public void addSecondaryId(SecondaryIdDescription secondaryIdDescription) { - secondaryIds.add(secondaryIdDescription); - } - - public void removeSecondaryId(SecondaryIdDescriptionId secondaryIdDescriptionId) { - secondaryIds.remove(secondaryIdDescriptionId); - } - - public Concept getConcept(ConceptId id) { - return concepts.get(id); - } - - public boolean hasConcept(ConceptId id) { - return concepts.get(id) != null; - } - - @SneakyThrows - public void updateConcept(Concept concept) { - concepts.update(concept); - } - - public void removeConcept(ConceptId id) { - concepts.remove(id); - } - - public Collection> getAllConcepts() { - return concepts.getAll(); - } - - - @Override - public MutableInjectableValues inject(MutableInjectableValues values) { - return values.add(NamespacedStorage.class, this); - } -} +package com.bakdata.conquery.io.storage; + +import java.util.stream.Stream; + +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import lombok.SneakyThrows; + +public interface NamespacedStorage extends NamespacedStorageProvider { + void addImport(Import imp); + + Import getImport(ImportId id); + + Stream getAllImports(); + + void updateImport(Import imp); + + void removeImport(ImportId id); + + void updateDataset(Dataset dataset); + + Table getTable(TableId tableId); + + Stream
getTables(); + + void addTable(Table table); + + void removeTable(TableId table); + + SecondaryIdDescription getSecondaryId(SecondaryIdDescriptionId descriptionId); + + Stream getSecondaryIds(); + + void addSecondaryId(SecondaryIdDescription secondaryIdDescription); + + void removeSecondaryId(SecondaryIdDescriptionId secondaryIdDescriptionId); + + Concept getConcept(ConceptId id); + + Stream> getAllConcepts(); + + boolean hasConcept(ConceptId id); + + @SneakyThrows + void updateConcept(Concept concept); + + void removeConcept(ConceptId id); + + @Override + default NamespacedStorage getStorage(DatasetId datasetId) { + if (getDataset() == null || datasetId.getName().equals(getDataset().getName())) { + // Storage was empty (new Worker/Namespace) or it matches + return this; + } + return null; + } + + Dataset getDataset(); +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorageImpl.java b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorageImpl.java new file mode 100644 index 0000000000..0e0555a94c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorageImpl.java @@ -0,0 +1,249 @@ +package com.bakdata.conquery.io.storage; + +import java.util.stream.Stream; + +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; +import com.bakdata.conquery.models.config.StoreFactory; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; +import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import lombok.Getter; +import lombok.SneakyThrows; +import lombok.ToString; +import lombok.extern.slf4j.Slf4j; + +/** + * Overlapping storage structure for {@link WorkerStorageImpl} and {@link NamespaceStorage}. + * The reason for the overlap ist primarily that all this stored members are necessary in the + * SerDes communication between the manager and the shards/worker for the resolving of ids included in + * messages. + */ +@Slf4j +@ToString(onlyExplicitlyIncluded = true) +public abstract class NamespacedStorageImpl extends ConqueryStorage implements Injectable, NamespacedStorage { + + @Getter + @ToString.Include + private final String pathName; + @Getter + private final StoreFactory storageFactory; + + protected SingletonStore dataset; + protected IdentifiableStore secondaryIds; + protected IdentifiableStore
tables; + protected IdentifiableStore imports; + protected IdentifiableStore> concepts; + + public NamespacedStorageImpl(StoreFactory storageFactory, String pathName) { + this.pathName = pathName; + this.storageFactory = storageFactory; + } + + @Override + public ImmutableList getStores() { + return ImmutableList.of(dataset, secondaryIds, tables, imports, concepts); + } + + public void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry) { + if (objectMapper != null) { + injectInto(objectMapper); + } + + dataset = storageFactory.createDatasetStore(pathName, objectMapper); + secondaryIds = storageFactory.createSecondaryIdDescriptionStore(pathName, objectMapper); + tables = storageFactory.createTableStore(pathName, objectMapper); + imports = storageFactory.createImportStore(pathName, objectMapper); + concepts = storageFactory.createConceptStore(pathName, objectMapper); + + decorateDatasetStore(dataset); + decorateSecondaryIdDescriptionStore(secondaryIds); + decorateTableStore(tables); + decorateImportStore(imports); + decorateConceptStore(concepts); + } + + private void decorateDatasetStore(SingletonStore store) { + } + + private void decorateSecondaryIdDescriptionStore(IdentifiableStore store) { + // Nothing to decorate + } + + private void decorateTableStore(IdentifiableStore
store) { + + } + + private void decorateImportStore(IdentifiableStore store) { + // Intentionally left blank + } + + private void decorateConceptStore(IdentifiableStore> store) { + store.onAdd(concept -> { + + if (concept.getDataset() != null && !concept.getDataset().equals(dataset.get().getId())) { + throw new IllegalStateException("Concept is not for this dataset."); + } + + concept.setDataset(dataset.get().getId()); + + }); + } + + // Imports + + @Override + public void addImport(Import imp) { + imports.add(imp); + } + + @Override + public Import getImport(ImportId id) { + return getImportFromStorage(id); + } + + private Import getImportFromStorage(ImportId id) { + return imports.get(id); + } + + @Override + public Stream getAllImports() { + return imports.getAll(); + } + + @Override + public void updateImport(Import imp) { + imports.update(imp); + } + + @Override + public void removeImport(ImportId id) { + imports.remove(id); + } + + // Datasets + + @Override + public void updateDataset(Dataset dataset) { + this.dataset.update(dataset); + } + public & NamespacedId, VALUE> VALUE get(ID id) { + return (VALUE) id.get(this); + } +@Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(NamespacedStorageProvider.class, this). + add(NamespacedStorage.class, this); + }@Override + public Dataset getDataset() { + return dataset.get(); + } + + + + // Tables + + @Override + public Table getTable(TableId tableId) { + return getTableFromStorage(tableId); + } + + private Table getTableFromStorage(TableId tableId) { + return tables.get(tableId); + } + + @Override + public Stream
getTables() { + return tables.getAllKeys().map(TableId.class::cast).map(this::getTable); + } + + + @Override + public void addTable(Table table) { + tables.add(table); + } + + @Override + public void removeTable(TableId table) { + tables.remove(table); + } + + // SecondaryId + + @Override + public SecondaryIdDescription getSecondaryId(SecondaryIdDescriptionId descriptionId) { + return getSecondaryIdFromStorage(descriptionId); + } + + private SecondaryIdDescription getSecondaryIdFromStorage(SecondaryIdDescriptionId descriptionId) { + return secondaryIds.get(descriptionId); + } + + @Override + public Stream getSecondaryIds() { + return secondaryIds.getAllKeys().map(SecondaryIdDescriptionId.class::cast).map(this::getSecondaryId); + } + + @Override + public void addSecondaryId(SecondaryIdDescription secondaryIdDescription) { + secondaryIds.add(secondaryIdDescription); + } + + @Override + public void removeSecondaryId(SecondaryIdDescriptionId secondaryIdDescriptionId) { + secondaryIds.remove(secondaryIdDescriptionId); + } + + // Concepts + + @Override + public Concept getConcept(ConceptId id) { + return getConceptFromStorage(id); + } + + private Concept getConceptFromStorage(ConceptId id) { + return concepts.get(id); + } + + @Override + public Stream> getAllConcepts() { + return concepts.getAllKeys().map(ConceptId.class::cast).map(this::getConcept); + } + + @Override + public boolean hasConcept(ConceptId id) { + return concepts.get(id) != null; + } + + @Override + @SneakyThrows + public void updateConcept(Concept concept) { + log.debug("Updating Concept[{}]", concept.getId()); + concepts.update(concept); + } + + @Override + public void removeConcept(ConceptId id) { + log.debug("Removing Concept[{}]", id); + concepts.remove(id); + } + + // Utility + + + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/Store.java b/backend/src/main/java/com/bakdata/conquery/io/storage/Store.java index 51e92011a6..0458b2f8d5 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/Store.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/Store.java @@ -1,6 +1,6 @@ package com.bakdata.conquery.io.storage; -import java.util.Collection; +import java.util.stream.Stream; import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore.IterationStatistic; @@ -14,17 +14,17 @@ public interface Store extends ManagedStore { // TODO: 08.01.2020 fk: Is this still necessary? The implementation in XodusStore uses different methods that in our context don't act differently. public void update(KEY key, VALUE value); - + public void remove(KEY key); public int count(); - public Collection getAll(); + public Stream getAll(); - public Collection getAllKeys(); + public Stream getAllKeys(); - /** + /** * Consumer of key-value pairs stored in this Store. Used in conjunction with for-each. 
*/ @FunctionalInterface diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/StoreMappings.java b/backend/src/main/java/com/bakdata/conquery/io/storage/StoreMappings.java index ef3483f264..f025319b58 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/StoreMappings.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/StoreMappings.java @@ -1,6 +1,5 @@ package com.bakdata.conquery.io.storage; -import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; import com.bakdata.conquery.io.storage.xodus.stores.StoreInfo; import com.bakdata.conquery.models.auth.entities.Group; @@ -17,7 +16,6 @@ import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; @@ -50,7 +48,7 @@ */ @RequiredArgsConstructor @Getter -@ToString(of = {"name", "keyType", "valueType"}) +@ToString(of = {"keyType", "valueType"}) public enum StoreMappings { AUTH_GROUP(Group.class, GroupId.class), @@ -79,24 +77,10 @@ public enum StoreMappings { private final Class keyType; /** - * Store for identifiable values, with injectors. Store is also cached. + * Store for identifiable values, with injectors. */ - public static > DirectIdentifiableStore identifiable(Store, T> baseStore, CentralRegistry centralRegistry) { - return new DirectIdentifiableStore<>(centralRegistry, baseStore); - } - - /** - * General Key-Value store with caching. - */ - public static CachedStore cached(Store baseStore) { - return new CachedStore<>(baseStore); - } - - /** - * Identifiable store, that lazy registers items in the central registry. 
- */ - public static > IdentifiableCachedStore identifiableCachedStore(Store, T> baseStore, CentralRegistry centralRegistry) { - return new IdentifiableCachedStore(centralRegistry, baseStore); + public static > IdentifiableStore identifiable(Store, T> baseStore) { + return new IdentifiableStore<>(baseStore); } /** diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorage.java index ac51d97c4b..128a708973 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorage.java @@ -1,121 +1,45 @@ -package com.bakdata.conquery.io.storage; - -import java.util.Collection; - -import com.bakdata.conquery.io.jackson.Injectable; -import com.bakdata.conquery.io.jackson.MutableInjectableValues; -import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; -import com.bakdata.conquery.models.config.StoreFactory; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.events.Bucket; -import com.bakdata.conquery.models.events.CBlock; -import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; -import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; -import com.bakdata.conquery.models.worker.WorkerInformation; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -@ToString(of = "worker") -public class WorkerStorage extends NamespacedStorage implements Injectable { - - private SingletonStore worker; - private IdentifiableStore buckets; - private IdentifiableStore cBlocks; - - public WorkerStorage(StoreFactory storageFactory, String pathName) { - super(storageFactory, pathName); - } - - @Override - public void openStores(ObjectMapper objectMapper) { - super.openStores(objectMapper); - - worker = getStorageFactory().createWorkerInformationStore(getPathName(), objectMapper); - buckets = getStorageFactory().createBucketStore(centralRegistry, getPathName(), objectMapper); - cBlocks = getStorageFactory().createCBlockStore(centralRegistry, getPathName(), objectMapper); - } - - @Override - public ImmutableList getStores() { - return ImmutableList.of( - dataset, - secondaryIds, - tables, - imports, - concepts, - - worker, - buckets, - cBlocks - ); - } - - - public void addCBlock(CBlock cBlock) { - log.trace("Adding CBlock[{}]", cBlock.getId()); - cBlocks.add(cBlock); - } - - public CBlock getCBlock(CBlockId id) { - return cBlocks.get(id); - } - - public void removeCBlock(CBlockId id) { - log.trace("Removing CBlock[{}]", id); - cBlocks.remove(id); - } - - public Collection getAllCBlocks() { - return cBlocks.getAll(); - } - - public void addBucket(Bucket bucket) { - log.trace("Adding Bucket[{}]", bucket.getId()); - buckets.add(bucket); - } - - public Bucket getBucket(BucketId id) { - return buckets.get(id); - } - - public void removeBucket(BucketId id) { - log.trace("Removing Bucket[{}]", id); - buckets.remove(id); - } - - public Collection getAllBuckets() { - return buckets.getAll(); - } - - public WorkerInformation getWorker() { - return worker.get(); - } - - public void setWorker(WorkerInformation worker) { - this.worker.add(worker); - } - - public void updateWorker(WorkerInformation worker) { - this.worker.update(worker); - } - - //block manager overrides - 
public void updateConcept(Concept concept) { - log.debug("Updating Concept[{}]", concept.getId()); - concepts.update(concept); - } - - public void removeConcept(ConceptId id) { - log.debug("Removing Concept[{}]", id); - concepts.remove(id); - } - - @Override - public MutableInjectableValues inject(MutableInjectableValues values) { - return super.inject(values).add(WorkerStorage.class, this); - } -} +package com.bakdata.conquery.io.storage; + +import java.io.Closeable; +import java.util.stream.Stream; + +import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.worker.WorkerInformation; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; + +public interface WorkerStorage extends NamespacedStorage, Closeable { + void addCBlock(CBlock cBlock); + + CBlock getCBlock(CBlockId id); + + void removeCBlock(CBlockId id); + + Stream getAllCBlocks(); + + Stream getAllCBlockIds(); + + void addBucket(Bucket bucket); + + Bucket getBucket(BucketId id); + + void removeBucket(BucketId id); + + Stream getAllBuckets(); + + Stream getAllBucketIds(); + + WorkerInformation getWorker(); + + void setWorker(WorkerInformation worker); + + void updateWorker(WorkerInformation worker); + + + void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry); + void loadData(); + void removeStorage(); +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorageImpl.java b/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorageImpl.java new file mode 100644 index 0000000000..118ed982eb --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorageImpl.java @@ -0,0 +1,153 @@ +package com.bakdata.conquery.io.storage; + +import java.util.stream.Stream; +import jakarta.validation.Validator; + +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; +import com.bakdata.conquery.models.config.StoreFactory; +import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.worker.WorkerInformation; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import lombok.ToString; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@ToString(of = "worker") +public class WorkerStorageImpl extends NamespacedStorageImpl implements WorkerStorage { + + private SingletonStore worker; + private IdentifiableStore buckets; + private IdentifiableStore cBlocks; + + public WorkerStorageImpl(StoreFactory storageFactory, Validator validator, String pathName) { + super(storageFactory, pathName); + } + + @Override + public ImmutableList getStores() { + return ImmutableList.of( + dataset, + secondaryIds, + tables, + imports, + concepts, + + worker, + buckets, + cBlocks + ); + } + + @Override + public void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry) { + super.openStores(objectMapper, metricRegistry); + + worker = getStorageFactory().createWorkerInformationStore(getPathName(), objectMapper); + buckets = 
getStorageFactory().createBucketStore(getPathName(), objectMapper); + cBlocks = getStorageFactory().createCBlockStore(getPathName(), objectMapper); + + decorateWorkerStore(worker); + decorateBucketStore(buckets); + decorateCBlockStore(cBlocks); + } + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return super.inject(values).add(WorkerStorage.class, this); + } + + private void decorateWorkerStore(SingletonStore store) { + // Nothing to decorate + } + + private void decorateBucketStore(IdentifiableStore store) { + // Nothing to decorate + } + + // CBlocks + + private void decorateCBlockStore(IdentifiableStore baseStoreCreator) { + // Nothing to decorate + } + + @Override + public void addCBlock(CBlock cBlock) { + log.trace("Adding CBlock[{}]", cBlock.getId()); + cBlocks.add(cBlock); + } @Override + public CBlock getCBlock(CBlockId id) { + return cBlocks.get(id); + } + + @Override + public void removeCBlock(CBlockId id) { + log.trace("Removing CBlock[{}]", id); + cBlocks.remove(id); + } + + @Override + public Stream getAllCBlocks() { + return cBlocks.getAllKeys().map(CBlockId.class::cast).map(this::getCBlock); + } + + @Override + public Stream getAllCBlockIds() { + return cBlocks.getAllKeys().map(CBlockId.class::cast); + } + + // Buckets + + @Override + public void addBucket(Bucket bucket) { + log.trace("Adding Bucket[{}]", bucket.getId()); + buckets.add(bucket); + } + + @Override + public Bucket getBucket(BucketId id) { + return buckets.get(id); + } + + @Override + public void removeBucket(BucketId id) { + log.trace("Removing Bucket[{}]", id); + buckets.remove(id); + } + + @Override + public Stream getAllBuckets() { + return buckets.getAllKeys().map(BucketId.class::cast).map(this::getBucket); + } + + @Override + public Stream getAllBucketIds() { + return buckets.getAllKeys().map(BucketId.class::cast); + } + + // Worker + + @Override + public WorkerInformation getWorker() { + return worker.get(); + } + + @Override + public void setWorker(WorkerInformation worker) { + this.worker.add(worker); + } + + @Override + public void updateWorker(WorkerInformation worker) { + this.worker.update(worker); + } + + // Utilities + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java index 96e2ca06f2..9097885ec4 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java @@ -9,15 +9,15 @@ import java.io.SequenceInputStream; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.UUID; -import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.stream.Stream; +import jakarta.validation.Validator; +import jakarta.validation.constraints.NotEmpty; import com.bakdata.conquery.io.mina.ChunkingOutputStream; import com.bakdata.conquery.io.storage.Store; @@ -28,8 +28,6 @@ import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import com.google.common.primitives.Ints; -import jakarta.validation.Validator; -import jakarta.validation.constraints.NotEmpty; import jetbrains.exodus.env.Environment; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ 
-153,15 +151,13 @@ public int count() { } @Override - public Collection getAll() { + public Stream getAll() { throw new UnsupportedOperationException(); } @Override - public Collection getAllKeys() { - Collection out = new ConcurrentLinkedQueue<>(); // has to be concurrent because forEach is concurrent. - metaStore.forEach((key, value, size) -> out.add(key)); - return out; + public Stream getAllKeys() { + return metaStore.getAllKeys(); } private BigStoreMetaKeys writeValue(VALUE value) { diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java index bf6588683f..043058bfaf 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java @@ -1,9 +1,9 @@ package com.bakdata.conquery.io.storage.xodus.stores; import java.io.IOException; -import java.util.Collection; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.LongAdder; +import java.util.stream.Stream; import com.bakdata.conquery.io.jackson.serializer.IdReferenceResolvingException; import com.bakdata.conquery.io.storage.Store; @@ -115,8 +115,8 @@ public void loadData() { } @Override - public Collection getAll() { - return cache.values(); + public Stream getAll() { + return cache.values().stream(); } @Override @@ -125,8 +125,8 @@ public String toString() { } @Override - public Collection getAllKeys() { - return cache.keySet(); + public Stream getAllKeys() { + return cache.keySet().stream(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java index d89a3c5f87..e04b05acac 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java @@ -2,9 +2,7 @@ import java.io.Closeable; import java.io.IOException; -import java.util.Collection; - -import com.bakdata.conquery.io.storage.Store; +import java.util.stream.Stream; import com.bakdata.conquery.io.storage.ManagedStore; import com.bakdata.conquery.io.storage.Store; @@ -12,18 +10,18 @@ public abstract class KeyIncludingStore implements Closeable, ManagedStore { protected final Store store; - + public KeyIncludingStore(Store store) { this.store = store; } - + protected abstract KEY extractKey(VALUE value); - + public void add(VALUE value) { store.add(extractKey(value), value); added(value); } - + public VALUE get(KEY key) { return store.get(key); } @@ -34,34 +32,33 @@ public void update(VALUE value) { updated(value); store.update(extractKey(value), value); } - + public void remove(KEY key) { VALUE old = get(key); store.remove(key); if(old != null) removed(old); } - + public void loadData() { store.loadData(); - for(VALUE value : getAll()) { - added(value); - } + getAll().forEach(this::added); } - - public Collection getAll() { - return store.getAll(); + + public Stream getAll() { + return store.getAllKeys() + .map(store::get); } - - public Collection getAllKeys() { + + public Stream getAllKeys() { return store.getAllKeys(); } - + @Override public String toString() { return store.toString(); } - + protected abstract void removed(VALUE value); protected abstract void added(VALUE value); diff --git 
a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 840a154ac7..94d04e5e3d 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -10,7 +10,6 @@ import java.nio.file.Files; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -25,8 +24,10 @@ import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; +import java.util.stream.Stream; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; +import jakarta.validation.Validator; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.JacksonUtil; @@ -43,7 +44,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import jakarta.validation.Validator; import jetbrains.exodus.ArrayByteIterable; import jetbrains.exodus.ByteIterable; import lombok.Data; @@ -515,13 +515,13 @@ public int count() { } @Override - public Collection getAll() { - throw new UnsupportedOperationException(); + public Stream getAll() { + return store.getAllKeys().stream().map(store::get).map(this::readValue); } @Override - public Collection getAllKeys() { - throw new UnsupportedOperationException(); + public Stream getAllKeys() { + return store.getAllKeys().stream().map(this::readKey); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/WeakCachedStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/WeakCachedStore.java deleted file mode 100644 index 6a66f56d93..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/WeakCachedStore.java +++ /dev/null @@ -1,126 +0,0 @@ -package com.bakdata.conquery.io.storage.xodus.stores; - -import java.io.IOException; -import java.util.Collection; -import java.util.Optional; -import java.util.concurrent.ExecutionException; - -import com.bakdata.conquery.io.storage.Store; -import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore.IterationStatistic; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import io.dropwizard.util.Duration; -import lombok.extern.slf4j.Slf4j; - -/** - * Weakly cached store, using {@link LoadingCache} to maintain values. Is a wrapper around the supplied {@link Store}. 
- */ -@Slf4j -public class WeakCachedStore implements Store { - - private final LoadingCache> cache; - - private final Store store; - - public WeakCachedStore(Store store, Duration weakCacheDuration) { - this.store = store; - this.cache = CacheBuilder.newBuilder() - .weakValues() - .expireAfterAccess( - weakCacheDuration.getQuantity(), - weakCacheDuration.getUnit() - ) - .build(new CacheLoader>() { - @Override - public Optional load(KEY key) throws Exception { - log.trace("Needing to load entry "+key+" in "+this); - return Optional.ofNullable(store.get(key)); - } - }); - } - - - @Override - public void add(KEY key, VALUE value) { - try { - Optional old = cache.get(key); - if(old.isPresent()) { - throw new IllegalStateException("The id "+key+" is already part of this store"); - } - cache.put(key, Optional.of(value)); - store.add(key, value); - } - catch(ExecutionException e) { - throw new RuntimeException("Failed to load entry for key "+key, e); - } - } - - @Override - public VALUE get(KEY key) { - try { - return cache.get(key).orElse(null); - } - catch (ExecutionException e) { - throw new RuntimeException("Failed to load entry for key "+key, e); - } - } - - @Override - public IterationStatistic forEach(StoreEntryConsumer consumer) { - throw new UnsupportedOperationException(); - } - - @Override - public void update(KEY key, VALUE value) { - cache.put(key, Optional.of(value)); - store.update(key, value); - } - - @Override - public void remove(KEY key) { - cache.invalidate(key); - store.remove(key); - } - - @Override - public int count() { - return store.count(); - } - - @Override - public void loadData() {} - - @Override - public Collection getAll() { - return store.getAll(); - } - - @Override - public Collection getAllKeys() { - return store.getAllKeys(); - } - - @Override - public String toString() { - return "weakcached "+store.toString(); - } - - - @Override - public void clear() { - cache.invalidateAll(); - store.clear(); - } - - @Override - public void removeStore() { - cache.invalidateAll(); - store.removeStore(); - } - - @Override - public void close() throws IOException { - store.close(); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/XodusStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/XodusStore.java index 95c0777b26..eb9acab3ac 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/XodusStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/XodusStore.java @@ -1,5 +1,7 @@ package com.bakdata.conquery.io.storage.xodus.stores; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; @@ -77,6 +79,18 @@ public void forEach(BiConsumer consumer) { } } + public List getAllKeys() { + return environment.computeInReadonlyTransaction(txn -> { + List keys = new ArrayList<>(); + try (Cursor c = store.openCursor(txn)) { + while (c.getNext()) { + keys.add(c.getKey()); + } + return keys; + } + }); + } + public boolean update(ByteIterable key, ByteIterable value) { return environment.computeInTransaction(t -> store.put(t, key, value)); } diff --git a/backend/src/main/java/com/bakdata/conquery/metrics/ExecutionMetrics.java b/backend/src/main/java/com/bakdata/conquery/metrics/ExecutionMetrics.java index 748c92d2d5..0032246cdb 100644 --- a/backend/src/main/java/com/bakdata/conquery/metrics/ExecutionMetrics.java +++ 
b/backend/src/main/java/com/bakdata/conquery/metrics/ExecutionMetrics.java @@ -4,8 +4,11 @@ import java.util.HashSet; import java.util.Set; +import com.bakdata.conquery.apiv1.query.CQElement; import com.bakdata.conquery.apiv1.query.QueryDescription; -import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; @@ -15,10 +18,6 @@ import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.apiv1.query.CQElement; -import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.query.visitor.QueryVisitor; import com.codahale.metrics.Counter; import com.codahale.metrics.Histogram; @@ -115,23 +114,23 @@ public void accept(Visitable element) { } if (element instanceof CQConcept) { - for (Select select : ((CQConcept) element).getSelects()) { - doReport(CLASSES, select.getClass().getSimpleName()); - doReport(SELECTS, select.getId().toString()); + for (SelectId select : ((CQConcept) element).getSelects()) { + doReport(CLASSES, select.resolve().getClass().getSimpleName()); + doReport(SELECTS, select.toString()); } // Report classes and ids used of filters and selects for (CQTable table : ((CQConcept) element).getTables()) { for (FilterValue filter : table.getFilters()) { - doReport(CLASSES, filter.getFilter().getClass().getSimpleName()); - doReport(FILTERS, filter.getFilter().getId().toString()); + doReport(CLASSES, filter.getFilter().resolve().getClass().getSimpleName()); + doReport(FILTERS, filter.getFilter().toString()); } - for (Select select : table.getSelects()) { - doReport(CLASSES, select.getClass().getSimpleName()); + for (SelectId select : table.getSelects()) { + doReport(CLASSES, select.resolve().getClass().getSimpleName()); - doReport(SELECTS, select.getId().toString()); + doReport(SELECTS, select.toString()); } } } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java index c623aa055e..f7b654083f 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java @@ -24,22 +24,19 @@ */ public interface NamespaceHandler { - N createNamespace(NamespaceStorage namespaceStorage, MetaStorage metaStorage, DatasetRegistry datasetRegistry, Environment environment); - - void removeNamespace(DatasetId id, N namespace); - /** * Creates the {@link NamespaceSetupData} that is shared by all {@link Namespace} types. 
*/ - static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final ConqueryConfig config, final InternalMapperFactory internalMapperFactory, DatasetRegistry datasetRegistry) { + static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final ConqueryConfig config, final InternalMapperFactory internalMapperFactory, DatasetRegistry datasetRegistry, Environment environment) { List injectables = new ArrayList<>(); injectables.add(datasetRegistry); injectables.add(storage); - ObjectMapper persistenceMapper = internalMapperFactory.createNamespacePersistenceMapper(datasetRegistry); - ObjectMapper communicationMapper = internalMapperFactory.createManagerCommunicationMapper(datasetRegistry); - ObjectMapper preprocessMapper = internalMapperFactory.createPreprocessMapper(datasetRegistry); + ObjectMapper persistenceMapper = internalMapperFactory.createNamespacePersistenceMapper(storage); + ObjectMapper communicationMapper = internalMapperFactory.createNamespaceCommunicationMapper(storage); + ObjectMapper preprocessMapper = internalMapperFactory.createPreprocessMapper(storage); + // Todo remove these injectables.forEach(i -> { i.injectInto(persistenceMapper); i.injectInto(communicationMapper); @@ -48,7 +45,7 @@ static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final C // Each store needs its own mapper because each injects its own registry - storage.openStores(Jackson.copyMapperAndInjectables(persistenceMapper)); + storage.openStores(Jackson.copyMapperAndInjectables(persistenceMapper), environment.metrics()); storage.loadData(); JobManager jobManager = new JobManager(storage.getDataset().getName(), config.isFailOnError()); @@ -57,4 +54,8 @@ static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final C return new NamespaceSetupData(injectables, communicationMapper, preprocessMapper, jobManager, filterSearch); } + N createNamespace(NamespaceStorage namespaceStorage, MetaStorage metaStorage, DatasetRegistry datasetRegistry, Environment environment); + + void removeNamespace(DatasetId id, N namespace); + } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java index fe831a4733..e78ec3ce91 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java @@ -3,6 +3,7 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; /** * Listener for updates of stored entities in ConQuery. 
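For orientation, here is an illustrative sketch (not part of the patch) of how a call site consumes the Stream-returning accessors that replace the old Collection-returning ones: the storage no longer materializes the values, so callers terminate the stream themselves (collect, count, forEach). The class and method names below are made up; the accessors and the id-based comparison are the ones introduced in the NamespacedStorage interface above and in the rewritten clearDependentConcepts() further down.

// Sketch only: assumes the NamespacedStorage API as defined in this patch.
import java.util.List;
import java.util.stream.Collectors;

import com.bakdata.conquery.io.storage.NamespacedStorage;
import com.bakdata.conquery.models.datasets.Table;
import com.bakdata.conquery.models.identifiable.ids.specific.TableId;

class StorageStreamUsageSketch {

	/** Collect all table ids; getTables() now streams values resolved lazily from the store. */
	static List<TableId> tableIds(NamespacedStorage storage) {
		return storage.getTables()
				.map(Table::getId)
				.collect(Collectors.toList());
	}

	/** Count connectors attached to a given table, comparing ids rather than resolved Table objects. */
	static long connectorsOnTable(NamespacedStorage storage, TableId tableId) {
		return storage.getAllConcepts()
				.flatMap(concept -> concept.getConnectors().stream())
				.filter(connector -> connector.getResolvedTableId().equals(tableId))
				.count();
	}
}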
@@ -19,6 +20,6 @@ public interface StorageListener { void onAddConcept(Concept concept); - void onDeleteConcept(Concept concept); + void onDeleteConcept(ConceptId concept); } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionManager.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionManager.java index 3208880ab7..53d0602f52 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionManager.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionManager.java @@ -18,8 +18,10 @@ import com.bakdata.conquery.models.messages.SlowMessage; import com.bakdata.conquery.models.messages.network.MessageToManagerNode; import com.bakdata.conquery.models.messages.network.NetworkMessageContext; +import com.bakdata.conquery.models.messages.network.specific.ForwardToNamespace; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.DistributedNamespace; +import com.bakdata.conquery.models.worker.ShardNodeInformation; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ -36,7 +38,6 @@ @RequiredArgsConstructor public class ClusterConnectionManager extends IoHandlerAdapter { - private IoAcceptor acceptor; private final DatasetRegistry datasetRegistry; private final JobManager jobManager; private final Validator validator; @@ -44,6 +45,7 @@ public class ClusterConnectionManager extends IoHandlerAdapter { private final InternalMapperFactory internalMapperFactory; @Getter private final ClusterState clusterState; + private IoAcceptor acceptor; @Override public void sessionOpened(IoSession session) { @@ -62,28 +64,43 @@ public void exceptionCaught(IoSession session, Throwable cause) { @Override public void messageReceived(IoSession session, Object message) { - if (message instanceof MessageToManagerNode toManagerNode) { - - log.trace("ManagerNode received {} from {}", message.getClass().getSimpleName(), session.getRemoteAddress()); - - Job job = new ReactingJob<>(toManagerNode, - new NetworkMessageContext.ManagerNodeNetworkContext( - new NetworkSession(session), - datasetRegistry, - clusterState, - config.getCluster().getBackpressure() - )); - - if (toManagerNode instanceof SlowMessage slowMessage) { - slowMessage.setProgressReporter(job.getProgressReporter()); - jobManager.addSlowJob(job); - } - else { - jobManager.addFastJob(job); - } + if (!(message instanceof MessageToManagerNode toManagerNode)) { + log.error("Unknown message type {} in {}", message.getClass(), message); + return; + + } + + final ShardNodeInformation shardNodeInformation = clusterState.getShardNodes().get(session.getRemoteAddress()); + + final NetworkSession nwSession; + + if (shardNodeInformation == null) { + // In case the shard is not yet registered, we wont have a shardNodeInformation to pull the session from + nwSession = new NetworkSession(session); } else { - log.error("Unknown message type {} in {}", message.getClass(), message); + nwSession = shardNodeInformation.getSession(); + } + + log.trace("ManagerNode received {} from {}", message.getClass().getSimpleName(), session.getRemoteAddress()); + + final Job job = new ReactingJob<>(toManagerNode, + new NetworkMessageContext.ManagerNodeNetworkContext(nwSession, + datasetRegistry, + clusterState, + config.getCluster().getBackpressure() + ) + ); + + if (toManagerNode instanceof ForwardToNamespace nsMesg) { + 
datasetRegistry.get(nsMesg.getDatasetId()).getJobManager().addSlowJob(job); + } + else if (toManagerNode instanceof SlowMessage slowMessage) { + slowMessage.setProgressReporter(job.getProgressReporter()); + jobManager.addSlowJob(job); + } + else { + jobManager.addFastJob(job); } } @@ -91,9 +108,9 @@ public void start() throws IOException { acceptor = new NioSocketAcceptor(); acceptor.getFilterChain().addFirst("mdc", new MdcFilter("Manager[%s]")); - ObjectMapper om = internalMapperFactory.createManagerCommunicationMapper(datasetRegistry); + final ObjectMapper om = internalMapperFactory.createManagerCommunicationMapper(datasetRegistry); - BinaryJacksonCoder coder = new BinaryJacksonCoder(datasetRegistry, validator, om); + final BinaryJacksonCoder coder = new BinaryJacksonCoder(datasetRegistry, validator, om); acceptor.getFilterChain().addLast("codec", new CQProtocolCodecFilter(new ChunkWriter(coder), new ChunkReader(coder, om))); acceptor.setHandler(this); acceptor.getSessionConfig().setAll(config.getCluster().getMina()); diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionShard.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionShard.java index 0511b7048b..71c007fdc4 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionShard.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionShard.java @@ -21,7 +21,6 @@ import com.bakdata.conquery.models.messages.network.specific.AddShardNode; import com.bakdata.conquery.models.messages.network.specific.RegisterWorker; import com.bakdata.conquery.models.messages.network.specific.UpdateJobManagerStatus; -import com.bakdata.conquery.models.worker.IdResolveContext; import com.bakdata.conquery.models.worker.ShardWorkers; import com.bakdata.conquery.models.worker.Worker; import com.bakdata.conquery.models.worker.WorkerInformation; @@ -70,7 +69,7 @@ public void sessionOpened(IoSession session) { // Schedule ShardNode and Worker registration, so we don't block this thread which does the actual sending scheduler.schedule(() -> { - context = new NetworkMessageContext.ShardNodeNetworkContext(networkSession, workers, config, environment.getValidator()); + context = new NetworkMessageContext.ShardNodeNetworkContext(networkSession, workers, config, environment); log.info("Connected to ManagerNode @ `{}`", session.getRemoteAddress()); // Authenticate with ManagerNode @@ -85,7 +84,6 @@ public void sessionOpened(IoSession session) { }, 0, TimeUnit.SECONDS); - scheduleIdleLogger(scheduler, session, config.getCluster().getIdleTimeOut()); } @@ -114,7 +112,7 @@ public void sessionClosed(IoSession session) { } private void connectToCluster() { - InetSocketAddress address = new InetSocketAddress( + final InetSocketAddress address = new InetSocketAddress( config.getCluster().getManagerURL().getHostAddress(), config.getCluster().getPort() ); @@ -168,12 +166,12 @@ private void disconnectFromCluster() { } @NotNull - private NioSocketConnector getClusterConnector(IdResolveContext workers) { + private NioSocketConnector getClusterConnector(ShardWorkers workers) { ObjectMapper om = internalMapperFactory.createShardCommunicationMapper(); - NioSocketConnector connector = new NioSocketConnector(); + final NioSocketConnector connector = new NioSocketConnector(); - BinaryJacksonCoder coder = new BinaryJacksonCoder(workers, environment.getValidator(), om); + final BinaryJacksonCoder coder = new BinaryJacksonCoder(workers, environment.getValidator(), om); 
connector.getFilterChain().addFirst("mdc", new MdcFilter("Shard[%s]")); connector.getFilterChain().addLast("codec", new CQProtocolCodecFilter(new ChunkWriter(coder), new ChunkReader(coder, om))); connector.setHandler(this); @@ -268,10 +266,6 @@ private void reportJobManagerStatus() { } } - public boolean isBusy() { - return jobManager.isSlowWorkerBusy(); - } - @Override public void stop() throws Exception { // close scheduler before disconnect to avoid scheduled reconnects @@ -279,4 +273,8 @@ public void stop() throws Exception { disconnectFromCluster(); jobManager.close(); } + + public boolean isBusy() { + return jobManager.isSlowWorkerBusy(); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java index f5a1b5179b..3d7ca63abb 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java @@ -6,6 +6,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Stream; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.WebApplicationException; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.mode.ImportHandler; import com.bakdata.conquery.models.datasets.Import; @@ -27,10 +32,6 @@ import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.models.worker.WorkerInformation; -import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.NotFoundException; -import jakarta.ws.rs.WebApplicationException; -import jakarta.ws.rs.core.Response; import lombok.AllArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @@ -47,10 +48,10 @@ public class ClusterImportHandler implements ImportHandler { @SneakyThrows @Override public void updateImport(Namespace namespace, InputStream inputStream) { - handleImport(namespace, inputStream, true); + handleImport(namespace, inputStream, true, datasetRegistry); } - private static void handleImport(Namespace namespace, InputStream inputStream, boolean update) throws IOException { + private static void handleImport(Namespace namespace, InputStream inputStream, boolean update, DatasetRegistry datasetRegistry) throws IOException { try (PreprocessedReader parser = new PreprocessedReader(inputStream, namespace.getPreprocessMapper())) { // We parse semi-manually as the incoming file consist of multiple documents we read progressively: // 1) the header to check metadata @@ -60,7 +61,7 @@ private static void handleImport(Namespace namespace, InputStream inputStream, b final Table table = validateImportable(((DistributedNamespace) namespace), header, update); - readAndDistributeImport(((DistributedNamespace) namespace), table, header, parser); + readAndDistributeImport(((DistributedNamespace) namespace), table, header, parser, datasetRegistry); clearDependentConcepts(namespace.getStorage().getAllConcepts(), table); } @@ -97,7 +98,7 @@ private static Table validateImportable(DistributedNamespace namespace, Preproce } // before updating the import, make sure that all workers removed the prior import - namespace.getWorkerHandler().sendToAll(new RemoveImportJob(processedImport)); + namespace.getWorkerHandler().sendToAll(new RemoveImportJob(processedImport.getId())); namespace.getStorage().removeImport(importId); } 
else if (processedImport != null) { @@ -107,7 +108,7 @@ else if (processedImport != null) { return table; } - private static void readAndDistributeImport(DistributedNamespace namespace, Table table, PreprocessedHeader header, PreprocessedReader reader) { + private static void readAndDistributeImport(DistributedNamespace namespace, Table table, PreprocessedHeader header, PreprocessedReader reader, DatasetRegistry datasetRegistry) { final TableId tableId = new TableId(namespace.getDataset().getId(), header.getTable()); final ImportId importId = new ImportId(tableId, header.getName()); @@ -149,16 +150,12 @@ private static void readAndDistributeImport(DistributedNamespace namespace, Tabl } - private static void clearDependentConcepts(Collection> allConcepts, Table table) { - for (Concept c : allConcepts) { - for (Connector con : c.getConnectors()) { - if (!con.getTable().equals(table)) { - continue; - } - - con.getConcept().clearMatchingStats(); - } - } + private static void clearDependentConcepts(Stream> allConcepts, Table table) { + allConcepts.map(Concept::getConnectors) + .flatMap(List::stream) + .filter(con -> con.getResolvedTableId().equals(table.getId())) + .map(Connector::getConcept) + .forEach(Concept::clearMatchingStats); } /** @@ -177,19 +174,19 @@ public static WorkerId sendBucket(Bucket bucket, WorkerInformation responsibleWo @SneakyThrows @Override public void addImport(Namespace namespace, InputStream inputStream) { - handleImport(namespace, inputStream, false); + handleImport(namespace, inputStream, false, datasetRegistry); } @Override public void deleteImport(Import imp) { - final DatasetId id = imp.getTable().getDataset().getId(); + final DatasetId id = imp.getTable().getDataset(); final DistributedNamespace namespace = datasetRegistry.get(id); - clearDependentConcepts(namespace.getStorage().getAllConcepts(), imp.getTable()); + clearDependentConcepts(namespace.getStorage().getAllConcepts(), imp.getTable().resolve()); namespace.getStorage().removeImport(imp.getId()); - namespace.getWorkerHandler().sendToAll(new RemoveImportJob(imp)); + namespace.getWorkerHandler().sendToAll(new RemoveImportJob(imp.getId())); // Remove bucket assignments for consistency report namespace.getWorkerHandler().removeBucketAssignmentsForImportFormWorkers(imp); diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java index 560ab1465b..c8bf780127 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java @@ -24,7 +24,7 @@ public class ClusterNamespaceHandler implements NamespaceHandler datasetRegistry, Environment environment) { - NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, internalMapperFactory, datasetRegistry); + NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, internalMapperFactory, datasetRegistry, environment); DistributedExecutionManager executionManager = new DistributedExecutionManager(metaStorage, datasetRegistry, clusterState); WorkerHandler workerHandler = new WorkerHandler(namespaceData.getCommunicationMapper(), namespaceStorage); clusterState.getWorkerHandlers().put(namespaceStorage.getDataset().getId(), workerHandler); @@ -49,7 +49,7 @@ public DistributedNamespace createNamespace(NamespaceStorage namespaceStorage, M @Override 
public void removeNamespace(DatasetId id, DistributedNamespace namespace) { - clusterState.getShardNodes().values().forEach(shardNode -> shardNode.send(new RemoveWorker(namespace.getDataset()))); + clusterState.getShardNodes().values().forEach(shardNode -> shardNode.send(new RemoveWorker(namespace.getDataset().getId()))); clusterState.getWorkerHandlers().keySet().removeIf(worker -> worker.getDataset().getDataset().equals(id)); } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java index 9d8360a383..7bdc4fa948 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java @@ -4,6 +4,7 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.jobs.SimpleJob; import com.bakdata.conquery.models.messages.namespaces.specific.RemoveConcept; @@ -29,35 +30,35 @@ class ClusterStorageListener implements StorageListener { @Override public void onAddSecondaryId(SecondaryIdDescription secondaryId) { - datasetRegistry.get(secondaryId.getDataset().getId()).getWorkerHandler().sendToAll(new UpdateSecondaryId(secondaryId)); + datasetRegistry.get(secondaryId.getDataset()).getWorkerHandler().sendToAll(new UpdateSecondaryId(secondaryId)); } @Override public void onDeleteSecondaryId(SecondaryIdDescription secondaryId) { - datasetRegistry.get(secondaryId.getDataset().getId()).getWorkerHandler().sendToAll(new RemoveSecondaryId(secondaryId)); + datasetRegistry.get(secondaryId.getDataset()).getWorkerHandler().sendToAll(new RemoveSecondaryId(secondaryId.getId())); } @Override public void onAddTable(Table table) { - datasetRegistry.get(table.getDataset().getId()).getWorkerHandler().sendToAll(new UpdateTable(table)); + datasetRegistry.get(table.getDataset()).getWorkerHandler().sendToAll(new UpdateTable(table)); } @Override public void onRemoveTable(Table table) { - datasetRegistry.get(table.getDataset().getId()).getWorkerHandler().sendToAll(new RemoveTable(table)); + datasetRegistry.get(table.getDataset()).getWorkerHandler().sendToAll(new RemoveTable(table.getId())); } @Override public void onAddConcept(Concept concept) { - WorkerHandler handler = datasetRegistry.get(concept.getDataset().getId()).getWorkerHandler(); + WorkerHandler handler = datasetRegistry.get(concept.getDataset()).getWorkerHandler(); SimpleJob simpleJob = new SimpleJob(String.format("sendToAll : Add %s ", concept.getId()), () -> handler.sendToAll(new UpdateConcept(concept))); jobManager.addSlowJob(simpleJob); } @Override - public void onDeleteConcept(Concept concept) { - WorkerHandler handler = datasetRegistry.get(concept.getDataset().getId()).getWorkerHandler(); - SimpleJob simpleJob = new SimpleJob("sendToAll: remove " + concept.getId(), () -> handler.sendToAll(new RemoveConcept(concept))); + public void onDeleteConcept(ConceptId concept) { + WorkerHandler handler = datasetRegistry.get(concept.getDataset()).getWorkerHandler(); + SimpleJob simpleJob = new SimpleJob("sendToAll: remove " + concept, () -> handler.sendToAll(new RemoveConcept(concept))); jobManager.addSlowJob(simpleJob); } } diff --git 
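// ClusterStorageListener above now forwards plain ids (RemoveTable(table.getId()),
// onDeleteConcept(ConceptId)) and looks up the namespace by the DatasetId the entity
// already carries. A hedged sketch of such an id-oriented listener; the interfaces
// below are simplified stand-ins, not the real StorageListener or DatasetRegistry.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class IdListenerSketch {

	/** Simplified stand-in for a per-dataset namespace that can broadcast to workers. */
	interface Namespace {
		void sendToAll(Object message);
	}

	/** Listener callbacks take ids rather than resolved entities. */
	interface StorageListener {
		void onRemoveTable(String datasetId, String tableId);
		void onDeleteConcept(String datasetId, String conceptId);
	}

	static final class ClusterListener implements StorageListener {
		private final Map<String, Namespace> registry = new ConcurrentHashMap<>();

		void register(String datasetId, Namespace namespace) { registry.put(datasetId, namespace); }

		@Override
		public void onRemoveTable(String datasetId, String tableId) {
			registry.get(datasetId).sendToAll("remove table " + tableId);
		}

		@Override
		public void onDeleteConcept(String datasetId, String conceptId) {
			registry.get(datasetId).sendToAll("remove concept " + conceptId);
		}
	}
}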
a/backend/src/main/java/com/bakdata/conquery/mode/cluster/InternalMapperFactory.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/InternalMapperFactory.java index 9aa102eb54..ffb80ce5f1 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/InternalMapperFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/InternalMapperFactory.java @@ -1,17 +1,18 @@ package com.bakdata.conquery.mode.cluster; -import jakarta.validation.Validator; - import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.WorkerStorage; import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.identifiable.ids.IIdInterner; import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.ShardWorkers; import com.fasterxml.jackson.databind.DeserializationConfig; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationConfig; +import jakarta.validation.Validator; public record InternalMapperFactory(ConqueryConfig config, Validator validator) { @@ -19,26 +20,62 @@ public ObjectMapper createShardCommunicationMapper() { return createInternalObjectMapper(View.InternalCommunication.class); } - public ObjectMapper createWorkerCommunicationMapper(ShardWorkers workers) { + /** + * @return a preconfigured binary object mapper + */ + private ObjectMapper createInternalObjectMapper(Class viewClass) { + final ObjectMapper objectMapper = config.configureObjectMapper(Jackson.copyMapperAndInjectables(Jackson.BINARY_MAPPER)); + + final MutableInjectableValues injectableValues = new MutableInjectableValues(); + objectMapper.setInjectableValues(injectableValues); + + injectableValues.add(Validator.class, validator); + config.injectInto(objectMapper); + new IIdInterner().injectInto(objectMapper); + + if (viewClass != null) { + setViewClass(objectMapper, viewClass); + } + + return objectMapper; + } + + public static void setViewClass(ObjectMapper objectMapper, Class viewClass) { + // Set serialization config + SerializationConfig serializationConfig = objectMapper.getSerializationConfig(); + + serializationConfig = serializationConfig.withView(viewClass); + + objectMapper.setConfig(serializationConfig); + + // Set deserialization config + DeserializationConfig deserializationConfig = objectMapper.getDeserializationConfig(); + + deserializationConfig = deserializationConfig.withView(viewClass); + + objectMapper.setConfig(deserializationConfig); + } + + public ObjectMapper createWorkerCommunicationMapper(WorkerStorage storage) { final ObjectMapper objectMapper = createInternalObjectMapper(View.InternalCommunication.class); - workers.injectInto(objectMapper); + storage.injectInto(objectMapper); return objectMapper; } - public ObjectMapper createWorkerPersistenceMapper(ShardWorkers workers) { + public ObjectMapper createWorkerPersistenceMapper(WorkerStorage storage) { final ObjectMapper objectMapper = createInternalObjectMapper(View.Persistence.Shard.class); - workers.injectInto(objectMapper); + storage.injectInto(objectMapper); return objectMapper; } - public ObjectMapper createNamespacePersistenceMapper(DatasetRegistry datasetRegistry) { + public ObjectMapper createNamespacePersistenceMapper(NamespaceStorage namespaceStorage) { final 
ObjectMapper objectMapper = createInternalObjectMapper(View.Persistence.Manager.class); - datasetRegistry.injectInto(objectMapper); + namespaceStorage.injectInto(objectMapper); return objectMapper; } @@ -60,53 +97,22 @@ public ObjectMapper createManagerCommunicationMapper(DatasetRegistry datasetR return objectMapper; } + public ObjectMapper createNamespaceCommunicationMapper(NamespaceStorage namespaceStorage) { + ObjectMapper objectMapper = createInternalObjectMapper(View.InternalCommunication.class); - - public ObjectMapper createPreprocessMapper(DatasetRegistry datasetRegistry) { - ObjectMapper objectMapper = createInternalObjectMapper(null); - - datasetRegistry.injectInto(objectMapper); + namespaceStorage.injectInto(objectMapper); return objectMapper; } - /** - * @return a preconfigured binary object mapper - */ - private ObjectMapper createInternalObjectMapper(Class viewClass) { - final ObjectMapper objectMapper = config.configureObjectMapper(Jackson.copyMapperAndInjectables(Jackson.BINARY_MAPPER)); - - final MutableInjectableValues injectableValues = new MutableInjectableValues(); - objectMapper.setInjectableValues(injectableValues); - - injectableValues.add(Validator.class, validator); - config.injectInto(objectMapper); + public ObjectMapper createPreprocessMapper(NamespaceStorage namespaceStorage) { + ObjectMapper objectMapper = createInternalObjectMapper(null); - if (viewClass != null) { - setViewClass(objectMapper, viewClass); - } + namespaceStorage.injectInto(objectMapper); return objectMapper; } - public static void setViewClass(ObjectMapper objectMapper, Class viewClass) { - // Set serialization config - SerializationConfig serializationConfig = objectMapper.getSerializationConfig(); - - serializationConfig = serializationConfig.withView(viewClass); - - objectMapper.setConfig(serializationConfig); - - // Set deserialization config - DeserializationConfig deserializationConfig = objectMapper.getDeserializationConfig(); - - deserializationConfig = deserializationConfig.withView(viewClass); - - objectMapper.setConfig(deserializationConfig); - } - - - /** * Customize the mapper from the environment, that is used in the REST-API. 
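// The setViewClass helper moved around in the InternalMapperFactory hunk above pins a
// Jackson view on both the serialization and the deserialization config of an ObjectMapper.
// A self-contained example of that Jackson mechanism; the Views and Document classes are
// invented for illustration only.
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.databind.DeserializationConfig;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationConfig;

final class ViewMapperSketch {

	static class Views {
		static class Public {}
		static class Internal extends Public {}
	}

	static class Document {
		@JsonView(Views.Public.class)   public String title = "hello";
		@JsonView(Views.Internal.class) public String secret = "s3cr3t";
	}

	/** Same idea as setViewClass: activate one view for reads and writes alike. */
	static void setViewClass(ObjectMapper mapper, Class<?> view) {
		SerializationConfig ser = mapper.getSerializationConfig().withView(view);
		mapper.setConfig(ser);

		DeserializationConfig deser = mapper.getDeserializationConfig().withView(view);
		mapper.setConfig(deser);
	}

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		setViewClass(mapper, Views.Public.class);
		// "secret" is omitted because only the Public view is active
		System.out.println(mapper.writeValueAsString(new Document()));
	}
}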
* In contrast to the internal object mapper this uses textual JSON representation @@ -125,6 +131,7 @@ public void customizeApiObjectMapper(ObjectMapper objectMapper, DatasetRegistry< objectMapper.setInjectableValues(injectableValues); injectableValues.add(Validator.class, validator); + new IIdInterner().injectInto(objectMapper); datasetRegistry.injectInto(objectMapper); metaStorage.injectInto(objectMapper); config.injectInto(objectMapper); diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java index f9f808b392..6a40a74705 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java @@ -39,7 +39,7 @@ public class LocalNamespaceHandler implements NamespaceHandler { @Override public LocalNamespace createNamespace(NamespaceStorage namespaceStorage, MetaStorage metaStorage, DatasetRegistry datasetRegistry, Environment environment) { - NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, internalMapperFactory, datasetRegistry); + NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, internalMapperFactory, datasetRegistry, environment); IdColumnConfig idColumns = config.getIdColumns(); SqlConnectorConfig sqlConnectorConfig = config.getSqlConnectorConfig(); diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java index 8d53dde819..ff55603cef 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java @@ -4,6 +4,7 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; public class LocalStorageListener implements StorageListener { @@ -30,6 +31,6 @@ public void onAddConcept(Concept concept) { } @Override - public void onDeleteConcept(Concept concept) { + public void onDeleteConcept(ConceptId concept) { } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationController.java b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationController.java index a6490c4b73..655e93da5b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationController.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationController.java @@ -225,7 +225,7 @@ public static User flatCopyUser(@NonNull User originUser, String namePrefix, @No // Give read permission to all executions the original user owned copiedPermission.addAll( - storage.getAllExecutions().stream() + storage.getAllExecutions() .filter(originUser::isOwner) .map(exc -> exc.createPermission(Ability.READ.asSet())) .collect(Collectors.toSet()) @@ -233,7 +233,7 @@ public static User flatCopyUser(@NonNull User originUser, String namePrefix, @No // Give read permission to all form configs the original user owned copiedPermission.addAll( - storage.getAllFormConfigs().stream() + storage.getAllFormConfigs() .filter(originUser::isOwner) .map(conf -> conf.createPermission(Ability.READ.asSet())) .collect(Collectors.toSet()) diff 
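// flatCopyUser above now filters the Stream that storage.getAllExecutions() and
// getAllFormConfigs() return directly, with no intermediate .stream() call. A toy
// stand-in for that calling convention; the types here are invented and only mirror
// the shape of the real accessors.
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

final class StreamStorageSketch {

	record Execution(String owner) {
		String createReadPermission() { return "execution:" + owner + ":read"; }
	}

	/** Toy storage: accessors hand out Streams instead of materialized collections. */
	static final class MetaStorage {
		private final List<Execution> executions =
				List.of(new Execution("alice"), new Execution("bob"));

		Stream<Execution> getAllExecutions() { return executions.stream(); }
	}

	static Set<String> copyReadPermissions(MetaStorage storage, String owner) {
		return storage.getAllExecutions()          // already a Stream
					  .filter(exec -> exec.owner().equals(owner))
					  .map(Execution::createReadPermission)
					  .collect(Collectors.toSet());
	}

	public static void main(String[] args) {
		System.out.println(copyReadPermissions(new MetaStorage(), "alice"));
	}
}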
--git a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java index 788e856cc7..007a1195af 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java @@ -39,7 +39,7 @@ public class AuthorizationHelper { public static List getGroupsOf(@NonNull Subject subject, @NonNull MetaStorage storage){ - return storage.getAllGroups().stream() + return storage.getAllGroups() .filter(g -> g.getMembers().contains(subject.getId())) .sorted() .collect(Collectors.toList()); @@ -78,11 +78,11 @@ public static Multimap getEffectiveUserPermissions(U } public static List getUsersByRole(MetaStorage storage, Role role) { - return storage.getAllUsers().stream().filter(u -> u.getRoles().contains(role.getId())).collect(Collectors.toList()); + return storage.getAllUsers().filter(u -> u.getRoles().contains(role.getId())).collect(Collectors.toList()); } public static List getGroupsByRole(MetaStorage storage, Role role) { - return storage.getAllGroups().stream().filter(g -> g.getRoles().contains(role.getId())).collect(Collectors.toList()); + return storage.getAllGroups().filter(g -> g.getRoles().contains(role.getId())).collect(Collectors.toList()); } /** @@ -95,9 +95,10 @@ public static void authorizeDownloadDatasets(@NonNull Subject subject, @NonNull Set datasets = collector.getIdentifiables() - .stream() - .map(NamespacedIdentifiable::getDataset) - .collect(Collectors.toSet()); + .stream() + .map(NamespacedIdentifiable::getDataset) + .map(DatasetId::resolve) + .collect(Collectors.toSet()); subject.authorize(datasets, Ability.DOWNLOAD); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java b/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java index b2afa47540..ca4759999b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java @@ -4,7 +4,6 @@ import java.io.IOException; import java.util.List; import java.util.concurrent.Executors; - import jakarta.validation.Validator; import com.bakdata.conquery.Conquery; @@ -13,7 +12,7 @@ import com.bakdata.conquery.apiv1.auth.PasswordHashCredential; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.Store; -import com.bakdata.conquery.io.storage.StoreMappings; +import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore; import com.bakdata.conquery.io.storage.xodus.stores.XodusStore; import com.bakdata.conquery.models.auth.ConqueryAuthenticationInfo; @@ -101,7 +100,7 @@ protected void onInit() { // Open/create the database/store File passwordStoreFile = new File(storageDir, storeName); passwordEnvironment = Environments.newInstance(passwordStoreFile, passwordStoreConfig.createConfig()); - passwordStore = StoreMappings.cached( + passwordStore = new CachedStore<>( new SerializingStore<>( new XodusStore( passwordEnvironment, @@ -216,7 +215,7 @@ public boolean removeUser(User user) { @Override public List getAllUsers() { - return ImmutableList.copyOf(passwordStore.getAllKeys()); + return ImmutableList.copyOf(passwordStore.getAllKeys().toList()); } diff --git 
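// LocalAuthenticationRealm above now composes the password store directly as
// new CachedStore<>(new SerializingStore<>(new XodusStore(...))). A generic sketch of
// that decorator stack with toy types; the real layers serialize to Xodus, here the
// inner layers only delegate so the composition itself stays visible.
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

final class StoreStackSketch {

	interface Store<K, V> {
		void add(K key, V value);
		Optional<V> get(K key);
	}

	/** Innermost layer: stands in for the XodusStore-backed byte store. */
	static final class BackingStore<K, V> implements Store<K, V> {
		private final Map<K, V> data = new HashMap<>();
		public void add(K key, V value) { data.put(key, value); }
		public Optional<V> get(K key)   { return Optional.ofNullable(data.get(key)); }
	}

	/** Middle layer: stands in for SerializingStore (here it only delegates). */
	static final class SerializingLayer<K, V> implements Store<K, V> {
		private final Store<K, V> inner;
		SerializingLayer(Store<K, V> inner) { this.inner = inner; }
		public void add(K key, V value)     { inner.add(key, value); }
		public Optional<V> get(K key)       { return inner.get(key); }
	}

	/** Outermost layer: stands in for CachedStore, memoizing reads. */
	static final class CachedLayer<K, V> implements Store<K, V> {
		private final Store<K, V> inner;
		private final Map<K, V> cache = new HashMap<>();
		CachedLayer(Store<K, V> inner) { this.inner = inner; }
		public void add(K key, V value) { cache.put(key, value); inner.add(key, value); }
		public Optional<V> get(K key) {
			V cached = cache.get(key);
			return cached != null ? Optional.of(cached) : inner.get(key);
		}
	}

	public static void main(String[] args) {
		Store<String, String> passwordStore =
				new CachedLayer<>(new SerializingLayer<>(new BackingStore<>()));
		passwordStore.add("alice", "hash:...");
		System.out.println(passwordStore.get("alice").isPresent()); // true
	}
}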
a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java index 7287547901..a8d76b142c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java @@ -40,7 +40,7 @@ public Group(String name, String label, MetaStorage storage) { public Set getEffectivePermissions() { Set permissions = getPermissions(); for (RoleId roleId : roles) { - permissions = Sets.union(permissions, storage.getRole(roleId).getEffectivePermissions()); + permissions = Sets.union(permissions, getMetaStorage().getRole(roleId).getEffectivePermissions()); } return permissions; } @@ -54,7 +54,7 @@ public synchronized void addMember(User user) { @Override public void updateStorage() { - storage.updateGroup(this); + getMetaStorage().updateGroup(this); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/PermissionOwner.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/PermissionOwner.java index 279e693356..85267f0846 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/PermissionOwner.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/PermissionOwner.java @@ -4,17 +4,15 @@ import java.util.Comparator; import java.util.HashSet; import java.util.Set; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.identifiable.IdentifiableImpl; import com.bakdata.conquery.models.identifiable.ids.specific.PermissionOwnerId; -import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.OptBoolean; import com.google.common.collect.ImmutableSet; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; import lombok.AccessLevel; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -62,16 +60,11 @@ public abstract class PermissionOwner permissions = new HashSet<>(); - @JacksonInject(useInput = OptBoolean.FALSE) - @NotNull - @EqualsAndHashCode.Exclude - protected MetaStorage storage; - public PermissionOwner(String name, String label, MetaStorage storage) { this.name = name; this.label = label; - this.storage = storage; + setMetaStorage(storage); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Role.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Role.java index 3d0ec8c1d8..4102263bd0 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Role.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Role.java @@ -8,8 +8,6 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import java.util.Set; - @NoArgsConstructor(access = AccessLevel.PRIVATE) public class Role extends PermissionOwner { @@ -24,14 +22,16 @@ public Set getEffectivePermissions() { } @Override - public RoleId createId() { - return new RoleId(name); + protected void updateStorage() { + getMetaStorage().updateRole(this); + } @Override - protected void updateStorage() { - storage.updateRole(this); - + public RoleId createId() { + RoleId roleId = new RoleId(name); + roleId.setMetaStorage(getMetaStorage()); + return roleId; } } diff --git 
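// The Group/PermissionOwner/Role hunks above drop the Jackson-injected MetaStorage field;
// entities reach the storage through an inherited getMetaStorage() accessor, and createId()
// now attaches that storage to the freshly built id. A hedged, simplified sketch of that
// wiring; this is not the real class hierarchy, only its shape.
import java.util.Objects;

final class OwnerStorageSketch {

	/** Toy stand-in for MetaStorage. */
	static final class MetaStorage {
		void updateRole(RoleLike role) { /* persist the role */ }
	}

	/** Toy id type that can carry a storage reference for later resolution. */
	static final class RoleId {
		final String name;
		MetaStorage metaStorage;
		RoleId(String name) { this.name = name; }
		void setMetaStorage(MetaStorage storage) { this.metaStorage = storage; }
	}

	/** Base type holds the storage once; subclasses reach it via the accessor. */
	static abstract class PermissionOwnerLike {
		private MetaStorage metaStorage;
		void setMetaStorage(MetaStorage storage) { this.metaStorage = storage; }
		MetaStorage getMetaStorage()             { return Objects.requireNonNull(metaStorage); }
	}

	static final class RoleLike extends PermissionOwnerLike {
		final String name;
		RoleLike(String name, MetaStorage storage) { this.name = name; setMetaStorage(storage); }

		void updateStorage() { getMetaStorage().updateRole(this); }

		RoleId createId() {
			RoleId id = new RoleId(name);
			id.setMetaStorage(getMetaStorage()); // the id can resolve itself later
			return id;
		}
	}
}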
a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java index 142ea20e8a..9dc10ffd61 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java @@ -5,6 +5,7 @@ import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -56,19 +57,22 @@ public User(String name, String label, MetaStorage storage) { public Set getEffectivePermissions() { Set permissions = getPermissions(); for (RoleId roleId : roles) { - Role role = storage.getRole(roleId); + Role role = getMetaStorage().getRole(roleId); if (role == null) { log.warn("Could not find role {} to gather permissions", roleId); continue; } permissions = Sets.union(permissions, role.getEffectivePermissions()); } - for (Group group : storage.getAllGroups()) { + + for (Iterator it = getMetaStorage().getAllGroups().iterator(); it.hasNext(); ) { + Group group = it.next(); if (!group.containsMember(this)) { continue; } permissions = Sets.union(permissions, group.getEffectivePermissions()); } + return permissions; } @@ -79,6 +83,11 @@ public synchronized void addRole(Role role) { } } + @Override + public void updateStorage() { + getMetaStorage().updateUser(this); + } + @Override public synchronized void removeRole(RoleId role) { if (roles.remove(role)) { @@ -91,64 +100,13 @@ public Set getRoles() { return Collections.unmodifiableSet(roles); } - @Override - public void updateStorage() { - storage.updateUser(this); - } - - @Override - public UserId createId() { - return new UserId(name); - } - - /** - * This class is non-static so it's a fixed part of the enclosing User object. - * It's protected for testing purposes only. 
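// User.getEffectivePermissions above unions the user's own permissions with those of its
// roles and of every group that contains the user. A self-contained sketch of that
// accumulation using plain JDK sets (the real code unions Guava set views of
// ConqueryPermission); the permission strings are examples only.
import java.util.HashSet;
import java.util.List;
import java.util.Set;

final class EffectivePermissionsSketch {

	record Role(Set<String> permissions) {}
	record Group(Set<String> members, Set<String> permissions) {}

	static Set<String> effectivePermissions(String userId,
											Set<String> ownPermissions,
											List<Role> roles,
											List<Group> allGroups) {
		Set<String> result = new HashSet<>(ownPermissions);

		// permissions granted through roles
		for (Role role : roles) {
			result.addAll(role.permissions());
		}

		// permissions granted through group membership
		for (Group group : allGroups) {
			if (group.members().contains(userId)) {
				result.addAll(group.permissions());
			}
		}
		return result;
	}

	public static void main(String[] args) {
		Set<String> perms = effectivePermissions(
				"alice",
				Set.of("dataset:read"),
				List.of(new Role(Set.of("forms:use"))),
				List.of(new Group(Set.of("alice"), Set.of("dataset:download"))));
		System.out.println(perms); // dataset:read, forms:use, dataset:download (unordered)
	}
}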
- */ - public class ShiroUserAdapter extends FilteredUser { - - @Getter - private final ThreadLocal authenticationInfo = - ThreadLocal.withInitial(() -> new ConqueryAuthenticationInfo(User.this, null, null, false, null)); - - @Override - public Object getPrincipal() { - return getId(); - } @Override - public void checkPermission(Permission permission) throws AuthorizationException { - SecurityUtils.getSecurityManager().checkPermission(getPrincipals(), permission); - } - - @Override - public void checkPermissions(Collection permissions) throws AuthorizationException { - SecurityUtils.getSecurityManager().checkPermissions(getPrincipals(), permissions); - } - - @Override - public PrincipalCollection getPrincipals() { - return authenticationInfo.get().getPrincipals(); - } - - @Override - public boolean isPermitted(Permission permission) { - return SecurityUtils.getSecurityManager().isPermitted(getPrincipals(), permission); - } - - @Override - public boolean[] isPermitted(List permissions) { - return SecurityUtils.getSecurityManager().isPermitted(getPrincipals(), permissions); - } - - @Override - public boolean isPermittedAll(Collection permissions) { - return SecurityUtils.getSecurityManager().isPermittedAll(getPrincipals(), permissions); + public void authorize(Set objects, Ability ability) { + for (Authorized object : objects) { + authorize(object, ability); } + } - - - - - } public void authorize(@NonNull Authorized object, @NonNull Ability ability) { + public void authorize(@NonNull Authorized object, @NonNull Ability ability) { if (isOwner(object)) { return; } @@ -156,10 +114,19 @@ public boolean isPermittedAll(Collection permissions) { shiroUserAdapter.checkPermission(object.createPermission(EnumSet.of(ability))); } - public void authorize(Set objects, Ability ability) { - for (Authorized object : objects) { - authorize(object, ability); - } + public boolean isOwner(Authorized object) { + return object instanceof Owned && getId().equals(((Owned) object).getOwner()); + } + + @Override + public UserId createId() { + UserId userId = new UserId(name); + userId.setMetaStorage(getMetaStorage()); + return userId; + } + + public boolean isPermittedAll(Collection authorized, Ability ability) { + return authorized.stream().allMatch(auth -> isPermitted(auth, ability)); } public boolean isPermitted(Authorized object, Ability ability) { @@ -170,13 +137,6 @@ public boolean isPermitted(Authorized object, Ability ability) { return shiroUserAdapter.isPermitted(object.createPermission(EnumSet.of(ability))); } - - public boolean isPermittedAll(Collection authorized, Ability ability) { - return authorized.stream() - .allMatch(auth -> isPermitted(auth, ability)); - } - - public boolean[] isPermitted(List authorizeds, Ability ability) { return authorizeds.stream() .map(auth -> isPermitted(auth, ability)) @@ -184,11 +144,6 @@ public boolean[] isPermitted(List authorizeds, Ability abi .toBooleanArray(); } - - public boolean isOwner(Authorized object) { - return object instanceof Owned && equals(((Owned) object).getOwner()); - } - @JsonIgnore @Override public boolean isDisplayLogout() { @@ -197,15 +152,14 @@ public boolean isDisplayLogout() { @JsonIgnore @Override - public void setAuthenticationInfo(ConqueryAuthenticationInfo info) { - shiroUserAdapter.getAuthenticationInfo().set(info); + public ConqueryAuthenticationInfo getAuthenticationInfo() { + return shiroUserAdapter.getAuthenticationInfo().get(); } - @JsonIgnore @Override - public ConqueryAuthenticationInfo getAuthenticationInfo() { - return 
shiroUserAdapter.getAuthenticationInfo().get(); + public void setAuthenticationInfo(ConqueryAuthenticationInfo info) { + shiroUserAdapter.getAuthenticationInfo().set(info); } @Override @@ -214,6 +168,53 @@ public User getUser() { return this; } + /** + * This class is non-static, so it's a fixed part of the enclosing User object. + * It's protected for testing purposes only. + */ + @Getter + public class ShiroUserAdapter extends FilteredUser { + + private final ThreadLocal authenticationInfo = + ThreadLocal.withInitial(() -> new ConqueryAuthenticationInfo(User.this, null, null, false, null)); + + @Override + public Object getPrincipal() { + return getId(); + } + + @Override + public void checkPermission(Permission permission) throws AuthorizationException { + SecurityUtils.getSecurityManager().checkPermission(getPrincipals(), permission); + } + + @Override + public PrincipalCollection getPrincipals() { + return authenticationInfo.get().getPrincipals(); + } + + @Override + public void checkPermissions(Collection permissions) throws AuthorizationException { + SecurityUtils.getSecurityManager().checkPermissions(getPrincipals(), permissions); + } + + @Override + public boolean isPermitted(Permission permission) { + return SecurityUtils.getSecurityManager().isPermitted(getPrincipals(), permission); + } + + @Override + public boolean[] isPermitted(List permissions) { + return SecurityUtils.getSecurityManager().isPermitted(getPrincipals(), permissions); + } + + @Override + public boolean isPermittedAll(Collection permissions) { + return SecurityUtils.getSecurityManager().isPermittedAll(getPrincipals(), permissions); + } + + + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java index f3f668a5b6..c31ed1f75a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java @@ -272,18 +272,20 @@ private Set getUserGroups(JWTClaimsSet claims, String groupIdAttribute) { private void syncGroupMappings(User user, Set mappedGroupsToDo) { // TODO mark mappings as managed by keycloak - for (Group group : storage.getAllGroups()) { - if (group.containsMember(user)) { - if (mappedGroupsToDo.contains(group)) { - // Mapping is still valid, remove from todo-list - mappedGroupsToDo.remove(group); - } - else { - // Mapping is not valid any more remove user from group - group.removeMember(user.getId()); - } - } - } + storage.getAllGroups().forEach((group) -> { + + if (group.containsMember(user)) { + if (mappedGroupsToDo.contains(group)) { + // Mapping is still valid, remove from todo-list + mappedGroupsToDo.remove(group); + } + else { + // Mapping is not valid any more remove user from group + group.removeMember(user.getId()); + } + } + } + ); for (Group group : mappedGroupsToDo) { group.addMember(user); diff --git a/backend/src/main/java/com/bakdata/conquery/models/common/Range.java b/backend/src/main/java/com/bakdata/conquery/models/common/Range.java index 49bac9bf77..c085c4aec2 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/common/Range.java +++ b/backend/src/main/java/com/bakdata/conquery/models/common/Range.java @@ -1,7 +1,9 @@ package com.bakdata.conquery.models.common; +import java.math.BigDecimal; import java.util.Optional; +import com.bakdata.conquery.models.config.FrontendConfig; import 
com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; @@ -10,7 +12,6 @@ import lombok.Getter; import lombok.NonNull; import lombok.With; -import lombok.experimental.Wither; @With @Getter @@ -20,30 +21,25 @@ public class Range implements IRange> { private final T min; private final T max; - public Range(T min, T max){ + public Range(T min, T max) { this.min = min; this.max = max; - if(!isOrdered()) { + if (!isOrdered()) { throw new IllegalArgumentException(String.format("min '%s' is not less than max '%s'", min, max)); } } - @Override - public String toString() { - if (isExactly()) { - return "[" + getMin() + "]"; - } - - if (isAtLeast()) { - return "[" + getMin() + ", +∞)"; - } - - if (isAtMost()) { - return "(-∞, " + getMax() + "]"; - } + @ValidationMethod(message = "If a range is not open in one direction, min needs to be less or equal to max") + @JsonIgnore + public final boolean isOrdered() { + return isOpen() || min.compareTo(max) <= 0; + } - return "[" + getMin() + ", " + getMax() + "]"; + @Override + @JsonIgnore + public boolean isOpen() { + return max == null || min == null; } public static > Range exactly(T exactly) { @@ -67,6 +63,23 @@ public static > Range all() { return new Range<>(null, null); } + @Override + public String toString() { + if (isExactly()) { + return "[" + getMin() + "]"; + } + + if (isAtLeast()) { + return "[" + getMin() + ", +∞)"; + } + + if (isAtMost()) { + return "(-∞, " + getMax() + "]"; + } + + return "[" + getMin() + ", " + getMax() + "]"; + } + @Override @JsonIgnore public boolean isExactly() { @@ -85,18 +98,6 @@ public boolean isAtMost() { return max != null && min == null; } - @Override - @JsonIgnore - public boolean isAll() { - return max == null && min == null; - } - - @Override - @JsonIgnore - public boolean isOpen() { - return max == null || min == null; - } - @Override public boolean contains(Range other) { if (other == null) { @@ -126,12 +127,24 @@ public boolean contains(Range other) { return contains(other.getMin()) && contains(other.getMax()); } - @ValidationMethod(message = "If a range is not open in one direction, min needs to be less or equal to max") + @Override @JsonIgnore - public final boolean isOrdered() { - return isOpen() || min.compareTo(max) <= 0; + public boolean isAll() { + return max == null && min == null; } + @Override + public boolean contains(T value) { + if (value == null) { + return false; + } + + if (getMin() != null && value.compareTo(getMin()) < 0) { + return false; + } + + return getMax() == null || value.compareTo(getMax()) <= 0; + } @Override public Range span(@NonNull Range other) { @@ -148,28 +161,15 @@ public Range span(@NonNull Range other) { return out; } - @Override - public boolean contains(T value) { - if(value == null) { - return false; - } - - if (getMin() != null && value.compareTo(getMin()) < 0) { - return false; - } - - return getMax() == null || value.compareTo(getMax()) <= 0; - } - public static class IntegerRange extends Range { public IntegerRange(Integer min, Integer max) { super(min, max); } - public static IntegerRange fromNumberRange(IRange orig){ - return new Range.IntegerRange( - Optional.ofNullable(orig.getMin()).map(Number::intValue).orElse(null), - Optional.ofNullable(orig.getMax()).map(Number::intValue).orElse(null)); + public static IntegerRange fromNumberRange(IRange orig) { + return new Range.IntegerRange(Optional.ofNullable(orig.getMin()).map(Number::intValue).orElse(null), + 
Optional.ofNullable(orig.getMax()).map(Number::intValue).orElse(null) + ); } @Override @@ -177,30 +177,30 @@ public boolean contains(Integer value) { return value != null && contains(value.intValue()); } - public boolean contains(Number value) { - return value != null && contains(value.intValue()); - } - public boolean contains(int value) { - if(getMin() != null && value < getMin()) { + if (getMin() != null && value < getMin()) { return false; } - if(getMax() != null && value > getMax()) { + if (getMax() != null && value > getMax()) { return false; } return true; } + + public boolean contains(Number value) { + return value != null && contains(value.intValue()); + } } public static class LongRange extends Range { - public LongRange (Long min, Long max) { + public LongRange(Long min, Long max) { super(min, max); } - public static LongRange fromNumberRange(IRange orig){ - return new Range.LongRange( - Optional.ofNullable(orig.getMin()).map(Number::longValue).orElse(null), - Optional.ofNullable(orig.getMax()).map(Number::longValue).orElse(null)); + public static LongRange fromNumberRange(IRange orig) { + return new Range.LongRange(Optional.ofNullable(orig.getMin()).map(Number::longValue).orElse(null), + Optional.ofNullable(orig.getMax()).map(Number::longValue).orElse(null) + ); } @Override @@ -208,19 +208,19 @@ public boolean contains(Long value) { return value != null && contains(value.longValue()); } - public boolean contains(Number value) { - return value != null && contains(value.longValue()); - } - public boolean contains(long value) { - if(getMin() != null && value < getMin()) { + if (getMin() != null && value < getMin()) { return false; } - if(getMax() != null && value > getMax()) { + if (getMax() != null && value > getMax()) { return false; } return true; } + + public boolean contains(Number value) { + return value != null && contains(value.longValue()); + } } public static class FloatRange extends Range { @@ -228,10 +228,10 @@ public FloatRange(Float min, Float max) { super(min, max); } - public static FloatRange fromNumberRange(IRange orig){ - return new Range.FloatRange( - Optional.ofNullable(orig.getMin()).map(Number::floatValue).orElse(null), - Optional.ofNullable(orig.getMax()).map(Number::floatValue).orElse(null)); + public static FloatRange fromNumberRange(IRange orig) { + return new Range.FloatRange(Optional.ofNullable(orig.getMin()).map(Number::floatValue).orElse(null), + Optional.ofNullable(orig.getMax()).map(Number::floatValue).orElse(null) + ); } @Override @@ -239,22 +239,22 @@ public boolean contains(Float value) { return value != null && contains(value.floatValue()); } - public boolean contains(Number value) { - return value != null && contains(value.floatValue()); - } - public boolean contains(float value) { - if(getMin() != null && value < getMin()) { + if (getMin() != null && value < getMin()) { return false; } - if(getMax() != null && value > getMax()) { + if (getMax() != null && value > getMax()) { return false; } - if(Float.isNaN(value)) { + if (Float.isNaN(value)) { return false; } return true; } + + public boolean contains(Number value) { + return value != null && contains(value.floatValue()); + } } public static class DoubleRange extends Range { @@ -262,10 +262,10 @@ public DoubleRange(Double min, Double max) { super(min, max); } - public static DoubleRange fromNumberRange(IRange orig){ - return new Range.DoubleRange( - Optional.ofNullable(orig.getMin()).map(Number::doubleValue).orElse(null), - 
Optional.ofNullable(orig.getMax()).map(Number::doubleValue).orElse(null)); + public static DoubleRange fromNumberRange(IRange orig) { + return new Range.DoubleRange(Optional.ofNullable(orig.getMin()).map(Number::doubleValue).orElse(null), + Optional.ofNullable(orig.getMax()).map(Number::doubleValue).orElse(null) + ); } @Override @@ -273,21 +273,42 @@ public boolean contains(Double value) { return value != null && contains(value.doubleValue()); } - public boolean contains(Number value) { - return value != null && contains(value.doubleValue()); - } - public boolean contains(double value) { - if(getMin() != null && value < getMin()) { + if (getMin() != null && value < getMin()) { return false; } - if(getMax() != null && value > getMax()) { + if (getMax() != null && value > getMax()) { return false; } - if(Double.isNaN(value)) { + if (Double.isNaN(value)) { return false; } return true; } + + public boolean contains(Number value) { + return value != null && contains(value.doubleValue()); + } } + + public static class MoneyRange extends Range { + public MoneyRange(BigDecimal min, BigDecimal max) { + super(min, max); + } + + public static MoneyRange fromNumberRange(IRange orig, FrontendConfig.CurrencyConfig currency) { + BigDecimal mappedMin = Optional.ofNullable(orig.getMin()) + .map(val -> new BigDecimal(val.longValue()).movePointLeft(currency.getDecimalScale())) + .orElse(null); + + BigDecimal mappedMax = Optional.ofNullable(orig.getMax()) + .map(val -> new BigDecimal(val.longValue()).movePointLeft(currency.getDecimalScale())) + .orElse(null); + + return new Range.MoneyRange(mappedMin, mappedMax); + } + + + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/ExcelConfig.java b/backend/src/main/java/com/bakdata/conquery/models/config/ExcelConfig.java index 0aa040d8c3..bdf0cc42d4 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/ExcelConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/ExcelConfig.java @@ -1,5 +1,11 @@ package com.bakdata.conquery.models.config; +import java.util.Collections; +import java.util.Map; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.NotNull; + import com.bakdata.conquery.models.query.PrintSettings; import com.google.common.collect.ImmutableMap; import lombok.AllArgsConstructor; @@ -12,15 +18,6 @@ import org.apache.poi.ss.usermodel.Font; import org.apache.poi.xssf.streaming.SXSSFSheet; import org.apache.poi.xssf.streaming.SXSSFWorkbook; -import org.apache.poi.xssf.usermodel.XSSFDataFormat; -import org.apache.poi.xssf.usermodel.XSSFFont; -import org.apache.poi.xssf.usermodel.XSSFWorkbook; - -import jakarta.validation.constraints.Min; -import jakarta.validation.constraints.NotBlank; -import jakarta.validation.constraints.NotNull; -import java.util.Collections; -import java.util.Map; @Data public class ExcelConfig { @@ -33,7 +30,8 @@ public class ExcelConfig { private static final Map FALLBACK_STYLES = Map.of( BASIC_STYLE, new CellStyler(), - CURRENCY_STYLE_PREFIX + "EUR", new CellStyler().withDataFormatString("#,##0.00 €"), + // \u00A0 is the non breakable space + CURRENCY_STYLE_PREFIX + "EUR", new CellStyler().withDataFormatString("#,##0.00\u00A0€"), NUMERIC_STYLE, new CellStyler().withDataFormatString("#,##0.00"), INTEGER_STYLE, new CellStyler().withDataFormatString("#,##0") ); diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/ExcelResultProvider.java 
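// MoneyRange.fromNumberRange above converts integer minor units (e.g. cents) into BigDecimal
// major units via movePointLeft(decimalScale); ExcelConfig then formats the value with a
// non-breaking space before the currency symbol. The conversion itself is plain JDK
// arithmetic, shown here in isolation.
import java.math.BigDecimal;

final class MoneyScaleSketch {

	/** Minor units to major units, the same shift MoneyRange.fromNumberRange performs. */
	static BigDecimal toMajorUnits(long minorUnits, int decimalScale) {
		return BigDecimal.valueOf(minorUnits).movePointLeft(decimalScale);
	}

	public static void main(String[] args) {
		System.out.println(toMajorUnits(123456L, 2)); // 1234.56
		System.out.println(toMajorUnits(5L, 2));      // 0.05
	}
}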
b/backend/src/main/java/com/bakdata/conquery/models/config/ExcelResultProvider.java index 633ffbc9f4..5f2e713f22 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/ExcelResultProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/ExcelResultProvider.java @@ -13,8 +13,6 @@ import com.bakdata.conquery.io.result.ResultRender.ResultRendererProvider; import com.bakdata.conquery.io.result.excel.ResultExcelProcessor; import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.i18n.I18n; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.resources.api.ResultExcelResource; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -73,13 +71,10 @@ public Collection generateResultURLs(ManagedExecution exec, UriBuil return Collections.emptyList(); } - - final PrintSettings printSettings = new PrintSettings(true, I18n.LOCALE.get(), exec.getNamespace(), exec.getConfig(), null, null); - // Save id column count to later check if xlsx dimensions are feasible - idColumnsCount = exec.getConfig().getIdColumns().getIdResultInfos(printSettings).size(); + idColumnsCount = exec.getConfig().getIdColumns().getIdResultInfos().size(); - final int columnCount = singleExecution.getResultInfos(printSettings).size() + idColumnsCount; + final int columnCount = singleExecution.getResultInfos().size() + idColumnsCount; final int maxColumnCount = SpreadsheetVersion.EXCEL2007.getMaxColumns(); if (columnCount > maxColumnCount) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/IdColumnConfig.java b/backend/src/main/java/com/bakdata/conquery/models/config/IdColumnConfig.java index ebac0b4a46..c757481494 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/IdColumnConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/IdColumnConfig.java @@ -15,7 +15,6 @@ import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.FixedLabelResultInfo; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.types.SemanticType; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -123,20 +122,24 @@ public boolean isExactlyOnePseudo() { * @return */ @JsonIgnore - public List getIdResultInfos(PrintSettings printSettings) { + public List getIdResultInfos() { return ids.stream().filter(ColumnConfig::isPrint).map(col -> { - final Map labels = col.getLabel(); - // Get the label for the locale, - // fall back to any label if there is exactly one defined, - // then fall back to the field name. - final String label = Objects.requireNonNullElse(labels.getOrDefault( - printSettings.getLocale(), - // fall backs - labels.size() == 1 ? 
labels.values().stream().collect(MoreCollectors.onlyElement()) : col.getField() - ), col.getField()); //TODO we can now hook our anonymizers into this - return new FixedLabelResultInfo(label, label, ResultType.Primitive.STRING, Set.of(new SemanticType.IdT(col.getName())), printSettings, ResultPrinters.printerFor(ResultType.Primitive.STRING, printSettings)); + return new FixedLabelResultInfo(ResultType.Primitive.STRING, Set.of(new SemanticType.IdT(col.getName()))) { + @Override + public String userColumnName(PrintSettings printSettings) { + final Map labels = col.getLabel(); + // Get the label for the locale, + // fall back to any label if there is exactly one defined, + // then fall back to the field name. + return Objects.requireNonNullElse(labels.getOrDefault( + printSettings.getLocale(), + // fall backs + labels.size() == 1 ? labels.values().stream().collect(MoreCollectors.onlyElement()) : col.getField() + ), col.getField()); + } + }; }).collect(Collectors.toUnmodifiableList()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java index 78ec8aecea..683d733351 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java @@ -4,22 +4,24 @@ import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.storage.IdentifiableStore; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.Store; import com.bakdata.conquery.io.storage.WorkerStorage; -import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.Role; import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.*; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.PreviewConfig; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.StructureNode; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; @@ -34,23 +36,23 @@ public interface StoreFactory { Collection discoverNamespaceStorages(); - Collection discoverWorkerStorages(); + Collection discoverWorkerStorages(); // NamespacedStorage (Important for serdes communication between manager and shards) SingletonStore createDatasetStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createSecondaryIdDescriptionStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createSecondaryIdDescriptionStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore
createTableStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore
createTableStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore> createConceptStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore> createConceptStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createImportStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createImportStore(String pathName, ObjectMapper objectMapper); - // WorkerStorage - IdentifiableStore createCBlockStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + // WorkerStorageImpl + IdentifiableStore createCBlockStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createBucketStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createBucketStore(String pathName, ObjectMapper objectMapper); SingletonStore createWorkerInformationStore(String pathName, ObjectMapper objectMapper); @@ -59,24 +61,25 @@ public interface StoreFactory { SingletonStore createWorkerToBucketsStore(String pathName, ObjectMapper objectMapper); - SingletonStore createStructureStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); + SingletonStore createStructureStore(String pathName, ObjectMapper objectMapper); // MetaStorage - IdentifiableStore createExecutionsStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createExecutionsStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createFormConfigStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createFormConfigStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createUserStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper); + IdentifiableStore createUserStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createRoleStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper); + IdentifiableStore createRoleStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createGroupStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper); + IdentifiableStore createGroupStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createInternToExternMappingStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); + IdentifiableStore createInternToExternMappingStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createSearchIndexStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); + IdentifiableStore createSearchIndexStore(String pathName, ObjectMapper objectMapper); - SingletonStore createPreviewStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); + SingletonStore createPreviewStore(String pathName, ObjectMapper objectMapper); + + Store createEntity2BucketStore(String pathName, ObjectMapper objectMapper); - CachedStore createEntity2BucketStore(String pathName, ObjectMapper objectMapper); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java index 797df2caab..4488c38e11 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java +++ 
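// The StoreFactory hunk above removes the CentralRegistry parameter from every create*Store
// method; together with the IIdInterner injection added to the mappers earlier in this patch,
// this suggests references are resolved by id at read time rather than through a shared
// registry. A hedged before/after sketch of the simplified factory shape, with toy types only.
final class StoreFactorySketch {

	interface Store<T> { T get(String id); }

	/** Before: every store needed the shared registry to resolve object references. */
	interface OldStyleFactory {
		<T> Store<T> createStore(Object centralRegistry, String pathName, Class<T> type);
	}

	/** After: the factory only needs the store location and the mapper/type information. */
	interface NewStyleFactory {
		<T> Store<T> createStore(String pathName, Class<T> type);
	}
}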
b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java @@ -24,12 +24,12 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.IdentifiableStore; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; -import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.io.storage.NamespacedStorageImpl; import com.bakdata.conquery.io.storage.Store; import com.bakdata.conquery.io.storage.StoreMappings; import com.bakdata.conquery.io.storage.WorkerStorage; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; import com.bakdata.conquery.io.storage.xodus.stores.BigStore; import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.EnvironmentRegistry; @@ -51,7 +51,6 @@ import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; @@ -65,7 +64,6 @@ import com.google.common.collect.MultimapBuilder; import com.google.common.collect.Multimaps; import com.google.common.collect.Sets; -import io.dropwizard.util.Duration; import jetbrains.exodus.env.Environment; import lombok.AllArgsConstructor; import lombok.Getter; @@ -117,50 +115,31 @@ public class XodusStoreFactory implements StoreFactory { C_BLOCKS.storeInfo().getName() ) ); - + @JsonIgnore + private final transient Multimap + openStoresInEnv = + Multimaps.synchronizedSetMultimap(MultimapBuilder.hashKeys().hashSetValues().build()); private Path directory = Path.of("storage"); - private boolean validateOnWrite = false; @NotNull @Valid private XodusConfig xodus = new XodusConfig(); - @JsonIgnore private EnvironmentRegistry registry = new EnvironmentRegistry(); - /** * Number of threads reading from XoduStore. + * * @implNote it's always only one thread reading from disk, dispatching to multiple reader threads. */ @Min(1) private int readerWorkers = Runtime.getRuntime().availableProcessors(); - /** * How many slots of buffering to use before the IO thread is put to sleep. */ @Min(1) private int bufferPerWorker = 20; - @JsonIgnore private ExecutorService readerExecutorService; - - public ExecutorService getReaderExecutorService() { - if (readerExecutorService == null){ - readerExecutorService = new ThreadPoolExecutor( - 1, getReaderWorkers(), - 5, TimeUnit.MINUTES, - new ArrayBlockingQueue<>(getReaderWorkers() * getBufferPerWorker()), - new ThreadPoolExecutor.CallerRunsPolicy() - ); - } - - return readerExecutorService; - } - - private boolean useWeakDictionaryCaching; - @NotNull - private Duration weakCacheDuration = Duration.hours(48); - /** * Flag for the {@link SerializingStore} whether to delete values from the underlying store, that cannot be mapped to an object anymore. 
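// getReaderExecutorService, removed from this spot in the hunk above, builds a bounded
// ThreadPoolExecutor whose queue holds readerWorkers * bufferPerWorker tasks and rejects
// overflow with CallerRunsPolicy, so the submitting IO thread runs the task itself once the
// buffer is full. A self-contained version of that construction using only the JDK.
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

final class ReaderPoolSketch {

	/**
	 * Bounded reader pool: at most readerWorkers threads, a fixed-size queue, and
	 * CallerRunsPolicy as backpressure for the dispatching IO thread.
	 */
	static ExecutorService newReaderPool(int readerWorkers, int bufferPerWorker) {
		return new ThreadPoolExecutor(
				1, readerWorkers,
				5, TimeUnit.MINUTES,
				new ArrayBlockingQueue<>(readerWorkers * bufferPerWorker),
				new ThreadPoolExecutor.CallerRunsPolicy()
		);
	}

	public static void main(String[] args) {
		ExecutorService pool = newReaderPool(Runtime.getRuntime().availableProcessors(), 20);
		pool.submit(() -> System.out.println("read on " + Thread.currentThread().getName()));
		pool.shutdown();
	}
}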
*/ @@ -182,71 +161,14 @@ public ExecutorService getReaderExecutorService() { @JsonIgnore private transient Validator validator; - @JsonIgnore - private final transient Multimap - openStoresInEnv = - Multimaps.synchronizedSetMultimap(MultimapBuilder.hashKeys().hashSetValues().build()); - @Override public Collection discoverNamespaceStorages() { return loadNamespacedStores("dataset_", (storePath) -> new NamespaceStorage(this, storePath), NAMESPACE_STORES); } @Override - public Collection discoverWorkerStorages() { - return loadNamespacedStores("worker_", (storePath) -> new WorkerStorage(this, storePath), WORKER_STORES); - } - - - private List loadNamespacedStores(String prefix, Function creator, Set storesToTest) { - final File baseDir = getDirectory().toFile(); - - if (baseDir.mkdirs()) { - log.warn("Had to create Storage Dir at `{}`", baseDir); - } - - final List storages = new ArrayList<>(); - - for (File directory : Objects.requireNonNull(baseDir.listFiles((file, name) -> file.isDirectory() && name.startsWith(prefix)))) { - - final String name = directory.getName(); - - ConqueryMDC.setLocation(directory.toString()); - - try (Environment environment = registry.findOrCreateEnvironment(directory, xodus)) { - if (!environmentHasStores(environment, storesToTest)) { - log.warn("No valid {}storage found in {}", prefix, directory); - continue; - } - } - - final T namespacedStorage = creator.apply(name); - - storages.add(namespacedStorage); - } - - return storages; - } - - private boolean environmentHasStores(Environment env, Set storesToTest) { - return env.computeInTransaction(t -> { - final List allStoreNames = env.getAllStoreNames(t); - final boolean complete = new HashSet<>(allStoreNames).containsAll(storesToTest); - if (complete) { - log.trace("Storage contained all stores: {}", storesToTest); - return true; - } - - final HashSet missing = Sets.newHashSet(storesToTest); - allStoreNames.forEach(missing::remove); - log.warn("Environment did not contain all required stores. It is missing: {}. It had {}. {}", missing, allStoreNames, - loadEnvironmentWithMissingStores - ? "Loading environment anyway." - : "Skipping environment." - ); - - return loadEnvironmentWithMissingStores; - }); + public Collection discoverWorkerStorages() { + return loadNamespacedStores("worker_", (storePath) -> new WorkerStorageImpl(this, validator, storePath), WORKER_STORES); } @Override @@ -255,53 +177,33 @@ public SingletonStore createDatasetStore(String pathName, ObjectMapper } @Override - public IdentifiableStore createSecondaryIdDescriptionStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, SECONDARY_IDS, objectMapper), centralRegistry); + public IdentifiableStore createSecondaryIdDescriptionStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, SECONDARY_IDS, objectMapper)); } @Override - public IdentifiableStore createInternToExternMappingStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, INTERN_TO_EXTERN, objectMapper), centralRegistry); + public IdentifiableStore
createTableStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, TABLES, objectMapper)); } @Override - public IdentifiableStore createSearchIndexStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, SEARCH_INDEX, objectMapper), centralRegistry); + public IdentifiableStore> createConceptStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, CONCEPTS, objectMapper)); } @Override - public SingletonStore createPreviewStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.singleton(createStore(findEnvironment(pathName), validator, ENTITY_PREVIEW, objectMapper)); + public IdentifiableStore createImportStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, IMPORTS, objectMapper)); } @Override - public CachedStore createEntity2BucketStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.cached(createStore(findEnvironment(pathName), validator, ENTITY_TO_BUCKET, objectMapper)); + public IdentifiableStore createCBlockStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, C_BLOCKS, objectMapper)); } @Override - public IdentifiableStore
createTableStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, TABLES, objectMapper), centralRegistry); - } - - @Override - public IdentifiableStore> createConceptStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, CONCEPTS, objectMapper), centralRegistry); - } - - @Override - public IdentifiableStore createImportStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, IMPORTS, objectMapper), centralRegistry); - } - - @Override - public IdentifiableStore createCBlockStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, C_BLOCKS, objectMapper), centralRegistry); - } - - @Override - public IdentifiableStore createBucketStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, BUCKETS, objectMapper), centralRegistry); + public IdentifiableStore createBucketStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, BUCKETS, objectMapper)); } @Override @@ -319,7 +221,7 @@ public SingletonStore createIdMappingStore(String pathName, ObjectM openStoresInEnv.put(bigStore.getDataXodusStore().getEnvironment(), bigStore.getDataXodusStore()); openStoresInEnv.put(bigStore.getMetaXodusStore().getEnvironment(), bigStore.getMetaXodusStore()); - return new SingletonStore<>(new CachedStore<>(bigStore)); + return new SingletonStore<>(bigStore); } } @@ -329,34 +231,57 @@ public SingletonStore createWorkerToBucketsStore(String path } @Override - public SingletonStore createStructureStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { + public SingletonStore createStructureStore(String pathName, ObjectMapper objectMapper) { return StoreMappings.singleton(createStore(findEnvironment(pathName), validator, STRUCTURE, objectMapper)); } @Override - public IdentifiableStore createExecutionsStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "executions")), validator, EXECUTIONS, objectMapper), centralRegistry); + public IdentifiableStore createExecutionsStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "executions")), validator, EXECUTIONS, objectMapper)); + } + + @Override + public IdentifiableStore createFormConfigStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "formConfigs")), validator, FORM_CONFIG, objectMapper)); + } + + @Override + public IdentifiableStore createUserStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "users")), validator, AUTH_USER, objectMapper)); } @Override - public IdentifiableStore createFormConfigStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return 
StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "formConfigs")), validator, FORM_CONFIG, objectMapper), centralRegistry); + public IdentifiableStore createRoleStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "roles")), validator, AUTH_ROLE, objectMapper)); } @Override - public IdentifiableStore createUserStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "users")), validator, AUTH_USER, objectMapper), centralRegistry); + public IdentifiableStore createGroupStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "groups")), validator, AUTH_GROUP, objectMapper)); } @Override - public IdentifiableStore createRoleStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "roles")), validator, AUTH_ROLE, objectMapper), centralRegistry); + public IdentifiableStore createInternToExternMappingStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, INTERN_TO_EXTERN, objectMapper)); } + @Override + public IdentifiableStore createSearchIndexStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, SEARCH_INDEX, objectMapper)); + } @Override - public IdentifiableStore createGroupStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "groups")), validator, AUTH_GROUP, objectMapper), centralRegistry); + public SingletonStore createPreviewStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(createStore(findEnvironment(pathName), validator, ENTITY_PREVIEW, objectMapper)); + } + + @Override + public Store createEntity2BucketStore(String pathName, ObjectMapper objectMapper) { + return createStore(findEnvironment(pathName), validator, ENTITY_TO_BUCKET, objectMapper); + } + + private Environment findEnvironment(File path) { + return registry.findOrCreateEnvironment(path, getXodus()); } private File resolveSubDir(String... subdirs) { @@ -369,23 +294,35 @@ private File resolveSubDir(String... subdirs) { return current.toFile(); } - /** - * Returns this.directory if the list is empty. 
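For readers skimming this hunk: the createStore method that moved further down keeps the same layering, a raw XodusStore wrapped in a SerializingStore and finally a CachedStore, with the openStoresInEnv bookkeeping guarding against opening the same store twice. What follows is a minimal, self-contained sketch of that decorator layering only; KeyValueStore, RawStore, ValidatingStore and CachingStore are invented stand-ins, not the project's actual store classes.

import java.util.HashMap;
import java.util.Map;

// Illustrative decorator layering, analogous to CachedStore(SerializingStore(XodusStore)).
interface KeyValueStore<K, V> {
    V get(K key);
    void put(K key, V value);
}

// Innermost layer: plain storage (stand-in for the Xodus-backed store).
class RawStore<K, V> implements KeyValueStore<K, V> {
    private final Map<K, V> data = new HashMap<>();
    public V get(K key) { return data.get(key); }
    public void put(K key, V value) { data.put(key, value); }
}

// Middle layer: the place to hook (de)serialization or validation on the way in and out.
class ValidatingStore<K, V> implements KeyValueStore<K, V> {
    private final KeyValueStore<K, V> inner;
    ValidatingStore(KeyValueStore<K, V> inner) { this.inner = inner; }
    public V get(K key) { return inner.get(key); }
    public void put(K key, V value) {
        if (value == null) {
            throw new IllegalArgumentException("Refusing to store null for " + key);
        }
        inner.put(key, value);
    }
}

// Outermost layer: read-through cache.
class CachingStore<K, V> implements KeyValueStore<K, V> {
    private final Map<K, V> cache = new HashMap<>();
    private final KeyValueStore<K, V> inner;
    CachingStore(KeyValueStore<K, V> inner) { this.inner = inner; }
    public V get(K key) { return cache.computeIfAbsent(key, inner::get); }
    public void put(K key, V value) {
        inner.put(key, value);
        cache.put(key, value);
    }
}

class StoreLayeringSketch {
    public static void main(String[] args) {
        KeyValueStore<String, String> store = new CachingStore<>(new ValidatingStore<>(new RawStore<>()));
        store.put("dataset_demo", "some value");
        System.out.println(store.get("dataset_demo"));
    }
}

The outermost wrapper is what callers see, which matches the factory returning the CachedStore around the SerializingStore in the relocated createStore below.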
- */ - @NonNull - @JsonIgnore - private File getStorageDir(String pathName) { - return getDirectory().resolve(pathName).toFile(); - } + public Store createStore(Environment environment, Validator validator, StoreMappings storeId, ObjectMapper objectMapper) { + final StoreInfo storeInfo = storeId.storeInfo(); + synchronized (openStoresInEnv) { + if (openStoresInEnv.get(environment).stream().map(XodusStore::getName).anyMatch(name -> storeInfo.getName().equals(name))) { + throw new IllegalStateException("Attempted to open an already opened store:" + storeInfo.getName()); + } + final XodusStore store = new XodusStore(environment, storeInfo.getName(), this::closeStore, this::removeStore); - private Environment findEnvironment(String pathName) { - final File path = getStorageDir(pathName); - return registry.findOrCreateEnvironment(path, getXodus()); + openStoresInEnv.put(environment, store); + + return new CachedStore<>( + new SerializingStore<>( + store, + validator, + objectMapper, + storeInfo.getKeyType(), + storeInfo.getValueType(), + isValidateOnWrite(), + isRemoveUnreadableFromStore(), + getUnreadableDataDumpDirectory(), + getReaderExecutorService() + )); + } } - private Environment findEnvironment(File path) { + private Environment findEnvironment(String pathName) { + final File path = getStorageDir(pathName); return registry.findOrCreateEnvironment(path, getXodus()); } @@ -407,7 +344,7 @@ private void closeStore(XodusStore store) { private void removeStore(XodusStore store) { final Environment env = store.getEnvironment(); - synchronized (openStoresInEnv){ + synchronized (openStoresInEnv) { final Collection stores = openStoresInEnv.get(env); stores.remove(store); @@ -420,12 +357,34 @@ private void removeStore(XodusStore store) { removeEnvironment(env); } + public ExecutorService getReaderExecutorService() { + if (readerExecutorService == null) { + readerExecutorService = new ThreadPoolExecutor( + 1, getReaderWorkers(), + 5, TimeUnit.MINUTES, + new ArrayBlockingQueue<>(getReaderWorkers() * getBufferPerWorker()), + new ThreadPoolExecutor.CallerRunsPolicy() + ); + } + + return readerExecutorService; + } + + /** + * Returns this.directory if the list is empty. + */ + @NonNull + @JsonIgnore + private File getStorageDir(String pathName) { + return getDirectory().resolve(pathName).toFile(); + } + private void removeEnvironment(Environment env) { log.info("Removed last XodusStore in Environment. 
Removing Environment as well: {}", env.getLocation()); - final List xodusStore= env.computeInReadonlyTransaction(env::getAllStoreNames); + final List xodusStore = env.computeInReadonlyTransaction(env::getAllStoreNames); - if (!xodusStore.isEmpty()){ + if (!xodusStore.isEmpty()) { throw new IllegalStateException("Cannot delete environment, because it still contains these stores:" + xodusStore); } @@ -439,31 +398,54 @@ private void removeEnvironment(Environment env) { } } - public Store createStore(Environment environment, Validator validator, StoreMappings storeId, ObjectMapper objectMapper) { - final StoreInfo storeInfo = storeId.storeInfo(); - synchronized (openStoresInEnv) { + private List loadNamespacedStores(String prefix, Function creator, Set storesToTest) { + final File baseDir = getDirectory().toFile(); - if(openStoresInEnv.get(environment).stream().map(XodusStore::getName).anyMatch(name -> storeInfo.getName().equals(name))){ - throw new IllegalStateException("Attempted to open an already opened store:" + storeInfo.getName()); - } + if (baseDir.mkdirs()) { + log.warn("Had to create Storage Dir at `{}`", baseDir); + } - final XodusStore store = new XodusStore(environment, storeInfo.getName(), this::closeStore, this::removeStore); + final List storages = new ArrayList<>(); - openStoresInEnv.put(environment, store); + for (File directory : Objects.requireNonNull(baseDir.listFiles((file, name) -> file.isDirectory() && name.startsWith(prefix)))) { - return new CachedStore<>( - new SerializingStore<>( - store, - validator, - objectMapper, - storeInfo.getKeyType(), - storeInfo.getValueType(), - isValidateOnWrite(), - isRemoveUnreadableFromStore(), - getUnreadableDataDumpDirectory(), - getReaderExecutorService() - )); + final String name = directory.getName(); + + ConqueryMDC.setLocation(directory.toString()); + + try (Environment environment = registry.findOrCreateEnvironment(directory, xodus)) { + if (!environmentHasStores(environment, storesToTest)) { + log.warn("No valid {}storage found in {}", prefix, directory); + continue; + } + } + + final T namespacedStorage = creator.apply(name); + + storages.add(namespacedStorage); } + + return storages; } + private boolean environmentHasStores(Environment env, Set storesToTest) { + return env.computeInTransaction(t -> { + final List allStoreNames = env.getAllStoreNames(t); + final boolean complete = new HashSet<>(allStoreNames).containsAll(storesToTest); + if (complete) { + log.trace("Storage contained all stores: {}", storesToTest); + return true; + } + + final HashSet missing = Sets.newHashSet(storesToTest); + allStoreNames.forEach(missing::remove); + log.warn("Environment did not contain all required stores. It is missing: {}. It had {}. {}", missing, allStoreNames, + loadEnvironmentWithMissingStores + ? "Loading environment anyway." + : "Skipping environment." 
+ ); + + return loadEnvironmentWithMissingStores; + }); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/Column.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/Column.java index bd20ea676d..68ab45af2f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/Column.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/Column.java @@ -1,19 +1,20 @@ package com.bakdata.conquery.models.datasets; import javax.annotation.Nullable; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.frontend.FrontendValue; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.config.IndexConfig; import com.bakdata.conquery.models.datasets.concepts.Searchable; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.util.search.TrieSearch; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonIgnore; -import jakarta.validation.constraints.NotNull; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NoArgsConstructor; @@ -50,12 +51,12 @@ public class Column extends Labeled implements NamespacedIdentifiable< * if this is set this column counts as the secondary id of the given name for this * table */ - @NsIdRef - private SecondaryIdDescription secondaryId; + private SecondaryIdDescriptionId secondaryId; + @JsonIgnore @Override - public ColumnId createId() { - return new ColumnId(table.getId(), getName()); + public DatasetId getDataset() { + return table.getDataset(); } @Override @@ -63,13 +64,6 @@ public String toString() { return "Column(id = " + getId() + ", type = " + getType() + ")"; } - @JsonIgnore - @Override - public Dataset getDataset() { - return table.getDataset(); - } - - /** * We create only an empty search here, because the content is provided through {@link com.bakdata.conquery.models.messages.namespaces.specific.RegisterColumnValues} and filled by the caller. 
*/ @@ -79,6 +73,16 @@ public TrieSearch createTrieSearch(IndexConfig config) { } public void init() { + if (getPosition() >= 0) { + // Column was initialized + return; + } + position = ArrayUtils.indexOf(getTable().getColumns(), this); } + + @Override + public ColumnId createId() { + return new ColumnId(table.getId(), getName()); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/Dataset.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/Dataset.java index 47bf5d0056..432cbb1170 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/Dataset.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/Dataset.java @@ -8,42 +8,59 @@ import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.auth.permissions.Authorized; import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; -import com.bakdata.conquery.models.auth.permissions.DatasetPermission; import com.bakdata.conquery.models.identifiable.Labeled; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.OptBoolean; +import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; @Getter @Setter @NoArgsConstructor +@EqualsAndHashCode(callSuper = true) public class Dataset extends Labeled implements Injectable, Authorized, NamespacedIdentifiable { - public Dataset(String name) { - setName(name); - } /** * Used to programmatically generate proper {@link com.bakdata.conquery.models.identifiable.ids.NamespacedId}s. */ public static final Dataset PLACEHOLDER = new Dataset("PLACEHOLDER"); - - public static boolean isAllIdsTable(Table table){ - return table.getName().equalsIgnoreCase(ConqueryConstants.ALL_IDS_TABLE); - } - /** * Sorting weight for Frontend. */ private int weight; + /** + * Resolver for {@link com.bakdata.conquery.models.identifiable.ids.NamespacedId}s. + * It is usually injected when this object is loaded from a store, or set manually, when it is created. + **/ + @JacksonInject(useInput = OptBoolean.FALSE) + @Getter + @Setter + @JsonIgnore + @EqualsAndHashCode.Exclude + private transient NamespacedStorageProvider namespacedStorageProvider; + + public Dataset(String name) { + setName(name); + } + + public static boolean isAllIdsTable(Table table){ + return table.getName().equalsIgnoreCase(ConqueryConstants.ALL_IDS_TABLE); + } + @JsonIgnore public Table getAllIdsTable() { //TODO store this somehow? 
/ Add this at dataset creation final Table table = new Table(); - table.setDataset(this); + table.setDataset(this.getId()); table.setName(ConqueryConstants.ALL_IDS_TABLE); + + // We could use the resolvers of this dataset, but actually this table's id should never be resolved return table; } @@ -54,17 +71,19 @@ public MutableInjectableValues inject(MutableInjectableValues mutableInjectableV @Override public DatasetId createId() { - return new DatasetId(getName()); + DatasetId datasetId = new DatasetId(getName()); + datasetId.setNamespacedStorageProvider(getNamespacedStorageProvider()); + return datasetId; } @Override public ConqueryPermission createPermission(Set abilities) { - return DatasetPermission.onInstance(abilities,getId()); + return getId().createPermission(abilities); } @JsonIgnore @Override - public Dataset getDataset() { - return this; + public DatasetId getDataset() { + return this.getId(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/Import.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/Import.java index aeee1a4e4a..73eafe2af5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/Import.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/Import.java @@ -4,10 +4,11 @@ import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.identifiable.NamedImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; @@ -27,8 +28,7 @@ public class Import extends NamedImpl implements NamespacedIdentifiabl @Valid @NotNull - @NsIdRef - private final Table table; + private final TableId table; private long numberOfEntities; @@ -41,7 +41,7 @@ public class Import extends NamedImpl implements NamespacedIdentifiabl @Override public ImportId createId() { - return new ImportId(table.getId(), getName()); + return new ImportId(table, getName()); } public long estimateMemoryConsumption() { @@ -54,7 +54,7 @@ public long estimateMemoryConsumption() { @JsonIgnore @Override - public Dataset getDataset() { + public DatasetId getDataset() { return getTable().getDataset(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/ImportColumn.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/ImportColumn.java index a00bfbfb9e..b4117fd28f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/ImportColumn.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/ImportColumn.java @@ -6,6 +6,7 @@ import com.bakdata.conquery.models.events.stores.root.ColumnStore; import com.bakdata.conquery.models.identifiable.NamedImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ImportColumnId; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonCreator; @@ -45,7 +46,7 @@ public String toString() { @JsonIgnore @Override - public Dataset getDataset() { + public 
DatasetId getDataset() { return parent.getDataset(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java index e43fbd8d06..568a31adfd 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java @@ -3,17 +3,21 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Optional; +import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; +import jakarta.ws.rs.core.UriBuilder; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; @@ -28,18 +32,11 @@ import com.google.common.collect.MoreCollectors; import com.google.common.collect.Sets; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; -import jakarta.ws.rs.core.UriBuilder; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; -/** - * @implNote I am using ids as references here instead of {@link NsIdRef} because I want the PreviewConfig to be pretty soft, instead of having to manage it as a dependent for Concepts and Tables. - */ @Data @Slf4j @AllArgsConstructor @@ -54,7 +51,6 @@ public class PreviewConfig { * @implSpec the order of selects is the order of the output fields. */ @Valid - @NotNull private List infoCardSelects = List.of(); @Valid @@ -63,13 +59,11 @@ public class PreviewConfig { /** * Columns that should not be displayed to users in entity preview. */ - @NotNull private Set hidden = Collections.emptySet(); /** * SecondaryIds where the columns should be grouped together. */ - @NotNull private Set grouping = Collections.emptySet(); /** @@ -77,13 +71,11 @@ public class PreviewConfig { * * @implNote This is purely for the frontend, the backend can theoretically be queried for all Connectors. */ - @NotNull private Set allConnectors = Collections.emptySet(); /** * Connectors that shall be selected by default by the frontend. */ - @NotNull private Set defaultConnectors = Collections.emptySet(); /** @@ -93,7 +85,6 @@ public class PreviewConfig { *
<p>
* The Frontend will use the concepts filters to render a search for entity preview. */ - @NotNull private Set searchFilters = Collections.emptySet(); @JacksonInject(useInput = OptBoolean.FALSE) @@ -120,19 +111,21 @@ public record InfoCardSelect(@NotNull String label, SelectId select, String desc /** * Defines a group of selects that will be evaluated per quarter and year in the requested period of the entity-preview. */ - public record TimeStratifiedSelects(@NotNull String label, String description, @NotEmpty List selects){ + public record TimeStratifiedSelects(@NotNull String label, String description, @NotEmpty List selects) { } @ValidationMethod(message = "Selects may be referenced only once.") @JsonIgnore public boolean isSelectsUnique() { - return timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).map(InfoCardSelect::select).distinct().count() == timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).count(); + return timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).map(InfoCardSelect::select).distinct().count() + == timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).count(); } @ValidationMethod(message = "Labels must be unique.") @JsonIgnore public boolean isLabelsUnique() { - return timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).map(InfoCardSelect::label).distinct().count() == timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).count(); + return timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).map(InfoCardSelect::label).distinct().count() + == timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).count(); } @JsonIgnore @@ -186,31 +179,34 @@ public String resolveSelectLabel(SelectResultInfo info) { public Listresolve) + .filter(Objects::nonNull) .collect(Collectors.toList()); } - public List> resolveSearchFilters() { - return getSearchFilters().stream() - .map(filterId -> getDatasetRegistry().findRegistry(filterId.getDataset()).getOptional(filterId)) - .flatMap(Optional::stream) + public List resolveSearchFilters() { + if (searchFilters == null) { + return Collections.emptyList(); + } + + return searchFilters.stream() + .map(FilterId::resolve) + .filter(Objects::nonNull) + .map(Filter::getId) .toList(); } - public Concept resolveSearchConcept() { - if (getSearchFilters().isEmpty()) { + public ConceptId resolveSearchConcept() { + if (searchFilters == null) { return null; } - return getSearchFilters().stream() - .map(filterId -> getDatasetRegistry().findRegistry(filterId.getDataset()).getOptional(filterId)) - .flatMap(Optional::stream) + + return searchFilters.stream() + .map(FilterId::>resolve) .map(filter -> filter.getConnector().getConcept()) .distinct() + .map(Concept::getId) .collect(MoreCollectors.onlyElement()); } - - - } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/SecondaryIdDescription.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/SecondaryIdDescription.java index adc8ae83ed..609e1496ef 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/SecondaryIdDescription.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/SecondaryIdDescription.java @@ -1,11 +1,11 @@ package com.bakdata.conquery.models.datasets; import com.bakdata.conquery.io.jackson.View; 
-import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; -import com.bakdata.conquery.models.index.InternToExternMapper; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import lombok.Getter; import lombok.NoArgsConstructor; @@ -17,14 +17,12 @@ @JsonIgnoreProperties({"searchDisabled", "generateSearchSuffixes", "searchMinSuffixLength"}) public class SecondaryIdDescription extends Labeled implements NamespacedIdentifiable { - @NsIdRef - private Dataset dataset; + private DatasetId dataset; private String description; - @NsIdRef @View.ApiManagerPersistence - private InternToExternMapper mapping; + private InternToExternMapperId mapping; /** * If true, SecondaryId will not be displayed to the user or listed in APIs. @@ -33,7 +31,7 @@ public class SecondaryIdDescription extends Labeled im @Override public SecondaryIdDescriptionId createId() { - return new SecondaryIdDescriptionId(dataset.getId(), getName()); + return new SecondaryIdDescriptionId(dataset, getName()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/Table.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/Table.java index 26a5c19389..a856cf6f99 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/Table.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/Table.java @@ -4,20 +4,25 @@ import java.util.HashSet; import java.util.Set; import java.util.stream.Stream; - import javax.annotation.CheckForNull; import javax.annotation.Nullable; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; +import com.bakdata.conquery.io.jackson.Initializing; import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.config.DatabaseConfig; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; +import com.fasterxml.jackson.annotation.OptBoolean; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @@ -25,19 +30,24 @@ @Getter @Setter @Slf4j -public class Table extends Labeled implements NamespacedIdentifiable { +@JsonDeserialize(converter = Table.Initializer.class) +public class Table extends Labeled implements NamespacedIdentifiable, Initializing { // TODO: 10.01.2020 fk: register imports here? 
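Context for the @JsonDeserialize(converter = Table.Initializer.class) annotation added above: Jackson applies such a converter after the value has been fully deserialized, which is what lets Table.init() backfill derived state (the dataset id from the injected storage, and the column positions). Below is a minimal, self-contained sketch of that general Jackson mechanism; Widget, total and Initializer are invented names, not the project's Initializing API.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.util.StdConverter;

// Invented example type: the converter runs once Jackson has finished binding a Widget,
// giving init() a chance to derive state that is not part of the JSON payload.
@JsonDeserialize(converter = Widget.Initializer.class)
class Widget {
    public int a;
    public int b;
    // Derived field, filled by init(), never read from JSON.
    public int total;

    void init() {
        total = a + b;
    }

    static class Initializer extends StdConverter<Widget, Widget> {
        @Override
        public Widget convert(Widget value) {
            value.init();
            return value;
        }
    }
}

class PostDeserializationInitSketch {
    public static void main(String[] args) throws Exception {
        Widget widget = new ObjectMapper().readValue("{\"a\":2,\"b\":3}", Widget.class);
        System.out.println(widget.total); // prints 5, because init() ran after binding
    }
}

The same idea, expressed through the project's Initializing.Converter base class, is what the Table.Initializer declared further down relies on.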
- @NsIdRef - private Dataset dataset; + private DatasetId dataset; + + @JacksonInject(useInput = OptBoolean.FALSE) + @JsonIgnore + private NamespacedStorage storage; + @NotNull @Valid @JsonManagedReference private Column[] columns = new Column[0]; /** * Defines the primary key/column of this table. Only required for SQL mode. - * If unset {@link ...SqlConnectorConfig#primaryColumn} is assumed. + * If unset {@link DatabaseConfig#getPrimaryColumn()} is assumed. */ @Nullable @JsonManagedReference @@ -46,9 +56,9 @@ public class Table extends Labeled implements NamespacedIdentifiable secondaryIds = new HashSet<>(); + final Set secondaryIds = new HashSet<>(); for (Column column : columns) { - final SecondaryIdDescription secondaryId = column.getSecondaryId(); + final SecondaryIdDescriptionId secondaryId = column.getSecondaryId(); if (secondaryId != null && !secondaryIds.add(secondaryId)) { log.error("{} is duplicated", secondaryId); return false; @@ -73,11 +83,12 @@ public boolean isDistinctLabels() { @Override public TableId createId() { - return new TableId(dataset.getId(), getName()); + return new TableId(dataset, getName()); } public Stream findImports(NamespacedStorage storage) { - return storage.getAllImports().stream().filter(imp -> imp.getTable().equals(this)); + TableId thisId = this.getId(); + return storage.getAllImports().filter(imp -> imp.getTable().equals(thisId)); } public Column getColumnByName(@NotNull String columnName) { @@ -91,7 +102,7 @@ public Column getColumnByName(@NotNull String columnName) { * selects the right column for the given secondaryId from this table */ @CheckForNull - public Column findSecondaryIdColumn(SecondaryIdDescription secondaryId) { + public Column findSecondaryIdColumn(SecondaryIdDescriptionId secondaryId) { for (Column col : columns) { if (col.getSecondaryId() == null || !secondaryId.equals(col.getSecondaryId())) { @@ -104,4 +115,18 @@ public Column findSecondaryIdColumn(SecondaryIdDescription secondaryId) { return null; } + @Override + public void init() { + if (dataset == null) { + dataset = storage.getDataset().getId(); + } else if (storage != null && !dataset.equals(storage.getDataset().getId())) { + throw new IllegalStateException("Datasets don't match. Namespace: %s Table: %s".formatted(storage.getDataset().getId(), dataset)); + } + + for (Column column : columns) { + column.init(); + } + } + + public static class Initializer extends Initializing.Converter
<Table>
{} } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java index 4fcb77d2ae..d421668ea6 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java @@ -4,17 +4,21 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import jakarta.validation.Valid; import com.bakdata.conquery.io.cps.CPSBase; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.auth.permissions.Authorized; -import com.bakdata.conquery.models.auth.permissions.ConceptPermission; import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.common.CDateSet; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; +import com.bakdata.conquery.models.exceptions.ConfigurationException; +import com.bakdata.conquery.models.exceptions.JSONException; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.queryplan.QPNode; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -25,7 +29,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import jakarta.validation.Valid; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; @@ -53,8 +56,15 @@ public abstract class Concept extends ConceptElemen @Valid private List connectors = Collections.emptyList(); - @NsIdRef - private Dataset dataset; + private DatasetId dataset; + + /** + * rawValue is expected to be an Integer, expressing a localId for {@link TreeConcept#getElementByLocalId(int)}. + * + *
<p>
+ * If {@link PrintSettings#isPrettyPrint()} is false, {@link ConceptElement#getId()} is used to print. + */ + public abstract String printConceptLocalId(Object rawValue, PrintSettings printSettings); public List getDefaultSelects() { public abstract List getSelects(); + public Select getSelectByName(String name) { + for (Select select : getSelects()) { + if (select.getName().equals(name)) { + return select; + } + } + return null; + } + + public void initElements() throws ConfigurationException, JSONException { + getSelects().forEach(Select::init); + getConnectors().forEach(CONNECTOR::init); + } + @Override @JsonIgnore public Concept getConcept() { @@ -70,7 +94,7 @@ public Concept getConcept() { @Override public ConceptId createId() { - return new ConceptId(dataset.getId(), getName()); + return new ConceptId(dataset, getName()); } public int countElements() { @@ -90,6 +114,17 @@ public QPNode createConceptQuery(QueryPlanContext context, List> f @Override public ConqueryPermission createPermission(Set abilities) { - return ConceptPermission.onInstance(abilities, getId()); + return getId().createPermission(abilities); } + + public CONNECTOR getConnectorByName(String name) { + for (CONNECTOR connector : connectors) { + if (connector.getName().equals(name)) { + return connector; + } + } + return null; + } + + public abstract ConceptElement findById(ConceptElementId id); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ConceptElement.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ConceptElement.java index 3b458d9696..037bda5cba 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ConceptElement.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ConceptElement.java @@ -2,7 +2,6 @@ import java.util.Collections; import java.util.List; - import javax.annotation.CheckForNull; import com.bakdata.conquery.apiv1.KeyValue; @@ -14,22 +13,17 @@ import lombok.Getter; import lombok.Setter; +@Setter +@Getter @EqualsAndHashCode(callSuper = true) public abstract class ConceptElement>> extends Labeled implements NamespacedIdentifiable { - @Getter - @Setter private String description; - @Getter - @Setter private List additionalInfos = Collections.emptyList(); /** * Initialize this only when needed. 
It is not needed */ - @Getter - @Setter - @JsonIgnore @CheckForNull private MatchingStats matchingStats; diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Connector.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Connector.java index 6baf7f2154..a077550599 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Connector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Connector.java @@ -10,8 +10,6 @@ import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.conditions.CTCondition; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; @@ -19,9 +17,17 @@ import com.bakdata.conquery.models.identifiable.IdMap; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; -import com.fasterxml.jackson.annotation.*; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import com.fasterxml.jackson.annotation.JsonAlias; +import com.fasterxml.jackson.annotation.JsonBackReference; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonManagedReference; +import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMultiset; import com.google.common.collect.Multiset.Entry; import io.dropwizard.validation.ValidationMethod; @@ -61,26 +67,28 @@ public abstract class Connector extends Labeled implements SelectHo @Setter(AccessLevel.NONE) @Valid private transient IdMap> allFiltersMap; - - public Collection> getFilters() { - return allFiltersMap.values(); - } - @NotNull @Getter @Setter @JsonManagedReference @Valid private List

collectRequiredTables() { - return this.getHolder().findConcept().getConnectors().stream().map(Connector::getTable).collect(Collectors.toSet()); + return this.getHolder().findConcept().getConnectors().stream().map(Connector::getResolvedTable).collect(Collectors.toSet()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/DistinctSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/DistinctSelect.java index 3a70cfc5cc..9d32279997 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/DistinctSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/DistinctSelect.java @@ -1,15 +1,16 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.value.AllValuesAggregator; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; +import com.bakdata.conquery.models.query.resultinfo.printers.common.MappedPrinter; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.sql.conversion.model.select.DistinctSelectConverter; import com.bakdata.conquery.sql.conversion.model.select.SelectConverter; @@ -19,19 +20,14 @@ public class DistinctSelect extends MappableSingleColumnSelect { @JsonCreator - public DistinctSelect(@NsIdRef Column column, - @NsIdRef InternToExternMapper mapping) { + public DistinctSelect(ColumnId column, + InternToExternMapperId mapping) { super(column, mapping); } @Override public Aggregator createAggregator() { - return new AllValuesAggregator<>(getColumn()); - } - - @Override - public ResultType getResultType() { - return new ResultType.ListT(super.getResultType()); + return new AllValuesAggregator<>(getColumn().resolve()); } @Override @@ -40,11 +36,16 @@ public SelectConverter createConverter() { } @Override - public ResultPrinters.Printer createPrinter(PrintSettings printSettings) { + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { if(getMapping() == null){ - return super.createPrinter(printSettings); + return super.createPrinter(printerFactory, printSettings); } - return new ResultPrinters.ListPrinter(new ResultPrinters.MappedPrinter(getMapping()), printSettings); + return printerFactory.getListPrinter(new MappedPrinter(getMapping().resolve()), printSettings); + } + + @Override + public ResultType getResultType() { + return new ResultType.ListT<>(super.getResultType()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/FirstValueSelect.java 
b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/FirstValueSelect.java index e4e4288478..0ca305e6aa 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/FirstValueSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/FirstValueSelect.java @@ -1,11 +1,10 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.value.FirstValueAggregator; import com.bakdata.conquery.sql.conversion.model.select.FirstValueSelectConverter; @@ -16,14 +15,16 @@ public class FirstValueSelect extends MappableSingleColumnSelect { @JsonCreator - public FirstValueSelect(@NsIdRef Column column, - @NsIdRef InternToExternMapper mapping) { + public FirstValueSelect( + ColumnId column, + InternToExternMapperId mapping + ) { super(column, mapping); } @Override public Aggregator createAggregator() { - return new FirstValueAggregator<>(getColumn()); + return new FirstValueAggregator<>(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/LastValueSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/LastValueSelect.java index 1112b8bb9a..d976c6b803 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/LastValueSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/LastValueSelect.java @@ -1,11 +1,10 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.value.LastValueAggregator; import com.bakdata.conquery.sql.conversion.model.select.LastValueSelectConverter; @@ -16,14 +15,16 @@ public class LastValueSelect extends MappableSingleColumnSelect { @JsonCreator - public LastValueSelect(@NsIdRef Column column, - @NsIdRef InternToExternMapper mapping) { + public LastValueSelect( + ColumnId column, + InternToExternMapperId mapping + ) { super(column, mapping); } @Override public Aggregator createAggregator() { - return new LastValueAggregator<>(getColumn()); + return new 
LastValueAggregator<>(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/RandomValueSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/RandomValueSelect.java index 458330c893..54e7104d7d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/RandomValueSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/RandomValueSelect.java @@ -1,11 +1,10 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.value.RandomValueAggregator; import com.bakdata.conquery.sql.conversion.model.select.RandomValueSelectConverter; @@ -15,14 +14,16 @@ @CPSType(id = "RANDOM", base = Select.class) public class RandomValueSelect extends MappableSingleColumnSelect { @JsonCreator - public RandomValueSelect(@NsIdRef Column column, - @NsIdRef InternToExternMapper mapping) { + public RandomValueSelect( + ColumnId column, + InternToExternMapperId mapping + ) { super(column, mapping); } @Override public Aggregator createAggregator() { - return new RandomValueAggregator<>(getColumn()); + return new RandomValueAggregator<>(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/SingleColumnSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/SingleColumnSelect.java index a8a5e16ff0..4e4e0c39c7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/SingleColumnSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/SingleColumnSelect.java @@ -4,19 +4,19 @@ import java.util.EnumSet; import java.util.List; import java.util.Set; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; import com.bakdata.conquery.models.types.SemanticType; import com.fasterxml.jackson.annotation.JsonIgnore; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; @@ -30,10 +30,9 @@ @Getter 
public abstract class SingleColumnSelect extends Select { - @NsIdRef @NotNull @NonNull - private Column column; + private ColumnId column; /** * Indicates if the values in the specified column belong to a categorical set @@ -50,18 +49,18 @@ public EnumSet getAcceptedColumnTypes() { } @Override - public SelectResultInfo getResultInfo(CQConcept cqConcept, PrintSettings settings) { + public SelectResultInfo getResultInfo(CQConcept cqConcept) { if(categorical){ - return new SelectResultInfo(this, cqConcept, Set.of(new SemanticType.CategoricalT()), settings); + return new SelectResultInfo(this, cqConcept, Set.of(new SemanticType.CategoricalT())); } - return new SelectResultInfo(this, cqConcept, Collections.emptySet(), settings); + return new SelectResultInfo(this, cqConcept, Collections.emptySet()); } @Nullable @Override - public List getRequiredColumns() { + public List getRequiredColumns() { return List.of(getColumn()); } @@ -69,11 +68,12 @@ public List getRequiredColumns() { @ValidationMethod(message = "Column does not match required Type.") public boolean isValidColumnType() { - if (getAcceptedColumnTypes().contains(getColumn().getType())) { + MajorTypeId type = getColumn().resolve().getType(); + if (getAcceptedColumnTypes().contains(type)) { return true; } - log.error("Column[{}] is of Type[{}]. Not one of [{}]", column.getId(), column.getType(), getAcceptedColumnTypes()); + log.error("Column[{}] is of Type[{}]. Not one of [{}]", column, type, getAcceptedColumnTypes()); return false; } @@ -82,11 +82,12 @@ public boolean isValidColumnType() { @ValidationMethod(message = "Columns is not for Connectors' Table.") public boolean isForConnectorTable() { - if (getColumn().getTable().equals(((Connector) getHolder()).getTable())) { + Table resolvedTable = ((Connector) getHolder()).getResolvedTable(); + if (getColumn().getTable().equals(resolvedTable.getId())) { return true; } - log.error("Column[{}] ist not for Table[{}]", column.getId(), ((Connector) getHolder()).getTable()); + log.error("Column[{}] ist not for Table[{}]", column, resolvedTable); return false; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountOccurencesSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountOccurencesSelect.java deleted file mode 100644 index c06d8cf943..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountOccurencesSelect.java +++ /dev/null @@ -1,53 +0,0 @@ -package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; - -import java.util.EnumSet; - -import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.concepts.select.Select; -import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; -import com.bakdata.conquery.models.events.MajorTypeId; -import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; -import com.bakdata.conquery.models.query.queryplan.aggregators.specific.MultiSelectAggregator; -import com.bakdata.conquery.models.query.queryplan.aggregators.specific.SelectAggregator; -import com.bakdata.conquery.models.types.ResultType; -import com.fasterxml.jackson.annotation.JsonCreator; -import jakarta.validation.constraints.NotNull; -import lombok.Getter; -import lombok.Setter; - -//TODO delete? 
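The recurring change across these Select classes is one pattern: fields that used to hold the referenced object via @NsIdRef now hold only a typed id (ColumnId, InternToExternMapperId, and so on), and the object is looked up with resolve() at the point of use, for example new FirstValueAggregator<>(getColumn().resolve()). A minimal, self-contained sketch of that indirection follows; SimpleColumnId, ColumnRegistry and FirstValueSketch are invented stand-ins rather than the real id and registry classes.

import java.util.HashMap;
import java.util.Map;

// Stand-in for a resolvable, typed id: it carries only the name plus a way to look the object up.
record SimpleColumnId(String name, ColumnRegistry registry) {
    SimpleColumn resolve() {
        SimpleColumn column = registry.get(name);
        if (column == null) {
            throw new IllegalStateException("Unknown column: " + name);
        }
        return column;
    }
}

record SimpleColumn(String name, String type) {}

// Stand-in for the central lookup that real ids resolve against.
class ColumnRegistry {
    private final Map<String, SimpleColumn> columns = new HashMap<>();
    void register(SimpleColumn column) { columns.put(column.name(), column); }
    SimpleColumn get(String name) { return columns.get(name); }
}

// A "select" that stores only the id and resolves it when the aggregator is built.
class FirstValueSketch {
    private final SimpleColumnId column;
    FirstValueSketch(SimpleColumnId column) { this.column = column; }

    String describeAggregator() {
        SimpleColumn resolved = column.resolve();
        return "FirstValueAggregator over " + resolved.name() + " (" + resolved.type() + ")";
    }

    public static void main(String[] args) {
        ColumnRegistry registry = new ColumnRegistry();
        registry.register(new SimpleColumn("age", "INTEGER"));
        FirstValueSketch select = new FirstValueSketch(new SimpleColumnId("age", registry));
        System.out.println(select.describeAggregator());
    }
}

A plausible reading of the design choice: keeping only the id in the model object makes it cheap to serialize and removes the need to re-inject object graphs when a store is loaded, at the cost of an explicit resolve() wherever the full object is needed.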
-@CPSType(id = "COUNT_OCCURENCES", base = Select.class) -public class CountOccurencesSelect extends SingleColumnSelect { - - @Override - public EnumSet getAcceptedColumnTypes() { - return EnumSet.of(MajorTypeId.STRING); - } - - @Getter - @Setter - @NotNull - private String[] selection; - - @JsonCreator - public CountOccurencesSelect(@NsIdRef Column column, String[] selection) { - super(column); - this.selection = selection; - } - - @Override - public Aggregator createAggregator() { - if (selection.length == 1) { - return new SelectAggregator(getColumn(), selection[0]); - } - - return new MultiSelectAggregator(getColumn(), selection); - } - - @Override - public ResultType getResultType() { - return ResultType.Primitive.INTEGER; - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountQuartersSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountQuartersSelect.java index 5e446bf403..a03861dd07 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountQuartersSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountQuartersSelect.java @@ -1,14 +1,14 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; import java.util.List; - import javax.annotation.Nullable; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.DaterangeSelectOrFilter; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.CountQuartersOfDateRangeAggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.CountQuartersOfDatesAggregator; @@ -30,18 +30,15 @@ @CPSType(id = "COUNT_QUARTERS", base = Select.class) public class CountQuartersSelect extends Select implements DaterangeSelectOrFilter { - @NsIdRef @Nullable - private Column column; - @NsIdRef + private ColumnId column; @Nullable - private Column startColumn; - @NsIdRef + private ColumnId startColumn; @Nullable - private Column endColumn; + private ColumnId endColumn; @Override - public List getRequiredColumns() { + public List getRequiredColumns() { if (isSingleColumnDaterange()) { return List.of(column); } @@ -50,11 +47,12 @@ public List getRequiredColumns() { @Override public Aggregator createAggregator() { - return switch (getColumn().getType()) { - case DATE_RANGE -> new CountQuartersOfDateRangeAggregator(getColumn()); - case DATE -> new CountQuartersOfDatesAggregator(getColumn()); - default -> - throw new IllegalArgumentException(String.format("Column '%s' is not of Date (-Range) Type but '%s'", getColumn(), getColumn().getType())); + final Column column = getColumn().resolve(); + final MajorTypeId typeId = column.getType(); + return switch (typeId) { + case DATE_RANGE -> new CountQuartersOfDateRangeAggregator(column); + case DATE -> new CountQuartersOfDatesAggregator(column); + default -> throw new IllegalArgumentException(String.format("Column '%s' is not of Date (-Range) Type but '%s'", getColumn(), typeId)); }; } @@ -67,5 +65,4 @@ public 
ResultType getResultType() { public SelectConverter createConverter() { return new CountQuartersSqlAggregator(); } - } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountSelect.java index dd4b351e23..0c20e77498 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountSelect.java @@ -3,19 +3,18 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.DistinctValuesWrapperAggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.CountAggregator; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.sql.conversion.model.aggregator.CountSqlAggregator; import com.bakdata.conquery.sql.conversion.model.select.SelectConverter; -import jakarta.validation.constraints.NotNull; import lombok.Data; import lombok.NoArgsConstructor; import org.jetbrains.annotations.Nullable; @@ -27,31 +26,31 @@ public class CountSelect extends Select { private boolean distinct = false; - @NsIdRefCollection @NotNull - private List distinctByColumn = Collections.emptyList(); + private List distinctByColumn = Collections.emptyList(); + - @NsIdRef @NotNull - private Column column; + private ColumnId column; @Override public Aggregator createAggregator() { + final Column resolved = getColumn().resolve(); if (!isDistinct()) { - return new CountAggregator(getColumn()); + return new CountAggregator(resolved); } if (distinctByColumn != null && !getDistinctByColumn().isEmpty()) { - return new DistinctValuesWrapperAggregator<>(new CountAggregator(getColumn()), getDistinctByColumn()); + return new DistinctValuesWrapperAggregator(new CountAggregator(resolved), getDistinctByColumn().stream().map(ColumnId::resolve).toList()); } - return new DistinctValuesWrapperAggregator<>(new CountAggregator(getColumn()), List.of(getColumn())); + return new DistinctValuesWrapperAggregator(new CountAggregator(resolved), List.of(getColumn().resolve())); } @Nullable @Override - public List getRequiredColumns() { - final List out = new ArrayList<>(); + public List getRequiredColumns() { + final List out = new ArrayList<>(); out.add(getColumn()); if (distinctByColumn != null) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateDistanceSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateDistanceSelect.java index aa164acc16..4747fef16c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateDistanceSelect.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateDistanceSelect.java @@ -2,20 +2,19 @@ import java.time.temporal.ChronoUnit; import java.util.EnumSet; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.DateDistanceAggregator; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.sql.conversion.model.aggregator.DateDistanceSqlAggregator; import com.bakdata.conquery.sql.conversion.model.select.SelectConverter; import com.fasterxml.jackson.annotation.JsonCreator; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; @@ -28,7 +27,7 @@ public class DateDistanceSelect extends SingleColumnSelect { private ChronoUnit timeUnit = ChronoUnit.YEARS; @JsonCreator - public DateDistanceSelect(@NsIdRef Column column) { + public DateDistanceSelect(ColumnId column) { super(column); } @@ -39,7 +38,7 @@ public EnumSet getAcceptedColumnTypes() { @Override public Aggregator createAggregator() { - return new DateDistanceAggregator(getColumn(), getTimeUnit()); + return new DateDistanceAggregator(getColumn().resolve(), getTimeUnit()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateUnionSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateUnionSelect.java index a3a9ef9775..127fa40e1e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateUnionSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateUnionSelect.java @@ -1,14 +1,12 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; import java.util.List; - import javax.annotation.Nullable; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.DaterangeSelectOrFilter; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.DateUnionAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -27,18 +25,15 @@ @JsonIgnoreProperties("categorical") public class DateUnionSelect extends Select implements DaterangeSelectOrFilter { - @NsIdRef @Nullable - private Column column; - @NsIdRef + private ColumnId column; @Nullable - private Column startColumn; - @NsIdRef + private ColumnId startColumn; @Nullable - private Column endColumn; + private ColumnId endColumn; @Override - public List getRequiredColumns() { + public List getRequiredColumns() { if (column != null) { return List.of(column); } @@ -48,7 +43,7 @@ public List 
getRequiredColumns() { @Override public Aggregator createAggregator() { // TODO fix this for 2 columns - return new DateUnionAggregator(getColumn()); + return new DateUnionAggregator(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DurationSumSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DurationSumSelect.java index a52ca3634c..d0f35c2325 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DurationSumSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DurationSumSelect.java @@ -1,14 +1,12 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; import java.util.List; - import javax.annotation.Nullable; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.DaterangeSelectOrFilter; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.DurationSumAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -27,18 +25,15 @@ @JsonIgnoreProperties("categorical") public class DurationSumSelect extends Select implements DaterangeSelectOrFilter { - @NsIdRef @Nullable - private Column column; - @NsIdRef + private ColumnId column; @Nullable - private Column startColumn; - @NsIdRef + private ColumnId startColumn; @Nullable - private Column endColumn; + private ColumnId endColumn; @Override - public List getRequiredColumns() { + public List getRequiredColumns() { if (column != null) { return List.of(column); } @@ -47,7 +42,7 @@ public List getRequiredColumns() { @Override public Aggregator createAggregator() { - return new DurationSumAggregator(getColumn()); + return new DurationSumAggregator(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/FlagSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/FlagSelect.java index a810c8e984..2f79121b10 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/FlagSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/FlagSelect.java @@ -1,15 +1,15 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; -import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.specific.MultiSelectFilter; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import 
com.bakdata.conquery.models.query.queryplan.aggregators.specific.FlagsAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -34,18 +34,18 @@ @ToString public class FlagSelect extends Select { - @NsIdRefCollection - private final Map flags; + private final Map flags; @Override - public List getRequiredColumns() { - return new ArrayList<>(flags.values()); + public List getRequiredColumns() { + return flags.values().stream().toList(); } @Override public Aggregator createAggregator() { - return new FlagsAggregator(flags); + final Map collect = flags.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().resolve())); + return new FlagsAggregator(collect); } @@ -58,7 +58,7 @@ public boolean isAllColumnsOfSameTable() { @JsonIgnore @ValidationMethod(message = "Columns must be BOOLEAN.") public boolean isAllColumnsBoolean() { - return flags.values().stream().map(Column::getType).allMatch(MajorTypeId.BOOLEAN::equals); + return flags.values().stream().map(ColumnId::resolve).map(Column::getType).allMatch(MajorTypeId.BOOLEAN::equals); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/MappableSingleColumnSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/MappableSingleColumnSelect.java index 4d9ea4faa6..d6eab22730 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/MappableSingleColumnSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/MappableSingleColumnSelect.java @@ -2,71 +2,70 @@ import java.util.Collections; import java.util.Set; - import javax.annotation.Nullable; +import jakarta.validation.Valid; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; +import com.bakdata.conquery.models.query.resultinfo.printers.common.MappedPrinter; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.types.SemanticType; -import jakarta.validation.Valid; import lombok.Getter; +@Getter public abstract class MappableSingleColumnSelect extends SingleColumnSelect { /** * If a mapping was provided the mapping changes the aggregator result before it is processed by a {@link com.bakdata.conquery.io.result.ResultRender.ResultRendererProvider}. 
*/ - @Getter @Valid @Nullable @View.ApiManagerPersistence - @NsIdRef - private final InternToExternMapper mapping; + private final InternToExternMapperId mapping; - public MappableSingleColumnSelect(Column column, @Nullable InternToExternMapper mapping) { + public MappableSingleColumnSelect(ColumnId column, @Nullable InternToExternMapperId mapping) { super(column); this.mapping = mapping; } @Override - public ResultPrinters.Printer createPrinter(PrintSettings printSettings) { + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { if (mapping == null) { - return super.createPrinter(printSettings); + return super.createPrinter(printerFactory, printSettings); } - return new ResultPrinters.MappedPrinter(getMapping()); + return new MappedPrinter(mapping.resolve()); } @Override - public SelectResultInfo getResultInfo(CQConcept cqConcept, PrintSettings settings) { - - if (!isCategorical()) { - return new SelectResultInfo(this, cqConcept, Collections.emptySet(), settings); + public ResultType getResultType() { + if(mapping == null){ + return ResultType.resolveResultType(getColumn().resolve().getType()); } - - return new SelectResultInfo(this, cqConcept, Set.of(new SemanticType.CategoricalT()), settings); + return ResultType.Primitive.STRING; } @Override - public ResultType getResultType() { - if(mapping == null){ - return ResultType.resolveResultType(getColumn().getType()); + public SelectResultInfo getResultInfo(CQConcept cqConcept) { + + if (!isCategorical()) { + return new SelectResultInfo(this, cqConcept, Collections.emptySet()); } - return ResultType.Primitive.STRING; + + return new SelectResultInfo(this, cqConcept, Set.of(new SemanticType.CategoricalT())); } public void loadMapping() { if (mapping != null) { - mapping.init(); + mapping.resolve().init(); } } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/PrefixSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/PrefixSelect.java index ccc50623dd..5fb858be2b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/PrefixSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/PrefixSelect.java @@ -3,11 +3,10 @@ import java.util.EnumSet; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.PrefixTextAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -28,14 +27,14 @@ public EnumSet getAcceptedColumnTypes() { private String prefix; @JsonCreator - public PrefixSelect(@NsIdRef Column column, String prefix) { + public PrefixSelect(ColumnId column, String prefix) { super(column); this.prefix = prefix; } @Override public Aggregator createAggregator() { - return new PrefixTextAggregator(getColumn(), prefix); + return new PrefixTextAggregator(getColumn().resolve(), prefix); } @Override diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/QuartersInYearSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/QuartersInYearSelect.java index 43657445b5..1d3dbaf693 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/QuartersInYearSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/QuartersInYearSelect.java @@ -3,11 +3,10 @@ import java.util.EnumSet; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.QuartersInYearAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -25,13 +24,13 @@ public EnumSet getAcceptedColumnTypes() { } @JsonCreator - public QuartersInYearSelect(@NsIdRef Column column) { + public QuartersInYearSelect(ColumnId column) { super(column); } @Override public Aggregator createAggregator() { - return new QuartersInYearAggregator(getColumn()); + return new QuartersInYearAggregator(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/SumSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/SumSelect.java index 406d6164b6..bc50b24deb 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/SumSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/SumSelect.java @@ -4,13 +4,13 @@ import java.util.Collections; import java.util.EnumSet; import java.util.List; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.ColumnAggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.DistinctValuesWrapperAggregator; @@ -28,7 +28,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; @@ -39,23 +38,19 @@ @NoArgsConstructor(onConstructor_ = @JsonCreator) public class SumSelect extends Select { - @NsIdRefCollection @NotNull - private List distinctByColumn = Collections.emptyList(); + private List distinctByColumn = Collections.emptyList(); - @NsIdRef @NotNull - private Column column; - - @NsIdRef - 
private Column subtractColumn; + private ColumnId column; + private ColumnId subtractColumn; - public SumSelect(Column column) { + public SumSelect(ColumnId column) { this(column, null); } - public SumSelect(Column column, Column subtractColumn) { + public SumSelect(ColumnId column, ColumnId subtractColumn) { this.column = column; this.subtractColumn = subtractColumn; } @@ -63,33 +58,35 @@ public SumSelect(Column column, Column subtractColumn) { @Override public Aggregator createAggregator() { if (distinctByColumn != null && !distinctByColumn.isEmpty()) { - return new DistinctValuesWrapperAggregator<>(getAggregator(), getDistinctByColumn()); + return new DistinctValuesWrapperAggregator<>(getAggregator(), getDistinctByColumn().stream().map(ColumnId::resolve).toList()); } return getAggregator(); } private ColumnAggregator getAggregator() { + Column resolved = getColumn().resolve(); if (subtractColumn == null) { - return switch (getColumn().getType()) { - case INTEGER -> new IntegerSumAggregator(getColumn()); - case MONEY -> new MoneySumAggregator(getColumn()); - case DECIMAL -> new DecimalSumAggregator(getColumn()); - case REAL -> new RealSumAggregator(getColumn()); - default -> throw new IllegalStateException(String.format("Invalid column type '%s' for SUM Aggregator", getColumn().getType())); + return switch (resolved.getType()) { + case INTEGER -> new IntegerSumAggregator(resolved); + case MONEY -> new MoneySumAggregator(resolved); + case DECIMAL -> new DecimalSumAggregator(resolved); + case REAL -> new RealSumAggregator(resolved); + default -> throw new IllegalStateException(String.format("Invalid column type '%s' for SUM Aggregator", resolved.getType())); }; } - if (getColumn().getType() != getSubtractColumn().getType()) { - throw new IllegalStateException(String.format("Column types are not the same: Column %s\tSubstractColumn %s", getColumn().getType(), getSubtractColumn() + Column resolvedSubstract = getSubtractColumn().resolve(); + if (resolved.getType() != resolvedSubstract.getType()) { + throw new IllegalStateException(String.format("Column types are not the same: Column %s\tSubstractColumn %s", resolved.getType(), resolvedSubstract .getType())); } - return switch (getColumn().getType()) { - case INTEGER -> new IntegerDiffSumAggregator(getColumn(), getSubtractColumn()); - case MONEY -> new MoneyDiffSumAggregator(getColumn(), getSubtractColumn()); - case DECIMAL -> new DecimalDiffSumAggregator(getColumn(), getSubtractColumn()); - case REAL -> new RealDiffSumAggregator(getColumn(), getSubtractColumn()); - default -> throw new IllegalStateException(String.format("Invalid column type '%s' for SUM Aggregator", getColumn().getType())); + return switch (resolved.getType()) { + case INTEGER -> new IntegerDiffSumAggregator(resolved, resolvedSubstract); + case MONEY -> new MoneyDiffSumAggregator(resolved, resolvedSubstract); + case DECIMAL -> new DecimalDiffSumAggregator(resolved, resolvedSubstract); + case REAL -> new RealDiffSumAggregator(resolved, resolvedSubstract); + default -> throw new IllegalStateException(String.format("Invalid column type '%s' for SUM Aggregator", resolved.getType())); }; } @@ -97,8 +94,8 @@ private ColumnAggregator getAggregator() { private static final EnumSet NUMBER_COMPATIBLE = EnumSet.of(MajorTypeId.INTEGER, MajorTypeId.MONEY, MajorTypeId.DECIMAL, MajorTypeId.REAL); @Override - public List getRequiredColumns() { - final List out = new ArrayList<>(); + public List getRequiredColumns() { + final List out = new ArrayList<>(); out.add(getColumn()); @@ -120,19 
+117,19 @@ public SelectConverter createConverter() { @Override public ResultType getResultType() { - return ResultType.resolveResultType(getColumn().getType()); + return ResultType.resolveResultType(getColumn().resolve().getType()); } @ValidationMethod(message = "Column is not of Summable Type.") @JsonIgnore public boolean isSummableColumnType() { - return NUMBER_COMPATIBLE.contains(getColumn().getType()); + return NUMBER_COMPATIBLE.contains(getColumn().resolve().getType()); } @ValidationMethod(message = "Columns are not of same Type.") @JsonIgnore public boolean isColumnsOfSameType() { - return getSubtractColumn() == null || getSubtractColumn().getType().equals(getColumn().getType()); + return getSubtractColumn() == null || getSubtractColumn().resolve().getType().equals(getColumn().resolve().getType()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeCache.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeCache.java index 128dcf61bd..23588dc9c4 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeCache.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeCache.java @@ -4,7 +4,6 @@ import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; -import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; import com.bakdata.conquery.util.CalculatedValue; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -36,15 +35,13 @@ public class ConceptTreeCache { * @implNote ConcurrentHashMap does not allow null values, but we want to have null values in the map. So we wrap the values in Optional. */ @JsonIgnore - private final Map>> cached = new ConcurrentHashMap<>();; + private final Map> cached = new ConcurrentHashMap<>();; /** * If id is already in cache, use that. If not calculate it by querying treeConcept. If rowMap was not used to query, cache the response. 
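// Editorial illustration (not part of the patch): the caching rule described above, as a
// standalone sketch. ConcurrentHashMap cannot hold null values, so misses are wrapped in
// Optional, and a result is only cached when the lazily calculated rowMap was never touched,
// i.e. the string value alone decided the outcome. Generic parameters are inferred, since
// they are not rendered in this diff:
static ConceptTreeChild cachedLookup(Map<String, Optional<ConceptTreeChild>> cached, TreeConcept treeConcept,
                                     String value, CalculatedValue<Map<String, Object>> rowMap) throws ConceptConfigurationException {
    final Optional<ConceptTreeChild> hit = cached.get(value);
    if (hit != null) {
        return hit.orElse(null); // cache hit; an empty Optional means "previously found no match"
    }
    final ConceptTreeChild child = treeConcept.findMostSpecificChild(value, rowMap);
    if (!rowMap.isCalculated()) {
        cached.put(value, Optional.ofNullable(child)); // cacheable: the row values were never needed
    }
    return child;
}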
- * - * @param value */ - public ConceptElement findMostSpecificChild(String value, CalculatedValue> rowMap) throws ConceptConfigurationException { + public ConceptTreeChild findMostSpecificChild(String value, CalculatedValue> rowMap) throws ConceptConfigurationException { if(cached.containsKey(value)) { hits++; @@ -53,7 +50,7 @@ public ConceptElement findMostSpecificChild(String value, CalculatedValue child = treeConcept.findMostSpecificChild(value, rowMap); + final ConceptTreeChild child = treeConcept.findMostSpecificChild(value, rowMap); if(!rowMap.isCalculated()) { cached.put(value, Optional.ofNullable(child)); diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeChild.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeChild.java index d9bfe7c94c..4ef04d6efe 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeChild.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeChild.java @@ -3,16 +3,16 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.conditions.CTCondition; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptTreeChildId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; -import jakarta.validation.constraints.NotNull; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; @@ -50,36 +50,15 @@ public void clearMatchingStats() { setMatchingStats(null); } - @Override - @JsonIgnore - public int[] getPrefix() { - if (prefix == null) { - int[] pPrefix = getParent().getPrefix(); - prefix = Arrays.copyOf(pPrefix, pPrefix.length + 1); - prefix[prefix.length - 1] = this.getLocalId(); - } - return prefix; - } - public void init() throws ConceptConfigurationException { if (condition != null) { condition.init(this); } } - @Override - public ConceptTreeChildId createId() { - return new ConceptTreeChildId(parent.getId(), getName()); - } - - @Override - public boolean matchesPrefix(int[] conceptPrefix) { - return conceptPrefix.length > depth && conceptPrefix[depth] == localId; - } - @JsonIgnore @Override - public Dataset getDataset() { + public DatasetId getDataset() { return getConcept().getDataset(); } @@ -95,4 +74,44 @@ public TreeConcept getConcept() { } throw new IllegalStateException("The node " + this + " seems to have no root"); } + + @Override + @JsonIgnore + public int[] getPrefix() { + if (prefix == null) { + int[] pPrefix = getParent().getPrefix(); + prefix = Arrays.copyOf(pPrefix, pPrefix.length + 1); + prefix[prefix.length - 1] = this.getLocalId(); + } + return prefix; + } + + @Override + public boolean matchesPrefix(int[] conceptPrefix) { + return conceptPrefix.length > depth && conceptPrefix[depth] == localId; + } + + @Override + public ConceptTreeChildId createId() { + return new ConceptTreeChildId(parent.getId(), getName()); + } + + /** + * Parts only contains references to child elements. + * If parts is empty return self. 
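// Editorial illustration (not part of the patch): the descent performed by findByParts, shown
// with hypothetical child names. An empty list returns the node itself, a matching first part
// recurses into that child with the remaining parts, and an unmatched name yields null:
//
//   child.findByParts(List.of())           -> child
//   child.findByParts(List.of("a", "b"))   -> child "a" of this node, then its child "b"
//   child.findByParts(List.of("unknown"))  -> null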
+ * If the first part does not match the name of a child return null + */ + ConceptTreeChild findByParts(List parts) { + if (parts.isEmpty()) { + return this; + } + + for (ConceptTreeChild child : children) { + if (parts.get(0).equals(child.getName())) { + final List subList = parts.size() > 1 ? parts.subList(1, parts.size()) : Collections.emptyList(); + return child.findByParts(subList); + } + } + return null; + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeConnector.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeConnector.java index bb00b8d103..26d09940c7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeConnector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeConnector.java @@ -2,45 +2,46 @@ import java.util.ArrayList; import java.util.List; - import javax.annotation.CheckForNull; +import jakarta.validation.Valid; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.conditions.CTCondition; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.Valid; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; -@Getter @Setter +@Getter +@Setter @Slf4j public class ConceptTreeConnector extends Connector { private static final long serialVersionUID = 1L; - @NsIdRef @CheckForNull - private Table table; + @CheckForNull + private TableId table; - @NsIdRef @CheckForNull - private Column column = null; + @CheckForNull + private ColumnId column = null; private CTCondition condition = null; - @Valid @JsonManagedReference + @Valid + @JsonManagedReference private List> filters = new ArrayList<>(); @JsonIgnore @ValidationMethod(message = "Table and Column usage are exclusive") public boolean isTableXOrColumn() { - if(table != null){ + if (table != null) { return column == null; } @@ -49,17 +50,34 @@ public boolean isTableXOrColumn() { @JsonIgnore @ValidationMethod(message = "Column is not STRING.") - public boolean isColumnForTree(){ - return column == null || column.getType().equals(MajorTypeId.STRING); + public boolean isColumnForTree() { + return column == null || column.resolve().getType().equals(MajorTypeId.STRING); } - @Override @JsonIgnore - public Table getTable() { - if(column != null){ + @Override + @JsonIgnore + public Table getResolvedTable() { + if (column != null) { + return column.getTable().resolve(); + } + + if (table != null) { + return table.resolve(); + } + return null; + } + + @Override + @JsonIgnore + public TableId getResolvedTableId() { + if (column != null) { return column.getTable(); } - return table; + if (table != null) { + return table; + } + return null; } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java 
b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java index 8cbc0d80f4..d770bfc0a6 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java @@ -4,25 +4,29 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Stream; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.Initializing; -import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.SelectHolder; import com.bakdata.conquery.models.datasets.concepts.select.concept.UniversalSelect; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; +import com.bakdata.conquery.models.exceptions.ConfigurationException; +import com.bakdata.conquery.models.exceptions.JSONException; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.util.CalculatedValue; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotNull; import lombok.Getter; +import lombok.NonNull; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @@ -31,7 +35,7 @@ */ @Slf4j @CPSType(id = "TREE", base = Concept.class) -@JsonDeserialize(converter = TreeConcept.TreeConceptInitializer.class) +@JsonDeserialize(converter = TreeConcept.Initializer.class) public class TreeConcept extends Concept implements ConceptTreeNode, SelectHolder, Initializing { @JsonIgnore @@ -43,36 +47,28 @@ public class TreeConcept extends Concept implements Concep @JsonIgnore private final List> localIdMap = new ArrayList<>(); - @Getter @Setter @Valid private List children = Collections.emptyList(); - - @JsonIgnore + @View.Internal @Getter @Setter private int localId; - @NotNull @Getter @Setter @Valid @JsonManagedReference private List selects = new ArrayList<>(); - @JsonIgnore - private final Map caches = new ConcurrentHashMap<>(); + private int nChildren = -1; @Override public Concept findConcept() { return getConcept(); } - public ConceptTreeCache getCache(Import imp) { - return caches.get(imp); - } - @Override public ConceptTreeNode getParent() { return null; @@ -89,47 +85,21 @@ public boolean matchesPrefix(int[] conceptPrefix) { return conceptPrefix != null && conceptPrefix[0] == 0; } - public void init() { - setLocalId(0); - localIdMap.add(this); - - final List openList = new ArrayList<>(getChildren()); - - for (ConceptTreeConnector con : getConnectors()) { - if (con.getCondition() == null) { - continue; - } - - try { - con.getCondition().init(this); - } catch (ConceptConfigurationException e) { - throw new RuntimeException("Unable to init condition", e); - } - } - - for (int i = 0; i < openList.size(); i++) { - final ConceptTreeChild ctc = openList.get(i); - - try { - ctc.setLocalId(localIdMap.size()); - 
localIdMap.add(ctc); - ctc.setDepth(ctc.getParent().getDepth() + 1); - - ctc.init(); - - } catch (Exception e) { - throw new RuntimeException("Error trying to consolidate the node " + ctc.getLabel() + " in " + getLabel(), e); - } - - openList.addAll((openList.get(i)).getChildren()); - } + @JsonIgnore + public Stream getAllChildren() { + return localIdMap.stream().filter(ConceptTreeChild.class::isInstance).map(ConceptTreeChild.class::cast); } - public ConceptElement findMostSpecificChild(String stringValue, CalculatedValue> rowMap) throws ConceptConfigurationException { + public ConceptTreeChild findMostSpecificChild(String stringValue, CalculatedValue> rowMap) throws ConceptConfigurationException { return findMostSpecificChild(stringValue, rowMap, null, getChildren()); } - private ConceptElement findMostSpecificChild(String stringValue, CalculatedValue> rowMap, ConceptElement best, List currentList) + private ConceptTreeChild findMostSpecificChild( + String stringValue, + CalculatedValue> rowMap, + ConceptTreeChild best, + List currentList + ) throws ConceptConfigurationException { while (currentList != null && !currentList.isEmpty()) { @@ -166,13 +136,91 @@ private ConceptElement findMostSpecificChild(String stringValue, CalculatedValue return best; } - @JsonIgnore - public Stream getAllChildren() { - return localIdMap.stream().filter(ConceptTreeChild.class::isInstance).map(ConceptTreeChild.class::cast); + + + /** + * Method to get the element of this concept tree that has the specified local ID. + * This should only be used by the query engine itself as an index. + * + * @param ids the local id array to look for + * @return the element matching the most specific local id in the array + */ + public ConceptTreeNode getElementByLocalIdPath( int @NonNull [] ids) { + final int mostSpecific = ids[ids.length - 1]; + return getElementByLocalId(mostSpecific); } - @JsonIgnore - private int nChildren = -1; + public ConceptTreeNode getElementByLocalId(int localId) { + return localIdMap.get(localId); + } + + /** + * rawValue is expected to be an Integer, expressing a localId for {@link TreeConcept#getElementByLocalId(int)}. + *
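// Editorial illustration (not part of the patch): the two print modes implemented below, for a
// hypothetical node with id "dataset1.icd.e10-e14", label "Diabetes" and no description:
//
//   prettyPrint == false -> "dataset1.icd.e10-e14"   (node.getId().toString())
//   prettyPrint == true  -> "Diabetes"               ("Diabetes - <description>" if one is set)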

+ * If {@link PrintSettings#isPrettyPrint()} is true, {@link ConceptElement#getLabel()} is used to print. + * If {@link PrintSettings#isPrettyPrint()} is false, {@link ConceptElement#getId()} is used to print. + */ + public String printConceptLocalId(Object rawValue, PrintSettings printSettings) { + + if (rawValue == null) { + return null; + } + + final int localId = (int) rawValue; + + final ConceptTreeNode node = getElementByLocalId(localId); + + if (!printSettings.isPrettyPrint()) { + return node.getId().toString(); + } + + if (node.getDescription() == null) { + return node.getLabel(); + } + + return node.getLabel() + " - " + node.getDescription(); + + } + + @Override + public void init() throws Exception { + initElements(); + } + + @Override + public void initElements() throws ConfigurationException, JSONException { + super.initElements(); + setLocalId(0); + localIdMap.add(this); + + final List openList = new ArrayList<>(getChildren()); + + for (ConceptTreeConnector con : getConnectors()) { + if (con.getCondition() == null) { + continue; + } + + con.getCondition().init(this); + } + + for (int i = 0; i < openList.size(); i++) { + final ConceptTreeChild ctc = openList.get(i); + + try { + ctc.setLocalId(localIdMap.size()); + localIdMap.add(ctc); + ctc.setDepth(ctc.getParent().getDepth() + 1); + + ctc.init(); + + } + catch (Exception e) { + throw new RuntimeException("Error trying to consolidate the node " + ctc.getLabel() + " in " + getLabel(), e); + } + + openList.addAll((openList.get(i)).getChildren()); + } + } @Override @JsonIgnore @@ -184,29 +232,32 @@ public int countElements() { return nChildren = 1 + (int) getAllChildren().count(); } - public void initializeIdCache(Import importId) { - caches.computeIfAbsent(importId, id -> new ConceptTreeCache(this)); - } + public ConceptElement>> findById(ConceptElementId id) { + List parts = new ArrayList<>(); + id.collectComponents(parts); + final ConceptId conceptId = getId(); + List components = conceptId.getComponents(); - public void removeImportCache(Import imp) { - caches.remove(imp); - } + // Check if dataset and concept name match + if (!(parts.get(0).equals(components.get(0)) && parts.get(1).equals(components.get(1)))) { + return null; + } - /** - * Method to get the element of this concept tree that has the specified local ID. - * This should only be used by the query engine itself as an index. - * - * @param ids the local id array to look for - * @return the element matching the most specific local id in the array - */ - public ConceptTreeNode getElementByLocalIdPath(int[] ids) { - final int mostSpecific = ids[ids.length - 1]; - return getElementByLocalId(mostSpecific); - } + if (parts.size() == 2) { + // Perfect match <3 + return this; + } - public ConceptTreeNode getElementByLocalId(int localId) { - return localIdMap.get(localId); + for (ConceptTreeChild child : children) { + if (parts.get(2).equals(child.getName())) { + final List subParts = parts.size() > 3 ? 
parts.subList(3, parts.size()) : Collections.emptyList(); + return child.findByParts(subParts); + } + } + + return null; } - public static class TreeConceptInitializer extends Initializing.Converter {} + public static class Initializer extends Initializing.Converter { + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java b/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java index a5f9f733c8..45a4c0fe53 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java +++ b/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java @@ -4,6 +4,7 @@ import c10n.annotations.En; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.query.entity.Entity; public interface ErrorMessages { @@ -59,4 +60,8 @@ public interface ErrorMessages { @En("Something went wrong while querying the database: ${0}.") @De("Etwas ist beim Anfragen des Servers fehlgeschlagen: ${0}.") String sqlError(Throwable error); + + @En("The id '${0}' could not be resolved'.") + @De("Die id '${0}' konnte nicht aufgelöst werden.") + String idUnresolvable(Id id); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java b/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java index 530aa94187..cb0e75af50 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java @@ -6,12 +6,13 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.function.IntFunction; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Import; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; @@ -27,6 +28,9 @@ import com.bakdata.conquery.models.identifiable.IdentifiableImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.preproc.PreprocessedData; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -35,8 +39,6 @@ import io.dropwizard.validation.ValidationMethod; import it.unimi.dsi.fastutil.objects.Object2IntMap; import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap; -import jakarta.validation.constraints.Min; -import jakarta.validation.constraints.NotNull; import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.Getter; @@ -56,31 +58,31 @@ @ToString(onlyExplicitlyIncluded = true, callSuper = true) @AllArgsConstructor @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}, access = AccessLevel.PROTECTED) - public class Bucket extends IdentifiableImpl implements NamespacedIdentifiable { 
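// Editorial illustration (not part of the patch): in the id-based Bucket below, getTable() now
// returns a TableId, so building a name -> value map per event would otherwise resolve the table
// every time. The mapCalculator() introduced in this hunk resolves the columns once and hands out
// an IntFunction instead; a hypothetical caller:
//
//   IntFunction<Map<String, Object>> rowOf = bucket.mapCalculator();
//   Map<String, Object> row = rowOf.apply(event); // column name -> script value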
@Min(0) private final int bucket; - - @ToString.Include - @JsonManagedReference - @Setter(AccessLevel.PROTECTED) - private ColumnStore[] stores; - /** * start of each Entity in {@code stores}. */ private final Object2IntMap start; - /** * Number of events per Entity in {@code stores}. */ private final Object2IntMap ends; - private final int numberOfEvents; + private final ImportId imp; + @ToString.Include + @JsonManagedReference + @Setter(AccessLevel.PROTECTED) + private ColumnStore[] stores; - @NsIdRef - private final Import imp; + public static Bucket fromPreprocessed(Table table, PreprocessedData container, Import imp) { + final ColumnStore[] storesSorted = sortColumns(table, container.getStores()); + final int numberOfEvents = container.getEnds().values().stream().mapToInt(i -> i).max().orElse(0); + + return new Bucket(container.getBucketId(), new Object2IntOpenHashMap<>(container.getStarts()), new Object2IntOpenHashMap<>(container.getEnds()), numberOfEvents, imp.getId(), storesSorted); + } private static ColumnStore[] sortColumns(Table table, Map stores) { return Arrays.stream(table.getColumns()) @@ -90,28 +92,15 @@ private static ColumnStore[] sortColumns(Table table, Map s .toArray(ColumnStore[]::new); } - public static Bucket fromPreprocessed(Table table, PreprocessedData container, Import imp) { - final ColumnStore[] storesSorted = sortColumns(table, container.getStores()); - final int numberOfEvents = container.getEnds().values().stream().mapToInt(i -> i).max().orElse(0); - - return new Bucket(container.getBucketId(), storesSorted, new Object2IntOpenHashMap<>(container.getStarts()), new Object2IntOpenHashMap<>(container.getEnds()),numberOfEvents, imp); - } - @JsonIgnore @ValidationMethod(message = "Number of events does not match the length of some stores.") public boolean isNumberOfEventsEqualsNumberOfStores() { return Arrays.stream(stores).allMatch(columnStore -> columnStore.getLines() == getNumberOfEvents()); } - - @JsonIgnore - public Table getTable() { - return imp.getTable(); - } - @Override public BucketId createId() { - return new BucketId(imp.getId(), bucket); + return new BucketId(imp, bucket); } /** @@ -126,10 +115,9 @@ public boolean containsEntity(String entity) { } public int getEntityStart(String entityId) { - return start.get(entityId); + return start.getInt(entityId); } - public int getEntityEnd(String entityId) { return ends.getInt(entityId); } @@ -138,14 +126,14 @@ public final boolean has(int event, Column column) { return getStore(column).has(event); } - public String getString(int event, @NotNull Column column) { - return ((StringStore) getStore(column)).getString(event); - } - public ColumnStore getStore(@NotNull Column column) { return stores[column.getPosition()]; } + public String getString(int event, @NotNull Column column) { + return ((StringStore) getStore(column)).getString(event); + } + public long getInteger(int event, @NotNull Column column) { return ((IntegerStore) getStore(column)).getInteger(event); } @@ -162,7 +150,7 @@ public BigDecimal getDecimal(int event, @NotNull Column column) { return ((DecimalStore) getStore(column)).getDecimal(event); } - public long getMoney(int event, @NotNull Column column) { + public BigDecimal getMoney(int event, @NotNull Column column) { return ((MoneyStore) getStore(column)).getMoney(event); } @@ -170,10 +158,6 @@ public int getDate(int event, @NotNull Column column) { return ((DateStore) getStore(column)).getDate(event); } - public CDateRange getDateRange(int event, Column column) { - return ((DateRangeStore) 
getStore(column)).getDateRange(event); - } - public boolean eventIsContainedIn(int event, ValidityDate validityDate, CDateSet dateRanges) { final CDateRange dateRange = validityDate.getValidityDate(event, this); @@ -192,11 +176,26 @@ public CDateRange getAsDateRange(int event, Column column) { }; } + public CDateRange getDateRange(int event, Column column) { + return ((DateRangeStore) getStore(column)).getDateRange(event); + } + public Object createScriptValue(int event, @NotNull Column column) { return getStore(column).createScriptValue(event); } - public Map calculateMap(int event) { + public IntFunction> mapCalculator(){ + Column[] columns = getTable().resolve().getColumns(); + + return event -> calculateMap(event, stores, columns); + } + + @JsonIgnore + public TableId getTable() { + return imp.getTable(); + } + + private static Map calculateMap(int event, ColumnStore[] stores, Column[] columns) { final Map out = new HashMap<>(stores.length); for (int i = 0; i < stores.length; i++) { @@ -204,7 +203,7 @@ public Map calculateMap(int event) { if (!store.has(event)) { continue; } - out.put(getTable().getColumns()[i].getName(), store.createScriptValue(event)); + out.put(columns[i].getName(), store.createScriptValue(event)); } return out; @@ -212,11 +211,11 @@ public Map calculateMap(int event) { @JsonIgnore @Override - public Dataset getDataset() { + public DatasetId getDataset() { return getTable().getDataset(); } public ColumnStore getStore(@NotNull String storeName) { - return getStore(getTable().getColumnByName(storeName)); + return getStore(getTable().resolve().getColumnByName(storeName)); } } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java b/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java index d7f2a99f4a..1a9023a301 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java @@ -8,17 +8,22 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import com.bakdata.conquery.io.storage.WorkerStorage; import com.bakdata.conquery.models.common.CDateSet; -import com.bakdata.conquery.models.datasets.Import; -import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeCache; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.jobs.CalculateCBlocksJob; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.query.entity.Entity; @@ -57,34 +62,35 @@ public class BucketManager { *

* Connector -> Bucket -> [BucketId -> CBlock] */ - private final Map>> connectorToCblocks; + private final Map>> connectorToCblocks; /** * Table -> BucketN -> [Buckets] */ - private final Map>> tableToBuckets; + private final Map>> tableToBuckets; @Getter private final int entityBucketSize; + private final Map> treeCaches = new ConcurrentHashMap<>(); + public static BucketManager create(Worker worker, WorkerStorage storage, int entityBucketSize) { - final Map>> connectorCBlocks = new HashMap<>(); - final Map>> tableBuckets = new HashMap<>(); + final Map>> connectorCBlocks = new HashMap<>(); + final Map>> tableBuckets = new HashMap<>(); final Object2IntMap entity2Bucket = new Object2IntOpenHashMap<>(); final IntArraySet assignedBucketNumbers = worker.getInfo().getIncludedBuckets(); log.trace("Trying to load these buckets that map to: {}", assignedBucketNumbers); - for (Bucket bucket : storage.getAllBuckets()) { + storage.getAllBuckets().forEach(bucket -> { + log.trace("Processing bucket {}", bucket.getId()); if (!assignedBucketNumbers.contains(bucket.getBucket())) { log.warn("Found Bucket[{}] in Storage that does not belong to this Worker according to the Worker information.", bucket.getId()); } registerBucket(bucket, entity2Bucket, tableBuckets); - } + }); - for (CBlock cBlock : storage.getAllCBlocks()) { - registerCBlock(cBlock, connectorCBlocks); - } + storage.getAllCBlocks().forEach(cBlock -> registerCBlock(cBlock, connectorCBlocks)); return new BucketManager(worker.getJobManager(), storage, worker, entity2Bucket, connectorCBlocks, tableBuckets, entityBucketSize); } @@ -92,10 +98,10 @@ public static BucketManager create(Worker worker, WorkerStorage storage, int ent /** * register entities, and create query specific indices for bucket */ - private static void registerBucket(Bucket bucket, Object2IntMap entity2Bucket, Map>> tableBuckets) { + private static void registerBucket(Bucket bucket, Object2IntMap entity2Bucket, Map>> tableBuckets) { for (String entity : bucket.entities()) { - if(entity2Bucket.containsKey(entity)){ + if (entity2Bucket.containsKey(entity)) { // This is an unrecoverable state, but should not happen in practice. Just a precaution. assert entity2Bucket.getInt(entity) == bucket.getBucket(); continue; @@ -104,19 +110,18 @@ private static void registerBucket(Bucket bucket, Object2IntMap entity2B entity2Bucket.put(entity, bucket.getBucket()); } - tableBuckets - .computeIfAbsent(bucket.getTable(), id -> new Int2ObjectAVLTreeMap<>()) - .computeIfAbsent(bucket.getBucket(), n -> new ArrayList<>()) - .add(bucket); + tableBuckets.computeIfAbsent(bucket.getTable(), id -> new Int2ObjectAVLTreeMap<>()) + .computeIfAbsent(bucket.getBucket(), n -> new ArrayList<>()) + .add(bucket.getId()); } /** * Assert validity of operation, and create index for CBlocks. 
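// Editorial illustration (not part of the patch): the indices kept by this class are now keyed and
// valued by ids (connector -> bucket number -> BucketId -> CBlockId), and the objects are resolved
// only at the point of use. A hypothetical lookup, using only calls visible in this diff; the
// generic parameters are inferred, since they are not rendered here:
//
//   Map<BucketId, CBlockId> cblocks = connectorToCblocks
//           .getOrDefault(connectorId, Int2ObjectMaps.emptyMap())
//           .getOrDefault(bucketNumber, Collections.emptyMap());
//   CBlock cBlock = cblocks.get(bucketId).resolve(); // resolve on demand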
*/ - private static void registerCBlock(CBlock cBlock, Map>> connectorCBlocks) { + private static void registerCBlock(CBlock cBlock, Map>> connectorCBlocks) { connectorCBlocks.computeIfAbsent(cBlock.getConnector(), connectorId -> new Int2ObjectAVLTreeMap<>()) .computeIfAbsent(cBlock.getBucket().getBucket(), bucketId -> new HashMap<>(3)) - .put(cBlock.getBucket(), cBlock); + .put(cBlock.getBucket(), cBlock.getId()); } @@ -124,33 +129,24 @@ private static void registerCBlock(CBlock cBlock, Map> allConcepts = storage.getAllConcepts(); + storage.getAllConcepts().filter(TreeConcept.class::isInstance).flatMap(concept -> concept.getConnectors().stream().map(ConceptTreeConnector.class::cast)) - log.info("BEGIN full update for {} concepts.", allConcepts.size()); - - for (Concept c : allConcepts) { - if (!(c instanceof TreeConcept)) { - continue; - } - for (ConceptTreeConnector con : ((TreeConcept) c).getConnectors()) { - for (Bucket bucket : storage.getAllBuckets()) { + .forEach(connector -> storage.getAllBucketIds().forEach(bucketId -> { - final CBlockId cBlockId = new CBlockId(bucket.getId(), con.getId()); + final CBlockId cBlockId = new CBlockId(bucketId, connector.getId()); - if (!con.getTable().equals(bucket.getTable())) { - continue; - } + if (!connector.getResolvedTableId().equals(bucketId.getImp().getTable())) { + return; + } - if (hasCBlock(cBlockId)) { - log.trace("Skip calculation of CBlock[{}], because it was loaded from the storage.", cBlockId); - continue; - } + if (hasCBlock(cBlockId)) { + log.trace("Skip calculation of CBlock[{}], because it was loaded from the storage.", cBlockId); + return; + } - log.trace("CBlock[{}] missing in Storage. Queuing recalculation", cBlockId); - job.addCBlock(bucket, con); - } - } - } + log.warn("CBlock[{}] missing in Storage. 
Queuing recalculation", cBlockId); + job.addCBlock(bucketId.resolve(), connector); + })); if (!job.isEmpty()) { jobManager.addSlowJob(job); @@ -171,120 +167,92 @@ public void addBucket(Bucket bucket) { final CalculateCBlocksJob job = new CalculateCBlocksJob(storage, this, worker.getJobsExecutorService()); - for (Concept concept : storage.getAllConcepts()) { - if (!(concept instanceof TreeConcept)) { - continue; - } - for (ConceptTreeConnector connector : ((TreeConcept) concept).getConnectors()) { - if (!connector.getTable().equals(bucket.getTable())) { - continue; - } - - final CBlockId cBlockId = new CBlockId(bucket.getId(), connector.getId()); - - - if (hasCBlock(cBlockId)) { - continue; - } - - job.addCBlock(bucket, connector); - - } - } + storage.getAllConcepts() + .filter(TreeConcept.class::isInstance) + .flatMap(concept -> concept.getConnectors().stream()) + .filter(connector -> connector.getResolvedTableId().equals(bucket.getTable())) + .filter(connector -> !hasCBlock(new CBlockId(bucket.getId(), connector.getId()))) + .forEach(connector -> job.addCBlock(bucket, (ConceptTreeConnector) connector)); jobManager.addSlowJob(job); } - public void removeTable(Table table) { - final Int2ObjectMap> removed = tableToBuckets.remove(table); + public void removeTable(TableId table) { + final Int2ObjectMap> removed = tableToBuckets.remove(table); // It's possible no buckets were registered yet if (removed != null) { - removed.values() - .stream() - .flatMap(List::stream) - .forEach(this::removeBucket); + removed.values().stream().flatMap(List::stream).forEach(this::removeBucket); } - storage.removeTable(table.getId()); + storage.removeTable(table); } - public void removeBucket(Bucket bucket) { - storage.getAllCBlocks() - .stream() - .filter(cblock -> cblock.getBucket().equals(bucket)) - .forEach(this::removeCBlock); + public void removeBucket(BucketId bucket) { + storage.getAllCBlockIds().filter(cblock -> cblock.getBucket().equals(bucket)).forEach(this::removeCBlock); - tableToBuckets.getOrDefault(bucket.getTable(), Int2ObjectMaps.emptyMap()) - .getOrDefault(bucket.getBucket(), Collections.emptyList()) - .remove(bucket); + tableToBuckets.getOrDefault(bucket.getImp().getTable(), Int2ObjectMaps.emptyMap()).getOrDefault(bucket.getBucket(), Collections.emptyList()).remove(bucket); - storage.removeBucket(bucket.getId()); + storage.removeBucket(bucket); } - private void removeCBlock(CBlock cBlock) { + private void removeCBlock(CBlockId cBlock) { connectorToCblocks.getOrDefault(cBlock.getConnector(), Int2ObjectMaps.emptyMap()) .getOrDefault(cBlock.getBucket().getBucket(), Collections.emptyMap()) .values() .remove(cBlock); - storage.removeCBlock(cBlock.getId()); + storage.removeCBlock(cBlock); } public Set getEntities() { return Collections.unmodifiableSet(entity2Bucket.keySet()); } - private int getBucket(String id) { - return entity2Bucket.getInt(id); - } - /** * Remove all buckets comprising the import. Which will in-turn remove all CBLocks. 
*/ - public void removeImport(Import imp) { - storage.getAllBuckets() - .stream() - .filter(bucket -> bucket.getImp().equals(imp)) - .forEach(this::removeBucket); + public void removeImport(ImportId imp) { + storage.getAllBucketIds().filter(bucket -> bucket.getImp().equals(imp)).forEach(this::removeBucket); - for (Concept concept : storage.getAllConcepts()) { - if (!(concept instanceof TreeConcept)) { - continue; - } + storage.getAllConcepts() + .filter(TreeConcept.class::isInstance) + .forEach(concept -> removeConceptTreeCacheByImport(concept.getId(), imp)); - ((TreeConcept) concept).removeImportCache(imp); - } - storage.removeImport(imp.getId()); + storage.removeImport(imp); } - public List getEntityBucketsForTable(Entity entity, Table table) { + public List getEntityBucketsForTable(Entity entity, TableId table) { final int bucketId = getBucket(entity.getId()); - return tableToBuckets.getOrDefault(table, Int2ObjectMaps.emptyMap()) - .getOrDefault(bucketId, Collections.emptyList()); + return tableToBuckets.getOrDefault(table, Int2ObjectMaps.emptyMap()).getOrDefault(bucketId, Collections.emptyList()); + } + + private int getBucket(String id) { + return entity2Bucket.getInt(id); } /** * Collects all Entites, that have any of the concepts on the connectors in a specific time. */ - public Set getEntitiesWithConcepts(Collection> concepts, Set connectors, CDateSet restriction) { + public Set getEntitiesWithConcepts(Collection> concepts, Set connectors, CDateSet restriction) { final long requiredBits = ConceptNode.calculateBitMask(concepts); final Set out = new HashSet<>(); - for (Connector connector : connectors) { + for (ConnectorId connector : connectors) { if (!connectorToCblocks.containsKey(connector)) { continue; } - for (Map bucketCBlockMap : connectorToCblocks.get(connector).values()) { - for (CBlock cblock : bucketCBlockMap.values()) { - for (String entity : cblock.getBucket().entities()) { + for (Map bucketCBlockMap : connectorToCblocks.get(connector).values()) { + for (CBlockId cBlockId : bucketCBlockMap.values()) { + for (String entity : cBlockId.getBucket().resolve().entities()) { - if (cblock.isConceptIncluded(entity, requiredBits) && restriction.intersects(cblock.getEntityDateRange(entity))) { + CBlock cBlock = cBlockId.resolve(); + if (cBlock.isConceptIncluded(entity, requiredBits) && restriction.intersects(cBlock.getEntityDateRange(entity))) { out.add(entity); } } @@ -295,20 +263,18 @@ public Set getEntitiesWithConcepts(Collection> concept return out; } - public Map getEntityCBlocksForConnector(Entity entity, Connector connector) { + public Map getEntityCBlocksForConnector(Entity entity, ConnectorId connector) { final int bucketId = getBucket(entity.getId()); - return connectorToCblocks.getOrDefault(connector, Int2ObjectMaps.emptyMap()) - .getOrDefault(bucketId, Collections.emptyMap()); + return connectorToCblocks.getOrDefault(connector, Int2ObjectMaps.emptyMap()).getOrDefault(bucketId, Collections.emptyMap()); } - public boolean hasEntityCBlocksForConnector(Entity entity, Connector connector) { + public boolean hasEntityCBlocksForConnector(Entity entity, ConnectorId connector) { final int bucketId = getBucket(entity.getId()); - final Map cblocks = connectorToCblocks.getOrDefault(connector, Int2ObjectMaps.emptyMap()) - .getOrDefault(bucketId, Collections.emptyMap()); + final Map cblocks = connectorToCblocks.getOrDefault(connector, Int2ObjectMaps.emptyMap()).getOrDefault(bucketId, Collections.emptyMap()); - for (Bucket bucket : cblocks.keySet()) { - if 
(bucket.containsEntity(entity.getId())) { + for (BucketId bucket : cblocks.keySet()) { + if (bucket.resolve().containsEntity(entity.getId())) { return true; } } @@ -329,18 +295,16 @@ public void removeConcept(Concept concept) { // Just drop all CBlocks at once for the connectors for (Connector connector : concept.getConnectors()) { - final Int2ObjectMap> removed = connectorToCblocks.remove(connector); + final Int2ObjectMap> removed = connectorToCblocks.remove(connector.getId()); // It's possible that no data has been loaded yet if (removed != null) { - removed.values().stream() - .map(Map::values) - .flatMap(Collection::stream) - .map(CBlock::getId) - .forEach(storage::removeCBlock); + removed.values().stream().map(Map::values).flatMap(Collection::stream).forEach(storage::removeCBlock); } } + removeConceptTreeCacheByConcept(concept.getId()); + storage.removeConcept(concept.getId()); } @@ -355,27 +319,25 @@ public void addConcept(Concept concept) { for (ConceptTreeConnector connector : ((TreeConcept) concept).getConnectors()) { - for (Bucket bucket : storage.getAllBuckets()) { - if (!bucket.getTable().equals(connector.getTable())) { - continue; - } - - final CBlockId cBlockId = new CBlockId(bucket.getId(), connector.getId()); - - if (hasCBlock(cBlockId)) { - continue; - } - - job.addCBlock(bucket, connector); - } + storage.getAllBuckets() + .filter(bucket -> bucket.getTable().equals(connector.getResolvedTableId())) + .filter(bucket -> !hasCBlock(new CBlockId(bucket.getId(), connector.getId()))) + .forEach(bucket -> job.addCBlock(bucket, connector)); } + jobManager.addSlowJob(job); + } - if(job.isEmpty()){ - return; - } - jobManager.addSlowJob(job); + public ConceptTreeCache getConceptTreeCache(TreeConcept concept, ImportId imp) { + return treeCaches.computeIfAbsent(concept.getId(), (ignored) -> new ConcurrentHashMap<>()).computeIfAbsent(imp, (ignored) -> new ConceptTreeCache(concept)); } + public void removeConceptTreeCacheByImport(ConceptId concept, ImportId imp) { + treeCaches.get(concept).remove(imp); + } + + public void removeConceptTreeCacheByConcept(ConceptId concept) { + treeCaches.remove(concept); + } } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/CBlock.java b/backend/src/main/java/com/bakdata/conquery/models/events/CBlock.java index cfc5e0182b..c973350b87 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/CBlock.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/CBlock.java @@ -4,32 +4,31 @@ import java.util.Collection; import java.util.HashMap; import java.util.Map; +import java.util.function.IntFunction; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.jackson.serializer.CBlockDeserializer; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; -import com.bakdata.conquery.models.datasets.concepts.conditions.CTCondition; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeCache; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeChild; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; 
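/*
 * Illustrative sketch only: the shape of the two-level, lazily populated
 * tree-cache map introduced in BucketManager above. The class and parameter
 * names (TwoLevelCacheSketch, outer, inner, factory) are hypothetical
 * placeholders, not part of this change set.
 */
class TwoLevelCacheSketch<K1, K2, V> {

	private final java.util.concurrent.ConcurrentMap<K1, java.util.concurrent.ConcurrentMap<K2, V>> caches =
			new java.util.concurrent.ConcurrentHashMap<>();

	V getOrCreate(K1 outer, K2 inner, java.util.function.Function<K2, V> factory) {
		// computeIfAbsent on both levels keeps creation lazy and thread-safe per entry
		return caches.computeIfAbsent(outer, ignored -> new java.util.concurrent.ConcurrentHashMap<>())
					 .computeIfAbsent(inner, factory);
	}

	void removeInner(K1 outer, K2 inner) {
		final java.util.concurrent.ConcurrentMap<K2, V> level = caches.get(outer);
		if (level != null) {
			level.remove(inner);
		}
	}

	void removeOuter(K1 outer) {
		caches.remove(outer);
	}
}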
import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; -import com.bakdata.conquery.models.events.stores.root.StringStore; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; import com.bakdata.conquery.models.identifiable.IdentifiableImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.query.queryplan.specific.ConceptNode; import com.bakdata.conquery.util.CalculatedValue; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Setter; @@ -51,12 +50,10 @@ public class CBlock extends IdentifiableImpl implements NamespacedIdentifiable { //TODO Index per StringStore for isOfInterest @ToString.Include - @NsIdRef - private final Bucket bucket; + private final BucketId bucket; @NotNull - @NsIdRef @ToString.Include - private final ConceptTreeConnector connector; + private final ConnectorId connector; /** * We leverage the fact that a Bucket contains entities from bucketSize * {@link Bucket#getBucket()} to (1 + bucketSize) * {@link Bucket#getBucket()} - 1 to layout our internal structure. * This is maps the first Entities entry in this bucket to 0. @@ -92,108 +89,29 @@ public static long estimateMemoryBytes(long entities, long entries, double depth ); } - public static CBlock createCBlock(ConceptTreeConnector connector, Bucket bucket, int bucketSize) { + public static CBlock createCBlock(ConceptTreeConnector connector, Bucket bucket, BucketManager bucketManager) { + final int bucketSize = bucketManager.getEntityBucketSize(); final int root = bucket.getBucket() * bucketSize; - final int[][] mostSpecificChildren = calculateSpecificChildrenPaths(bucket, connector); + final int[][] mostSpecificChildren = calculateSpecificChildrenPaths(bucket, connector, bucketManager); //TODO Object2LongMap final Map includedConcepts = calculateConceptElementPathBloomFilter(bucketSize, bucket, mostSpecificChildren); final Map entitySpans = calculateEntityDateIndices(bucket); - return new CBlock(bucket, connector, root, includedConcepts, entitySpans, mostSpecificChildren); + final CBlock cBlock = new CBlock(bucket.getId(), connector.getId(), root, includedConcepts, entitySpans, mostSpecificChildren); + return cBlock; } /** * Calculates the path for each event from the root of the {@link TreeConcept} to the most specific {@link ConceptTreeChild} * denoted by the individual {@link ConceptTreeChild#getPrefix()}. */ - private static int[][] calculateSpecificChildrenPaths(Bucket bucket, ConceptTreeConnector connector) { - - final Column column; - - final TreeConcept treeConcept = connector.getConcept(); - - // If we have a column, and it is of string-type, we initialize a cache. - if (connector.getColumn() != null && bucket.getStore(connector.getColumn()) instanceof StringStore) { - - column = connector.getColumn(); - - treeConcept.initializeIdCache(bucket.getImp()); - } - // No column only possible if we have just one tree element! 
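/*
 * Hedged sketch of the per-event resolution contract described above:
 * events rejected by the connector condition stay NOT_CONTAINED, values the
 * tree cannot resolve fall back to the root prefix, and everything else gets
 * the prefix of the most specific child. The plain String[] column, the
 * predicate and the lookup function below are simplified stand-ins, not the
 * real Bucket/TreeConcept API.
 */
class PathResolutionSketch {

	static final int[] NOT_CONTAINED = {-1};

	static int[][] resolvePaths(String[] values, int[] rootPrefix,
								java.util.function.Predicate<String> condition,
								java.util.function.Function<String, int[]> findMostSpecificChild) {
		final int[][] paths = new int[values.length][];
		java.util.Arrays.fill(paths, NOT_CONTAINED);

		for (int event = 0; event < values.length; event++) {
			final String value = values[event];

			if (!condition.test(value)) {
				// filtered events stay NOT_CONTAINED
				continue;
			}

			final int[] childPrefix = findMostSpecificChild.apply(value);

			// unresolved values fall back to the root of the tree
			paths[event] = childPrefix == null ? rootPrefix : childPrefix;
		}

		return paths;
	}
}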
- else if (treeConcept.countElements() == 1) { - column = null; - } - else { - throw new IllegalStateException(String.format("Cannot build tree over Connector[%s] without Column", connector.getId())); - } - - final CTCondition connectorCondition = connector.getCondition(); - - final int[][] mostSpecificChildren = new int[bucket.getNumberOfEvents()][]; - - Arrays.fill(mostSpecificChildren, ConceptTreeConnector.NOT_CONTAINED); - - final ConceptTreeCache cache = treeConcept.getCache(bucket.getImp()); - - for (int event = 0; event < bucket.getNumberOfEvents(); event++) { - - - try { - String stringValue = ""; - - final boolean has = column != null && bucket.has(event, column); - - if (column != null && has) { - stringValue = bucket.getString(event, column); - } - - // Events can also be filtered, allowing a single table to be used by multiple connectors. - // Lazy evaluation of map to avoid allocations if possible. - // Copy event for closure. - final int _event = event; - final CalculatedValue> rowMap = new CalculatedValue<>(() -> bucket.calculateMap(_event)); - - if (connectorCondition != null && !connectorCondition.matches(stringValue, rowMap)) { - mostSpecificChildren[event] = Connector.NOT_CONTAINED; - continue; - } - - // Events without values are assigned to the root - if (column != null && !has) { - mostSpecificChildren[event] = treeConcept.getPrefix(); - continue; - } - - final ConceptElement child = cache == null - ? treeConcept.findMostSpecificChild(stringValue, rowMap) - : cache.findMostSpecificChild(stringValue, rowMap); - - // All unresolved elements resolve to the root. - if (child == null) { - mostSpecificChildren[event] = treeConcept.getPrefix(); - continue; - } - - // put path into event - mostSpecificChildren[event] = child.getPrefix(); - } - catch (ConceptConfigurationException ex) { - log.error("Failed to resolve event {}-{} against concept {}", bucket, event, treeConcept, ex); - } - } - - if (cache != null) { - log.trace( - "Hits: {}, Misses: {}, Hits/Misses: {}, %Hits: {} (Up to now)", - cache.getHits(), - cache.getMisses(), - (double) cache.getHits() / cache.getMisses(), - (double) cache.getHits() / (cache.getHits() + cache.getMisses()) - ); + private static int[][] calculateSpecificChildrenPaths(Bucket bucket, ConceptTreeConnector connector, BucketManager bucketManager) { + if (connector.getColumn() == null) { + return calculateSpecificChildrenPathsWithoutColumn(bucket, connector); } - return mostSpecificChildren; + return calculateSpecificChildrenPathsWithColumn(bucket, connector, bucketManager); } /** @@ -225,21 +143,6 @@ private static Map calculateConceptElementPathBloomFilter(int buck return includedConcepts; } - /** - * Calculates the bloom filter from the precomputed path to the most specific {@link ConceptTreeChild}. - */ - public static long calculateBitMask(int pathIndex, int[] mostSpecificChild) { - - for (int index = pathIndex; index > 0; index--) { - // TODO how could they be > Long.SIZE? - if (mostSpecificChild[index] < Long.SIZE) { - return 1L << mostSpecificChild[index]; - } - } - - return 0; - } - /** * For every included entity, calculate min and max and store them as statistics in the CBlock. 
* @@ -248,7 +151,7 @@ public static long calculateBitMask(int pathIndex, int[] mostSpecificChild) { private static Map calculateEntityDateIndices(Bucket bucket) { final Map spans = new HashMap<>(); - final Table table = bucket.getTable(); + final Table table = bucket.getTable().resolve(); for (Column column : table.getColumns()) { @@ -297,6 +200,126 @@ private static Map calculateEntityDateIndices(Bucket bucket) return spans; } + private static int[][] calculateSpecificChildrenPathsWithoutColumn(Bucket bucket, Connector connector) { + + final int[][] mostSpecificChildren = new int[bucket.getNumberOfEvents()][]; + + // All elements resolve to the root, unless they are filtered out by the condition. + Arrays.fill(mostSpecificChildren, connector.getConcept().getPrefix()); + + if (connector.getCondition() == null) { + return mostSpecificChildren; + } + + final IntFunction> mapCalculator = bucket.mapCalculator(); + + // Since the connector has no column, there is no real columnValue. + // All downstream code assumes the presence of a column value, so we just pass an empty string to avoid exceptions. + final String columnValue = ""; + + for (int event = 0; event < bucket.getNumberOfEvents(); event++) { + try { + + // Events can also be filtered, allowing a single table to be used by multiple connectors. + // Lazy evaluation of map to avoid allocations if possible. + // Copy event for closure. + final int _event = event; + final CalculatedValue> rowMap = new CalculatedValue<>(() -> mapCalculator.apply(_event)); + + if (connector.getCondition().matches(columnValue, rowMap)) { + // by default initialized to the only element, the root. + continue; + } + + mostSpecificChildren[event] = Connector.NOT_CONTAINED; + } + catch (ConceptConfigurationException ex) { + log.error("Failed to evaluate event {}, row {} against connector {}", bucket.getId(), event, connector.getId(), ex); + } + } + + return mostSpecificChildren; + } + + /** + * Calculates the path for each event from the root of the {@link TreeConcept} to the most specific {@link ConceptTreeChild} + * denoted by the individual {@link ConceptTreeChild#getPrefix()}. + */ + private static int[][] calculateSpecificChildrenPathsWithColumn(Bucket bucket, ConceptTreeConnector connector, BucketManager bucketManager) { + + final Column column = connector.getColumn().resolve(); + + final ConceptTreeCache cache = bucketManager.getConceptTreeCache(connector.getConcept(), bucket.getImp()); + final int[] rootPrefix = connector.getConcept().getPrefix(); + + final IntFunction> mapCalculator = bucket.mapCalculator(); + + final int[][] mostSpecificChildren = new int[bucket.getNumberOfEvents()][]; + Arrays.fill(mostSpecificChildren, ConceptTreeConnector.NOT_CONTAINED); + + + for (int event = 0; event < bucket.getNumberOfEvents(); event++) { + try { + + if (!bucket.has(event, column)) { + continue; + } + + final String columnValue = bucket.getString(event, column); + + // Events can also be filtered, allowing a single table to be used by multiple connectors. + // Lazy evaluation of map to avoid allocations if possible. + // Copy event for closure. + final int _event = event; + final CalculatedValue> rowMap = new CalculatedValue<>(() -> mapCalculator.apply(_event)); + + if (connector.getCondition() != null && !connector.getCondition().matches(columnValue, rowMap)) { + continue; + } + + final ConceptTreeChild child = cache.findMostSpecificChild(columnValue, rowMap); + + // All unresolved elements resolve to the root. 
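/*
 * Hedged sketch of the single-bit "bloom filter" derived from a prefix path,
 * mirroring the calculateBitMask / isConceptIncluded idea in this class; the
 * class and method names below are illustrative only.
 */
class PrefixBitMaskSketch {

	static long bitMask(int pathIndex, int[] path) {
		// walk from the most specific element towards the root and use the
		// first element small enough to fit into a 64-bit mask
		for (int index = pathIndex; index > 0; index--) {
			if (path[index] < Long.SIZE) {
				return 1L << path[index];
			}
		}
		return 0L;
	}

	static boolean mightInclude(long entityBits, long requiredBits) {
		// any overlapping bit is enough; an empty requirement always matches
		return requiredBits == 0L || (entityBits & requiredBits) != 0L;
	}
}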
+ if (child == null) { + mostSpecificChildren[event] = rootPrefix; + continue; + } + + mostSpecificChildren[event] = child.getPrefix(); + } + catch (ConceptConfigurationException ex) { + log.error("Failed to resolve event {}, row {} against connector {}", bucket.getId(), event, connector.getId(), ex); + } + } + + + log.trace( + "Hits: {}, Misses: {}, Hits/Misses: {}, %Hits: {} (Up to now)", + cache.getHits(), + cache.getMisses(), + (double) cache.getHits() / cache.getMisses(), + (double) cache.getHits() / (cache.getHits() + cache.getMisses()) + ); + + + return mostSpecificChildren; + } + + /** + * Calculates the bloom filter from the precomputed path to the most specific {@link ConceptTreeChild}. + */ + public static long calculateBitMask(int pathIndex, int[] mostSpecificChild) { + + for (int index = pathIndex; index > 0; index--) { + // TODO how could they be > Long.SIZE? + if (mostSpecificChild[index] < Long.SIZE) { + return 1L << mostSpecificChild[index]; + } + } + + return 0; + } + public int[] getPathToMostSpecificChild(int event) { if (mostSpecificChildren == null) { return null; @@ -321,7 +344,7 @@ public CDateRange getEntityDateRange(String entity) { @Override @JsonIgnore public CBlockId createId() { - return new CBlockId(bucket.getId(), connector.getId()); + return new CBlockId(bucket, connector); } public boolean isConceptIncluded(String entity, long requiredBits) { @@ -329,7 +352,7 @@ public boolean isConceptIncluded(String entity, long requiredBits) { return true; } - if(!includedConceptElementsPerEntity.containsKey(entity)){ + if (!includedConceptElementsPerEntity.containsKey(entity)) { return false; } @@ -341,7 +364,7 @@ public boolean isConceptIncluded(String entity, long requiredBits) { @Override @JsonIgnore - public Dataset getDataset() { + public DatasetId getDataset() { return bucket.getDataset(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java b/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java index 5d1551cb8d..4ccf7d45f7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java @@ -2,6 +2,7 @@ import java.math.BigDecimal; import java.util.Map; +import java.util.function.IntFunction; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; @@ -24,18 +25,11 @@ public EmptyBucket() { this.setStores(new ColumnStore[0]); } - - @Override - public boolean eventIsContainedIn(int event, ValidityDate column, CDateSet dateRanges) { - return false; - } - @Override public boolean containsEntity(String entity) { return false; } - @Override public int getEntityStart(String entityId) { throw new IllegalStateException("ALL_IDS Bucket does not do anything"); @@ -46,7 +40,6 @@ public int getEntityEnd(String entityId) { throw new IllegalStateException("ALL_IDS Bucket does not do anything"); } - @Override public String getString(int event, Column column) { throw new IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); @@ -73,7 +66,7 @@ public BigDecimal getDecimal(int event, Column column) { } @Override - public long getMoney(int event, Column column) { + public BigDecimal getMoney(int event, Column column) { throw new IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); } @@ -83,8 +76,8 @@ public int getDate(int event, Column column) { } @Override - public CDateRange getDateRange(int event, Column column) { - throw new 
IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); + public boolean eventIsContainedIn(int event, ValidityDate column, CDateSet dateRanges) { + return false; } @Override @@ -93,7 +86,12 @@ public CDateRange getAsDateRange(int event, Column column) { } @Override - public Map calculateMap(int event) { + public CDateRange getDateRange(int event, Column column) { + throw new IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); + } + + @Override + public IntFunction> mapCalculator() { throw new IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/EmptyStore.java b/backend/src/main/java/com/bakdata/conquery/models/events/EmptyStore.java index 7201ae0ac9..aa32018ac3 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/EmptyStore.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/EmptyStore.java @@ -114,12 +114,12 @@ public void setInteger(int event, long value) { } @Override - public long getMoney(int event) { - return 0; + public BigDecimal getMoney(int event) { + return BigDecimal.ZERO; } @Override - public void setMoney(int event, long money) { + public void setMoney(int event, BigDecimal money) { } diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/stores/root/MoneyStore.java b/backend/src/main/java/com/bakdata/conquery/models/events/stores/root/MoneyStore.java index 96c048f281..c520e21e5e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/stores/root/MoneyStore.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/stores/root/MoneyStore.java @@ -1,6 +1,8 @@ package com.bakdata.conquery.models.events.stores.root; +import java.math.BigDecimal; + import com.bakdata.conquery.models.events.MajorTypeId; /** @@ -9,8 +11,8 @@ */ public interface MoneyStore extends ColumnStore { - long getMoney(int event); - void setMoney(int event, long money); + BigDecimal getMoney(int event); + void setMoney(int event, BigDecimal money); @Override default Object createScriptValue(int event) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/stores/specific/MoneyIntStore.java b/backend/src/main/java/com/bakdata/conquery/models/events/stores/specific/MoneyIntStore.java index c66faaae0e..1784c7f7c5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/stores/specific/MoneyIntStore.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/stores/specific/MoneyIntStore.java @@ -1,26 +1,49 @@ package com.bakdata.conquery.models.events.stores.specific; +import java.math.BigDecimal; + import com.bakdata.conquery.io.cps.CPSType; +import com.bakdata.conquery.io.jackson.Initializing; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.stores.root.ColumnStore; import com.bakdata.conquery.models.events.stores.root.IntegerStore; import com.bakdata.conquery.models.events.stores.root.MoneyStore; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; -import lombok.Getter; -import lombok.Setter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.OptBoolean; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; import 
lombok.ToString; +import lombok.experimental.Accessors; @CPSType(base = ColumnStore.class, id = "MONEY_VARINT") -@Getter -@Setter +@Data @ToString(of = "numberType") -public class MoneyIntStore implements MoneyStore { +@NoArgsConstructor(onConstructor_ = {@JsonCreator}) +@JsonDeserialize(converter = MoneyIntStore.MoneyIntStoreInitializer.class) +public class MoneyIntStore implements MoneyStore, Initializing { + + @JsonIgnore + @JacksonInject(useInput = OptBoolean.FALSE) + @EqualsAndHashCode.Exclude + @Accessors(fluent = true) + private ConqueryConfig config; + + private IntegerStore numberType; - protected IntegerStore numberType; + @JsonProperty(required = false) + private int decimalShift = Integer.MIN_VALUE; - @JsonCreator - public MoneyIntStore(IntegerStore numberType) { - this.numberType = numberType; + + public MoneyIntStore(IntegerStore store, int decimalShift){ + this(); + this.numberType = store; + this.decimalShift = decimalShift; } @Override @@ -30,17 +53,17 @@ public int getLines() { @Override public MoneyIntStore createDescription() { - return new MoneyIntStore(numberType.createDescription()); + return new MoneyIntStore(numberType.createDescription(), getDecimalShift()); } @Override public MoneyIntStore select(int[] starts, int[] length) { - return new MoneyIntStore(numberType.select(starts, length)); + return new MoneyIntStore(numberType.select(starts, length), getDecimalShift()); } @Override - public long getMoney(int event) { - return numberType.getInteger(event); + public BigDecimal getMoney(int event) { + return BigDecimal.valueOf(numberType.getInteger(event)).movePointLeft(decimalShift); } @Override @@ -49,8 +72,8 @@ public long estimateEventBits() { } @Override - public void setMoney(int event, long value) { - numberType.setInteger(event, value); + public void setMoney(int event, BigDecimal value) { + numberType.setInteger(event, value.movePointRight(decimalShift).longValue()); } @Override @@ -66,4 +89,16 @@ public final boolean has(int event) { public void setParent(Bucket bucket) { // not used } + + @Override + public void init() { + if (decimalShift != Integer.MIN_VALUE){ + return; + } + + decimalShift = config.getFrontend().getCurrency().getDecimalScale(); + } + + public static class MoneyIntStoreInitializer extends Initializing.Converter {} + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java index baeaca2288..1923172899 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java @@ -9,6 +9,10 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; +import jakarta.validation.constraints.NotNull; +import jakarta.ws.rs.core.UriBuilder; +import jakarta.validation.constraints.NotNull; +import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.apiv1.execution.ExecutionStatus; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; @@ -17,8 +21,6 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal; import com.bakdata.conquery.io.cps.CPSBase; -import com.bakdata.conquery.io.jackson.serializer.MetaIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Group; import 
com.bakdata.conquery.models.auth.entities.Subject; @@ -50,8 +52,8 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.OptBoolean; import com.google.common.base.Preconditions; -import jakarta.validation.constraints.NotNull; -import jakarta.ws.rs.core.UriBuilder; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import lombok.AccessLevel; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -79,15 +81,13 @@ public abstract class ManagedExecution extends IdentifiableImpl datasetRegistry; - public ManagedExecution(@NonNull User owner, @NonNull Dataset dataset, MetaStorage metaStorage, DatasetRegistry datasetRegistry) { + public ManagedExecution(@NonNull UserId owner, @NonNull DatasetId dataset, MetaStorage metaStorage, DatasetRegistry datasetRegistry) { this.owner = owner; this.dataset = dataset; this.metaStorage = metaStorage; @@ -190,7 +190,7 @@ protected String makeAutoLabel(PrintSettings cfg) { @JsonIgnore public Namespace getNamespace() { - return datasetRegistry.get(getDataset().getId()); + return datasetRegistry.get(getDataset()); } protected abstract void doInitExecutable(); @@ -225,7 +225,9 @@ public ManagedExecutionId createId() { if (queryId == null) { queryId = UUID.randomUUID(); } - return new ManagedExecutionId(dataset.getId(), queryId); + ManagedExecutionId managedExecutionId = new ManagedExecutionId(dataset, queryId); + managedExecutionId.setMetaStorage(getMetaStorage()); + return managedExecutionId; } /** @@ -289,7 +291,7 @@ public void start() { /** * Renders a lightweight status with meta information about this query. Computation an size should be small for this. */ - public OverviewExecutionStatus buildStatusOverview(UriBuilder url, Subject subject) { + public OverviewExecutionStatus buildStatusOverview(Subject subject) { OverviewExecutionStatus status = new OverviewExecutionStatus(); setStatusBase(subject, status); @@ -311,7 +313,7 @@ public void setStatusBase(@NonNull Subject subject, @NonNull ExecutionStatus sta status.setContainsDates(containsDates); if (owner != null) { - User user = owner; + User user = owner.resolve(); status.setOwner(user.getId()); status.setOwnerName(user.getLabel()); } @@ -354,7 +356,7 @@ public FullExecutionStatus buildStatusFull(Subject subject, Namespace namespace) public void setStatusFull(FullExecutionStatus status, Subject subject, Namespace namespace) { setStatusBase(subject, status); - setAdditionalFieldsForStatusWithColumnDescription(subject, status, namespace); + setAdditionalFieldsForStatusWithColumnDescription(subject, status); setAdditionalFieldsForStatusWithSource(subject, status, namespace); setAdditionalFieldsForStatusWithGroups(status); setAvailableSecondaryIds(status); @@ -380,7 +382,7 @@ private void setAdditionalFieldsForStatusWithGroups(FullExecutionStatus status) * This is usually not done very often and should be reasonable fast, so don't cache this. 
*/ List permittedGroups = new ArrayList<>(); - for (Group group : getMetaStorage().getAllGroups()) { + for (Group group : getMetaStorage().getAllGroups().toList()) { for (Permission perm : group.getPermissions()) { if (perm.implies(createPermission(Ability.READ.asSet()))) { permittedGroups.add(group.getId()); @@ -391,7 +393,7 @@ private void setAdditionalFieldsForStatusWithGroups(FullExecutionStatus status) status.setGroups(permittedGroups); } - protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status, Namespace namespace) { + protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { // Implementation specific } diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/Owned.java b/backend/src/main/java/com/bakdata/conquery/models/execution/Owned.java index 1c77dd5911..ee76301a50 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/Owned.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/Owned.java @@ -1,8 +1,8 @@ package com.bakdata.conquery.models.execution; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.permissions.Authorized; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; public interface Owned extends Authorized { - User getOwner(); + UserId getOwner(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java b/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java index b626b88ddb..887dc8c871 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java @@ -44,7 +44,7 @@ default , S extends Identifiable & Shareable & Au final S shareable = (S) this; // Collect groups that do not have access to this instance and remove their probable permission - for (Group group : storage.getAllGroups()) { + for (Group group : storage.getAllGroups().toList()) { if (patch.getGroups().contains(group.getId())) { continue; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java b/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java index baa4706bf9..8e1ef40ecf 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java @@ -14,7 +14,6 @@ import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.FormConfigPatch; -import com.bakdata.conquery.io.jackson.serializer.MetaIdRef; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.Subject; @@ -30,7 +29,9 @@ import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.FormConfigId; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.util.VariableDefaultValue; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.JsonNode; import lombok.AllArgsConstructor; import lombok.Data; @@ -44,6 +45,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.ArrayUtils; import org.apache.shiro.authz.Permission; +import 
org.jetbrains.annotations.Nullable; @Slf4j @Data @@ -70,8 +72,7 @@ public class FormConfig extends IdentifiableImpl implements Sharea */ @NotNull private JsonNode values; - @MetaIdRef - private User owner; + private UserId owner; @VariableDefaultValue private LocalDateTime creationTime = LocalDateTime.now(); @@ -83,7 +84,9 @@ public FormConfig(String formType, JsonNode values) { @Override public FormConfigId createId() { - return new FormConfigId(dataset, formType, formId); + FormConfigId formConfigId = new FormConfigId(dataset, formType, formId); + formConfigId.setMetaStorage(getMetaStorage()); + return formConfigId; } /** @@ -91,33 +94,38 @@ public FormConfigId createId() { * actual form field values. */ public FormConfigOverviewRepresentation overview(MetaStorage storage, Subject subject) { - String ownerName = Optional.ofNullable(owner).map(User::getLabel).orElse(null); + String ownerName = getOwnerName(); return FormConfigOverviewRepresentation.builder() - .id(getId()) - .formType(formType) - .label(label) - .tags(tags) - .ownerName(ownerName) - .own(subject.isOwner(this)) - .createdAt(getCreationTime().atZone(ZoneId.systemDefault())) - .shared(shared) - // system? - .build(); + .id(getId()) + .formType(formType) + .label(label) + .tags(tags) + .ownerName(ownerName) + .own(subject.isOwner(this)) + .createdAt(getCreationTime().atZone(ZoneId.systemDefault())) + .shared(shared) + // system? + .build(); + } + + @JsonIgnore + private @Nullable String getOwnerName() { + return Optional.ofNullable(owner).map(UserId::resolve).map(User.class::cast).map(User::getLabel).orElse(null); } /** * Return the full representation of the configuration with the configured form fields and meta data. */ public FormConfigFullRepresentation fullRepresentation(MetaStorage storage, Subject requestingUser){ - String ownerName = Optional.ofNullable(owner).map(User::getLabel).orElse(null); + String ownerName = getOwnerName(); /* Calculate which groups can see this query. * This is usually not done very often and should be reasonable fast, so don't cache this. */ List permittedGroups = new ArrayList<>(); - for(Group group : storage.getAllGroups()) { + for (Group group : storage.getAllGroups().toList()) { for(Permission perm : group.getPermissions()) { if(perm.implies(createPermission(Ability.READ.asSet()))) { permittedGroups.add(group.getId()); @@ -129,8 +137,8 @@ public FormConfigFullRepresentation fullRepresentation(MetaStorage storage, Subj .id(getId()).formType(formType) .label(label) .tags(tags) - .ownerName(ownerName) - .own(requestingUser.isOwner(this)) + .ownerName(ownerName) + .own(requestingUser.isOwner(this)) .createdAt(getCreationTime().atZone(ZoneId.systemDefault())) .shared(shared) .groups(permittedGroups) @@ -144,6 +152,10 @@ public ConqueryPermission createPermission(Set abilities) { return FormConfigPermission.onInstance(abilities, getId()); } + public Consumer valueSetter() { + return (patch) -> setValues(patch.getValues()); + } + /** * API representation for the overview of all {@link FormConfig}s which does not * include the form fields an their values. 
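/*
 * Hedged sketch of the owner-name lookup pattern used above: the config only
 * stores the owner's id and resolves it to a display label on demand. The
 * Owner record and the resolver function are stand-ins, not the real
 * UserId/User API.
 */
class OwnerNameSketch {

	record Owner(String label) {
	}

	static <I> String ownerName(I ownerId, java.util.function.Function<I, Owner> resolver) {
		// a missing owner yields no name instead of failing
		return java.util.Optional.ofNullable(ownerId)
								 .map(resolver)
								 .map(Owner::label)
								 .orElse(null);
	}
}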
@@ -187,8 +199,4 @@ public static class FormConfigFullRepresentation extends FormConfigOverviewRepre private JsonNode values; } - public Consumer valueSetter() { - return (patch) -> setValues(patch.getValues()); - } - } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormConfigProcessor.java b/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormConfigProcessor.java index 6000c90819..ba579d6639 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormConfigProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormConfigProcessor.java @@ -85,7 +85,7 @@ public Stream getConfigsByFormType(@NonNull Su final Set formTypesFinal = requestedFormType; - final Stream stream = storage.getAllFormConfigs().stream() + final Stream stream = storage.getAllFormConfigs() .filter(c -> dataset.getId().equals(c.getDataset())) .filter(c -> formTypesFinal.contains(c.getFormType())) .filter(c -> subject.isPermitted(c, Ability.READ)); @@ -116,7 +116,7 @@ public FormConfig addConfig(Subject subject, Dataset targetDataset, FormConfigAP subject.authorize(namespace.getDataset(), Ability.READ); - final FormConfig internalConfig = config.intern(storage.getUser(subject.getId()), targetDataset.getId()); + final FormConfig internalConfig = config.intern(subject.getId(), targetDataset.getId()); // Add the config immediately to the submitted dataset addConfigToDataset(internalConfig); diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormScanner.java b/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormScanner.java index 7e544fbff9..68be676506 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormScanner.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormScanner.java @@ -11,7 +11,6 @@ import java.util.List; import java.util.Map; import java.util.Set; - import javax.annotation.Nullable; import com.bakdata.conquery.apiv1.forms.Form; @@ -43,7 +42,7 @@ public class FormScanner extends Task { * task accounts the change. */ private final ConqueryConfig config; - private List formConfigProviders = new ArrayList<>(); + private final List formConfigProviders = new ArrayList<>(); public FormScanner(ConqueryConfig config) { super("form-scanner"); diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/AbsoluteFormQuery.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/AbsoluteFormQuery.java index 264d3d1a4c..4a678ef4eb 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/AbsoluteFormQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/AbsoluteFormQuery.java @@ -17,7 +17,6 @@ import com.bakdata.conquery.models.forms.util.DateContext; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.DateAggregationMode; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; @@ -43,7 +42,7 @@ public class AbsoluteFormQuery extends Query { public static final int TIME_INDEX = 2; /** - * see {@linkplain this#getResultInfos(PrintSettings)}. + * see {@linkplain #getResultInfos()}. 
*/ public static final int FEATURES_OFFSET = 3; @@ -85,13 +84,13 @@ public AbsoluteFormQueryPlan createQueryPlan(QueryPlanContext context) { } @Override - public List getResultInfos(PrintSettings printSettings) { + public List getResultInfos() { final List resultInfos = new ArrayList<>(); - resultInfos.add(ResultHeaders.formResolutionInfo(printSettings)); - resultInfos.add(ResultHeaders.formContextInfo(printSettings)); - resultInfos.add(ResultHeaders.formDateRangeInfo(printSettings)); - resultInfos.addAll(features.getResultInfos(printSettings)); + resultInfos.add(ResultHeaders.formResolutionInfo()); + resultInfos.add(ResultHeaders.formContextInfo()); + resultInfos.add(ResultHeaders.formDateRangeInfo()); + resultInfos.addAll(features.getResultInfos()); return resultInfos; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/EntityDateQuery.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/EntityDateQuery.java index 68c6dce565..9b74f0830d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/EntityDateQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/EntityDateQuery.java @@ -16,7 +16,6 @@ import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.DateAggregationMode; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; @@ -89,13 +88,13 @@ public void resolve(QueryResolveContext context) { } @Override - public List getResultInfos(PrintSettings printSettings) { + public List getResultInfos() { List resultInfos = new ArrayList<>(); - resultInfos.add(ResultHeaders.formResolutionInfo(printSettings)); - resultInfos.add(ResultHeaders.formContextInfo(printSettings)); - resultInfos.add(ResultHeaders.formDateRangeInfo(printSettings)); + resultInfos.add(ResultHeaders.formResolutionInfo()); + resultInfos.add(ResultHeaders.formContextInfo()); + resultInfos.add(ResultHeaders.formDateRangeInfo()); - resultInfos.addAll(features.getResultInfos(printSettings)); + resultInfos.addAll(features.getResultInfos()); return resultInfos; diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java index f7d8a0dfaf..16c5f7d5f8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java @@ -24,6 +24,9 @@ import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.ExternalStateImpl; import com.bakdata.conquery.models.worker.DatasetRegistry; @@ -55,7 +58,7 @@ public class ExternalExecution extends ManagedForm { private UUID externalTaskId; - public ExternalExecution(ExternalForm form, User user, Dataset dataset, MetaStorage 
metaStorage, DatasetRegistry datasetRegistry) { + public ExternalExecution(ExternalForm form, UserId user, DatasetId dataset, MetaStorage metaStorage, DatasetRegistry datasetRegistry) { super(form, user, dataset, metaStorage, datasetRegistry); } @@ -85,7 +88,7 @@ public void start() { // Create service user final Dataset dataset = getNamespace().getDataset(); - final User originalUser = getOwner(); + final User originalUser = getOwner().resolve(); final FormBackendConfig formBackendConfig = getConfig().getPluginConfigs(FormBackendConfig.class) .filter(c -> c.supportsFormType(getSubmittedForm().getFormType())) .collect(MoreCollectors.onlyElement()); diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedForm.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedForm.java index ce763811ad..daa0761867 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedForm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedForm.java @@ -6,10 +6,10 @@ import com.bakdata.conquery.apiv1.forms.FormConfigAPI; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.worker.DatasetRegistry; @@ -46,7 +46,7 @@ public abstract class ManagedForm extends ManagedExecution { @Getter private Form submittedForm; - protected ManagedForm(F submittedForm, User owner, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + protected ManagedForm(F submittedForm, UserId owner, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { super(owner, submittedDataset, storage, datasetRegistry); this.submittedForm = submittedForm; } @@ -64,7 +64,7 @@ public void start() { .tags(this.getTags()) .values(getSubmittedForm().getValues()).build(); - final FormConfig formConfig = build.intern(getOwner(), getDataset().getId()); + final FormConfig formConfig = build.intern(getOwner(), getDataset()); getMetaStorage().addFormConfig(formConfig); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java index 78c358701d..5c8f7b9104 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java @@ -12,24 +12,21 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.InternalExecution; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.identifiable.IdMap; 
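/*
 * Hedged sketch of the store-the-id, resolve-on-demand pattern this change
 * set applies to executions and forms: only the id is kept on the object and
 * the heavyweight instance is looked up when actually needed. The Execution
 * record and the label map below are illustrative stand-ins for the real id
 * and storage types.
 */
class IdResolveSketch {

	record Execution(String ownerId) {

		String ownerLabel(java.util.Map<String, String> userLabelsById) {
			final String label = userLabelsById.get(ownerId);

			if (label == null) {
				// fail loudly on unknown ids, comparable to the IdResolvingException added in this patch
				throw new java.util.NoSuchElementException("Unresolvable user id: " + ownerId);
			}

			return label;
		}
	}
}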
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ColumnDescriptor; -import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.ManagedQuery; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.Namespace; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.AccessLevel; import lombok.EqualsAndHashCode; @@ -51,21 +48,20 @@ public class ManagedInternalForm extends ManagedF /** - * Mapping of a result table name to a set of queries. - * This is required by forms that have multiple results (CSVs) as output. + * Subqueries that are sent to the workers. */ @JsonIgnore @EqualsAndHashCode.Exclude - private Map subQueries; - + private final IdMap flatSubQueries = new IdMap<>(); /** - * Subqueries that are sent to the workers. + * Mapping of a result table name to a set of queries. + * This is required by forms that have multiple results (CSVs) as output. */ @JsonIgnore @EqualsAndHashCode.Exclude - private final IdMap flatSubQueries = new IdMap<>(); + private Map subQueries; - public ManagedInternalForm(F form, User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedInternalForm(F form, UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { super(form, user, submittedDataset, storage, datasetRegistry); } @@ -95,23 +91,7 @@ private Map createSubExecutions() { )); } - - @Override - public void start() { - synchronized (this) { - subQueries.values().forEach(flatSubQueries::add); - } - flatSubQueries.values().forEach(ManagedExecution::start); - super.start(); - } - - @Override - public List generateColumnDescriptions(boolean isInitialized, ConqueryConfig config) { - return subQueries.values().iterator().next().generateColumnDescriptions(isInitialized, config); - } - - - protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status, Namespace namespace) { + protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { // Set the ColumnDescription if the Form only consits of a single subquery if (subQueries == null) { // If subqueries was not set the Execution was not initialized, do it manually @@ -134,13 +114,27 @@ public void cancel() { subQueries.values().forEach(ManagedQuery::cancel); } + @Override + public void start() { + synchronized (this) { + subQueries.values().forEach(flatSubQueries::add); + } + flatSubQueries.values().forEach(ManagedExecution::start); + super.start(); + } + + @Override + public List generateColumnDescriptions(boolean isInitialized, ConqueryConfig config) { + return subQueries.values().iterator().next().generateColumnDescriptions(isInitialized, config); + } + @Override @JsonIgnore - public List getResultInfos(PrintSettings printSettings) { + public List getResultInfos() { if (subQueries.size() != 1) { throw new UnsupportedOperationException("Cannot gather result info when 
multiple tables are generated"); } - return subQueries.values().iterator().next().getResultInfos(printSettings); + return subQueries.values().iterator().next().getResultInfos(); } @Override @@ -161,7 +155,7 @@ public long resultRowCount() { return subQueries.values().iterator().next().resultRowCount(); } - public boolean allSubQueriesDone(ExecutionManager executionManager) { + public boolean allSubQueriesDone() { synchronized (this) { return flatSubQueries.values().stream().allMatch(q -> q.getState().equals(ExecutionState.DONE)); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/RelativeFormQuery.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/RelativeFormQuery.java index 6b912d0596..2668aa332a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/RelativeFormQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/RelativeFormQuery.java @@ -16,7 +16,6 @@ import com.bakdata.conquery.models.forms.util.CalendarUnit; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.DateAggregationMode; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; @@ -74,17 +73,17 @@ public void collectRequiredQueries(Set requiredQueries) { } @Override - public List getResultInfos(PrintSettings printSettings) { + public List getResultInfos() { List resultInfos = new ArrayList<>(); - resultInfos.add(ResultHeaders.formResolutionInfo(printSettings)); - resultInfos.add(ResultHeaders.formContextInfo(printSettings)); - resultInfos.add(ResultHeaders.formEventDateInfo(printSettings)); - resultInfos.add(ResultHeaders.formDateRangeInfo(printSettings)); + resultInfos.add(ResultHeaders.formResolutionInfo()); + resultInfos.add(ResultHeaders.formContextInfo()); + resultInfos.add(ResultHeaders.formEventDateInfo()); + resultInfos.add(ResultHeaders.formDateRangeInfo()); - final List featureInfos = features.getResultInfos(printSettings); + final List featureInfos = features.getResultInfos(); - resultInfos.add(ResultHeaders.formObservationScopeInfo(printSettings)); + resultInfos.add(ResultHeaders.formObservationScopeInfo()); resultInfos.addAll(featureInfos); diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/CentralRegistry.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/CentralRegistry.java deleted file mode 100644 index ae74f0de63..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/CentralRegistry.java +++ /dev/null @@ -1,106 +0,0 @@ -package com.bakdata.conquery.models.identifiable; - -import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.function.Function; - -import com.bakdata.conquery.models.error.ConqueryError.ExecutionCreationResolveError; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonMappingException; -import lombok.NoArgsConstructor; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; - - -@Slf4j -@SuppressWarnings({"rawtypes", "unchecked"}) -@NoArgsConstructor -@ToString(of = "map") -public class CentralRegistry { - - private final IdMap map = new IdMap<>(); - private final ConcurrentMap, 
Function> cacheables = new ConcurrentHashMap<>(); - - public synchronized CentralRegistry register(Identifiable ident) { - map.add(ident); - return this; - } - - public synchronized Function registerCacheable(Id id, Function supplier) { - return cacheables.put(id, supplier); - } - - public > T resolve(Id name) { - final T result = get(name); - - if (result == null) { - throw new ExecutionCreationResolveError(name); - } - - return result; - } - - public Identifiable update(Identifiable ident) { - return map.update(ident); - } - - public synchronized Optional updateCacheable(Id id, Function supplier) { - Function old = cacheables.put(id, supplier); - if (old != null) { - // If the cacheable was still there, the Object was never cached. - return Optional.empty(); - } - // The supplier might have been invoked already and the object gone into the IdMap - // So we invalidate it - return Optional.ofNullable(map.remove(id)); - } - - public > Optional getOptional(Id name) { - return Optional.ofNullable(get(name)); - } - - public synchronized void remove(Identifiable ident) { - Id id = ident.getId(); - map.remove(id); - } - - public static CentralRegistry get(DeserializationContext ctxt) throws JsonMappingException { - return (CentralRegistry) ctxt.findInjectableValue(CentralRegistry.class.getName(), null, null); - } - - public void clear() { - map.clear(); - cacheables.clear(); - } - - /** - * Needs to be protected in order to be overwritten by {@link InjectingCentralRegistry} - */ - protected > T get(Id name) { - Object res = map.get(name); - if (res != null) { - return (T) res; - } - synchronized (this) { - // Retry synchronized to make sure it has not been resolved from cacheables in the mean time - Object res2 = map.get(name); - if (res2 != null) { - return (T) res2; - } - Function supplier = cacheables.get(name); - if (supplier == null) { - return null; - } - - // Transfer object to the IdMap - final T apply = (T) supplier.apply(name); - register(apply); - cacheables.remove(name); - } - - return (T) map.get(name); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdResolvingException.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdResolvingException.java new file mode 100644 index 0000000000..190d2302db --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdResolvingException.java @@ -0,0 +1,29 @@ +package com.bakdata.conquery.models.identifiable; + +import com.bakdata.conquery.io.cps.CPSType; +import com.bakdata.conquery.models.error.ConqueryError; +import com.bakdata.conquery.models.error.ErrorMessages; +import com.bakdata.conquery.models.identifiable.ids.Id; + +/** + * TODO as {@link com.bakdata.conquery.models.error.ConqueryError} ? 
+ */ +@CPSType(base = ConqueryError.class, id = "CQ_ID_RESOLVE_ERROR") +public class IdResolvingException extends ConqueryError { + + private final Id id; + + public IdResolvingException(Id id) { + this.id = id; + } + + public IdResolvingException(Id id, Throwable cause) { + super(ConqueryError.asConqueryError(cause)); + this.id = id; + } + + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.idUnresolvable(id); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/Identifiable.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/Identifiable.java index 4c6037f87c..f77daa1736 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/Identifiable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/Identifiable.java @@ -1,7 +1,5 @@ package com.bakdata.conquery.models.identifiable; -import jakarta.validation.Valid; - import com.bakdata.conquery.models.identifiable.ids.Id; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.ToString; @@ -9,7 +7,6 @@ public interface Identifiable>> { @JsonIgnore - @Valid @ToString.Include ID getId(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdentifiableImpl.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdentifiableImpl.java index dd9a276e61..219ea5438d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdentifiableImpl.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdentifiableImpl.java @@ -1,9 +1,14 @@ package com.bakdata.conquery.models.identifiable; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.OptBoolean; +import lombok.AccessLevel; +import lombok.Getter; import lombok.NoArgsConstructor; +import lombok.Setter; import lombok.ToString; @NoArgsConstructor @@ -14,24 +19,12 @@ public abstract class IdentifiableImpl other = (IdentifiableImpl) obj; if (getId() == null) { - if (other.getId() != null) { - return false; - } - } else if (!getId().equals(other.getId())) { - return false; + return other.getId() == null; + } + else { + return getId().equals(other.getId()); + } + } + + @Override + public String toString() { + return this.getClass().getSimpleName()+"["+ getId() + "]"; + } + + @ToString.Include + @JsonIgnore + @Override + public ID getId() { + if (cachedId == null) { + + cachedId = createId(); } - return true; + return cachedId; } + + public abstract ID createId(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/NamespacedStorageProvider.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/NamespacedStorageProvider.java new file mode 100644 index 0000000000..6a7b1885cd --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/NamespacedStorageProvider.java @@ -0,0 +1,43 @@ +package com.bakdata.conquery.models.identifiable; + +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import 
com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import org.jetbrains.annotations.NotNull; + +/** + * Interface for classes that can resolve an {@link NamespacedId} to a concrete object. + */ +public interface NamespacedStorageProvider extends Injectable { + + static NamespacedStorageProvider getResolver(DeserializationContext ctxt) throws JsonMappingException { + return (NamespacedStorageProvider) ctxt + .findInjectableValue(NamespacedStorageProvider.class.getName(), null, null); + } + + /** + * Almost identical to {@link NamespacedStorageProvider#getStorage(DatasetId)}, but throws an {@link IllegalArgumentException} if no storage could be resolved. + * @return the storage or throws an {@link IllegalArgumentException} if the storage could not be resolved. + */ + @NotNull + default NamespacedStorage resolveStorage(DatasetId datasetId) { + NamespacedStorage storage = getStorage(datasetId); + if (storage == null) { + throw new IllegalArgumentException("Unknown dataset: %s".formatted(datasetId)); + } + return storage; + } + + /** + * Returns the storage corresponding to the given dataset. + * @param datasetId the dataset to query + * @return The storage or null if no storage corresponds to the dataset + * + * @implNote Don't call {@link Dataset#getNamespacedStorageProvider()} as it is probably not yet set. + */ + NamespacedStorage getStorage(DatasetId datasetId); +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IIdInterner.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IIdInterner.java index 5c46e6a297..395bfd7369 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IIdInterner.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IIdInterner.java @@ -4,24 +4,48 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.models.identifiable.ids.IdUtil.Parser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; -public enum IIdInterner { - - INSTANCE; +public class IIdInterner implements Injectable { private final Map, ParserIIdInterner> perParserInterner = new ConcurrentHashMap<>(); + public static IIdInterner get(DeserializationContext context) throws JsonMappingException { + return (IIdInterner) context.findInjectableValue(IIdInterner.class, null, null); + } + @SuppressWarnings("unchecked") - public static > ParserIIdInterner forParser(Parser parser) { - return (ParserIIdInterner) INSTANCE.perParserInterner.computeIfAbsent(parser, k -> new ParserIIdInterner<>()); + public > ParserIIdInterner forParser(Parser parser) { + return (ParserIIdInterner) perParserInterner.computeIfAbsent(parser, k -> new ParserIIdInterner<>()); + } + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(this.getClass(), this); } public static class ParserIIdInterner> { private final Map, ID> interned = new ConcurrentHashMap<>(); public ID putIfAbsent(List components, ID id) { - return interned.putIfAbsent(components, id); + ID old = interned.putIfAbsent(components, id); + + if (old == null) { + return id; + } + checkConflict(id, old); + return old; + } + + public static void checkConflict(Id id, Id cached) { + if (!cached.equals(id)) { + throw new 
IllegalStateException("The cached id '%s' (%s) conflicted with the new entry of '%s' (%s)" + .formatted(cached, cached.getClass().getSimpleName(), id, id.getClass().getSimpleName())); + } } public ID get(List components) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/Id.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/Id.java index 3d46c9268b..7f63d73ed2 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/Id.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/Id.java @@ -2,15 +2,21 @@ import java.lang.ref.WeakReference; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Objects; import com.bakdata.conquery.io.jackson.serializer.IdDeserializer; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.identifiable.IdResolvingException; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.util.ConqueryEscape; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonValue; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import lombok.Getter; import lombok.RequiredArgsConstructor; +import lombok.Setter; @RequiredArgsConstructor @JsonDeserialize(using = IdDeserializer.class) @@ -19,17 +25,33 @@ public abstract class Id { /** * Holds the cached escaped value. * - * @implNote needs to be initialized. Otherwise SerializationTests fail, because assertj checks ignored types. + * @implNote needs to be initialized. Otherwise, SerializationTests fail, because assertj checks ignored types. */ @JsonIgnore private WeakReference escapedId = new WeakReference<>(null); - @Override - public abstract boolean equals(Object obj); + /** + * Injected by deserializer + */ + @JsonIgnore + @Setter + @Getter + private NamespacedStorageProvider namespacedStorageProvider; + + /** + * Injected by deserializer for resolving meta Ids + */ + @JsonIgnore + @Setter + @Getter + private MetaStorage metaStorage; @Override public abstract int hashCode(); + @Override + public abstract boolean equals(Object obj); + @Override @JsonValue public final String toString() { @@ -67,4 +89,24 @@ public final List collectComponents() { return result; } + + public TYPE resolve() { + if (this instanceof NamespacedId namespacedId) { + return (TYPE) namespacedId.resolve(getNamespacedStorageProvider().getStorage(namespacedId.getDataset())); + } + if (this instanceof MetaId) { + return metaStorage.resolve((Id & MetaId)this); + } + throw new IllegalStateException("Tried to resolve an id that is neither NamespacedId nor MetaId: %s".formatted(this)); + } + + public IdResolvingException newIdResolveException() { + return new IdResolvingException(this); + } + + public IdResolvingException newIdResolveException(Exception e) { + return new IdResolvingException(this, e); + } + + public abstract void collectIds(Collection> collect); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IdUtil.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IdUtil.java index bff1bc6521..586f229fc1 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IdUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IdUtil.java @@ -9,7 +9,6 @@ import com.bakdata.conquery.models.identifiable.IdentifiableImpl; import com.bakdata.conquery.util.ConqueryEscape; import 
com.google.common.base.Joiner; -import com.google.common.collect.ImmutableList; import lombok.experimental.UtilityClass; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.reflect.MethodUtils; @@ -21,31 +20,10 @@ public final class IdUtil { public static final Joiner JOINER = Joiner.on(JOIN_CHAR); private static final Map, Class> CLASS_TO_ID_MAP = new ConcurrentHashMap<>(); - public static > ID intern(ID id) { - @SuppressWarnings("unchecked") - ID old = IIdInterner.forParser((Parser) createParser(id.getClass())).putIfAbsent(id.collectComponents(), id); - if (old == null) { - return id; - } - checkConflict(id, old); - return old; - } - public static > Parser createParser(Class idClass) { return (Parser) idClass.getDeclaredClasses()[0].getEnumConstants()[0]; } - public static void checkConflict(Id id, Id cached) { - if (!cached.equals(id)) { - throw new IllegalStateException("The cached id '" - + cached - + "'(" - + cached.getClass().getSimpleName() - + ") conflicted with a new entry of " - + id.getClass().getSimpleName()); - } - } - public static > Class findIdClass(Class cl) { Class result = CLASS_TO_ID_MAP.get(cl); @@ -80,12 +58,8 @@ public static > Class findIdClass(Class cl) { public interface Parser> { - default ID parse(String id) { - return parse(split(id)); - } - - default ID parse(String... id) { - return parse(Arrays.asList(id)); + static List asComponents(String id) { + return Arrays.asList(split(id)); } static String[] split(String id) { @@ -98,18 +72,17 @@ static String[] split(String id) { return parts; } + default ID parse(String id) { + return parse(split(id)); + } + + default ID parse(String... id) { + return parse(Arrays.asList(id)); + } + default ID parse(List parts) { //first check if we get the result with the list (which might be a sublist) - ID result = IIdInterner.forParser(this).get(parts); - if (result == null) { - result = createId(parts); - //if not make a minimal list and use that to compute so that we do not keep the sublist - ID secondResult = IIdInterner.forParser(this).putIfAbsent(ImmutableList.copyOf(parts), result); - if (secondResult != null) { - checkConflict(result, secondResult); - return secondResult; - } - } + ID result = createId(parts); return result; } @@ -137,28 +110,27 @@ default ID checkNoRemaining(ID id, IdIterator remaining, List allParts) default ID parse(IdIterator parts) { //first check if we get the result with the list (which might be a sublist) - List input = parts.getRemaining(); - ID result = IIdInterner.forParser(this).get(input); - if (result == null) { - parts.internNext(); - result = parseInternally(parts); - //if not make a minimal list and use that to compute so that we do not keep the sublist - ID secondResult = IIdInterner.forParser(this).putIfAbsent(ImmutableList.copyOf(input), result); - if (secondResult != null) { - checkConflict(result, secondResult); - return secondResult; - } - return result; - } - parts.consumeAll(); + + parts.internNext(); + ID result = parseInternally(parts); return result; } default ID parsePrefixed(String dataset, String id) { + List result = asComponents(dataset, id); + return parse(result); + } + + static List asComponents(String dataset, String id) { String[] result; String[] split = split(id); - //if already prefixed + + if (dataset == null) { + return Arrays.asList(split); + } + if (split.length > 0 && split[0].equals(dataset)) { + //if already prefixed result = split; } else { @@ -166,7 +138,7 @@ default ID parsePrefixed(String dataset, String id) { result[0] = 
dataset; System.arraycopy(split, 0, result, 1, split.length); } - return parse(Arrays.asList(result)); + return Arrays.asList(result); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/MetaId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/MetaId.java new file mode 100644 index 0000000000..8bc164aecc --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/MetaId.java @@ -0,0 +1,12 @@ +package com.bakdata.conquery.models.identifiable.ids; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.identifiable.Identifiable; + +/** + * Marker interface for Ids that are resolvable in a {@link com.bakdata.conquery.io.storage.MetaStorage} + */ +public interface MetaId { + + Identifiable get(MetaStorage storage); +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedId.java index d19838ffc8..48a18ca78c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedId.java @@ -1,20 +1,69 @@ package com.bakdata.conquery.models.identifiable.ids; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.io.storage.WorkerStorage; +import com.bakdata.conquery.models.identifiable.IdResolvingException; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; /** - * Marker interface for {@link Id}s that are loaded via Namespaced CentralRegistry - * (see {@link com.bakdata.conquery.models.worker.IdResolveContext#findRegistry(DatasetId)}, - * as opposed to Registry in the {@link com.bakdata.conquery.io.storage.MetaStorage} + * Marker interface for {@link Id}s that are bound to a {@link com.bakdata.conquery.models.worker.Namespace}/{@link com.bakdata.conquery.models.datasets.Dataset}. */ public interface NamespacedId { - @JsonIgnore - DatasetId getDataset(); + static WorkerStorage assertWorkerStorage(NamespacedStorage storage) { + if (!(storage instanceof WorkerStorage workerStorage)) { + throw new IllegalArgumentException("Cannot be retrieved from %s".formatted(storage)); + } + return workerStorage; + } + + static NamespaceStorage assertNamespaceStorage(NamespacedStorage storage) { + if (!(storage instanceof NamespaceStorage namespaceStorage)) { + throw new IllegalArgumentException("Cannot be retrieved from %s".formatted(storage)); + } + return namespaceStorage; + } default String toStringWithoutDataset() { return StringUtils.removeStart(toString(), getDataset().toString() + IdUtil.JOIN_CHAR); } + + @JsonIgnore + DatasetId getDataset(); + + /** + * Almost identical to {@link NamespacedId#get(NamespacedStorage)}, but throws an {@link IdResolvingException} if no object could be resolved. + * @return the object or throws an {@link IdResolvingException} if the Object could not be resolved. 
+ */ + @NotNull + default NamespacedIdentifiable resolve(NamespacedStorage storage) { + try { + NamespacedIdentifiable o = get(storage); + if (o == null) { + throw newIdResolveException(); + } + return o; + } + catch (IdResolvingException e) { + throw e; + } + catch (Exception e) { + throw newIdResolveException(e); + } + } + + /** + * Return the object identified by the given id from the given storage. + * @return the object or null if no object could be resolved. If the id type is not supported, + * throws an IllegalArgumentException + */ + NamespacedIdentifiable get(NamespacedStorage storage); + + IdResolvingException newIdResolveException(); + + IdResolvingException newIdResolveException(Exception e); } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedIdentifiable.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedIdentifiable.java index f31c1b5f33..e9178fa585 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedIdentifiable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedIdentifiable.java @@ -1,8 +1,8 @@ package com.bakdata.conquery.models.identifiable.ids; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; public interface NamespacedIdentifiable> & NamespacedId> extends Identifiable { - Dataset getDataset(); + DatasetId getDataset(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/BucketId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/BucketId.java index 51eef836ed..f33a42a623 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/BucketId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/BucketId.java @@ -1,13 +1,18 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import static com.bakdata.conquery.models.identifiable.ids.NamespacedId.assertWorkerStorage; + +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +30,28 @@ public DatasetId getDataset() { return imp.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return assertWorkerStorage(storage).getBucket(this); + } + @Override public void collectComponents(List components) { imp.collectComponents(components); components.add(bucket); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + imp.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return imp.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/CBlockId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/CBlockId.java index b153c6f8b0..44f09e3eaf 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/CBlockId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/CBlockId.java @@ -1,13 +1,18 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import static com.bakdata.conquery.models.identifiable.ids.NamespacedId.assertWorkerStorage; + +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +30,29 @@ public DatasetId getDataset() { return connector.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return assertWorkerStorage(storage).getCBlock(this); + } + @Override public void collectComponents(List components) { bucket.collectComponents(components); connector.collectComponents(components); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + bucket.collectIds(collect); + connector.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return bucket.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ColumnId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ColumnId.java index 18342624a0..7998adcf10 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ColumnId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ColumnId.java @@ -1,13 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +27,28 @@ public DatasetId getDataset() { return table.getDataset(); } + @Override + public Column get(NamespacedStorage storage) { + return storage.getTable(getTable()).getColumnByName(getColumn()); + } + @Override public void collectComponents(List components) { table.collectComponents(components); components.add(column); } + @Override + public void collectIds(Collection> collect) { 
+ collect.add(this); + table.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return table.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptElementId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptElementId.java index 6951c25e42..1899cee104 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptElementId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptElementId.java @@ -2,8 +2,8 @@ import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -23,7 +23,7 @@ public ConceptElementId parseInternally(IdIterator parts) { return ConceptId.Parser.INSTANCE.parse(parts); } String childName = parts.next(); - ConceptElementId parent = ConceptElementId.Parser.INSTANCE.parse(parts); + ConceptElementId parent = Parser.INSTANCE.parse(parts); return new ConceptTreeChildId(parent, childName); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptId.java index 4decf7e430..f1c8c3a55a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptId.java @@ -1,17 +1,27 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; +import java.util.List; +import java.util.Set; + +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.auth.permissions.Authorized; +import com.bakdata.conquery.models.auth.permissions.ConceptPermission; +import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; -import java.util.List; - @Getter @AllArgsConstructor @EqualsAndHashCode(callSuper=false) -public class ConceptId extends ConceptElementId> implements NamespacedId { +public class ConceptId extends ConceptElementId> implements NamespacedId, Authorized { private final DatasetId dataset; private final String name; @@ -20,7 +30,12 @@ public class ConceptId extends ConceptElementId> implements Namespace public DatasetId getDataset() { return dataset; } - + + @Override + public NamespacedIdentifiable get(NamespacedStorage 
storage) { + return storage.getConcept(this); + } + @Override public ConceptId findConcept() { return this; @@ -32,7 +47,24 @@ public void collectComponents(List components) { components.add(name); } - public static enum Parser implements IdUtil.Parser { + + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public ConqueryPermission createPermission(Set abilities) { + return ConceptPermission.onInstance(abilities, this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } + + public enum Parser implements IdUtil.Parser { INSTANCE; @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptSelectId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptSelectId.java index 438f57fbb3..71ec378244 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptSelectId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptSelectId.java @@ -1,11 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -20,9 +24,9 @@ public ConceptSelectId(ConceptId concept, String select) { } @Override - public void collectComponents(List components) { - concept.collectComponents(components); - super.collectComponents(components); + public void collectIds(Collection> collect) { + collect.add(this); + concept.collectIds(collect); } @Override @@ -30,6 +34,27 @@ public DatasetId getDataset() { return concept.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getConcept(concept).getSelectByName(getSelect()); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return concept.getNamespacedStorageProvider(); + } + + @Override + public ConceptId findConcept() { + return concept; + } + + @Override + public void collectComponents(List components) { + concept.collectComponents(components); + super.collectComponents(components); + } + public enum Parser implements IdUtil.Parser { INSTANCE; @@ -40,9 +65,4 @@ public ConceptSelectId parseInternally(IdIterator parts) { return new ConceptSelectId(parent, name); } } - - @Override - public ConceptId findConcept() { - return concept; - } } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptTreeChildId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptTreeChildId.java index 85cb4b68c8..d38675af17 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptTreeChildId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptTreeChildId.java @@ -1,12 +1,17 
@@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeChild; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -21,7 +26,16 @@ public class ConceptTreeChildId extends ConceptElementId imple public DatasetId getDataset() { return parent.getDataset(); } - + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + Concept concept = storage.getConcept(findConcept()); + if (concept == null) { + return null; + } + return concept.findById(this); + } + @Override public ConceptId findConcept() { return parent.findConcept(); @@ -33,6 +47,17 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + parent.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return parent.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorId.java index 8b6be8cdef..79932af141 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorId.java @@ -1,13 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +27,28 @@ public DatasetId getDataset() { return concept.getDataset(); } + @Override + public Connector get(NamespacedStorage storage) { + return storage.getConcept(getConcept()).getConnectorByName(getConnector()); + } + @Override public void collectComponents(List components) { concept.collectComponents(components); components.add(connector); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + concept.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return concept.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; 
diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorSelectId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorSelectId.java index d18929a627..fde78b020f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorSelectId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorSelectId.java @@ -1,11 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -19,6 +23,27 @@ public ConnectorSelectId(ConnectorId connector, String select) { this.connector = connector; } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + connector.collectIds(collect); + } + + @Override + public DatasetId getDataset() { + return connector.getDataset(); + } + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getConcept(findConcept()).getConnectorByName(getConnector().getConnector()).getSelectByName(getSelect()); + } + + @Override + public ConceptId findConcept() { + return connector.getConcept(); + } + @Override public void collectComponents(List components) { connector.collectComponents(components); @@ -26,8 +51,8 @@ public void collectComponents(List components) { } @Override - public DatasetId getDataset() { - return connector.getDataset(); + public NamespacedStorageProvider getNamespacedStorageProvider() { + return connector.getNamespacedStorageProvider(); } public enum Parser implements IdUtil.Parser { @@ -40,9 +65,4 @@ public ConnectorSelectId parseInternally(IdIterator parts) { return new ConnectorSelectId(parent, name); } } - - @Override - public ConceptId findConcept() { - return connector.getConcept(); - } } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/DatasetId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/DatasetId.java index f52218cb49..187af3c161 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/DatasetId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/DatasetId.java @@ -1,14 +1,21 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import java.util.Set; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.auth.permissions.Authorized; +import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; +import com.bakdata.conquery.models.auth.permissions.DatasetPermission; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import 
com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.fasterxml.jackson.annotation.JsonIgnore; - import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -16,7 +23,7 @@ @AllArgsConstructor @Getter @EqualsAndHashCode(callSuper = false, doNotUseGetters = true) -public class DatasetId extends Id implements NamespacedId { +public class DatasetId extends Id implements NamespacedId, Authorized { private final String name; @@ -26,11 +33,26 @@ public DatasetId getDataset() { return this; } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getDataset(); + } + @Override public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + + @Override + public ConqueryPermission createPermission(Set abilities) { + return DatasetPermission.onInstance(abilities, this); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FilterId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FilterId.java index 733fe01398..6ddedf0405 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FilterId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FilterId.java @@ -1,13 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +27,28 @@ public DatasetId getDataset() { return connector.getDataset(); } + @Override + public Filter get(NamespacedStorage storage) { + return storage.getConcept(connector.getConcept()).getConnectorByName(connector.getConnector()).getFilterByName(getFilter()); + } + @Override public void collectComponents(List components) { connector.collectComponents(components); components.add(filter); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + connector.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return connector.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FormConfigId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FormConfigId.java index a46bb68d4a..58c6d50a6b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FormConfigId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FormConfigId.java @@ 
-1,12 +1,16 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import java.util.UUID; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.forms.configs.FormConfig; +import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -14,7 +18,7 @@ @AllArgsConstructor @Getter @EqualsAndHashCode(callSuper = false) -public class FormConfigId extends Id { +public class FormConfigId extends Id implements MetaId { private final DatasetId dataset; @@ -29,6 +33,17 @@ public void collectComponents(List components) { } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getFormConfig(this); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/GroupId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/GroupId.java index 9019702861..6de6bd59fe 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/GroupId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/GroupId.java @@ -1,20 +1,23 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Group; +import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; import lombok.EqualsAndHashCode; import lombok.Getter; +@Getter @EqualsAndHashCode(callSuper=false) public class GroupId extends PermissionOwnerId { public static final String TYPE = "group"; - @Getter private final String group; public GroupId(String group) { @@ -27,6 +30,16 @@ public void collectComponents(List components) { components.add(group); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getGroup(this); + } + public enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportColumnId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportColumnId.java index 490d23cd9f..8edf685ee5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportColumnId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportColumnId.java @@ -1,13 +1,16 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.ImportColumn; +import 
com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +28,28 @@ public DatasetId getDataset() { return imp.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + throw new UnsupportedOperationException("%s is never stored".formatted(this.getClass().getSimpleName())); + } + @Override public void collectComponents(List components) { imp.collectComponents(components); components.add(column); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + imp.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return imp.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportId.java index c4bac13f29..046ce44df7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportId.java @@ -1,16 +1,20 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; +import java.util.List; + +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; -import java.util.List; - @AllArgsConstructor @Getter @EqualsAndHashCode(callSuper = false) @@ -24,12 +28,28 @@ public DatasetId getDataset() { return table.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getImport(this); + } + @Override public void collectComponents(List components) { table.collectComponents(components); components.add(tag); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + table.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return table.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/InternToExternMapperId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/InternToExternMapperId.java index 5ddd7a0f14..6f09df15a9 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/InternToExternMapperId.java 
+++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/InternToExternMapperId.java @@ -1,11 +1,17 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import static com.bakdata.conquery.models.identifiable.ids.NamespacedId.assertNamespaceStorage; + +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.index.InternToExternMapper; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -24,6 +30,21 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return assertNamespaceStorage(storage).getInternToExternMapper(this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } public enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ManagedExecutionId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ManagedExecutionId.java index f0a2d62e40..74ca53b650 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ManagedExecutionId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ManagedExecutionId.java @@ -1,12 +1,21 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import java.util.Set; import java.util.UUID; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.auth.permissions.Authorized; +import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; +import com.bakdata.conquery.models.auth.permissions.ExecutionPermission; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -14,7 +23,7 @@ @AllArgsConstructor @Getter @EqualsAndHashCode(callSuper = false, doNotUseGetters = true) -public class ManagedExecutionId extends Id { +public class ManagedExecutionId extends Id implements MetaId, Authorized { private final DatasetId dataset; private final UUID execution; @@ -25,6 +34,22 @@ public void collectComponents(List components) { components.add(execution); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getExecution(this); + } + + @Override + public ConqueryPermission 
createPermission(Set abilities) { + return ExecutionPermission.onInstance(abilities, this); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/PermissionOwnerId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/PermissionOwnerId.java index 4e8072ba28..744aeb3b14 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/PermissionOwnerId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/PermissionOwnerId.java @@ -5,13 +5,14 @@ import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import lombok.EqualsAndHashCode; import lombok.RequiredArgsConstructor; @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) -public abstract class PermissionOwnerId> extends Id { +public abstract class PermissionOwnerId> extends Id implements MetaId { public enum Parser implements IdUtil.Parser> { diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/RoleId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/RoleId.java index 388c0ed586..4df43d6cb0 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/RoleId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/RoleId.java @@ -1,19 +1,22 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Role; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import lombok.EqualsAndHashCode; import lombok.Getter; +@Getter @EqualsAndHashCode(callSuper=false) public class RoleId extends PermissionOwnerId { public static final String TYPE = "role"; - @Getter private final String role; public RoleId(String mandator) { @@ -26,6 +29,16 @@ public void collectComponents(List components) { components.add(role); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getRole(this); + } + enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SearchIndexId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SearchIndexId.java index 31005a7a59..8aee086b19 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SearchIndexId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SearchIndexId.java @@ -1,12 +1,17 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import static com.bakdata.conquery.models.identifiable.ids.NamespacedId.assertNamespaceStorage; + +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; 
import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.index.search.SearchIndex; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,6 +30,21 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return assertNamespaceStorage(storage).getSearchIndex(this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } public enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SecondaryIdDescriptionId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SecondaryIdDescriptionId.java index f0e1b83e56..4c214ee448 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SecondaryIdDescriptionId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SecondaryIdDescriptionId.java @@ -1,11 +1,14 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; @@ -25,6 +28,22 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public SecondaryIdDescription get(NamespacedStorage storage) { + return storage.getSecondaryId(this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SelectId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SelectId.java index 618186f345..220dd5821c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SelectId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SelectId.java @@ -4,8 +4,8 @@ import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import 
com.bakdata.conquery.models.identifiable.ids.NamespacedId; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/StructureNodeId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/StructureNodeId.java index 2f1bb9bb40..ac543cf94d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/StructureNodeId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/StructureNodeId.java @@ -1,11 +1,13 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.models.datasets.concepts.StructureNode; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -31,6 +33,22 @@ public void collectComponents(List components) { components.add(structureNode); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + if (parent != null) { + parent.collectIds(collect); + } + else { + dataset.collectIds(collect); + } + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableId.java index a4c04cb280..dd7c9ab8bd 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableId.java @@ -1,15 +1,17 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; -import lombok.Data; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -27,6 +29,22 @@ public void collectComponents(List components) { components.add(table); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getTable(this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableImportDescriptorId.java 
b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableImportDescriptorId.java index 67e0c6aa03..e5c8e60408 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableImportDescriptorId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableImportDescriptorId.java @@ -1,10 +1,11 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.preproc.TableImportDescriptor; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; @@ -22,6 +23,11 @@ public void collectComponents(List components) { components.add(importDescriptor); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/UserId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/UserId.java index 3273813723..2ce00dc287 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/UserId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/UserId.java @@ -1,11 +1,14 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -27,6 +30,16 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getUser(this); + } + public enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ValidityDateId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ValidityDateId.java index e8c9d8615f..aaae4f622a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ValidityDateId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ValidityDateId.java @@ -1,12 +1,16 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import 
com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -23,12 +27,30 @@ public DatasetId getDataset() { return connector.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getConcept(getConnector().getConcept()) + .getConnectorByName(getConnector().getConnector()) + .getValidityDateByName(getValidityDate()); + } + @Override public void collectComponents(List components) { connector.collectComponents(components); components.add(validityDate); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + connector.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return connector.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/WorkerId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/WorkerId.java index 627298a398..2706b0d2f3 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/WorkerId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/WorkerId.java @@ -1,10 +1,11 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.worker.WorkerInformation; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; @@ -24,6 +25,12 @@ public void collectComponents(List components) { components.add(worker); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/mapping/EntityIdMap.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/mapping/EntityIdMap.java index 6bd986ed3d..b937353964 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/mapping/EntityIdMap.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/mapping/EntityIdMap.java @@ -7,11 +7,15 @@ import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.ColumnConfig; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonValue; +import com.fasterxml.jackson.annotation.OptBoolean; import com.univocity.parsers.common.record.Record; import com.univocity.parsers.csv.CsvParser; +import lombok.AccessLevel; +import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -26,12 +30,15 @@ @Getter @EqualsAndHashCode @Slf4j -@NoArgsConstructor +@AllArgsConstructor +// For Jackson +@NoArgsConstructor(access = AccessLevel.PRIVATE) public class EntityIdMap { @Setter @JsonIgnore @EqualsAndHashCode.Exclude + @JacksonInject(useInput = OptBoolean.FALSE) private NamespaceStorage 
storage; /** @@ -49,9 +56,9 @@ public class EntityIdMap { /** * Read incoming CSV-file extracting Id-Mappings for {@link ExternalId} and {@link EntityPrintId}. */ - public static EntityIdMap generateIdMapping(CsvParser parser, List mappers) { + public static EntityIdMap generateIdMapping(CsvParser parser, List mappers, NamespaceStorage namespaceStorage) { - EntityIdMap mapping = new EntityIdMap(); + EntityIdMap mapping = new EntityIdMap(namespaceStorage); Record record; diff --git a/backend/src/main/java/com/bakdata/conquery/models/index/MapInternToExternMapper.java b/backend/src/main/java/com/bakdata/conquery/models/index/MapInternToExternMapper.java index 1ed80b263e..b60769e303 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/index/MapInternToExternMapper.java +++ b/backend/src/main/java/com/bakdata/conquery/models/index/MapInternToExternMapper.java @@ -11,9 +11,9 @@ import com.bakdata.conquery.io.jackson.Initializing; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.NamedImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.util.io.FileUtil; import com.fasterxml.jackson.annotation.JacksonInject; @@ -64,7 +64,7 @@ public class MapInternToExternMapper extends NamedImpl i @JsonIgnore @NotNull - private Dataset dataset; + private DatasetId dataset; @ToString.Include @NotEmpty @@ -95,7 +95,7 @@ public synchronized void init() { return; } - dataset = storage.getDataset(); + dataset = storage.getDataset().getId(); final URI resolvedURI = FileUtil.getResolvedUri(config.getIndex().getBaseUrl(), csv); log.trace("Resolved mapping reference csv url '{}': {}", this.getId(), resolvedURI); @@ -145,7 +145,7 @@ public String external(String internalValue) { @Override public InternToExternMapperId createId() { - return new InternToExternMapperId(getDataset().getId(), getName()); + return new InternToExternMapperId(getDataset(), getName()); } public static class Initializer extends Initializing.Converter {} diff --git a/backend/src/main/java/com/bakdata/conquery/models/index/search/SearchIndex.java b/backend/src/main/java/com/bakdata/conquery/models/index/search/SearchIndex.java index dd080e10ad..5956889a61 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/index/search/SearchIndex.java +++ b/backend/src/main/java/com/bakdata/conquery/models/index/search/SearchIndex.java @@ -1,10 +1,10 @@ package com.bakdata.conquery.models.index.search; import com.bakdata.conquery.io.cps.CPSBase; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.Named; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.SearchIndexId; import com.fasterxml.jackson.annotation.JsonTypeInfo; @@ -15,5 +15,5 @@ public interface SearchIndex extends Identifiable, Named> getAllSelectFilters(NamespaceStorage storage) { - return storage.getAllConcepts().stream() + return storage.getAllConcepts() .flatMap(c -> c.getConnectors().stream()) .flatMap(co -> 
co.collectAllFilters().stream()) .filter(SelectFilter.class::isInstance) diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java index 3df723a45b..dafbb00495 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java @@ -11,12 +11,12 @@ import java.util.stream.Collectors; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SelectFilter; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.stores.root.StringStore; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.jobs.Job; import com.bakdata.conquery.models.jobs.UpdateFilterSearchJob; import com.bakdata.conquery.models.messages.namespaces.ActionReactionMessage; @@ -44,8 +44,7 @@ public class CollectColumnValuesJob extends WorkerMessage implements ActionReactionMessage { @Getter - @NsIdRefCollection - private final Set columns; + private final Set columns; /** * This exists only on the manager for the afterAllReaction. @@ -56,8 +55,8 @@ public class CollectColumnValuesJob extends WorkerMessage implements ActionReact @Override public void react(Worker context) throws Exception { - final Map> table2Buckets = context.getStorage().getAllBuckets().stream() - .collect(Collectors.groupingBy(Bucket::getTable)); + final Map> table2Buckets = context.getStorage().getAllBuckets() + .collect(Collectors.groupingBy(Bucket::getTable)); final ListeningExecutorService jobsExecutorService = MoreExecutors.listeningDecorator(context.getJobsExecutorService()); @@ -68,14 +67,15 @@ public void react(Worker context) throws Exception { final List> futures = columns.stream() .filter(column -> table2Buckets.get(column.getTable()) != null) + .map(ColumnId::resolve) .map(column -> jobsExecutorService.submit(() -> { - final List buckets = table2Buckets.get(column.getTable()); + final List buckets = table2Buckets.get(column.getTable().getId()); final Set values = buckets.stream() .flatMap(bucket -> ((StringStore) bucket.getStore(column)).streamValues()) .collect(Collectors.toSet()); - context.send(new RegisterColumnValues(getMessageId(), context.getInfo().getId(), column, values)); + context.send(new RegisterColumnValues(getMessageId(), context.getInfo().getId(), column.getId(), values)); log.trace("Finished collections values for column {} as number {}", column, done.incrementAndGet()); }) ) @@ -120,7 +120,8 @@ public void execute() { log.debug("{} shrinking searches", this); - for (Column column : columns) { + for (ColumnId columnId : columns) { + Column column = columnId.resolve(); try { filterSearch.shrinkSearch(column); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RegisterColumnValues.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RegisterColumnValues.java index 32587e4bdb..ec8a068bf8 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RegisterColumnValues.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RegisterColumnValues.java @@ -5,8 +5,7 @@ import java.util.UUID; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.messages.ReactionMessage; import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; @@ -33,8 +32,7 @@ public class RegisterColumnValues extends NamespaceMessage implements ReactionMe private WorkerId workerId; - @NsIdRef - private final Column column; + private final ColumnId column; @ToString.Exclude private final Collection values; @@ -48,13 +46,13 @@ public int size() { @Override public void react(DistributedNamespace context) throws Exception { if (log.isTraceEnabled()) { - log.trace("Registering {} values for column '{}': {}", size(), column.getId(), Arrays.toString(values.toArray())); + log.trace("Registering {} values for column '{}': {}", size(), column, Arrays.toString(values.toArray())); } else { - log.debug("Registering {} values for column '{}'", size(), column.getId()); + log.debug("Registering {} values for column '{}'", size(), column); } - context.getFilterSearch().registerValues(column, values); + context.getFilterSearch().registerValues(column.resolve(), values); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveConcept.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveConcept.java index f1a856fcab..d061dcbde8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveConcept.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveConcept.java @@ -1,8 +1,7 @@ package com.bakdata.conquery.models.messages.namespaces.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; import com.bakdata.conquery.models.worker.Worker; @@ -14,14 +13,13 @@ @CPSType(id="REMOVE_CONCEPT", base=NamespacedMessage.class) @RequiredArgsConstructor(onConstructor_=@JsonCreator) @Getter @ToString public class RemoveConcept extends WorkerMessage { - - @NsIdRef - private final Concept concept; + private final ConceptId concept; + @Override public void react(Worker context) throws Exception { synchronized (context.getStorage()) { - context.removeConcept(concept); + context.removeConcept(concept.resolve()); } } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveImportJob.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveImportJob.java index 7c4b6ca355..6fa6bd86b5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveImportJob.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveImportJob.java @@ -1,24 +1,24 @@ package 
com.bakdata.conquery.models.messages.namespaces.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; import com.bakdata.conquery.models.worker.Worker; import com.fasterxml.jackson.annotation.JsonCreator; +import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.ToString; import lombok.extern.slf4j.Slf4j; - @CPSType(id="REMOVE_IMPORT", base= NamespacedMessage.class) -@RequiredArgsConstructor(onConstructor_=@JsonCreator) @ToString +@RequiredArgsConstructor(onConstructor_=@JsonCreator) +@ToString @Slf4j +@Getter // Needed by SmileParser public class RemoveImportJob extends WorkerMessage { - @NsIdRef - private final Import imp; + private final ImportId imp; @Override public void react(Worker context) throws Exception { diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveSecondaryId.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveSecondaryId.java index 62a3206c4e..1200eeb3e6 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveSecondaryId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveSecondaryId.java @@ -1,8 +1,7 @@ package com.bakdata.conquery.models.messages.namespaces.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; import com.bakdata.conquery.models.worker.Worker; @@ -17,12 +16,11 @@ @AllArgsConstructor(onConstructor_=@JsonCreator) @Getter @Setter @ToString(callSuper=true) public class RemoveSecondaryId extends WorkerMessage { - @NsIdRef - private SecondaryIdDescription secondaryId; + private SecondaryIdDescriptionId secondaryId; @Override public void react(Worker context) throws Exception { log.info("Received Deletion of SecondaryId {}", secondaryId); - context.removeSecondaryId(secondaryId.getId()); + context.removeSecondaryId(secondaryId); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveTable.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveTable.java index 02d8dddac8..6faade2ac5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveTable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveTable.java @@ -1,8 +1,7 @@ package com.bakdata.conquery.models.messages.namespaces.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; import com.bakdata.conquery.models.worker.Worker; @@ -17,8 +16,7 @@ 
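The messages above (RemoveConcept, RemoveImportJob, RemoveSecondaryId, and RemoveTable just below) all follow the same migration: the wire format carries a plain id, and the worker resolves it against its own storage inside react(). Below is a hypothetical message in that shape, assuming TableId.resolve() and Table.getName() behave as used elsewhere in this patch; the class itself is illustrative and omits the @CPSType registration a real message needs.

// Editorial sketch -- a made-up WorkerMessage modelled on RemoveTable below.
// Real messages additionally carry @CPSType(id = ..., base = NamespacedMessage.class).
import com.bakdata.conquery.models.datasets.Table;
import com.bakdata.conquery.models.identifiable.ids.specific.TableId;
import com.bakdata.conquery.models.messages.namespaces.WorkerMessage;
import com.bakdata.conquery.models.worker.Worker;

import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Getter
@RequiredArgsConstructor
public class LogTableSketch extends WorkerMessage {

	// Serialized as its string id; no NsIdRef deserializer involved anymore.
	private final TableId table;

	@Override
	public void react(Worker context) throws Exception {
		// Resolve only where the object is actually needed.
		Table resolved = table.resolve();
		log.info("Table {} is known to worker {}", resolved.getName(), context.getInfo().getId());
	}
}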
@AllArgsConstructor(onConstructor_=@JsonCreator) @Getter @Setter @ToString(callSuper=true) public class RemoveTable extends WorkerMessage { - @NsIdRef - private Table table; + private TableId table; @Override public void react(Worker context) throws Exception { diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java index 692fa2f986..a8c1b74347 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java @@ -14,7 +14,11 @@ import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.worker.DistributedNamespace; import com.google.common.collect.Sets; -import lombok.*; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.NonNull; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; /** @@ -38,7 +42,7 @@ public class ReportConsistency extends NamespaceMessage { @Override public void react(DistributedNamespace context) throws Exception { - Set managerImports = context.getStorage().getAllImports().stream().map(Import::getId).collect(Collectors.toSet()); + Set managerImports = context.getStorage().getAllImports().map(Import::getId).collect(Collectors.toSet()); Set assignedWorkerBuckets = context.getWorkerHandler().getBucketsForWorker(workerId); diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RequestConsistency.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RequestConsistency.java index c1624fb420..5be15f728e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RequestConsistency.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RequestConsistency.java @@ -24,10 +24,10 @@ public class RequestConsistency extends WorkerMessage { @Override public void react(Worker context) throws Exception { // Gather ImportIds - Set workerImports = context.getStorage().getAllImports().stream().map(Import::getId).collect(Collectors.toSet()); + Set workerImports = context.getStorage().getAllImports().map(Import::getId).collect(Collectors.toSet()); // Gather BucketIds - Set workerBuckets = context.getStorage().getAllBuckets().stream().map(Bucket::getId).collect(Collectors.toSet()); + Set workerBuckets = context.getStorage().getAllBuckets().map(Bucket::getId).collect(Collectors.toSet()); // Send report context.send(new ReportConsistency(context.getInfo().getId(), workerImports, workerBuckets)); diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java index 5efc018639..9d5821b198 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java @@ -1,12 +1,15 @@ package com.bakdata.conquery.models.messages.namespaces.specific; +import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import com.bakdata.conquery.io.cps.CPSType; -import 
com.bakdata.conquery.io.jackson.serializer.NsIdRefKeys; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.MatchingStats; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; @@ -26,27 +29,40 @@ public class UpdateElementMatchingStats extends NamespaceMessage { private final WorkerId source; @ToString.Exclude - @NsIdRefKeys - private final Map, MatchingStats.Entry> values; + private final Map, MatchingStats.Entry> values; @Override public void react(DistributedNamespace context) throws Exception { - for (Entry, MatchingStats.Entry> entry : values.entrySet()) { + // We collect the concepts outside the loop to update the storage afterward + Map> conceptsToUpdate = new HashMap<>(); + + for (Entry, MatchingStats.Entry> entry : values.entrySet()) { try { - final ConceptElement target = entry.getKey(); + ConceptElementId element = entry.getKey(); + ConceptId conceptId = element.findConcept(); + + // mapping function cannot use Id::resolve here yet, somehow the nsIdResolver is not set because it + // stems from a map key. Jackson seems to use a different serializer. + Concept concept = conceptsToUpdate.computeIfAbsent(conceptId, id -> context.getStorage().getConcept(id)); + + final ConceptElement target = concept.findById(element); + final MatchingStats.Entry value = entry.getValue(); + conceptsToUpdate.put(conceptId, concept); + MatchingStats matchingStats = target.getMatchingStats(); if (matchingStats == null) { matchingStats = new MatchingStats(); target.setMatchingStats(matchingStats); } matchingStats.putEntry(source, value); - } - catch (Exception e) { - log.error("Failed to set matching stats for '{}'", entry.getKey()); + } catch (Exception e) { + log.error("Failed to set matching stats for '{}' (enable TRACE for exception)", entry.getKey(), (Exception) (log.isTraceEnabled() ? 
e : null)); } } + + conceptsToUpdate.values().forEach(context.getStorage()::updateConcept); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java index 5f783a8c19..19f54c8d13 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java @@ -10,7 +10,6 @@ import java.util.stream.Collectors; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; @@ -20,6 +19,8 @@ import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.jobs.Job; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; @@ -27,6 +28,7 @@ import com.bakdata.conquery.util.progressreporter.ProgressReporter; import com.fasterxml.jackson.annotation.JsonCreator; import com.google.common.base.Functions; +import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -38,8 +40,8 @@ @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) public class UpdateMatchingStatsMessage extends WorkerMessage { - @NsIdRefCollection - private final Collection> concepts; + @Getter + private final Collection concepts; @Override @@ -50,16 +52,11 @@ public void react(Worker worker) throws Exception { @RequiredArgsConstructor private static class UpdateMatchingStatsJob extends Job { private final Worker worker; - private final Collection> concepts; - - @Override - public String getLabel() { - return String.format("Calculate Matching Stats for %s", worker.getInfo().getDataset()); - } + private final Collection concepts; @Override public void execute() throws Exception { - if (worker.getStorage().getAllCBlocks().isEmpty()) { + if (worker.getStorage().getAllCBlocks().findAny().isEmpty()) { log.debug("Worker {} is empty, skipping.", worker); return; } @@ -69,16 +66,17 @@ public void execute() throws Exception { log.info("BEGIN update Matching stats for {} Concepts", concepts.size()); - final Map, CompletableFuture> + final Map> subJobs = concepts.stream() .collect(Collectors.toMap(Functions.identity(), concept -> CompletableFuture.runAsync(() -> { - final Map, MatchingStats.Entry> + final Concept resolved = concept.resolve(); + final Map, MatchingStats.Entry> matchingStats = - new HashMap<>(concept.countElements()); + new HashMap<>(resolved.countElements()); - calculateConceptMatches(concept, matchingStats, worker); + calculateConceptMatches(resolved, matchingStats, worker); worker.send(new UpdateElementMatchingStats(worker.getInfo().getId(), matchingStats)); @@ -110,7 +108,7 @@ public void execute() throws Exception { return; } - log.trace("Still waiting for `{}`", concept.getId()); + log.trace("Still waiting for `{}`", concept); }); } @@ 
-121,19 +119,23 @@ public void execute() throws Exception { } + @Override + public String getLabel() { + return String.format("Calculate Matching Stats for %s", worker.getInfo().getDataset()); + } - private static void calculateConceptMatches(Concept concept, Map, MatchingStats.Entry> results, Worker worker) { + private static void calculateConceptMatches(Concept concept, Map, MatchingStats.Entry> results, Worker worker) { log.debug("BEGIN calculating for `{}`", concept.getId()); - for (CBlock cBlock : worker.getStorage().getAllCBlocks()) { + for (CBlock cBlock : worker.getStorage().getAllCBlocks().toList()) { - if (!cBlock.getConnector().getConcept().equals(concept)) { + if (!cBlock.getConnector().getConcept().equals(concept.getId())) { continue; } try { - final Bucket bucket = cBlock.getBucket(); - final Table table = bucket.getTable(); + final Bucket bucket = cBlock.getBucket().resolve(); + final Table table = bucket.getTable().resolve(); for (String entity : bucket.entities()) { @@ -145,9 +147,7 @@ private static void calculateConceptMatches(Concept concept, Map new MatchingStats.Entry()).addEvent(table, bucket, event, entity); - + results.computeIfAbsent(concept.getId(), (ignored) -> new MatchingStats.Entry()).addEvent(table, bucket, event, entity); continue; } @@ -158,7 +158,7 @@ private static void calculateConceptMatches(Concept concept, Map element = ((TreeConcept) concept).getElementByLocalIdPath(localIds); while (element != null) { - results.computeIfAbsent(((ConceptElement) element), (ignored) -> new MatchingStats.Entry()) + results.computeIfAbsent(((ConceptElement) element).getId(), (ignored) -> new MatchingStats.Entry()) .addEvent(table, bucket, event, entity); element = element.getParent(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java index 5816f91dd1..65f85fcf44 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java @@ -1,5 +1,7 @@ package com.bakdata.conquery.models.messages.network; +import jakarta.validation.Validator; + import com.bakdata.conquery.commands.ManagerNode; import com.bakdata.conquery.commands.ShardNode; import com.bakdata.conquery.io.mina.MessageSender; @@ -9,7 +11,7 @@ import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.ShardWorkers; -import jakarta.validation.Validator; +import io.dropwizard.core.setup.Environment; import lombok.Getter; @Getter @@ -33,14 +35,14 @@ public static class ShardNodeNetworkContext extends NetworkMessageContext String.format("Missing dataset `%s`", datasetId)); ConqueryMDC.setLocation(ns.getStorage().getDataset().toString()); + message.react(ns); if (message instanceof ReactionMessage reactionMessage) { @@ -37,6 +38,11 @@ public void react(ManagerNodeNetworkContext context) throws Exception { } } + @Override + public String toString() { + return message.toString() + " for dataset " + datasetId; + } + @Override public ProgressReporter getProgressReporter() { return ((SlowMessage) message).getProgressReporter(); @@ -46,9 +52,4 @@ public ProgressReporter getProgressReporter() { public void setProgressReporter(ProgressReporter progressReporter) { ((SlowMessage) message).setProgressReporter(progressReporter); } - - 
@Override - public String toString() { - return message.toString() + " for dataset " + datasetId; - } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RemoveWorker.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RemoveWorker.java index a52d03d82e..bc1fbc85c9 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RemoveWorker.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/specific/RemoveWorker.java @@ -1,8 +1,7 @@ package com.bakdata.conquery.models.messages.network.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.messages.network.MessageToShardNode; import com.bakdata.conquery.models.messages.network.NetworkMessage; import com.bakdata.conquery.models.messages.network.NetworkMessageContext.ShardNodeNetworkContext; @@ -15,14 +14,13 @@ @RequiredArgsConstructor(onConstructor_=@JsonCreator) @Getter @Slf4j public class RemoveWorker extends MessageToShardNode.Slow { - @NsIdRef - private final Dataset dataset; + private final DatasetId dataset; @Override public void react(ShardNodeNetworkContext context) throws Exception { log.info("Removing worker {}", dataset); - context.getWorkers().removeWorkerFor(dataset.getId()); + context.getWorkers().removeWorkerFor(dataset); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedHeader.java b/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedHeader.java index 6b33c7abd8..eabafdf77f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedHeader.java +++ b/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedHeader.java @@ -6,17 +6,18 @@ import java.util.Map; import java.util.stream.Collectors; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Import; import com.bakdata.conquery.models.datasets.ImportColumn; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.events.stores.root.ColumnStore; -import com.fasterxml.jackson.annotation.JsonCreator; -import lombok.AllArgsConstructor; +import com.fasterxml.jackson.annotation.JacksonInject; +import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.Data; import lombok.Getter; -import lombok.NoArgsConstructor; +import lombok.RequiredArgsConstructor; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @@ -27,41 +28,44 @@ */ @Data @Getter @Setter -@NoArgsConstructor(onConstructor_ = {@JsonCreator}) -@AllArgsConstructor +@RequiredArgsConstructor @Slf4j public class PreprocessedHeader { /** * The name/tag of an import. */ - private String name; + private final String name; /** * The specific table id to be loaded into. */ - private String table; + private final String table; /** * Number of rows in the Preprocessed file. */ - private long rows; - private long numberOfEntities; + private final long rows; + private final long numberOfEntities; //TODO use Set to track actually included buckets,to split phase bucket assignment. - private int numberOfBuckets; + private final int numberOfBuckets; /** * The specific columns and their associated MajorType for validation. 
*/ - private PPColumn[] columns; + private final PPColumn[] columns; /** * A hash to check if any of the underlying files for generating this CQPP has changed. */ - private int validityHash; + private final int validityHash; + + @JsonIgnore + @JacksonInject + private NamespaceStorage namespaceStorage; public Import createImportDescription(Table table, Map stores) { - final Import imp = new Import(table); + final Import imp = new Import(table.getId()); imp.setName(getName()); imp.setNumberOfEntries(getRows()); @@ -87,8 +91,6 @@ public Import createImportDescription(Table table, Map stor /** * Verify that the supplied table matches the preprocessed' data in shape. - * - * @return */ public List assertMatch(Table table) { final List errors = new ArrayList<>(); diff --git a/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedReader.java b/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedReader.java index 340433ae6c..5f180edf28 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedReader.java +++ b/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedReader.java @@ -20,32 +20,22 @@ */ @RequiredArgsConstructor(access = AccessLevel.PACKAGE) public class PreprocessedReader implements AutoCloseable, Iterator { - @Override - public void close() throws IOException { - parser.close(); - } - - @Accessors(fluent = true) - @RequiredArgsConstructor - public enum LastRead { - DATA(null), HEADER(DATA), BEGIN(HEADER); - - @Getter - private final LastRead next; - } - + private final JsonParser parser; @Getter private LastRead lastRead = LastRead.BEGIN; private int bucketsRemaining; - private final JsonParser parser; - public PreprocessedReader(InputStream inputStream, ObjectMapper objectMapper) throws IOException { - parser = objectMapper.copy().enable(JsonGenerator.Feature.AUTO_CLOSE_TARGET) - .getFactory() - .createParser(inputStream); + parser = objectMapper + .enable(JsonGenerator.Feature.AUTO_CLOSE_TARGET) + .getFactory() + .createParser(inputStream); } + @Override + public void close() throws IOException { + parser.close(); + } public PreprocessedHeader readHeader() throws IOException { Preconditions.checkState(lastRead.equals(LastRead.BEGIN)); @@ -57,7 +47,6 @@ public PreprocessedHeader readHeader() throws IOException { return header; } - @Override public boolean hasNext() { return bucketsRemaining > 0; @@ -71,4 +60,13 @@ public PreprocessedData next() { return parser.readValueAs(PreprocessedData.class); } + @Accessors(fluent = true) + @RequiredArgsConstructor + public enum LastRead { + DATA(null), HEADER(DATA), BEGIN(HEADER); + + @Getter + private final LastRead next; + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/preproc/parser/specific/MoneyParser.java b/backend/src/main/java/com/bakdata/conquery/models/preproc/parser/specific/MoneyParser.java index 3d8049b4d0..31e6675686 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/preproc/parser/specific/MoneyParser.java +++ b/backend/src/main/java/com/bakdata/conquery/models/preproc/parser/specific/MoneyParser.java @@ -3,7 +3,6 @@ import java.math.BigDecimal; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.config.ParserConfig; import com.bakdata.conquery.models.events.stores.root.IntegerStore; import com.bakdata.conquery.models.events.stores.root.MoneyStore; import com.bakdata.conquery.models.events.stores.specific.MoneyIntStore; @@ -11,62 +10,58 @@ import 
com.bakdata.conquery.models.preproc.parser.ColumnValues; import com.bakdata.conquery.models.preproc.parser.Parser; import com.bakdata.conquery.util.NumberParsing; -import com.fasterxml.jackson.annotation.JsonIgnore; -import lombok.Getter; import lombok.ToString; @ToString(callSuper = true) -public class MoneyParser extends Parser { +public class MoneyParser extends Parser { - private long maxValue = Long.MIN_VALUE; - private long minValue = Long.MAX_VALUE; - - @JsonIgnore - private final BigDecimal moneyFactor; + private final int defaultFractionDigits; + private BigDecimal maxValue = null; + private BigDecimal minValue = null; public MoneyParser(ConqueryConfig config) { super(config); - moneyFactor = BigDecimal.valueOf(10).pow(config.getPreprocessor().getParsers().getCurrency().getDefaultFractionDigits()); + defaultFractionDigits = config.getPreprocessor().getParsers().getCurrency().getDefaultFractionDigits(); } @Override - protected Long parseValue(String value) throws ParsingException { - return NumberParsing - .parseMoney(value) - .multiply(moneyFactor) - .longValueExact(); + protected BigDecimal parseValue(String value) throws ParsingException { + return NumberParsing.parseMoney(value); } @Override - protected void registerValue(Long v) { - if (v > maxValue) { + protected void registerValue(BigDecimal v) { + if (maxValue == null){ maxValue = v; } - if (v < minValue) { + if(minValue == null){ minValue = v; } + + maxValue = maxValue.max(v); + minValue = minValue.min(v); } @Override protected MoneyStore decideType() { IntegerParser subParser = new IntegerParser(getConfig()); - subParser.registerValue(maxValue); - subParser.registerValue(minValue); + subParser.registerValue(maxValue.movePointRight(defaultFractionDigits).longValue()); + subParser.registerValue(minValue.movePointRight(defaultFractionDigits).longValue()); subParser.setLines(getLines()); subParser.setNullLines(getNullLines()); IntegerStore subDecision = subParser.findBestType(); - return new MoneyIntStore(subDecision); + return new MoneyIntStore(subDecision, defaultFractionDigits); } @Override - public void setValue(MoneyStore store, int event, Long value) { + public void setValue(MoneyStore store, int event, BigDecimal value) { store.setMoney(event, value); } @Override public ColumnValues createColumnValues() { - return new LongColumnValues(); + return new ListColumnValues(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java b/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java index 2c533a42bc..1379a72a99 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java @@ -42,50 +42,13 @@ @Slf4j public class DistributedExecutionManager extends ExecutionManager { - @Data - @AllArgsConstructor(access = AccessLevel.PRIVATE) - public static class DistributedState implements InternalState { - @Setter - @NonNull - private ExecutionState state; - private Map> results; - private CountDownLatch executingLock; - - public DistributedState() { - this(ExecutionState.RUNNING, new ConcurrentHashMap<>(), new CountDownLatch(1)); - } - - @NotNull - @Override - public ExecutionState getState() { - return state; - } - - @Override - public Stream streamQueryResults() { - return results.values().stream().flatMap(Collection::stream); - } - - @Override - public CountDownLatch getExecutingLock() { - return executingLock; - } - 
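One arithmetic detail worth spelling out from the MoneyParser change above: values are now kept as BigDecimal while parsing, and only decideType() scales them to integer minor units via movePointRight(defaultFractionDigits). A self-contained example of that scaling with made-up numbers:

// Editorial sketch -- standalone illustration of the scaling in MoneyParser.decideType().
// The currency digits and the value "12.34" are example data, not from the project.
import java.math.BigDecimal;

class MoneyScalingSketch {
	public static void main(String[] args) {
		int defaultFractionDigits = 2;               // two minor-unit digits, e.g. cents
		BigDecimal parsed = new BigDecimal("12.34"); // what parseValue(...) now returns

		// 12.34 -> 1234: the minor-unit value handed to IntegerParser / MoneyIntStore.
		long minorUnits = parsed.movePointRight(defaultFractionDigits).longValue();

		System.out.println(minorUnits); // prints 1234
	}
}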
- public boolean allResultsArrived(Set allWorkers) { - Set finishedWorkers = results.keySet(); - return finishedWorkers.equals(allWorkers); - } - } - private final ClusterState clusterState; - public DistributedExecutionManager(MetaStorage storage, DatasetRegistry datasetRegistry, ClusterState state) { super(storage, datasetRegistry); clusterState = state; } - @Override protected void doExecute(E execution) { @@ -102,6 +65,10 @@ protected void doExecute(E exec workerHandler.sendToAll(createExecutionMessage(execution)); } + private WorkerHandler getWorkerHandler(DatasetId datasetId) { + return clusterState.getWorkerHandlers().get(datasetId); + } + private WorkerMessage createExecutionMessage(ManagedExecution execution) { if (execution instanceof ManagedQuery mq) { return new ExecuteQuery(mq.getId(), mq.getQuery()); @@ -116,8 +83,12 @@ else if (execution instanceof ManagedInternalForm form) { } - private WorkerHandler getWorkerHandler(DatasetId datasetId) { - return clusterState.getWorkerHandlers().get(datasetId); + @Override + public void doCancelQuery(ManagedExecution execution) { + log.debug("Sending cancel message to all workers."); + + execution.cancel(); + getWorkerHandler(execution.createId().getDataset()).sendToAll(new CancelQuery(execution.getId())); } /** @@ -152,7 +123,7 @@ public v distributedState.results.put(result.getWorkerId(), result.getResults()); // If all known workers have returned a result, the query is DONE. - if (distributedState.allResultsArrived(getWorkerHandler(execution.getDataset().getId()).getAllWorkerIds())) { + if (distributedState.allResultsArrived(getWorkerHandler(execution.getDataset()).getAllWorkerIds())) { execution.finish(ExecutionState.DONE); @@ -162,7 +133,7 @@ public v // State changed to DONE or FAILED ExecutionState execStateAfterResultCollect = getResult(id).getState(); if (execStateAfterResultCollect != ExecutionState.RUNNING) { - final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner(), getStorage()).map(Group::getName).orElse("none"); + final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner().resolve(), getStorage()).map(Group::getName).orElse("none"); ExecutionMetrics.getRunningQueriesCounter(primaryGroupName).dec(); ExecutionMetrics.getQueryStateCounter(execStateAfterResultCollect, primaryGroupName).inc(); @@ -175,12 +146,39 @@ public v } - @Override - public void doCancelQuery(ManagedExecution execution) { - log.debug("Sending cancel message to all workers."); + @Data + @AllArgsConstructor(access = AccessLevel.PRIVATE) + public static class DistributedState implements InternalState { + @Setter + @NonNull + private ExecutionState state; + private Map> results; + private CountDownLatch executingLock; - execution.cancel(); - getWorkerHandler(execution.createId().getDataset()).sendToAll(new CancelQuery(execution.getId())); + public DistributedState() { + this(ExecutionState.RUNNING, new ConcurrentHashMap<>(), new CountDownLatch(1)); + } + + @NotNull + @Override + public ExecutionState getState() { + return state; + } + + @Override + public CountDownLatch getExecutingLock() { + return executingLock; + } + + @Override + public Stream streamQueryResults() { + return results.values().stream().flatMap(Collection::stream); + } + + public boolean allResultsArrived(Set allWorkers) { + Set finishedWorkers = results.keySet(); + return finishedWorkers.equals(allWorkers); + } } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java 
b/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java index 6d94b2573d..663bf5be6c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java @@ -13,7 +13,6 @@ import com.bakdata.conquery.metrics.ExecutionMetrics; import com.bakdata.conquery.models.auth.AuthorizationHelper; import com.bakdata.conquery.models.auth.entities.Group; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.execution.ExecutionState; @@ -21,6 +20,7 @@ import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.ExternalExecution; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; @@ -36,44 +36,9 @@ @Slf4j public abstract class ExecutionManager { - /** - * Holds all informations about an execution, which cannot/should not be serialized/cached in a store. - */ - public interface State { - - /** - * The current {@link ExecutionState} of the execution. - */ - @NotNull - ExecutionState getState(); - - void setState(ExecutionState state); - - /** - * Synchronization barrier for web requests. - * Barrier is activated upon starting an execution so request can wait for execution completion. - * When the execution is finished the barrier is removed. - */ - CountDownLatch getExecutingLock(); - } - - public interface InternalState extends State{ - Stream streamQueryResults(); - } - private final MetaStorage storage; - private final DatasetRegistry datasetRegistry; - /** - * Cache for execution states. - */ - private final Cache executionStates = - CacheBuilder.newBuilder() - .softValues() - .removalListener(this::executionRemoved) - .build(); - /** * Manage state of evicted Queries, setting them to NEW. */ @@ -95,7 +60,6 @@ private void executionRemoved(RemovalNotification rem } } - public ManagedExecution getExecution(ManagedExecutionId execution) { return storage.getExecution(execution); } @@ -109,7 +73,14 @@ public R getResult(ManagedExecutionId id) { throw new NoSuchElementException("No execution found for %s".formatted(id)); } return (R) state; - } + } /** + * Cache for execution states. 
+ */ + private final Cache executionStates = + CacheBuilder.newBuilder() + .softValues() + .removalListener(this::executionRemoved) + .build(); public Optional tryGetResult(ManagedExecutionId id) { return Optional.ofNullable((R) executionStates.getIfPresent(id)); @@ -123,16 +94,20 @@ public void addState(ManagedExecutionId id, State result) { executionStates.put(id, result); } - public final ManagedExecution runQuery(Namespace namespace, QueryDescription query, User user, ConqueryConfig config, boolean system) { + public final ManagedExecution runQuery(Namespace namespace, QueryDescription query, UserId user, ConqueryConfig config, boolean system) { final ManagedExecution execution = createExecution(query, user, namespace, system); - execute(namespace, execution, config); + execute(execution, config); return execution; } + // Visible for testing + public final ManagedExecution createExecution(QueryDescription query, UserId user, Namespace namespace, boolean system) { + return createExecution(query, UUID.randomUUID(), user, namespace, system); + } - public final void execute(Namespace namespace, ManagedExecution execution, ConqueryConfig config) { + public final void execute(ManagedExecution execution, ConqueryConfig config) { clearQueryResults(execution); @@ -158,7 +133,7 @@ public final void execute(Namespace namespace, ManagedExecution execution, Conqu execution.start(); - final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner(), storage).map(Group::getName).orElse("none"); + final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner().resolve(), storage).map(Group::getName).orElse("none"); ExecutionMetrics.getRunningQueriesCounter(primaryGroupName).inc(); if (execution instanceof InternalExecution internalExecution) { @@ -172,16 +147,9 @@ public final void execute(Namespace namespace, ManagedExecution execution, Conqu } } - protected abstract void doExecute(E execution); - - // Visible for testing - public final ManagedExecution createExecution(QueryDescription query, User user, Namespace namespace, boolean system) { - return createExecution(query, UUID.randomUUID(), user, namespace, system); - } - - public final ManagedExecution createExecution(QueryDescription query, UUID queryId, User user, Namespace namespace, boolean system) { + public final ManagedExecution createExecution(QueryDescription query, UUID queryId, UserId user, Namespace namespace, boolean system) { // Transform the submitted query into an initialized execution - ManagedExecution managed = query.toManagedExecution(user, namespace.getDataset(), storage, datasetRegistry); + ManagedExecution managed = query.toManagedExecution(user, namespace.getDataset().getId(), storage, datasetRegistry); managed.setSystem(system); managed.setQueryId(queryId); managed.setMetaStorage(storage); @@ -192,6 +160,12 @@ public final ManagedExecution createExecution(QueryDescription query, UUID query return managed; } + public void clearQueryResults(ManagedExecution execution) { + executionStates.invalidate(execution.getId()); + } + + protected abstract void doExecute(E execution); + public final void cancelQuery(final ManagedExecution execution) { executionStates.invalidate(execution.getId()); @@ -202,6 +176,7 @@ public final void cancelQuery(final ManagedExecution execution) { doCancelQuery(execution); } + public abstract void doCancelQuery(final ManagedExecution execution); public void updateState(ManagedExecutionId id, ExecutionState execState) { State state = 
executionStates.getIfPresent(id); @@ -213,13 +188,6 @@ public void updateState(ManagedExecutionId id, ExecutionState execState) { log.warn("Could not update execution state of {} to {}, because it had no state.", id, execState); } - - public abstract void doCancelQuery(final ManagedExecution execution); - - public void clearQueryResults(ManagedExecution execution) { - executionStates.invalidate(execution.getId()); - } - public Stream streamQueryResults(E execution) { final InternalState resultParts = (InternalState) executionStates.getIfPresent(execution.getId()); @@ -262,4 +230,31 @@ public ExecutionState awaitDone(ManagedExecution execution, int time, TimeUnit u } return stateAfterWait.getState(); } + + /** + * Holds all informations about an execution, which cannot/should not be serialized/cached in a store. + */ + public interface State { + + /** + * The current {@link ExecutionState} of the execution. + */ + @NotNull + ExecutionState getState(); + + void setState(ExecutionState state); + + /** + * Synchronization barrier for web requests. + * Barrier is activated upon starting an execution so request can wait for execution completion. + * When the execution is finished the barrier is removed. + */ + CountDownLatch getExecutingLock(); + } + + public interface InternalState extends State{ + Stream streamQueryResults(); + } + + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java b/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java index 70896956f0..6df19ef2fa 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java @@ -17,15 +17,14 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.InternalExecution; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.QueryUtils; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.AccessLevel; @@ -51,11 +50,9 @@ public class ManagedQuery extends ManagedExecution implements SingleTableResult, */ private Long lastResultCount; - @JsonIgnore - private transient List columnDescriptions; - public ManagedQuery(Query query, User owner, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedQuery(Query query, UserId owner, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { super(owner, submittedDataset, storage, datasetRegistry); this.query = query; } @@ -74,7 +71,6 @@ public synchronized void finish(ExecutionState executionState) { super.finish(executionState); } - public Stream streamResults(OptionalLong maybeLimit) { final Stream results = getNamespace().getExecutionManager().streamQueryResults(this); @@ -106,21 +102,18 @@ public void setStatusBase(@NonNull Subject 
subject, @NonNull ExecutionStatus sta status.setQueryType(query.getClass().getAnnotation(CPSType.class).id()); if (query instanceof SecondaryIdQuery secondaryIdQuery) { - status.setSecondaryId((secondaryIdQuery).getSecondaryId().getId()); + status.setSecondaryId((secondaryIdQuery).getSecondaryId()); } } @Override - protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status, Namespace namespace) { - if (columnDescriptions == null) { - columnDescriptions = generateColumnDescriptions(isInitialized(), getConfig()); - } - status.setColumnDescriptions(columnDescriptions); + protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { + status.setColumnDescriptions(generateColumnDescriptions(isInitialized(), getConfig())); } @JsonIgnore - public List getResultInfos(PrintSettings printSettings) { - return query.getResultInfos(printSettings); + public List getResultInfos() { + return query.getResultInfos(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/NamespacedIdentifiableHolding.java b/backend/src/main/java/com/bakdata/conquery/models/query/NamespacedIdentifiableHolding.java index 04cf01e424..16b61fb384 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/NamespacedIdentifiableHolding.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/NamespacedIdentifiableHolding.java @@ -13,5 +13,5 @@ */ public interface NamespacedIdentifiableHolding { - void collectNamespacedObjects(Set> identifiables); + void collectNamespacedObjects(Set> identifiables); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java index a6698b8f48..3f2c418b93 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java @@ -8,13 +8,13 @@ import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.BucketManager; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import lombok.AllArgsConstructor; @@ -52,7 +52,7 @@ public class QueryExecutionContext { private final int today = CDate.ofLocalDate(LocalDate.now()); - public List getEntityBucketsForTable(Entity entity, Table table) { + public List getEntityBucketsForTable(Entity entity, TableId table) { return bucketManager.getEntityBucketsForTable(entity, table); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/QueryPlanContext.java b/backend/src/main/java/com/bakdata/conquery/models/query/QueryPlanContext.java index 38b258928f..39fa43d827 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/models/query/QueryPlanContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/QueryPlanContext.java @@ -6,7 +6,6 @@ import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.worker.Worker; import lombok.AccessLevel; @@ -41,8 +40,4 @@ public ModificationShieldedWorkerStorage getStorage() { return worker.getStorage(); } - public CentralRegistry getCentralRegistry() { - return worker.getStorage().getCentralRegistry(); - } - } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/SingleTableResult.java b/backend/src/main/java/com/bakdata/conquery/models/query/SingleTableResult.java index 2823c5b6e0..0b9f33faa8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/SingleTableResult.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/SingleTableResult.java @@ -20,28 +20,28 @@ public interface SingleTableResult { default List generateColumnDescriptions(boolean isInitialized, ConqueryConfig config) { Preconditions.checkArgument(isInitialized, "The execution must have been initialized first"); - List columnDescriptions = new ArrayList<>(); + final List columnDescriptions = new ArrayList<>(); final Locale locale = I18n.LOCALE.get(); + // The printer is never used to generate results. But downstream code might touch them + final PrintSettings settings = new PrintSettings(true, locale, getNamespace(), config, null, null); - PrintSettings settings = new PrintSettings(true, locale, getNamespace(), config, null, null); - - UniqueNamer uniqNamer = new UniqueNamer(settings); + final UniqueNamer uniqNamer = new UniqueNamer(settings); // First add the id columns to the descriptor list. The are the first columns - for (ResultInfo header : config.getIdColumns().getIdResultInfos(settings)) { + for (ResultInfo header : config.getIdColumns().getIdResultInfos()) { final ColumnDescriptor descriptor = - new ColumnDescriptor(uniqNamer.getUniqueName(header), null, null, ResultType.Primitive.STRING.typeInfo(), header.getSemantics()); + new ColumnDescriptor(uniqNamer.getUniqueName(header, settings), null, null, ResultType.Primitive.STRING.typeInfo(), header.getSemantics()); columnDescriptions.add(descriptor); } final UniqueNamer collector = new UniqueNamer(settings); - getResultInfos(settings).forEach(info -> columnDescriptions.add(info.asColumnDescriptor(collector))); + getResultInfos().forEach(info -> columnDescriptions.add(info.asColumnDescriptor(collector, settings))); return columnDescriptions; } @JsonIgnore - List getResultInfos(PrintSettings printSettings); + List getResultInfos(); /** * @param limit Optionally limits how many lines are emitted. 
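Editorial note: the SingleTableResult and ManagedQuery hunks above drop PrintSettings from getResultInfos(); localized column names and printers are now resolved only at render time (see userColumnName(printSettings) and createPrinter(printerFactory, printSettings) in the EntityPreviewExecution hunks further below). A minimal sketch of that pattern, using hypothetical simplified types rather than the project's actual classes:

import java.util.Locale;
import java.util.function.Function;

public class RenderTimeHeaderSketch {

	// Hypothetical stand-in for a result-info: it carries no locale/settings state itself.
	interface ResultInfo {
		String userColumnName(Locale locale);                  // name resolved per request
		Function<Object, Object> createPrinter(Locale locale); // printer resolved per request
	}

	// Render one raw result line; nulls stay null, everything else goes through the printer.
	static Object[] renderLine(ResultInfo[] infos, Object[] line, Locale locale) {
		final Object[] out = new Object[line.length];
		for (int i = 0; i < line.length; i++) {
			out[i] = line[i] == null ? null : infos[i].createPrinter(locale).apply(line[i]);
		}
		return out;
	}
}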
diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/filter/event/number/MoneyFilterNode.java b/backend/src/main/java/com/bakdata/conquery/models/query/filter/event/number/MoneyFilterNode.java index 9d1d8993d8..b6cbc41351 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/filter/event/number/MoneyFilterNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/filter/event/number/MoneyFilterNode.java @@ -6,9 +6,9 @@ import lombok.ToString; @ToString(callSuper = true) -public class MoneyFilterNode extends NumberFilterNode { +public class MoneyFilterNode extends NumberFilterNode { - public MoneyFilterNode(Column column, Range.LongRange filterValue) { + public MoneyFilterNode(Column column, Range.MoneyRange filterValue) { super(column, filterValue); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java index 59916a9ea6..a8e82f32c5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java @@ -1,6 +1,5 @@ package com.bakdata.conquery.models.query.preview; -import java.math.BigDecimal; import java.time.LocalDate; import java.util.ArrayList; import java.util.Collections; @@ -17,36 +16,33 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.common.QuarterUtils; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.AbsoluteFormQuery; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; import com.bakdata.conquery.models.forms.util.Resolution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ColumnDescriptor; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.JsonResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.query.results.MultilineEntityResult; -import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.types.SemanticType; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; import com.fasterxml.jackson.annotation.JsonIgnore; -import 
com.fasterxml.jackson.databind.node.BooleanNode; -import com.fasterxml.jackson.databind.node.DecimalNode; -import com.fasterxml.jackson.databind.node.IntNode; -import com.fasterxml.jackson.databind.node.TextNode; import com.google.common.collect.MoreCollectors; import lombok.AccessLevel; import lombok.NoArgsConstructor; @@ -65,149 +61,18 @@ public class EntityPreviewExecution extends ManagedInternalForm datasetRegistry) { + public EntityPreviewExecution(EntityPreviewForm entityPreviewQuery, UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { super(entityPreviewQuery, user, submittedDataset, storage, datasetRegistry); } - /** - * Query contains both YEARS and QUARTERS lines: Group them. - */ - private static Map> getQuarterLines(EntityResult entityResult) { - final Map> quarterLines = new HashMap<>(); - - for (Object[] line : entityResult.listResultLines()) { - if (Resolution.valueOf((String) line[AbsoluteFormQuery.RESOLUTION_INDEX]) != Resolution.QUARTERS) { - continue; - } - - // Since we know the dates are always aligned we need to only respect their starts. - final LocalDate date = CDate.toLocalDate(((List) line[AbsoluteFormQuery.TIME_INDEX]).get(0)); - - final int year = date.getYear(); - final int quarter = QuarterUtils.getQuarter(date); - - quarterLines.computeIfAbsent(year, (ignored) -> new HashMap<>(4)).put(quarter, line); - } - - return quarterLines; - } - - /** - * Query contains both YEARS and QUARTERS lines: Group them. - */ - private static Map getYearLines(EntityResult entityResult) { - - final Map yearLines = new HashMap<>(); - - for (Object[] line : entityResult.listResultLines()) { - - if (Resolution.valueOf((String) line[AbsoluteFormQuery.RESOLUTION_INDEX]) != Resolution.YEARS) { - continue; - } - - // Since we know the dates are always aligned we need to only respect their starts. - final LocalDate date = CDate.toLocalDate(((List) line[AbsoluteFormQuery.TIME_INDEX]).get(0)); - - final int year = date.getYear(); - - yearLines.put(year, line); - } - - return yearLines; - } - - /** - * Creates a transformer printing lines, transformed into a Map of label->value. - * Null values are omitted. 
- */ - private static Function> createLineToMapTransformer(List resultInfos, Map select2desc, PrintSettings printSettings) { - - - final int size = resultInfos.size(); - final String[] columnNames = new String[size]; - - //TODO pull renderValue logic into outer loop, only use array as lookup - - for (int index = 0; index < size; index++) { - final ResultInfo resultInfo = resultInfos.get(index); - - if (resultInfo instanceof SelectResultInfo selectResultInfo) { - columnNames[index] = select2desc.get(selectResultInfo.getSelect().getId()).label(); - } - } - - return line -> { - final Map out = new HashMap<>(size); - - for (int column = 0; column < size; column++) { - final String columnName = columnNames[column]; - - if (columnName == null) { - continue; - } - - - final Object value = renderValue(line[column], resultInfos.get(column).getType(), printSettings); - - if (value == null) { - continue; - } - - out.put(columnName, value); - } - - return out; - }; - } - - /** - * Instead of outputting only String values, render to Json equivalents - */ - private static Object renderValue(Object value, ResultType type, PrintSettings printSettings) { - if (value == null) { - return null; - } - - if (type instanceof ResultType.ListT listT) { - return ((List) value).stream().map(entry -> renderValue(entry, listT.getElementType(), printSettings)).collect(Collectors.toList()); - } - - return switch (((ResultType.Primitive) type)) { - case BOOLEAN -> BooleanNode.valueOf((Boolean) value); - case INTEGER -> new IntNode((Integer) value); - case NUMERIC -> DecimalNode.valueOf((BigDecimal) value); - case DATE -> new TextNode(new ResultPrinters.DatePrinter(printSettings).print(value)); //TODO bind printers in outer loop - case DATE_RANGE -> new TextNode(new ResultPrinters.DateRangePrinter(printSettings).print(value)); //TODO bind printers in outer loop - case STRING -> new TextNode(value.toString()); //TODO mapping - case MONEY -> ResultPrinters.readMoney(printSettings, ((Number) value)); - }; + @Override + public void doInitExecutable() { + super.doInitExecutable(); + previewConfig = getNamespace().getPreviewConfig(); } - /** - * For the selects in result infos, build ColumnDescriptors using definitions (label and description) from PreviewConfig. - */ - private static List createChronoColumnDescriptors(SingleTableResult query, Map select2desc, PrintSettings printSettings) { - - final List columnDescriptions = new ArrayList<>(); - - for (ResultInfo info : query.getResultInfos(printSettings)) { - if (info instanceof SelectResultInfo selectResultInfo) { - final PreviewConfig.InfoCardSelect additionalInfo = select2desc.get(selectResultInfo.getSelect().getId()); - - // We build these by hand because they are labeled and described by config. - final ColumnDescriptor descriptor = new ColumnDescriptor( - additionalInfo.label(), - additionalInfo.label(), - (additionalInfo.description() != null) ? 
additionalInfo.description() : selectResultInfo.getDescription(),// both might be null - info.getType().typeInfo(), - info.getSemantics() - ); - columnDescriptions.add(descriptor); - } - } - - - return columnDescriptions; + protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { + status.setColumnDescriptions(generateColumnDescriptions(isInitialized(), getConfig())); } @Override @@ -216,12 +81,6 @@ public boolean isSystem() { return true; } - @Override - public void doInitExecutable() { - super.doInitExecutable(); - previewConfig = getNamespace().getPreviewConfig(); - } - /** * Collects status of {@link EntityPreviewForm#getValuesQuery()} and {@link EntityPreviewForm#getInfoCardQuery()}. *
@@ -237,19 +96,27 @@ public FullExecutionStatus buildStatusFull(Subject subject, Namespace namespace) status.setQuery(getValuesQuery().getQuery()); + JsonResultPrinters printers = new JsonResultPrinters(); + final PrintSettings infoSettings = new PrintSettings(true, I18n.LOCALE.get(), getNamespace(), getConfig(), null, previewConfig::resolveSelectLabel); + status.setInfos(transformQueryResultToInfos(getInfoCardExecution(), infoSettings, printers)); - status.setInfos(transformQueryResultToInfos(getInfoCardExecution(), new PrintSettings(true, I18n.LOCALE.get(), getNamespace(), getConfig(), null, previewConfig::resolveSelectLabel))); - - status.setTimeStratifiedInfos(toChronoInfos(previewConfig, getSubQueries(), new PrintSettings(false, I18n.LOCALE.get(), getNamespace(), getConfig(), null, previewConfig::resolveSelectLabel))); + final PrintSettings stratifiedSettings = new PrintSettings(false, I18n.LOCALE.get(), getNamespace(), getConfig(), null, previewConfig::resolveSelectLabel); + status.setTimeStratifiedInfos(toChronoInfos(previewConfig, getSubQueries(), stratifiedSettings, printers)); return status; } + @JsonIgnore + private ManagedQuery getValuesQuery() { + return getSubQueries().get(EntityPreviewForm.VALUES_QUERY_NAME); + } + /** * Takes a ManagedQuery, and transforms its result into a List of {@link EntityPreviewStatus.Info}. * The format of the query is an {@link AbsoluteFormQuery} containing a single line for one person. This should correspond to {@link EntityPreviewForm#VALUES_QUERY_NAME}. */ - private List transformQueryResultToInfos(ManagedQuery infoCardExecution, PrintSettings printSettings) { + private List transformQueryResultToInfos( + ManagedQuery infoCardExecution, PrintSettings printSettings, PrinterFactory printerFactory) { // Submitted Query is a single line of an AbsoluteFormQuery => MultilineEntityResult with a single line. @@ -259,18 +126,25 @@ private List transformQueryResultToInfos(ManagedQuery final List extraInfos = new ArrayList<>(values.length); // We are only interested in the Select results. 
- for (int index = AbsoluteFormQuery.FEATURES_OFFSET; index < infoCardExecution.getResultInfos(printSettings).size(); index++) { - final ResultInfo resultInfo = infoCardExecution.getResultInfos(printSettings).get(index); + for (int index = AbsoluteFormQuery.FEATURES_OFFSET; index < infoCardExecution.getResultInfos().size(); index++) { + final ResultInfo resultInfo = infoCardExecution.getResultInfos().get(index); + final Object value = values[index]; + final Object printed; - final Object printed = renderValue(values[index], resultInfo.getType(), printSettings); + if (value == null) { + printed = null; + } + else { + Printer printer = resultInfo.createPrinter(printerFactory, printSettings); + printed = printer.apply(value); + } - extraInfos.add(new EntityPreviewStatus.Info( - resultInfo.userColumnName(), - printed, - resultInfo.getType().typeInfo(), - resultInfo.getDescription(), - resultInfo.getSemantics() + extraInfos.add(new EntityPreviewStatus.Info(resultInfo.userColumnName(printSettings), + printed, + resultInfo.getType().typeInfo(), + resultInfo.getDescription(), + resultInfo.getSemantics() )); } @@ -283,7 +157,8 @@ private ManagedQuery getInfoCardExecution() { } @NotNull - private List toChronoInfos(PreviewConfig previewConfig, Map subQueries, PrintSettings printSettings) { + private List toChronoInfos( + PreviewConfig previewConfig, Map subQueries, PrintSettings printSettings, PrinterFactory printers) { final List timeStratifiedInfos = new ArrayList<>(); for (PreviewConfig.TimeStratifiedSelects description : previewConfig.getTimeStratifiedSelects()) { @@ -292,22 +167,24 @@ private List toChronoInfos(PreviewConfi final EntityResult entityResult = query.streamResults(OptionalLong.empty()).collect(MoreCollectors.onlyElement()); final Map select2desc = - description.selects().stream() - .collect(Collectors.toMap(PreviewConfig.InfoCardSelect::select, Function.identity())); + description.selects().stream().collect(Collectors.toMap(PreviewConfig.InfoCardSelect::select, Function.identity())); // Group lines by year and quarter. - final Function> lineTransformer = createLineToMapTransformer(query.getResultInfos(printSettings), select2desc, printSettings); + final Function> lineTransformer = createLineToMapTransformer(query.getResultInfos(), select2desc, printSettings, printers); final List yearEntries = createYearEntries(entityResult, lineTransformer); final Object[] completeResult = getCompleteLine(entityResult); // get descriptions, but drop everything that isn't a select result as the rest is already structured - final List columnDescriptors = createChronoColumnDescriptors(query, select2desc, printSettings); + final List columnDescriptors = createChronoColumnDescriptors(query, select2desc); - final EntityPreviewStatus.TimeStratifiedInfos - infos = - new EntityPreviewStatus.TimeStratifiedInfos(description.label(), description.description(), columnDescriptors, lineTransformer.apply(completeResult), yearEntries); + final EntityPreviewStatus.TimeStratifiedInfos infos = new EntityPreviewStatus.TimeStratifiedInfos(description.label(), + description.description(), + columnDescriptors, + lineTransformer.apply(completeResult), + yearEntries + ); timeStratifiedInfos.add(infos); } @@ -315,6 +192,51 @@ private List toChronoInfos(PreviewConfi return timeStratifiedInfos; } + /** + * Creates a transformer printing lines, transformed into a Map of label->value. + * Null values are omitted. 
+ */ + private static Function> createLineToMapTransformer( + List resultInfos, Map select2desc, PrintSettings printSettings, PrinterFactory printerFactory) { + + + final int size = resultInfos.size(); + final String[] columnNames = new String[size]; + final Printer[] printers = new Printer[size]; + + for (int index = 0; index < size; index++) { + final ResultInfo resultInfo = resultInfos.get(index); + + if (resultInfo instanceof SelectResultInfo selectResultInfo) { + columnNames[index] = select2desc.get(selectResultInfo.getSelect().getId()).label(); + } + + printers[index] = resultInfo.createPrinter(printerFactory, printSettings); + } + + return line -> { + final Map out = new HashMap<>(size); + + for (int column = 0; column < size; column++) { + final String columnName = columnNames[column]; + + if (columnName == null) { + continue; + } + + if (line[column] == null) { + continue; + } + + final Object value = printers[column].apply(line[column]); + + out.put(columnName, value); + } + + return out; + }; + } + @NotNull private List createYearEntries(EntityResult entityResult, Function> lineTransformer) { final Map yearLines = getYearLines(entityResult); @@ -341,21 +263,88 @@ private List createYearEntries(EntityResult entit private Object[] getCompleteLine(EntityResult entityResult) { for (Object[] line : entityResult.listResultLines()) { + if (Resolution.valueOf((String) line[AbsoluteFormQuery.RESOLUTION_INDEX]) == Resolution.COMPLETE) { + return line; + } + } + + throw new IllegalStateException("Result has no row for COMPLETE"); + } + + /** + * For the selects in result infos, build ColumnDescriptors using definitions (label and description) from PreviewConfig. + */ + private static List createChronoColumnDescriptors(SingleTableResult query, Map select2desc) { + + final List columnDescriptions = new ArrayList<>(); + + for (ResultInfo info : query.getResultInfos()) { + if (info instanceof SelectResultInfo selectResultInfo) { + final PreviewConfig.InfoCardSelect desc = select2desc.get(selectResultInfo.getSelect().getId()); + + // We build these by hand because they are labeled and described by config. + columnDescriptions.add(new ColumnDescriptor(desc.label(), + desc.label(), + desc.description() != null ? desc.description() : selectResultInfo.getDescription(), // both might be null + info.getType().typeInfo(), + info.getSemantics() + )); + } + } + + + return columnDescriptions; + } + + /** + * Query contains both YEARS and QUARTERS lines: Group them. + */ + private static Map getYearLines(EntityResult entityResult) { + + final Map yearLines = new HashMap<>(); + + for (Object[] line : entityResult.listResultLines()) { + + if (Resolution.valueOf((String) line[AbsoluteFormQuery.RESOLUTION_INDEX]) != Resolution.YEARS) { + continue; + } + // Since we know the dates are always aligned we need to only respect their starts. final LocalDate date = CDate.toLocalDate(((List) line[AbsoluteFormQuery.TIME_INDEX]).get(0)); final int year = date.getYear(); - final int quarter = QuarterUtils.getQuarter(date); - if (Resolution.valueOf((String) line[AbsoluteFormQuery.RESOLUTION_INDEX]) == Resolution.COMPLETE) { - return line; + yearLines.put(year, line); + } + + return yearLines; + } + + /** + * Query contains both YEARS and QUARTERS lines: Group them. 
+ */ + private static Map> getQuarterLines(EntityResult entityResult) { + final Map> quarterLines = new HashMap<>(); + + for (Object[] line : entityResult.listResultLines()) { + if (Resolution.valueOf((String) line[AbsoluteFormQuery.RESOLUTION_INDEX]) != Resolution.QUARTERS) { + continue; } + + // Since we know the dates are always aligned we need to only respect their starts. + final LocalDate date = CDate.toLocalDate(((List) line[AbsoluteFormQuery.TIME_INDEX]).get(0)); + + final int year = date.getYear(); + final int quarter = QuarterUtils.getQuarter(date); + + quarterLines.computeIfAbsent(year, (ignored) -> new HashMap<>(4)).put(quarter, line); } - throw new IllegalStateException("Result has no row for COMPLETE"); + return quarterLines; } - protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { + @Override + protected void setAdditionalFieldsForStatusWithSource(Subject subject, FullExecutionStatus status, Namespace namespace) { status.setColumnDescriptions(generateColumnDescriptions(isInitialized(), getConfig())); } @@ -368,14 +357,15 @@ public List generateColumnDescriptions(boolean isInitialized, if (descriptor.getSemantics() .stream() .anyMatch(semanticType -> semanticType instanceof SemanticType.SecondaryIdT desc - && previewConfig.isGroupingColumn(desc.getSecondaryId()))) { + && previewConfig.isGroupingColumn(desc.getSecondaryId().resolve()) + )) { descriptor.getSemantics().add(new SemanticType.GroupT()); } // Add hidden semantics to fields flagged for hiding. if (descriptor.getSemantics() .stream() - .anyMatch(semanticType -> semanticType instanceof SemanticType.ColumnT desc && previewConfig.isHidden(desc.getColumn()))) { + .anyMatch(semanticType -> semanticType instanceof SemanticType.ColumnT desc && previewConfig.isHidden(desc.getColumn().resolve()))) { descriptor.getSemantics().add(new SemanticType.HiddenT()); } } @@ -384,19 +374,9 @@ public List generateColumnDescriptions(boolean isInitialized, return descriptors; } - @JsonIgnore - private ManagedQuery getValuesQuery() { - return getSubQueries().get(EntityPreviewForm.VALUES_QUERY_NAME); - } - - @Override - protected void setAdditionalFieldsForStatusWithSource(Subject subject, FullExecutionStatus status, Namespace namespace) { - status.setColumnDescriptions(generateColumnDescriptions(isInitialized(), getConfig())); - } - @Override - public List getResultInfos(PrintSettings printSettings) { - return getValuesQuery().getResultInfos(printSettings); + public List getResultInfos() { + return getValuesQuery().getResultInfos(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewForm.java b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewForm.java index 62747276a3..a71354834e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewForm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewForm.java @@ -23,17 +23,19 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; -import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.select.Select; import 
com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.AbsoluteFormQuery; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.query.visitor.QueryVisitor; @@ -76,13 +78,7 @@ public class EntityPreviewForm extends Form implements InternalForm { private final Map timeOverViews; - @Nullable - @Override - public JsonNode getValues() { - return null; // will not be implemented. - } - - public static EntityPreviewForm create(String entity, String idKind, Range dateRange, List sources, List infos, List timeStratifiedSelects, DatasetRegistry datasetRegistry) { // We use this query to filter for the single selected query. final Query entitySelectQuery = new ConceptQuery(new CQExternal(List.of(idKind), new String[][]{{"HEAD"}, {entity}}, true)); @@ -96,6 +92,30 @@ public static EntityPreviewForm create(String entity, String idKind, Range dateRange, List sources, Query entitySelectQuery) { + // Query exporting selected Sources of the Entity. + final TableExportQuery exportQuery = new TableExportQuery(entitySelectQuery); + + exportQuery.setDateRange(dateRange); + exportQuery.setTables(sources.stream().map(ConnectorId::resolve).map(CQConcept::forConnector).collect(Collectors.toList())); + exportQuery.setRawConceptValues(false); + return exportQuery; + } + + @NotNull + private static AbsoluteFormQuery createInfoCardQuery(Range dateRange, List infos, Query entitySelectQuery) { - // Query exporting a few additional infos on the entity. - return new AbsoluteFormQuery(entitySelectQuery, dateRange, - ArrayConceptQuery.createFromFeatures( - infos.stream() - .map(CQConcept::forSelect) - .collect(Collectors.toList()) - ), - List.of(ExportForm.ResolutionAndAlignment.of(Resolution.COMPLETE, Alignment.NO_ALIGN)) - ); + @Nullable + @Override + public JsonNode getValues() { + return null; // will not be implemented. } - @NotNull - private static TableExportQuery createExportQuery(Range dateRange, List sources, Query entitySelectQuery) { - // Query exporting selected Sources of the Entity. - final TableExportQuery exportQuery = new TableExportQuery(entitySelectQuery); - - exportQuery.setDateRange(dateRange); - exportQuery.setTables(sources.stream().map(CQConcept::forConnector).collect(Collectors.toList())); - exportQuery.setRawConceptValues(false); - return exportQuery; + @Override + public void authorize(Subject subject, Dataset submittedDataset, @NonNull List visitors, MetaStorage storage) { + QueryDescription.authorizeQuery(this, subject, submittedDataset, visitors, storage); } + @Override + public String getLocalizedTypeLabel() { + // If we successfully keep away system queries from the users, this should not be called except for buildStatusFull, where it is ignored. 
+ return getClass().getAnnotation(CPSType.class).id(); + } @Override public Map createSubQueries() { @@ -160,18 +172,7 @@ public Map createSubQueries() { } @Override - public void authorize(Subject subject, Dataset submittedDataset, @NonNull List visitors, MetaStorage storage) { - QueryDescription.authorizeQuery(this, subject, submittedDataset, visitors, storage); - } - - @Override - public String getLocalizedTypeLabel() { - // If we successfully keep away system queries from the users, this should not be called except for buildStatusFull, where it is ignored. - return getClass().getAnnotation(CPSType.class).id(); - } - - @Override - public ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { return new EntityPreviewExecution(this, user, submittedDataset, storage, datasetRegistry); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/ConceptQueryPlan.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/ConceptQueryPlan.java index 3e943f2dbe..96da92712c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/ConceptQueryPlan.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/ConceptQueryPlan.java @@ -10,6 +10,7 @@ import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.EmptyBucket; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -100,11 +101,12 @@ public Optional execute(QueryExecutionContext ctx, Entit nextTable(ctx, currentTable); - final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable); + final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable.getId()); log.trace("Table[{}] has {} buckets for Entity[{}]", currentTable, tableBuckets, entity); - for (Bucket bucket : tableBuckets) { + for (BucketId bucketId : tableBuckets) { + Bucket bucket = bucketId.resolve(); if (!isOfInterest(bucket)) { continue; diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/SecondaryIdQueryPlan.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/SecondaryIdQueryPlan.java index 26591d05f5..035d8e434a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/SecondaryIdQueryPlan.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/SecondaryIdQueryPlan.java @@ -16,6 +16,7 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; @@ -55,11 +56,6 @@ public class SecondaryIdQueryPlan implements QueryPlan { private final Set

tablesWithoutSecondaryId; private final ConceptQueryPlan queryPlan; - - - private Map childPerKey = new HashMap<>(); - - /** * TODO borrow these from {@link QueryExecutionContext} * @@ -67,8 +63,20 @@ public class SecondaryIdQueryPlan implements QueryPlan { */ @Getter(AccessLevel.NONE) private final Queue childPlanReusePool = new LinkedList<>(); - private final int subPlanRetentionLimit; + private Map childPerKey = new HashMap<>(); + + @Override + public void init(QueryExecutionContext ctx, Entity entity) { + queryPlan.init(ctx, entity); + + // Dump the created children into reuse-pool + childPlanReusePool.clear(); + + childPerKey.values().stream().limit(subPlanRetentionLimit).forEach(childPlanReusePool::add); + + childPerKey = new HashMap<>(); + } /** * This is the same execution as a typical ConceptQueryPlan. The difference @@ -107,9 +115,11 @@ private void executeQueriesWithSecondaryId(QueryExecutionContext ctx, Entity ent nextTable(ctxWithPhase, currentTable); - final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable); + final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable.getId()); + + for (BucketId bucketId : tableBuckets) { + Bucket bucket = bucketId.resolve(); - for (Bucket bucket : tableBuckets) { String entityId = entity.getId(); nextBlock(bucket); @@ -154,17 +164,14 @@ private void executeQueriesWithSecondaryId(QueryExecutionContext ctx, Entity ent } } - private boolean discardSubPlan(ConceptQueryPlan plan) { - return childPlanReusePool.add(plan); - } - private void executeQueriesWithoutSecondaryId(QueryExecutionContext ctx, Entity entity, Table currentTable) { nextTable(ctx, currentTable); - final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable); + final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable.getId()); - for (Bucket bucket : tableBuckets) { + for (BucketId bucketId : tableBuckets) { + Bucket bucket = bucketId.resolve(); String entityId = entity.getId(); nextBlock(bucket); if (!bucket.containsEntity(entityId) || !isOfInterest(bucket)) { @@ -242,23 +249,15 @@ private ConceptQueryPlan createChild(QueryExecutionContext currentContext, Bucke final QueryExecutionContext context = QueryUtils.determineDateAggregatorForContext(currentContext, plan::getValidityDateAggregator); plan.init(context, queryPlan.getEntity()); - plan.nextTable(context, currentBucket.getTable()); + plan.nextTable(context, currentBucket.getTable().resolve()); plan.isOfInterest(currentBucket); plan.nextBlock(currentBucket); return plan; } - @Override - public void init(QueryExecutionContext ctx, Entity entity) { - queryPlan.init(ctx, entity); - - // Dump the created children into reuse-pool - childPlanReusePool.clear(); - - childPerKey.values().stream().limit(subPlanRetentionLimit).forEach(childPlanReusePool::add); - - childPerKey = new HashMap<>(); + private boolean discardSubPlan(ConceptQueryPlan plan) { + return childPlanReusePool.add(plan); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java index 10c870ce4e..1de21de4b8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java @@ -5,13 +5,18 @@ import java.util.Map; import java.util.Optional; 
+import com.bakdata.conquery.apiv1.query.TableExportQuery; import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -20,6 +25,7 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.ToString; +import org.jetbrains.annotations.NotNull; /** * The QueryPlan creates a full dump of the given table within a certain @@ -39,7 +45,7 @@ public class TableExportQueryPlan implements QueryPlan { private final Map tables; @ToString.Exclude - private final Map positions; + private final Map positions; /** * If true, Connector {@link Column}s will be output raw. @@ -48,18 +54,6 @@ public class TableExportQueryPlan implements QueryPlan { @Getter private final boolean rawConceptValues; - - @Override - public boolean isOfInterest(Entity entity) { - return subPlan.isOfInterest(entity); - } - - @Override - public Optional> getValidityDateAggregator() { - // TODO create a fake aggregator and feed it inside the loop, return it here. - return Optional.empty(); - } - @Override public void init(QueryExecutionContext ctxt, Entity entity) { subPlan.init(ctxt, entity); @@ -76,7 +70,7 @@ public Optional execute(QueryExecutionContext ctx, Entity final List results = new ArrayList<>(); - final int totalColumns = positions.values().stream().mapToInt(i -> i).max().getAsInt() + 1; + final int totalColumns = TableExportQuery.calculateWidth(positions); final String entityId = entity.getId(); for (Map.Entry entry : tables.entrySet()) { @@ -84,9 +78,12 @@ public Optional execute(QueryExecutionContext ctx, Entity final CQTable cqTable = entry.getKey(); final ValidityDate validityDate = cqTable.findValidityDate(); final QPNode query = entry.getValue(); - final Map cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, cqTable.getConnector()); + final Map cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, cqTable.getConnector()); + final Connector connector = cqTable.getConnector().resolve(); - for (Bucket bucket : ctx.getEntityBucketsForTable(entity, cqTable.getConnector().getTable())) { + for (BucketId bucketId : ctx.getEntityBucketsForTable(entity, connector.getResolvedTableId())) { + Bucket bucket = bucketId.resolve(); + CBlock cBlock = cblocks.get(bucketId).resolve(); if (!shouldEvaluateBucket(query, bucket, entity, ctx)) { continue; @@ -106,7 +103,7 @@ public Optional execute(QueryExecutionContext ctx, Entity continue; } - final Object[] resultRow = collectRow(totalColumns, cqTable, bucket, event, validityDate, cblocks.get(bucket)); + final Object[] resultRow = collectRow(totalColumns, cqTable, bucket, event, validityDate, cBlock); results.add(resultRow); } @@ -116,6 +113,18 @@ public Optional execute(QueryExecutionContext ctx, Entity return Optional.of(new 
MultilineEntityResult(entity.getId(), results)); } + @Override + public boolean isOfInterest(Entity entity) { + return subPlan.isOfInterest(entity); + } + + @NotNull + @Override + public Optional> getValidityDateAggregator() { + // TODO create a fake aggregator and feed it inside the loop, return it here. + return Optional.empty(); + } + /** * Test if the Bucket should even be evaluated for the {@link QPNode}. *
@@ -128,7 +137,7 @@ private boolean shouldEvaluateBucket(QPNode query, Bucket bucket, Entity entity, return false; } - query.nextTable(ctx, bucket.getTable()); + query.nextTable(ctx, bucket.getTable().resolve()); query.nextBlock(bucket); return query.isOfInterest(bucket); @@ -140,7 +149,7 @@ private boolean shouldEvaluateBucket(QPNode query, Bucket bucket, Entity entity, private boolean isRowIncluded(QPNode query, Bucket bucket, Entity entity, int event, QueryExecutionContext ctx) { query.init(entity, ctx); - query.nextTable(ctx, bucket.getTable()); + query.nextTable(ctx, bucket.getTable().resolve()); query.nextBlock(bucket); query.acceptEvent(bucket, event); @@ -158,16 +167,18 @@ private Object[] collectRow(int totalColumns, CQTable exportDescription, Bucket entry[0] = List.of(date); } - entry[1] = exportDescription.getConnector().getTable().getLabel(); + final Connector connector = exportDescription.getConnector().resolve(); + entry[1] = connector.getResolvedTable().getLabel(); - for (Column column : exportDescription.getConnector().getTable().getColumns()) { + for (Column column : connector.getResolvedTable().getColumns()) { // ValidityDates are handled separately. if (validityDate != null && validityDate.containsColumn(column)){ continue; } - if (!positions.containsKey(column)) { + final ColumnId columnId = column.getId(); + if (!positions.containsKey(columnId)) { continue; } @@ -175,9 +186,9 @@ private Object[] collectRow(int totalColumns, CQTable exportDescription, Bucket continue; } - final int position = positions.get(column); + final int position = positions.get(columnId); - if (!rawConceptValues && column.equals(exportDescription.getConnector().getColumn())) { + if (!rawConceptValues && columnId.equals(connector.getColumn())) { entry[position] = cblock.getMostSpecificChildLocalId(event); continue; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/DistinctValuesWrapperAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/DistinctValuesWrapperAggregator.java index 4f3187ce9e..39c6610208 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/DistinctValuesWrapperAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/DistinctValuesWrapperAggregator.java @@ -10,7 +10,6 @@ import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; -import com.bakdata.conquery.models.types.ResultType; import com.google.common.collect.ImmutableList; import lombok.Getter; import lombok.RequiredArgsConstructor; diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/MultiSelectAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/MultiSelectAggregator.java deleted file mode 100644 index f428a0167c..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/MultiSelectAggregator.java +++ /dev/null @@ -1,80 +0,0 @@ -package com.bakdata.conquery.models.query.queryplan.aggregators.specific; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.events.Bucket; -import com.bakdata.conquery.models.query.QueryExecutionContext; -import 
com.bakdata.conquery.models.query.entity.Entity; -import com.bakdata.conquery.models.query.queryplan.aggregators.SingleColumnAggregator; -import lombok.ToString; - -/** - * Aggregator counting the occurrence of multiple values. - */ -@ToString(callSuper = true, of = "selection") -public class MultiSelectAggregator extends SingleColumnAggregator> { - - private final String[] selection; - private final int[] hits; - - public MultiSelectAggregator(Column column, String[] selection) { - super(column); - this.selection = selection; - this.hits = new int[selection.length]; - } - - @Override - public void init(Entity entity, QueryExecutionContext context) { - Arrays.fill(hits, 0); - } - - @Override - public void nextBlock(Bucket bucket) { - } - - @Override - public void consumeEvent(Bucket bucket, int event) { - if (!bucket.has(event, getColumn())) { - return; - } - - String stringToken = bucket.getString(event, getColumn()); - - for (int index = 0; index < selection.length; index++) { - if (Objects.equals(selection[index], stringToken)) { - hits[index]++; - return; - } - } - } - - @Override - public Map createAggregationResult() { - Map out = new HashMap<>(); - - for (int i = 0; i < hits.length; i++) { - int hit = hits[i]; - if (hit > 0) { - out.merge(selection[i], hit, Integer::sum); - } - } - - return out.isEmpty() ? null : out; - } - - @Override - public boolean isOfInterest(Bucket bucket) { -//TODO - // for (String selected : selection) { -// if (((StringStore) bucket.getStores()[getColumn().getPosition()]).getId(selected) == -1) { -// return false; -// } -// } - - return super.isOfInterest(bucket); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/SelectAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/SelectAggregator.java deleted file mode 100644 index d654d03ad2..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/SelectAggregator.java +++ /dev/null @@ -1,60 +0,0 @@ -package com.bakdata.conquery.models.query.queryplan.aggregators.specific; - -import java.util.Objects; - -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.events.Bucket; -import com.bakdata.conquery.models.query.QueryExecutionContext; -import com.bakdata.conquery.models.query.entity.Entity; -import com.bakdata.conquery.models.query.queryplan.aggregators.SingleColumnAggregator; -import lombok.ToString; - - -/** - * Aggregator counting the number of occurrences of a selected value in a column. - */ -@ToString(callSuper = true, of = {"selected"}) -public class SelectAggregator extends SingleColumnAggregator { - - private final String selected; - private long hits = 0; - - public SelectAggregator(Column column, String selected) { - super(column); - this.selected = selected; - } - - @Override - public void init(Entity entity, QueryExecutionContext context) { - hits = 0; - } - - @Override - public void nextBlock(Bucket bucket) { - } - - @Override - public void consumeEvent(Bucket bucket, int event) { - - if (!bucket.has(event, getColumn())) { - return; - } - - final String value = bucket.getString(event, getColumn()); - - if (Objects.equals(value, selected)) { - hits++; - } - } - - @Override - public Long createAggregationResult() { - return hits > 0 ? 
hits : null; - } - - @Override - public boolean isOfInterest(Bucket bucket) { - return super.isOfInterest(bucket); - //TODO && ((StringStore) bucket.getStores()[getColumn().getPosition()]).getId(selected) != -1; - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/diffsum/MoneyDiffSumAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/diffsum/MoneyDiffSumAggregator.java index 1a1edf8d17..7360e7ad0c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/diffsum/MoneyDiffSumAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/diffsum/MoneyDiffSumAggregator.java @@ -1,5 +1,6 @@ package com.bakdata.conquery.models.query.queryplan.aggregators.specific.diffsum; +import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; @@ -15,13 +16,13 @@ * Aggregator summing over {@code addendColumn} and subtracting over {@code subtrahendColumn}, for money columns. */ @ToString(of = {"addendColumn", "subtrahendColumn"}) -public class MoneyDiffSumAggregator extends ColumnAggregator { +public class MoneyDiffSumAggregator extends ColumnAggregator { @Getter private final Column addendColumn; @Getter private final Column subtrahendColumn; - private long sum; + private BigDecimal sum; private boolean hit; public MoneyDiffSumAggregator(Column addend, Column subtrahend) { @@ -32,7 +33,7 @@ public MoneyDiffSumAggregator(Column addend, Column subtrahend) { @Override public void init(Entity entity, QueryExecutionContext context) { hit = false; - sum = 0; + sum = BigDecimal.ZERO; } @@ -55,15 +56,15 @@ public void consumeEvent(Bucket bucket, int event) { hit = true; - long addend = bucket.has(event, getAddendColumn()) ? bucket.getMoney(event, getAddendColumn()) : 0; + final BigDecimal addend = bucket.has(event, getAddendColumn()) ? bucket.getMoney(event, getAddendColumn()) : BigDecimal.ZERO; - long subtrahend = bucket.has(event, getSubtrahendColumn()) ? bucket.getMoney(event, getSubtrahendColumn()) : 0; + final BigDecimal subtrahend = bucket.has(event, getSubtrahendColumn()) ? bucket.getMoney(event, getSubtrahendColumn()) : BigDecimal.ZERO; - sum = sum + addend - subtrahend; + sum = sum.add(addend).subtract(subtrahend); } @Override - public Long createAggregationResult() { + public BigDecimal createAggregationResult() { return hit ? sum : null; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/sum/MoneySumAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/sum/MoneySumAggregator.java index f9d0fc46be..facec8f618 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/sum/MoneySumAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/sum/MoneySumAggregator.java @@ -1,5 +1,7 @@ package com.bakdata.conquery.models.query.queryplan.aggregators.specific.sum; +import java.math.BigDecimal; + import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; @@ -11,10 +13,10 @@ * Aggregator implementing a sum over {@code column}, for money columns. 
*/ @ToString(callSuper = true, onlyExplicitlyIncluded = true) -public class MoneySumAggregator extends SingleColumnAggregator { +public class MoneySumAggregator extends SingleColumnAggregator { private boolean hit = false; - private long sum = 0L; + private BigDecimal sum; public MoneySumAggregator(Column column) { super(column); @@ -23,7 +25,7 @@ public MoneySumAggregator(Column column) { @Override public void init(Entity entity, QueryExecutionContext context) { hit = false; - sum = 0; + sum = BigDecimal.ZERO; } @@ -35,13 +37,13 @@ public void consumeEvent(Bucket bucket, int event) { hit = true; - long addend = bucket.getMoney(event, getColumn()); + final BigDecimal addend = bucket.getMoney(event, getColumn()); - sum = sum + addend; + sum = sum.add(addend); } @Override - public Long createAggregationResult() { + public BigDecimal createAggregationResult() { return hit ? sum : null; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptElementsAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptElementsAggregator.java index df4c440b78..6d018c772a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptElementsAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptElementsAggregator.java @@ -10,6 +10,8 @@ import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -26,7 +28,7 @@ public class ConceptElementsAggregator extends Aggregator> { private Column column; private Entity entity; - private Map cblocks; + private Map cblocks; private CBlock cblock; private final Map tableConnectors; @@ -35,7 +37,7 @@ public ConceptElementsAggregator(TreeConcept concept) { super(); tableConnectors = concept.getConnectors().stream() .filter(conn -> conn.getColumn() != null) - .collect(Collectors.toMap(Connector::getTable, Functions.identity())); + .collect(Collectors.toMap(Connector::getResolvedTable, Functions.identity())); } @Override @@ -52,13 +54,13 @@ public void nextTable(QueryExecutionContext ctx, Table currentTable) { return; } - column = connector.getColumn(); - cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, connector); + column = connector.getColumn().resolve(); + cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, connector.getId()); } @Override public void nextBlock(Bucket bucket) { - cblock = cblocks.get(bucket); + cblock = cblocks.get(bucket.getId()).resolve(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptValuesAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptValuesAggregator.java index 83bf74264c..773e813861 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptValuesAggregator.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptValuesAggregator.java @@ -10,6 +10,7 @@ import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -32,7 +33,7 @@ public ConceptValuesAggregator(TreeConcept concept) { this.concept = concept; tableConnectors = concept.getConnectors().stream() .filter(conn -> conn.getColumn() != null) - .collect(Collectors.toMap(Connector::getTable, Functions.identity())); + .collect(Collectors.toMap(Connector::getResolvedTable, Functions.identity())); } @Override @@ -49,7 +50,8 @@ public void nextTable(QueryExecutionContext ctx, Table currentTable) { return; } - column = connector.getColumn(); + final ColumnId columnId = connector.getColumn(); + column = columnId != null ? columnId.resolve() : null; } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ConceptNode.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ConceptNode.java index ae790ac71b..09f682a164 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ConceptNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ConceptNode.java @@ -10,8 +10,11 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; +import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.QPChainNode; @@ -28,7 +31,7 @@ public class ConceptNode extends QPChainNode { private final CQTable table; private final SecondaryIdDescription selectedSecondaryId; private boolean tableActive; - private Map preCurrentRow; + private Map preCurrentRow; private CBlock currentRow; public ConceptNode(QPNode child, List> concepts, CQTable table, SecondaryIdDescription selectedSecondaryId) { @@ -66,17 +69,18 @@ public void init(Entity entity, QueryExecutionContext context) { @Override public void nextTable(QueryExecutionContext ctx, Table currentTable) { - tableActive = table.getConnector().getTable().equals(currentTable) + Connector connector = table.getConnector().resolve(); + tableActive = connector.getResolvedTableId().equals(currentTable.getId()) && ctx.getActiveSecondaryId() == selectedSecondaryId; if(tableActive) { - super.nextTable(ctx.withConnector(table.getConnector()), currentTable); + super.nextTable(ctx.withConnector(connector), currentTable); } } @Override public void nextBlock(Bucket bucket) { if (tableActive) { - currentRow = Objects.requireNonNull(preCurrentRow.get(bucket)); + currentRow = Objects.requireNonNull(preCurrentRow.get(bucket.getId()).resolve()); super.nextBlock(bucket); } } @@ -98,7 +102,7 @@ 
public boolean isOfInterest(Bucket bucket) { return false; } - final CBlock cBlock = Objects.requireNonNull(preCurrentRow.get(bucket)); + final CBlock cBlock = Objects.requireNonNull(preCurrentRow.get(bucket.getId()).resolve()); if(cBlock.isConceptIncluded(entity.getId(), requiredBits)) { return super.isOfInterest(bucket); @@ -137,7 +141,7 @@ public boolean isContained() { @Override public void collectRequiredTables(Set
<Table>
requiredTables) { super.collectRequiredTables(requiredTables); - requiredTables.add(table.getConnector().getTable()); + requiredTables.add(table.getConnector().resolve().getResolvedTable()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java index eba1af8b94..0a8601b6a8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java @@ -1,7 +1,6 @@ package com.bakdata.conquery.models.query.queryplan.specific; import java.util.Map; -import java.util.Objects; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; @@ -9,6 +8,8 @@ import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.queryplan.QPChainNode; import com.bakdata.conquery.models.query.queryplan.QPNode; @@ -19,10 +20,9 @@ public class ValidityDateNode extends QPChainNode { private final ValidityDate validityDate; + protected Map preCurrentRow; private transient CDateSet restriction; - protected Map preCurrentRow; - public ValidityDateNode(ValidityDate validityDate, QPNode child) { super(child); Preconditions.checkNotNull(validityDate, this.getClass().getSimpleName() + " needs a validityDate"); @@ -42,12 +42,11 @@ public boolean acceptEvent(Bucket bucket, int event) { } @Override - public boolean isOfInterest(Bucket bucket) { - final CBlock cBlock = Objects.requireNonNull(preCurrentRow.get(bucket)); - - final CDateRange range = cBlock.getEntityDateRange(entity.getId()); + public void nextTable(QueryExecutionContext ctx, Table currentTable) { + super.nextTable(ctx.withValidityDateColumn(validityDate), currentTable); + restriction = ctx.getDateRestriction(); - return restriction.intersects(range) && super.isOfInterest(bucket); + preCurrentRow = ctx.getBucketManager().getEntityCBlocksForConnector(getEntity(), context.getConnector().getId()); } @Override @@ -56,10 +55,11 @@ public boolean isContained() { } @Override - public void nextTable(QueryExecutionContext ctx, Table currentTable) { - super.nextTable(ctx.withValidityDateColumn(validityDate), currentTable); - restriction = ctx.getDateRestriction(); + public boolean isOfInterest(Bucket bucket) { + final CBlock cBlock = preCurrentRow.get(bucket.getId()).resolve(); - preCurrentRow = ctx.getBucketManager().getEntityCBlocksForConnector(getEntity(), context.getConnector()); + final CDateRange range = cBlock.getEntityDateRange(entity.getId()); + + return restriction.intersects(range) && super.isOfInterest(bucket); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ColumnResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ColumnResultInfo.java index b1d8abc4f5..96faaa22f3 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ColumnResultInfo.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ColumnResultInfo.java @@ -1,12 +1,14 @@ package 
com.bakdata.conquery.models.query.resultinfo; -import java.util.Set; +import java.util.Collections; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; +import com.bakdata.conquery.models.query.resultinfo.printers.common.ConceptIdPrinter; import com.bakdata.conquery.models.types.ResultType; -import com.bakdata.conquery.models.types.SemanticType; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; @@ -19,25 +21,33 @@ public class ColumnResultInfo extends ResultInfo { private final Column column; private final ResultType type; private final String description; - private final ResultPrinters.Printer printer; + private final Concept concept; - public ColumnResultInfo(Column column, ResultType type, Set semantics, ResultPrinters.Printer printer, String description, PrintSettings settings) { - super(semantics, settings); + public ColumnResultInfo(Column column, ResultType type, String description, Concept concept) { + super(Collections.emptySet()); this.column = column; this.type = type; this.description = description; - this.printer = printer; + this.concept = concept; } @Override - public String userColumnName() { + public String userColumnName(PrintSettings printSettings) { return column.getTable().getLabel() + " " + column.getLabel(); } @Override - public String defaultColumnName() { - return userColumnName(); + public String defaultColumnName(PrintSettings printSettings) { + return userColumnName(printSettings); + } + + @Override + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { + if(concept != null){ + return new ConceptIdPrinter(concept, printSettings); + } + return printerFactory.printerFor(type, printSettings); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ExternalResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ExternalResultInfo.java index 984840fd5c..f5900aae5a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ExternalResultInfo.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ExternalResultInfo.java @@ -1,12 +1,11 @@ package com.bakdata.conquery.models.query.resultinfo; import java.util.Collections; -import java.util.Set; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.types.ResultType; -import com.bakdata.conquery.models.types.SemanticType; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -16,28 +15,30 @@ public class ExternalResultInfo extends ResultInfo { private final String name; private final ResultType type; - private final String description; - private final ResultPrinters.Printer printer; - public ExternalResultInfo(String name, ResultType type, PrintSettings settings) { - this(name, type, null, ResultPrinters.printerFor(type, settings), Collections.emptySet(), settings); - } - public ExternalResultInfo(String name, ResultType type, String description, 
ResultPrinters.Printer printer, Set semantics, PrintSettings settings) { - super(semantics, settings); + public ExternalResultInfo(String name, ResultType type) { + super(Collections.emptySet()); this.name = name; this.type = type; - this.description = description; - this.printer = printer; } - @Override - public String userColumnName() { + public String userColumnName(PrintSettings printSettings) { return null; } @Override - public String defaultColumnName() { + public String defaultColumnName(PrintSettings printSettings) { return name; } + + @Override + public String getDescription() { + return null; + } + + @Override + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { + return printerFactory.printerFor(type, printSettings); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/FixedLabelResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/FixedLabelResultInfo.java index 8a4b898115..8e09dd8d79 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/FixedLabelResultInfo.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/FixedLabelResultInfo.java @@ -4,12 +4,12 @@ import c10n.C10N; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.types.SemanticType; import lombok.EqualsAndHashCode; import lombok.Getter; -import lombok.NonNull; import lombok.ToString; /** @@ -34,35 +34,28 @@ */ @EqualsAndHashCode(callSuper = true) @ToString -public class FixedLabelResultInfo extends ResultInfo { +public abstract class FixedLabelResultInfo extends ResultInfo { - @NonNull - private final String localizedLabel; - @NonNull - private final String localizedDefaultLabel; - @Getter - private final ResultType type; @Getter - private final ResultPrinters.Printer printer; + private final ResultType type; - public FixedLabelResultInfo(String label, String defaultLabel, ResultType type, Set semantics, PrintSettings settings, ResultPrinters.Printer printer) { - super(semantics, settings); - this.localizedLabel = label; - this.localizedDefaultLabel = defaultLabel; + public FixedLabelResultInfo(ResultType type, Set semantics) { + super(semantics); this.type = type; - this.printer = printer; } - @Override - public String userColumnName() { - return localizedLabel; + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { + return printerFactory.printerFor(getType(), printSettings); } @Override - public String defaultColumnName() { - return localizedDefaultLabel; + public abstract String userColumnName(PrintSettings printSettings); + + @Override + public String defaultColumnName(PrintSettings printSettings) { + return userColumnName(printSettings); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ResultInfo.java index 54a68b428b..9774dbe390 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ResultInfo.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/ResultInfo.java @@ -7,7 +7,8 @@ import com.bakdata.conquery.models.query.ColumnDescriptor; 
import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.types.SemanticType; import com.google.common.collect.ImmutableSet; @@ -24,13 +25,10 @@ @Slf4j public abstract class ResultInfo { - private final PrintSettings settings; - @ToString.Include private final Set semantics = new HashSet<>(); - protected ResultInfo(Collection semantics, PrintSettings settings) { - this.settings = settings; + protected ResultInfo(Collection semantics) { this.semantics.addAll(semantics); } @@ -38,12 +36,12 @@ public final void addSemantics(SemanticType... incoming) { semantics.addAll(Arrays.asList(incoming)); } - public abstract String userColumnName(); + public abstract String userColumnName(PrintSettings printSettings); - public final ColumnDescriptor asColumnDescriptor(UniqueNamer collector) { + public final ColumnDescriptor asColumnDescriptor(UniqueNamer collector, PrintSettings printSettings) { return new ColumnDescriptor( - collector.getUniqueName(this), - defaultColumnName(), getDescription(), + collector.getUniqueName(this, printSettings), + defaultColumnName(printSettings), getDescription(), getType().typeInfo(), getSemantics() ); @@ -51,8 +49,9 @@ public final ColumnDescriptor asColumnDescriptor(UniqueNamer collector) { /** * Use default label schema which ignores user labels. + * @param printSettings */ - public abstract String defaultColumnName(); + public abstract String defaultColumnName(PrintSettings printSettings); @ToString.Include public abstract ResultType getType(); @@ -63,17 +62,5 @@ public Set getSemantics() { public abstract String getDescription(); - public final String printNullable(Object f) { - if (f == null) { - return ""; - } - - return print(f); - } - - protected String print(Object f) { - return getPrinter().print(f); - } - - public abstract ResultPrinters.Printer getPrinter(); + public abstract Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SecondaryIdResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SecondaryIdResultInfo.java new file mode 100644 index 0000000000..63d1812332 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SecondaryIdResultInfo.java @@ -0,0 +1,54 @@ +package com.bakdata.conquery.models.query.resultinfo; + +import java.util.Set; + +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; +import com.bakdata.conquery.models.query.resultinfo.printers.common.MappedPrinter; +import com.bakdata.conquery.models.types.ResultType; +import com.bakdata.conquery.models.types.SemanticType; +import lombok.Getter; +import lombok.ToString; + +@Getter +@ToString +public class SecondaryIdResultInfo extends ResultInfo { + private final SecondaryIdDescription secondaryId; + private final ResultType type; + + + public SecondaryIdResultInfo(SecondaryIdDescription secondaryId) { + super(Set.of(new SemanticType.SecondaryIdT(secondaryId.getId()))); + this.secondaryId = 
secondaryId; + type = ResultType.Primitive.STRING; + + + } + + @Override + public String getDescription() { + return secondaryId.getDescription(); + } + + @Override + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { + if (secondaryId.getMapping() == null) { + return printerFactory.getStringPrinter(printSettings); + } + else { + return new MappedPrinter(secondaryId.getMapping().resolve()); + } + } + + @Override + public String userColumnName(PrintSettings printSettings) { + return secondaryId.getLabel(); + } + + @Override + public String defaultColumnName(PrintSettings printSettings) { + return userColumnName(printSettings); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SelectResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SelectResultInfo.java index b8873c64d4..2b9ac53a40 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SelectResultInfo.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SelectResultInfo.java @@ -5,7 +5,8 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.types.SemanticType; import com.google.common.collect.Sets; @@ -21,8 +22,8 @@ public class SelectResultInfo extends ResultInfo { @NonNull private final CQConcept cqConcept; - public SelectResultInfo(Select select, CQConcept cqConcept, Set semantics, PrintSettings settings) { - super(Sets.union(semantics, Set.of(new SemanticType.SelectResultT(select))), settings); + public SelectResultInfo(Select select, CQConcept cqConcept, Set semantics) { + super(Sets.union(semantics, Set.of(new SemanticType.SelectResultT(select.getId())))); this.select = select; this.cqConcept = cqConcept; } @@ -34,8 +35,8 @@ public String getDescription() { } @Override - public ResultPrinters.Printer getPrinter() { - return select.createPrinter(getSettings()); + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { + return select.createPrinter(printerFactory, printSettings); } @Override @@ -44,11 +45,11 @@ public ResultType getType() { } @Override - public String userColumnName() { + public String userColumnName(PrintSettings printSettings) { - if (getSettings().getColumnNamer() != null) { + if (printSettings.getColumnNamer() != null) { // override user labels if column namer is set, TODO clean this up when userConceptLabel is removed - return getSettings().getColumnNamer().apply(this); + return printSettings.getColumnNamer().apply(this); } String label = getCqConcept().getLabel(); @@ -60,10 +61,10 @@ public String userColumnName() { } @Override - public String defaultColumnName() { + public String defaultColumnName(PrintSettings printSettings) { StringBuilder sb = new StringBuilder(); - String cqLabel = getCqConcept().defaultLabel(getSettings().getLocale()); + String cqLabel = getCqConcept().defaultLabel(printSettings.getLocale()); final String selectLabel = select.getColumnName(); if (selectLabel.equals(cqLabel)) { diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/UniqueNamer.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/UniqueNamer.java index 9b11596c33..12218e1edf 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/UniqueNamer.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/UniqueNamer.java @@ -33,9 +33,9 @@ public class UniqueNamer { @NonNull @JsonIgnore - public final String getUniqueName(ResultInfo info) { + public final String getUniqueName(ResultInfo info, PrintSettings printSettings) { @NonNull - String label = Objects.requireNonNullElse(info.userColumnName(), info.defaultColumnName()); + String label = Objects.requireNonNullElse(info.userColumnName(printSettings), info.defaultColumnName(printSettings)); // lookup if prefix is needed and computed it if necessary String uniqueName = label; synchronized (ocurrenceCounter) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ArrowResultPrinters.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ArrowResultPrinters.java new file mode 100644 index 0000000000..9e1a4ecffe --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ArrowResultPrinters.java @@ -0,0 +1,31 @@ +package com.bakdata.conquery.models.query.resultinfo.printers; + +import java.math.BigDecimal; + +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.common.IdentityPrinter; +import org.jetbrains.annotations.NotNull; + +public class ArrowResultPrinters extends JavaResultPrinters { + + @Override + public Printer getDatePrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + + @Override + public Printer getMoneyPrinter(PrintSettings printSettings) { + return new MoneyPrinter(); + } + + private record MoneyPrinter() implements Printer { + @Override + public Object apply(@NotNull Number value) { + if (value instanceof BigDecimal bigDecimal){ + return bigDecimal.unscaledValue().intValueExact(); + } + + return value.intValue(); + } + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ExcelResultPrinters.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ExcelResultPrinters.java new file mode 100644 index 0000000000..4a9e05a0ad --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ExcelResultPrinters.java @@ -0,0 +1,59 @@ +package com.bakdata.conquery.models.query.resultinfo.printers; + +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.common.DatePrinter; +import com.bakdata.conquery.models.query.resultinfo.printers.common.IdentityPrinter; +import com.bakdata.conquery.models.types.ResultType; + +/** + * This class is a mess because Excel supports some of our types natively. + * + * With LIST types we fall back onto the StringResultPrinter, as Excel does not support Lists, BUT we also cannot use the {@link IdentityPrinter} inside the list, as some of our printers are native types. 
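To make the list fallback described above concrete: a scalar cell can keep its native value, but a list cell has to be flattened into one string, so every element must first go through a string-producing printer. A self-contained sketch of that flattening step (hypothetical helper with arbitrary delimiters; the real separators come from the locale's list format):

import java.util.List;
import java.util.StringJoiner;
import java.util.function.Function;

// Flatten a list into a single cell value: every element is printed to a String
// first, because the surrounding cell cannot hold a native list.
final class ListCellSketch {
	static <T> String flatten(List<T> values, Function<T, String> elementPrinter) {
		final StringJoiner joiner = new StringJoiner(", ", "{", "}");
		for (T value : values) {
			if (value != null) {
				joiner.add(elementPrinter.apply(value));
			}
		}
		return joiner.toString();
	}
}

For example, flatten(List.of(1, 2, 3), String::valueOf) yields "{1, 2, 3}".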
+ */ +public class ExcelResultPrinters extends StringResultPrinters { + + private final PrinterFactory partialDelegate = new StringResultPrinters(); + + public Printer printerFor(ResultType type, PrintSettings printSettings) { + if (type instanceof ResultType.ListT listT) { + final Printer elementPrinter = partialDelegate.printerFor(listT.getElementType(), printSettings); + return getListPrinter(elementPrinter, printSettings); + } + + return switch (((ResultType.Primitive) type)) { + case BOOLEAN -> getBooleanPrinter(printSettings); + case INTEGER -> getIntegerPrinter(printSettings); + case NUMERIC -> getNumericPrinter(printSettings); + case DATE -> getDatePrinter(printSettings); + case DATE_RANGE -> getDateRangePrinter(printSettings); + case STRING -> getStringPrinter(printSettings); + case MONEY -> getMoneyPrinter(printSettings); + }; + } + + @Override + public Printer getBooleanPrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + + @Override + public Printer getNumericPrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + + @Override + public Printer getMoneyPrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + + @Override + public Printer getDatePrinter(PrintSettings printSettings) { + return new DatePrinter(); + } + + @Override + public Printer getIntegerPrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/JavaResultPrinters.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/JavaResultPrinters.java new file mode 100644 index 0000000000..bab9fc5153 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/JavaResultPrinters.java @@ -0,0 +1,76 @@ +package com.bakdata.conquery.models.query.resultinfo.printers; + +import java.util.ArrayList; +import java.util.List; + +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.common.DatePrinter; +import com.bakdata.conquery.models.query.resultinfo.printers.common.IdentityPrinter; +import org.jetbrains.annotations.NotNull; + +public class JavaResultPrinters extends PrinterFactory { + + @Override + public Printer> getListPrinter(Printer elementPrinter, PrintSettings printSettings) { + return new ListPrinter<>(elementPrinter); + } + + @Override + public Printer getBooleanPrinter(PrintSettings printSettings) { + return new IdentityPrinter(); + } + + @Override + public Printer getIntegerPrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + + @Override + public Printer getNumericPrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + + @Override + public Printer getDatePrinter(PrintSettings printSettings) { + return new DatePrinter(); + } + + @Override + public Printer> getDateRangePrinter(PrintSettings printSettings) { + return new DateRangePrinter(); + } + + @Override + public Printer getStringPrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + + + @Override + public Printer getMoneyPrinter(PrintSettings printSettings) { + return new IdentityPrinter<>(); + } + + private record ListPrinter(Printer elementPrinter) implements Printer> { + + @Override + public Object apply(@NotNull List value) { + final List out = new ArrayList<>(value.size()); + + for (T elt : value) { + 
out.add(elementPrinter.apply(elt)); + } + + return out; + } + } + + private record DateRangePrinter() implements Printer> { + + @Override + public Object apply(@NotNull List f) { + return CDateRange.of(f.get(0), f.get(1)); + } + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/JsonResultPrinters.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/JsonResultPrinters.java new file mode 100644 index 0000000000..69dd30b38c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/JsonResultPrinters.java @@ -0,0 +1,27 @@ +package com.bakdata.conquery.models.query.resultinfo.printers; + +import java.util.List; + +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.common.ToStringPrinter; +import lombok.ToString; + +/** + * This class simply put's out native types where possible to let Jackson handle the Serialization, except for Date and DateRange, where the Frontend cannot ensure proper handling. + */ +@ToString +public class JsonResultPrinters extends JavaResultPrinters { + + + @Override + public Printer getDatePrinter(PrintSettings printSettings) { + return new ToStringPrinter<>(super.getDatePrinter(printSettings)); + } + + @Override + public Printer> getDateRangePrinter(PrintSettings printSettings) { + return new ToStringPrinter<>(super.getDateRangePrinter(printSettings)); + } + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/Printer.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/Printer.java new file mode 100644 index 0000000000..b165df89ea --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/Printer.java @@ -0,0 +1,15 @@ +package com.bakdata.conquery.models.query.resultinfo.printers; + +import java.util.function.Function; + +import org.jetbrains.annotations.NotNull; + +/** + * Printers handle transformation from {@link com.bakdata.conquery.models.query.results.EntityResult} to the respective renderers "native" representation. + * + * @param The intermediate representation of the type we are printing. + */ +@FunctionalInterface +public interface Printer extends Function { + Object apply(@NotNull T value); +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/PrinterFactory.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/PrinterFactory.java new file mode 100644 index 0000000000..2d46e73c68 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/PrinterFactory.java @@ -0,0 +1,56 @@ +package com.bakdata.conquery.models.query.resultinfo.printers; + +import java.util.List; + +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.types.ResultType; + +/** + * This class allows {@link com.bakdata.conquery.models.datasets.concepts.select.Select}s to abstractly define printing, for all our renderers. + * + * The primary thing this class solves is {@link List} printing interacting with special handling like {@link com.bakdata.conquery.models.datasets.concepts.select.concept.ConceptColumnSelect} and {@link com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect}. + */ +public abstract class PrinterFactory { + /** + * Default implementation of determining the printer for a {@link ResultType}. 
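Since the Printer introduced above is just a functional interface over Function, renderer-specific behaviour boils down to a small record per type. A sketch of what a custom implementation looks like (the upper-casing printer is invented purely for illustration):

import com.bakdata.conquery.models.query.resultinfo.printers.Printer;
import org.jetbrains.annotations.NotNull;

// Shape of a Printer implementation: map the intermediate value to whatever the
// target renderer can digest. The transformation itself is a made-up example.
record UpperCasePrinter() implements Printer<String> {
	@Override
	public Object apply(@NotNull String value) {
		return value.toUpperCase();
	}
}

Because printerFor wraps a list printer around the element printer, such an implementation would also apply to the elements of a List-typed column once a factory hands it out.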
+ * Generally, this method should not be overriden and preferably be final, but {@link ExcelResultPrinters} makes this problematic. + */ + public Printer printerFor(ResultType type, PrintSettings printSettings) { + if (type instanceof ResultType.ListT listT) { + final Printer elementPrinter = printerFor(listT.getElementType(), printSettings); + return (Printer) getListPrinter(elementPrinter, printSettings); + } + + return (Printer) switch (((ResultType.Primitive) type)) { + case BOOLEAN -> getBooleanPrinter(printSettings); + case INTEGER -> getIntegerPrinter(printSettings); + case NUMERIC -> getNumericPrinter(printSettings); + case DATE -> getDatePrinter(printSettings); + case DATE_RANGE -> getDateRangePrinter(printSettings); + case STRING -> getStringPrinter(printSettings); + case MONEY -> getMoneyPrinter(printSettings); + }; + } + + public abstract Printer> getListPrinter(Printer elementPrinter, PrintSettings printSettings); + + public abstract Printer getBooleanPrinter(PrintSettings printSettings); + + /** + * Jackson will opportunistically read {@link Long} and {@link Integer} hence our usage of Number. + */ + public abstract Printer getIntegerPrinter(PrintSettings printSettings); + + public abstract Printer getNumericPrinter(PrintSettings printSettings); + + /** + * Jackson will opportunistically read {@link Long} and {@link Integer} hence our usage of Number. + */ + public abstract Printer getDatePrinter(PrintSettings printSettings); + + public abstract Printer> getDateRangePrinter(PrintSettings printSettings); + + public abstract Printer getStringPrinter(PrintSettings printSettings); + + public abstract Printer getMoneyPrinter(PrintSettings printSettings); +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ResultPrinters.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ResultPrinters.java deleted file mode 100644 index 8cc0a3208b..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/ResultPrinters.java +++ /dev/null @@ -1,231 +0,0 @@ -package com.bakdata.conquery.models.query.resultinfo.printers; - -import java.math.BigDecimal; -import java.text.NumberFormat; -import java.util.List; -import java.util.Objects; -import java.util.StringJoiner; - -import com.bakdata.conquery.internationalization.Results; -import com.bakdata.conquery.models.common.CDate; -import com.bakdata.conquery.models.common.LocalizedToString; -import com.bakdata.conquery.models.common.daterange.CDateRange; -import com.bakdata.conquery.models.config.LocaleConfig; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeNode; -import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; -import com.bakdata.conquery.models.index.InternToExternMapper; -import com.bakdata.conquery.models.query.C10nCache; -import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.types.ResultType; -import com.google.common.base.Preconditions; -import lombok.experimental.UtilityClass; -import lombok.extern.slf4j.Slf4j; - -@UtilityClass -@Slf4j -public class ResultPrinters { - - public Printer printerFor(ResultType type, PrintSettings printSettings) { - if (type instanceof ResultType.ListT listT) { - return new ListPrinter(printerFor(listT.getElementType(), printSettings), printSettings); - } - - return switch (((ResultType.Primitive) type)) { - case BOOLEAN -> new BooleanPrinter(printSettings); - case 
INTEGER -> NumberFormatPrinter.integerPrinter(printSettings); - case NUMERIC -> NumberFormatPrinter.decimalPrinter(printSettings); - case DATE -> new DatePrinter(printSettings); - case DATE_RANGE -> new DateRangePrinter(printSettings); - case STRING -> new StringPrinter(); - case MONEY -> MoneyPrinter.create(printSettings); - }; - } - - public BigDecimal readMoney(PrintSettings cfg, Number value) { - return new BigDecimal(value.longValue()).movePointLeft(cfg.getCurrency().getDefaultFractionDigits()); - } - - public interface Printer { - String print(Object f); - } - - public record StringPrinter() implements Printer { - @Override - public String print(Object f) { - return Objects.toString(f); - } - } - - public record NumberFormatPrinter(NumberFormat format) implements Printer { - - public static Printer integerPrinter(PrintSettings cfg){ - if (!cfg.isPrettyPrint()) { - return new StringPrinter(); - } - - return new NumberFormatPrinter(cfg.getIntegerFormat()); - } - - public static Printer decimalPrinter(PrintSettings cfg){ - if (!cfg.isPrettyPrint()) { - return new StringPrinter(); - } - - return new NumberFormatPrinter(cfg.getDecimalFormat()); - } - - @Override - public String print(Object f) { - return format.format(f); - } - } - - public record MoneyPrinter(PrintSettings cfg, NumberFormat format) implements Printer { - - public static Printer create(PrintSettings cfg) { - if (!cfg.isPrettyPrint()) { - return new StringPrinter(); - } - - return new MoneyPrinter(cfg, (NumberFormat) cfg.getCurrencyFormat().clone()); - } - - @Override - public String print(Object f) { - final BigDecimal asMoney = readMoney(cfg, (Number) f); - return format.format(asMoney); - } - } - - public record DatePrinter(PrintSettings cfg) implements Printer { - - @Override - public String print(Object f) { - Preconditions.checkArgument(f instanceof Number, "Expected an Number but got an '%s' with the value: %s".formatted(f.getClass().getName(), f)); - - final Number number = (Number) f; - return cfg.getDateFormatter().format(CDate.toLocalDate(number.intValue())); - } - } - - public record DateRangePrinter(DatePrinter datePrinter, PrintSettings cfg) implements Printer { - - public DateRangePrinter(PrintSettings printSettings) { - this(new DatePrinter(printSettings), printSettings); - } - - @Override - public String print(Object f) { - Preconditions.checkArgument(f instanceof List, "Expected a List got %s (Type: %s, as string: %s)", f, f.getClass().getName(), f); - Preconditions.checkArgument(((List) f).size() == 2, "Expected a list with 2 elements, one min, one max. The list was: %s ", f); - - final List list = (List) f; - final Integer min = (Integer) list.get(0); - final Integer max = (Integer) list.get(1); - - if (min == null || max == null) { - log.warn("Encountered incomplete range, treating it as an open range. Either min or max was null: {}", list); - } - // Compute minString first because we need it either way - final String minString = min == null || min == CDateRange.NEGATIVE_INFINITY ? "-∞" : datePrinter.print(min); - - if (cfg.isPrettyPrint() && min != null && min.equals(max)) { - // If the min and max are the same we print it like a singe date, not a range (only in pretty printing) - return minString; - } - final String maxString = max == null || max == CDateRange.POSITIVE_INFINITY ? 
"+∞" : datePrinter.print(max); - - return minString + cfg.getDateRangeSeparator() + maxString; - } - } - - public record BooleanPrinter(PrintSettings cfg, String trueVal, String falseVal) implements Printer { - - public BooleanPrinter(PrintSettings cfg) { - this( - cfg, - cfg.isPrettyPrint() ? C10nCache.getLocalized(Results.class, cfg.getLocale()).True() : "1", - cfg.isPrettyPrint() ? C10nCache.getLocalized(Results.class, cfg.getLocale()).False() : "0" - ); - } - - @Override - public String print(Object f) { - if ((Boolean) f) { - return trueVal; - } - return falseVal; - - } - } - - public record MappedPrinter(InternToExternMapper mapper) implements Printer { - - @Override - public String print(Object f) { - return mapper.external(((String) f)); - } - } - - public record ConceptIdPrinter(Concept concept, PrintSettings cfg) implements Printer { - - @Override - public String print(Object rawValue) { - if (rawValue == null) { - return null; - } - - final int localId = (int) rawValue; - - final ConceptTreeNode node = ((TreeConcept) concept).getElementByLocalId(localId); - - if (!cfg.isPrettyPrint()) { - return node.getId().toString(); - } - - if (node.getDescription() == null) { - return node.getLabel(); - } - - return node.getLabel() + " - " + node.getDescription(); - } - } - - public record ListPrinter(Printer elementPrinter, PrintSettings cfg, LocaleConfig.ListFormat listFormat) implements Printer { - - public ListPrinter(Printer elementPrinter, PrintSettings cfg) { - this(elementPrinter, cfg, cfg.getListFormat()); - } - - @Override - public String print(Object f) { - - // Jackson deserializes collections as lists instead of an array, if the type is not given - Preconditions.checkArgument(f instanceof List, "Expected a List got %s (as String `%s` )".formatted(f.getClass().getName(), f)); - - final StringJoiner joiner = listFormat.createListJoiner(); - - for (Object obj : (List) f) { - joiner.add(listFormat.escapeListElement(elementPrinter.print(obj))); - } - return joiner.toString(); - } - } - - public record LocalizedEnumPrinter & LocalizedToString>(PrintSettings cfg, Class clazz) implements Printer { - @Override - public String print(Object f) { - - if (clazz.isInstance(f)) { - return clazz.cast(f).toString(cfg.getLocale()); - } - try { - return Enum.valueOf(clazz, f.toString()).toString(cfg.getLocale()); - } - catch (Exception e) { - throw new IllegalArgumentException("%s is not a valid %s.".formatted(f, clazz), e); - } - } - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/SecondaryIdResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/SecondaryIdResultInfo.java deleted file mode 100644 index 6f33bbb124..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/SecondaryIdResultInfo.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.bakdata.conquery.models.query.resultinfo.printers; - -import java.util.Set; - -import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.ResultInfo; -import com.bakdata.conquery.models.types.ResultType; -import com.bakdata.conquery.models.types.SemanticType; -import lombok.Getter; -import lombok.ToString; - -@Getter -@ToString -public class SecondaryIdResultInfo extends ResultInfo { - private final SecondaryIdDescription secondaryId; - private final ResultType type; - private final ResultPrinters.Printer printer; - - - 
public SecondaryIdResultInfo(SecondaryIdDescription secondaryId, PrintSettings settings) { - super(Set.of(new SemanticType.SecondaryIdT(secondaryId)), settings); - this.secondaryId = secondaryId; - type = ResultType.Primitive.STRING; - printer = secondaryId.getMapping() == null - ? new ResultPrinters.StringPrinter() - : new ResultPrinters.MappedPrinter(secondaryId.getMapping()); - } - - @Override - public String getDescription() { - return secondaryId.getDescription(); - } - - @Override - public String userColumnName() { - return secondaryId.getLabel(); - } - - @Override - public String defaultColumnName() { - return userColumnName(); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/StringResultPrinters.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/StringResultPrinters.java new file mode 100644 index 0000000000..cbd74ddfaf --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/StringResultPrinters.java @@ -0,0 +1,61 @@ +package com.bakdata.conquery.models.query.resultinfo.printers; + +import java.util.List; + +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.common.BooleanStringPrinter; +import com.bakdata.conquery.models.query.resultinfo.printers.common.DateRangeStringPrinter; +import com.bakdata.conquery.models.query.resultinfo.printers.common.DateStringPrinter; +import com.bakdata.conquery.models.query.resultinfo.printers.common.ListStringPrinter; +import com.bakdata.conquery.models.query.resultinfo.printers.common.NumberFormatStringPrinter; +import com.bakdata.conquery.models.query.resultinfo.printers.common.StringPrinter; +import lombok.ToString; + +/** + * All printers in this factory should be assumed to return {@link String}, this is useful for CSV or HTML printing. 
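Putting the pieces together, a renderer now picks one PrinterFactory (for string-based output that would be the StringResultPrinters declared here) and asks each ResultInfo for a printer at render time. A hedged sketch of that call sequence; the method, its parameters and the empty-string handling of null cells are assumptions of this illustration, not code from the changeset:

import com.bakdata.conquery.models.query.PrintSettings;
import com.bakdata.conquery.models.query.resultinfo.ResultInfo;
import com.bakdata.conquery.models.query.resultinfo.printers.Printer;
import com.bakdata.conquery.models.query.resultinfo.printers.StringResultPrinters;

final class RenderSketch {
	// Resolve a printer for this column from the chosen factory, then print one raw cell value.
	static Object renderCell(ResultInfo info, PrintSettings settings, Object rawValue) {
		final Printer printer = info.createPrinter(new StringResultPrinters(), settings);
		return rawValue == null ? "" : printer.apply(rawValue);
	}
}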
+ */ +@ToString +public class StringResultPrinters extends PrinterFactory { + + + @Override + public Printer> getListPrinter(Printer elementPrinter, PrintSettings printSettings) { + return new ListStringPrinter<>(elementPrinter, printSettings); + } + + @Override + public Printer getBooleanPrinter(PrintSettings printSettings) { + return BooleanStringPrinter.create(printSettings); + } + + @Override + public Printer getIntegerPrinter(PrintSettings printSettings) { + return NumberFormatStringPrinter.create(printSettings, printSettings.getIntegerFormat()); + } + + @Override + public Printer getNumericPrinter(PrintSettings printSettings) { + return NumberFormatStringPrinter.create(printSettings, printSettings.getDecimalFormat()); + } + + @Override + public Printer getDatePrinter(PrintSettings printSettings) { + return new DateStringPrinter(printSettings); + } + + @Override + public Printer> getDateRangePrinter(PrintSettings printSettings) { + return new DateRangeStringPrinter(printSettings); + } + + @Override + public Printer getStringPrinter(PrintSettings printSettings) { + return new StringPrinter(); + } + + @Override + public Printer getMoneyPrinter(PrintSettings printSettings) { + return NumberFormatStringPrinter.create(printSettings, printSettings.getCurrencyFormat()); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/BooleanStringPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/BooleanStringPrinter.java new file mode 100644 index 0000000000..d54fea8853 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/BooleanStringPrinter.java @@ -0,0 +1,25 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import com.bakdata.conquery.internationalization.Results; +import com.bakdata.conquery.models.query.C10nCache; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record BooleanStringPrinter(PrintSettings cfg, String trueVal, String falseVal) implements Printer { + + public static BooleanStringPrinter create(PrintSettings settings) { + if (!settings.isPrettyPrint()) { + return new BooleanStringPrinter(settings, "1", "0"); + } + + final Results localized = C10nCache.getLocalized(Results.class, settings.getLocale()); + return new BooleanStringPrinter(settings, localized.True(), localized.False()); + } + + + @Override + public String apply(@NotNull Boolean f) { + return f ? 
trueVal : falseVal; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ConceptIdPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ConceptIdPrinter.java new file mode 100644 index 0000000000..e495a5110a --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ConceptIdPrinter.java @@ -0,0 +1,30 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeNode; +import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record ConceptIdPrinter(Concept concept, PrintSettings cfg) implements Printer { + + @Override + public String apply(@NotNull Integer localId) { + if (localId == null) { + return null; + } + + final ConceptTreeNode node = ((TreeConcept) concept).getElementByLocalId(localId); + + if (!cfg.isPrettyPrint()) { + return node.getId().toString(); + } + + if (node.getDescription() == null) { + return node.getLabel(); + } + + return node.getLabel() + " - " + node.getDescription(); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DatePrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DatePrinter.java new file mode 100644 index 0000000000..d55fac0a44 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DatePrinter.java @@ -0,0 +1,13 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import com.bakdata.conquery.models.common.CDate; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record DatePrinter() implements Printer { + + @Override + public Object apply(@NotNull Number value) { + return CDate.toLocalDate(value.intValue()); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DateRangeStringPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DateRangeStringPrinter.java new file mode 100644 index 0000000000..bbc8cc9d9f --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DateRangeStringPrinter.java @@ -0,0 +1,37 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import java.util.List; + +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.google.common.base.Preconditions; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; + +@Slf4j +public record DateRangeStringPrinter(DateStringPrinter datePrinter, PrintSettings cfg) implements Printer> { + + public DateRangeStringPrinter(PrintSettings printSettings) { + this(new DateStringPrinter(printSettings), printSettings); + } + + @Override + public String apply(@NotNull List f) { + Preconditions.checkArgument(f.size() == 2, "Expected a list with 2 elements, one min, one max. 
The list was: %s ", f); + + final Integer min = f.get(0); + final Integer max = f.get(1); + + // Compute minString first because we need it either way + final String minString = min == null || min == CDateRange.NEGATIVE_INFINITY ? "-∞" : datePrinter.apply(min); + + if (cfg.isPrettyPrint() && min != null && min.equals(max)) { + // If the min and max are the same we print it like a singe date, not a range (only in pretty printing) + return minString; + } + final String maxString = max == null || max == CDateRange.POSITIVE_INFINITY ? "+∞" : datePrinter.apply(max); + + return minString + cfg.getDateRangeSeparator() + maxString; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DateStringPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DateStringPrinter.java new file mode 100644 index 0000000000..f9a57f05b5 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/DateStringPrinter.java @@ -0,0 +1,14 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import com.bakdata.conquery.models.common.CDate; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record DateStringPrinter(PrintSettings cfg) implements Printer { + + @Override + public String apply(@NotNull Number f) { + return cfg.getDateFormatter().format(CDate.toLocalDate(f.intValue())); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/IdentityPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/IdentityPrinter.java new file mode 100644 index 0000000000..30c05117e4 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/IdentityPrinter.java @@ -0,0 +1,12 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record IdentityPrinter() implements Printer { + + @Override + public Object apply(@NotNull T value) { + return value; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ListStringPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ListStringPrinter.java new file mode 100644 index 0000000000..f311075acb --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ListStringPrinter.java @@ -0,0 +1,31 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import java.util.List; +import java.util.StringJoiner; + +import com.bakdata.conquery.models.config.LocaleConfig; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record ListStringPrinter(Printer elementPrinter, PrintSettings cfg, LocaleConfig.ListFormat listFormat) implements Printer> { + + public ListStringPrinter(Printer elementPrinter, PrintSettings cfg) { + this(elementPrinter, cfg, cfg.getListFormat()); + } + + @Override + public String apply(@NotNull List f) { + + final StringJoiner joiner = listFormat.createListJoiner(); + + for (T obj : f) { + if (obj == null){ + continue; + } + + 
joiner.add(listFormat.escapeListElement(elementPrinter.apply(obj).toString())); + } + return joiner.toString(); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/LocalizedEnumPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/LocalizedEnumPrinter.java new file mode 100644 index 0000000000..61cd8ccd60 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/LocalizedEnumPrinter.java @@ -0,0 +1,18 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import com.bakdata.conquery.models.common.LocalizedToString; +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record LocalizedEnumPrinter & LocalizedToString>(PrintSettings cfg, Class clazz) implements Printer { + @Override + public String apply(@NotNull String f) { + try { + return Enum.valueOf(clazz, f).toString(cfg.getLocale()); + } + catch (Exception e) { + throw new IllegalArgumentException("%s is not a valid %s.".formatted(f, clazz), e); + } + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/MappedPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/MappedPrinter.java new file mode 100644 index 0000000000..d388ceb689 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/MappedPrinter.java @@ -0,0 +1,13 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record MappedPrinter(InternToExternMapper mapper) implements Printer { + + @Override + public String apply(@NotNull String f) { + return mapper.external(f); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/NumberFormatStringPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/NumberFormatStringPrinter.java new file mode 100644 index 0000000000..1619276295 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/NumberFormatStringPrinter.java @@ -0,0 +1,22 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import java.text.NumberFormat; + +import com.bakdata.conquery.models.query.PrintSettings; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record NumberFormatStringPrinter(NumberFormat format) implements Printer { + + public static Printer create(PrintSettings cfg, NumberFormat currencyFormat) { + if (cfg.isPrettyPrint()) { + return new NumberFormatStringPrinter(currencyFormat); + } + return new StringPrinter<>(); + } + + @Override + public String apply(@NotNull Number f) { + return format.format(f); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/StringPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/StringPrinter.java new file mode 100644 index 0000000000..515b8520d1 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/StringPrinter.java @@ -0,0 
+1,13 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import java.util.Objects; + +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record StringPrinter() implements Printer { + @Override + public String apply(@NotNull T f) { + return Objects.toString(f); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ToStringPrinter.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ToStringPrinter.java new file mode 100644 index 0000000000..5b5c204b7b --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/printers/common/ToStringPrinter.java @@ -0,0 +1,14 @@ +package com.bakdata.conquery.models.query.resultinfo.printers.common; + +import java.util.Objects; + +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import org.jetbrains.annotations.NotNull; + +public record ToStringPrinter(Printer delegate) implements Printer { + + @Override + public Object apply(@NotNull T value) { + return Objects.toString(delegate.apply(value)); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/results/FormShardResult.java b/backend/src/main/java/com/bakdata/conquery/models/query/results/FormShardResult.java index 1836b01877..4984115649 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/results/FormShardResult.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/results/FormShardResult.java @@ -47,7 +47,7 @@ public void addResult(DistributedExecutionManager executionManager) { ); } - if (managedInternalForm.allSubQueriesDone(executionManager)) { + if (managedInternalForm.allSubQueriesDone()) { managedInternalForm.finish(ExecutionState.DONE); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/BooleanColumnStatsCollector.java b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/BooleanColumnStatsCollector.java index c805131751..fd4fb70237 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/BooleanColumnStatsCollector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/BooleanColumnStatsCollector.java @@ -4,8 +4,9 @@ import java.util.Map; import c10n.C10N; +import com.bakdata.conquery.internationalization.Results; +import com.bakdata.conquery.models.query.C10nCache; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; import lombok.Getter; @Getter @@ -36,13 +37,13 @@ public void consume(Object value) { @Override public ResultColumnStatistics describe() { - final ResultPrinters.BooleanPrinter printer = new ResultPrinters.BooleanPrinter(getPrintSettings()); + final Results results = C10nCache.getLocalized(Results.class, getPrintSettings().getLocale()); return new HistogramColumnDescription( getName(), getLabel(), getDescription(), List.of( - new HistogramColumnDescription.Entry(printer.print(true), trues), - new HistogramColumnDescription.Entry(printer.print(false), falses) + new HistogramColumnDescription.Entry(results.True(), trues), + new HistogramColumnDescription.Entry(results.False(), falses) ), Map.of( C10N.get(StatisticsLabels.class, getPrintSettings().getLocale()).missing(), diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/ColumnStatsCollector.java 
b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/ColumnStatsCollector.java index 28ccb85a5a..689f2e1129 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/ColumnStatsCollector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/ColumnStatsCollector.java @@ -5,7 +5,6 @@ import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.models.config.FrontendConfig; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; import com.bakdata.conquery.models.types.ResultType; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonTypeInfo; @@ -19,11 +18,12 @@ public abstract class ColumnStatsCollector { @JsonIgnore private final PrintSettings printSettings; - public static ColumnStatsCollector getStatsCollector(String name, String description, ResultType type, ResultPrinters.Printer printer, PrintSettings printSettings, FrontendConfig config) { + public static ColumnStatsCollector getStatsCollector(String name, String description, ResultType type, PrintSettings printSettings, FrontendConfig config) { // List recursion must be done before assigning uniqueNames if (type instanceof ResultType.ListT listT) { - final ColumnStatsCollector columnStatsCollector = getStatsCollector(name, description, listT.getElementType(), ((ResultPrinters.ListPrinter) printer).elementPrinter(), printSettings, config); + + final ColumnStatsCollector columnStatsCollector = getStatsCollector(name, description, listT.getElementType(), printSettings, config); return new ListColumnStatsCollector(columnStatsCollector, printSettings); } @@ -31,7 +31,7 @@ public static ColumnStatsCollector getStatsCollector(String name, String descrip case BOOLEAN -> new BooleanColumnStatsCollector(name, name, description, printSettings); case INTEGER, MONEY, NUMERIC -> new NumberColumnStatsCollector<>(name, name, description, type, printSettings, config.getVisualisationsHistogramLimit(), config.getVisualisationPercentiles().lowerEndpoint(), config.getVisualisationPercentiles().upperEndpoint()); case DATE, DATE_RANGE -> new DateColumnStatsCollector(name, name, description, type, printSettings); - case STRING -> new StringColumnStatsCollector(name, name, description, printer, printSettings, config.getVisualisationsHistogramLimit()); + case STRING -> new StringColumnStatsCollector(name, name, description, printSettings, config.getVisualisationsHistogramLimit()); }; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/DateColumnStatsCollector.java b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/DateColumnStatsCollector.java index 2b56c99740..2c661d5f3d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/DateColumnStatsCollector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/DateColumnStatsCollector.java @@ -1,7 +1,6 @@ package com.bakdata.conquery.models.query.statistics; import java.time.LocalDate; -import java.util.List; import java.util.SortedMap; import java.util.TreeMap; import java.util.function.Function; @@ -35,8 +34,8 @@ public DateColumnStatsCollector(String name, String label, String description, R private static Function getDateExtractor(ResultType dateType) { return switch (((ResultType.Primitive) dateType)) { - case DATE_RANGE -> dateValue -> CDateRange.fromList((List) dateValue); - case DATE -> dateValue -> 
CDateRange.exactly((Integer) dateValue); + case DATE_RANGE -> dateValue -> (CDateRange) dateValue; + case DATE -> dateValue -> CDateRange.exactly((LocalDate) dateValue); default -> throw new IllegalStateException("Unexpected type %s".formatted(dateType)); }; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/HistogramColumnDescription.java b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/HistogramColumnDescription.java index 708550bee6..9106859f8a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/HistogramColumnDescription.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/HistogramColumnDescription.java @@ -12,7 +12,7 @@ @ToString(callSuper = true) public class HistogramColumnDescription extends ColumnStatsCollector.ResultColumnStatistics { - public static record Entry(String label, long value) {}; + public record Entry(String label, long value) {}; private final List entries; private final Map extras; diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/NumberColumnStatsCollector.java b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/NumberColumnStatsCollector.java index e2f83d13be..f2c6ca82be 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/NumberColumnStatsCollector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/NumberColumnStatsCollector.java @@ -10,7 +10,6 @@ import c10n.C10N; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; import com.bakdata.conquery.models.types.ResultType; import com.google.common.collect.Range; import lombok.Getter; @@ -101,13 +100,7 @@ public void consume(Object value) { return; } - Number number = (Number) value; - - if (ResultType.Primitive.MONEY.equals(getType())) { - number = ResultPrinters.readMoney(getPrintSettings(), number); - } - - statistics.addValue(number.doubleValue()); + statistics.addValue(((Number) value).doubleValue()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/ResultStatistics.java b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/ResultStatistics.java index 891caae97e..45f0b1715d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/ResultStatistics.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/ResultStatistics.java @@ -18,6 +18,9 @@ import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; +import com.bakdata.conquery.models.query.resultinfo.printers.JavaResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.types.SemanticType; @@ -34,13 +37,19 @@ public record ResultStatistics(int entities, int total, List statistics, Range dateRange) { @SneakyThrows @NotNull - public static ResultStatistics collectResultStatistics(SingleTableResult managedQuery, List resultInfos, Optional dateInfo, Optional dateIndex, PrintSettings printSettings, UniqueNamer uniqueNamer, ConqueryConfig conqueryConfig) { + public static ResultStatistics 
collectResultStatistics( + SingleTableResult managedQuery, + List resultInfos, + Optional dateInfo, + Optional dateIndex, + PrintSettings printSettings, + UniqueNamer uniqueNamer, + ConqueryConfig conqueryConfig, + PrinterFactory printerFactory) { //TODO pull inner executor service from ManagerNode - final ListeningExecutorService - executorService = - MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() - 1)); + final ListeningExecutorService executorService = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() - 1)); // Yes, we are actually iterating the result for every job. @@ -50,7 +59,7 @@ public static ResultStatistics collectResultStatistics(SingleTableResult managed final boolean containsDates = dateInfo.isPresent(); if (containsDates) { - futureSpan = executorService.submit(() -> calculateDateSpan(managedQuery, dateInfo, dateIndex.get())); + futureSpan = executorService.submit(() -> calculateDateSpan(managedQuery, dateInfo, dateIndex.get(), printSettings)); } else { futureSpan = Futures.immediateFuture(CDateRange.all().toSimpleRange()); @@ -59,12 +68,10 @@ public static ResultStatistics collectResultStatistics(SingleTableResult managed // Count result lines and entities (may differ in case of form or SecondaryIdQuery) final ListenableFuture futureLines = executorService.submit(() -> (int) managedQuery.resultRowCount()); - final ListenableFuture futureEntities = - executorService.submit(() -> (int) managedQuery.streamResults(OptionalLong.empty()).count()); + final ListenableFuture futureEntities = executorService.submit(() -> (int) managedQuery.streamResults(OptionalLong.empty()).count()); // compute ResultColumnStatistics for each column - final List> - futureDescriptions = + final List> futureDescriptions = IntStream.range(0, resultInfos.size()) // If the query doesn't contain dates, we can skip the dates-column. 
 						  .filter(col -> !resultInfos.get(col).getSemantics().contains(new SemanticType.EventDateT()) || containsDates)
@@ -72,15 +79,29 @@ public static ResultStatistics collectResultStatistics(SingleTableResult managed
 						  final StopWatch started = StopWatch.createStarted();
 
 						  final ResultInfo info = resultInfos.get(col);
+						  final Printer printer = info.createPrinter(printerFactory, printSettings);
 						  final ColumnStatsCollector statsCollector =
-								  ColumnStatsCollector.getStatsCollector(uniqueNamer.getUniqueName(info), info.getDescription(), info.getType(), info.getPrinter(), printSettings, conqueryConfig.getFrontend());
+								  ColumnStatsCollector.getStatsCollector(uniqueNamer.getUniqueName(info, printSettings),
+																		 info.getDescription(),
+																		 info.getType(),
+																		 printSettings,
+																		 conqueryConfig.getFrontend()
+								  );
 
 						  log.trace("BEGIN stats collection for {}", info);
 
 						  managedQuery.streamResults(OptionalLong.empty())
 									  .map(EntityResult::listResultLines)
 									  .flatMap(List::stream)
-									  .forEach(line -> statsCollector.consume(line[col]));
+									  .forEach(line -> {
+										  final Object value = line[col];
+										  if (value == null) {
+											  // Printers don't handle null
+											  statsCollector.consume(null);
+											  return;
+										  }
+										  statsCollector.consume(printer.apply(value));
+									  });
 
 						  log.trace("DONE collecting values for {}, in {}", info, started);
 
@@ -103,13 +124,13 @@ public static ResultStatistics collectResultStatistics(SingleTableResult managed
 		return new ResultStatistics(entities, lines, descriptions, span);
 	}
 
-	private static Range calculateDateSpan(SingleTableResult managedQuery, Optional dateInfo, int dateIndex) {
+	private static Range calculateDateSpan(SingleTableResult managedQuery, Optional dateInfo, int dateIndex, PrintSettings printSettings) {
 		if (dateInfo.isEmpty()) {
 			return CDateRange.all().toSimpleRange();
 		}
 
 		final AtomicReference spanRef = new AtomicReference<>(null);
-		final Consumer dateAggregator = getDateSpanner(dateInfo.get(), dateIndex, spanRef);
+		final Consumer dateAggregator = getDateSpanner(dateInfo.get(), dateIndex, spanRef, printSettings);
 
 		managedQuery.streamResults(OptionalLong.empty()).flatMap(EntityResult::streamValues).forEach(dateAggregator);
 
@@ -125,28 +146,32 @@ private static Range calculateDateSpan(SingleTableResult managedQuery
 	/**
 	 * If not dateInfo is given, don't try to span values. otherwise takes values from line at dateIndex, and handles them according to dateInfo.
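The per-column loop above first builds a Printer via ResultInfo#createPrinter and only then feeds the printed values into the stats collector. As a rough, self-contained sketch of how the new printers in printers.common compose; the sketch class, its name and the helper method are invented for illustration, everything else follows the record signatures introduced earlier in this change:

import java.util.List;

import com.bakdata.conquery.models.query.PrintSettings;
import com.bakdata.conquery.models.query.resultinfo.printers.common.DateStringPrinter;
import com.bakdata.conquery.models.query.resultinfo.printers.common.ListStringPrinter;

class PrinterCompositionSketch {

	// A DATE list column: each element (internal epoch-day int) is rendered by DateStringPrinter,
	// while ListStringPrinter handles joining and escaping according to the configured ListFormat.
	static String printDateList(List<Number> epochDays, PrintSettings settings) {
		final ListStringPrinter<Number> printer = new ListStringPrinter<>(new DateStringPrinter(settings), settings);
		return printer.apply(epochDays);
	}
}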
*/ - private static Consumer getDateSpanner(ResultInfo dateInfo, int dateIndex, AtomicReference spanRef) { + private static Consumer getDateSpanner(ResultInfo dateInfo, int dateIndex, AtomicReference spanRef, PrintSettings printSettings) { final Consumer spanner = date -> spanRef.getAndAccumulate(date, (old, incoming) -> incoming.spanClosed(old)); - final BiConsumer> extractor = validityDateExtractor(dateInfo.getType()); + final JavaResultPrinters printers = new JavaResultPrinters(); + final BiConsumer> extractor = validityDateExtractor(dateInfo.getType(), printSettings, printers); return line -> extractor.accept(line[dateIndex], spanner); } - public static BiConsumer> validityDateExtractor(ResultType dateType) { + public static BiConsumer> validityDateExtractor(ResultType dateType, PrintSettings printSettings, JavaResultPrinters printers) { + if (dateType.equals(ResultType.Primitive.DATE_RANGE)) { - return (obj, con) -> con.accept(CDateRange.fromList((List) obj)); + final Printer printer = printers.getDateRangePrinter(printSettings); + return (obj, con) -> con.accept((CDateRange) printer.apply(obj)); } if (dateType.equals(ResultType.Primitive.DATE)) { - return (obj, con) -> con.accept(CDateRange.exactly((Integer) obj)); + final Printer printer = printers.getDatePrinter(printSettings); + return (obj, con) -> con.accept(CDateRange.exactly((LocalDate) printer.apply(obj))); } if (dateType instanceof ResultType.ListT listT) { - final BiConsumer> extractor = validityDateExtractor(listT.getElementType()); + final BiConsumer> extractor = validityDateExtractor(listT.getElementType(), printSettings, printers); return (obj, con) -> ((List) obj).forEach(date -> extractor.accept(date, con)); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/StringColumnStatsCollector.java b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/StringColumnStatsCollector.java index 19babdfcb5..c66d368b65 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/statistics/StringColumnStatsCollector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/statistics/StringColumnStatsCollector.java @@ -8,7 +8,6 @@ import c10n.C10N; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.commons.math3.stat.Frequency; @@ -19,15 +18,13 @@ public class StringColumnStatsCollector extends ColumnStatsCollector { private final Frequency frequencies = new Frequency(); private final long limit; - private final ResultPrinters.Printer printer; private int nulls = 0; - public StringColumnStatsCollector(String name, String label, String description, ResultPrinters.Printer printer, PrintSettings printSettings, long limit) { + public StringColumnStatsCollector(String name, String label, String description, PrintSettings printSettings, long limit) { super(name, label, description, printSettings); this.limit = limit; - this.printer = printer; } @Override @@ -38,7 +35,7 @@ public void consume(Object value) { } // In case there's a mapping, we need to map the value - final String printed = printer.print(value); + final String printed = (String) value; frequencies.addValue(printed); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/types/SemanticType.java b/backend/src/main/java/com/bakdata/conquery/models/types/SemanticType.java index ed077b225a..04bab285bc 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/models/types/SemanticType.java +++ b/backend/src/main/java/com/bakdata/conquery/models/types/SemanticType.java @@ -2,12 +2,14 @@ import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.config.IdColumnConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonTypeInfo; import lombok.Data; @@ -73,8 +75,7 @@ public static class IdT extends SemanticType { @Data @RequiredArgsConstructor(onConstructor_ = @JsonCreator) public static class SecondaryIdT extends SemanticType { - @NsIdRef - private final SecondaryIdDescription secondaryId; + private final SecondaryIdDescriptionId secondaryId; } /** @@ -102,8 +103,7 @@ public static class HiddenT extends SemanticType { @Data @RequiredArgsConstructor(onConstructor_ = @JsonCreator) public static class SelectResultT extends SemanticType { - @NsIdRef - private final Select select; + private final SelectId select; } /** @@ -115,8 +115,7 @@ public static class SelectResultT extends SemanticType { @Data @RequiredArgsConstructor(onConstructor_ = @JsonCreator) public static class ConceptColumnT extends SemanticType { - @NsIdRef - private final Concept concept; + private final ConceptId concept; } @@ -130,8 +129,7 @@ public static class ConceptColumnT extends SemanticType { @Data @RequiredArgsConstructor(onConstructor_ = @JsonCreator) public static class ColumnT extends SemanticType { - @NsIdRef - private final Column column; + private final ColumnId column; } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java b/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java index e9c3e07d3c..08e353f19a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java @@ -4,22 +4,23 @@ import java.io.IOException; import java.util.Collection; import java.util.List; -import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.stream.Collectors; +import com.bakdata.conquery.io.jackson.Injectable; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.mode.NamespaceHandler; import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import 
com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.index.IndexKey; @@ -35,7 +36,7 @@ @Slf4j @RequiredArgsConstructor @JsonIgnoreType -public class DatasetRegistry extends IdResolveContext implements Closeable { +public class DatasetRegistry implements Closeable, NamespacedStorageProvider, Injectable { private final ConcurrentMap datasets = new ConcurrentHashMap<>(); @Getter @@ -48,18 +49,19 @@ public class DatasetRegistry extends IdResolveContext imple private final NamespaceHandler namespaceHandler; + @Getter private final IndexService indexService; public N createNamespace(Dataset dataset, MetaStorage metaStorage, Environment environment) throws IOException { // Prepare empty storage NamespaceStorage datasetStorage = new NamespaceStorage(config.getStorage(), "dataset_" + dataset.getName()); - final ObjectMapper persistenceMapper = internalMapperFactory.createNamespacePersistenceMapper(this); + final ObjectMapper persistenceMapper = internalMapperFactory.createNamespacePersistenceMapper(datasetStorage); // Each store injects its own IdResolveCtx so each needs its own mapper - datasetStorage.openStores(Jackson.copyMapperAndInjectables((persistenceMapper))); + datasetStorage.openStores(Jackson.copyMapperAndInjectables((persistenceMapper)), environment.metrics()); datasetStorage.loadData(); datasetStorage.updateDataset(dataset); - datasetStorage.updateIdMapping(new EntityIdMap()); + datasetStorage.updateIdMapping(new EntityIdMap(datasetStorage)); datasetStorage.setPreviewConfig(new PreviewConfig()); datasetStorage.close(); @@ -89,15 +91,6 @@ public void removeNamespace(DatasetId id) { } } - @Override - public CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException { - if (!datasets.containsKey(dataset)) { - throw new NoSuchElementException(String.format("Did not find Dataset[%s] in [%s]", dataset, datasets.keySet())); - } - - return datasets.get(dataset).getStorage().getCentralRegistry(); - } - public List getAllDatasets() { return datasets.values().stream().map(Namespace::getStorage).map(NamespaceStorage::getDataset).collect(Collectors.toList()); } @@ -128,15 +121,18 @@ public void close() { @Override public MutableInjectableValues inject(MutableInjectableValues values) { - // Make this class also available under DatasetRegistry - super.inject(values).add(DatasetRegistry.class, this); - indexService.inject(values); - - return values; + // Make this class also available under DatasetRegistry + return values.add(NamespacedStorageProvider.class, this) + .add(this.getClass(), this); } public void resetIndexService() { indexService.evictCache(); } + + @Override + public NamespacedStorage getStorage(DatasetId datasetId) { + return datasets.get(datasetId).getStorage(); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java b/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java index 0f7f8824e7..c018a2a797 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java @@ -12,6 +12,7 @@ import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.Concept; +import 
com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.messages.namespaces.specific.CollectColumnValuesJob; import com.bakdata.conquery.models.messages.namespaces.specific.UpdateMatchingStatsMessage; @@ -53,17 +54,17 @@ public DistributedNamespace( @Override void updateMatchingStats() { - final Collection> concepts = getStorage().getAllConcepts() - .stream() - .filter(concept -> concept.getMatchingStats() == null) - .collect(Collectors.toSet()); + final Collection concepts = getStorage().getAllConcepts() + .filter(concept -> concept.getMatchingStats() == null) + .map(Concept::getId) + .collect(Collectors.toSet()); getWorkerHandler().sendToAll(new UpdateMatchingStatsMessage(concepts)); } @Override void registerColumnValuesInSearch(Set columns) { log.trace("Sending columns to collect values on shards: {}", Arrays.toString(columns.toArray())); - getWorkerHandler().sendToAll(new CollectColumnValuesJob(columns, this)); + getWorkerHandler().sendToAll(new CollectColumnValuesJob(columns.stream().map(Column::getId).collect(Collectors.toSet()), this)); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/IdResolveContext.java b/backend/src/main/java/com/bakdata/conquery/models/worker/IdResolveContext.java deleted file mode 100644 index d65f7850c0..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/IdResolveContext.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.bakdata.conquery.models.worker; - -import java.util.NoSuchElementException; -import java.util.Optional; - -import com.bakdata.conquery.io.jackson.Injectable; -import com.bakdata.conquery.io.jackson.MutableInjectableValues; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonMappingException; -import io.dropwizard.jackson.Jackson; - -/** - * Superclass for implementations that map ids to existing objects in the conquery id system. - * This is a bridge between {@link Jackson} and conquery id serdes. 
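With IdResolveContext and CentralRegistry gone, namespaced ids are resolved against the dataset's storage obtained from a NamespacedStorageProvider (see DatasetRegistry and ShardWorkers below). A minimal sketch of the caller side; the wrapper class is invented, and getConcept(ConceptId) merely stands in for whatever per-type accessor NamespacedStorage actually offers:

import com.bakdata.conquery.io.storage.NamespacedStorage;
import com.bakdata.conquery.models.datasets.concepts.Concept;
import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider;
import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId;

class IdResolutionSketch {

	// Pick the dataset-scoped storage via the provider, then look the object up by its id.
	static Concept<?> resolveConcept(NamespacedStorageProvider provider, ConceptId id) {
		final NamespacedStorage storage = provider.getStorage(id.getDataset());
		return storage.getConcept(id); // assumed accessor, named for illustration only
	}
}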
- */ -public abstract class IdResolveContext implements Injectable { - - public static IdResolveContext get(DeserializationContext ctxt) throws JsonMappingException { - IdResolveContext namespaces = (IdResolveContext) ctxt - .findInjectableValue(IdResolveContext.class.getName(), null, null); - if(namespaces == null) { - throw new NoSuchElementException("Could not find injected namespaces"); - } - return namespaces; - } - - @Override - public MutableInjectableValues inject(MutableInjectableValues values) { - return values.add(IdResolveContext.class, this); - } - - public abstract CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException; - - public & NamespacedId, T extends Identifiable> T resolve(ID id) { - return findRegistry(id.getDataset()).resolve(id); - } - - public & NamespacedId, T extends Identifiable> Optional getOptional(ID id) { - return findRegistry(id.getDataset()).getOptional(id); - } - - public & NamespacedId, T extends Identifiable> Optional getOptional(DatasetId dataset, ID id) { - return findRegistry(dataset).getOptional(id); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java b/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java index d2ad40c0c4..0e04fa8e81 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java @@ -3,7 +3,6 @@ import java.io.IOException; import java.util.Collection; import java.util.List; -import java.util.NoSuchElementException; import java.util.Set; import com.bakdata.conquery.apiv1.query.concept.specific.external.EntityResolver; @@ -14,8 +13,6 @@ import com.bakdata.conquery.models.datasets.PreviewConfig; import com.bakdata.conquery.models.datasets.concepts.Searchable; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.jobs.SimpleJob; import com.bakdata.conquery.models.jobs.UpdateFilterSearchJob; @@ -31,7 +28,7 @@ @Getter @ToString(onlyExplicitlyIncluded = true) @RequiredArgsConstructor -public abstract class Namespace extends IdResolveContext { +public abstract class Namespace { private final ObjectMapper preprocessMapper; @@ -80,10 +77,6 @@ public void remove() { storage.removeStorage(); } - public CentralRegistry getCentralRegistry() { - return getStorage().getCentralRegistry(); - } - public int getNumberOfEntities() { return getStorage().getNumberOfEntities(); } @@ -92,25 +85,17 @@ public PreviewConfig getPreviewConfig() { return getStorage().getPreviewConfig(); } - @Override - public CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException { - if (!this.getDataset().getId().equals(dataset)) { - throw new NoSuchElementException("Wrong dataset: '" + dataset + "' (expected: '" + this.getDataset().getId() + "')"); - } - return storage.getCentralRegistry(); - } - public void updateInternToExternMappings() { - storage.getAllConcepts().stream() - .flatMap(c -> c.getConnectors().stream()) - .flatMap(con -> con.getSelects().stream()) - .filter(MappableSingleColumnSelect.class::isInstance) - .map(MappableSingleColumnSelect.class::cast) - .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s::loadMapping))); - - 
storage.getSecondaryIds().stream() - .filter(desc -> desc.getMapping() != null) - .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s.getMapping()::init))); + storage.getAllConcepts() + .flatMap(c -> c.getConnectors().stream()) + .flatMap(con -> con.getSelects().stream()) + .filter(MappableSingleColumnSelect.class::isInstance) + .map(MappableSingleColumnSelect.class::cast) + .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s::loadMapping))); + + storage.getSecondaryIds() + .filter(desc -> desc.getMapping() != null) + .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s.getMapping().resolve()::init))); } /** diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/ShardWorkers.java b/backend/src/main/java/com/bakdata/conquery/models/worker/ShardWorkers.java index d365519aa9..8aed99c098 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/ShardWorkers.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/ShardWorkers.java @@ -1,6 +1,5 @@ package com.bakdata.conquery.models.worker; -import java.util.NoSuchElementException; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -8,22 +7,23 @@ import java.util.concurrent.atomic.AtomicInteger; import com.bakdata.conquery.commands.ShardNode; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.io.storage.WorkerStorage; import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.StoreFactory; import com.bakdata.conquery.models.config.ThreadPoolDefinition; import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.jobs.SimpleJob; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.ObjectMapper; +import io.dropwizard.core.setup.Environment; import io.dropwizard.lifecycle.Managed; -import jakarta.validation.Validator; import lombok.Getter; import lombok.NonNull; -import lombok.Setter; import lombok.extern.slf4j.Slf4j; /** @@ -32,25 +32,20 @@ * Each Shard contains one {@link Worker} per {@link Dataset}. */ @Slf4j -public class ShardWorkers extends IdResolveContext implements Managed { - @Getter @Setter - private AtomicInteger nextWorker = new AtomicInteger(0); +public class ShardWorkers implements NamespacedStorageProvider, Managed { @Getter private final ConcurrentHashMap workers = new ConcurrentHashMap<>(); @JsonIgnore private final transient ConcurrentMap dataset2Worker = new ConcurrentHashMap<>(); - /** * Shared ExecutorService among Workers for Jobs. 
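The jobsThreadPool referenced here is the one executor every Worker on a shard shares, pre-started so the first job does not pay thread-creation latency. A plain JDK sketch of that setup; the class and method names are illustrative and the pool sizing is not taken from the real config:

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

class SharedJobsPoolSketch {

	// One fixed-size pool shared by all workers; prestartAllCoreThreads() spins the threads up eagerly.
	static ThreadPoolExecutor createJobsPool(int threads) {
		final ThreadPoolExecutor pool = new ThreadPoolExecutor(threads, threads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>());
		pool.prestartAllCoreThreads();
		return pool;
	}
}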
*/ private final ThreadPoolExecutor jobsThreadPool; private final ThreadPoolDefinition queryThreadPoolDefinition; - private final InternalMapperFactory internalMapperFactory; - private final int entityBucketSize; - private final int secondaryIdSubPlanRetention; + private final AtomicInteger nextWorker = new AtomicInteger(0); public ShardWorkers(ThreadPoolDefinition queryThreadPoolDefinition, InternalMapperFactory internalMapperFactory, int entityBucketSize, int secondaryIdSubPlanRetention) { @@ -66,54 +61,40 @@ public ShardWorkers(ThreadPoolDefinition queryThreadPoolDefinition, InternalMapp jobsThreadPool.prestartAllCoreThreads(); } - public Worker createWorker(WorkerStorage storage, boolean failOnError) { + public Worker createWorker(WorkerStorage storage, boolean failOnError, Environment environment) { - final ObjectMapper persistenceMapper = internalMapperFactory.createWorkerPersistenceMapper(this); - final ObjectMapper communicationMapper = internalMapperFactory.createWorkerCommunicationMapper(this); + final ObjectMapper persistenceMapper = internalMapperFactory.createWorkerPersistenceMapper(storage); + final ObjectMapper communicationMapper = internalMapperFactory.createWorkerCommunicationMapper(storage); final Worker worker = - new Worker(queryThreadPoolDefinition, storage, jobsThreadPool, failOnError, entityBucketSize, persistenceMapper, communicationMapper, secondaryIdSubPlanRetention); + new Worker(queryThreadPoolDefinition, storage, jobsThreadPool, failOnError, entityBucketSize, persistenceMapper, communicationMapper, secondaryIdSubPlanRetention, environment); addWorker(worker); return worker; } - public Worker createWorker(Dataset dataset, StoreFactory storageConfig, @NonNull String name, Validator validator, boolean failOnError) { - - final ObjectMapper persistenceMapper = internalMapperFactory.createWorkerPersistenceMapper(this); + private void addWorker(Worker worker) { + nextWorker.incrementAndGet(); + workers.put(worker.getInfo().getId(), worker); + dataset2Worker.put(worker.getStorage().getDataset().getId(), worker); + } - final ObjectMapper communicationMapper = internalMapperFactory.createWorkerCommunicationMapper(this); + public Worker createWorker(Dataset dataset, StoreFactory storageConfig, @NonNull String name, Environment environment, boolean failOnError) { final Worker worker = - Worker.newWorker(dataset, queryThreadPoolDefinition, jobsThreadPool, storageConfig, name, failOnError, entityBucketSize, persistenceMapper, communicationMapper, secondaryIdSubPlanRetention); + Worker.newWorker(dataset, queryThreadPoolDefinition, jobsThreadPool, storageConfig, name, failOnError, entityBucketSize, internalMapperFactory, secondaryIdSubPlanRetention, environment); addWorker(worker); return worker; } - private void addWorker(Worker worker) { - nextWorker.incrementAndGet(); - workers.put(worker.getInfo().getId(), worker); - dataset2Worker.put(worker.getStorage().getDataset().getId(), worker); - } - public Worker getWorker(WorkerId worker) { return Objects.requireNonNull(workers.get(worker)); } - - @Override - public CentralRegistry findRegistry(DatasetId dataset) { - if (!dataset2Worker.containsKey(dataset)) { - throw new NoSuchElementException(String.format("Did not find Dataset[%s] in [%s]", dataset, dataset2Worker.keySet())); - } - - return dataset2Worker.get(dataset).getStorage().getCentralRegistry(); - } - public void removeWorkerFor(DatasetId dataset) { final Worker worker = dataset2Worker.get(dataset); @@ -133,7 +114,7 @@ removed from dataset2Worker (which is used in 
deserialization of NamespacedIds, removed.remove(); } catch(Exception e) { - log.error("Failed to remove storage "+removed, e); + log.error("Failed to remove storage {}", removed, e); } } @@ -161,4 +142,14 @@ public void stop() { w.close(); } } + + @Override + public NamespacedStorage getStorage(DatasetId datasetId) { + return dataset2Worker.get(datasetId).getStorage(); + } + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(NamespacedStorageProvider.class, this); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/SingletonNamespaceCollection.java b/backend/src/main/java/com/bakdata/conquery/models/worker/SingletonNamespaceCollection.java deleted file mode 100644 index 4f1d3799b3..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/SingletonNamespaceCollection.java +++ /dev/null @@ -1,18 +0,0 @@ -package com.bakdata.conquery.models.worker; - -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - -@RequiredArgsConstructor -public class SingletonNamespaceCollection extends IdResolveContext { - - @NonNull - private final CentralRegistry registry; - - @Override - public CentralRegistry findRegistry(DatasetId dataset) { - return registry; - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/Worker.java b/backend/src/main/java/com/bakdata/conquery/models/worker/Worker.java index 3e20e742fc..d24ec985d5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/Worker.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/Worker.java @@ -4,11 +4,12 @@ import java.io.IOException; import java.util.concurrent.ExecutorService; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.io.mina.MessageSender; import com.bakdata.conquery.io.mina.NetworkSession; import com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage; import com.bakdata.conquery.io.storage.WorkerStorage; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; +import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.StoreFactory; import com.bakdata.conquery.models.config.ThreadPoolDefinition; import com.bakdata.conquery.models.datasets.Dataset; @@ -18,7 +19,9 @@ import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.BucketManager; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; import com.bakdata.conquery.models.messages.network.MessageToManagerNode; @@ -26,6 +29,7 @@ import com.bakdata.conquery.models.messages.network.specific.ForwardToNamespace; import com.bakdata.conquery.models.query.QueryExecutor; import com.fasterxml.jackson.databind.ObjectMapper; +import io.dropwizard.core.setup.Environment; import lombok.Getter; import lombok.NonNull; import lombok.Setter; @@ -41,8 +45,6 @@ public class Worker implements MessageSender.Transforming getConcept() { return concept; } @DELETE public void removeConcept() { - processor.deleteConcept(concept); + 
processor.deleteConcept(concept.getId()); } } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java index ee81d656cb..6427619cde 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java @@ -73,6 +73,7 @@ public class AdminDatasetProcessor { public synchronized Dataset addDataset(Dataset dataset) throws IOException { final String name = dataset.getName(); + if (datasetRegistry.get(new DatasetId(name)) != null) { throw new WebApplicationException("Dataset already exists", Response.Status.CONFLICT); } @@ -86,12 +87,12 @@ public synchronized Dataset addDataset(Dataset dataset) throws IOException { public synchronized void deleteDataset(Dataset dataset) { final Namespace namespace = datasetRegistry.get(dataset.getId()); - if (!namespace.getStorage().getTables().isEmpty()) { + if (namespace.getStorage().getTables().findAny().isPresent()) { throw new WebApplicationException( String.format( "Cannot delete dataset `%s`, because it still has tables: `%s`", dataset.getId(), - namespace.getStorage().getTables().stream() + namespace.getStorage().getTables() .map(Table::getId) .map(Objects::toString) .collect(Collectors.joining(",")) @@ -109,7 +110,7 @@ public synchronized void deleteDataset(Dataset dataset) { */ public synchronized void addSecondaryId(Namespace namespace, SecondaryIdDescription secondaryId) { final Dataset dataset = namespace.getDataset(); - secondaryId.setDataset(dataset); + secondaryId.setDataset(dataset.getId()); if (namespace.getStorage().getSecondaryId(secondaryId.getId()) != null) { throw new WebApplicationException("SecondaryId already exists", Response.Status.CONFLICT); @@ -125,12 +126,12 @@ public synchronized void addSecondaryId(Namespace namespace, SecondaryIdDescript * Delete SecondaryId if it does not have any dependents. */ public synchronized void deleteSecondaryId(@NonNull SecondaryIdDescription secondaryId) { - final Namespace namespace = datasetRegistry.get(secondaryId.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(secondaryId.getDataset()); // Before we commit this deletion, we check if this SecondaryId still has dependent Columns. 
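The hunk below switches this check from comparing SecondaryIdDescription objects to comparing their ids. A standalone sketch of the same dependents-before-delete pattern, assuming, as the hunk suggests, that getTables() yields a Stream and Column#getSecondaryId() now returns the id; the helper class and method are invented:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;

import com.bakdata.conquery.models.datasets.Column;
import com.bakdata.conquery.models.datasets.Table;
import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId;

class DependentColumnsSketch {

	// Columns still referencing the SecondaryId; deletion should be refused while this list is non-empty.
	static List<Column> dependentColumns(Stream<Table> tables, SecondaryIdDescriptionId secondaryId) {
		return tables.map(Table::getColumns)
					 .flatMap(Arrays::stream)
					 .filter(column -> secondaryId.equals(column.getSecondaryId()))
					 .toList();
	}
}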
- final List dependents = namespace.getStorage().getTables().stream() + final List dependents = namespace.getStorage().getTables() .map(Table::getColumns).flatMap(Arrays::stream) - .filter(column -> secondaryId.equals(column.getSecondaryId())) + .filter(column -> secondaryId.getId().equals(column.getSecondaryId())) .toList(); if (!dependents.isEmpty()) { @@ -157,10 +158,11 @@ public synchronized void deleteSecondaryId(@NonNull SecondaryIdDescription secon public synchronized void addTable(@NonNull Table table, Namespace namespace) { Dataset dataset = namespace.getDataset(); + DatasetId datasetId = dataset.getId(); if (table.getDataset() == null) { - table.setDataset(dataset); + table.setDataset(datasetId); } - else if (!table.getDataset().equals(dataset)) { + else if (!table.getDataset().equals(datasetId)) { throw new IllegalArgumentException(); } @@ -181,7 +183,7 @@ else if (!table.getDataset().equals(dataset)) { * Therefore, the concept will be deleted first then added */ public synchronized void updateConcept(@NonNull Dataset dataset, @NonNull Concept concept) { - concept.setDataset(dataset); + concept.setDataset(dataset.getId()); if (!datasetRegistry.get(dataset.getId()).getStorage().hasConcept(concept.getId())) { throw new NotFoundException("Can't find the concept in the dataset " + concept.getId()); } @@ -194,14 +196,14 @@ public synchronized void updateConcept(@NonNull Dataset dataset, @NonNull Concep * Add the concept to the dataset if it does not exist yet */ public synchronized void addConcept(@NonNull Dataset dataset, @NonNull Concept concept, boolean force) { - concept.setDataset(dataset); + concept.setDataset(dataset.getId()); ValidatorHelper.failOnError(log, environment.getValidator().validate(concept)); if (datasetRegistry.get(dataset.getId()).getStorage().hasConcept(concept.getId())) { if (!force) { throw new WebApplicationException("Can't replace already existing concept " + concept.getId(), Response.Status.CONFLICT); } - deleteConcept(concept); + deleteConcept(concept.getId()); log.info("Force deleted previous concept: {}", concept.getId()); } @@ -210,6 +212,15 @@ public synchronized void addConcept(@NonNull Dataset dataset, @NonNull Concept deleteTable(Table table, boolean force) { - final Namespace namespace = datasetRegistry.get(table.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(table.getDataset()); - final List> dependentConcepts = namespace.getStorage().getAllConcepts().stream().flatMap(c -> c.getConnectors().stream()) - .filter(con -> con.getTable().equals(table)) + TableId tableId = table.getId(); + final List> dependentConcepts = namespace.getStorage().getAllConcepts().flatMap(c -> c.getConnectors().stream()) + .filter(con -> con.getResolvedTableId().equals(tableId)) .map(Connector::getConcept) .collect(Collectors.toList()); if (force || dependentConcepts.isEmpty()) { for (Concept concept : dependentConcepts) { - deleteConcept(concept); + deleteConcept(concept.getId()); } - namespace.getStorage().getAllImports().stream() - .filter(imp -> imp.getTable().equals(table)) + namespace.getStorage().getAllImports() + .filter(imp -> imp.getTable().equals(tableId)) .forEach(this::deleteImport); - namespace.getStorage().removeTable(table.getId()); + namespace.getStorage().removeTable(tableId); storageListener.onRemoveTable(table); } @@ -303,13 +306,10 @@ public synchronized List deleteTable(Table table, boolean force) { } /** - * Deletes a concept. + * Deletes an import. 
*/ - public synchronized void deleteConcept(Concept concept) { - final Namespace namespace = datasetRegistry.get(concept.getDataset().getId()); - - namespace.getStorage().removeConcept(concept.getId()); - storageListener.onDeleteConcept(concept); + public synchronized void deleteImport(Import imp) { + this.importHandler.deleteImport(imp); } /** @@ -339,9 +339,9 @@ public void addInternToExternMapping(Namespace namespace, InternToExternMapper i } public List deleteInternToExternMapping(InternToExternMapper internToExternMapper, boolean force) { - final Namespace namespace = datasetRegistry.get(internToExternMapper.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(internToExternMapper.getDataset()); - final Set> dependentConcepts = namespace.getStorage().getAllConcepts().stream() + final Set> dependentConcepts = namespace.getStorage().getAllConcepts() .filter( c -> c.getSelects().stream() .filter(MappableSingleColumnSelect.class::isInstance) @@ -354,7 +354,7 @@ public List deleteInternToExternMapping(InternToExternMapper internTo if (force || dependentConcepts.isEmpty()) { for (Concept concept : dependentConcepts) { - deleteConcept(concept); + deleteConcept(concept.getId()); } namespace.getStorage().removeInternToExternMapper(internToExternMapper.getId()); @@ -368,7 +368,7 @@ public void clearIndexCache() { } public void addSearchIndex(Namespace namespace, SearchIndex searchIndex) { - searchIndex.setDataset(namespace.getDataset()); + searchIndex.setDataset(namespace.getDataset().getId()); ValidatorHelper.failOnError(log, environment.getValidator().validate(searchIndex)); @@ -381,9 +381,9 @@ public void addSearchIndex(Namespace namespace, SearchIndex searchIndex) { } public List deleteSearchIndex(SearchIndex searchIndex, boolean force) { - final Namespace namespace = datasetRegistry.get(searchIndex.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(searchIndex.getDataset()); - final List> dependentConcepts = namespace.getStorage().getAllConcepts().stream() + final List> dependentConcepts = namespace.getStorage().getAllConcepts() .filter( c -> c.getConnectors().stream() .map(Connector::getFilters) @@ -392,13 +392,13 @@ public List deleteSearchIndex(SearchIndex searchIndex, boolean force) .map(SelectFilter.class::cast) .map(SelectFilter::getTemplate) .filter(Objects::nonNull) - .anyMatch(searchIndex::equals) + .anyMatch(searchIndex.getId()::equals) ) .toList(); if (force || dependentConcepts.isEmpty()) { for (Concept concept : dependentConcepts) { - deleteConcept(concept); + deleteConcept(concept.getId()); } namespace.getStorage().removeSearchIndex(searchIndex.getId()); diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetResource.java index 9c7cce00e9..ff91326468 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetResource.java @@ -200,13 +200,16 @@ public void setStructure(@NotNull @Valid StructureNode[] structure) { @GET @Path("tables") public List listTables() { - return namespace.getStorage().getTables().stream().map(Table::getId).collect(Collectors.toList()); + return namespace.getStorage().getTables().map(Table::getId).collect(Collectors.toList()); } @GET @Path("concepts") public List listConcepts() { - return 
namespace.getStorage().getAllConcepts().stream().map(Concept::getId).collect(Collectors.toList()); + return namespace.getStorage() + .getAllConcepts() + .map(Concept::getId) + .collect(Collectors.toList()); } @DELETE diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java index 3090a7d6af..fee4ed1366 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java @@ -2,10 +2,6 @@ import java.util.List; import java.util.stream.Collectors; - -import com.bakdata.conquery.io.jersey.ExtraMimeTypes; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import jakarta.inject.Inject; import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; @@ -14,6 +10,10 @@ import jakarta.ws.rs.POST; import jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; + +import com.bakdata.conquery.io.jersey.ExtraMimeTypes; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; @@ -29,7 +29,7 @@ public class AdminDatasetsResource { @SneakyThrows @POST @Consumes(ExtraMimeTypes.JSON_STRING) - public void addDataset(@Valid @NotNull Dataset dataset) { + public void addDataset(@NotNull @Valid Dataset dataset) { processor.addDataset(dataset); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java index aab83fa3a7..a84e2b6c0e 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java @@ -11,6 +11,7 @@ import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import jakarta.validation.Validator; import com.bakdata.conquery.commands.ManagerNode; @@ -92,19 +93,15 @@ public synchronized void addRole(Role role) throws JSONException { public void deleteRole(RoleId role) { log.info("Deleting {}", role); - for (User user : storage.getAllUsers()) { - user.removeRole(role); - } + storage.getAllUsers().forEach(user -> user.removeRole(role)); - for (Group group : storage.getAllGroups()) { - group.removeRole(role); - } + storage.getAllGroups().forEach(group -> group.removeRole(role)); storage.removeRole(role); } public SortedSet getAllRoles() { - return new TreeSet<>(storage.getAllRoles()); + return storage.getAllRoles().collect(Collectors.toCollection(TreeSet::new)); } @@ -113,6 +110,7 @@ public SortedSet getAllRoles() { * * @param owner to which the permission is assigned * @param permission The permission to create. + * * @throws JSONException is thrown upon processing JSONs. 
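getAllRoles above, and the other getAll* methods in this class, now collect MetaStorage streams into sorted sets instead of copying collections. The pattern in isolation; the generic helper is invented for illustration:

import java.util.TreeSet;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class SortedViewSketch {

	// Replacement for `new TreeSet<>(collection)` once the source only exposes a Stream.
	static <T extends Comparable<T>> TreeSet<T> toSortedSet(Stream<T> values) {
		return values.collect(Collectors.toCollection(TreeSet::new));
	}
}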
*/ public void createPermission(PermissionOwner owner, ConqueryPermission permission) throws JSONException { @@ -131,13 +129,11 @@ public void deletePermission(PermissionOwner owner, ConqueryPermission permis public TreeSet getAllUsers() { - return new TreeSet<>(storage.getAllUsers()); + return storage.getAllUsers().collect(Collectors.toCollection(TreeSet::new)); } public synchronized void deleteUser(UserId user) { - for (Group group : storage.getAllGroups()) { - group.removeMember(user); - } + storage.getAllGroups().forEach(group -> group.removeMember(user)); storage.removeUser(user); log.trace("Removed user {} from the storage.", user); } @@ -160,7 +156,7 @@ public void addUser(User user) { } public TreeSet getAllGroups() { - return new TreeSet<>(storage.getAllGroups()); + return storage.getAllGroups().collect(Collectors.toCollection(TreeSet::new)); } public void addGroups(List groups) { @@ -217,7 +213,7 @@ public String getPermissionOverviewAsCSV() { /** * Renders the permission overview for certain {@link User} in form of a CSV. */ - public String getPermissionOverviewAsCSV(Collection users) { + public String getPermissionOverviewAsCSV(Stream users) { final StringWriter sWriter = new StringWriter(); final CsvWriter writer = config.getCsv().createWriter(sWriter); final List scope = config @@ -226,9 +222,9 @@ public String getPermissionOverviewAsCSV(Collection users) { // Header writeAuthOverviewHeader(writer, scope); // Body - for (User user : users) { - writeAuthOverviewUser(writer, scope, user, storage, config); - } + users.forEach(user -> + writeAuthOverviewUser(writer, scope, user, storage, config) + ); return sWriter.toString(); } @@ -263,7 +259,7 @@ private static void writeAuthOverviewUser(CsvWriter writer, List scope, * Renders the permission overview for all users in a certain {@link Group} in form of a CSV. 
*/ public String getPermissionOverviewAsCSV(Group group) { - return getPermissionOverviewAsCSV(group.getMembers().stream().map(storage::getUser).collect(Collectors.toList())); + return getPermissionOverviewAsCSV(group.getMembers().stream().map(storage::getUser)); } public boolean isBusy() { diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java index 0f8039d7a3..5ed9d37cde 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java @@ -9,6 +9,7 @@ import java.util.Optional; import java.util.OptionalLong; import java.util.UUID; +import java.util.stream.Stream; import jakarta.inject.Inject; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.GET; @@ -21,6 +22,7 @@ import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; +import com.bakdata.conquery.apiv1.execution.ExecutionStatus; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; import com.bakdata.conquery.io.jersey.ExtraMimeTypes; import com.bakdata.conquery.io.storage.MetaStorage; @@ -78,9 +80,7 @@ public Response cancelJob(@PathParam(JOB_ID) UUID jobId) { info.send(new CancelJobMessage(jobId)); } - return Response - .seeOther(UriBuilder.fromPath("/admin/").path(AdminUIResource.class, "getJobs").build()) - .build(); + return Response.seeOther(UriBuilder.fromPath("/admin/").path(AdminUIResource.class, "getJobs").build()).build(); } @GET @@ -97,7 +97,7 @@ public boolean isBusy() { @GET @Path("/queries") - public FullExecutionStatus[] getQueries(@Auth Subject currentUser, @QueryParam("limit") OptionalLong maybeLimit, @QueryParam("since") Optional maybeSince) { + public Stream getQueries(@Auth Subject currentUser, @QueryParam("limit") OptionalLong maybeLimit, @QueryParam("since") Optional maybeSince) { final LocalDate since = maybeSince.map(LocalDate::parse).orElse(LocalDate.now()); final long limit = maybeLimit.orElse(100); @@ -105,13 +105,18 @@ public FullExecutionStatus[] getQueries(@Auth Subject currentUser, @QueryParam(" final MetaStorage storage = processor.getStorage(); - return storage.getAllExecutions().stream() + return storage.getAllExecutions() + .filter(t -> t.getCreationTime().toLocalDate().isAfter(since) || t.getCreationTime().toLocalDate().isEqual(since)) .limit(limit) .map(t -> { - Namespace namespace = processor.getDatasetRegistry().get(t.getDataset().getId()); try { - return t.buildStatusFull(currentUser, namespace); + if (t.isInitialized()) { + final Namespace namespace = processor.getDatasetRegistry().get(t.getDataset()); + return t.buildStatusFull(currentUser, namespace); + } + + return t.buildStatusOverview(currentUser); } catch (ConqueryError e) { // Initialization of execution probably failed, so we construct a status based on the overview status @@ -121,8 +126,7 @@ public FullExecutionStatus[] getQueries(@Auth Subject currentUser, @QueryParam(" fullExecutionStatus.setError(e); return fullExecutionStatus; } - }) - .toArray(FullExecutionStatus[]::new); + }); } @POST diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminTablesResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminTablesResource.java index f10c8369d1..e8f63e59c1 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminTablesResource.java +++ 
b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminTablesResource.java @@ -4,15 +4,6 @@ import java.util.List; import java.util.stream.Collectors; - -import com.bakdata.conquery.apiv1.AdditionalMediaTypes; -import com.bakdata.conquery.io.jersey.ExtraMimeTypes; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Import; -import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; -import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; -import com.bakdata.conquery.models.worker.Namespace; import jakarta.annotation.PostConstruct; import jakarta.inject.Inject; import jakarta.ws.rs.Consumes; @@ -25,6 +16,15 @@ import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.Status; + +import com.bakdata.conquery.apiv1.AdditionalMediaTypes; +import com.bakdata.conquery.io.jersey.ExtraMimeTypes; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.worker.Namespace; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Setter; @@ -84,8 +84,7 @@ public Response remove(@QueryParam("force") @DefaultValue("false") boolean force public List listImports() { return namespace.getStorage() .getAllImports() - .stream() - .filter(imp -> imp.getTable().equals(table)) + .filter(imp -> imp.getTable().equals(table.getId())) .map(Import::getId) .collect(Collectors.toList()); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java index edac579d1f..5325de3b01 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java @@ -12,6 +12,7 @@ import java.util.TreeSet; import java.util.function.Predicate; import java.util.stream.Collectors; +import java.util.stream.Stream; import jakarta.inject.Inject; import com.bakdata.conquery.io.cps.CPSTypeIdResolver; @@ -54,11 +55,11 @@ * Wrapper processor that transforms internal representations of the {@link AdminProcessor} into * objects that are more convenient to handle with freemarker. 
*/ +@Getter @Slf4j @RequiredArgsConstructor(onConstructor_ = {@Inject}) public class UIProcessor { - @Getter private final AdminProcessor adminProcessor; public UIContext getUIContext(String csrfToken) { @@ -75,7 +76,7 @@ public CacheStats getIndexServiceStatistics() { public FrontendAuthOverview getAuthOverview() { Collection overview = new TreeSet<>(); - for (User user : getStorage().getAllUsers()) { + for (User user : getStorage().getAllUsers().toList()) { Collection userGroups = AuthorizationHelper.getGroupsOf(user, getStorage()); Set effectiveRoles = user.getRoles().stream() .map(getStorage()::getRole) @@ -99,22 +100,22 @@ public MetaStorage getStorage() { public FrontendGroupContent getGroupContent(Group group) { Set membersIds = group.getMembers(); - ArrayList availableMembers = new ArrayList<>(getStorage().getAllUsers()); + ArrayList availableMembers = new ArrayList<>(getStorage().getAllUsers().toList()); availableMembers.removeIf(u -> membersIds.contains(u.getId())); List members = membersIds.stream() - .map(id -> { - User user = getStorage().getUser(id); - if (user != null) { - return getUserContent(user); - } - return FrontendUserContent.builder().id(id).build(); - }) - .toList(); + .map(id -> { + User user = getStorage().getUser(id); + if (user != null) { + return getUserContent(user); + } + return FrontendUserContent.builder().id(id).build(); + }) + .toList(); List roles = group.getRoles().stream() - .map(this::getFrontendRoleContent) - .toList(); + .map(this::getFrontendRoleContent) + .toList(); return FrontendGroupContent .builder() @@ -124,7 +125,7 @@ public FrontendGroupContent getGroupContent(Group group) { .members(members) .availableMembers(availableMembers) .roles(roles) - .availableRoles(getStorage().getAllRoles()) + .availableRoles(getStorage().getAllRoles().toList()) .permissions(wrapInFEPermission(group.getPermissions())) .permissionTemplateMap(preparePermissionTemplate()) .build(); @@ -139,7 +140,7 @@ private FrontendRoleContent getFrontendRoleContent(RoleId id) { } public FrontendUserContent getUserContent(User user) { - final Collection availableGroups = new ArrayList<>(getStorage().getAllGroups()); + final Collection availableGroups = new ArrayList<>(getStorage().getAllGroups().toList()); availableGroups.removeIf(g -> g.containsMember(user)); return FrontendUserContent @@ -150,7 +151,7 @@ public FrontendUserContent getUserContent(User user) { .groups(AuthorizationHelper.getGroupsOf(user, getStorage())) .availableGroups(availableGroups) .roles(user.getRoles().stream().map(this::getFrontendRoleContent).collect(Collectors.toList())) - .availableRoles(getStorage().getAllRoles()) + .availableRoles(getStorage().getAllRoles().toList()) .permissions(wrapInFEPermission(user.getPermissions())) .permissionTemplateMap(preparePermissionTemplate()) .build(); @@ -199,20 +200,18 @@ private Map, List>> preparePermissionTemplate( } public List getUsers(Role role) { - Collection user = getStorage().getAllUsers(); - return user.stream().filter(u -> u.getRoles().contains(role.getId())).sorted().collect(Collectors.toList()); + return getStorage().getAllUsers().filter(u -> u.getRoles().contains(role.getId())).sorted().collect(Collectors.toList()); } private List getGroups(Role role) { - Collection groups = getStorage().getAllGroups(); - return groups.stream() + return getStorage().getAllGroups() .filter(g -> g.getRoles().contains(role.getId())) .sorted() .collect(Collectors.toList()); } public TableStatistics getTableStatistics(Table table) { - final NamespaceStorage storage = 
getDatasetRegistry().get(table.getDataset().getId()).getStorage(); + final NamespaceStorage storage = getDatasetRegistry().get(table.getDataset()).getStorage(); List imports = table.findImports(storage).collect(Collectors.toList()); final long entries = imports.stream().mapToLong(Import::getNumberOfEntries).sum(); @@ -229,10 +228,10 @@ public TableStatistics getTableStatistics(Table table) { .mapToLong(imp -> calculateCBlocksSizeBytes(imp, storage.getAllConcepts())) .sum(), imports, - storage.getAllConcepts().stream() + storage.getAllConcepts() .map(Concept::getConnectors) .flatMap(Collection::stream) - .filter(conn -> conn.getTable().equals(table)) + .filter(conn -> conn.getResolvedTableId().equals(table.getId())) .map(Connector::getConcept).collect(Collectors.toSet()) ); @@ -242,14 +241,14 @@ public DatasetRegistry getDatasetRegistry() { return adminProcessor.getDatasetRegistry(); } - public static long calculateCBlocksSizeBytes(Import imp, Collection> concepts) { + public static long calculateCBlocksSizeBytes(Import imp, Stream> concepts) { // CBlocks are created per (per Bucket) Import per Connector targeting this table // Since the overhead of a single CBlock is minor, we gloss over the fact, that there are multiple and assume it is only a single very large one. - return concepts.stream() + return concepts .filter(TreeConcept.class::isInstance) .flatMap(concept -> ((TreeConcept) concept).getConnectors().stream()) - .filter(con -> con.getTable().equals(imp.getTable())) + .filter(con -> con.getResolvedTableId().equals(imp.getTable())) .mapToLong(con -> { // Per event an int array is stored marking the path to the concept child. final double avgDepth = con.getConcept() @@ -264,7 +263,7 @@ public static long calculateCBlocksSizeBytes(Import imp, Collection new TableInfos( table.getId(), table.getName(), @@ -88,18 +88,17 @@ public View getDataset(@PathParam(DATASET) Dataset dataset) { table.findImports(namespace.getStorage()).mapToLong(Import::getNumberOfEntries).sum() )) .collect(Collectors.toList()), - namespace.getStorage().getAllConcepts(), + namespace.getStorage().getAllConcepts().toList(), // Total size of CBlocks namespace .getStorage().getTables() - .stream() .flatMap(table -> table.findImports(namespace.getStorage())) .mapToLong(imp -> calculateCBlocksSizeBytes( imp, namespace.getStorage().getAllConcepts() )) .sum(), // total size of entries - namespace.getStorage().getAllImports().stream().mapToLong(Import::estimateMemoryConsumption).sum() + namespace.getStorage().getAllImports().mapToLong(Import::estimateMemoryConsumption).sum() ) ); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptResource.java index ddd919ed27..7655cc7b13 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptResource.java @@ -4,14 +4,6 @@ import java.util.List; import java.util.stream.Collectors; - -import com.bakdata.conquery.apiv1.frontend.FrontendList; -import com.bakdata.conquery.io.jersey.ExtraMimeTypes; -import com.bakdata.conquery.models.auth.permissions.Ability; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; -import com.bakdata.conquery.resources.hierarchies.HAuthorized; -import com.fasterxml.jackson.annotation.JsonCreator; import jakarta.inject.Inject; import jakarta.servlet.http.HttpServletResponse; 
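Context for the stream-based storage API used above: accessors such as getAllUsers(), getAllConcepts(), getAllImports() and getTables() now hand out a java.util.stream.Stream instead of a Collection, and helpers like calculateCBlocksSizeBytes() accept a Stream directly. A Stream can be consumed only once, which is presumably why several call sites in this diff materialize it with .toList() before reusing the result. A minimal, self-contained sketch of that usage pattern, with hypothetical stand-in names rather than the real Conquery classes:

import java.util.List;
import java.util.stream.Stream;

// Hypothetical stand-in for the stream-returning storage accessors in this PR.
public class StreamApiSketch {

    // plays the role of storage.getAllUsers(): a fresh, single-use Stream per call
    static Stream<String> getAllUsers() {
        return Stream.of("alice", "bob");
    }

    public static void main(String[] args) {
        // one pass: consume the stream directly, as deleteRole()/deleteUser() do above
        getAllUsers().forEach(user -> System.out.println("notify " + user));

        // several passes: materialize once with toList(), then reuse the List
        List<String> users = getAllUsers().toList();
        System.out.println(users.size() + " users: " + users);
    }
}

The effect is that large result sets stay lazy by default and the decision to materialize moves to the caller.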
import jakarta.validation.constraints.NotNull; @@ -25,6 +17,14 @@ import jakarta.ws.rs.core.EntityTag; import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; + +import com.bakdata.conquery.apiv1.frontend.FrontendList; +import com.bakdata.conquery.io.jersey.ExtraMimeTypes; +import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; +import com.bakdata.conquery.resources.hierarchies.HAuthorized; +import com.fasterxml.jackson.annotation.JsonCreator; import lombok.Data; import lombok.RequiredArgsConstructor; import lombok.ToString; diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java index 908aa69235..71014612a8 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java @@ -13,6 +13,8 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import jakarta.inject.Inject; +import jakarta.validation.Validator; import com.bakdata.conquery.apiv1.IdLabel; import com.bakdata.conquery.apiv1.frontend.FrontendList; @@ -47,8 +49,6 @@ import com.google.common.collect.Iterators; import it.unimi.dsi.fastutil.objects.Object2LongMap; import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap; -import jakarta.inject.Inject; -import jakarta.validation.Validator; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -143,12 +143,12 @@ public FrontendPreviewConfig getEntityPreviewFrontendConfig(Dataset dataset) { return new FrontendPreviewConfig( previewConfig.getAllConnectors() .stream() - .map(id -> new FrontendPreviewConfig.Labelled(id.toString(), namespace.getCentralRegistry().resolve(id).getTable().getLabel())) + .map(id -> new FrontendPreviewConfig.Labelled(id.toString(), id.resolve().getResolvedTable().getLabel())) .collect(Collectors.toSet()), previewConfig.getDefaultConnectors() .stream() - .map(id -> new FrontendPreviewConfig.Labelled(id.toString(), namespace.getCentralRegistry().resolve(id).getTable().getLabel())) + .map(id -> new FrontendPreviewConfig.Labelled(id.toString(), id.resolve().getResolvedTable().getLabel())) .collect(Collectors.toSet()), previewConfig.resolveSearchFilters(), previewConfig.resolveSearchConcept() @@ -164,7 +164,7 @@ public ResolvedFilterValues resolveFilterValues(SelectFilter searchable, List // search in the full text engine final Set openSearchTerms = new HashSet<>(searchTerms); - final Namespace namespace = namespaces.get(searchable.getDataset().getId()); + final Namespace namespace = namespaces.get(searchable.getDataset()); final List out = new ArrayList<>(); @@ -230,7 +230,7 @@ public AutoCompleteResult autocompleteTextFilter( } private Cursor listAllValues(SelectFilter searchable) { - final Namespace namespace = namespaces.get(searchable.getDataset().getId()); + final Namespace namespace = namespaces.get(searchable.getDataset()); /* Don't worry, I am as confused as you are! For some reason, flatMapped streams in conjunction with distinct will be evaluated full before further operation. 
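The ConceptsProcessor hunks above replace registry round-trips (namespace.getCentralRegistry().resolve(id)) and eager object references with lightweight ids that resolve themselves at the point of use (id.resolve(), or searchable.getDataset() passed straight into namespaces.get(...)). A self-contained sketch of that id-handle pattern, using made-up miniature types rather than the real ConnectorId/Table classes:

import java.util.Map;
import java.util.Objects;

// Made-up miniature of the id-reference pattern: callers hold a small id value and
// resolve it on demand instead of carrying the referenced object around.
public class IdRefSketch {

    record TableId(String name) {}

    record Table(TableId id, String label) {}

    // stands in for the storage/registry that backs resolve() in the real code
    static final Map<TableId, Table> STORE =
            Map.of(new TableId("stats"), new Table(new TableId("stats"), "Statistics"));

    static Table resolve(TableId id) {
        return Objects.requireNonNull(STORE.get(id), "unknown table: " + id);
    }

    public static void main(String[] args) {
        TableId ref = new TableId("stats");        // cheap to store, serialize and compare
        System.out.println(resolve(ref).label());  // resolution deferred to the use site
    }
}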
@@ -255,7 +255,7 @@ private Cursor listAllValues(SelectFilter searchable) { } private int countAllValues(SelectFilter searchable) { - final Namespace namespace = namespaces.get(searchable.getDataset().getId()); + final Namespace namespace = namespaces.get(searchable.getDataset()); return namespace.getFilterSearch().getTotal(searchable); } @@ -265,7 +265,7 @@ private int countAllValues(SelectFilter searchable) { * Is used by the serach cache to load missing items */ private List autocompleteTextFilter(SelectFilter searchable, String text) { - final Namespace namespace = namespaces.get(searchable.getDataset().getId()); + final Namespace namespace = namespaces.get(searchable.getDataset()); // Note that FEValues is equals/hashcode only on value: // The different sources might contain duplicate FEValue#values which we exploit: diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/EntityPreviewRequest.java b/backend/src/main/java/com/bakdata/conquery/resources/api/EntityPreviewRequest.java index f7b9b5f40f..895e5a220c 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/EntityPreviewRequest.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/EntityPreviewRequest.java @@ -2,12 +2,11 @@ import java.time.LocalDate; import java.util.List; +import jakarta.validation.constraints.NotEmpty; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.common.Range; -import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.fasterxml.jackson.annotation.JsonCreator; -import jakarta.validation.constraints.NotEmpty; import lombok.AllArgsConstructor; import lombok.Data; @@ -17,9 +16,8 @@ public class EntityPreviewRequest { private String idKind; //TODO I think ID is fallback, but i dont currently know. 
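EntityPreviewRequest shows the same shift at the API boundary: the imports above swap NsIdRefCollection for ConnectorId, and the sources field just below becomes a list of plain ids, so binding the payload no longer requires storage access. A trimmed-down, hypothetical sketch of such a DTO, assuming a Jackson version with record support; the real request class looks different:

import java.util.List;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical DTO shapes, not the real Conquery classes: the point is only that the
// payload binds to plain id values and resolution against storage happens later.
public class IdPayloadSketch {

    record ConnectorIdStub(String value) {}

    record PreviewRequestStub(String entityId, List<ConnectorIdStub> sources) {}

    public static void main(String[] args) throws Exception {
        String json = "{\"entityId\":\"42\",\"sources\":[{\"value\":\"dataset.concept.connector\"}]}";
        PreviewRequestStub request = new ObjectMapper().readValue(json, PreviewRequestStub.class);
        System.out.println(request.sources()); // ids only; nothing was resolved during deserialization
    }
}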
private final String entityId; private final Range time; - @NsIdRefCollection @NotEmpty - private final List sources; + private final List sources; //TODO uncomment, when frontend is adapted to support this // @ValidationMethod(message = "Time must be closed.") diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java index 58ad274147..76e6aba671 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java @@ -5,7 +5,6 @@ import java.util.List; import java.util.Optional; import java.util.OptionalInt; - import jakarta.inject.Inject; import jakarta.validation.Valid; import jakarta.ws.rs.BadRequestException; @@ -44,7 +43,7 @@ public class FilterResource extends HAuthorized { @POST @Path("resolve") public ConceptsProcessor.ResolvedFilterValues resolveFilterValues(FilterValues filterValues) { - subject.isPermitted(filter.getDataset(), Ability.READ); + subject.isPermitted(filter.getDataset().resolve(), Ability.READ); subject.isPermitted(filter.getConnector().findConcept(), Ability.READ); return processor.resolveFilterValues((SelectFilter) filter, filterValues.values()); @@ -54,7 +53,7 @@ public ConceptsProcessor.ResolvedFilterValues resolveFilterValues(FilterValues f @POST @Path("autocomplete") public ConceptsProcessor.AutoCompleteResult autocompleteTextFilter(@Valid FilterResource.AutocompleteRequest request) { - subject.isPermitted(filter.getDataset(), Ability.READ); + subject.isPermitted(filter.getDataset().resolve(), Ability.READ); subject.isPermitted(filter.getConnector().findConcept(), Ability.READ); if (!(filter instanceof SelectFilter)) { diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/QueryResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/QueryResource.java index facc8705ee..9bbcbeb664 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/QueryResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/QueryResource.java @@ -112,7 +112,7 @@ public void cancel(@Auth Subject subject, @PathParam(QUERY) ManagedExecution que subject.authorize(query.getDataset(), Ability.READ); subject.authorize(query, Ability.CANCEL); - processor.cancel(subject, query.getDataset(), query); + processor.cancel(subject, query); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultArrowResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultArrowResource.java index c99e4b5bee..ad732fe173 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultArrowResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultArrowResource.java @@ -7,14 +7,6 @@ import java.net.URL; import java.util.Optional; import java.util.OptionalLong; - -import com.bakdata.conquery.apiv1.AdditionalMediaTypes; -import com.bakdata.conquery.io.result.arrow.ResultArrowProcessor; -import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.ResourceConstants; -import io.dropwizard.auth.Auth; import jakarta.inject.Inject; import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; @@ -26,6 +18,14 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import 
jakarta.ws.rs.core.UriBuilder; + +import com.bakdata.conquery.apiv1.AdditionalMediaTypes; +import com.bakdata.conquery.io.result.arrow.ResultArrowProcessor; +import com.bakdata.conquery.models.auth.entities.Subject; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.resources.ResourceConstants; +import io.dropwizard.auth.Auth; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -48,7 +48,7 @@ public Response getFile( ) { checkSingleTableResult(query); - log.info("Result for {} download on dataset {} by subject {} ({}).", query.getId(), query.getDataset().getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", query.getId(), query.getDataset(), subject.getId(), subject.getName()); return processor.createResultFile(subject, query, pretty, limit); } @@ -84,7 +84,7 @@ public Response getStream( @QueryParam("limit") OptionalLong limit ) { checkSingleTableResult(execution); - log.info("Result for {} download on dataset {} by subject {} ({}).", execution, execution.getDataset().getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", execution, execution.getDataset(), subject.getId(), subject.getName()); return processor.createResultStream(subject, execution, pretty.orElse(false), limit); } } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultCsvResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultCsvResource.java index caf4b1d33c..1e0b522e4f 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultCsvResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultCsvResource.java @@ -7,14 +7,6 @@ import java.net.MalformedURLException; import java.net.URL; import java.util.OptionalLong; - -import com.bakdata.conquery.apiv1.AdditionalMediaTypes; -import com.bakdata.conquery.io.result.csv.ResultCsvProcessor; -import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.ResourceConstants; -import io.dropwizard.auth.Auth; import jakarta.inject.Inject; import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; @@ -26,6 +18,14 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; + +import com.bakdata.conquery.apiv1.AdditionalMediaTypes; +import com.bakdata.conquery.io.result.csv.ResultCsvProcessor; +import com.bakdata.conquery.models.auth.entities.Subject; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.resources.ResourceConstants; +import io.dropwizard.auth.Auth; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -59,7 +59,7 @@ public Response getAsCsv( ) { checkSingleTableResult(execution); - log.info("Result for {} download on dataset {} by subject {} ({}).", execution, execution.getDataset().getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", execution, execution.getDataset(), subject.getId(), subject.getName()); return processor.createResult(subject, (E) execution, pretty, determineCharset(userAgent, queryCharset), limit); } diff --git 
a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultExcelResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultExcelResource.java index 5fa2bdf217..eab229807b 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultExcelResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultExcelResource.java @@ -6,14 +6,6 @@ import java.net.MalformedURLException; import java.net.URL; import java.util.OptionalLong; - -import com.bakdata.conquery.apiv1.AdditionalMediaTypes; -import com.bakdata.conquery.io.result.excel.ResultExcelProcessor; -import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.ResourceConstants; -import io.dropwizard.auth.Auth; import jakarta.inject.Inject; import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; @@ -25,6 +17,14 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; + +import com.bakdata.conquery.apiv1.AdditionalMediaTypes; +import com.bakdata.conquery.io.result.excel.ResultExcelProcessor; +import com.bakdata.conquery.models.auth.entities.Subject; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.resources.ResourceConstants; +import io.dropwizard.auth.Auth; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -47,8 +47,7 @@ public Response get( @QueryParam("pretty") @DefaultValue("true") boolean pretty, @QueryParam("limit") OptionalLong limit) { checkSingleTableResult(execution); - log.info("Result for {} download on dataset {} by subject {} ({}).", execution.getId(), execution.getDataset() - .getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", execution.getId(), execution.getDataset(), subject.getId(), subject.getName()); return processor.createResult(subject, (E) execution, pretty, limit); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultParquetResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultParquetResource.java index 74709908e7..55839554e5 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultParquetResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultParquetResource.java @@ -7,13 +7,6 @@ import java.net.MalformedURLException; import java.net.URL; import java.util.OptionalLong; - -import com.bakdata.conquery.io.result.parquet.ResultParquetProcessor; -import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.ResourceConstants; -import io.dropwizard.auth.Auth; import jakarta.inject.Inject; import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; @@ -25,6 +18,13 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; + +import com.bakdata.conquery.io.result.parquet.ResultParquetProcessor; +import com.bakdata.conquery.models.auth.entities.Subject; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.resources.ResourceConstants; +import 
io.dropwizard.auth.Auth; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -58,7 +58,7 @@ public Response getFile( @QueryParam("limit") OptionalLong limit) { checkSingleTableResult(execution); - log.info("Result for {} download on dataset {} by subject {} ({}).", execution.getId(), execution.getDataset().getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", execution.getId(), execution.getDataset(), subject.getId(), subject.getName()); return processor.createResultFile(subject, execution, pretty, limit); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HConnectors.java b/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HConnectors.java index 792779e24e..87f1059e4a 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HConnectors.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HConnectors.java @@ -7,8 +7,8 @@ import jakarta.ws.rs.Path; import jakarta.ws.rs.PathParam; -import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Connector; import com.google.common.collect.MoreCollectors; import lombok.Getter; import lombok.Setter; @@ -28,7 +28,7 @@ public void init() { super.init(); connector = concept.getConnectors() .stream() - .filter(con -> con.getTable().equals(table)) + .filter(con -> con.getResolvedTable().equals(table)) .collect(MoreCollectors.toOptional()) .orElseThrow(() -> new NotFoundException(String.format("Could not find Connector for Table[%s] in Concept[%s]", connector, concept))); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HDatasets.java b/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HDatasets.java index dfeb09fe1a..c5942c8db4 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HDatasets.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HDatasets.java @@ -20,7 +20,7 @@ public abstract class HDatasets extends HAuthorized { @Inject - protected DatasetRegistry datasetRegistry; + protected DatasetRegistry datasetRegistry; @PathParam(DATASET) private Dataset dataset; diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java index a8fa813a3f..1bc33168cc 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java @@ -10,10 +10,14 @@ import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; +import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.concept.ConceptColumnSelect; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeChild; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeNode; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.bakdata.conquery.models.query.queryplan.DateAggregationAction; import com.bakdata.conquery.sql.conversion.NodeConverter; import com.bakdata.conquery.sql.conversion.SharedAliases; @@ -57,38 +61,6 @@ public CQConceptConverter() { ); } - @Override - public Class getConversionClass() { - return CQConcept.class; - } - - @Override - public ConversionContext convert(CQConcept cqConcept, ConversionContext context) { - - TablePath tablePath = new TablePath(cqConcept, context); - List convertedCQTables = cqConcept.getTables().stream() - .flatMap(cqTable -> convertCqTable(tablePath, cqConcept, cqTable, context).stream()) - .toList(); - - QueryStep joinedStep = QueryStepJoiner.joinSteps(convertedCQTables, ConqueryJoinType.OUTER_JOIN, DateAggregationAction.MERGE, context); - QueryStep lastConceptStep = finishConceptConversion(joinedStep, cqConcept, tablePath, context); - return context.withQueryStep(lastConceptStep); - } - - private Optional convertCqTable(TablePath tablePath, CQConcept cqConcept, CQTable cqTable, ConversionContext context) { - CQTableContext tableContext = createTableContext(tablePath, cqConcept, cqTable, context); - Optional lastQueryStep = Optional.empty(); - for (ConnectorCte queryStep : connectorCTEs) { - Optional convertedStep = queryStep.convert(tableContext, lastQueryStep); - if (convertedStep.isEmpty()) { - continue; - } - lastQueryStep = convertedStep; - tableContext = tableContext.withPrevious(lastQueryStep.get()); - } - return lastQueryStep; - } - private static QueryStep finishConceptConversion(QueryStep predecessor, CQConcept cqConcept, TablePath tablePath, ConversionContext context) { ConceptSqlTables universalTables = tablePath.createConceptTables(predecessor); @@ -97,9 +69,12 @@ private static QueryStep finishConceptConversion(QueryStep predecessor, CQConcep Optional validityDate = predecessorSelects.getValidityDate(); SqlIdColumns ids = predecessorSelects.getIds(); - SelectContext selectContext = SelectContext.create(cqConcept, ids, validityDate, universalTables, context); + SelectContext selectContext = SelectContext.create(ids, validityDate, universalTables, context); List converted = cqConcept.getSelects().stream() - .map(select -> select.createConverter().conceptSelect(select, selectContext)) + .map(selectId -> { + Select select = selectId.resolve(); + return select.createConverter().conceptSelect(select, selectContext); + }) .toList(); List queriesToJoin = new ArrayList<>(); @@ -135,61 +110,30 @@ private static QueryStep finishConceptConversion(QueryStep predecessor, CQConcep .build(); } - private CQTableContext createTableContext(TablePath tablePath, CQConcept cqConcept, CQTable cqTable, ConversionContext conversionContext) { - - SqlIdColumns ids = convertIds(cqConcept, cqTable, conversionContext); - ConnectorSqlTables connectorTables = tablePath.getConnectorTables(cqTable); - Optional tablesValidityDate = convertValidityDate(cqTable, connectorTables.getLabel(), conversionContext); - - // convert filters - SqlFunctionProvider functionProvider = conversionContext.getSqlDialect().getFunctionProvider(); - List allSqlFiltersForTable = new ArrayList<>(); - cqTable.getFilters().stream() - .map(filterValue -> filterValue.convertToSqlFilter(ids, conversionContext, connectorTables)) - .forEach(allSqlFiltersForTable::add); - collectConditionFilters(cqConcept.getElements(), cqTable, functionProvider).ifPresent(allSqlFiltersForTable::add); - getDateRestriction(conversionContext, 
tablesValidityDate).ifPresent(allSqlFiltersForTable::add); - - // convert selects - SelectContext selectContext = SelectContext.create(cqTable, ids, tablesValidityDate, connectorTables, conversionContext); - List allSelectsForTable = new ArrayList<>(); - ConnectorSqlSelects conceptColumnSelect = createConceptColumnConnectorSqlSelects(cqConcept, selectContext); - allSelectsForTable.add(conceptColumnSelect); - cqTable.getSelects().stream().map(select -> select.createConverter().connectorSelect(select, selectContext)).forEach(allSelectsForTable::add); - - return CQTableContext.builder() - .ids(ids) - .validityDate(tablesValidityDate) - .sqlSelects(allSelectsForTable) - .sqlFilters(allSqlFiltersForTable) - .connectorTables(connectorTables) - .conversionContext(conversionContext) - .build(); - } - - private static SqlIdColumns convertIds(CQConcept cqConcept, CQTable cqTable, ConversionContext conversionContext) { + public static SqlIdColumns convertIds(CQConcept cqConcept, CQTable cqTable, ConversionContext conversionContext) { - Field primaryColumn = TablePrimaryColumnUtil.findPrimaryColumn(cqTable.getConnector().getTable(), conversionContext.getConfig()); + Table table = cqTable.getConnector().resolve().getResolvedTable(); + Field primaryColumn = TablePrimaryColumnUtil.findPrimaryColumn(table, conversionContext.getConfig()); if (cqConcept.isExcludeFromSecondaryId() || conversionContext.getSecondaryIdDescription() == null || !cqTable.hasSelectedSecondaryId(conversionContext.getSecondaryIdDescription()) ) { - return new SqlIdColumns(primaryColumn); + return new SqlIdColumns(primaryColumn).withAlias(); } - Column secondaryIdColumn = cqTable.getConnector().getTable().findSecondaryIdColumn(conversionContext.getSecondaryIdDescription()); + Column secondaryIdColumn = table.findSecondaryIdColumn(conversionContext.getSecondaryIdDescription().getId()); Preconditions.checkArgument( secondaryIdColumn != null, "Expecting Table %s to have a matching secondary id for %s".formatted( - cqTable.getConnector().getTable(), + table, conversionContext.getSecondaryIdDescription() ) ); - Field secondaryId = DSL.field(DSL.name(secondaryIdColumn.getName())); - return new SqlIdColumns(primaryColumn, secondaryId); + Field secondaryId = DSL.field(DSL.name(table.getName(), secondaryIdColumn.getName())); + return new SqlIdColumns(primaryColumn, secondaryId).withAlias(); } private static Optional convertValidityDate(CQTable cqTable, String connectorLabel, ConversionContext context) { @@ -242,7 +186,7 @@ private static Stream collectConditions(CQTable cqTable, Concept if (!(conceptElement instanceof ConceptTreeChild child)) { return Stream.empty(); } - WhereCondition childCondition = child.getCondition().convertToSqlCondition(CTConditionContext.create(cqTable.getConnector(), functionProvider)); + WhereCondition childCondition = child.getCondition().convertToSqlCondition(CTConditionContext.create(cqTable.getConnector().resolve(), functionProvider)); return Stream.concat( collectConditions(cqTable, child.getParent(), functionProvider), Stream.of(childCondition) @@ -250,8 +194,8 @@ private static Stream collectConditions(CQTable cqTable, Concept } private static Optional convertConnectorCondition(CQTable cqTable, SqlFunctionProvider functionProvider) { - return Optional.ofNullable(cqTable.getConnector().getCondition()) - .map(condition -> condition.convertToSqlCondition(CTConditionContext.create(cqTable.getConnector(), functionProvider))); + return Optional.ofNullable(cqTable.getConnector().resolve().getCondition()) + 
.map(condition -> condition.convertToSqlCondition(CTConditionContext.create(cqTable.getConnector().resolve(), functionProvider))); } private static Optional getDateRestriction(ConversionContext context, Optional validityDate) { @@ -277,10 +221,75 @@ private static Optional getDateRestriction(ConversionContext context private static ConnectorSqlSelects createConceptColumnConnectorSqlSelects(CQConcept cqConcept, SelectContext selectContext) { return cqConcept.getSelects().stream() + .map(SelectId::resolve) .filter(select -> select instanceof ConceptColumnSelect) .findFirst() .map(select -> select.createConverter().connectorSelect(select, selectContext)) .orElse(ConnectorSqlSelects.none()); } + @Override + public Class getConversionClass() { + return CQConcept.class; + } + + @Override + public ConversionContext convert(CQConcept cqConcept, ConversionContext context) { + + TablePath tablePath = new TablePath(cqConcept, context); + List convertedCQTables = cqConcept.getTables().stream() + .flatMap(cqTable -> convertCqTable(tablePath, cqConcept, cqTable, context).stream()) + .toList(); + + QueryStep joinedStep = QueryStepJoiner.joinSteps(convertedCQTables, ConqueryJoinType.OUTER_JOIN, DateAggregationAction.MERGE, context); + QueryStep lastConceptStep = finishConceptConversion(joinedStep, cqConcept, tablePath, context); + return context.withQueryStep(lastConceptStep); + } + + private Optional convertCqTable(TablePath tablePath, CQConcept cqConcept, CQTable cqTable, ConversionContext context) { + CQTableContext tableContext = createTableContext(tablePath, cqConcept, cqTable, context); + Optional lastQueryStep = Optional.empty(); + for (ConnectorCte queryStep : connectorCTEs) { + Optional convertedStep = queryStep.convert(tableContext, lastQueryStep); + if (convertedStep.isEmpty()) { + continue; + } + lastQueryStep = convertedStep; + tableContext = tableContext.withPrevious(lastQueryStep.get()); + } + return lastQueryStep; + } + + private CQTableContext createTableContext(TablePath tablePath, CQConcept cqConcept, CQTable cqTable, ConversionContext conversionContext) { + + SqlIdColumns ids = convertIds(cqConcept, cqTable, conversionContext); + ConnectorSqlTables connectorTables = tablePath.getConnectorTables(cqTable); + Optional tablesValidityDate = convertValidityDate(cqTable, connectorTables.getLabel(), conversionContext); + + // convert filters + SqlFunctionProvider functionProvider = conversionContext.getSqlDialect().getFunctionProvider(); + List allSqlFiltersForTable = new ArrayList<>(); + cqTable.getFilters().stream() + .map(filterValue -> filterValue.convertToSqlFilter(ids, conversionContext, connectorTables)) + .forEach(allSqlFiltersForTable::add); + collectConditionFilters(cqConcept.getElements().stream().>map(ConceptElementId::resolve).toList(), cqTable, functionProvider).ifPresent(allSqlFiltersForTable::add); + getDateRestriction(conversionContext, tablesValidityDate).ifPresent(allSqlFiltersForTable::add); + + // convert selects + SelectContext selectContext = SelectContext.create(ids, tablesValidityDate, connectorTables, conversionContext); + List allSelectsForTable = new ArrayList<>(); + ConnectorSqlSelects conceptColumnSelect = createConceptColumnConnectorSqlSelects(cqConcept, selectContext); + allSelectsForTable.add(conceptColumnSelect); + cqTable.getSelects().stream().map(SelectId::resolve).map(select -> select.createConverter().connectorSelect(select, selectContext)).forEach(allSelectsForTable::add); + + return CQTableContext.builder() + .ids(ids) + .validityDate(tablesValidityDate) 
+ .sqlSelects(allSelectsForTable) + .sqlFilters(allSqlFiltersForTable) + .connectorTables(connectorTables) + .conversionContext(conversionContext) + .build(); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CTConditionContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CTConditionContext.java index 114692d118..4c53669a2a 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CTConditionContext.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CTConditionContext.java @@ -15,8 +15,8 @@ public class CTConditionContext { public static CTConditionContext create(Connector connector, SqlFunctionProvider functionProvider) { return new CTConditionContext( - connector.getTable(), - connector.getColumn(), + connector.getResolvedTable(), + connector.getColumn() != null ? connector.getColumn().resolve() : null, functionProvider ); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingCte.java index c660248531..b8cc941a17 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingCte.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingCte.java @@ -61,7 +61,9 @@ private static QueryStep.QueryStepBuilder joinWithStratificationTable( Selects stratificationSelects = stratificationTableCte.getQualifiedSelects(); SqlIdColumns stratificationIds = stratificationSelects.getIds(); - SqlIdColumns rootTableIds = tableContext.getIds(); + SqlIdColumns rootTableIds = tableContext.getIds().getPredecessor().orElseThrow(() -> new IllegalStateException( + "Id's should have been qualified during conversion and thus have a predecessor") + ); List idConditions = stratificationIds.join(rootTableIds); // join full stratification with connector table on all ID's from prerequisite query diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/TablePath.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/TablePath.java index da4449be71..2c7a1bdea5 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/TablePath.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/TablePath.java @@ -1,6 +1,10 @@ package com.bakdata.conquery.sql.conversion.cqelement.concept; -import static com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep.*; +import static com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep.EVENT_FILTER; +import static com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep.INTERVAL_PACKING_SELECTS; +import static com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep.MANDATORY_STEPS; +import static com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep.UNIVERSAL_SELECTS; +import static com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep.UNNEST_DATE; import static com.bakdata.conquery.sql.conversion.cqelement.intervalpacking.IntervalPackingCteStep.INTERVAL_COMPLETE; import java.util.HashMap; @@ -11,6 +15,7 @@ import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import 
com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; import com.bakdata.conquery.sql.conversion.cqelement.intervalpacking.IntervalPackingCteStep; import com.bakdata.conquery.sql.conversion.model.CteStep; @@ -45,13 +50,13 @@ public ConnectorSqlTables getConnectorTables(CQTable cqTable) { private static ConnectorSqlTables createConnectorTables(CQConcept cqConcept, CQTable cqTable, ConversionContext context) { - String conceptConnectorLabel = context.getNameGenerator().conceptConnectorName(cqConcept, cqTable.getConnector(), context.getSqlPrintSettings() + String conceptConnectorLabel = context.getNameGenerator().conceptConnectorName(cqConcept, cqTable.getConnector().resolve(), context.getSqlPrintSettings() .getLocale()); TablePathInfo tableInfo = collectConnectorTables(cqConcept, cqTable, context); Map cteNameMap = CteStep.createCteNameMap(tableInfo.getMappings().keySet(), conceptConnectorLabel, context.getNameGenerator()); return new ConnectorSqlTables( - cqTable.getConnector(), + cqTable.getConnector().resolve(), conceptConnectorLabel, tableInfo.getRootTable(), cteNameMap, @@ -78,10 +83,10 @@ public ConceptSqlTables createConceptTables(QueryStep predecessor) { private static TablePathInfo collectConnectorTables(CQConcept cqConcept, CQTable cqTable, ConversionContext context) { TablePathInfo tableInfo = new TablePathInfo(); - tableInfo.setRootTable(cqTable.getConnector().getTable().getName()); + tableInfo.setRootTable(cqTable.getConnector().resolve().getResolvedTableId().getTable()); tableInfo.addWithDefaultMapping(MANDATORY_STEPS); - boolean eventDateSelectsPresent = cqTable.getSelects().stream().anyMatch(Select::isEventDateSelect); + boolean eventDateSelectsPresent = cqTable.getSelects().stream().map(SelectId::resolve).anyMatch(Select::isEventDateSelect); // no validity date aggregation possible nor necessary if (cqTable.findValidityDate() == null || (!cqConcept.isAggregateEventDates() && !eventDateSelectsPresent)) { return tableInfo; @@ -118,7 +123,7 @@ private TablePathInfo collectConceptTables(QueryStep predecessor) { tableInfo.addRootTableMapping(UNIVERSAL_SELECTS); // no event date selects present - if (cqConcept.getSelects().stream().noneMatch(Select::isEventDateSelect)) { + if (cqConcept.getSelects().stream().map(SelectId::resolve).noneMatch(Select::isEventDateSelect)) { return tableInfo; } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java index f765f7cd84..1dcd4286cc 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java @@ -132,9 +132,10 @@ public ColumnDateRange forValidityDate(ValidityDate validityDate, CDateRange dat public ColumnDateRange forArbitraryDateRange(DaterangeSelectOrFilter daterangeSelectOrFilter) { String tableName = daterangeSelectOrFilter.getTable().getName(); if (daterangeSelectOrFilter.getEndColumn() != null) { - return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn(), daterangeSelectOrFilter.getEndColumn()); + return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn().resolve(), daterangeSelectOrFilter.getEndColumn().resolve()); } - return ofStartAndEnd(tableName, daterangeSelectOrFilter.getColumn(), daterangeSelectOrFilter.getColumn()); + 
Column column = daterangeSelectOrFilter.getColumn().resolve(); + return ofStartAndEnd(tableName, column, column); } @Override @@ -313,19 +314,20 @@ private ColumnDateRange toColumnDateRange(CDateRange dateRestriction) { private ColumnDateRange toColumnDateRange(ValidityDate validityDate) { - String tableName = validityDate.getConnector().getTable().getName(); + String tableName = validityDate.getConnector().getResolvedTableId().getTable(); Column startColumn; Column endColumn; // if no end column is present, the only existing column is both start and end of the date range if (validityDate.getEndColumn() == null) { - startColumn = validityDate.getColumn(); - endColumn = validityDate.getColumn(); + Column column = validityDate.getColumn().resolve(); + startColumn = column; + endColumn = column; } else { - startColumn = validityDate.getStartColumn(); - endColumn = validityDate.getEndColumn(); + startColumn = validityDate.getStartColumn().resolve(); + endColumn = validityDate.getEndColumn().resolve(); } return ofStartAndEnd(tableName, startColumn, endColumn); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java index 96ed3e3f7e..8b1d166155 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java @@ -123,9 +123,9 @@ public ColumnDateRange forValidityDate(ValidityDate validityDate, CDateRange dat public ColumnDateRange forArbitraryDateRange(DaterangeSelectOrFilter daterangeSelectOrFilter) { String tableName = daterangeSelectOrFilter.getTable().getName(); if (daterangeSelectOrFilter.getEndColumn() != null) { - return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn(), daterangeSelectOrFilter.getEndColumn()); + return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn().resolve(), daterangeSelectOrFilter.getEndColumn().resolve()); } - return ofSingleColumn(tableName, daterangeSelectOrFilter.getColumn()); + return ofSingleColumn(tableName, daterangeSelectOrFilter.getColumn().resolve()); } @Override @@ -310,11 +310,11 @@ private ColumnDateRange toColumnDateRange(CDateRange dateRestriction) { } private ColumnDateRange toColumnDateRange(ValidityDate validityDate) { - String tableName = validityDate.getConnector().getTable().getName(); + String tableName = validityDate.getConnector().getResolvedTableId().getTable(); if (validityDate.getEndColumn() != null) { - return ofStartAndEnd(tableName, validityDate.getStartColumn(), validityDate.getEndColumn()); + return ofStartAndEnd(tableName, validityDate.getStartColumn().resolve(), validityDate.getEndColumn().resolve()); } - return ofSingleColumn(tableName, validityDate.getColumn()); + return ofSingleColumn(tableName, validityDate.getColumn().resolve()); } private ColumnDateRange ofSingleColumn(String tableName, Column column) { diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/SqlIdColumns.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/SqlIdColumns.java index 20f4a84f8b..f80ebf8d35 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/SqlIdColumns.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/SqlIdColumns.java @@ -11,6 +11,7 @@ import com.bakdata.conquery.models.forms.util.Resolution; import 
com.bakdata.conquery.sql.conversion.SharedAliases; +import lombok.AllArgsConstructor; import lombok.Getter; import lombok.experimental.SuperBuilder; import org.jooq.Condition; @@ -18,6 +19,7 @@ import org.jooq.impl.DSL; @SuperBuilder +@AllArgsConstructor public class SqlIdColumns implements Qualifiable { @Getter @@ -26,21 +28,40 @@ public class SqlIdColumns implements Qualifiable { @Nullable private final Field secondaryId; + @Nullable + private final SqlIdColumns predecessor; + public SqlIdColumns(Field primaryColumn, Field secondaryId) { this.primaryColumn = primaryColumn; this.secondaryId = secondaryId; + this.predecessor = null; } public SqlIdColumns(Field primaryColumn) { this.primaryColumn = primaryColumn; this.secondaryId = null; + this.predecessor = null; + } + + public SqlIdColumns withAlias() { + if (this.secondaryId == null) { + return new SqlIdColumns(this.primaryColumn.as(SharedAliases.PRIMARY_COLUMN.getAlias()), null, this); + } + return new SqlIdColumns( + this.primaryColumn.as(SharedAliases.PRIMARY_COLUMN.getAlias()), + this.secondaryId.as(SharedAliases.SECONDARY_ID.getAlias()), + this + ); } @Override public SqlIdColumns qualify(String qualifier) { Field primaryColumn = QualifyingUtil.qualify(this.primaryColumn, qualifier); - Field secondaryId = this.secondaryId != null ? QualifyingUtil.qualify(this.secondaryId, qualifier) : null; - return new SqlIdColumns(primaryColumn, secondaryId); + if (secondaryId == null) { + return new SqlIdColumns(primaryColumn, null, this); + } + Field secondaryId = QualifyingUtil.qualify(this.secondaryId, qualifier); + return new SqlIdColumns(primaryColumn, secondaryId, this); } public SqlIdColumns withAbsoluteStratification(Resolution resolution, Field index) { @@ -73,6 +94,10 @@ public Optional> getSecondaryId() { return Optional.ofNullable(this.secondaryId); } + public Optional getPredecessor() { + return Optional.ofNullable(this.predecessor); + } + public boolean isWithStratification() { return false; } @@ -110,10 +135,10 @@ public SqlIdColumns coalesce(List selectsIds) { }); Field coalescedPrimaryColumn = coalesceFields(primaryColumns).as(SharedAliases.PRIMARY_COLUMN.getAlias()); - Field coalescedSecondaryIds = !secondaryIds.isEmpty() - ? coalesceFields(secondaryIds).as(SharedAliases.SECONDARY_ID.getAlias()) - : null; - + if (secondaryIds.isEmpty()) { + return new SqlIdColumns(coalescedPrimaryColumn); + } + Field coalescedSecondaryIds = coalesceFields(secondaryIds).as(SharedAliases.SECONDARY_ID.getAlias()); return new SqlIdColumns(coalescedPrimaryColumn, coalescedSecondaryIds); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountQuartersSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountQuartersSqlAggregator.java index 6c8e6fd6a7..664abdbe64 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountQuartersSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountQuartersSqlAggregator.java @@ -42,14 +42,14 @@ public ConnectorSqlSelects connectorSelect(CountQuartersSelect countQuartersSele CommonAggregationSelect countAggregationSelect; if (countQuartersSelect.isSingleColumnDaterange()) { - Column countColumn = countQuartersSelect.getColumn(); + Column countColumn = countQuartersSelect.getColumn().resolve(); countAggregationSelect = countColumn.getType() == MajorTypeId.DATE_RANGE ? 
createSingleDaterangeColumnAggregationSelect(countColumn, alias, tables, functionProvider, stratificationFunctions) : createSingleDateColumnAggregationSelect(countColumn, alias, tables, functionProvider); } else { - Column startColumn = countQuartersSelect.getStartColumn(); - Column endColumn = countQuartersSelect.getEndColumn(); + Column startColumn = countQuartersSelect.getStartColumn().resolve(); + Column endColumn = countQuartersSelect.getEndColumn().resolve(); countAggregationSelect = createTwoDateColumnAggregationSelect(startColumn, endColumn, alias, tables, functionProvider, stratificationFunctions); } @@ -73,14 +73,14 @@ public SqlFilters convertToSqlFilter(CountQuartersFilter countQuartersFilter, Fi CommonAggregationSelect countAggregationSelect; if (countQuartersFilter.isSingleColumnDaterange()) { - Column countColumn = countQuartersFilter.getColumn(); + Column countColumn = countQuartersFilter.getColumn().resolve(); countAggregationSelect = countColumn.getType() == MajorTypeId.DATE_RANGE ? createSingleDaterangeColumnAggregationSelect(countColumn, alias, tables, functionProvider, stratificationFunctions) : createSingleDateColumnAggregationSelect(countColumn, alias, tables, functionProvider); } else { - Column startColumn = countQuartersFilter.getStartColumn(); - Column endColumn = countQuartersFilter.getEndColumn(); + Column startColumn = countQuartersFilter.getStartColumn().resolve(); + Column endColumn = countQuartersFilter.getEndColumn().resolve(); countAggregationSelect = createTwoDateColumnAggregationSelect(startColumn, endColumn, alias, tables, functionProvider, stratificationFunctions); } ConnectorSqlSelects selects = ConnectorSqlSelects.builder() diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountSqlAggregator.java index 501340d79f..9e519583e2 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountSqlAggregator.java @@ -39,7 +39,7 @@ public ConnectorSqlSelects connectorSelect(CountSelect countSelect, SelectContex ConnectorSqlTables tables = selectContext.getTables(); CountType countType = CountType.fromBoolean(countSelect.isDistinct()); - Column countColumn = countSelect.getColumn(); + Column countColumn = countSelect.getColumn().resolve(); String alias = selectContext.getNameGenerator().selectName(countSelect); CommonAggregationSelect countAggregationSelect = createCountAggregationSelect(countColumn, countType, alias, tables); @@ -59,7 +59,7 @@ public SqlFilters convertToSqlFilter(CountFilter countFilter, FilterContext countAggregationSelect = createCountAggregationSelect(countColumn, countType, alias, tables); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/DateDistanceSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/DateDistanceSqlAggregator.java index ff7aba4043..c72832e7da 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/DateDistanceSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/DateDistanceSqlAggregator.java @@ -37,7 +37,7 @@ public class DateDistanceSqlAggregator implements SelectConverter selectContext) { - Column column = select.getColumn(); + Column column = select.getColumn().resolve(); String alias = 
selectContext.getNameGenerator().selectName(select); ConnectorSqlTables tables = selectContext.getTables(); ConversionContext conversionContext = selectContext.getConversionContext(); @@ -60,7 +60,7 @@ public ConnectorSqlSelects connectorSelect(DateDistanceSelect select, SelectCont @Override public SqlFilters convertToSqlFilter(DateDistanceFilter filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String alias = filterContext.getNameGenerator().selectName(filter); ConnectorSqlTables tables = filterContext.getTables(); ConversionContext conversionContext = filterContext.getConversionContext(); @@ -80,7 +80,7 @@ public SqlFilters convertToSqlFilter(DateDistanceFilter filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String tableName = column.getTable().getName(); String columnName = column.getName(); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/FlagSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/FlagSqlAggregator.java index a1505b3447..a2810beab0 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/FlagSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/FlagSqlAggregator.java @@ -9,6 +9,7 @@ import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.specific.FlagFilter; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.FlagSelect; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConnectorSqlTables; import com.bakdata.conquery.sql.conversion.cqelement.concept.FilterContext; @@ -99,7 +100,7 @@ private static Map> createFlagRootSelectMap .entrySet().stream() .collect(Collectors.toMap( Map.Entry::getKey, - entry -> new ExtractingSqlSelect<>(rootTable, entry.getValue().getName(), Boolean.class) + entry -> new ExtractingSqlSelect<>(rootTable, entry.getValue().resolve().getName(), Boolean.class) )); } @@ -170,9 +171,10 @@ public SqlFilters convertToSqlFilter(FlagFilter flagFilter, FilterContext getRequiredColumns(Map flags, Set selectedFlags) { + private static List getRequiredColumns(Map flags, Set selectedFlags) { return selectedFlags.stream() .map(flags::get) + .map(ColumnId::resolve) .toList(); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java index 8dbceba3be..5974b7536a 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java @@ -10,6 +10,7 @@ import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SumFilter; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.SumSelect; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConnectorSqlTables; import 
com.bakdata.conquery.sql.conversion.cqelement.concept.FilterContext; @@ -97,15 +98,15 @@ private enum SumDistinctCteStep implements CteStep { @Override public ConnectorSqlSelects connectorSelect(SumSelect sumSelect, SelectContext selectContext) { - Column sumColumn = sumSelect.getColumn(); - Column subtractColumn = sumSelect.getSubtractColumn(); - List distinctByColumns = sumSelect.getDistinctByColumn(); + Column sumColumn = sumSelect.getColumn().resolve(); + Column subtractColumn = sumSelect.getSubtractColumn() != null ? sumSelect.getSubtractColumn().resolve() : null; + List distinctByColumns = sumSelect.getDistinctByColumn().stream().map(ColumnId::resolve).toList(); NameGenerator nameGenerator = selectContext.getNameGenerator(); String alias = nameGenerator.selectName(sumSelect); ConnectorSqlTables tables = selectContext.getTables(); CommonAggregationSelect sumAggregationSelect; - if (distinctByColumns != null && !distinctByColumns.isEmpty()) { + if (!distinctByColumns.isEmpty()) { SqlIdColumns ids = selectContext.getIds(); sumAggregationSelect = createDistinctSumAggregationSelect(sumColumn, distinctByColumns, alias, ids, tables, nameGenerator); ExtractingSqlSelect finalSelect = createFinalSelect(sumAggregationSelect, tables); @@ -129,16 +130,16 @@ public ConnectorSqlSelects connectorSelect(SumSelect sumSelect, SelectContext sumFilter, FilterContext filterContext) { - Column sumColumn = sumFilter.getColumn(); - Column subtractColumn = sumFilter.getSubtractColumn(); - List distinctByColumns = sumFilter.getDistinctByColumn(); + Column sumColumn = sumFilter.getColumn().resolve(); + Column subtractColumn = sumFilter.getSubtractColumn() != null ? sumFilter.getSubtractColumn().resolve() : null; + List distinctByColumns = sumFilter.getDistinctByColumn().stream().map(ColumnId::resolve).toList(); String alias = filterContext.getNameGenerator().selectName(sumFilter); ConnectorSqlTables tables = filterContext.getTables(); CommonAggregationSelect sumAggregationSelect; ConnectorSqlSelects selects; - if (distinctByColumns != null && !distinctByColumns.isEmpty()) { + if (!distinctByColumns.isEmpty()) { sumAggregationSelect = createDistinctSumAggregationSelect(sumColumn, distinctByColumns, alias, filterContext.getIds(), tables, filterContext.getNameGenerator()); selects = ConnectorSqlSelects.builder() @@ -168,19 +169,20 @@ public SqlFilters convertToSqlFilter(SumFilter sumFilter, FilterContext filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String tableName = column.getTable().getName(); String columnName = column.getName(); Class numberClass = NumberMapUtil.getType(column); Field field = DSL.field(DSL.name(tableName, columnName), numberClass); - Column subtractColumn = filter.getSubtractColumn(); + ColumnId subtractColumn = filter.getSubtractColumn(); if (subtractColumn == null) { return new SumCondition(field, filterContext.getValue()).condition(); } - String subtractColumnName = subtractColumn.getName(); - String subtractTableName = subtractColumn.getTable().getName(); + Column resolvedSubtractionColumn = subtractColumn.resolve(); + String subtractColumnName = resolvedSubtractionColumn.getName(); + String subtractTableName = resolvedSubtractionColumn.getTable().getName(); Field subtractField = DSL.field(DSL.name(subtractTableName, subtractColumnName), numberClass); return new SumCondition(field.minus(subtractField), filterContext.getValue()).condition(); } diff --git 
a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/AbstractSelectFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/AbstractSelectFilterConverter.java index abfa39614e..65e3ac4980 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/AbstractSelectFilterConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/AbstractSelectFilterConverter.java @@ -17,7 +17,7 @@ public SqlFilters convertToSqlFilter(F filter, FilterContext filterContext) { ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>( filterContext.getTables().getPredecessor(ConceptCteStep.PREPROCESSING), - filter.getColumn().getName(), + filter.getColumn().getColumn(), String.class ); @@ -39,7 +39,7 @@ public SqlFilters convertToSqlFilter(F filter, FilterContext filterContext) { @Override public Condition convertForTableExport(F filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String tableName = column.getTable().getName(); String columnName = column.getName(); Field field = DSL.field(DSL.name(tableName, columnName), String.class); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberFilterConverter.java index 1eb1daa5fe..f360f7f7f1 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberFilterConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberFilterConverter.java @@ -1,13 +1,10 @@ package com.bakdata.conquery.sql.conversion.model.filter; -import java.math.BigDecimal; import java.util.List; import com.bakdata.conquery.models.common.IRange; -import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.specific.NumberFilter; -import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConnectorSqlTables; import com.bakdata.conquery.sql.conversion.cqelement.concept.FilterContext; @@ -23,51 +20,31 @@ public class NumberFilterConverter> im @Override public SqlFilters convertToSqlFilter(NumberFilter filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); ConnectorSqlTables tables = filterContext.getTables(); Class numberClass = NumberMapUtil.getType(column); ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>(tables.getRootTable(), column.getName(), numberClass); Field eventFilterCtePredecessor = rootSelect.qualify(tables.getPredecessor(ConceptCteStep.EVENT_FILTER)).select(); - IRange filterValue = prepareFilterValue(column, filterContext.getValue()); + IRange filterValue = filterContext.getValue(); NumberCondition condition = new NumberCondition(eventFilterCtePredecessor, filterValue); - ConnectorSqlSelects selects = ConnectorSqlSelects.builder() - .preprocessingSelects(List.of(rootSelect)) - .build(); + ConnectorSqlSelects selects = ConnectorSqlSelects.builder().preprocessingSelects(List.of(rootSelect)).build(); - WhereClauses whereClauses = WhereClauses.builder() - .eventFilter(condition) - .build(); + WhereClauses whereClauses = WhereClauses.builder().eventFilter(condition).build(); return new 
SqlFilters(selects, whereClauses); } @Override public Condition convertForTableExport(NumberFilter filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String tableName = column.getTable().getName(); String columnName = column.getName(); Field field = DSL.field(DSL.name(tableName, columnName), Number.class); - return new NumberCondition(field, filterContext.getValue()).condition(); - } + IRange range = filterContext.getValue(); - /** - * If there is a long range filter on a column of type MONEY, the filter value will represent a decimal with the point moved right 2 places right. - *
- * For example, the filter value {@code {min: 1000€, max: 2000€}} will be converted to {@code {min: 10,00€, max: 20,00€}} - */ - private static IRange prepareFilterValue(Column column, IRange filterValue) { - if (column.getType() != MajorTypeId.MONEY || !(filterValue instanceof Range.LongRange)) { - return filterValue; - } - Long min = (Long) filterValue.getMin(); - Long max = (Long) filterValue.getMax(); - return Range.LongRange.of( - BigDecimal.valueOf(min).movePointLeft(2), - BigDecimal.valueOf(max).movePointLeft(2) - ); + return new NumberCondition(field, range).condition(); } - } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ConceptColumnSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ConceptColumnSelectConverter.java index 02e770e89d..1bcda25c97 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ConceptColumnSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ConceptColumnSelectConverter.java @@ -7,19 +7,16 @@ import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.select.concept.ConceptColumnSelect; -import com.bakdata.conquery.sql.conversion.SharedAliases; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptSqlTables; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConnectorSqlTables; import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; import com.bakdata.conquery.sql.conversion.model.CteStep; import com.bakdata.conquery.sql.conversion.model.NameGenerator; -import com.bakdata.conquery.sql.conversion.model.QualifyingUtil; import com.bakdata.conquery.sql.conversion.model.QueryStep; import com.bakdata.conquery.sql.conversion.model.Selects; import com.bakdata.conquery.sql.conversion.model.SqlIdColumns; import com.bakdata.conquery.sql.execution.ResultSetProcessor; -import com.bakdata.conquery.util.TablePrimaryColumnUtil; import lombok.Getter; import lombok.RequiredArgsConstructor; import org.jooq.Field; @@ -46,7 +43,7 @@ public ConnectorSqlSelects connectorSelect(ConceptColumnSelect select, SelectCon if (connector.getColumn() == null) { return ConnectorSqlSelects.none(); } - ExtractingSqlSelect connectorColumn = new ExtractingSqlSelect<>(connector.getTable().getName(), connector.getColumn().getName(), Object.class); + ExtractingSqlSelect connectorColumn = new ExtractingSqlSelect<>(connector.getResolvedTableId().getTable(), connector.getColumn().getColumn(), Object.class); ExtractingSqlSelect qualified = connectorColumn.qualify(selectContext.getTables().getPredecessor(ConceptCteStep.EVENT_FILTER)); return ConnectorSqlSelects.builder() .preprocessingSelect(connectorColumn) @@ -120,18 +117,14 @@ private static QueryStep createConnectorColumnSelectQuery( String tableName = selectContext.getTables() .getConnectorTables() .stream() - .filter(tables -> Objects.equals(tables.getRootTable(), connector.getTable().getName())) + .filter(tables -> Objects.equals(tables.getRootTable(), connector.getResolvedTableId().getTable())) .findFirst() .map(tables -> tables.cteName(ConceptCteStep.EVENT_FILTER)) - .orElse(connector.getTable().getName()); + .orElse(connector.getResolvedTableId().getTable()); Table connectorTable = DSL.table(DSL.name(tableName)); - - Field primaryColumn = TablePrimaryColumnUtil.findPrimaryColumn(connector.getTable(), 
selectContext.getConversionContext().getConfig()); - Field qualifiedPrimaryColumn = QualifyingUtil.qualify(primaryColumn, connectorTable.getName()).as(SharedAliases.PRIMARY_COLUMN.getAlias()); - SqlIdColumns ids = new SqlIdColumns(qualifiedPrimaryColumn); - - Field connectorColumn = DSL.field(DSL.name(connectorTable.getName(), connector.getColumn().getName())); + SqlIdColumns ids = selectContext.getIds().qualify(connectorTable.getName()); + Field connectorColumn = DSL.field(DSL.name(connectorTable.getName(), connector.getColumn().resolve().getName())); Field casted = selectContext.getFunctionProvider().cast(connectorColumn, SQLDataType.VARCHAR).as(alias); FieldWrapper connectorSelect = new FieldWrapper<>(casted); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DistinctSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DistinctSelectConverter.java index 5cde2abe5b..2c86a617e1 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DistinctSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DistinctSelectConverter.java @@ -68,7 +68,7 @@ public ConnectorSqlSelects connectorSelect(DistinctSelect distinctSelect, Select String alias = selectContext.getNameGenerator().selectName(distinctSelect); ConnectorSqlTables tables = selectContext.getTables(); - FieldWrapper preprocessingSelect = new FieldWrapper<>(field(name(tables.getRootTable(), distinctSelect.getColumn().getName())).as(alias)); + FieldWrapper preprocessingSelect = new FieldWrapper<>(field(name(tables.getRootTable(), distinctSelect.getColumn().getColumn())).as(alias)); QueryStep distinctSelectCte = createDistinctSelectCte(preprocessingSelect, alias, selectContext); QueryStep aggregatedCte = createAggregationCte(selectContext, preprocessingSelect, distinctSelectCte, alias); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSelectConverter.java index 64b19538bb..6eab341418 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSelectConverter.java @@ -8,7 +8,7 @@ public class FirstValueSelectConverter implements SelectConverter selectContext) { return ValueSelectUtil.createValueSelect( - select.getColumn(), + select.getColumn().resolve(), selectContext.getNameGenerator().selectName(select), (valueField, orderByFields) -> selectContext.getFunctionProvider().first(valueField, orderByFields), selectContext diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSelectConverter.java index 06a27c7484..4cb0e45afc 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSelectConverter.java @@ -8,7 +8,7 @@ public class LastValueSelectConverter implements SelectConverter selectContext) { return ValueSelectUtil.createValueSelect( - select.getColumn(), + select.getColumn().resolve(), selectContext.getNameGenerator().selectName(select), (valueField, orderByFields) -> selectContext.getFunctionProvider().last(valueField, 
orderByFields), selectContext diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSelectConverter.java index 208860a993..100c74327d 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSelectConverter.java @@ -13,7 +13,7 @@ public ConnectorSqlSelects connectorSelect(RandomValueSelect select, SelectConte ConnectorSqlTables tables = selectContext.getTables(); String rootTableName = tables.getRootTable(); - String columnName = select.getColumn().getName(); + String columnName = select.getColumn().getColumn(); ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>(rootTableName, columnName, Object.class); String alias = selectContext.getNameGenerator().selectName(select); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SelectContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SelectContext.java index d37e743641..b274dc25ee 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SelectContext.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SelectContext.java @@ -2,8 +2,6 @@ import java.util.Optional; -import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.sql.conversion.Context; import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptSqlTables; @@ -26,7 +24,6 @@ public class SelectContext implements Context { ConversionContext conversionContext; public static SelectContext create( - CQTable cqTable, SqlIdColumns ids, Optional validityDate, ConnectorSqlTables tables, @@ -36,7 +33,6 @@ public static SelectContext create( } public static SelectContext create( - CQConcept cqConcept, SqlIdColumns ids, Optional validityDate, ConceptSqlTables tables, diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/AbsoluteFormQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/AbsoluteFormQueryConverter.java index 573f2222b4..189a1748b6 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/AbsoluteFormQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/AbsoluteFormQueryConverter.java @@ -40,7 +40,7 @@ public ConversionContext convert(AbsoluteFormQuery form, ConversionContext conte FormType.ABSOLUTE, stratificationTable, form.getFeatures(), - form.getResultInfos(context.getSqlPrintSettings()), + form.getResultInfos(), context ); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java index 74f66d3e42..055bfcc262 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java @@ -46,7 +46,7 @@ public ConversionContext convert(ConceptQuery conceptQuery, ConversionContext co .build(); Select finalQuery = this.queryStepTransformer.toSelectQuery(finalStep); - return contextAfterConversion.withFinalQuery(new 
SqlQuery(finalQuery, conceptQuery.getResultInfos(context.getSqlPrintSettings()))); + return contextAfterConversion.withFinalQuery(new SqlQuery(finalQuery, conceptQuery.getResultInfos())); } private Selects getFinalSelects(ConceptQuery conceptQuery, Selects preFinalSelects, SqlFunctionProvider functionProvider) { diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/EntityDateQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/EntityDateQueryConverter.java index 917f49f8f3..457eebe740 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/EntityDateQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/EntityDateQueryConverter.java @@ -39,7 +39,7 @@ public ConversionContext convert(EntityDateQuery entityDateQuery, ConversionCont FormType.ENTITY_DATE, stratificationTable, entityDateQuery.getFeatures(), - entityDateQuery.getResultInfos(context.getSqlPrintSettings()), + entityDateQuery.getResultInfos(), context ); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/RelativFormQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/RelativFormQueryConverter.java index b0062afbdc..544d1e0548 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/RelativFormQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/RelativFormQueryConverter.java @@ -29,7 +29,7 @@ public ConversionContext convert(RelativeFormQuery form, ConversionContext conte FormType.RELATIVE, stratificationTable, form.getFeatures(), - form.getResultInfos(context.getSqlPrintSettings()), + form.getResultInfos(), context ); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/SecondaryIdQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/SecondaryIdQueryConverter.java index 34c3511f63..8b59f9c70e 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/SecondaryIdQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/SecondaryIdQueryConverter.java @@ -18,11 +18,11 @@ public ConversionContext convert(SecondaryIdQuery query, ConversionContext conte ConversionContext withConvertedQuery = context.getNodeConversions().convert( query.getQuery(), - context.withSecondaryIdDescription(query.getSecondaryId()) + context.withSecondaryIdDescription(query.getSecondaryId().resolve()) ); Preconditions.checkArgument(withConvertedQuery.getFinalQuery() != null, "The SecondaryIdQuery's query should be converted by now."); - SqlQuery secondaryIdSqlQuery = withConvertedQuery.getFinalQuery().overwriteResultInfos(query.getResultInfos(context.getSqlPrintSettings())); + SqlQuery secondaryIdSqlQuery = withConvertedQuery.getFinalQuery().overwriteResultInfos(query.getResultInfos()); return withConvertedQuery.withFinalQuery(secondaryIdSqlQuery); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/TableExportQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/TableExportQueryConverter.java index 37489e79d0..c8013f36d6 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/TableExportQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/TableExportQueryConverter.java @@ -13,6 +13,7 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import 
com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.sql.conversion.NodeConverter; import com.bakdata.conquery.sql.conversion.SharedAliases; import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; @@ -38,6 +39,12 @@ @RequiredArgsConstructor public class TableExportQueryConverter implements NodeConverter { + /** + * Validity date is part of positions, but not when converting to SQL because it might have multiple columns and not just one. + * Thus, we need to apply an offset to the positions. + */ + private static final int POSITION_OFFSET = 1; + private final QueryStepTransformer queryStepTransformer; @Override @@ -48,29 +55,29 @@ public Class getConversionClass() { @Override public ConversionContext convert(TableExportQuery tableExportQuery, ConversionContext context) { - QueryStep convertedPrerequisite = convertPrerequisite(tableExportQuery, context); - Map positions = tableExportQuery.getPositions(); - CDateRange dateRestriction = CDateRange.of(tableExportQuery.getDateRange()); - - List convertedTables = tableExportQuery.getTables().stream() - .flatMap(concept -> concept.getTables().stream().map(table -> convertTable( - table, - concept, - dateRestriction, - convertedPrerequisite, - positions, - context - ))) - .toList(); - - QueryStep unionedTables = QueryStep.createUnionAllStep( + final QueryStep convertedPrerequisite = convertPrerequisite(tableExportQuery, context); + final Map positions = tableExportQuery.getPositions(); + final CDateRange dateRestriction = CDateRange.of(tableExportQuery.getDateRange()); + + final List convertedTables = tableExportQuery.getTables().stream() + .flatMap(concept -> concept.getTables().stream().map(table -> convertTable( + table, + concept, + dateRestriction, + convertedPrerequisite, + positions, + context + ))) + .toList(); + + final QueryStep unionedTables = QueryStep.createUnionAllStep( convertedTables, null, // no CTE name required as this step will be the final select List.of(convertedPrerequisite) ); - Select selectQuery = queryStepTransformer.toSelectQuery(unionedTables); + final Select selectQuery = queryStepTransformer.toSelectQuery(unionedTables); - return context.withFinalQuery(new SqlQuery(selectQuery, tableExportQuery.getResultInfos(context.getSqlPrintSettings()))); + return context.withFinalQuery(new SqlQuery(selectQuery, tableExportQuery.getResultInfos())); } /** @@ -78,14 +85,14 @@ public ConversionContext convert(TableExportQuery tableExportQuery, ConversionCo */ private static QueryStep convertPrerequisite(TableExportQuery exportQuery, ConversionContext context) { - ConversionContext withConvertedPrerequisite = context.getNodeConversions().convert(exportQuery.getQuery(), context); + final ConversionContext withConvertedPrerequisite = context.getNodeConversions().convert(exportQuery.getQuery(), context); Preconditions.checkArgument(withConvertedPrerequisite.getQuerySteps().size() == 1, "Base query conversion should produce exactly 1 QueryStep"); - QueryStep convertedPrerequisite = withConvertedPrerequisite.getLastConvertedStep(); + final QueryStep convertedPrerequisite = withConvertedPrerequisite.getLastConvertedStep(); - Selects prerequisiteSelects = convertedPrerequisite.getQualifiedSelects(); - Selects selects = Selects.builder() - .ids(new SqlIdColumns(prerequisiteSelects.getIds().getPrimaryColumn())) - .build(); + final Selects prerequisiteSelects = 
convertedPrerequisite.getQualifiedSelects(); + final Selects selects = Selects.builder() + .ids(new SqlIdColumns(prerequisiteSelects.getIds().getPrimaryColumn())) + .build(); return QueryStep.builder() .cteName(FormCteStep.EXTRACT_IDS.getSuffix()) @@ -105,23 +112,25 @@ private static QueryStep convertTable( CQConcept concept, CDateRange dateRestriction, QueryStep convertedPrerequisite, - Map positions, + Map positions, ConversionContext context ) { - Field primaryColumn = TablePrimaryColumnUtil.findPrimaryColumn(cqTable.getConnector().getTable(), context.getConfig()); - SqlIdColumns ids = new SqlIdColumns(primaryColumn); - String conceptConnectorName = context.getNameGenerator().conceptConnectorName(concept, cqTable.getConnector(), context.getSqlPrintSettings().getLocale()); - Optional validityDate = convertTablesValidityDate(cqTable, conceptConnectorName, context); + final Field primaryColumn = TablePrimaryColumnUtil.findPrimaryColumn(cqTable.getConnector().resolve().getResolvedTable(), context.getConfig()); + final SqlIdColumns ids = new SqlIdColumns(primaryColumn); + final String conceptConnectorName = + context.getNameGenerator().conceptConnectorName(concept, cqTable.getConnector().resolve(), context.getSqlPrintSettings().getLocale()); + final Optional validityDate = convertTablesValidityDate(cqTable, conceptConnectorName, context); - List> exportColumns = initializeFields(cqTable, positions); - Selects selects = Selects.builder() - .ids(ids) - .validityDate(validityDate) - .sqlSelects(exportColumns) - .build(); + final List> exportColumns = initializeFields(cqTable, positions); - List filters = cqTable.getFilters().stream().map(filterValue -> filterValue.convertForTableExport(ids, context)).toList(); - Table joinedTable = joinConnectorTableWithPrerequisite(cqTable, ids, convertedPrerequisite, dateRestriction, context); + final Selects selects = Selects.builder() + .ids(ids) + .validityDate(validityDate) + .sqlSelects(exportColumns) + .build(); + + final List filters = cqTable.getFilters().stream().map(filterValue -> filterValue.convertForTableExport(ids, context)).toList(); + final Table joinedTable = joinConnectorTableWithPrerequisite(cqTable, ids, convertedPrerequisite, dateRestriction, context); return QueryStep.builder() .cteName(conceptConnectorName) @@ -135,72 +144,74 @@ private static Optional convertTablesValidityDate(CQTable table if (table.findValidityDate() == null) { return Optional.of(ColumnDateRange.empty()); } - SqlFunctionProvider functionProvider = context.getSqlDialect().getFunctionProvider(); - ColumnDateRange validityDate = functionProvider.forValidityDate(table.findValidityDate()); + final SqlFunctionProvider functionProvider = context.getSqlDialect().getFunctionProvider(); + final ColumnDateRange validityDate = functionProvider.forValidityDate(table.findValidityDate()); // when exporting tables, we want the validity date as a single-column daterange string expression straightaway - Field asStringExpression = functionProvider.encloseInCurlyBraces(functionProvider.daterangeStringExpression(validityDate)); + final Field asStringExpression = functionProvider.encloseInCurlyBraces(functionProvider.daterangeStringExpression(validityDate)); return Optional.of(ColumnDateRange.of(asStringExpression).asValidityDateRange(alias)); } - private static List> initializeFields(CQTable cqTable, Map positions) { + private static List> initializeFields(CQTable cqTable, Map positions) { + + final Field[] exportColumns = createPlaceholders(positions); - Field[] exportColumns = 
createPlaceholders(positions, cqTable); - for (Column column : cqTable.getConnector().getTable().getColumns()) { + exportColumns[0] = createSourceInfoSelect(cqTable); + + for (Column column : cqTable.getConnector().resolve().getResolvedTable().getColumns()) { // e.g. date column(s) are handled separately and not part of positions - if (!positions.containsKey(column)) { + if (!positions.containsKey(column.getId())) { continue; } - int position = positions.get(column) - 1; + final int position = positions.get(column.getId()) - POSITION_OFFSET; exportColumns[position] = createColumnSelect(column, position); } return Arrays.stream(exportColumns).map(FieldWrapper::new).collect(Collectors.toList()); } - private static Field[] createPlaceholders(Map positions, CQTable cqTable) { + private static Table joinConnectorTableWithPrerequisite( + CQTable cqTable, + SqlIdColumns ids, + QueryStep convertedPrerequisite, + CDateRange dateRestriction, + ConversionContext context + ) { + final SqlFunctionProvider functionProvider = context.getSqlDialect().getFunctionProvider(); + final Table connectorTable = DSL.table(DSL.name(cqTable.getConnector().resolve().getResolvedTableId().getTable())); + final Table convertedPrerequisiteTable = DSL.table(DSL.name(convertedPrerequisite.getCteName())); - Field[] exportColumns = new Field[positions.size() + 1]; - exportColumns[0] = createSourceInfoSelect(cqTable); + final ColumnDateRange validityDate = functionProvider.forValidityDate(cqTable.findValidityDate()); + final List joinConditions = Stream.concat( + ids.join(convertedPrerequisite.getQualifiedSelects().getIds()).stream(), + Stream.of(functionProvider.dateRestriction(functionProvider.forCDateRange(dateRestriction), validityDate)) + ).toList(); + + return functionProvider.innerJoin(connectorTable, convertedPrerequisiteTable, joinConditions); + } + + private static Field[] createPlaceholders(Map positions) { + + final int size = TableExportQuery.calculateWidth(positions) - POSITION_OFFSET; + final Field[] exportColumns = new Field[size]; // if columns have the same computed position, they can share a common name because they will be unioned over multiple tables anyway - positions.forEach((column, position) -> { - int shifted = position - 1; - Field columnSelect = DSL.inline(null, Object.class).as("%s-%d".formatted(column.getName(), shifted)); - exportColumns[shifted] = columnSelect; - }); + for (int index = 0; index < exportColumns.length; index++) { + final Field columnSelect = DSL.inline(null, Object.class).as("null-%d".formatted(index)); + exportColumns[index] = columnSelect; + } return exportColumns; } private static Field createSourceInfoSelect(CQTable cqTable) { - String tableName = cqTable.getConnector().getTable().getName(); + final String tableName = cqTable.getConnector().resolve().getResolvedTableId().getTable(); return DSL.val(tableName).as(SharedAliases.SOURCE.getAlias()); } private static Field createColumnSelect(Column column, int position) { - String columnName = "%s-%s".formatted(column.getName(), position); + final String columnName = "%s-%s".formatted(column.getName(), position); return DSL.field(DSL.name(column.getTable().getName(), column.getName())) .as(columnName); } - private static Table joinConnectorTableWithPrerequisite( - CQTable cqTable, - SqlIdColumns ids, - QueryStep convertedPrerequisite, - CDateRange dateRestriction, - ConversionContext context - ) { - SqlFunctionProvider functionProvider = context.getSqlDialect().getFunctionProvider(); - Table connectorTable = 
DSL.table(DSL.name(cqTable.getConnector().getTable().getName())); - Table convertedPrerequisiteTable = DSL.table(DSL.name(convertedPrerequisite.getCteName())); - - ColumnDateRange validityDate = functionProvider.forValidityDate(cqTable.findValidityDate()); - List joinConditions = Stream.concat( - ids.join(convertedPrerequisite.getQualifiedSelects().getIds()).stream(), - Stream.of(functionProvider.dateRestriction(functionProvider.forCDateRange(dateRestriction), validityDate)) - ).toList(); - - return functionProvider.innerJoin(connectorTable, convertedPrerequisiteTable, joinConditions); - } - } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionState.java b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionState.java index 15253a0973..8b189392a0 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionState.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionState.java @@ -38,6 +38,10 @@ public SqlExecutionState(ExecutionState state, List columnNames, List streamQueryResults() { + // when the SQL execution fails, table is null + if (table == null) { + return Stream.empty(); + } return table.stream(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/tasks/PermissionCleanupTask.java b/backend/src/main/java/com/bakdata/conquery/tasks/PermissionCleanupTask.java index 0b1911c529..98c00a29ad 100644 --- a/backend/src/main/java/com/bakdata/conquery/tasks/PermissionCleanupTask.java +++ b/backend/src/main/java/com/bakdata/conquery/tasks/PermissionCleanupTask.java @@ -2,10 +2,12 @@ import java.io.PrintWriter; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; +import java.util.stream.Stream; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.PermissionOwner; @@ -50,11 +52,12 @@ public void execute(Map> parameters, PrintWriter output) th * * @return The number of deleted permissions. 
*/ - public static int deleteQueryPermissionsWithMissingRef(MetaStorage storage, Iterable> owners) { + public static int deleteQueryPermissionsWithMissingRef(MetaStorage storage, Stream> owners) { int countDeleted = 0; // Do the loop-di-loop - for (PermissionOwner owner : owners) { - Set permissions = owner.getPermissions(); + for (Iterator> it = owners.iterator(); it.hasNext(); ) { + PermissionOwner owner = it.next(); + Set permissions = owner.getPermissions(); for (Permission permission : permissions) { WildcardPermission wpermission = getAsWildcardPermission(permission); if (wpermission == null) { @@ -90,7 +93,7 @@ public static int deleteQueryPermissionsWithMissingRef(MetaStorage storage, Iter countDeleted++; } - } + } return countDeleted; } @@ -113,7 +116,8 @@ private static WildcardPermission getAsWildcardPermission(Permission permission) */ public static & Owned, ID extends Id> int deletePermissionsOfOwnedInstances(MetaStorage storage, String permissionDomain, IdUtil.Parser idParser, Function instanceStorageExtractor) { int countDeleted = 0; - for (User user : storage.getAllUsers()) { + for (Iterator it = storage.getAllUsers().iterator(); it.hasNext(); ) { + User user = it.next(); Set permissions = user.getPermissions(); for (Permission permission : permissions) { WildcardPermission wpermission = getAsWildcardPermission(permission); @@ -157,7 +161,7 @@ public static & Owned, ID extends Id> int del } - } + } return countDeleted; diff --git a/backend/src/main/java/com/bakdata/conquery/tasks/QueryCleanupTask.java b/backend/src/main/java/com/bakdata/conquery/tasks/QueryCleanupTask.java index fe45b4ceec..6aa161b446 100644 --- a/backend/src/main/java/com/bakdata/conquery/tasks/QueryCleanupTask.java +++ b/backend/src/main/java/com/bakdata/conquery/tasks/QueryCleanupTask.java @@ -65,7 +65,7 @@ public void execute(Map> parameters, PrintWriter output) th throw new IllegalArgumentException("Query Expiration may not be null"); } - log.info("Starting deletion of queries older than {} of {}", queryExpiration, storage.getAllExecutions().size()); + log.info("Starting deletion of queries older than {} of {}", queryExpiration, storage.getAllExecutions().count()); // Iterate for as long as no changes are needed (this is because queries can be referenced by other queries) while (true) { @@ -73,7 +73,7 @@ public void execute(Map> parameters, PrintWriter output) th final Set toDelete = new HashSet<>(); - for (ManagedExecution execution : storage.getAllExecutions()) { + for (ManagedExecution execution : storage.getAllExecutions().toList()) { // Gather all referenced queries via reused checker. requiredQueries.addAll(execution.getSubmitted().collectRequiredQueries()); @@ -114,8 +114,8 @@ public void execute(Map> parameters, PrintWriter output) th // remove all queries referenced in reused queries. 
final Collection referenced = requiredQueries.stream() - .map(storage::getExecution) - .collect(Collectors.toSet()); + .map(storage::getExecution) + .collect(Collectors.toSet()); toDelete.removeAll(referenced); diff --git a/backend/src/main/java/com/bakdata/conquery/tasks/ReloadMetaStorageTask.java b/backend/src/main/java/com/bakdata/conquery/tasks/ReloadMetaStorageTask.java index fb5d423dc2..0a38378390 100644 --- a/backend/src/main/java/com/bakdata/conquery/tasks/ReloadMetaStorageTask.java +++ b/backend/src/main/java/com/bakdata/conquery/tasks/ReloadMetaStorageTask.java @@ -26,11 +26,11 @@ public void execute(Map> parameters, PrintWriter output) th output.println("BEGIN reloading MetaStorage."); { - final int allUsers = storage.getAllUsers().size(); - final int allExecutions = storage.getAllExecutions().size(); - final int allFormConfigs = storage.getAllFormConfigs().size(); - final int allGroups = storage.getAllGroups().size(); - final int allRoles = storage.getAllRoles().size(); + final long allUsers = storage.getAllUsers().count(); + final long allExecutions = storage.getAllExecutions().count(); + final long allFormConfigs = storage.getAllFormConfigs().count(); + final long allGroups = storage.getAllGroups().count(); + final long allRoles = storage.getAllRoles().count(); log.debug("BEFORE: Have {} Users, {} Groups, {} Roles, {} Executions, {} FormConfigs.", allUsers, allGroups, allRoles, allExecutions, allFormConfigs); @@ -40,11 +40,11 @@ public void execute(Map> parameters, PrintWriter output) th output.println("DONE reloading MetaStorage within %s.".formatted(timer.elapsed())); { - final int allUsers = storage.getAllUsers().size(); - final int allExecutions = storage.getAllExecutions().size(); - final int allFormConfigs = storage.getAllFormConfigs().size(); - final int allGroups = storage.getAllGroups().size(); - final int allRoles = storage.getAllRoles().size(); + final long allUsers = storage.getAllUsers().count(); + final long allExecutions = storage.getAllExecutions().count(); + final long allFormConfigs = storage.getAllFormConfigs().count(); + final long allGroups = storage.getAllGroups().count(); + final long allRoles = storage.getAllRoles().count(); log.debug("AFTER: Have {} Users, {} Groups, {} Roles, {} Executions, {} FormConfigs.", allUsers, allGroups, allRoles, allExecutions, allFormConfigs); diff --git a/backend/src/main/java/com/bakdata/conquery/util/AuthUtil.java b/backend/src/main/java/com/bakdata/conquery/util/AuthUtil.java index 3a84ffb4f7..3e93ba5178 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/AuthUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/util/AuthUtil.java @@ -21,7 +21,7 @@ public synchronized void cleanUpUserAndBelongings(User user, MetaStorage storage // Remove form configurations int countForms = 0; - for (FormConfig formConfig : storage.getAllFormConfigs()) { + for (FormConfig formConfig : storage.getAllFormConfigs().toList()) { if (!user.isOwner(formConfig)) { continue; } @@ -32,7 +32,7 @@ public synchronized void cleanUpUserAndBelongings(User user, MetaStorage storage // Remove executions int countExecs = 0; - for (ManagedExecution exec : storage.getAllExecutions()) { + for (ManagedExecution exec : storage.getAllExecutions().toList()) { if (!user.isOwner(exec)) { continue; } @@ -43,7 +43,7 @@ public synchronized void cleanUpUserAndBelongings(User user, MetaStorage storage log.debug("Removed {} form configs and {} executions for user '{}'", countForms, countExecs, user); - for (Group group : storage.getAllGroups()) { + for 
(Group group : storage.getAllGroups().toList()) { if (group.containsMember(user)) { group.removeMember(user.getId()); group.updateStorage(); diff --git a/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java b/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java index 901394f33a..15cd949262 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java +++ b/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java @@ -28,13 +28,14 @@ import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.query.NamespacedIdentifiableHolding; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; @@ -75,6 +76,100 @@ public static String createTotalDefaultMultiLabel(List elements, Stri return elements.stream().map(elt -> elt.defaultLabel(locale)).collect(Collectors.joining(delimiter)); } + public static void generateConceptReadPermissions(@NonNull QueryUtils.NamespacedIdentifiableCollector idCollector, @NonNull Collection collectPermissions){ + idCollector.getIdentifiables().stream() + .filter(id -> id instanceof ConceptElement) + .map(ConceptElement.class::cast) + .>map(ConceptElement::getConcept) + .map(cId -> cId.createPermission(Ability.READ.asSet())) + .distinct() + .collect(Collectors.toCollection(() -> collectPermissions)); + } + + public static QueryExecutionContext determineDateAggregatorForContext(QueryExecutionContext ctx, Supplier>> altValidityDateAggregator) { + if (ctx.getQueryDateAggregator().isPresent()) { + return ctx; + } + return ctx.withQueryDateAggregator(altValidityDateAggregator.get()); + } + + public static String makeQueryLabel(final Visitable query, PrintSettings cfg, ManagedExecutionId id) { + final StringBuilder sb = new StringBuilder(); + + final Map, List> sortedContents = + Visitable.stream(query) + .collect(Collectors.groupingBy(Visitable::getClass)); + + int sbStartSize = sb.length(); + + // Check for CQExternal + List externals = sortedContents.getOrDefault(CQExternal.class, Collections.emptyList()); + if (!externals.isEmpty()) { + if (!sb.isEmpty()) { + sb.append(" "); + } + sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).external()); + } + + // Check for CQReused + if (sortedContents.containsKey(CQReusedQuery.class)) { + if (!sb.isEmpty()) { + sb.append(" "); + } + sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).reused()); + } + + + // Check for CQConcept + if (sortedContents.containsKey(CQConcept.class)) { + if (!sb.isEmpty()) { + sb.append(" "); + } + // Track length of text we are appending for concepts. 
+ final AtomicInteger length = new AtomicInteger(); + + sortedContents.get(CQConcept.class) + .stream() + .map(CQConcept.class::cast) + + .map(c -> makeLabelWithRootAndChild(c, cfg)) + .filter(Predicate.not(Strings::isNullOrEmpty)) + .distinct() + + .takeWhile(elem -> length.addAndGet(elem.length()) < MAX_CONCEPT_LABEL_CONCAT_LENGTH) + .forEach(label -> sb.append(label).append(" ")); + + // Last entry will output one Space that we don't want + if (!sb.isEmpty()) { + sb.deleteCharAt(sb.length() - 1); + } + + // If not all Concept could be included in the name, point that out + if (length.get() > MAX_CONCEPT_LABEL_CONCAT_LENGTH) { + sb.append(" ").append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).furtherConcepts()); + } + } + + + // Fallback to id if nothing could be extracted from the query description + if (sbStartSize == sb.length()) { + sb.append(id.getExecution()); + } + + return sb.toString(); + } + + private static String makeLabelWithRootAndChild(CQConcept cqConcept, PrintSettings cfg) { + String label = cqConcept.getUserOrDefaultLabel(cfg.getLocale()); + + if (label == null) { + label = cqConcept.getConcept().getLabel(); + } + + // Concat everything with dashes + return label.replace(" ", "-"); + } + /** * Checks if the query requires to resolve external ids. * @@ -137,7 +232,6 @@ public Optional getOnlyReused() { } } - /** * Collects all {@link NamespacedIdentifiable} provided by a user from a * {@link Visitable}. @@ -159,10 +253,10 @@ public void accept(Visitable element) { * Collects all {@link NamespacedId} references provided by a user from a * {@link Visitable}. */ + @Getter public static class AvailableSecondaryIdCollector implements QueryVisitor { - @Getter - private final Set ids = new HashSet<>(); + private final Set ids = new HashSet<>(); @Override public void accept(Visitable element) { @@ -174,7 +268,7 @@ public void accept(Visitable element) { } for (Connector connector : cqConcept.getConcept().getConnectors()) { - for (Column column : connector.getTable().getColumns()) { + for (Column column : connector.getResolvedTable().getColumns()) { if(column.getSecondaryId() == null){ continue; } @@ -185,101 +279,4 @@ public void accept(Visitable element) { } } } - - public static void generateConceptReadPermissions(@NonNull QueryUtils.NamespacedIdentifiableCollector idCollector, @NonNull Collection collectPermissions){ - idCollector.getIdentifiables().stream() - .filter(id -> id instanceof ConceptElement) - .map(ConceptElement.class::cast) - .map(ConceptElement::getConcept) - .map(cId -> cId.createPermission(Ability.READ.asSet())) - .distinct() - .collect(Collectors.toCollection(() -> collectPermissions)); - } - - - - public static QueryExecutionContext determineDateAggregatorForContext(QueryExecutionContext ctx, Supplier>> altValidityDateAggregator) { - if (ctx.getQueryDateAggregator().isPresent()) { - return ctx; - } - return ctx.withQueryDateAggregator(altValidityDateAggregator.get()); - } - - public static String makeQueryLabel(final Visitable query, PrintSettings cfg, ManagedExecutionId id) { - final StringBuilder sb = new StringBuilder(); - - final Map, List> sortedContents = - Visitable.stream(query) - .collect(Collectors.groupingBy(Visitable::getClass)); - - int sbStartSize = sb.length(); - - // Check for CQExternal - List externals = sortedContents.getOrDefault(CQExternal.class, Collections.emptyList()); - if (!externals.isEmpty()) { - if (!sb.isEmpty()) { - sb.append(" "); - } - sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).external()); - } - - // 
Check for CQReused - if (sortedContents.containsKey(CQReusedQuery.class)) { - if (!sb.isEmpty()) { - sb.append(" "); - } - sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).reused()); - } - - - // Check for CQConcept - if (sortedContents.containsKey(CQConcept.class)) { - if (!sb.isEmpty()) { - sb.append(" "); - } - // Track length of text we are appending for concepts. - final AtomicInteger length = new AtomicInteger(); - - sortedContents.get(CQConcept.class) - .stream() - .map(CQConcept.class::cast) - - .map(c -> makeLabelWithRootAndChild(c, cfg)) - .filter(Predicate.not(Strings::isNullOrEmpty)) - .distinct() - - .takeWhile(elem -> length.addAndGet(elem.length()) < MAX_CONCEPT_LABEL_CONCAT_LENGTH) - .forEach(label -> sb.append(label).append(" ")); - - // Last entry will output one Space that we don't want - if (!sb.isEmpty()) { - sb.deleteCharAt(sb.length() - 1); - } - - // If not all Concept could be included in the name, point that out - if (length.get() > MAX_CONCEPT_LABEL_CONCAT_LENGTH) { - sb.append(" ").append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).furtherConcepts()); - } - } - - - // Fallback to id if nothing could be extracted from the query description - if (sbStartSize == sb.length()) { - sb.append(id.getExecution()); - } - - return sb.toString(); - } - - - private static String makeLabelWithRootAndChild(CQConcept cqConcept, PrintSettings cfg) { - String label = cqConcept.getUserOrDefaultLabel(cfg.getLocale()); - - if (label == null) { - label = cqConcept.getConcept().getLabel(); - } - - // Concat everything with dashes - return label.replace(" ", "-"); - } } diff --git a/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java b/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java index 3f1716a3f6..6dbb8e568d 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java @@ -52,7 +52,7 @@ public static IdPrinter getIdPrinter(Subject owner, ManagedExecution execution, .findFirst() .orElseThrow(); - if (owner.isPermitted(execution.getDataset(), Ability.PRESERVE_ID)) { + if (owner.isPermitted(execution.getDataset().resolve(), Ability.PRESERVE_ID)) { // todo(tm): The integration of ids in the sql connector needs to be properly managed return new FullIdPrinter(namespace.getStorage().getIdMapping(), size, pos); } diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/concept.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/concept.html.ftl index 59d2a6858c..428201bd66 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/concept.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/concept.html.ftl @@ -6,11 +6,11 @@ <@layout.layout> <@breadcrumbs.breadcrumbs - labels=["Datasets", c.dataset.label, "Concepts", c.label] + labels=["Datasets", c.dataset.resolve().label, "Concepts", c.label] links=[ "/admin-ui/datasets", - "/admin-ui/datasets/${c.dataset.id}", - "/admin-ui/datasets/${c.dataset.id}#Concepts" + "/admin-ui/datasets/${c.dataset}", + "/admin-ui/datasets/${c.dataset}#Concepts" ] /> <@infoCard.infoCard @@ -59,7 +59,7 @@ "${descriptionHeader}": x.description!"" } ) - link="/admin-ui/datasets/${c.dataset.id}/connectors/" + link="/admin-ui/datasets/${c.dataset}/connectors/" /> diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/connector.html.ftl 
b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/connector.html.ftl index 6b00c5d789..ad055abc11 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/connector.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/connector.html.ftl @@ -11,28 +11,28 @@ "${idHeader}": x.id, "name": x.name, "${labelHeader}": x.label, - "${requiredColumnsHeader}": x.requiredColumns?sort_by("name")?join(', ') + "${requiredColumnsHeader}": x.requiredColumns?sort?join(', ') }) /> <@layout.layout> <@breadcrumbs.breadcrumbs - labels=["Datasets", c.concept.dataset.label, "Concept", c.concept.label, "Connector", c.label] + labels=["Datasets", c.concept.dataset.resolve().label, "Concept", c.concept.label, "Connector", c.label] links=[ "/admin-ui/datasets", - "/admin-ui/datasets/${c.dataset.id}", - "/admin-ui/datasets/${c.dataset.id}#Concepts", - "/admin-ui/datasets/${c.dataset.id}/concepts/${c.concept.id}" - "/admin-ui/datasets/${c.dataset.id}/concepts/${c.concept.id}#Connectors" + "/admin-ui/datasets/${c.dataset}", + "/admin-ui/datasets/${c.dataset}#Concepts", + "/admin-ui/datasets/${c.dataset}/concepts/${c.concept.id}" + "/admin-ui/datasets/${c.dataset}/concepts/${c.concept.id}#Connectors" ] /> <@infoCard.infoCard class="d-inline-flex mt-2" labels=["ID", "Label", "Validity Dates", "Table"] - values=[c.id, c.label, c.validityDates?join(', '), c.table.name] - links={"Table": "/admin-ui/datasets/${c.dataset.id}/tables/${c.table.id}"} + values=[c.id, c.label, c.validityDates?join(', '), c.getResolvedTable().name] + links={"Table": "/admin-ui/datasets/${c.dataset}/tables/${c.getResolvedTable().id}"} /> <@accordion.accordionGroup> <#assign idHeader = "id"> diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/import.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/import.html.ftl index aa22dfdecb..6bd154143d 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/import.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/import.html.ftl @@ -2,17 +2,19 @@ <#import "templates/breadcrumbs.html.ftl" as breadcrumbs> <#import "templates/infoCard.html.ftl" as infoCard> <#import "templates/accordion.html.ftl" as accordion> -<#import "templates/table.html.ftl" as table> +<#import "templates/table.html.ftl" as tableMacro> <@layout.layout> + <#assign table=c.imp.table.resolve() /> + <#assign dataset=c.imp.dataset.resolve() /> <@breadcrumbs.breadcrumbs - labels=["Datasets", c.imp.table.dataset.label, "Tables", c.imp.table.label, "Tags", c.imp.id] + labels=["Datasets", dataset.label, "Tables", table.label, "Tags", c.imp.id] links=[ "/admin-ui/datasets", - "/admin-ui/datasets/${c.imp.table.dataset.id}", - "/admin-ui/datasets/${c.imp.table.dataset.id}#Tables", - "/admin-ui/datasets/${c.imp.table.dataset.id}/tables/${c.imp.table.id}", - "/admin-ui/datasets/${c.imp.table.dataset.id}/tables/${c.imp.table.id}#Tags" + "/admin-ui/datasets/${c.imp.table.dataset}", + "/admin-ui/datasets/${c.imp.table.dataset}#Tables", + "/admin-ui/datasets/${c.imp.table.dataset}/tables/${c.imp.table}", + "/admin-ui/datasets/${c.imp.table.dataset}/tables/${c.imp.table}#Tags" ] /> @@ -27,7 +29,7 @@ <#assign idHeader="id" /> <#assign sizeHeader="size" /> <#assign typeHeader="type" /> - <@table.table + <@tableMacro.table columns=[idHeader, sizeHeader, typeHeader] items=c.imp.columns ?map( x -> diff --git 
a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/queries.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/queries.html.ftl index 032b5ed33a..ec2d689ffe 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/queries.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/queries.html.ftl @@ -1,255 +1,269 @@ -<#import "templates/template.html.ftl" as layout> +<#import "templates/template.html.ftl" as layout /> <@layout.layout> - - -

[queries.html.ftl hunk body: the HTML markup of this hunk did not survive extraction; only the interpolated text is recoverable.
 Removed: the old static "Queries" listing markup (the old file ended without a trailing newline).
 Added: a client-side function getHtmltemplate(data, queryCounter) returning a template literal that renders, per query,
 "${data.ownerName} - ${data.id}/${data.label}", the Created/Started/Finished timestamps via new Date(...).toLocaleString(languageTag),
 "Duration: ${data.requiredTime} ms", "Type : ${data.queryType}", a progress value of ${data.progress * 100} % (0 when unset),
 and the pretty-printed data.query and data.error JSON, followed by the rebuilt "Queries" page scaffolding.]
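Since only the interpolated fields of the new renderer are recoverable, the following is a minimal, hypothetical sketch of its shape. The field names (ownerName, id, label, createdAt, startTime, finishTime, requiredTime, queryType, progress, query, error) and the use of toLocaleString(languageTag) come from the hunk; the element structure, CSS classes, and the languageTag constant are assumptions, not the original template.

// Sketch only: approximates the card renderer added to queries.html.ftl.
const languageTag = "en"; // the real template derives this from the request locale

function getHtmltemplate(data, queryCounter) {
    // Format a timestamp in the UI locale, or return an empty string when absent.
    const fmt = (ts) => (ts ? new Date(ts).toLocaleString(languageTag) : "");
    return `
    <div class="card" id="query-${queryCounter}">
        <div class="card-header">${data.ownerName} - ${data.id}/${data.label}</div>
        <div class="card-body">
            <div>Created: ${fmt(data.createdAt)}</div>
            <div>Started: ${fmt(data.startTime)}</div>
            <div>Finished: ${fmt(data.finishTime)}</div>
            <div>Duration: ${data.requiredTime} ms</div>
            <div>Type: ${data.queryType}</div>
            <div>${data.progress != null ? data.progress * 100 : 0} %</div>
            <pre>${data.query ? JSON.stringify(data.query, undefined, 2) : ""}</pre>
            <pre>${data.error ? JSON.stringify(data.error, undefined, 2) : ""}</pre>
        </div>
    </div>`;
}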
+ diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/table.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/table.html.ftl index 6ece489be3..2f073b17d2 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/table.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/table.html.ftl @@ -5,7 +5,7 @@ <#import "templates/table.html.ftl" as table> <#macro deleteTagButton id> - + <#macro columnInfoRender id> @@ -22,11 +22,11 @@ <@layout.layout> <@breadcrumbs.breadcrumbs - labels=["Datasets", c.table.dataset.label, "Tables", c.table.label] + labels=["Datasets", c.table.dataset.resolve().label, "Tables", c.table.label] links=[ "/admin-ui/datasets", - "/admin-ui/datasets/${c.table.dataset.id}", - "/admin-ui/datasets/${c.table.dataset.id}#Tables" + "/admin-ui/datasets/${c.table.dataset}", + "/admin-ui/datasets/${c.table.dataset}#Tables" ] /> <@infoCard.infoCard @@ -41,7 +41,7 @@ <@table.table columns=["id", "name", "numberOfEntries", "actions"] items=c.imports?sort_by("name") - link="/admin-ui/datasets/${c.table.dataset.id}/tables/${c.table.id}/import/" + link="/admin-ui/datasets/${c.table.dataset}/tables/${c.table.id}/import/" deleteButton=deleteTagButton /> @@ -49,7 +49,7 @@ <@table.table columns=["id", "name"] items=c.concepts?sort_by("name") - link="/admin-ui/datasets/${c.table.dataset.id}/concepts/" + link="/admin-ui/datasets/${c.table.dataset}/concepts/" /> <@accordion.accordion summary="Columns" infoText="${c.table.columns?size} entries"> diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/table.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/table.html.ftl index 02e791bdde..59b0dc5836 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/table.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/templates/table.html.ftl @@ -25,7 +25,7 @@ <#list items as item> -

+ <#list columns as column> <#if renderers?keys?seq_contains(column)> diff --git a/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java b/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java index 1793c36229..84e797f740 100644 --- a/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java +++ b/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java @@ -11,8 +11,6 @@ import java.util.List; import java.util.UUID; import java.util.stream.Collectors; -import jakarta.validation.Validator; -import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.apiv1.QueryProcessor; import com.bakdata.conquery.apiv1.execution.ExecutionStatus; @@ -51,27 +49,41 @@ import com.bakdata.conquery.models.index.IndexService; import com.bakdata.conquery.models.query.DistributedExecutionManager; import com.bakdata.conquery.models.query.ManagedQuery; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.DistributedNamespace; -import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.bakdata.conquery.util.extensions.MetaStorageExtension; +import com.bakdata.conquery.util.extensions.UserExtension; import com.google.common.collect.ImmutableList; import io.dropwizard.core.setup.Environment; import io.dropwizard.jersey.validation.Validators; +import jakarta.validation.Validator; +import jakarta.ws.rs.core.UriBuilder; import lombok.SneakyThrows; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; public class StoredQueriesProcessorTest { + public static final ConqueryConfig CONFIG = new ConqueryConfig(); + public static final UriBuilder URI_BUILDER = UriBuilder.fromPath("http://localhost"); + public static final IndexService INDEX_SERVICE = new IndexService(CONFIG.getCsv().createCsvParserSettings(), "empty"); private static final Environment ENVIRONMENT = new Environment("StoredQueriesProcessorTest"); + @RegisterExtension + private static final MetaStorageExtension STORAGE_EXTENTION = new MetaStorageExtension(ENVIRONMENT.metrics()); + public static final MetaStorage STORAGE = STORAGE_EXTENTION.getMetaStorage(); + @RegisterExtension + private static final UserExtension USER_0_EXTENSIONS = new UserExtension(STORAGE, "0"); + @RegisterExtension + private static final UserExtension USER_1_EXTENSIONS = new UserExtension(STORAGE, "1"); + private static final User[] USERS = new User[]{ + USER_0_EXTENSIONS.getUser(), + USER_1_EXTENSIONS.getUser() + }; + private static final Validator VALIDATOR = Validators.newValidator(); - public static final NonPersistentStoreFactory NON_PERSISTENT_STORE_FACTORY = new NonPersistentStoreFactory(); - public static final ConqueryConfig CONFIG = new ConqueryConfig().withStorage(NON_PERSISTENT_STORE_FACTORY); - private static final MetaStorage STORAGE = NON_PERSISTENT_STORE_FACTORY.createMetaStorage(); public static final InternalMapperFactory INTERNAL_MAPPER_FACTORY = new InternalMapperFactory(CONFIG, VALIDATOR); - public static final IndexService INDEX_SERVICE = new IndexService(CONFIG.getCsv().createCsvParserSettings(), "empty"); private static final DatasetRegistry DATASET_REGISTRY = new DatasetRegistry<>( @@ -81,54 +93,58 @@ public class StoredQueriesProcessorTest { new ClusterNamespaceHandler(new ClusterState(), CONFIG, 
INTERNAL_MAPPER_FACTORY), INDEX_SERVICE ); - private static final QueryProcessor QUERY_PROCESSOR = new QueryProcessor(DATASET_REGISTRY, STORAGE, CONFIG, VALIDATOR); - + private static final QueryProcessor processor = new QueryProcessor(DATASET_REGISTRY, STORAGE, CONFIG, VALIDATOR); + private static final ExcelResultProvider EXCEL_RESULT_PROVIDER = new ExcelResultProvider(); + private static final CsvResultProvider CSV_RESULT_PROVIDER = new CsvResultProvider(); + private static final ArrowResultProvider ARROW_RESULT_PROVIDER = new ArrowResultProvider(); + private static final ParquetResultProvider PARQUET_RESULT_PROVIDER = new ParquetResultProvider(); private static final Dataset DATASET_0 = new Dataset() {{ setName("dataset0"); }}; private static final Dataset DATASET_1 = new Dataset() {{ setName("dataset1"); }}; + private static ManagedExecutionId QUERY_ID_0; + private static ManagedExecutionId QUERY_ID_1; + private static ManagedExecutionId QUERY_ID_2; + private static ManagedExecutionId QUERY_ID_3; + private static ManagedExecutionId QUERY_ID_4; + private static ManagedExecutionId QUERY_ID_5; + private static ManagedExecutionId QUERY_ID_6; + private static ManagedExecutionId QUERY_ID_7; + private static ManagedExecutionId QUERY_ID_8; + private static ManagedExecutionId QUERY_ID_9; + private static ManagedExecutionId QUERY_ID_10; + private static List QUERIES; - private static final ManagedExecutionId QUERY_ID_0 = createExecutionId(DATASET_0, "0"); - private static final ManagedExecutionId QUERY_ID_1 = createExecutionId(DATASET_1, "1"); - private static final ManagedExecutionId QUERY_ID_2 = createExecutionId(DATASET_0, "2"); - private static final ManagedExecutionId QUERY_ID_3 = createExecutionId(DATASET_0, "3"); - private static final ManagedExecutionId QUERY_ID_4 = createExecutionId(DATASET_0, "4"); - private static final ManagedExecutionId QUERY_ID_5 = createExecutionId(DATASET_0, "5"); - private static final ManagedExecutionId QUERY_ID_6 = createExecutionId(DATASET_0, "6"); - private static final ManagedExecutionId QUERY_ID_7 = createExecutionId(DATASET_0, "7"); - private static final ManagedExecutionId QUERY_ID_8 = createExecutionId(DATASET_0, "8"); - private static final ManagedExecutionId QUERY_ID_9 = createExecutionId(DATASET_0, "9"); - private static final ManagedExecutionId QUERY_ID_10 = createExecutionId(DATASET_0, "10"); - public static final UriBuilder URI_BUILDER = UriBuilder.fromPath("http://localhost"); + @BeforeAll + public static void beforeAll() throws IOException { + new AuthorizationController(STORAGE, CONFIG, new Environment(StoredQueriesProcessorTest.class.getSimpleName()), null); - private static final ExcelResultProvider EXCEL_RESULT_PROVIDER = new ExcelResultProvider(); - private static final CsvResultProvider CSV_RESULT_PROVIDER = new CsvResultProvider(); - private static final ArrowResultProvider ARROW_RESULT_PROVIDER = new ArrowResultProvider(); - private static final ParquetResultProvider PARQUET_RESULT_PROVIDER = new ParquetResultProvider(); + DATASET_REGISTRY.createNamespace(DATASET_0, STORAGE, ENVIRONMENT); + DATASET_REGISTRY.createNamespace(DATASET_1, STORAGE, ENVIRONMENT); - private static ManagedExecutionId createExecutionId(Dataset dataset0, String s) { - StringBuilder idBuilder = new StringBuilder("00000000-0000-0000-0000-000000000000"); - idBuilder.replace(idBuilder.length() - s.length(), idBuilder.length(), s); - return new ManagedExecutionId(dataset0.getId(), UUID.fromString(idBuilder.toString())); - } + QUERY_ID_0 = createExecutionId(DATASET_0, 
"0"); + QUERY_ID_1 = createExecutionId(DATASET_1, "1"); + QUERY_ID_2 = createExecutionId(DATASET_0, "2"); + QUERY_ID_3 = createExecutionId(DATASET_0, "3"); + QUERY_ID_4 = createExecutionId(DATASET_0, "4"); + QUERY_ID_5 = createExecutionId(DATASET_0, "5"); + QUERY_ID_6 = createExecutionId(DATASET_0, "6"); + QUERY_ID_7 = createExecutionId(DATASET_0, "7"); + QUERY_ID_8 = createExecutionId(DATASET_0, "8"); + QUERY_ID_9 = createExecutionId(DATASET_0, "9"); + QUERY_ID_10 = createExecutionId(DATASET_0, "10"); - private static final User[] USERS = new User[]{ - mockUser(0, List.of(QUERY_ID_0, QUERY_ID_1, QUERY_ID_2, QUERY_ID_4, QUERY_ID_7, QUERY_ID_9, QUERY_ID_10)), - mockUser(1, List.of(QUERY_ID_3, QUERY_ID_4)) - }; + User user0 = USER_0_EXTENSIONS.getUser(); + for (ManagedExecutionId id : List.of(QUERY_ID_4, QUERY_ID_7, QUERY_ID_9, QUERY_ID_10)) { - private static List queries; + user0.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR, id)); + } - @BeforeAll - public static void beforeAll() throws IOException { - DATASET_REGISTRY.createNamespace(DATASET_0, STORAGE, ENVIRONMENT); - DATASET_REGISTRY.createNamespace(DATASET_1, STORAGE, ENVIRONMENT); - new AuthorizationController(STORAGE, CONFIG, new Environment(StoredQueriesProcessorTest.class.getSimpleName()), null); - queries = ImmutableList.of( + QUERIES= ImmutableList.of( mockManagedConceptQueryFrontEnd(USERS[0], QUERY_ID_0, NEW, DATASET_0, 100L), // included mockManagedConceptQueryFrontEnd(USERS[0], QUERY_ID_1, NEW, DATASET_1, 100L), // not included: wrong dataset mockManagedForm(USERS[0], QUERY_ID_2, NEW, DATASET_0), // not included: not a ManagedQuery @@ -146,47 +162,6 @@ public static void beforeAll() throws IOException { ); } - - @Test - public void getQueriesFiltered() { - List infos = QUERY_PROCESSOR.getQueriesFiltered(DATASET_0.getId(), URI_BUILDER, USERS[0], queries, true) - .collect(Collectors.toList()); - - assertThat(infos) - .containsExactly( - makeState(QUERY_ID_0, USERS[0], USERS[0], NEW, "CONCEPT_QUERY", null, 100L), - makeState(QUERY_ID_4, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 100L), - makeState(QUERY_ID_7, USERS[1], USERS[0], DONE, "SECONDARY_ID_QUERY", new SecondaryIdDescriptionId(DATASET_0.getId(), "sid"), 100L), - makeState(QUERY_ID_9, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 100L), - makeState(QUERY_ID_10, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 2_000_000L) - - ); - } - - private static User mockUser(int id, List allowedQueryIds) { - final User user = new User("user" + id, null, STORAGE); - - STORAGE.addUser(user); - - for (ManagedExecutionId queryId : allowedQueryIds) { - user.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR,queryId)); - } - - return user; - - } - - private static ManagedForm mockManagedForm(User user, ManagedExecutionId id, ExecutionState execState, final Dataset dataset) { - ManagedInternalForm managedInternalForm = new ManagedInternalForm<>(new ExportForm(), user, dataset, STORAGE, DATASET_REGISTRY) { - { - setCreationTime(LocalDateTime.MIN); - setQueryId(id.getExecution()); - } - }; - setState(execState, managedInternalForm.getId()); - return managedInternalForm; - } - private static void setState(ExecutionState execState, ManagedExecutionId id) { if (execState != NEW) { DistributedExecutionManager.DistributedState state = new DistributedExecutionManager.DistributedState(); @@ -197,6 +172,13 @@ private static void setState(ExecutionState execState, ManagedExecutionId id) { } } + private static ManagedExecutionId 
createExecutionId(Dataset dataset0, String s) { + StringBuilder idBuilder = new StringBuilder("00000000-0000-0000-0000-000000000000"); + idBuilder.replace(idBuilder.length() - s.length(), idBuilder.length(), s); + + return new ManagedExecutionId(dataset0.getId(), UUID.fromString(idBuilder.toString())); + } + private static ManagedQuery mockManagedConceptQueryFrontEnd(User user, ManagedExecutionId id, ExecutionState execState, Dataset dataset, long resultCount) { return mockManagedQuery( new ConceptQuery( @@ -210,20 +192,19 @@ private static ManagedQuery mockManagedConceptQueryFrontEnd(User user, ManagedEx execState, dataset, resultCount ); } - private static ManagedQuery mockManagedSecondaryIdQueryFrontEnd(User user, ManagedExecutionId id, ExecutionState execState, CQElement root, Dataset dataset){ - final SecondaryIdQuery sid = new SecondaryIdQuery(); - sid.setSecondaryId(new SecondaryIdDescription() {{ - setDataset(dataset); - setName("sid"); - }}); - sid.setRoot(root); - return mockManagedQuery(sid, user, id, execState, dataset, 100L); + private static ManagedForm mockManagedForm(User user, ManagedExecutionId id, ExecutionState execState, final Dataset dataset){ + return new ManagedInternalForm<>(new ExportForm(), user.getId(), dataset.getId(), STORAGE, DATASET_REGISTRY) { + { + setState(execState, id); + setCreationTime(LocalDateTime.MIN); + setQueryId(id.getExecution()); + } + }; } - private static ManagedQuery mockManagedQuery(Query queryDescription, User user, ManagedExecutionId id, ExecutionState execState, final Dataset dataset, final long resultCount) { - ManagedQuery managedQuery = new ManagedQuery(queryDescription, user, dataset, STORAGE, DATASET_REGISTRY) { + ManagedQuery managedQuery = new ManagedQuery(queryDescription, user.getId(), dataset.getId(), STORAGE, DATASET_REGISTRY) { { setCreationTime(LocalDateTime.MIN); setQueryId(id.getExecution()); @@ -232,7 +213,7 @@ private static ManagedQuery mockManagedQuery(Query queryDescription, User user, } @Override - public List getResultInfos(PrintSettings printSettings) { + public List getResultInfos() { // With method is mocked because the ExcelResultProvider needs some info to check dimensions, // but actually resolving the query here requires much more setup return Collections.emptyList(); @@ -242,11 +223,40 @@ public List getResultInfos(PrintSettings printSettings) { return managedQuery; } + private static ManagedQuery mockManagedSecondaryIdQueryFrontEnd(User user, ManagedExecutionId id, ExecutionState execState, CQElement root, Dataset dataset){ + final SecondaryIdQuery sIdQ = new SecondaryIdQuery(); + SecondaryIdDescription sId = new SecondaryIdDescription() {{ + setDataset(dataset.getId()); + setName("sid"); + }}; + sIdQ.setSecondaryId(sId.getId()); + sIdQ.setRoot(root); + + return mockManagedQuery(sIdQ, user, id, execState, dataset, 100L); + } + + @Test + public void getQueriesFiltered() { + + List infos = processor.getQueriesFiltered(DATASET_0.getId(), URI_BUILDER, USERS[0], QUERIES.stream(), true) + .collect(Collectors.toList()); + + assertThat(infos) + .containsExactly( + makeState(QUERY_ID_0, USERS[0], USERS[0], NEW, "CONCEPT_QUERY", null, 100L), + makeState(QUERY_ID_4, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 100L), + makeState(QUERY_ID_7, USERS[1], USERS[0], DONE, "SECONDARY_ID_QUERY", new SecondaryIdDescriptionId(DATASET_0.getId(), "sid"), 100L), + makeState(QUERY_ID_9, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 100L), + makeState(QUERY_ID_10, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 2_000_000L) 
+ + ); + } + @SneakyThrows private static ExecutionStatus makeState(ManagedExecutionId id, User owner, User callingUser, ExecutionState state, String typeLabel, SecondaryIdDescriptionId secondaryId, Long resultCount) { OverviewExecutionStatus status = new OverviewExecutionStatus(); - final ManagedQuery execMock = new ManagedQuery(null, owner, DATASET_0, STORAGE, DATASET_REGISTRY) { + final ManagedQuery execMock = new ManagedQuery(null, owner.getId(), DATASET_0.getId(), STORAGE, DATASET_REGISTRY) { { setQueryId(id.getExecution()); setLastResultCount(resultCount); @@ -254,7 +264,7 @@ private static ExecutionStatus makeState(ManagedExecutionId id, User owner, User } @Override - public List getResultInfos(PrintSettings printSettings) { + public List getResultInfos() { return Collections.emptyList(); } }; @@ -264,13 +274,14 @@ public List getResultInfos(PrintSettings printSettings) { status.setPristineLabel(true); status.setCreatedAt(LocalDateTime.MIN.atZone(ZoneId.systemDefault())); status.setOwner(owner.getId()); + status.setOwnerName(owner.getLabel()); status.setShared(false); status.setOwn(owner.equals(callingUser)); status.setId(id); status.setStatus(state); status.setQueryType(typeLabel); status.setNumberOfResults(resultCount); - status.setSecondaryId(secondaryId); // This is probably not interesting on the overview (only if there is an filter for the search) + status.setSecondaryId(secondaryId); // This is probably not interesting on the overview (only if there is a filter for the search) if(state.equals(DONE)) { List resultUrls = new ArrayList<>(); resultUrls.addAll(EXCEL_RESULT_PROVIDER.generateResultURLs(execMock, URI_BUILDER.clone(), true)); diff --git a/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java b/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java index f287060707..d1af7170eb 100644 --- a/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java +++ b/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java @@ -2,7 +2,8 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.when; import java.net.URL; import java.time.ZoneId; @@ -18,7 +19,6 @@ import com.bakdata.conquery.apiv1.forms.export_form.AbsoluteMode; import com.bakdata.conquery.apiv1.forms.export_form.ExportForm; import com.bakdata.conquery.apiv1.forms.export_form.RelativeMode; -import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.MutableInjectableValues; @@ -39,11 +39,12 @@ import com.bakdata.conquery.models.forms.frontendconfiguration.FormConfigProcessor; import com.bakdata.conquery.models.forms.frontendconfiguration.FormScanner; import com.bakdata.conquery.models.forms.frontendconfiguration.FormType; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.FormConfigId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.IdResolveContext; import 
com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.NonPersistentStoreFactory; @@ -59,6 +60,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestInstance.Lifecycle; +import org.mockito.Mockito; /** @@ -69,14 +71,11 @@ public class FormConfigTest { private final ConqueryConfig config = new ConqueryConfig(); - - private MetaStorage storage; - - private FormConfigProcessor processor; - private Validator validator = Validators.newValidatorFactory().getValidator(); - + private final Validator validator = Validators.newValidatorFactory().getValidator(); private final Dataset dataset = new Dataset("test"); private final Dataset dataset1 = new Dataset("test1"); + private MetaStorage storage; + private FormConfigProcessor processor; private DatasetId datasetId; private DatasetId datasetId1; private ExportForm form; @@ -90,22 +89,16 @@ public void setupTestClass() throws Exception { datasetId1 = dataset1.getId(); // Mock DatasetRegistry for translation - DatasetRegistry namespacesMock = mock(DatasetRegistry.class); - - doAnswer(invocation -> { - throw new UnsupportedOperationException("Not yet implemented"); - }).when(namespacesMock).getOptional(any()); + DatasetRegistry namespacesMock = Mockito.mock(DatasetRegistry.class); doAnswer(invocation -> { final DatasetId id = invocation.getArgument(0); - Namespace namespaceMock = mock(LocalNamespace.class); + Namespace namespaceMock = Mockito.mock(LocalNamespace.class); if (id.equals(datasetId)) { when(namespaceMock.getDataset()).thenReturn(dataset); - } - else if (id.equals(datasetId1)) { + } else if (id.equals(datasetId1)) { when(namespaceMock.getDataset()).thenReturn(dataset1); - } - else { + } else { throw new IllegalStateException("Unknown dataset id."); } return namespaceMock; @@ -117,20 +110,17 @@ else if (id.equals(datasetId1)) { storage = new NonPersistentStoreFactory().createMetaStorage(); ((MutableInjectableValues) FormConfigProcessor.getMAPPER().getInjectableValues()) - .add(IdResolveContext.class, namespacesMock); + .add(NamespacedStorageProvider.class, namespacesMock); processor = new FormConfigProcessor(validator, storage, namespacesMock); AuthorizationController controller = new AuthorizationController(storage, config, new Environment(this.getClass().getSimpleName()), null); controller.start(); + } @BeforeEach public void setupTest() { - - user = new User("test", "test", storage); - storage.addUser(user); - - final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), user, dataset, null, null); + final ManagedQuery managedQuery = new ManagedQuery(null, new UserId("test"), dataset.getId(), storage, null); managedQuery.setQueryId(UUID.randomUUID()); form = new ExportForm(); @@ -138,6 +128,11 @@ public void setupTest() { form.setTimeMode(mode); form.setQueryGroupId(managedQuery.getId()); mode.setForm(form); + + + user = new User("test", "test", storage); + user.setMetaStorage(storage); + storage.addUser(user); } @AfterEach @@ -157,7 +152,7 @@ public void addConfigWithoutTranslation() { processor.addConfig(user, dataset, formConfig); - assertThat(storage.getAllFormConfigs()).containsExactly(formConfig.intern(user, dataset.getId())); + assertThat(storage.getAllFormConfigs()).containsExactly(formConfig.intern(user.getId(), dataset.getId())); } @Test @@ -168,6 +163,7 @@ public void deleteConfig() { ObjectMapper mapper = FormConfigProcessor.getMAPPER(); FormConfig formConfig = new 
FormConfig(form.getClass().getAnnotation(CPSType.class).id(), mapper.valueToTree(form)); formConfig.setDataset(dataset.getId()); + formConfig.setOwner(user.getId()); user.addPermission(formConfig.createPermission(AbilitySets.FORM_CONFIG_CREATOR)); storage.addFormConfig(formConfig); @@ -190,7 +186,7 @@ public void getConfig() { JsonNode values = mapper.valueToTree(form); FormConfig formConfig = new FormConfig(form.getClass().getAnnotation(CPSType.class).id(), values); formConfig.setDataset(dataset.getId()); - formConfig.setOwner(user); + formConfig.setOwner(user.getId()); user.addPermission(formConfig.createPermission(Ability.READ.asSet())); storage.addFormConfig(formConfig); @@ -338,13 +334,16 @@ public void patchConfig() { patchedFormExpected.setLabel("newTestLabel"); patchedFormExpected.setShared(true); patchedFormExpected.setTags(new String[]{"tag1", "tag2"}); - patchedFormExpected.setOwner(user); + patchedFormExpected.setOwner(user.getId()); patchedFormExpected.setValues(new ObjectNode(mapper.getNodeFactory(), Map.of("test-Node", new TextNode("test-text")))); + final String[] fieldsToIgnore = new String[] {FormConfig.Fields.creationTime, "cachedId", "metaStorage", "nsIdResolver"}; final FormConfigId formId = config.getId(); - assertThat(storage.getFormConfig(formId)).usingRecursiveComparison() - .ignoringFields("cachedId", FormConfig.Fields.creationTime) - .isEqualTo(patchedFormExpected); + assertThat(storage.getFormConfig(formId)) + .usingRecursiveComparison() + .usingOverriddenEquals() + .ignoringFields(fieldsToIgnore) + .isEqualTo(patchedFormExpected); assertThat(storage.getGroup(group1.getId()).getPermissions()).contains(FormConfigPermission.onInstance(AbilitySets.SHAREHOLDER, formId)); assertThat(storage.getGroup(group2.getId()).getPermissions()).doesNotContain(FormConfigPermission.onInstance(AbilitySets.SHAREHOLDER, formId)); @@ -363,7 +362,7 @@ public void patchConfig() { patchedFormExpected.setShared(false); assertThat(storage.getFormConfig(formId)).usingRecursiveComparison() - .ignoringFields("cachedId", FormConfig.Fields.creationTime) + .ignoringFields(fieldsToIgnore) .isEqualTo(patchedFormExpected); assertThat(storage.getGroup(group1.getId()).getPermissions()).doesNotContain(FormConfigPermission.onInstance(AbilitySets.SHAREHOLDER, formId)); diff --git a/backend/src/test/java/com/bakdata/conquery/api/form/config/TestForm.java b/backend/src/test/java/com/bakdata/conquery/api/form/config/TestForm.java index 018d1d9aef..d201265ca3 100644 --- a/backend/src/test/java/com/bakdata/conquery/api/form/config/TestForm.java +++ b/backend/src/test/java/com/bakdata/conquery/api/form/config/TestForm.java @@ -11,11 +11,11 @@ import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.worker.DatasetRegistry; @@ -25,7 +25,7 @@ public abstract class TestForm extends Form implements InternalForm { 
@Override - public ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { return new ManagedInternalForm<>(this, user, submittedDataset, storage, datasetRegistry); } diff --git a/backend/src/test/java/com/bakdata/conquery/execution/DefaultSqlCDateSetParserTest.java b/backend/src/test/java/com/bakdata/conquery/execution/DefaultSqlCDateSetParserTest.java index d6643ae4bb..6225b7ef06 100644 --- a/backend/src/test/java/com/bakdata/conquery/execution/DefaultSqlCDateSetParserTest.java +++ b/backend/src/test/java/com/bakdata/conquery/execution/DefaultSqlCDateSetParserTest.java @@ -6,7 +6,7 @@ import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.query.PrintSettings; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; +import com.bakdata.conquery.models.query.resultinfo.printers.StringResultPrinters; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.sql.execution.DefaultSqlCDateSetParser; import org.junit.jupiter.api.Assertions; @@ -17,6 +17,7 @@ class DefaultSqlCDateSetParserTest { private static final DefaultSqlCDateSetParser parser = new DefaultSqlCDateSetParser(); + private static final StringResultPrinters csvResultPrinters = new StringResultPrinters(); private static final ConqueryConfig CONFIG = new ConqueryConfig(); private static final PrintSettings PLAIN = new PrintSettings(false, Locale.ENGLISH, null, CONFIG, null, null); @@ -24,7 +25,7 @@ class DefaultSqlCDateSetParserTest { @MethodSource("testToEpochDayRangeListProvider") public void testToEpochDayRangeList(String input, String expected, String message) { List> epochDayRangeList = parser.toEpochDayRangeList(input); - String actual = ResultPrinters.printerFor(new ResultType.ListT(ResultType.Primitive.DATE_RANGE), PLAIN).print(epochDayRangeList); + final String actual = (String) csvResultPrinters.>>printerFor(new ResultType.ListT<>(ResultType.Primitive.DATE_RANGE), PLAIN).apply(epochDayRangeList); Assertions.assertEquals(expected, actual, message); } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/DownloadLinkGeneration.java b/backend/src/test/java/com/bakdata/conquery/integration/DownloadLinkGeneration.java index aa939d6fde..a145aa9810 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/DownloadLinkGeneration.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/DownloadLinkGeneration.java @@ -2,12 +2,15 @@ import static org.assertj.core.api.Assertions.assertThat; +import java.io.InputStream; import java.net.URI; import java.util.Set; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; import com.bakdata.conquery.apiv1.execution.ResultAsset; +import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.integration.common.IntegrationUtils; +import com.bakdata.conquery.integration.json.ConqueryTestSpec; import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.integration.json.QueryTest; import com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest; @@ -32,8 +35,8 @@ public void execute(StandaloneSupport conquery) throws Exception { final User user = new User("testU", "testU", storage); - final String testJson = 
In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll(); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(conquery.getDataset(), testJson); + final InputStream testJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().asStream(); + final QueryTest test = (QueryTest) new JsonIntegrationTest(testJson).getTestSpec(); storage.updateUser(user); @@ -41,8 +44,11 @@ public void execute(StandaloneSupport conquery) throws Exception { ValidatorHelper.failOnError(log, conquery.getValidator().validate(test)); test.importRequiredData(conquery); + // Parse the query in the context of the conquery instance, not the test, to have the IdResolver properly set + Query query = ConqueryTestSpec.parseSubTree(conquery, test.getRawQuery(), Query.class, false); + // Create execution for download - ManagedQuery exec = new ManagedQuery(test.getQuery(), user, conquery.getDataset(), storage, conquery.getDatasetRegistry()); + ManagedQuery exec = new ManagedQuery(query, user.getId(), conquery.getDataset().getId(), storage, conquery.getDatasetRegistry()); exec.setLastResultCount(100L); storage.addExecution(exec); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java index e37943068e..f9ec952a71 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java @@ -7,7 +7,14 @@ import java.io.InputStream; import java.net.URI; import java.nio.file.Files; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -25,6 +32,7 @@ import com.bakdata.conquery.models.config.DatabaseConfig; import com.bakdata.conquery.models.config.Dialect; import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.models.config.XodusStoreFactory; import com.bakdata.conquery.util.support.ConfigOverride; import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; @@ -43,9 +51,9 @@ public class IntegrationTests { public static final ObjectMapper MAPPER; - private static final ObjectWriter CONFIG_WRITER; public static final String JSON_TEST_PATTERN = ".*\\.test\\.json$"; public static final String SQL_TEST_PATTERN = ".*\\.json$"; + private static final ObjectWriter CONFIG_WRITER; static { @@ -59,15 +67,13 @@ public class IntegrationTests { CONFIG_WRITER = MAPPER.writerFor(ConqueryConfig.class); } - + @Getter + public final ConqueryConfig config = new ConqueryConfig(); private final Map reusedInstances = new HashMap<>(); - private final String defaultTestRoot; private final String defaultTestRootPackage; @Getter private final File workDir; - @Getter - public final ConqueryConfig config = new ConqueryConfig(); @SneakyThrows(IOException.class) public IntegrationTests(String defaultTestRoot, String defaultTestRootPackage) { @@ -77,6 +83,31 @@ public IntegrationTests(String defaultTestRoot, String defaultTestRootPackage) { ConfigOverride.configurePathsAndLogging(this.config, this.workDir); } + private static DynamicContainer 
toDynamicContainer(ResourceTree currentDir, List list) { + list.sort(Comparator.comparing(DynamicNode::getDisplayName)); + return dynamicContainer( + currentDir.getName(), + URI.create("classpath:/" + currentDir.getFullName() + "/"), + list.stream() + ); + } + + private static DynamicTest wrapError(Resource resource, String name, Exception e) { + return DynamicTest.dynamicTest( + name, + resource.getURI(), + () -> { + throw e; + } + ); + } + + private static ResourceTree scanForResources(String testRoot, String pattern) { + ResourceTree tree = new ResourceTree(null, null); + tree.addAll(CPSTypeIdResolver.SCAN_RESULT.getResourcesMatchingPattern(Pattern.compile("^" + testRoot + pattern))); + return tree; + } + public List jsonTests() { TestDataImporter testImporter = new WorkerTestDataImporter(); final String testRoot = Objects.requireNonNullElse(System.getenv(TestTags.TEST_DIRECTORY_ENVIRONMENT_VARIABLE), defaultTestRoot); @@ -85,6 +116,12 @@ public List jsonTests() { return collectTestTree(tree, testRoot, testImporter, dialect); } + @SneakyThrows + public Stream sqlProgrammaticTests(DatabaseConfig databaseConfig, TestSqlConnectorConfig sqlConfig, TestDataImporter testDataImporter) { + this.config.setSqlConnectorConfig(sqlConfig); + return programmaticTests(testDataImporter, StandaloneSupport.Mode.SQL); + } + @SneakyThrows public Stream programmaticTests(TestDataImporter testImporter, StandaloneSupport.Mode mode) { String regexFilter = System.getenv(TestTags.TEST_PROGRAMMATIC_REGEX_FILTER); @@ -121,13 +158,15 @@ public Stream programmaticTests(TestDataImporter testImporter, Stan .map(programmaticIntegrationTest -> createDynamicProgrammaticTestNode(programmaticIntegrationTest, testImporter)); } - @SneakyThrows - public Stream sqlProgrammaticTests(DatabaseConfig databaseConfig, TestSqlConnectorConfig sqlConfig, TestDataImporter testDataImporter) { - this.config.setSqlConnectorConfig(sqlConfig); - return programmaticTests(testDataImporter, StandaloneSupport.Mode.SQL); + private DynamicTest createDynamicProgrammaticTestNode(ProgrammaticIntegrationTest test, TestDataImporter testImporter) { + return DynamicTest.dynamicTest( + test.getClass().getSimpleName(), + //classpath URI + URI.create("classpath:/" + test.getClass().getName().replace('.', '/') + ".java"), + new IntegrationTest.Wrapper(test.getClass().getSimpleName(), this, test, testImporter) + ); } - @SneakyThrows public List sqlQueryTests(DatabaseConfig databaseConfig, TestSqlConnectorConfig sqlConfig, TestDataImporter testDataImporter) { this.config.setSqlConnectorConfig(sqlConfig); @@ -150,15 +189,6 @@ private List collectTestTree(ResourceTree tree, String testRoot, Te .collect(Collectors.toList()); } - private DynamicTest createDynamicProgrammaticTestNode(ProgrammaticIntegrationTest test, TestDataImporter testImporter) { - return DynamicTest.dynamicTest( - test.getClass().getSimpleName(), - //classpath URI - URI.create("classpath:/" + test.getClass().getName().replace('.', '/') + ".java"), - new IntegrationTest.Wrapper(test.getClass().getSimpleName(), this, test, testImporter) - ); - } - private DynamicNode collectTests(ResourceTree currentDir, TestDataImporter testImporter, Dialect sqlDialect) { if (currentDir.getValue() != null) { Optional dynamicTest = readTest(currentDir.getValue(), currentDir.getName(), testImporter, sqlDialect); @@ -173,15 +203,6 @@ private DynamicNode collectTests(ResourceTree currentDir, TestDataImporter testI return toDynamicContainer(currentDir, list); } - private static DynamicContainer 
toDynamicContainer(ResourceTree currentDir, List list) { - list.sort(Comparator.comparing(DynamicNode::getDisplayName)); - return dynamicContainer( - currentDir.getName(), - URI.create("classpath:/" + currentDir.getFullName() + "/"), - list.stream() - ); - } - private Optional readTest(Resource resource, String name, TestDataImporter testImporter, Dialect sqlDialect) { try (InputStream in = resource.open()) { JsonIntegrationTest test = new JsonIntegrationTest(in); @@ -198,16 +219,6 @@ private Optional readTest(Resource resource, String name, TestDataI } } - private static DynamicTest wrapError(Resource resource, String name, Exception e) { - return DynamicTest.dynamicTest( - name, - resource.getURI(), - () -> { - throw e; - } - ); - } - private DynamicTest wrapTest(Resource resource, String name, JsonIntegrationTest test, TestDataImporter testImporter) { String testLabel = Optional.ofNullable(test.getTestSpec().getLabel()) // If no label was defined use the filename part before the first dot @@ -236,22 +247,24 @@ public synchronized TestConquery getCachedConqueryInstance(File workDir, Conquer // This should be fast enough and a stable comparison String confString = CONFIG_WRITER.writeValueAsString(conf); if (!reusedInstances.containsKey(confString)) { - // For the overriden config we must override the ports so there are no clashes + + // For the overriden config we must override the ports and storage path (xodus) so there are no clashes // We do it here so the config "hash" is not influenced by the port settings ConfigOverride.configureRandomPorts(conf); + + if (conf.getStorage() instanceof XodusStoreFactory storeFactory) { + ConfigOverride.configureWorkdir(storeFactory, workDir.toPath().resolve(String.valueOf(confString.hashCode()))); + } + log.trace("Creating a new test conquery instance for test {}", conf); TestConquery conquery = new TestConquery(workDir, conf, testDataImporter); reusedInstances.put(confString, conquery); + + // Start the fresh instance conquery.beforeAll(); } TestConquery conquery = reusedInstances.get(confString); return conquery; } - private static ResourceTree scanForResources(String testRoot, String pattern) { - ResourceTree tree = new ResourceTree(null, null); - tree.addAll(CPSTypeIdResolver.SCAN_RESULT.getResourcesMatchingPattern(Pattern.compile("^" + testRoot + pattern))); - return tree; - } - } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/AutoConceptUtil.java b/backend/src/test/java/com/bakdata/conquery/integration/common/AutoConceptUtil.java index 15428973db..5ea49928d5 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/AutoConceptUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/AutoConceptUtil.java @@ -43,11 +43,11 @@ public static TreeConcept createConcept(Table table) { final TreeConcept concept = new TreeConcept(); concept.setName(table.getName() + CONCEPT_NAME_SUFFIX); - // Prepare connnector + // Prepare connector final ConceptTreeConnector connector = new ConceptTreeConnector(); connector.setConcept(concept); connector.setName(CONNECTOR_NAME); - connector.setTable(table); + connector.setTable(table.getId()); // Prepare selects List getAutoSelectsForColumn(Column column) { final String prefix = column.getName() + "_"; // Create basic single column selects - final LastValueSelect last = new LastValueSelect(column, null); + final LastValueSelect last = new LastValueSelect(column.getId(), null); last.setName(prefix + LastValueSelect.class.getAnnotation(CPSType.class).id()); 
- last.setColumn(column); + last.setColumn(column.getId()); - final FirstValueSelect first = new FirstValueSelect(column, null); + final FirstValueSelect first = new FirstValueSelect(column.getId(), null); first.setName(prefix + FirstValueSelect.class.getAnnotation(CPSType.class).id()); - first.setColumn(column); + first.setColumn(column.getId()); - final DistinctSelect distinct = new DistinctSelect(column, null); + final DistinctSelect distinct = new DistinctSelect(column.getId(), null); distinct.setName(prefix + DistinctSelect.class.getAnnotation(CPSType.class).id()); - distinct.setColumn(column); + distinct.setColumn(column.getId()); return List.of( last, diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java b/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java index 248929d32b..eb9f10097d 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java @@ -6,6 +6,7 @@ import java.net.URI; import java.util.List; import java.util.Map; + import jakarta.ws.rs.client.Entity; import jakarta.ws.rs.core.GenericType; import jakarta.ws.rs.core.MediaType; @@ -28,6 +29,9 @@ import com.bakdata.conquery.resources.hierarchies.HierarchyHelper; import com.bakdata.conquery.util.support.StandaloneSupport; import com.fasterxml.jackson.databind.JsonNode; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import lombok.experimental.UtilityClass; import lombok.extern.slf4j.Slf4j; @@ -59,7 +63,7 @@ public static void importPermissionConstellation(MetaStorage storage, Role[] rol public static Query parseQuery(StandaloneSupport support, JsonNode rawQuery) throws JSONException, IOException { - return ConqueryTestSpec.parseSubTree(support, rawQuery, Query.class); + return ConqueryTestSpec.parseSubTree(support, rawQuery, Query.class, true); } /** @@ -82,8 +86,9 @@ public static ManagedExecutionId assertQueryResult(StandaloneSupport conquery, O .post(Entity.entity(query, MediaType.APPLICATION_JSON_TYPE)); - assertThat(response.getStatusInfo().getStatusCode()).as("Result of %s", postQueryURI) - .isEqualTo(expectedResponseCode); + assertThat(response.getStatusInfo().getStatusCode()) + .as(() -> response.readEntity(String.class)) + .isEqualTo(expectedResponseCode); if (expectedState == ExecutionState.FAILED && !response.getStatusInfo().getFamily().equals(Response.Status.Family.SUCCESSFUL)) { return null; diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/LoadingUtil.java b/backend/src/test/java/com/bakdata/conquery/integration/common/LoadingUtil.java index f99d089b9f..9cd0d2ede6 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/LoadingUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/LoadingUtil.java @@ -18,10 +18,12 @@ import java.util.List; import java.util.Map; import java.util.UUID; +import jakarta.ws.rs.client.Client; import jakarta.ws.rs.client.Entity; import jakarta.ws.rs.client.Invocation; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.ConqueryConstants; import com.bakdata.conquery.apiv1.query.ConceptQuery; @@ -48,6 +50,7 @@ import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.resources.ResourceConstants; import 
com.bakdata.conquery.resources.admin.rest.AdminDatasetResource; +import com.bakdata.conquery.resources.admin.rest.AdminDatasetsResource; import com.bakdata.conquery.resources.hierarchies.HierarchyHelper; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.support.StandaloneSupport; @@ -64,7 +67,21 @@ @UtilityClass public class LoadingUtil { - public static void importPreviousQueries(StandaloneSupport support, RequiredData content, User user) throws IOException, JSONException { + public static void importDataset(Client client, UriBuilder adminUriBuilder, Dataset dataset) { + + final URI uri = HierarchyHelper.hierarchicalPath(adminUriBuilder, AdminDatasetsResource.class, "addDataset") + .build(); + + final Invocation.Builder request = client.target(uri).request(MediaType.APPLICATION_JSON_TYPE); + try (final Response response = request.post(Entity.json(dataset))) { + + assertThat(response.getStatusInfo().getFamily()) + .describedAs(new LazyTextDescription(() -> response.readEntity(String.class))) + .isEqualTo(Response.Status.Family.SUCCESSFUL); + } + } + + public static void importPreviousQueries(StandaloneSupport support, RequiredData content, User user) throws IOException { // Load previous query results if available int id = 1; for (ResourceFile queryResults : content.getPreviousQueryResults()) { @@ -76,7 +93,7 @@ public static void importPreviousQueries(StandaloneSupport support, RequiredData ConceptQuery query = new ConceptQuery(new CQExternal(Arrays.asList("ID", "DATE_SET"), data, false)); ExecutionManager executionManager = support.getNamespace().getExecutionManager(); - ManagedExecution managed = executionManager.createExecution(query, queryId, user, support.getNamespace(), false); + ManagedExecution managed = executionManager.createExecution(query, queryId, user.getId(), support.getNamespace(), false); user.addPermission(managed.createPermission(AbilitySets.QUERY_CREATOR)); @@ -87,11 +104,13 @@ public static void importPreviousQueries(StandaloneSupport support, RequiredData for (JsonNode queryNode : content.getPreviousQueries()) { - Query query = ConqueryTestSpec.parseSubTree(support, queryNode, Query.class); + Query query = ConqueryTestSpec.parseSubTree(support, queryNode, Query.class, false); + + // Since we don't submit the query but injecting it into the manager we need to set the id resolver UUID queryId = new UUID(0L, id++); ExecutionManager executionManager = support.getNamespace().getExecutionManager(); - ManagedExecution managed = executionManager.createExecution(query, queryId, user, support.getNamespace(), false); + ManagedExecution managed = executionManager.createExecution(query, queryId, user.getId(), support.getNamespace(), false); user.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR, managed.getId())); @@ -109,13 +128,13 @@ public static void importPreviousQueries(StandaloneSupport support, RequiredData public static void importTables(StandaloneSupport support, List tables, boolean autoConcept) throws JSONException { for (RequiredTable rTable : tables) { - final Table table = rTable.toTable(support.getDataset(), support.getNamespace().getStorage().getCentralRegistry()); + final Table table = rTable.toTable(support.getDataset(), support.getNamespace().getStorage()); uploadTable(support, table); if (autoConcept) { final TreeConcept concept = AutoConceptUtil.createConcept(table); - uploadConcept(support, table.getDataset(), concept); + uploadConcept(support, table.getDataset().resolve(), concept); } } } @@ -133,6 
+152,25 @@ private static void uploadTable(StandaloneSupport support, Table table) { } } + public static void uploadConcept(StandaloneSupport support, Dataset dataset, Concept concept) { + final URI uri = HierarchyHelper.hierarchicalPath(support.defaultAdminURIBuilder(), AdminDatasetResource.class, "addConcept") + .buildFromMap(Map.of(ResourceConstants.DATASET, dataset.getId().toString())); + + final Invocation.Builder request = support.getClient().target(uri).request(MediaType.APPLICATION_JSON_TYPE); + try (final Response response = request.post(Entity.json(concept))) { + + assertThat(response.getStatusInfo().getFamily()) + .describedAs(new LazyTextDescription(() -> response.readEntity(String.class))) + .isEqualTo(Response.Status.Family.SUCCESSFUL); + } + } + + public static void importTableContents(StandaloneSupport support, Collection tables) throws Exception { + List cqpps = generateCqpp(support, tables); + + importCqppFiles(support, cqpps); + } + public static List generateCqpp(StandaloneSupport support, Collection tables) throws Exception { List preprocessedFiles = new ArrayList<>(); List descriptions = new ArrayList<>(); @@ -174,6 +212,16 @@ public static List generateCqpp(StandaloneSupport support, Collection cqppFiles) { + for (File cqpp : cqppFiles) { + uploadCqpp(support, cqpp, false, Response.Status.Family.SUCCESSFUL); + } + + support.waitUntilWorkDone(); + + + } + public static void uploadCqpp(StandaloneSupport support, File cqpp, boolean update, Response.Status.Family expectedResponseFamily) { if(update) { assertThat(cqpp).exists(); @@ -210,22 +258,6 @@ public static void uploadCqpp(StandaloneSupport support, File cqpp, boolean upda } } - public static void importCqppFiles(StandaloneSupport support, List cqppFiles) { - for (File cqpp : cqppFiles) { - uploadCqpp(support, cqpp, false, Response.Status.Family.SUCCESSFUL); - } - - support.waitUntilWorkDone(); - - - } - - public static void importTableContents(StandaloneSupport support, Collection tables) throws Exception { - List cqpps = generateCqpp(support, tables); - - importCqppFiles(support, cqpps); - } - public static void importConcepts(StandaloneSupport support, ArrayNode rawConcepts) throws JSONException, IOException { Dataset dataset = support.getDataset(); @@ -233,7 +265,7 @@ public static void importConcepts(StandaloneSupport support, ArrayNode rawConcep support, rawConcepts, Concept.class, - c -> c.setDataset(support.getDataset()) + c -> c.setDataset(support.getDataset().getDataset()) ); for (Concept concept : concepts) { @@ -241,39 +273,25 @@ public static void importConcepts(StandaloneSupport support, ArrayNode rawConcep } } - public static void uploadConcept(StandaloneSupport support, Dataset dataset, Concept concept) { - final URI uri = HierarchyHelper.hierarchicalPath(support.defaultAdminURIBuilder(), AdminDatasetResource.class, "addConcept") - .buildFromMap(Map.of(ResourceConstants.DATASET, dataset.getId().toString())); + public static void updateConcepts(StandaloneSupport support, ArrayNode rawConcepts, @NonNull Response.Status.Family expectedResponseFamily) + throws IOException { + List> concepts = getConcepts(support, rawConcepts); + for (Concept concept : concepts) { + updateConcept(support, concept, expectedResponseFamily); + } - final Invocation.Builder request = support.getClient().target(uri).request(MediaType.APPLICATION_JSON_TYPE); - try (final Response response = request.post(Entity.json(concept))) { - assertThat(response.getStatusInfo().getFamily()) - .describedAs(new LazyTextDescription(() -> 
response.readEntity(String.class))) - .isEqualTo(Response.Status.Family.SUCCESSFUL); - } } - private static List> getConcepts(StandaloneSupport support, ArrayNode rawConcepts) throws IOException { return ConqueryTestSpec.parseSubTreeList( support, rawConcepts, Concept.class, - c -> c.setDataset(support.getDataset()) + c -> c.setDataset(support.getDataset().getDataset()) ); } - public static void updateConcepts(StandaloneSupport support, ArrayNode rawConcepts, @NonNull Response.Status.Family expectedResponseFamily) - throws IOException { - List> concepts = getConcepts(support, rawConcepts); - for (Concept concept : concepts) { - updateConcept(support, concept, expectedResponseFamily); - } - - - } - private static void updateConcept(@NonNull StandaloneSupport support, @NonNull Concept concept, @NonNull Response.Status.Family expectedResponseFamily) { final URI conceptURI = @@ -300,7 +318,7 @@ public static void importIdMapping(StandaloneSupport support, RequiredData conte } try (InputStream in = content.getIdMapping().stream()) { - support.getDatasetsProcessor().setIdMapping(in, support.getNamespace()); + support.getAdminDatasetsProcessor().setIdMapping(in, support.getNamespace()); } } @@ -309,10 +327,9 @@ public static Map importSecondaryIds(StandaloneS for (RequiredSecondaryId required : secondaryIds) { final SecondaryIdDescription description = - required.toSecondaryId(support.getDataset(), support.getDatasetRegistry().findRegistry(support.getDataset().getId())); + required.toSecondaryId(support.getDataset()); - support.getDatasetsProcessor() - .addSecondaryId(support.getNamespace(), description); + uploadSecondaryId(support, description); out.put(description.getName(), description); } @@ -320,6 +337,27 @@ public static Map importSecondaryIds(StandaloneS return out; } + private static void uploadSecondaryId(@NonNull StandaloneSupport support, @NonNull SecondaryIdDescription secondaryIdDescription) { + final URI + conceptURI = + HierarchyHelper.hierarchicalPath(support.defaultAdminURIBuilder(), AdminDatasetResource.class, "addSecondaryId") + .buildFromMap(Map.of( + ResourceConstants.DATASET, support.getDataset().getId() + )); + + final Invocation.Builder request = support.getClient() + .target(conceptURI) + .request(MediaType.APPLICATION_JSON); + try (final Response response = request + .post(Entity.entity(secondaryIdDescription, MediaType.APPLICATION_JSON_TYPE))) { + + + assertThat(response.getStatusInfo().getFamily()) + .describedAs(new LazyTextDescription(() -> response.readEntity(String.class))) + .isEqualTo(Response.Status.Family.SUCCESSFUL); + } + } + public static void importInternToExternMappers(StandaloneSupport support, List internToExternMappers) { for (InternToExternMapper internToExternMapper : internToExternMappers) { uploadInternalToExternalMappings(support, internToExternMapper); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredColumn.java b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredColumn.java index 3d187f970a..4bf413bb23 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredColumn.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredColumn.java @@ -1,17 +1,17 @@ package com.bakdata.conquery.integration.common; import javax.annotation.Nullable; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.models.datasets.Column; import 
com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.events.MajorTypeId; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.preproc.outputs.CopyOutput; import com.bakdata.conquery.models.preproc.outputs.OutputDescription; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; import org.assertj.core.util.Strings; @@ -43,7 +43,7 @@ public OutputDescription createOutput() { return out; } - public Column toColumn(Table table, CentralRegistry storage) { + public Column toColumn(Table table, NamespacedStorageProvider idResolver) { Column col = new Column(); col.setName(name); col.setType(type); @@ -51,9 +51,10 @@ public Column toColumn(Table table, CentralRegistry storage) { col.setDescription(description); if (!Strings.isNullOrEmpty(secondaryId)) { - final SecondaryIdDescription description = storage.resolve(new SecondaryIdDescriptionId(table.getDataset().getId(), secondaryId)); + SecondaryIdDescriptionId secondaryIdDescriptionId = new SecondaryIdDescriptionId(table.getDataset(), secondaryId); + final SecondaryIdDescription description = secondaryIdDescriptionId.get(idResolver.getStorage(table.getDataset())); - col.setSecondaryId(description); + col.setSecondaryId(description.getId()); } return col; diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredSecondaryId.java b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredSecondaryId.java index 6b2db6d950..7a11ec2aaf 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredSecondaryId.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredSecondaryId.java @@ -2,17 +2,16 @@ import java.io.IOException; import java.util.Objects; +import jakarta.validation.constraints.NotEmpty; import com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; -import jakarta.validation.constraints.NotEmpty; import lombok.Data; @Data @@ -25,14 +24,14 @@ public class RequiredSecondaryId { public final String mapping; - public SecondaryIdDescription toSecondaryId(Dataset dataset, CentralRegistry centralRegistry) { + public SecondaryIdDescription toSecondaryId(Dataset dataset) { final SecondaryIdDescription desc = new SecondaryIdDescription(); desc.setName(getName()); desc.setDescription(getDescription()); desc.setLabel(getLabel()); if (mapping != null) { - desc.setMapping(centralRegistry.resolve(InternToExternMapperId.Parser.INSTANCE.parsePrefixed(dataset.getName(), mapping))); + desc.setMapping(InternToExternMapperId.Parser.INSTANCE.parsePrefixed(dataset.getName(), mapping)); } return desc; diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredTable.java 
b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredTable.java index 3de4cf93bf..87a05756c2 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredTable.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredTable.java @@ -3,19 +3,19 @@ import java.io.IOException; import java.util.Arrays; import java.util.Objects; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; import net.minidev.json.annotate.JsonIgnore; @@ -24,39 +24,39 @@ @Setter public class RequiredTable { - @NotNull - @NotEmpty - private String name; - @NotNull - private ResourceFile csv; - @NotNull - @Valid - private RequiredColumn primaryColumn; - @NotEmpty - @Valid - private RequiredColumn[] columns; - @JsonIgnore - private String importName; + @NotNull + @NotEmpty + private String name; + @NotNull + private ResourceFile csv; + @NotNull + @Valid + private RequiredColumn primaryColumn; + @NotEmpty + @Valid + private RequiredColumn[] columns; + @JsonIgnore + private String importName; - public Table toTable(Dataset dataset, CentralRegistry centralRegistry) { - Table table = new Table(); - table.setPrimaryColumn(primaryColumn.toColumn(table, centralRegistry)); - table.setDataset(dataset); - table.setName(name); - table.setColumns(Arrays.stream(columns) - .map(col -> col.toColumn(table, centralRegistry)).toArray(Column[]::new)); + @JsonCreator + public static RequiredTable fromFile(String fileResource) throws JsonParseException, JsonMappingException, IOException { + return Jackson.MAPPER.readValue( + Objects.requireNonNull( + IntegrationTest.class.getResourceAsStream(fileResource), + fileResource + " not found" + ), + RequiredTable.class + ); + } - return table; - } + public Table toTable(Dataset dataset, NamespacedStorageProvider idResolver) { + Table table = new Table(); + table.setPrimaryColumn(primaryColumn.toColumn(table, idResolver)); + table.setDataset(dataset.getId()); + table.setName(name); + table.setColumns(Arrays.stream(columns) + .map(col -> col.toColumn(table, idResolver)).toArray(Column[]::new)); - @JsonCreator - public static RequiredTable fromFile(String fileResource) throws JsonParseException, JsonMappingException, IOException { - return Jackson.MAPPER.readValue( - Objects.requireNonNull( - IntegrationTest.class.getResourceAsStream(fileResource), - fileResource + " not found" - ), - RequiredTable.class - ); - } + return table; + } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/AbstractQueryEngineTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/AbstractQueryEngineTest.java index c80ac829cf..a5bf248eab 100644 --- 
a/backend/src/test/java/com/bakdata/conquery/integration/json/AbstractQueryEngineTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/AbstractQueryEngineTest.java @@ -20,7 +20,6 @@ import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.ManagedQuery; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.results.EntityResult; @@ -56,10 +55,7 @@ public void executeTest(StandaloneSupport standaloneSupport) throws IOException final ManagedExecution execution = standaloneSupport.getMetaStorage().getExecution(executionId); SingleTableResult executionResult = (SingleTableResult) execution; - //check result info size - PrintSettings printSettings = new PrintSettings(true, Locale.ROOT, standaloneSupport.getNamespace(), standaloneSupport.getConfig(), null, null); - - List resultInfos = executionResult.getResultInfos(printSettings); + List resultInfos = executionResult.getResultInfos(); assertThat(executionResult.streamResults(OptionalLong.empty()).flatMap(EntityResult::streamValues)) .as("Should have same size as result infos") diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java b/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java index f886d29a39..0cedb7574c 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java @@ -4,22 +4,21 @@ import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; - import javax.annotation.Nullable; import com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.models.config.ColumnConfig; +import com.bakdata.conquery.io.storage.FailingProvider; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.Dialect; import com.bakdata.conquery.models.config.IdColumnConfig; -import com.bakdata.conquery.models.exceptions.JSONException; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.util.FailingMetaStorage; import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestSupport; @@ -37,67 +36,53 @@ @Setter @Getter -@JsonTypeInfo(use = JsonTypeInfo.Id.CUSTOM, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.CUSTOM, property = "type") @Slf4j @CPSBase public abstract class ConqueryTestSpec { + @Nullable + SqlSpec sqlSpec; private String label; - @Nullable private String description; - @Nullable private ConqueryConfig config; - - @Nullable - SqlSpec sqlSpec; - // default IdColumnConfig for SQL mode private IdColumnConfig idColumns = null; - public ConqueryConfig overrideConfig(ConqueryConfig config) { - - if (getConfig() != null) { - final ConqueryConfig conqueryConfig = 
getConfig().withStorage(new NonPersistentStoreFactory()); - conqueryConfig.setLoggingFactory(config.getLoggingFactory()); - return conqueryConfig; - } - - final IdColumnConfig idColumnConfig = idColumns != null ? idColumns : config.getIdColumns(); - return config.withIdColumns(idColumnConfig) - .withStorage(new NonPersistentStoreFactory()); - } - - public abstract void executeTest(StandaloneSupport support) throws Exception; - - public abstract void importRequiredData(StandaloneSupport support) throws Exception; - - - @Override - public String toString() { - return label; - } - - public static T parseSubTree(TestSupport support, JsonNode node, Class expectedClass) throws IOException, JSONException { - return parseSubTree(support, node, expectedClass, null); + public static T parseSubTree(TestSupport support, JsonNode node, Class expectedClass, boolean usePlaceholderResolvers) + throws IOException { + return parseSubTree(support, node, expectedClass, null, usePlaceholderResolvers); } - public static T parseSubTree(TestSupport support, JsonNode node, Class expectedClass, Consumer modifierBeforeValidation) throws IOException { - return parseSubTree(support, node, Jackson.MAPPER.getTypeFactory().constructParametricType(expectedClass, new JavaType[0]), modifierBeforeValidation); + public static T parseSubTree( + TestSupport support, + JsonNode node, + Class expectedClass, + Consumer modifierBeforeValidation, + boolean usePlaceholderResolvers + ) throws IOException { + return parseSubTree(support, node, Jackson.MAPPER.getTypeFactory() + .constructParametricType(expectedClass, new JavaType[0]), modifierBeforeValidation, usePlaceholderResolvers); } - public static T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType) throws IOException, JSONException { - return parseSubTree(support, node, expectedType, null); - } + public static T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType, Consumer modifierBeforeValidation, + boolean usePlaceholderResolvers) throws IOException { + final ObjectMapper om = Jackson.copyMapperAndInjectables(Jackson.MAPPER); + final ObjectMapper mapper = om.addHandler(new DatasetPlaceHolderFiller(support)); - public static T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType, Consumer modifierBeforeValidation) throws IOException { - final ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); - support.getDataset().injectInto(mapper); - support.getNamespace().injectInto(mapper); - support.getMetaStorage().injectInto(mapper); support.getConfig().injectInto(mapper); - mapper.addHandler(new DatasetPlaceHolderFiller(support)); + support.getDataset().injectInto(mapper); + if (usePlaceholderResolvers) { + FailingProvider.INSTANCE.injectInto(mapper); + FailingMetaStorage.INSTANCE.injectInto(mapper); + } + else { + support.getMetaStorage().injectInto(mapper); + support.getNamespace().getStorage().injectInto(mapper); + } + T result = mapper.readerFor(expectedType).readValue(node); @@ -105,18 +90,27 @@ public static T parseSubTree(TestSupport support, JsonNode node, JavaType ex modifierBeforeValidation.accept(result); } - ValidatorHelper.failOnError(log, support.getValidator().validate(result)); + if (!usePlaceholderResolvers) { + // With placeholders the validation likely fails, so we skip it there + ValidatorHelper.failOnError(log, support.getValidator().validate(result)); + } return result; } + public static T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType, boolean 
usePlaceholderResolvers) + throws IOException { + return parseSubTree(support, node, expectedType, null, usePlaceholderResolvers); + } + public static List parseSubTreeList(TestSupport support, ArrayNode node, Class expectedType, Consumer modifierBeforeValidation) throws IOException { - final ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); - support.getDataset().injectInto(mapper); - support.getNamespace().injectInto(mapper); - support.getMetaStorage().injectInto(mapper); - support.getConfig().injectInto(mapper); - mapper.addHandler(new DatasetPlaceHolderFiller(support)); + final ObjectMapper om = Jackson.copyMapperAndInjectables(Jackson.MAPPER); + final ObjectMapper mapper = om.addHandler(new DatasetPlaceHolderFiller(support)); + + // Inject dataset, so that namespaced ids that are not prefixed with in the test-spec are get prefixed + support.getNamespace().getDataset().injectInto(mapper); + FailingProvider.INSTANCE.injectInto(mapper); + FailingMetaStorage.INSTANCE.injectInto(mapper); mapper.setConfig(mapper.getDeserializationConfig().withView(View.Api.class)); @@ -145,11 +139,32 @@ public static List parseSubTreeList(TestSupport support, ArrayNode node, modifierBeforeValidation.accept(value); } result.add(value); - ValidatorHelper.failOnError(log, support.getValidator().validate(value)); } return result; } + public ConqueryConfig overrideConfig(ConqueryConfig config) { + + if (getConfig() != null) { + final ConqueryConfig conqueryConfig = getConfig().withStorage(new NonPersistentStoreFactory()); + conqueryConfig.setLoggingFactory(config.getLoggingFactory()); + return conqueryConfig; + } + + final IdColumnConfig idColumnConfig = idColumns != null ? idColumns : config.getIdColumns(); + return config.withIdColumns(idColumnConfig) + .withStorage(new NonPersistentStoreFactory()); + } + + public abstract void executeTest(StandaloneSupport support) throws Exception; + + public abstract void importRequiredData(StandaloneSupport support) throws Exception; + + @Override + public String toString() { + return label; + } + public boolean isEnabled(Dialect sqlDialect) { return sqlSpec == null || sqlSpec.isEnabled() && sqlSpec.isAllowedTest(sqlDialect); } @@ -163,9 +178,9 @@ private static class DatasetPlaceHolderFiller extends DeserializationProblemHand private final TestSupport support; @Override - public Object handleWeirdStringValue(DeserializationContext ctxt, Class targetType, String valueToConvert, String failureMsg) throws IOException { - IdUtil.Parser parser = IdUtil.>>createParser((Class) targetType); - return parser.parsePrefixed(support.getDataset().getId().toString(), valueToConvert); + public Object handleWeirdStringValue(DeserializationContext ctxt, Class targetType, String valueToConvert, String failureMsg) { + IdUtil.Parser parser = IdUtil.>>createParser((Class) targetType); + return parser.parsePrefixed(support.getDataset().getId().getName(), valueToConvert); } } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java index e815960212..46190a6e75 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java @@ -1,38 +1,32 @@ package com.bakdata.conquery.integration.json; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; import java.io.ByteArrayInputStream; import 
java.io.ByteArrayOutputStream; import java.io.IOException; -import java.util.List; import java.util.Locale; import java.util.Map; import java.util.OptionalLong; -import java.util.concurrent.TimeUnit; import jakarta.validation.Valid; import jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.forms.Form; +import com.bakdata.conquery.integration.common.IntegrationUtils; import com.bakdata.conquery.integration.common.RequiredData; import com.bakdata.conquery.integration.common.ResourceFile; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.result.csv.CsvRenderer; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.exceptions.JSONException; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.forms.managed.ManagedForm; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; +import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; -import com.bakdata.conquery.models.query.ExecutionManager; -import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.models.query.resultinfo.ResultInfo; -import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.io.IdColumnUtil; import com.bakdata.conquery.util.support.StandaloneSupport; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -79,39 +73,26 @@ public boolean isWithConcepts() { return rawConcepts != null || content.isAutoConcept(); } - @Override - public void importRequiredData(StandaloneSupport support) throws Exception { - support.getTestImporter().importFormTestData(support, this); - log.info("{} PARSE JSON FORM DESCRIPTION", getLabel()); - form = parseForm(support); - } - @Override public void executeTest(StandaloneSupport support) throws Exception { - Namespace namespace = support.getNamespace(); - assertThat(support.getValidator().validate(form)) - .describedAs("Form Validation Errors") - .isEmpty(); + final ManagedExecutionId managedExecutionId = IntegrationUtils.assertQueryResult(support, form, -1, ExecutionState.DONE, support.getTestUser(), 201); - ExecutionManager executionManager = support.getNamespace().getExecutionManager(); - ManagedInternalForm managedForm = (ManagedInternalForm) executionManager - .runQuery(namespace, form, support.getTestUser(), support.getConfig(), false); + log.info("{} QUERIES EXECUTED", getLabel()); - ExecutionState executionState = namespace.getExecutionManager().awaitDone(managedForm, 10, TimeUnit.MINUTES); - if (executionState != ExecutionState.DONE) { - if (managedForm.getState() == ExecutionState.FAILED) { - fail(getLabel() + " Query failed"); - } - else { - fail(getLabel() + " not finished after 10 min"); - } - } + checkResults(support, (ManagedInternalForm) support.getMetaStorage().getExecution(managedExecutionId), support.getTestUser()); + } - log.info("{} QUERIES EXECUTED", getLabel()); + @Override + public void importRequiredData(StandaloneSupport support) throws Exception { + support.getTestImporter().importFormTestData(support, this); + log.info("{} PARSE JSON FORM DESCRIPTION", getLabel()); + form = parseForm(support); + } - checkResults(support, managedForm, support.getTestUser()); + private Form 
parseForm(StandaloneSupport support) throws IOException { + return parseSubTree(support, rawForm, Form.class, true); } private void checkResults(StandaloneSupport standaloneSupport, ManagedInternalForm managedForm, User user) throws IOException { @@ -132,47 +113,8 @@ private void checkResults(StandaloneSupport standaloneSupport, ManagedInternalFo } - /** - * Checks result of subqueries instead of form result. - * - * @see FormTest#checkSingleResult(ManagedForm, ConqueryConfig, PrintSettings) - */ - private void checkMultipleResult(Map> managedMapping, ConqueryConfig config, PrintSettings printSettings) throws IOException { - for (Map.Entry> managed : managedMapping.entrySet()) { - List resultInfos = managed.getValue().get(0).getResultInfos(printSettings); - log.info("{} CSV TESTING: {}", getLabel(), managed.getKey()); - - ByteArrayOutputStream output = new ByteArrayOutputStream(); - - final CsvWriter writer = config.getCsv().createWriter(output); - - CsvRenderer renderer = new CsvRenderer(writer, printSettings); - - renderer.toCSV( - config.getIdColumns().getIdResultInfos(printSettings), - resultInfos, - managed.getValue() - .stream() - .flatMap(managedQuery -> managedQuery.streamResults(OptionalLong.empty())) - ); - - writer.close(); - output.close(); - - assertThat(In.stream(new ByteArrayInputStream(output.toByteArray())).withUTF8().readLines()) - .as("Checking result " + managed.getKey()) - .containsExactlyInAnyOrderElementsOf( - In.stream(expectedCsv.get(managed.getKey()).stream()) - .withUTF8() - .readLines() - ); - } - } - /** * The form produces only one result, so the result is directly requested. - * - * @see FormTest#checkMultipleResult(Map, ConqueryConfig, PrintSettings) */ private & SingleTableResult> void checkSingleResult(F managedForm, ConqueryConfig config, PrintSettings printSettings) throws IOException { @@ -183,9 +125,9 @@ private & SingleTableResult> void checkSingleResult(F final CsvRenderer renderer = new CsvRenderer(writer, printSettings); renderer.toCSV( - config.getIdColumns().getIdResultInfos(printSettings), - managedForm.getResultInfos(printSettings), - managedForm.streamResults(OptionalLong.empty()) + config.getIdColumns().getIdResultInfos(), + managedForm.getResultInfos(), + managedForm.streamResults(OptionalLong.empty()), printSettings ); writer.close(); @@ -201,9 +143,4 @@ private & SingleTableResult> void checkSingleResult(F } - - - private Form parseForm(StandaloneSupport support) throws JSONException, IOException { - return parseSubTree(support, rawForm, Form.class); - } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/WorkerTestDataImporter.java b/backend/src/test/java/com/bakdata/conquery/integration/json/WorkerTestDataImporter.java index ba050711b1..06be3476a6 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/WorkerTestDataImporter.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/WorkerTestDataImporter.java @@ -10,6 +10,7 @@ import com.bakdata.conquery.integration.common.RequiredTable; import com.bakdata.conquery.integration.json.filter.FilterTest; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.util.support.StandaloneSupport; public class WorkerTestDataImporter implements TestDataImporter { @@ -53,12 +54,18 @@ public void importFilterTestData(StandaloneSupport support, FilterTest test) thr importSearchIndexes(support, test.getSearchIndices()); 
importTables(support, content.getTables(), content.isAutoConcept()); + test.setConnector(ConqueryTestSpec.parseSubTree( - support, - test.getRawConnector(), - ConceptTreeConnector.class, - conn -> conn.setConcept(test.getConcept()) - )); + support, + test.getRawConnector(), + ConceptTreeConnector.class, + conn -> { + conn.setTable(new TableId(support.getDataset().getDataset(), FilterTest.TABLE_NAME)); + conn.setConcept(test.getConcept()); + }, + true + ) + ); test.getConcept().setConnectors(Collections.singletonList((ConceptTreeConnector) test.getConnector())); waitUntilDone(support, () -> LoadingUtil.uploadConcept(support, support.getDataset(), test.getConcept())); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java index dade5aca56..2c09a96303 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java @@ -6,7 +6,6 @@ import java.time.LocalDate; import java.util.Collections; import java.util.List; - import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.frontend.FrontendFilterConfiguration; @@ -28,6 +27,7 @@ import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; import com.bakdata.conquery.models.exceptions.JSONException; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; @@ -46,6 +46,8 @@ @CPSType(id = "FILTER_TEST", base = ConqueryTestSpec.class) public class FilterTest extends AbstractQueryEngineTest { + public static final String CONCEPT_LABEL = "concept"; + public static final String TABLE_NAME = "table"; private ResourceFile expectedCsv; @NotNull @@ -79,22 +81,23 @@ public class FilterTest extends AbstractQueryEngineTest { @JsonIgnore private Connector connector; + @JsonIgnore private TreeConcept concept; @Override public void importRequiredData(StandaloneSupport support) throws Exception { - ((ObjectNode) rawContent.get("tables")).put("name", "table"); + ((ObjectNode) rawContent.get("tables")).put("name", TABLE_NAME); - content = parseSubTree(support, rawContent, RequiredData.class); + content = parseSubTree(support, rawContent, RequiredData.class, true); concept = new TreeConcept(); - concept.setLabel("concept"); + concept.setLabel(CONCEPT_LABEL); - concept.setDataset(support.getDataset()); + concept.setDataset(new DatasetId(support.getDataset().getId().getName())); rawConnector.put("name", "connector"); - rawConnector.put("table", "table"); + rawConnector.put(TABLE_NAME, TABLE_NAME); ((ObjectNode) rawConnector.get("filters")).put("name", "filter"); @@ -113,18 +116,18 @@ private Query parseQuery(StandaloneSupport support) throws JSONException, IOExce } - FilterValue result = parseSubTree(support, rawFilterValue, Jackson.MAPPER.getTypeFactory().constructType(FilterValue.class)); + FilterValue result = parseSubTree(support, rawFilterValue, Jackson.MAPPER.getTypeFactory().constructType(FilterValue.class), false); CQTable cqTable = new CQTable(); cqTable.setFilters(Collections.singletonList(result)); - cqTable.setConnector(connector); + cqTable.setConnector(connector.getId()); CQConcept cqConcept = new 
CQConcept();
 		cqTable.setConcept(cqConcept);

-		cqConcept.setElements(Collections.singletonList(concept));
+		cqConcept.setElements(Collections.singletonList(concept.getId()));
 		cqConcept.setTables(Collections.singletonList(cqTable));

 		if (dateRange != null) {
@@ -134,15 +137,11 @@ private Query parseQuery(StandaloneSupport support) throws JSONException, IOExce
 		return new ConceptQuery(cqConcept);
 	}

-	@Override
-	public Query getQuery() {
-		return query;
-	}
-
 	@Override
 	public void executeTest(StandaloneSupport standaloneSupport) throws IOException {
 		try {
-			final FrontendFilterConfiguration.Top actual = connector.getFilters().iterator().next().createFrontendConfig(standaloneSupport.getConfig());
+			final Connector internalConnector = standaloneSupport.getNamespace().getStorage().getAllConcepts().findFirst().get().getConnectors().get(0);
+			final FrontendFilterConfiguration.Top actual = internalConnector.getFilters().iterator().next().createFrontendConfig(standaloneSupport.getConfig());

 			if (expectedFrontendConfig != null) {
 				log.info("Checking actual FrontendConfig: {}", actual);
@@ -155,4 +154,9 @@ public void executeTest(StandaloneSupport standaloneSupport) throws IOException
 			super.executeTest(standaloneSupport);
 		}
+
+	@Override
+	public Query getQuery() {
+		return query;
+	}

 }
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java
index 603b94445f..9076db091a 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java
@@ -58,14 +58,14 @@ public void execute(StandaloneSupport conquery) throws Exception {
 		final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());

-		// Id of the lone concept that is used in the test.
-		Concept conceptId = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
+		// The lone concept that is used in the test.
+		Concept concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();

 		IntegrationUtils.assertQueryResult(conquery, query, -1, ExecutionState.FAILED, user, 403);

 		// Add the necessary Permission
 		{
-			final ConqueryPermission permission = conceptId.createPermission(Ability.READ.asSet());
+			final ConqueryPermission permission = concept.createPermission(Ability.READ.asSet());
 			log.info("Adding the Permission[{}] to User[{}]", permission, user);
 			user.addPermission(permission);
 		}

diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityExportTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityExportTest.java
index 476e6a4fae..9aa3cfacd6 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityExportTest.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityExportTest.java
@@ -12,6 +12,9 @@
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
+import jakarta.ws.rs.client.Entity;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;

 import com.bakdata.conquery.apiv1.AdditionalMediaTypes;
 import com.bakdata.conquery.apiv1.execution.ResultAsset;
@@ -22,11 +25,10 @@
 import com.bakdata.conquery.models.common.Range;
 import com.bakdata.conquery.models.datasets.Dataset;
 import com.bakdata.conquery.models.datasets.PreviewConfig;
-import com.bakdata.conquery.models.datasets.concepts.Concept;
-import com.bakdata.conquery.models.datasets.concepts.Connector;
 import com.bakdata.conquery.models.exceptions.ValidatorHelper;
 import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId;
+import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId;
 import com.bakdata.conquery.models.identifiable.ids.specific.SelectId;
 import com.bakdata.conquery.models.query.ColumnDescriptor;
 import com.bakdata.conquery.models.query.preview.EntityPreviewStatus;
@@ -40,9 +42,6 @@
 import com.bakdata.conquery.util.support.StandaloneSupport;
 import com.bakdata.conquery.util.support.TestConquery;
 import com.github.powerlibraries.io.In;
-import jakarta.ws.rs.client.Entity;
-import jakarta.ws.rs.core.MediaType;
-import jakarta.ws.rs.core.Response;
 import lombok.extern.slf4j.Slf4j;
 import org.assertj.core.description.LazyTextDescription;

@@ -123,17 +122,16 @@ public void execute(String name, TestConquery testConquery) throws Exception {
 		final URI entityExport = HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), DatasetQueryResource.class, "getEntityData")
 												.buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName()));

-		// Api uses NsIdRef so we have to use the real objects here.
- final List allConnectors = conquery.getNamespaceStorage().getAllConcepts().stream() - .map(Concept::getConnectors) - .flatMap(List::stream) - .collect(Collectors.toList()); - final EntityPreviewStatus result; try (Response allEntityDataResponse = conquery.getClient().target(entityExport) .request(MediaType.APPLICATION_JSON_TYPE) .header("Accept-Language", "en-Us") - .post(Entity.json(new EntityPreviewRequest("ID", "1", dateRange, allConnectors)))) { + .post(Entity.json(new EntityPreviewRequest("ID", "1", dateRange, + List.of( + ConnectorId.Parser.INSTANCE.parse(dataset.getName() + ".tree1.connector"), + ConnectorId.Parser.INSTANCE.parse(dataset.getName() + ".tree2.connector") + ) + )))) { assertThat(allEntityDataResponse.getStatusInfo().getFamily()) .describedAs(new LazyTextDescription(() -> allEntityDataResponse.readEntity(String.class))) @@ -153,7 +151,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { new ColumnDescriptor( "Values", "Values", "Description", "LIST[STRING]", Set.of(new SemanticType.SelectResultT( - conquery.getNamespace().getCentralRegistry().resolve(valuesSelectId) + valuesSelectId )) ) ); @@ -182,7 +180,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { 9, ResultType.Primitive.INTEGER.typeInfo(), null, - Set.of(new SemanticType.SelectResultT(conquery.getDatasetRegistry().resolve(SelectId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree1.connector.age")))) + Set.of(new SemanticType.SelectResultT(SelectId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree1.connector.age"))) ), new EntityPreviewStatus.Info( "Values", @@ -190,14 +188,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { new ResultType.ListT(ResultType.Primitive.STRING).typeInfo(), null, Set.of( - new SemanticType.SelectResultT(conquery.getDatasetRegistry().resolve(valuesSelectId)) + new SemanticType.SelectResultT(valuesSelectId) ) ) ); - - assertThat(result.getColumnDescriptions()) .isNotNull() .isNotEmpty(); @@ -210,8 +206,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(t2values.get().getDescription()).isEqualTo("This is a column"); assertThat(t2values.get().getSemantics()) .contains( - new SemanticType.ConceptColumnT(conquery.getDatasetRegistry() - .resolve(ConceptId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree2"))) + new SemanticType.ConceptColumnT(ConceptId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree2")) ); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityResolveTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityResolveTest.java index 7009dcf4d3..0b830e1628 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityResolveTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityResolveTest.java @@ -8,6 +8,9 @@ import java.util.List; import java.util.Map; import java.util.Set; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; import com.bakdata.conquery.integration.common.LoadingUtil; @@ -24,9 +27,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; 
import org.assertj.core.description.LazyTextDescription; @@ -75,9 +75,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName())); // Api uses NsIdRef, so we have to use the real objects here. - final Filter filter = conquery.getDatasetRegistry().resolve( - FilterId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree1.connector.values-filter") - ); + FilterId filterId = FilterId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree1.connector.values-filter"); + Filter filter = filterId.get(conquery.getNamespaceStorage()); final List> result; @@ -87,8 +86,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .post(Entity.json( new FilterValue[]{ // Bit lazy, but this explicitly or's two filters - new FilterValue.CQMultiSelectFilter((Filter>) filter, Set.of("A1")), - new FilterValue.CQMultiSelectFilter((Filter>) filter, Set.of("B2")) + new FilterValue.CQMultiSelectFilter(filter.getId(), Set.of("A1")), + new FilterValue.CQMultiSelectFilter(filter.getId(), Set.of("B2")) } ))) { diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java index ef5723c00a..570ace712d 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java @@ -46,80 +46,83 @@ @Slf4j public class ExternalFormBackendTest implements ProgrammaticIntegrationTest { - public static final String FORM_BACKEND_ID = "mock"; private ClientAndServer formBackend; @Override public void execute(String name, TestConquery testConquery) throws Exception { - - final StandaloneSupport support = testConquery.getSupport(name); - - log.info("Test health"); - assertThat(testConquery.getStandaloneCommand() - .getManagerNode() - .getEnvironment() - .healthChecks() - .runHealthCheck(FORM_BACKEND_ID) - .isHealthy()) - .describedAs("Checking health of form backend").isTrue(); - - log.info("Get external form configs"); - final FormScanner formScanner = testConquery.getStandaloneCommand().getManagerNode().getFormScanner(); - formScanner.execute(Collections.emptyMap(), null); - - final String externalFormId = FormBackendConfig.createSubTypedId("SOME_EXTERNAL_FORM"); - assertThat(FormScanner.FRONTEND_FORM_CONFIGS.keySet()).contains(externalFormId); - - log.info("Get version info"); - final UriBuilder apiUriBuilder = testConquery.getSupport(name).defaultApiURIBuilder(); - final URI frontendConfigURI = HierarchyHelper.hierarchicalPath(apiUriBuilder.clone(), ConfigResource.class, "getFrontendConfig") - .build(); - final FrontendConfiguration - frontendConfiguration = - support.getClient().target(frontendConfigURI).request(MediaType.APPLICATION_JSON_TYPE).get().readEntity(FrontendConfiguration.class); - - assertThat(frontendConfiguration.versions()) - .describedAs("Checking health of form backend") - .contains(new VersionContainer(FORM_BACKEND_ID, "3.2.1-ge966c285", ZonedDateTime.parse("2007-08-31T16:47:00+00:00"))); // example value from OpenAPI Spec - - log.info("Send an external form"); - final User testUser = support.getTestUser(); - final ManagedExecutionId - managedExecutionId = - IntegrationUtils.assertQueryResult(support, String.format("{\"type\": \"%s\", \"testProp\": \"testVal\"}", externalFormId), -1, ExecutionState.DONE, testUser, 201); - - 
log.info("Request state"); - assert managedExecutionId != null; - final FullExecutionStatus executionStatus = IntegrationUtils.getExecutionStatus(support, managedExecutionId, testUser, 200); + try { + + final StandaloneSupport support = testConquery.getSupport(name); + + log.info("Test health"); + assertThat(testConquery.getStandaloneCommand() + .getManagerNode() + .getEnvironment() + .healthChecks() + .runHealthCheck(FORM_BACKEND_ID) + .isHealthy()) + .describedAs("Checking health of form backend").isTrue(); + + log.info("Get external form configs"); + final FormScanner formScanner = testConquery.getStandaloneCommand().getManagerNode().getFormScanner(); + formScanner.execute(Collections.emptyMap(), null); + + final String externalFormId = FormBackendConfig.createSubTypedId("SOME_EXTERNAL_FORM"); + assertThat(FormScanner.FRONTEND_FORM_CONFIGS.keySet()).contains(externalFormId); + + log.info("Get version info"); + final UriBuilder apiUriBuilder = testConquery.getSupport(name).defaultApiURIBuilder(); + final URI frontendConfigURI = HierarchyHelper.hierarchicalPath(apiUriBuilder.clone(), ConfigResource.class, "getFrontendConfig") + .build(); + final FrontendConfiguration + frontendConfiguration = + support.getClient().target(frontendConfigURI).request(MediaType.APPLICATION_JSON_TYPE).get().readEntity(FrontendConfiguration.class); + + assertThat(frontendConfiguration.versions()) + .describedAs("Checking health of form backend") + .contains(new VersionContainer(FORM_BACKEND_ID, "3.2.1-ge966c285", ZonedDateTime.parse("2007-08-31T16:47:00+00:00"))); // example value from OpenAPI Spec + + log.info("Send an external form"); + final User testUser = support.getTestUser(); + final ManagedExecutionId + managedExecutionId = + IntegrationUtils.assertQueryResult(support, String.format("{\"type\": \"%s\", \"testProp\": \"testVal\"}", externalFormId), -1, ExecutionState.DONE, testUser, 201); + + log.info("Request state"); + assert managedExecutionId != null; + final FullExecutionStatus executionStatus = IntegrationUtils.getExecutionStatus(support, managedExecutionId, testUser, 200); assertThat(executionStatus.getStatus()).isEqualTo(ExecutionState.DONE); - // Generate asset urls and check them in the status - final ManagedExecution storedExecution = testConquery.getSupport(name).getMetaStorage().getExecution(managedExecutionId); - final URI - downloadUrlAsset1 = - ResultExternalResource.getDownloadURL(apiUriBuilder.clone(), (ExternalExecution) storedExecution, executionStatus.getResultUrls() - .get(0) - .getAssetId()); - final URI - downloadUrlAsset2 = - ResultExternalResource.getDownloadURL(apiUriBuilder.clone(), (ExternalExecution) storedExecution, executionStatus.getResultUrls() - .get(1) - .getAssetId()); + // Generate asset urls and check them in the status + final ManagedExecution storedExecution = testConquery.getSupport(name).getMetaStorage().getExecution(managedExecutionId); + final URI + downloadUrlAsset1 = + ResultExternalResource.getDownloadURL(apiUriBuilder.clone(), (ExternalExecution) storedExecution, executionStatus.getResultUrls() + .get(0) + .getAssetId()); + final URI + downloadUrlAsset2 = + ResultExternalResource.getDownloadURL(apiUriBuilder.clone(), (ExternalExecution) storedExecution, executionStatus.getResultUrls() + .get(1) + .getAssetId()); + - assertThat(executionStatus.getResultUrls()).containsExactly(new ResultAsset("Result", downloadUrlAsset1), new ResultAsset("Another Result", downloadUrlAsset2)); + assertThat(executionStatus.getResultUrls()).containsExactly(new 
ResultAsset("Result", downloadUrlAsset1), new ResultAsset("Another Result", downloadUrlAsset2)); - log.info("Download Result"); - final String - response = - support.getClient().target(executionStatus.getResultUrls().get(0).url()).request(TEXT_PLAIN_TYPE).get(String.class); + log.info("Download Result"); + final String + response = + support.getClient().target(executionStatus.getResultUrls().get(0).url()).request(TEXT_PLAIN_TYPE).get(String.class); - assertThat(response).isEqualTo("Hello"); + assertThat(response).isEqualTo("Hello"); - log.info("Stopping mock form backend server"); - formBackend.stop(); + log.info("Stopping mock form backend server"); + } finally { + formBackend.stop(); + } } @Override diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterAutocompleteTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterAutocompleteTest.java index 3846fb4286..468e9e1361 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterAutocompleteTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterAutocompleteTest.java @@ -3,6 +3,7 @@ import static com.bakdata.conquery.resources.ResourceConstants.*; import static org.assertj.core.api.Assertions.assertThat; +import java.io.File; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; @@ -11,6 +12,10 @@ import java.util.Optional; import java.util.OptionalInt; import java.util.Set; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.client.Invocation; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.FilterTemplate; import com.bakdata.conquery.apiv1.frontend.FrontendValue; @@ -19,6 +24,7 @@ import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.CSVConfig; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SelectFilter; @@ -31,10 +37,6 @@ import com.bakdata.conquery.resources.hierarchies.HierarchyHelper; import com.bakdata.conquery.util.support.StandaloneSupport; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.client.Invocation; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -56,6 +58,12 @@ public Set forModes() { return Set.of(StandaloneSupport.Mode.WORKER, StandaloneSupport.Mode.SQL); } + @Override + public ConqueryConfig overrideConfig(ConqueryConfig conf, File workdir) { + conf.getIndex().setEmptyLabel("emptyDefaultLabel"); + return conf; + } + @Override public void execute(StandaloneSupport conquery) throws Exception { final SelectFilter filter = setupSearch(conquery); @@ -71,7 +79,7 @@ public void execute(StandaloneSupport conquery) throws Exception { Map.of( DATASET, conquery.getDataset().getId(), CONCEPT, concept.getId(), - TABLE, filter.getConnector().getTable().getId(), + TABLE, filter.getConnector().getResolvedTable().getId(), FILTER, filter.getId() ) ); @@ -153,22 +161,30 @@ private static SelectFilter setupSearch(StandaloneSupport conquery) throws Ex final CSVConfig csvConf = conquery.getConfig().getCsv(); NamespaceStorage namespaceStorage = conquery.getNamespace().getStorage(); - final Concept concept = 
namespaceStorage.getAllConcepts().stream().filter(c -> c.getName().equals("geschlecht_select")).findFirst().orElseThrow(); + final Concept concept = namespaceStorage.getAllConcepts().filter(c -> c.getName().equals("geschlecht_select")).findFirst().orElseThrow(); final Connector connector = concept.getConnectors().iterator().next(); final SelectFilter filter = (SelectFilter) connector.getFilters().iterator().next(); // Copy search csv from resources to tmp folder. - final Path tmpCSv = Files.createTempFile("conquery_search", "csv"); + // TODO this file is not deleted at the end of this test + final Path tmpCsv = Files.createTempFile("conquery_search", "csv"); Files.write( - tmpCSv, + tmpCsv, String.join(csvConf.getLineSeparator(), RAW_LINES).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE ); - final IndexService indexService = new IndexService(conquery.getConfig().getCsv().createCsvParserSettings(), "emptyDefaultLabel"); + IndexService indexService = conquery.getDatasetRegistry().getIndexService(); + + final FilterTemplate + filterTemplate = + new FilterTemplate(conquery.getDataset().getId(), "test", tmpCsv.toUri(), "id", "{{label}}", "Hello this is {{option}}", 2, true, indexService); + filter.setTemplate(filterTemplate.getId()); - filter.setTemplate(new FilterTemplate(conquery.getDataset(), "test", tmpCSv.toUri(), "id", "{{label}}", "Hello this is {{option}}", 2, true, indexService)); + // We need to persist the modification before we submit the update matching stats request + namespaceStorage.addSearchIndex(filterTemplate); + namespaceStorage.updateConcept(concept); final URI matchingStatsUri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder() , AdminDatasetResource.class, "postprocessNamespace") diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterResolutionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterResolutionTest.java index 62a292c2f7..f136a67f4a 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterResolutionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterResolutionTest.java @@ -9,7 +9,6 @@ import java.nio.file.StandardOpenOption; import java.util.List; import java.util.Map; - import jakarta.ws.rs.client.Entity; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; @@ -19,6 +18,7 @@ import com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.integration.json.ConqueryTestSpec; import com.bakdata.conquery.integration.json.JsonIntegrationTest; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.CSVConfig; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.Connector; @@ -66,8 +66,10 @@ public void execute(StandaloneSupport conquery) throws Exception { conquery.waitUntilWorkDone(); + // Prepare the concept by injecting a filter template - final Concept concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next(); + NamespaceStorage namespaceStorage = conquery.getNamespace().getStorage(); + final Concept concept = namespaceStorage.getAllConcepts().iterator().next(); final Connector connector = concept.getConnectors().iterator().next(); final SelectFilter filter = (SelectFilter) connector.getFilters().iterator().next(); @@ -79,7 +81,14 @@ public void execute(StandaloneSupport conquery) throws Exception { final IndexService 
indexService = new IndexService(conquery.getConfig().getCsv().createCsvParserSettings(), "emptyDefaultLabel");
-		filter.setTemplate(new FilterTemplate(conquery.getDataset(), "test", tmpCSv.toUri(), "HEADER", "", "", 2, true, indexService));
+		final FilterTemplate
+				filterTemplate =
+				new FilterTemplate(conquery.getDataset().getId(), "test", tmpCSv.toUri(), "HEADER", "", "", 2, true, indexService);
+		filter.setTemplate(filterTemplate.getId());
+
+		// We need to persist the modification before we submit the update matching stats request
+		namespaceStorage.addSearchIndex(filterTemplate);
+		namespaceStorage.updateConcept(concept);

 		final URI matchingStatsUri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder()
 														, AdminDatasetResource.class, "postprocessNamespace")
@@ -101,7 +110,7 @@ public void execute(StandaloneSupport conquery) throws Exception {
 				Map.of(
 						DATASET, conquery.getDataset().getId(),
 						CONCEPT, concept.getId(),
-						TABLE, filter.getConnector().getTable().getId(),
+						TABLE, filter.getConnector().getResolvedTable().getId(),
 						FILTER, filter.getId()
 				)
 		);
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ImportUpdateTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ImportUpdateTest.java
index 3291d3ac4f..8adcd61eae 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ImportUpdateTest.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ImportUpdateTest.java
@@ -5,6 +5,7 @@

 import java.io.File;
 import java.util.List;
+import jakarta.ws.rs.core.Response;

 import com.bakdata.conquery.ConqueryConstants;
 import com.bakdata.conquery.apiv1.query.Query;
@@ -30,7 +31,6 @@
 import com.bakdata.conquery.util.support.StandaloneSupport;
 import com.bakdata.conquery.util.support.TestConquery;
 import com.github.powerlibraries.io.In;
-import jakarta.ws.rs.core.Response;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.io.FileUtils;

@@ -77,9 +77,7 @@ public void execute(String name, TestConquery testConquery) throws Exception {

 			assertThat(cqpps.size()).isEqualTo(tables.size());
 			LoadingUtil.importCqppFiles(conquery, List.of(cqpps.get(0)));
-
 			conquery.waitUntilWorkDone();
-
 		}

 		final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
@@ -92,8 +90,8 @@ public void execute(String name, TestConquery testConquery) throws Exception {
 				.filteredOn(imp -> imp.getId().equals(importId1))
 				.isNotEmpty();

-		assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1))
-				.isNotEmpty();
+		assertThat(namespace.getStorage().getImport(importId1))
+				.isNotNull();

 		for (ShardNode node : conquery.getShardNodes()) {
 			for (Worker
worker : node.getWorkers().getWorkers().values()) { @@ -200,7 +198,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", worker.getInfo().getId()) - .filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())) + .filteredOn(block -> block.getBucket().getDataset().equals(dataset.getId())) .isNotEmpty(); assertThat(workerStorage.getAllBuckets()) @@ -216,8 +214,6 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(namespace.getNumberOfEntities()).isEqualTo(9); // Issue a query and assert that it has more content. IntegrationUtils.assertQueryResult(conquery, query, 4L, ExecutionState.DONE, conquery.getTestUser(), 201); - - } } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java index fbaa0d43ef..c6573cb181 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java @@ -8,6 +8,7 @@ import com.bakdata.conquery.integration.json.ConqueryTestSpec; import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; @@ -34,7 +35,8 @@ public void execute(StandaloneSupport conquery) throws Exception { //ensure the metadata is collected DistributedNamespace namespace = (DistributedNamespace) conquery.getNamespace(); - namespace.getWorkerHandler().sendToAll(new UpdateMatchingStatsMessage(conquery.getNamespace().getStorage().getAllConcepts())); + namespace.getWorkerHandler() + .sendToAll(new UpdateMatchingStatsMessage(conquery.getNamespace().getStorage().getAllConcepts().map(Concept::getId).toList())); conquery.waitUntilWorkDone(); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java index ba3de818a8..969607c86a 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java @@ -47,7 +47,6 @@ public void execute(String name, TestConquery testConquery) throws Exception { String testJson = In.resource("/tests/query/RESTART_TEST_DATA/SIMPLE_FRONTEND_Query.json").withUTF8().readAll(); Validator validator = Validators.newValidator(); - EntityIdMap entityIdMap = IdMapSerialisationTest.createTestPersistentMap(); ManagerNode manager = testConquery.getStandaloneCommand().getManagerNode(); AdminDatasetProcessor adminDatasetProcessor = manager.getAdmin().getAdminDatasetProcessor(); @@ -64,11 +63,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { test.executeTest(conquery); - final int numberOfExecutions = conquery.getMetaStorage().getAllExecutions().size(); + final long numberOfExecutions = conquery.getMetaStorage().getAllExecutions().count(); assertThat(numberOfExecutions).isEqualTo(1); // IDMapping Testing NamespaceStorage namespaceStorage = conquery.getNamespaceStorage(); + 
EntityIdMap entityIdMap = IdMapSerialisationTest.createTestPersistentMap(namespaceStorage); namespaceStorage.updateIdMapping(entityIdMap); @@ -81,8 +81,6 @@ public void execute(String name, TestConquery testConquery) throws Exception { final Dataset dataset6 = adminDatasetProcessor.addDataset(TEST_DATASET_6); - - MetaStorage storage = conquery.getMetaStorage(); Role role = new Role("role", "ROLE", storage); @@ -147,9 +145,9 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Restart complete"); - DatasetRegistry datasetRegistry = support.getDatasetsProcessor().getDatasetRegistry(); + DatasetRegistry datasetRegistry = support.getDatasetRegistry(); - assertThat(support.getMetaStorage().getAllExecutions().size()).as("Executions after restart").isEqualTo(numberOfExecutions); + assertThat(support.getMetaStorage().getAllExecutions().count()).as("Executions after restart").isEqualTo(numberOfExecutions); List allQueries = IntegrationUtils.getAllQueries(support, 200); assertThat(allQueries).size().isEqualTo(1); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ReusedQueryTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ReusedQueryTest.java index bef52ead96..65e8365625 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ReusedQueryTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ReusedQueryTest.java @@ -7,6 +7,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.MediaType; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; import com.bakdata.conquery.apiv1.query.ConceptQuery; @@ -21,17 +23,16 @@ import com.bakdata.conquery.integration.json.QueryTest; import com.bakdata.conquery.integration.json.TestDataImporter; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.Connector; -import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; @@ -44,8 +45,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.core.MediaType; import lombok.extern.slf4j.Slf4j; @@ -139,15 +138,16 @@ public void execute(String name, TestConquery testConquery) throws Exception { // We select only a single event of the query by the exact filtering. 
final CQConcept cqConcept = new CQConcept(); final ConceptId conceptId = new ConceptId(conquery.getDataset().getId(), "concept"); - final Concept concept = conquery.getNamespaceStorage().getConcept(conceptId); - cqConcept.setElements(List.of(concept)); + final NamespaceStorage namespaceStorage = conquery.getNamespaceStorage(); + final Concept concept = namespaceStorage.getConcept(conceptId); + cqConcept.setElements(List.of(concept.getId())); final CQTable cqTable = new CQTable(); cqTable.setConcept(cqConcept); - final CentralRegistry centralRegistry = conquery.getNamespaceStorage().getCentralRegistry(); - final Connector connector = centralRegistry.resolve(new ConnectorId(conceptId, "connector1")); - cqTable.setConnector(connector); - cqTable.setFilters(List.of(new FilterValue.CQRealRangeFilter((Filter>) centralRegistry.resolve(new FilterId(connector.getId(), "filter")), new Range<>(BigDecimal.valueOf(1.01d), BigDecimal.valueOf(1.01d))))); + ConnectorId connector1 = new ConnectorId(conceptId, "connector1"); + final Connector connector = connector1.get(namespaceStorage); + cqTable.setConnector(connector.getId()); + cqTable.setFilters(List.of(new FilterValue.CQRealRangeFilter(new FilterId(connector.getId(), "filter"), new Range<>(BigDecimal.valueOf(1.01d), BigDecimal.valueOf(1.01d))))); cqConcept.setTables(List.of(cqTable)); cqConcept.setExcludeFromSecondaryId(false); @@ -195,9 +195,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { reusedDiffId.setRoot(new CQReusedQuery(execution1.getId())); // ignored is a single global value and therefore the same as by-PID - reusedDiffId.setSecondaryId(conquery.getNamespace() - .getStorage() - .getSecondaryId(new SecondaryIdDescriptionId(conquery.getDataset().getId(), "ignored"))); + reusedDiffId.setSecondaryId(new SecondaryIdDescriptionId(conquery.getDataset().getId(), "ignored")); final ManagedExecutionId executionId = @@ -217,7 +215,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { reused.setSecondaryId(query.getSecondaryId()); User shareHolder = new User("shareholder", "ShareHolder", conquery.getMetaStorage()); - conquery.getMetaProcessor().addUser(shareHolder); + conquery.getAdminProcessor().addUser(shareHolder); shareHolder.addPermissions(Set.of( dataset.createPermission(Set.of(Ability.READ)), diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/SecondaryIdEndpointTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/SecondaryIdEndpointTest.java index fa395475f7..0b8e24398b 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/SecondaryIdEndpointTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/SecondaryIdEndpointTest.java @@ -5,6 +5,9 @@ import java.net.URI; import java.util.Map; import java.util.Set; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.frontend.FrontendSecondaryId; import com.bakdata.conquery.integration.IntegrationTest; @@ -23,9 +26,6 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -54,7 +54,7 @@ public void execute(StandaloneSupport conquery) throws Exception { final Set secondaryIds = 
fetchSecondaryIdDescriptions(conquery); log.info("{}", secondaryIds); - description.setDataset(conquery.getDataset()); + description.setDataset(conquery.getDataset().getId()); assertThat(secondaryIds) .extracting(FrontendSecondaryId::getId) .containsExactly(description.getId().toString()); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ConceptUpdateAndDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ConceptUpdateAndDeletionTest.java index 0b1e1ee887..236f64ed93 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ConceptUpdateAndDeletionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ConceptUpdateAndDeletionTest.java @@ -4,6 +4,7 @@ import static org.assertj.core.api.Assertions.assertThat; import java.util.Objects; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.commands.ShardNode; @@ -23,7 +24,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; /** @@ -48,10 +48,9 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ConceptId conceptId = ConceptId.Parser.INSTANCE.parse(dataset.getName(), "test_tree"); - final Concept concept; - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson); - final QueryTest test2 = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson2); + final QueryTest test = JsonIntegrationTest.readJson(dataset, testJson); + final QueryTest test2 = JsonIntegrationTest.readJson(dataset, testJson2); // Manually import data, so we can do our own work. { @@ -84,8 +83,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isNotEmpty(); - assertThat(namespace.getStorage().getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(namespace.getStorage().getConcept(conceptId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -94,12 +93,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { } final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(workerStorage.getConcept(conceptId)) + .isNotNull(); assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)) + .filteredOn(cBlock -> cBlock.getConnector().getConcept().equals(conceptId)) .isNotEmpty(); } } @@ -114,13 +113,9 @@ public void execute(String name, TestConquery testConquery) throws Exception { // To perform the update, the old concept will be deleted first and the new concept will be added. 
That means the deletion of concept is also covered here { log.info("Executing update"); - LoadingUtil.updateConcepts(conquery, test2.getRawConcepts(), Response.Status.Family.SUCCESSFUL); conquery.waitUntilWorkDone(); - log.info("Update executed"); - - } @@ -133,8 +128,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isNotEmpty(); - assertThat(namespace.getStorage().getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(namespace.getStorage().getConcept(conceptId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -144,12 +139,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(workerStorage.getConcept(conceptId)) + .isNotNull(); assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)) + .filteredOn(cBlock -> cBlock.getConnector().getConcept().equals(conceptId)) .isNotEmpty(); } } @@ -182,8 +177,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isNotEmpty(); - assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(conquery.getNamespace().getStorage().getConcept(conceptId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -193,12 +188,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(workerStorage.getConcept(conceptId)) + .isNotNull(); assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)) + .filteredOn(cBlock -> cBlock.getConnector().getConcept().equals(conceptId)) .isNotEmpty(); } } @@ -214,8 +209,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { // Delete the Concept. 
{ log.info("Issuing deletion of import {}", conceptId); - concept = Objects.requireNonNull(conquery.getNamespace().getStorage().getConcept(conceptId)); - conquery.getDatasetsProcessor().deleteConcept(concept); + Concept concept = Objects.requireNonNull(conquery.getNamespace().getStorage().getConcept(conceptId)); + conquery.getAdminDatasetsProcessor().deleteConcept(conceptId); conquery.waitUntilWorkDone(); } @@ -229,8 +224,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isEmpty(); - assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)) - .isEmpty(); + assertThat(conquery.getNamespace().getStorage().getConcept(conceptId)) + .isNull(); assertThat( conquery.getShardNodes().stream() @@ -242,8 +237,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { .noneMatch(workerStorage -> workerStorage.getConcept(conceptId) != null) // CBlocks of Concept are deleted on Workers .noneMatch(workerStorage -> workerStorage.getAllCBlocks() - .stream() - .anyMatch(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId))); + .anyMatch(cBlock -> cBlock.getConnector().getConcept().equals(conceptId))); log.info("Executing query after deletion (EXPECTING AN EXCEPTION IN THE LOGS!)"); @@ -271,8 +265,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isEmpty(); - assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)) - .isEmpty(); + assertThat(conquery.getNamespace().getStorage().getConcept(conceptId)) + .isNull(); assertThat( conquery.getShardNodes().stream() @@ -284,8 +278,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { .noneMatch(workerStorage -> workerStorage.getConcept(conceptId) != null) // CBlocks of Concept are deleted on Workers .noneMatch(workerStorage -> workerStorage.getAllCBlocks() - .stream() - .anyMatch(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId))); + .anyMatch(cBlock -> cBlock.getConnector().getConcept().equals(conceptId))); log.info("Executing query after restart (EXPECTING AN EXCEPTION IN THE LOGS!)"); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java index e5ba64967a..7c2062e340 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java @@ -4,6 +4,8 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import jakarta.ws.rs.WebApplicationException; + import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.commands.ShardNode; import com.bakdata.conquery.integration.common.IntegrationUtils; @@ -12,7 +14,6 @@ import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.integration.json.QueryTest; import com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.exceptions.ValidatorHelper; @@ -22,7 +23,6 @@ import 
com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.WebApplicationException; import lombok.extern.slf4j.Slf4j; /** @@ -36,11 +36,10 @@ public class DatasetDeletionTest implements ProgrammaticIntegrationTest { public void execute(String name, TestConquery testConquery) throws Exception { final StandaloneSupport conquery = testConquery.getSupport(name); - final MetaStorage storage = conquery.getMetaStorage(); final Dataset dataset = conquery.getDataset(); Namespace namespace = conquery.getNamespace(); final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll(); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson); + final QueryTest test = JsonIntegrationTest.readJson(dataset, testJson); // Manually import data, so we can do our own work. final RequiredData content = test.getContent(); @@ -62,15 +61,15 @@ public void execute(String name, TestConquery testConquery) throws Exception { final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - final int nImports = namespace.getStorage().getAllImports().size(); + final long nImports = namespace.getStorage().getAllImports().count(); log.info("Checking state before deletion"); // Assert state before deletion. { // Must contain the import. - assertThat(namespace.getStorage().getCentralRegistry().getOptional(dataset.getId())) - .isNotEmpty(); + assertThat(namespace.getStorage().getDataset()) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -96,25 +95,25 @@ public void execute(String name, TestConquery testConquery) throws Exception { // Delete Dataset. { - log.info("Issuing deletion of import {}", dataset); + log.info("Issuing deletion of dataset {}", dataset); // Delete the import. // But, we do not allow deletion of tables with associated connectors, so this should throw! - assertThatThrownBy(() -> conquery.getDatasetsProcessor().deleteDataset(dataset)) + assertThatThrownBy(() -> conquery.getAdminDatasetsProcessor().deleteDataset(dataset)) .isInstanceOf(WebApplicationException.class); //TODO use api conquery.getNamespace().getStorage().getTables() - .forEach(tableId -> conquery.getDatasetsProcessor().deleteTable(tableId, true)); + .forEach(tableId -> conquery.getAdminDatasetsProcessor().deleteTable(tableId, true)); conquery.waitUntilWorkDone(); // Finally delete dataset - conquery.getDatasetsProcessor().deleteDataset(dataset); + conquery.getAdminDatasetsProcessor().deleteDataset(dataset); conquery.waitUntilWorkDone(); - assertThat(storage.getCentralRegistry().getOptional(dataset.getId())).isEmpty(); + assertThat(conquery.getDatasetRegistry().get(dataset.getId())).isNull(); } // State after deletion. @@ -122,7 +121,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Checking state after deletion"); // We have deleted an import now there should be two less! - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(0); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(0); // The deleted import should not be found. assertThat(namespace.getStorage().getAllImports()) @@ -140,30 +139,26 @@ public void execute(String name, TestConquery testConquery) throws Exception { // No bucket should be found referencing the import. 
assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", value.getInfo().getId()) - .filteredOn(bucket -> bucket.getTable().getDataset().getId().equals(dataset.getId())) + .filteredOn(bucket -> bucket.getTable().getDataset().equals(dataset.getId())) .isEmpty(); // No CBlock associated with import may exist assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getBucket().getTable().getDataset().getId().equals(dataset.getId())) + .filteredOn(cBlock -> cBlock.getBucket().resolve().getTable().getDataset().equals(dataset.getId())) .isEmpty(); } } - // It's not exactly possible to issue a query for a non-existant dataset, so we assert that parsing the fails. - assertThatThrownBy(() -> { - IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - }).isNotNull(); - + // Try to execute the query after deletion IntegrationUtils.assertQueryResult(conquery, query, 0, ExecutionState.FAILED, conquery.getTestUser(), 404); } // Reload the dataset and assert the state. // We have to do some weird trix with StandaloneSupport to open it with another Dataset - final StandaloneSupport conqueryReimport = testConquery.getSupport(namespace.getDataset().getName()); + final StandaloneSupport conqueryReimport = testConquery.getSupport(dataset.getName()); { // only import the deleted import/table LoadingUtil.importTables(conqueryReimport, content.getTables(), content.isAutoConcept()); @@ -178,11 +173,11 @@ public void execute(String name, TestConquery testConquery) throws Exception { LoadingUtil.importConcepts(conqueryReimport, test.getRawConcepts()); conqueryReimport.waitUntilWorkDone(); - assertThat(conqueryReimport.getDatasetsProcessor().getDatasetRegistry().get(conqueryReimport.getDataset().getId())) + assertThat(conqueryReimport.getAdminDatasetsProcessor().getDatasetRegistry().get(conqueryReimport.getDataset().getId())) .describedAs("Dataset after re-import.") .isNotNull(); - assertThat(conqueryReimport.getNamespace().getStorage().getAllImports().size()).isEqualTo(nImports); + assertThat(conqueryReimport.getNamespace().getStorage().getAllImports().count()).isEqualTo(nImports); for (ShardNode node : conqueryReimport.getShardNodes()) { assertThat(node.getWorkers().getWorkers().values()) @@ -209,7 +204,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Checking state after re-start"); - assertThat(conqueryRestart.getNamespace().getStorage().getAllImports().size()).isEqualTo(2); + assertThat(conqueryRestart.getNamespace().getStorage().getAllImports().count()).isEqualTo(2); for (ShardNode node : conqueryRestart.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -219,7 +214,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getTable().getDataset().getId().equals(dataset.getId()))) + assertThat(workerStorage.getAllBuckets().filter(bucket -> bucket.getTable().getDataset().equals(dataset.getId()))) .describedAs("Buckets for Worker %s", value.getInfo().getId()) .isNotEmpty(); } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ImportDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ImportDeletionTest.java index 5536f967cf..fa1235d7e7 100644 --- 
a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ImportDeletionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ImportDeletionTest.java @@ -9,6 +9,8 @@ import java.util.List; import java.util.Map; import java.util.zip.GZIPInputStream; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.ConqueryConstants; import com.bakdata.conquery.apiv1.query.Query; @@ -39,8 +41,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.FileUtils; @@ -65,7 +65,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ImportId importId = ImportId.Parser.INSTANCE.parse(dataset.getName(), "test_table2", "test_table2"); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson); + final QueryTest test = JsonIntegrationTest.readJson(dataset, testJson); // Manually import data, so we can do our own work. final RequiredData content = test.getContent(); @@ -88,7 +88,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - final int nImports = namespace.getStorage().getAllImports().size(); + final long nImports = namespace.getStorage().getAllImports().count(); // State before deletion. @@ -100,8 +100,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(imp -> imp.getId().equals(importId)) .isNotEmpty(); - assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId)) - .isNotEmpty(); + assertThat(namespace.getStorage().getImport(importId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker worker : node.getWorkers().getWorkers().values()) { @@ -113,7 +113,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", worker.getInfo().getId()) - .filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())) + .filteredOn(block -> block.getBucket().getDataset().equals(dataset.getId())) .isNotEmpty(); assertThat(workerStorage.getAllBuckets()) .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())) @@ -155,7 +155,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { { log.info("Checking state after deletion"); // We have deleted an import now there should be one less! - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(nImports - 1); // The deleted import should not be found. assertThat(namespace.getStorage().getAllImports()) @@ -173,19 +173,19 @@ public void execute(String name, TestConquery testConquery) throws Exception { // No bucket should be found referencing the import. 
assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", worker.getInfo().getId()) - .filteredOn(bucket -> bucket.getImp().getId().equals(importId)) + .filteredOn(bucket -> bucket.getImp().equals(importId)) .isEmpty(); // No CBlock associated with import may exist assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", worker.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getBucket().getId().getImp().equals(importId)) + .filteredOn(cBlock -> cBlock.getBucket().getImp().equals(importId)) .isEmpty(); - + // Import should not exists anymore assertThat(workerStorage.getImport(importId)) - .describedAs("Import for Worker %s", worker.getInfo().getId()) - .isNull(); + .describedAs("Import for Worker %s", worker.getInfo().getId()) + .isNull(); } } @@ -239,10 +239,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { conquery.preprocessTmp(conquery.getTmpDir(), List.of(descriptionFile)); //import preprocessedFiles - conquery.getDatasetsProcessor().addImport(conquery.getNamespace(), new GZIPInputStream(new FileInputStream(preprocessedFile))); - conquery.waitUntilWorkDone(); - - + conquery.getAdminDatasetsProcessor().addImport(conquery.getNamespace(), new GZIPInputStream(new FileInputStream(preprocessedFile))); conquery.waitUntilWorkDone(); } @@ -250,7 +247,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { { log.info("Checking state after re-import"); - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(nImports); for (ShardNode node : conquery.getShardNodes()) { for (Worker worker : node.getWorkers().getWorkers().values()) { @@ -262,7 +259,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", worker.getInfo().getId()) - .filteredOn(bucket -> bucket.getImp().getId().equals(importId)) + .filteredOn(bucket -> bucket.getImp().equals(importId)) .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())) .isNotEmpty(); } @@ -285,7 +282,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Checking state after re-start"); { - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2); + assertThat(conquery2.getNamespace().getStorage().getAllImports().count()).isEqualTo(2); for (ShardNode node : conquery2.getShardNodes()) { for (Worker worker : node.getWorkers().getWorkers().values()) { @@ -298,7 +295,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", worker.getInfo().getId()) .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())) - .filteredOn(bucket -> bucket.getImp().getId().equals(importId)) + .filteredOn(bucket -> bucket.getImp().equals(importId)) .isNotEmpty(); } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/TableDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/TableDeletionTest.java index 661bc05c31..fc00254905 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/TableDeletionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/TableDeletionTest.java @@ -6,6 +6,7 @@ import java.net.URI; import java.util.Map; import java.util.stream.Collectors; 
+import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.commands.ShardNode; @@ -15,7 +16,6 @@ import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.integration.json.QueryTest; import com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.exceptions.ValidatorHelper; @@ -29,7 +29,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; /** @@ -43,8 +42,6 @@ public void execute(String name, TestConquery testConquery) throws Exception { final StandaloneSupport conquery = testConquery.getSupport(name); - final MetaStorage storage = conquery.getMetaStorage(); - final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll(); final Dataset dataset = conquery.getDataset(); @@ -52,7 +49,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final TableId tableId = TableId.Parser.INSTANCE.parse(dataset.getName(), "test_table2"); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson); + final QueryTest test = JsonIntegrationTest.readJson(dataset, testJson); // Manually import data, so we can do our own work. final RequiredData content = test.getContent(); @@ -74,15 +71,15 @@ public void execute(String name, TestConquery testConquery) throws Exception { final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - final int nImports = namespace.getStorage().getAllImports().size(); + final long nImports = namespace.getStorage().getAllImports().count(); // State before deletion. { log.info("Checking state before deletion"); // Must contain the import. 
- assertThat(namespace.getStorage().getCentralRegistry().getOptional(tableId)) - .isNotEmpty(); + assertThat(namespace.getStorage().getTable(tableId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -115,10 +112,10 @@ public void execute(String name, TestConquery testConquery) throws Exception { final URI deleteTable = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "remove") - .buildFromMap(Map.of( - ResourceConstants.DATASET, conquery.getDataset().getName(), - ResourceConstants.TABLE, tableId.toString() - )); + .buildFromMap(Map.of( + ResourceConstants.DATASET, conquery.getDataset().getName(), + ResourceConstants.TABLE, tableId.toString() + )); final Response failed = conquery.getClient() .target(deleteTable) @@ -127,7 +124,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(failed.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.CLIENT_ERROR); - conquery.getDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next()); + conquery.getAdminDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next().getId()); Thread.sleep(100); conquery.waitUntilWorkDone(); @@ -147,7 +144,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { { log.info("Checking state after deletion"); // We have deleted an import now there should be two less! - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(nImports - 1); // The deleted import should not be found. assertThat(namespace.getStorage().getAllImports()) @@ -165,13 +162,13 @@ public void execute(String name, TestConquery testConquery) throws Exception { // No bucket should be found referencing the import. assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", value.getInfo().getId()) - .filteredOn(bucket -> bucket.getImp().getTable().getId().equals(tableId)) + .filteredOn(bucket -> bucket.getImp().getTable().equals(tableId)) .isEmpty(); // No CBlock associated with import may exist assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().getId().equals(tableId)) + .filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().equals(tableId)) .isEmpty(); } } @@ -211,7 +208,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { continue; } - assertThat(value.getStorage().getCentralRegistry().resolve(tableId)) + assertThat(value.getStorage().getTable(tableId)) .describedAs("Table in worker storage.") .isNotNull(); } @@ -221,7 +218,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { // Test state after reimport. 
{ log.info("Checking state after re-import"); - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(nImports); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -231,7 +228,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))) + assertThat(workerStorage.getAllBuckets().filter(bucket -> bucket.getImp().getTable().equals(tableId))) .describedAs("Buckets for Worker %s", value.getInfo().getId()) .isNotEmpty(); } @@ -255,7 +252,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Checking state after re-start"); { - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2); + Namespace namespace2 = conquery2.getNamespace(); + assertThat(namespace2.getStorage().getAllImports().count()).isEqualTo(2); for (ShardNode node : conquery2.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -265,7 +263,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))) + assertThat(workerStorage.getAllBuckets().filter(bucket -> bucket.getImp().getTable().equals(tableId))) .describedAs("Buckets for Worker %s", value.getInfo().getId()) .isNotEmpty(); } diff --git a/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java b/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java index 64c67acf55..68c899544f 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java @@ -1,20 +1,23 @@ package com.bakdata.conquery.io; +import jakarta.validation.Validator; + import static org.mockito.Mockito.mock; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; import com.bakdata.conquery.mode.cluster.ClusterNamespaceHandler; import com.bakdata.conquery.mode.cluster.ClusterState; import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.index.IndexService; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.ShardWorkers; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import io.dropwizard.jersey.validation.Validators; import jakarta.validation.Validator; @@ -27,46 +30,52 @@ public abstract class AbstractSerializationTest { private final Validator validator = Validators.newValidator(); private final ConqueryConfig config = new ConqueryConfig(); private DatasetRegistry datasetRegistry; - private MetaStorage metaStorage; private NamespaceStorage namespaceStorage; - 
private IndexService indexService; - + private MetaStorage metaStorage; + private WorkerStorageImpl workerStorage; private ObjectMapper managerInternalMapper; private ObjectMapper namespaceInternalMapper; private ObjectMapper shardInternalMapper; private ObjectMapper apiMapper; + @BeforeEach public void before() { final InternalMapperFactory internalMapperFactory = new InternalMapperFactory(config, validator); + final IndexService indexService = new IndexService(config.getCsv().createCsvParserSettings(), "emptyDefaultLabel"); NonPersistentStoreFactory storageFactory = new NonPersistentStoreFactory(); metaStorage = new MetaStorage(storageFactory); namespaceStorage = new NamespaceStorage(storageFactory, ""); - indexService = new IndexService(config.getCsv().createCsvParserSettings(), "emptyDefaultLabel"); + workerStorage = new WorkerStorageImpl(new NonPersistentStoreFactory(), null, "serializationTestWorker"); final ClusterNamespaceHandler clusterNamespaceHandler = new ClusterNamespaceHandler(new ClusterState(), config, internalMapperFactory); datasetRegistry = new DatasetRegistry<>(0, config, internalMapperFactory, clusterNamespaceHandler, indexService); - // Prepare manager node internal mapper + MetricRegistry metricRegistry = new MetricRegistry(); + managerInternalMapper = internalMapperFactory.createManagerPersistenceMapper(datasetRegistry, metaStorage); + metaStorage.openStores(managerInternalMapper, metricRegistry); - metaStorage.openStores(managerInternalMapper); - metaStorage.loadData(); - // Prepare namespace persistence mapper - namespaceInternalMapper = internalMapperFactory.createNamespacePersistenceMapper(datasetRegistry); - namespaceStorage.injectInto(namespaceInternalMapper); - namespaceStorage.openStores(namespaceInternalMapper); - namespaceStorage.loadData(); - namespaceStorage.updateDataset(new Dataset("serialization_test")); + namespaceInternalMapper = internalMapperFactory.createNamespacePersistenceMapper(namespaceStorage); + namespaceStorage.openStores(namespaceInternalMapper, metricRegistry); - // Prepare shard node internal mapper - final ShardWorkers workers = mock(ShardWorkers.class); - shardInternalMapper = internalMapperFactory.createWorkerPersistenceMapper(workers); + // Prepare worker persistence mapper + workerStorage.openStores(shardInternalMapper, metricRegistry); + ShardWorkers workers = new ShardWorkers( + config.getQueries().getExecutionPool(), + internalMapperFactory, + config.getCluster().getEntityBucketSize(), + config.getQueries().getSecondaryIdSubPlanRetention() + ); + shardInternalMapper = internalMapperFactory.createWorkerPersistenceMapper(workerStorage); - // Prepare api mapper with a Namespace injected (usually done by PathParamInjector) + // Prepare api response mapper apiMapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); internalMapperFactory.customizeApiObjectMapper(apiMapper, datasetRegistry, metaStorage); + // This overrides the injected datasetRegistry namespaceStorage.injectInto(apiMapper); } + + } diff --git a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java b/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java deleted file mode 100644 index cc6cbebfde..0000000000 --- a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java +++ /dev/null @@ -1,79 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.io.IOException; -import java.util.Collections; -import 
java.util.List; - -import com.bakdata.conquery.io.jackson.Jackson; -import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; -import com.bakdata.conquery.util.NonPersistentStoreFactory; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.databind.ObjectMapper; -import lombok.Getter; -import lombok.RequiredArgsConstructor; -import org.junit.jupiter.api.Test; - -public class IdRefrenceTest { - - @Test - public void testListReferences() throws IOException { - final ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); - - CentralRegistry registry = new CentralRegistry(); - Dataset dataset = new Dataset(); - dataset.setName("dataset"); - Table table = new Table(); - table.setDataset(dataset); - table.setName("table"); - registry.register(dataset); - registry.register(table); - - final MetaStorage metaStorage = new MetaStorage(new NonPersistentStoreFactory()); - - metaStorage.openStores(null); - - - User user = new User("usermail", "userlabel", metaStorage); - metaStorage.addUser(user); - - String json = mapper.writeValueAsString( - new ListHolder( - Collections.singletonList(table), - Collections.singletonList(user) - ) - ); - - assertThat(json) - .contains("\"user.usermail\"") - .contains("\"dataset.table\""); - - new SingletonNamespaceCollection(registry) - .injectInto(mapper); - metaStorage.injectInto(mapper); - ListHolder holder = mapper - .readerFor(ListHolder.class) - .readValue(json); - - assertThat(holder.getUsers().get(0)).isSameAs(user); - assertThat(holder.getTables().get(0)).isSameAs(table); - } - - /** - * @implNote this needs to be a class, because jackson ignores NsIdRefCollection on records - */ - @Getter - @RequiredArgsConstructor(onConstructor_ = @JsonCreator) - public static class ListHolder { - @NsIdRefCollection - private final List
tables; - @MetaIdRefCollection - private final List users; - } -} diff --git a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/SerializationTestUtil.java b/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/SerializationTestUtil.java index 11db3ada69..a47a9e82b3 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/SerializationTestUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/SerializationTestUtil.java @@ -19,9 +19,8 @@ import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.exceptions.JSONException; import com.bakdata.conquery.models.exceptions.ValidatorHelper; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.IdentifiableImpl; -import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; @@ -30,7 +29,6 @@ import io.dropwizard.jersey.validation.Validators; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import lombok.Setter; import lombok.experimental.Accessors; import lombok.extern.slf4j.Slf4j; import org.assertj.core.api.RecursiveComparisonAssert; @@ -53,13 +51,12 @@ public class SerializationTestUtil { User.ShiroUserAdapter.class, Validator.class, WeakReference.class, - CompletableFuture.class + CompletableFuture.class, + NamespacedStorageProvider.class }; private final JavaType type; private final Validator validator = Validators.newValidator(); - @Setter - private CentralRegistry registry; private List objectMappers = Collections.emptyList(); @NonNull private Injectable[] injectables = {}; @@ -100,6 +97,10 @@ public SerializationTestUtil customizingAssertion(UnaryOperator activeView = objectMapper.getSerializationConfig().getActiveView(); throw new IllegalStateException("Serdes failed with object mapper using view '" + activeView + "'", e); } } } - public void test(T value) throws JSONException, IOException { - test(value, value); - } - private void test(T value, T expected, ObjectMapper mapper) throws IOException { - if (registry != null) { - mapper = new SingletonNamespaceCollection(registry).injectInto(mapper); - } for (Injectable injectable : injectables) { mapper = injectable.injectInto(mapper); } @@ -157,7 +151,8 @@ private void test(T value, T expected, ObjectMapper mapper) throws IOException { .as("Unequal after copy.") .usingRecursiveComparison() .usingOverriddenEquals() - .ignoringFieldsOfTypes(TYPES_TO_IGNORE); + .ignoringFieldsOfTypes(TYPES_TO_IGNORE) + .ignoringFields("metaStorage", "namespacedStorageProvider"); // Apply assertion customizations ass = assertCustomizer.apply(ass); diff --git a/backend/src/test/java/com/bakdata/conquery/io/result/ResultTestUtil.java b/backend/src/test/java/com/bakdata/conquery/io/result/ResultTestUtil.java index b2892e0fc4..431b259f32 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/result/ResultTestUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/io/result/ResultTestUtil.java @@ -1,33 +1,30 @@ package com.bakdata.conquery.io.result; -import static org.mockito.Mockito.mock; - +import java.math.BigDecimal; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.OptionalLong; -import java.util.Set; import java.util.stream.Collectors; import 
java.util.stream.Stream; -import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; -import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.resultinfo.ExternalResultInfo; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.query.results.MultilineEntityResult; import com.bakdata.conquery.models.query.results.SinglelineEntityResult; @@ -41,33 +38,31 @@ @UtilityClass public class ResultTestUtil { - - private static TreeConcept concept; + public static final DatasetId DATASET = new DatasetId("test_dataset"); + private static final TreeConcept CONCEPT; static { - concept = new TreeConcept(); - concept.setName("concept"); - concept.setDataset(new Dataset("dataset")); + CONCEPT = new TreeConcept(); + CONCEPT.setName("concept"); + CONCEPT.setDataset(DATASET); } - private static final PrintSettings - PRINT_SETTINGS = - new PrintSettings(false, Locale.ROOT, null, new ConqueryConfig(), null, (selectInfo) -> selectInfo.getSelect().getLabel()); - - public static List - ID_FIELDS = - Stream.of("id1", "id2") - .map(name -> new ExternalResultInfo(name, ResultType.Primitive.STRING, "", new ResultPrinters.StringPrinter(), Set.of(new SemanticType.IdT("ID")), PRINT_SETTINGS)) - .collect(Collectors.toList()); + public static List getIdFields() { + return Stream.of("id1", "id2").map(name -> { + ExternalResultInfo info = new ExternalResultInfo(name, ResultType.Primitive.STRING); + info.addSemantics(new SemanticType.IdT("ID")); + return info; + }).collect(Collectors.toList()); + } @NotNull public static ManagedQuery getTestQuery() { - return new ManagedQuery(mock(Query.class), mock(User.class), new Dataset(ResultTestUtil.class.getSimpleName()), null, null) { + return new ManagedQuery(null, new UserId("test_user"), DATASET, null, null) { @Override - public List getResultInfos(PrintSettings printSettings) { + public List getResultInfos() { return getResultTypes().stream() - .map(resultType -> new TypedSelectDummy(resultType)) - .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet(), PRINT_SETTINGS)) + .map(TypedSelectDummy::new) + .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet())) .collect(Collectors.toList()); } @@ -80,12 +75,55 @@ public Stream streamResults(OptionalLong maybeLimit) { 
@NotNull public static List getResultTypes() { - return List.of(ResultType.Primitive.BOOLEAN, ResultType.Primitive.INTEGER, ResultType.Primitive.NUMERIC, ResultType.Primitive.DATE, ResultType.Primitive.DATE_RANGE, ResultType.Primitive.STRING, ResultType.Primitive.MONEY, new ResultType.ListT(ResultType.Primitive.BOOLEAN), new ResultType.ListT(ResultType.Primitive.DATE_RANGE), new ResultType.ListT(ResultType.Primitive.STRING)); + return List.of(ResultType.Primitive.BOOLEAN, + ResultType.Primitive.INTEGER, + ResultType.Primitive.NUMERIC, + ResultType.Primitive.DATE, + ResultType.Primitive.DATE_RANGE, + ResultType.Primitive.STRING, + ResultType.Primitive.MONEY, + new ResultType.ListT(ResultType.Primitive.BOOLEAN), + new ResultType.ListT(ResultType.Primitive.DATE_RANGE), + new ResultType.ListT(ResultType.Primitive.STRING) + ); } @NotNull public static List getTestEntityResults() { - return List.of(new SinglelineEntityResult("1", new Object[]{Boolean.TRUE, 2345634, 123423.34, 5646, List.of(345, 534), "test_string", 4521, List.of(true, false), List.of(List.of(345, 534), List.of(1, 2)), List.of("fizz", "buzz")}), new SinglelineEntityResult("2", new Object[]{Boolean.FALSE, null, null, null, null, null, null, List.of(), List.of(List.of(1234, Integer.MAX_VALUE)), List.of()}), new SinglelineEntityResult("2", new Object[]{Boolean.TRUE, null, null, null, null, null, null, List.of(false, false), null, null}), new MultilineEntityResult("3", List.of(new Object[]{Boolean.FALSE, null, null, null, null, null, null, List.of(false), null, null}, new Object[]{Boolean.TRUE, null, null, null, null, null, null, null, null, null}, new Object[]{Boolean.TRUE, null, null, null, null, null, 4, List.of(true, false, true, false), null, null}))); + return List.of(new SinglelineEntityResult("1", new Object[]{ + Boolean.TRUE, + 2345634, + 123423.34, + 5646, + List.of(345, 534), + "test_string", + new BigDecimal("45.21"), + List.of(true, false), + List.of(List.of(345, 534), List.of(1, 2)), + List.of("fizz", "buzz") + }), + new SinglelineEntityResult("2", new Object[]{ + Boolean.FALSE, null, null, null, null, null, null, List.of(), List.of(List.of(1234, Integer.MAX_VALUE)), List.of() + }), + new SinglelineEntityResult("2", new Object[]{Boolean.TRUE, null, null, null, null, null, null, List.of(false, false), null, null}), + new MultilineEntityResult("3", + List.of(new Object[]{Boolean.FALSE, null, null, null, null, null, null, List.of(false), null, null}, + new Object[]{Boolean.TRUE, null, null, null, null, null, null, null, null, null}, + new Object[]{ + Boolean.TRUE, + null, + null, + null, + null, + null, + new BigDecimal("4.00"), + List.of(true, false, true, false), + null, + null + } + ) + ) + ); } public static class TypedSelectDummy extends Select { @@ -95,13 +133,13 @@ public static class TypedSelectDummy extends Select { public TypedSelectDummy(ResultType resultType) { setLabel(resultType.toString()); - setHolder(concept); + setHolder(CONCEPT); this.resultType = resultType; } @Nullable @Override - public List getRequiredColumns() { + public List getRequiredColumns() { return Collections.emptyList(); } diff --git a/backend/src/test/java/com/bakdata/conquery/io/result/arrow/ArrowResultGenerationTest.java b/backend/src/test/java/com/bakdata/conquery/io/result/arrow/ArrowResultGenerationTest.java index b7640e817b..497fa526b4 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/result/arrow/ArrowResultGenerationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/result/arrow/ArrowResultGenerationTest.java @@ 
-10,6 +10,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -22,7 +23,6 @@ import java.util.stream.Stream; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.io.result.ResultTestUtil; import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.config.ArrowConfig; import com.bakdata.conquery.models.config.ConqueryConfig; @@ -33,9 +33,9 @@ import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; +import com.bakdata.conquery.models.query.resultinfo.printers.ArrowResultPrinters; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.types.ResultType; -import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.VectorSchemaRoot; @@ -53,202 +53,212 @@ @Slf4j public class ArrowResultGenerationTest { - private static final int BATCH_SIZE = 2; - public static final ConqueryConfig CONFIG = new ConqueryConfig(); - private static final PrintSettings - PRINT_SETTINGS = - new PrintSettings(false, Locale.ROOT, null, CONFIG, null, (selectInfo) -> selectInfo.getSelect().getLabel()); + public static final ConqueryConfig CONFIG = new ConqueryConfig(); + private static final int BATCH_SIZE = 2; + private static final PrintSettings PRINT_SETTINGS = new PrintSettings(false, Locale.ROOT, null, CONFIG, null, (selectInfo) -> selectInfo.getSelect().getLabel()); + public static String readTSV(InputStream inputStream) throws IOException { + StringJoiner stringJoiner = new StringJoiner("\n"); + try (ArrowStreamReader arrowReader = new ArrowStreamReader(inputStream, ROOT_ALLOCATOR)) { + log.info("Reading the produced arrow data."); + VectorSchemaRoot readRoot = arrowReader.getVectorSchemaRoot(); + stringJoiner.add(readRoot.getSchema().getFields().stream().map(Field::getName).collect(Collectors.joining("\t"))); + readRoot.setRowCount(BATCH_SIZE); + while (arrowReader.loadNextBatch()) { + List vectors = readRoot.getFieldVectors(); + + for (int rowI = 0; rowI < readRoot.getRowCount(); rowI++) { + final int currentRow = rowI; + stringJoiner.add(vectors.stream() + .map(vec -> vec.getObject(currentRow)) + .map(ArrowResultGenerationTest::getPrintValue) + .collect(Collectors.joining("\t"))); + } + } + } + return stringJoiner.toString(); + } + + public static String generateExpectedTSV(List results, List resultInfos) { + String expected = + results.stream() + .map(EntityResult.class::cast) + .map(res -> { + StringJoiner lineJoiner = new StringJoiner("\n"); + + for (Object[] line : res.listResultLines()) { + StringJoiner valueJoiner = new StringJoiner("\t"); + + valueJoiner.add(String.valueOf(res.getEntityId())); + valueJoiner.add(String.valueOf(res.getEntityId())); + + for (int lIdx = 0; lIdx < line.length; lIdx++) { + Object val = line[lIdx]; + ResultInfo info = resultInfos.get(lIdx); + + valueJoiner.add(getPrintValue(val, info.getType())); + } + + lineJoiner.add(valueJoiner.toString()); + } + return lineJoiner.toString(); + }).collect(Collectors.joining("\n")); + + return Stream.concat( + // Id column headers + getIdFields().stream().map(i -> i.defaultColumnName(PRINT_SETTINGS)), + // result column headers + 
getResultTypes().stream().map(ResultType::typeInfo) + ) + .collect(Collectors.joining("\t")) + + "\n" + + expected; + } + + private static String getPrintValue(Object obj, ResultType type) { + if (obj == null) { + return "null"; + } + if (type.equals(ResultType.Primitive.MONEY)) { + return Integer.toString(((BigDecimal) obj).unscaledValue().intValueExact()); + } + if (type.equals(ResultType.Primitive.DATE_RANGE)) { + // Special case for daterange in this test because it uses a StructVector, we rebuild the structural information + List dr = (List) obj; + StringBuilder sb = new StringBuilder(); + sb.append("{"); + final int min = (int) dr.get(0); + final int max = (int) dr.get(1); + // Handle cases where one of the limits is infinity + if (!CDate.isNegativeInfinity(min)) { + sb.append("\"min\":").append(min); + } + if (!CDate.isNegativeInfinity(min) && !CDate.isPositiveInfinity(max)) { + sb.append(","); + } + if (!CDate.isPositiveInfinity(max)) { + sb.append("\"max\":").append(max); + } + sb.append("}"); + return sb.toString(); + } + if (obj instanceof Collection) { + Collection col = (Collection) obj; + // Workaround: Arrow deserializes lists as a JsonStringArrayList which has a JSON String method + ResultType elemType = ((ResultType.ListT) type).getElementType(); + return col.stream().map(v -> getPrintValue(v, elemType)).collect(Collectors.joining(",", "[", "]")); + } + return obj.toString(); + } + + private static String getPrintValue(Object obj) { + if (obj instanceof JsonStringArrayList) { + // Workaround: Arrow deserializes lists as a JsonStringArrayList which has a JSON String method + return new ArrayList<>((JsonStringArrayList) obj).stream().map(ArrowResultGenerationTest::getPrintValue).collect(Collectors.joining(",", "[", "]")); + } + return Objects.toString(obj); + } @Test - void generateFieldsIdMapping() { + void generateFieldsIdMapping() { final UniqueNamer uniqueNamer = new UniqueNamer(PRINT_SETTINGS); - List fields = generateFields(ResultTestUtil.ID_FIELDS, uniqueNamer); + List fields = generateFields(getIdFields(), uniqueNamer, PRINT_SETTINGS); - assertThat(fields).containsExactlyElementsOf( - List.of( - new Field("id1", FieldType.nullable(new ArrowType.Utf8()), null), - new Field("id2", FieldType.nullable(new ArrowType.Utf8()), null))); + assertThat(fields).containsExactlyElementsOf( + List.of(new Field("id1", FieldType.nullable(new ArrowType.Utf8()), null), + new Field("id2", FieldType.nullable(new ArrowType.Utf8()), null) + )); - } + } - @Test - void generateFieldsValue() { + @Test + void generateFieldsValue() { final UniqueNamer uniqueNamer = new UniqueNamer(PRINT_SETTINGS); + List resultInfos = getResultTypes().stream() + .map(TypedSelectDummy::new) + .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet())) + .collect(Collectors.toList()); - List resultInfos = getResultTypes().stream().map(TypedSelectDummy::new) - .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet(), PRINT_SETTINGS)).collect(Collectors.toList()); - - List fields = generateFields( - resultInfos, - // Custom column namer so we don't require a dataset registry - uniqueNamer + List fields = generateFields(resultInfos, + // Custom column namer so we don't require a dataset registry + uniqueNamer, PRINT_SETTINGS ); - assertThat(fields).containsExactlyElementsOf( - List.of( - new Field("BOOLEAN", FieldType.nullable(ArrowType.Bool.INSTANCE), null), - new Field("INTEGER", FieldType.nullable(new ArrowType.Int(32, true)), null), - new Field("NUMERIC", 
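The DATE_RANGE branch of getPrintValue above rebuilds the JSON-like struct representation ({"min":...,"max":...}) from the two-element list backing the Arrow StructVector, dropping whichever bound is infinite. A self-contained sketch of that rule; the boolean parameters stand in for the project's CDate.isNegativeInfinity / CDate.isPositiveInfinity checks, whose sentinel values are internal to Conquery:

```java
import java.util.List;

public class DateRangeJsonSketch {

	// Mirrors the test helper: omit an infinite bound, emit the comma only when both bounds are present.
	static String toJson(List<Integer> range, boolean minIsOpen, boolean maxIsOpen) {
		StringBuilder sb = new StringBuilder("{");
		if (!minIsOpen) {
			sb.append("\"min\":").append(range.get(0));
		}
		if (!minIsOpen && !maxIsOpen) {
			sb.append(",");
		}
		if (!maxIsOpen) {
			sb.append("\"max\":").append(range.get(1));
		}
		return sb.append("}").toString();
	}

	public static void main(String[] args) {
		System.out.println(toJson(List.of(18262, 18292), false, false)); // {"min":18262,"max":18292}
		System.out.println(toJson(List.of(0, 18292), true, false));      // {"max":18292}
	}
}
```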
FieldType.nullable(new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)), null), - new Field("DATE", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null), - new Field("DATE_RANGE", - FieldType.nullable(ArrowType.Struct.INSTANCE), - List.of( - new Field("min", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null), - new Field("max", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null) - )), - new Field("STRING", FieldType.nullable(new ArrowType.Utf8()), null), - new Field("MONEY", FieldType.nullable(new ArrowType.Int(32, true)), null), - new Field("LIST[BOOLEAN]", FieldType.nullable(ArrowType.List.INSTANCE), List.of(new Field("LIST[BOOLEAN]", FieldType.nullable(ArrowType.Bool.INSTANCE), null))), - new Field("LIST[DATE_RANGE]", FieldType.nullable(ArrowType.List.INSTANCE), List.of(new Field("LIST[DATE_RANGE]", - FieldType.nullable(ArrowType.Struct.INSTANCE), - List.of( - new Field("min", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null), - new Field("max", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null) - )))), - new Field("LIST[STRING]", FieldType.nullable(ArrowType.List.INSTANCE), List.of(new Field("LIST[STRING]", FieldType.nullable(new ArrowType.Utf8()), null))) - ) - ); - - } - - @Test - void writeAndRead() throws IOException { - - // Initialize internationalization - I18n.init(); - - // Prepare every input data - PrintSettings printSettings = new PrintSettings( - false, - Locale.ROOT, - null, - CONFIG, - (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), - (selectInfo) -> selectInfo.getSelect().getLabel()); - // The Shard nodes send Object[] but since Jackson is used for deserialization, nested collections are always a list because they are not further specialized - List results = getTestEntityResults(); - - ManagedQuery mquery = getTestQuery(); - - // First we write to the buffer, than we read from it and parse it as TSV - ByteArrayOutputStream output = new ByteArrayOutputStream(); - - renderToStream( - (root) -> new ArrowStreamWriter(root, new DictionaryProvider.MapDictionaryProvider(), output), - printSettings, - new ArrowConfig(BATCH_SIZE), - ResultTestUtil.ID_FIELDS, - mquery.getResultInfos(printSettings), - mquery.streamResults(OptionalLong.empty()) + assertThat(fields).containsExactlyElementsOf( + List.of(new Field("BOOLEAN", FieldType.nullable(ArrowType.Bool.INSTANCE), null), + new Field("INTEGER", FieldType.nullable(new ArrowType.Int(32, true)), null), + new Field("NUMERIC", FieldType.nullable(new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)), null), + new Field("DATE", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null), + new Field("DATE_RANGE", + FieldType.nullable(ArrowType.Struct.INSTANCE), + List.of(new Field("min", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null), + new Field("max", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null) + ) + ), + new Field("STRING", FieldType.nullable(new ArrowType.Utf8()), null), + new Field("MONEY", FieldType.nullable(new ArrowType.Int(32, true)), null), + new Field("LIST[BOOLEAN]", + FieldType.nullable(ArrowType.List.INSTANCE), + List.of(new Field("LIST[BOOLEAN]", FieldType.nullable(ArrowType.Bool.INSTANCE), null)) + ), + new Field("LIST[DATE_RANGE]", + FieldType.nullable(ArrowType.List.INSTANCE), + List.of(new Field("LIST[DATE_RANGE]", + FieldType.nullable(ArrowType.Struct.INSTANCE), + List.of(new Field("min", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null), + new Field("max", FieldType.nullable(new 
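The expected field list asserted here documents how each ResultType is mapped onto Arrow's type system (MONEY as a 32-bit signed Int, DATE as day precision, DATE_RANGE as a struct of two dates, LIST types as a list field wrapping one child). A small standalone sketch, using only Arrow's public pojo API, that builds the two nested cases and prints the resulting schema:

```java
import java.util.List;

import org.apache.arrow.vector.types.DateUnit;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.types.pojo.Schema;

public class DateRangeFieldSketch {
	public static void main(String[] args) {
		// DATE_RANGE has no scalar Arrow equivalent; it becomes a nullable struct
		// with two day-precision date children, exactly as asserted in the test.
		Field min = new Field("min", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null);
		Field max = new Field("max", FieldType.nullable(new ArrowType.Date(DateUnit.DAY)), null);
		Field dateRange = new Field("DATE_RANGE", FieldType.nullable(ArrowType.Struct.INSTANCE), List.of(min, max));

		// LIST types carry their element type as the single child of a list field.
		Field stringElem = new Field("LIST[STRING]", FieldType.nullable(new ArrowType.Utf8()), null);
		Field stringList = new Field("LIST[STRING]", FieldType.nullable(ArrowType.List.INSTANCE), List.of(stringElem));

		System.out.println(new Schema(List.of(dateRange, stringList)).toJson());
	}
}
```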
ArrowType.Date(DateUnit.DAY)), null) + ) + )) + ), + new Field("LIST[STRING]", + FieldType.nullable(ArrowType.List.INSTANCE), + List.of(new Field("LIST[STRING]", FieldType.nullable(new ArrowType.Utf8()), null)) + ) + )); + + } + + @Test + void writeAndRead() throws IOException { + + // Initialize internationalization + I18n.init(); + + // Prepare every input data + PrintSettings printSettings = new PrintSettings(false, + Locale.ROOT, + null, + CONFIG, + (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), + (selectInfo) -> selectInfo.getSelect().getLabel() ); + // The Shard nodes send Object[] but since Jackson is used for deserialization, nested collections are always a list because they are not further specialized + List results = getTestEntityResults(); - InputStream inputStream = new ByteArrayInputStream(output.toByteArray()); + ManagedQuery mquery = getTestQuery(); - String computed = readTSV(inputStream); + // First we write to the buffer, than we read from it and parse it as TSV + ByteArrayOutputStream output = new ByteArrayOutputStream(); - assertThat(computed).isNotBlank(); - assertThat(computed).isEqualTo(generateExpectedTSV(results, mquery.getResultInfos(printSettings), printSettings)); + renderToStream((root) -> new ArrowStreamWriter(root, new DictionaryProvider.MapDictionaryProvider(), output), + printSettings, + new ArrowConfig(BATCH_SIZE), + getIdFields(), + mquery.getResultInfos(), + mquery.streamResults(OptionalLong.empty()), + new ArrowResultPrinters() + ); - } + InputStream inputStream = new ByteArrayInputStream(output.toByteArray()); - public static String readTSV(InputStream inputStream) throws IOException { - StringJoiner stringJoiner = new StringJoiner("\n"); - try (ArrowStreamReader arrowReader = new ArrowStreamReader(inputStream, ROOT_ALLOCATOR)) { - log.info("Reading the produced arrow data."); - VectorSchemaRoot readRoot = arrowReader.getVectorSchemaRoot(); - stringJoiner.add(readRoot.getSchema().getFields().stream().map(Field::getName).collect(Collectors.joining("\t"))); - readRoot.setRowCount(BATCH_SIZE); - while (arrowReader.loadNextBatch()) { - List vectors = readRoot.getFieldVectors(); + String computed = readTSV(inputStream); - for (int rowI = 0; rowI < readRoot.getRowCount(); rowI++) { - final int currentRow = rowI; - stringJoiner.add( - vectors.stream() - .map(vec -> vec.getObject(currentRow)) - .map(ArrowResultGenerationTest::getPrintValue) - .collect(Collectors.joining("\t"))); - } - } - } - return stringJoiner.toString(); - } - - public static String generateExpectedTSV(List results, List resultInfos, PrintSettings settings) { - String expected = results.stream() - .map(EntityResult.class::cast) - .map(res -> { - StringJoiner lineJoiner = new StringJoiner("\n"); - - for (Object[] line : res.listResultLines()) { - StringJoiner valueJoiner = new StringJoiner("\t"); - valueJoiner.add(String.valueOf(res.getEntityId())); - valueJoiner.add(String.valueOf(res.getEntityId())); - for (int lIdx = 0; lIdx < line.length; lIdx++) { - Object val = line[lIdx]; - ResultInfo info = resultInfos.get(lIdx); - valueJoiner.add(getPrintValue(val, info.getType(), settings)); - } - lineJoiner.add(valueJoiner.toString()); - } - return lineJoiner.toString(); - }) - .collect(Collectors.joining("\n")); + assertThat(computed).isNotBlank(); + assertThat(computed).isEqualTo(generateExpectedTSV(results, mquery.getResultInfos())); - return Stream.concat( - // Id column headers - ResultTestUtil.ID_FIELDS.stream().map(i -> i.defaultColumnName()), - // result column headers 
- getResultTypes().stream().map(ResultType::typeInfo) - ).collect(Collectors.joining("\t")) - + "\n" + expected; - } - - private static String getPrintValue(Object obj, ResultType type, PrintSettings settings) { - if (obj == null) { - return "null"; - } - if (type.equals(ResultType.Primitive.DATE_RANGE)) { - // Special case for daterange in this test because it uses a StructVector, we rebuild the structural information - List dr = (List) obj; - StringBuilder sb = new StringBuilder(); - sb.append("{"); - final int min = (int) dr.get(0); - final int max = (int) dr.get(1); - // Handle cases where one of the limits is infinity - if (!CDate.isNegativeInfinity(min)) { - sb.append("\"min\":").append(min); - } - if (!CDate.isNegativeInfinity(min) && !CDate.isPositiveInfinity(max)) { - sb.append(","); - } - if (!CDate.isPositiveInfinity(max)) { - sb.append("\"max\":").append(max); - } - sb.append("}"); - return sb.toString(); - } - if(obj instanceof Collection) { - Collection col = (Collection) obj; - // Workaround: Arrow deserializes lists as a JsonStringArrayList which has a JSON String method - @NonNull ResultType elemType = ((ResultType.ListT) type).getElementType(); - return col.stream().map(v -> getPrintValue(v, elemType, settings)).collect(Collectors.joining(",", "[", "]")); - } - return obj.toString(); - } - - private static String getPrintValue(Object obj) { - if(obj instanceof JsonStringArrayList) { - // Workaround: Arrow deserializes lists as a JsonStringArrayList which has a JSON String method - return new ArrayList<>((JsonStringArrayList) obj).stream() - .map(ArrowResultGenerationTest::getPrintValue) - .collect(Collectors.joining(",", "[", "]")); - } - return Objects.toString(obj); - } + } } diff --git a/backend/src/test/java/com/bakdata/conquery/io/result/csv/CsvResultGenerationTest.java b/backend/src/test/java/com/bakdata/conquery/io/result/csv/CsvResultGenerationTest.java index 770bb17270..54ae964fed 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/result/csv/CsvResultGenerationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/result/csv/CsvResultGenerationTest.java @@ -12,13 +12,14 @@ import java.util.StringJoiner; import java.util.stream.Collectors; -import com.bakdata.conquery.io.result.ResultTestUtil; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.mapping.EntityPrintId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; +import com.bakdata.conquery.models.query.resultinfo.printers.StringResultPrinters; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.util.NonPersistentStoreFactory; @@ -28,41 +29,41 @@ @Slf4j public class CsvResultGenerationTest { - static { - I18n.init(); - } - - public static final ConqueryConfig CONFIG = new ConqueryConfig(){{ + public static final ConqueryConfig CONFIG = new ConqueryConfig() {{ // Suppress java.lang.NoClassDefFoundError: com/bakdata/conquery/io/jackson/serializer/CurrencyUnitDeserializer setStorage(new NonPersistentStoreFactory()); }}; + static { + I18n.init(); + } @Test void writeAndRead() throws IOException { // Prepare every input data - PrintSettings printSettings = new PrintSettings( - true, - Locale.GERMAN, - null, - 
CONFIG, - (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), - (selectInfo) -> selectInfo.getSelect().getLabel()); + StringResultPrinters printers = new StringResultPrinters(); + final PrintSettings printSettings = new PrintSettings(true, + Locale.GERMAN, + null, + CONFIG, + (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), + (selectInfo) -> selectInfo.getSelect().getLabel() + ); // The Shard nodes send Object[] but since Jackson is used for deserialization, nested collections are always a list because they are not further specialized - List results = getTestEntityResults(); + final List results = getTestEntityResults(); - ManagedQuery mquery = getTestQuery(); + final ManagedQuery mquery = getTestQuery(); // First we write to the buffer, than we read from it and parse it as TSV - StringWriter writer = new StringWriter(); + final StringWriter writer = new StringWriter(); - CsvRenderer renderer = new CsvRenderer(CONFIG.getCsv().createWriter(writer), printSettings); - renderer.toCSV(ResultTestUtil.ID_FIELDS, mquery.getResultInfos(printSettings), mquery.streamResults(OptionalLong.empty())); + final CsvRenderer renderer = new CsvRenderer(CONFIG.getCsv().createWriter(writer), printSettings); + renderer.toCSV(getIdFields(), mquery.getResultInfos(), mquery.streamResults(OptionalLong.empty()), printSettings); - String computed = writer.toString(); + final String computed = writer.toString(); - String expected = generateExpectedCSV(results, mquery.getResultInfos(printSettings)); + final String expected = generateExpectedCSV(results, mquery.getResultInfos(), printSettings, printers); log.info("Wrote and than read this csv data: {}", computed); @@ -71,31 +72,44 @@ void writeAndRead() throws IOException { } - private String generateExpectedCSV(List results, List resultInfos) { - List expected = new ArrayList<>(); - expected.add(ResultTestUtil.ID_FIELDS.stream().map(info -> info.defaultColumnName()).collect(Collectors.joining(",")) + "," + getResultTypes().stream().map(ResultType::typeInfo).collect(Collectors.joining(",")) + "\n"); + private String generateExpectedCSV(List results, List resultInfos, PrintSettings printSettings, PrinterFactory printerFactory) { + final List expected = new ArrayList<>(); + expected.add(getIdFields().stream().map(info -> info.defaultColumnName(printSettings)).collect(Collectors.joining(",")) + + "," + + getResultTypes().stream() + .map(ResultType::typeInfo) + .collect(Collectors.joining(",")) + + "\n"); + + final String delimiter = String.valueOf(CONFIG.getCsv().getDelimeter()); + results.stream() - .map(EntityResult.class::cast) - .forEach(res -> { - - for (Object[] line : res.listResultLines()) { - StringJoiner valueJoiner = new StringJoiner(","); - valueJoiner.add(String.valueOf(res.getEntityId())); - valueJoiner.add(String.valueOf(res.getEntityId())); - for (int lIdx = 0; lIdx < line.length; lIdx++) { - Object val = line[lIdx]; - if(val == null) { - valueJoiner.add(""); - continue; - } - ResultInfo info = resultInfos.get(lIdx); - final String printVal = info.printNullable(val); - valueJoiner.add(printVal.contains(String.valueOf(CONFIG.getCsv().getDelimeter()))? 
"\""+printVal+"\"": printVal); - } - - expected.add(valueJoiner + "\n"); - } - }); + .map(EntityResult.class::cast) + .forEach(res -> { + + for (Object[] line : res.listResultLines()) { + final StringJoiner valueJoiner = new StringJoiner(","); + + valueJoiner.add(String.valueOf(res.getEntityId())); + valueJoiner.add(String.valueOf(res.getEntityId())); + + for (int lIdx = 0; lIdx < line.length; lIdx++) { + final Object val = line[lIdx]; + + if (val == null) { + valueJoiner.add(""); + continue; + } + + final ResultInfo info = resultInfos.get(lIdx); + final String printVal = (String) info.createPrinter(printerFactory, printSettings).apply(val); + + valueJoiner.add(printVal.contains(delimiter) ? "\"" + printVal + "\"" : printVal); + } + + expected.add(valueJoiner + "\n"); + } + }); return String.join("", expected); } diff --git a/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java b/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java index a9bb2101ce..9fb1870c83 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java @@ -13,6 +13,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Objects; import java.util.OptionalLong; import java.util.StringJoiner; import java.util.stream.Collectors; @@ -21,6 +22,7 @@ import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.result.ResultTestUtil; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.ExcelConfig; @@ -31,9 +33,13 @@ import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; +import com.bakdata.conquery.models.query.resultinfo.printers.StringResultPrinters; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import lombok.extern.slf4j.Slf4j; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.DataFormatter; @@ -46,62 +52,56 @@ @Slf4j public class ExcelResultRenderTest { - static { - I18n.init(); - } - public static final ConqueryConfig CONFIG = new ConqueryConfig(){{ // Suppress java.lang.NoClassDefFoundError: com/bakdata/conquery/io/jackson/serializer/CurrencyUnitDeserializer setStorage(new NonPersistentStoreFactory()); }}; private static final List printIdFields = List.of("id1", "id2"); + static { + I18n.init(); + } @Test void writeAndRead() throws IOException { // Prepare every input data - PrintSettings printSettings = new PrintSettings( - true, - Locale.GERMAN, - null, - CONFIG, - (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), - (selectInfo) -> selectInfo.getSelect().getLabel()); + final PrintSettings printSettings = new PrintSettings(true, + Locale.GERMAN, + null, + CONFIG, + (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), + (selectInfo) -> 
selectInfo.getSelect().getLabel() + ); // The Shard nodes send Object[] but since Jackson is used for deserialization, nested collections are always a list because they are not further specialized - List results = getTestEntityResults(); + final List results = getTestEntityResults(); - ManagedQuery mquery = new ManagedQuery(mock(Query.class), mock(User.class), new Dataset(ExcelResultRenderTest.class.getSimpleName()), null, null) { - public List getResultInfos(PrintSettings printSettings) { - return getResultTypes().stream() - .map(ResultTestUtil.TypedSelectDummy::new) - .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet(), printSettings)) - .collect(Collectors.toList()); - } + MetaStorage metaStorage = new MetaStorage(new NonPersistentStoreFactory()); + metaStorage.openStores(null, new MetricRegistry()); - @Override - public Stream streamResults(OptionalLong maybeLimit) { - return results.stream(); - } - }; + ManagedQuery mquery = getManagedQuery(metaStorage, results); // First we write to the buffer, than we read from it and parse it as TSV - ByteArrayOutputStream output = new ByteArrayOutputStream(); + final ByteArrayOutputStream output = new ByteArrayOutputStream(); - ExcelRenderer renderer = new ExcelRenderer(new ExcelConfig(),printSettings); + final ExcelRenderer renderer = new ExcelRenderer(new ExcelConfig(), printSettings); - renderer.renderToStream( - ResultTestUtil.ID_FIELDS, - mquery, - output, OptionalLong.empty(), printSettings - ); + renderer.renderToStream(ResultTestUtil.getIdFields(), mquery, output, OptionalLong.empty(), printSettings); - InputStream inputStream = new ByteArrayInputStream(output.toByteArray()); + final InputStream inputStream = new ByteArrayInputStream(output.toByteArray()); - List computed = readComputed(inputStream, printSettings); + final List computed = readComputed(inputStream, printSettings); + // We have to do some magic here to emulate the excel printed results. 
+ PrintSettings tsvPrintSettings = new PrintSettings(true, + Locale.GERMAN, + null, + CONFIG, + (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), + (selectInfo) -> selectInfo.getSelect().getLabel() + ); - List expected = generateExpectedTSV(results, mquery.getResultInfos(printSettings)); + final List expected = generateExpectedTSV(results, mquery.getResultInfos(), tsvPrintSettings, new StringResultPrinters()); log.info("Wrote and than read this excel data: {}", computed); @@ -110,15 +110,35 @@ public Stream streamResults(OptionalLong maybeLimit) { } + private static @NotNull ManagedQuery getManagedQuery(MetaStorage metaStorage, List results) { + User user = new User("test", "test", metaStorage); + user.updateStorage(); + + return new ManagedQuery(mock(Query.class), user.getId(), new Dataset(ExcelResultRenderTest.class.getSimpleName()).getId(), metaStorage, null) { + @Override + public Stream streamResults(OptionalLong maybeLimit) { + return results.stream(); + } + + @Override + public List getResultInfos() { + return getResultTypes().stream() + .map(ResultTestUtil.TypedSelectDummy::new) + .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet())) + .collect(Collectors.toList()); + } + }; + } + @NotNull private List readComputed(InputStream inputStream, PrintSettings settings) throws IOException { - XSSFWorkbook workbook = new XSSFWorkbook(inputStream); - XSSFSheet sheet = workbook.getSheetAt(0); + final XSSFWorkbook workbook = new XSSFWorkbook(inputStream); + final XSSFSheet sheet = workbook.getSheetAt(0); - List computed = new ArrayList<>(); + final List computed = new ArrayList<>(); for (Row row : sheet) { - StringJoiner sj = new StringJoiner("\t"); - DataFormatter formatter = new DataFormatter(settings.getLocale()); + final StringJoiner sj = new StringJoiner("\t"); + final DataFormatter formatter = new DataFormatter(settings.getLocale()); for (Cell cell : row) { final String formatted = switch (cell.getCellType()) { @@ -136,46 +156,48 @@ private List readComputed(InputStream inputStream, PrintSettings setting } - private List generateExpectedTSV(List results, List resultInfos) { - List expected = new ArrayList<>(); + private List generateExpectedTSV( + List results, List resultInfos, PrintSettings printSettings, PrinterFactory printerFactory) { + final List expected = new ArrayList<>(); expected.add(String.join("\t", printIdFields) + "\t" + getResultTypes().stream().map(ResultType::typeInfo).collect(Collectors.joining("\t"))); results.stream() - .map(EntityResult.class::cast) - .forEach(res -> { - - for (Object[] line : res.listResultLines()) { - StringJoiner valueJoiner = new StringJoiner("\t"); - valueJoiner.add(String.valueOf(res.getEntityId())); - valueJoiner.add(String.valueOf(res.getEntityId())); - for (int lIdx = 0; lIdx < line.length; lIdx++) { - Object val = line[lIdx]; - if(val == null) { - valueJoiner.add("null"); - continue; - } - ResultInfo info = resultInfos.get(lIdx); - joinValue(valueJoiner, val, info); - } - expected.add(valueJoiner.toString()); - } - }); + .map(EntityResult.class::cast) + .forEach(res -> { + + for (Object[] line : res.listResultLines()) { + final StringJoiner valueJoiner = new StringJoiner("\t"); + + valueJoiner.add(String.valueOf(res.getEntityId())); + valueJoiner.add(String.valueOf(res.getEntityId())); + + for (int lIdx = 0; lIdx < line.length; lIdx++) { + final Object val = line[lIdx]; + + final ResultInfo info = resultInfos.get(lIdx); + final String printed = printValue(val, info, printSettings, 
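readComputed above pulls the rendered workbook back in through POI and formats every cell with a locale-aware DataFormatter before comparing it to the expected TSV. A simplified, self-contained sketch of that readback path; the real helper additionally switches on the cell type to handle formulas and blanks:

```java
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.StringJoiner;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.DataFormatter;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

public class ExcelReadbackSketch {

	// Read the first sheet back as tab-separated lines, letting POI apply the locale formatting.
	static List<String> readSheetAsTsv(InputStream inputStream, Locale locale) throws IOException {
		List<String> lines = new ArrayList<>();
		try (XSSFWorkbook workbook = new XSSFWorkbook(inputStream)) {
			XSSFSheet sheet = workbook.getSheetAt(0);
			DataFormatter formatter = new DataFormatter(locale);
			for (Row row : sheet) {
				StringJoiner joiner = new StringJoiner("\t");
				for (Cell cell : row) {
					joiner.add(formatter.formatCellValue(cell));
				}
				lines.add(joiner.toString());
			}
		}
		return lines;
	}
}
```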
printerFactory); + + valueJoiner.add(printed); + } + expected.add(valueJoiner.toString()); + } + }); return expected; } - private void joinValue(StringJoiner valueJoiner, Object val, ResultInfo info) { - String printVal = info.printNullable(val); + private String printValue(Object val, ResultInfo info, PrintSettings printSettings, PrinterFactory printerFactory) { + if (val == null) { + return "null"; + } + + final Printer printer = info.createPrinter(printerFactory, printSettings); if (info.getType().equals(ResultType.Primitive.BOOLEAN)) { // Even though we set the locale to GERMAN, poi's {@link DataFormatter#formatCellValue(Cell)} hardcoded english booleans - printVal = (Boolean) val ? "TRUE" : "FALSE"; - } - - if(info.getType().equals(ResultType.Primitive.MONEY)){ - printVal = printVal.replace(" ", " "); + return (Boolean) val ? "TRUE" : "FALSE"; } - valueJoiner.add(printVal); + return Objects.toString(printer.apply(val)); } } diff --git a/backend/src/test/java/com/bakdata/conquery/io/result/parquet/ParquetResultGenerationTest.java b/backend/src/test/java/com/bakdata/conquery/io/result/parquet/ParquetResultGenerationTest.java index 8751490399..0b6a5e4f85 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/result/parquet/ParquetResultGenerationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/result/parquet/ParquetResultGenerationTest.java @@ -16,7 +16,6 @@ import java.util.stream.Collectors; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.io.result.ResultTestUtil; import com.bakdata.conquery.io.result.arrow.ArrowResultGenerationTest; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.i18n.I18n; @@ -56,9 +55,9 @@ void generateSchema() { final UniqueNamer uniqueNamer = new UniqueNamer(PRINT_SETTINGS); List resultInfos = getResultTypes().stream().map(TypedSelectDummy::new) - .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet(), PRINT_SETTINGS)).collect(Collectors.toList()); + .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet())).collect(Collectors.toList()); - final MessageType messageType = EntityResultWriteSupport.generateSchema(ResultTestUtil.ID_FIELDS, resultInfos, uniqueNamer); + final MessageType messageType = EntityResultWriteSupport.generateSchema(getIdFields(), resultInfos, uniqueNamer, PRINT_SETTINGS); assertThat(messageType).isEqualTo( Types.buildMessage() @@ -104,13 +103,12 @@ void writeAndRead() throws IOException { I18n.init(); // Prepare every input data - PrintSettings printSettings = new PrintSettings( - false, - Locale.ROOT, - null, - CONFIG, - (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), - (selectInfo) -> selectInfo.getSelect().getLabel() + PrintSettings printSettings = new PrintSettings(false, + Locale.ROOT, + null, + CONFIG, + (cer) -> EntityPrintId.from(cer.getEntityId(), cer.getEntityId()), + (selectInfo) -> selectInfo.getSelect().getLabel() ); // The Shard nodes send Object[] but since Jackson is used for deserialization, nested collections are always a list because they are not further specialized List results = getTestEntityResults(); @@ -119,7 +117,7 @@ void writeAndRead() throws IOException { // First we write to the buffer, than we read from it and parse it as TSV ByteArrayOutputStream output = new ByteArrayOutputStream(); - ParquetRenderer.writeToStream(output, ResultTestUtil.ID_FIELDS, managedQuery.getResultInfos(printSettings), printSettings, 
managedQuery.streamResults(OptionalLong.empty())); + ParquetRenderer.writeToStream(output, getIdFields(), managedQuery.getResultInfos(), printSettings, managedQuery.streamResults(OptionalLong.empty())); final byte[] buf = output.toByteArray(); @@ -141,7 +139,7 @@ void writeAndRead() throws IOException { log.info("\n{}", actual); - assertThat(actual).isEqualTo(ArrowResultGenerationTest.generateExpectedTSV(results, managedQuery.getResultInfos(printSettings), printSettings)); + assertThat(actual).isEqualTo(ArrowResultGenerationTest.generateExpectedTSV(results, managedQuery.getResultInfos())); } diff --git a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java index c1ad2f258b..baa5b24a3c 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java @@ -18,7 +18,7 @@ import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore.IterationStatistic; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.XodusStoreFactory; -import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.util.NonPersistentStoreFactory; @@ -44,7 +44,7 @@ public class SerializingStoreDumpTest { private ObjectMapper objectMapper; // Test data - private final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), mock(User.class), new Dataset("dataset"), STORAGE, null); + private final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), new UserId("test"), new DatasetId("dataset"), STORAGE, null); private final ConceptQuery cQuery = new ConceptQuery( new CQReusedQuery(managedQuery.getId())); private final User user = new User("username", "userlabel", STORAGE); diff --git a/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java b/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java index cd0bd04dc0..1fd2c10925 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java @@ -1,11 +1,9 @@ package com.bakdata.conquery.models; -import static com.bakdata.conquery.models.types.SerialisationObjectsUtil.*; +import static com.bakdata.conquery.util.SerialisationObjectsUtil.*; import static org.assertj.core.api.Assertions.assertThat; import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; import java.time.LocalDate; import java.time.ZonedDateTime; import java.util.Arrays; @@ -29,12 +27,19 @@ import com.bakdata.conquery.apiv1.query.ConceptQuery; import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.apiv1.query.concept.specific.CQOr; import com.bakdata.conquery.io.AbstractSerializationTest; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.external.form.FormBackendVersion; +import com.bakdata.conquery.io.jackson.Injectable; +import 
com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.io.jackson.serializer.SerializationTestUtil; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.NamespacedStorageImpl; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.Role; import com.bakdata.conquery.models.auth.entities.User; @@ -70,19 +75,16 @@ import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; import com.bakdata.conquery.models.i18n.I18n; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.IdMapSerialisationTest; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; -import com.bakdata.conquery.models.index.InternToExternMapper; -import com.bakdata.conquery.models.index.MapInternToExternMapper; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.query.results.MultilineEntityResult; -import com.bakdata.conquery.util.SerialisationObjectsUtil; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; @@ -98,6 +100,7 @@ import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap; import lombok.extern.slf4j.Slf4j; import org.assertj.core.api.RecursiveComparisonAssert; +import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -107,10 +110,36 @@ @Slf4j public class SerializationTests extends AbstractSerializationTest { + public static Stream> rangeData() { + final int SEED = 7; + Random random = new Random(SEED); + return Stream + .generate(() -> { + int first = random.nextInt(); + int second = random.nextInt(); + + if (first < second) { + return Range.of(first, second); + } + return Range.of(second, first); + }) + .filter(Range::isOrdered) + .flatMap(range -> Stream.of( + range, + Range.exactly(range.getMin()), + Range.atMost(range.getMin()), + Range.atLeast(range.getMin()) + )) + .filter(Range::isOrdered) + .limit(100); + } + @Test public void dataset() throws IOException, JSONException { Dataset dataset = new Dataset(); dataset.setName("dataset"); + dataset.setLabel("Dataset"); + dataset.setNamespacedStorageProvider(getDatasetRegistry()); SerializationTestUtil .forType(Dataset.class) @@ -144,18 +173,18 @@ public void role() throws IOException, JSONException { @Test public void user() throws IOException, JSONException { User user = new User("user", "user", getMetaStorage()); + user.setMetaStorage(getMetaStorage()); user.addPermission(DatasetPermission.onInstance(Ability.READ, new DatasetId("test"))); user.addPermission(ExecutionPermission.onInstance(Ability.READ, new ManagedExecutionId(new DatasetId("dataset"), UUID.randomUUID()))); Role role = new Role("company", "company", getMetaStorage()); 
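rangeData() is only relocated within SerializationTests, not changed: it must stay a static factory because it is consumed via @MethodSource by the parameterized range test further down. A tiny generic illustration of that JUnit 5 pattern, independent of Conquery's Range class:

```java
import java.util.stream.Stream;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import static org.junit.jupiter.api.Assertions.assertTrue;

class MethodSourceSketch {

	// @MethodSource("ranges") resolves to this static factory in the same class.
	static Stream<int[]> ranges() {
		return Stream.of(new int[]{1, 2}, new int[]{3, 3}, new int[]{-5, 0});
	}

	@ParameterizedTest
	@MethodSource("ranges")
	void isOrdered(int[] range) {
		assertTrue(range[0] <= range[1]);
	}
}
```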
user.addRole(role); - CentralRegistry registry = getMetaStorage().getCentralRegistry(); - registry.register(role); + getMetaStorage().addRole(role); SerializationTestUtil .forType(User.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) + .injectables(getMetaStorage()) .test(user); } @@ -171,9 +200,9 @@ public void group() throws IOException, JSONException { User user = new User("userName", "userLabel", getMetaStorage()); group.addMember(user); - CentralRegistry registry = getMetaStorage().getCentralRegistry(); - registry.register(role); - registry.register(user); + final MetaStorage metaStorage = getMetaStorage(); + metaStorage.addRole(role); + metaStorage.addUser(user); SerializationTestUtil .forType(Group.class) @@ -181,7 +210,6 @@ public void group() throws IOException, JSONException { .test(group); } - @Test @Tag("OBJECT_2_INT_MAP") // Bucket uses Object2IntMap public void bucketCompoundDateRange() throws JSONException, IOException { @@ -208,11 +236,11 @@ public void bucketCompoundDateRange() throws JSONException, IOException { compoundCol.setTable(table); table.setColumns(new Column[]{startCol, endCol, compoundCol}); - table.setDataset(dataset); + table.setDataset(dataset.getId()); table.setName("tableName"); - Import imp = new Import(table); + Import imp = new Import(table.getId()); imp.setName("importTest"); @@ -223,39 +251,70 @@ public void bucketCompoundDateRange() throws JSONException, IOException { ColumnStore startStore = new IntegerDateStore(new ShortArrayStore(new short[]{1, 2, 3, 4}, Short.MIN_VALUE)); ColumnStore endStore = new IntegerDateStore(new ShortArrayStore(new short[]{5, 6, 7, 8}, Short.MIN_VALUE)); - Bucket bucket = new Bucket(0, new ColumnStore[]{startStore, endStore, compoundStore}, Object2IntMaps.singleton("0", 0), Object2IntMaps.singleton("0", 4),4, imp); + Bucket bucket = + new Bucket(0, Object2IntMaps.singleton("0", 0), Object2IntMaps.singleton("0", 4), 4, imp.getId(), new ColumnStore[]{startStore, endStore, compoundStore}); compoundStore.setParent(bucket); + final WorkerStorageImpl workerStorage = getWorkerStorage(); - CentralRegistry registry = getMetaStorage().getCentralRegistry(); - - registry.register(dataset); - registry.register(startCol); - registry.register(endCol); - registry.register(compoundCol); - registry.register(table); - registry.register(imp); - registry.register(bucket); + workerStorage.updateDataset(dataset); + workerStorage.addTable(table); + workerStorage.addImport(imp); + workerStorage.addBucket(bucket); final Validator validator = Validators.newValidator(); SerializationTestUtil .forType(Bucket.class) - .objectMappers(getManagerInternalMapper(), getShardInternalMapper()) - .registry(registry) - .injectables(values -> values.add(Validator.class, validator)) + .objectMappers(getShardInternalMapper()) + .injectables(new Injectable() { + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(Validator.class, validator); + } + }) .test(bucket); } - @Test public void table() throws JSONException, IOException { - Dataset dataset = new Dataset(); - dataset.setName("datasetName"); + { + // Manager + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + Dataset dataset = createDataset(namespaceStorage); + + Table table = getTable(dataset); + table.setStorage(namespaceStorage); + + table.init(); + + SerializationTestUtil + .forType(Table.class) + .objectMappers(getNamespaceInternalMapper(), getApiMapper()) + .test(table); + } + + { + // Shard + 
final WorkerStorageImpl workerStorage = getWorkerStorage(); + Dataset dataset = createDataset(workerStorage); + + Table table = getTable(dataset); + table.setStorage(workerStorage); + + workerStorage.addTable(table); + + SerializationTestUtil + .forType(Table.class) + .objectMappers(getShardInternalMapper()) + .test(table); + } + } + private static @NotNull Table getTable(Dataset dataset) { Table table = new Table(); Column column = new Column(); @@ -266,54 +325,69 @@ public void table() throws JSONException, IOException { table.setColumns(new Column[]{column}); - table.setDataset(dataset); + table.setDataset(dataset.getId()); table.setLabel("tableLabel"); table.setName("tableName"); + return table; + } + @Test + public void filterValueMoneyRange() throws JSONException, IOException { + FilterValue.CQMoneyRangeFilter filterValue = + new FilterValue.CQMoneyRangeFilter(FilterId.Parser.INSTANCE.parse("dataset.concept.connector.filter"), new Range.LongRange(2000L, 30000L)); - CentralRegistry registry = getMetaStorage().getCentralRegistry(); + filterValue.setConfig(getConfig()); - registry.register(dataset); - registry.register(table); - registry.register(column); SerializationTestUtil - .forType(Table.class) - .objectMappers(getManagerInternalMapper(), getShardInternalMapper(), getApiMapper()) - .registry(registry) - .test(table); + .forType(FilterValue.class) + .objectMappers(getNamespaceInternalMapper(), getApiMapper()) + .test(filterValue); } @Test public void treeConcept() throws IOException, JSONException { + { + // Manager + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + final Dataset dataset = createDataset(namespaceStorage); + TreeConcept concept = createConcept(dataset, namespaceStorage); - CentralRegistry registry = getMetaStorage().getCentralRegistry(); - Dataset dataset = createDataset(registry); + SerializationTestUtil + .forType(Concept.class) + .objectMappers(getNamespaceInternalMapper(), getApiMapper()) + .test(concept); + } - TreeConcept concept = createConcept(registry, dataset); - concept.init(); + { + // Shard + final WorkerStorageImpl workerStorage = getWorkerStorage(); + final Dataset dataset = createDataset(workerStorage); + TreeConcept concept = createConcept(dataset, workerStorage); - SerializationTestUtil - .forType(Concept.class) - .objectMappers(getManagerInternalMapper(), getShardInternalMapper(), getApiMapper()) - .registry(registry) - .test(concept); - } + SerializationTestUtil + .forType(Concept.class) + .objectMappers(getShardInternalMapper()) + .test(concept); + } + } @Test public void persistentIdMap() throws JSONException, IOException { + EntityIdMap persistentMap = IdMapSerialisationTest.createTestPersistentMap(getNamespaceStorage()); + SerializationTestUtil.forType(EntityIdMap.class) - .objectMappers(getManagerInternalMapper()) - .test(IdMapSerialisationTest.createTestPersistentMap()); + .objectMappers(getNamespaceInternalMapper(), getApiMapper()) + .test(persistentMap); } @Test public void formConfig() throws JSONException, IOException { - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); - final Dataset dataset = createDataset(registry); + final Dataset dataset = createDataset(namespaceStorage); ExportForm form = new ExportForm(); AbsoluteMode mode = new AbsoluteMode(); @@ -328,77 +402,76 @@ public void formConfig() throws JSONException, IOException { SerializationTestUtil .forType(FormConfig.class) .objectMappers(getManagerInternalMapper(), 
getApiMapper()) - .registry(registry) .test(formConfig); } @Test public void managedQuery() throws JSONException, IOException { - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + final MetaStorage metaStorage = getMetaStorage(); final Dataset dataset = new Dataset("test-dataset"); final User user = new User("test-user", "test-user", getMetaStorage()); - registry.register(dataset); - registry.register(user); + namespaceStorage.updateDataset(dataset); - getMetaStorage().updateUser(user); + metaStorage.updateUser(user); - ManagedQuery execution = new ManagedQuery(null, user, dataset, getMetaStorage(), getDatasetRegistry()); + ManagedQuery execution = new ManagedQuery(null, user.getId(), dataset.getId(), getMetaStorage(), getDatasetRegistry()); execution.setTags(new String[]{"test-tag"}); + // Trigger UUID creation + execution.getId(); + SerializationTestUtil.forType(ManagedExecution.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) + .injectables(metaStorage) .test(execution); } @Test public void testExportForm() throws JSONException, IOException { - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); - - final Dataset dataset = createDataset(registry); - + final NamespaceStorage namespaceStorage = getNamespaceStorage(); - registry.register(dataset); + final Dataset dataset = createDataset(namespaceStorage); - final ExportForm exportForm = createExportForm(registry, dataset); + final ExportForm exportForm = createExportForm(dataset, namespaceStorage); SerializationTestUtil.forType(QueryDescription.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) .checkHashCode() .test(exportForm); } @Test public void managedForm() throws JSONException, IOException { + final NamespaceStorage namespaceStorage = getNamespaceStorage(); - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); + final Dataset dataset = createDataset(namespaceStorage); - final Dataset dataset = createDataset(registry); + final User user = createUser(getMetaStorage()); - final User user = createUser(registry, getMetaStorage()); + final ExportForm exportForm = createExportForm(dataset, namespaceStorage); - final ExportForm exportForm = createExportForm(registry, dataset); - - ManagedInternalForm execution = new ManagedInternalForm<>(exportForm, user, dataset, getMetaStorage(), getDatasetRegistry()); + ManagedInternalForm execution = new ManagedInternalForm<>(exportForm, user.getId(), dataset.getId(), getMetaStorage(), getDatasetRegistry()); execution.setTags(new String[]{"test-tag"}); + // Trigger UUID creation + execution.getId(); + SerializationTestUtil.forType(ManagedExecution.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) .test(execution); } - @Test public void testExternalExecution() throws IOException, JSONException { - final CentralRegistry centralRegistry = getMetaStorage().getCentralRegistry(); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + final String subType = "test-type"; JsonNodeFactory factory = new JsonNodeFactory(false); @@ -407,53 +480,71 @@ public void testExternalExecution() throws IOException, JSONException { )); ExternalForm form = new ExternalForm(node, subType); - final Dataset dataset = SerialisationObjectsUtil.createDataset(centralRegistry); - final User user = SerialisationObjectsUtil.createUser(centralRegistry, getMetaStorage()); + final 
Dataset dataset = createDataset(namespaceStorage); + final User user = createUser(getMetaStorage()); - final ExternalExecution execution = new ExternalExecution(form, user, dataset, getMetaStorage(), getDatasetRegistry()); + final ExternalExecution execution = new ExternalExecution(form, user.getId(), dataset.getId(), getMetaStorage(), getDatasetRegistry()); + + // Trigger UUID creation + execution.getId(); SerializationTestUtil.forType(ManagedExecution.class) .objectMappers(getManagerInternalMapper()) - .registry(centralRegistry) .test(execution); } @Test public void cqConcept() throws JSONException, IOException { + { + // Manager + final NamespaceStorage namespaceStorage = getNamespaceStorage(); - final Dataset dataset = new Dataset(); - dataset.setName("dataset"); + final CQConcept cqConcept = createCqConcept(namespaceStorage); + + SerializationTestUtil + .forType(CQConcept.class) + .objectMappers(getManagerInternalMapper(), getApiMapper()) + .test(cqConcept); + } + + { + // Shard + final WorkerStorageImpl workerStorage = getWorkerStorage(); + final CQConcept cqConcept = createCqConcept(workerStorage); + + SerializationTestUtil + .forType(CQConcept.class) + .objectMappers(getShardInternalMapper()) + .test(cqConcept); + } + } + + private static @NotNull CQConcept createCqConcept(NamespacedStorageImpl namespaceStorage) { + Dataset dataset = createDataset(namespaceStorage); final TreeConcept concept = new TreeConcept(); concept.setName("concept"); - concept.setDataset(dataset); + concept.setDataset(dataset.getId()); final ConceptTreeConnector connector = new ConceptTreeConnector(); connector.setConcept(concept); + connector.setName("connector"); concept.setConnectors(List.of(connector)); final CQConcept cqConcept = new CQConcept(); - cqConcept.setElements(List.of(concept)); + cqConcept.setElements(List.of(concept.getId())); cqConcept.setLabel("Label"); final CQTable cqTable = new CQTable(); - cqTable.setConnector(connector); + cqTable.setConnector(connector.getId()); cqTable.setFilters(List.of()); cqTable.setConcept(cqConcept); cqConcept.setTables(List.of(cqTable)); - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); - registry.register(dataset); - registry.register(concept); - registry.register(connector); - - SerializationTestUtil - .forType(CQConcept.class) - .objectMappers(getManagerInternalMapper(), getShardInternalMapper(), getApiMapper()) - .registry(registry) - .test(cqConcept); + namespaceStorage.updateConcept(concept); + return cqConcept; } @Test @@ -479,7 +570,6 @@ public void executionCreationResolveError() throws JSONException, IOException { .test(error); } - @Test public void executionQueryJobError() throws JSONException, IOException { log.info("Beware, this test will print an ERROR message."); @@ -493,13 +583,15 @@ public void executionQueryJobError() throws JSONException, IOException { @Test public void meInformation() throws IOException, JSONException { - User user = new User("name", "label", getMetaStorage()); + User user = new User("name", "labe", getMetaStorage()); MeProcessor.FrontendMeInformation info = MeProcessor.FrontendMeInformation.builder() .userName(user.getLabel()) .hideLogoutButton(false) .groups(List.of(new IdLabel<>(new GroupId("test_group"), "test_group_label"))) - .datasetAbilities(Map.of(new DatasetId("testdataset"), new MeProcessor.FrontendDatasetAbility(true, true, true))) + .datasetAbilities(Map.of(new DatasetId("testdataset"), + new MeProcessor.FrontendDatasetAbility(true, true, true) + )) .build(); SerializationTestUtil @@ 
-514,7 +606,7 @@ public void testFormQuery() throws IOException, JSONException { final TreeConcept testConcept = new TreeConcept(); Dataset dataset = new Dataset(); dataset.setName("testDataset"); - testConcept.setDataset(dataset); + testConcept.setDataset(dataset.getId()); testConcept.setName("concept"); final ConceptTreeConnector connector = new ConceptTreeConnector(); connector.setConcept(testConcept); @@ -522,10 +614,12 @@ public void testFormQuery() throws IOException, JSONException { testConcept.setConnectors(List.of(connector)); - concept.setElements(Collections.singletonList(testConcept)); + concept.setElements(Collections.singletonList(testConcept.getId())); CQTable[] tables = {new CQTable()}; - connector.setTable(new Table()); - tables[0].setConnector(connector); + Table table = new Table(); + table.setDataset(dataset.getId()); + connector.setTable(table.getId()); + tables[0].setConnector(connector.getId()); tables[0].setConcept(concept); concept.setTables(Arrays.asList(tables)); ConceptQuery subQuery = new ConceptQuery(concept); @@ -545,28 +639,30 @@ public void testFormQuery() throws IOException, JSONException { ) ); - CentralRegistry centralRegistry = getMetaStorage().getCentralRegistry(); - centralRegistry.register(dataset); - centralRegistry.register(testConcept); - centralRegistry.register(connector); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + namespaceStorage.updateDataset(dataset); + namespaceStorage.updateConcept(testConcept); + + WorkerStorageImpl workerStorage = getWorkerStorage(); + workerStorage.updateDataset(dataset); + workerStorage.updateConcept(testConcept); SerializationTestUtil .forType(AbsoluteFormQuery.class) .objectMappers(getManagerInternalMapper(), getShardInternalMapper(), getApiMapper()) - .registry(centralRegistry) .test(query); } - @Test - public void serialize() throws IOException, JSONException { - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); + public void cBlock() throws IOException, JSONException { + final WorkerStorageImpl workerStorage = getWorkerStorage(); final Dataset dataset = new Dataset(); + dataset.setNamespacedStorageProvider(workerStorage); dataset.setName("dataset"); final TreeConcept concept = new TreeConcept(); - concept.setDataset(dataset); + concept.setDataset(dataset.getId()); concept.setName("concept"); final ConceptTreeConnector connector = new ConceptTreeConnector(); @@ -577,26 +673,24 @@ public void serialize() throws IOException, JSONException { final Table table = new Table(); table.setName("table"); - table.setDataset(dataset); + table.setDataset(dataset.getId()); - final Import imp = new Import(table); + final Import imp = new Import(table.getId()); imp.setName("import"); - final Bucket bucket = new Bucket(0, new ColumnStore[0], Object2IntMaps.emptyMap(), Object2IntMaps.emptyMap(),0, imp); + workerStorage.updateDataset(dataset); + workerStorage.addTable(table); + workerStorage.updateConcept(concept); + workerStorage.addImport(imp); + final Bucket bucket = new Bucket(0, Object2IntMaps.emptyMap(), Object2IntMaps.emptyMap(), 0, imp.getId(), new ColumnStore[0]); - final CBlock cBlock = CBlock.createCBlock(connector, bucket, 10); + workerStorage.addBucket(bucket); - registry.register(dataset) - .register(table) - .register(concept) - .register(connector) - .register(bucket) - .register(imp); + final CBlock cBlock = new CBlock(bucket.getId(), connector.getId(), 0, Collections.emptyMap(), Collections.emptyMap(), new int[0][]); SerializationTestUtil.forType(CBlock.class) 
.objectMappers(getShardInternalMapper()) - .registry(registry) .test(cBlock); } @@ -639,30 +733,6 @@ public void testNonStrictNumbers() throws JSONException, IOException { ); } - public static Stream> rangeData() { - final int SEED = 7; - Random random = new Random(SEED); - return Stream - .generate(() -> { - int first = random.nextInt(); - int second = random.nextInt(); - - if (first < second) { - return Range.of(first, second); - } - return Range.of(second, first); - }) - .filter(Range::isOrdered) - .flatMap(range -> Stream.of( - range, - Range.exactly(range.getMin()), - Range.atMost(range.getMin()), - Range.atLeast(range.getMin()) - )) - .filter(Range::isOrdered) - .limit(100); - } - @ParameterizedTest @MethodSource("rangeData") public void test(Range range) throws IOException, JSONException { @@ -843,25 +913,4 @@ public void formBackendVersion() throws JSONException, IOException { .test(version); } - - @Test - public void mapInternToExternMapper() throws JSONException, IOException, URISyntaxException { - final MapInternToExternMapper mapper = new MapInternToExternMapper( - "test1", - new URI("classpath:/tests/aggregator/FIRST_MAPPED_AGGREGATOR/mapping.csv"), - "internal", - "{{external}}" - ); - - mapper.setStorage(getNamespaceStorage()); - mapper.setConfig(getConfig()); - mapper.setMapIndex(getIndexService()); - - - mapper.init(); - - SerializationTestUtil.forType(InternToExternMapper.class) - .objectMappers(getApiMapper(), getNamespaceInternalMapper()) - .test(mapper); - } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/auth/IdpDelegatingAccessTokenCreatorTest.java b/backend/src/test/java/com/bakdata/conquery/models/auth/IdpDelegatingAccessTokenCreatorTest.java index 01b3301d79..be42149d21 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/auth/IdpDelegatingAccessTokenCreatorTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/auth/IdpDelegatingAccessTokenCreatorTest.java @@ -10,7 +10,6 @@ import static org.mockserver.model.ParameterBody.params; import java.util.Map; - import jakarta.validation.Validator; import com.auth0.jwt.JWT; @@ -18,20 +17,23 @@ import com.bakdata.conquery.models.auth.oidc.passwordflow.IdpDelegatingAccessTokenCreator; import com.bakdata.conquery.models.config.auth.IntrospectionDelegatingRealmFactory; import com.bakdata.conquery.models.exceptions.ValidatorHelper; +import com.bakdata.conquery.util.extensions.MockServerExtension; import io.dropwizard.validation.BaseValidator; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpStatus; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockserver.integration.ClientAndServer; import org.mockserver.model.JsonBody; import org.mockserver.model.MediaType; @Slf4j public class IdpDelegatingAccessTokenCreatorTest { + @RegisterExtension + private static final MockServerExtension OIDC_SERVER = new MockServerExtension(ClientAndServer.startClientAndServer(1080), IdpDelegatingAccessTokenCreatorTest::initOIDCServer); - private static final OIDCMockServer OIDC_SERVER = new OIDCMockServer(); private static final IntrospectionDelegatingRealmFactory CONFIG = new IntrospectionDelegatingRealmFactory(); private static final Validator VALIDATOR = BaseValidator.newValidator(); @@ -45,8 +47,6 @@ public class IdpDelegatingAccessTokenCreatorTest { @BeforeAll public static void beforeAll() { - initOIDCServer(); - initRealmConfig(); 
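The auth tests drop their hand-rolled @BeforeAll/@AfterAll server lifecycle in favour of a MockServerExtension registered with @RegisterExtension. The project's actual extension is not shown here; the sketch below is a hypothetical reconstruction of the usual shape of such a wrapper (adopt a started ClientAndServer, let the test class contribute its expectations, stop the server after the class), using only JUnit 5 and MockServer API:

```java
import java.util.function.Consumer;

import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.mockserver.integration.ClientAndServer;

public class MockServerExtensionSketch implements AfterAllCallback {

	private final ClientAndServer server;

	public MockServerExtensionSketch(ClientAndServer server, Consumer<ClientAndServer> initializer) {
		this.server = server;
		// Let the owning test class register its expectations up front.
		initializer.accept(server);
	}

	public int getPort() {
		return server.getLocalPort();
	}

	public String baseUrl() {
		return "http://localhost:" + server.getLocalPort();
	}

	@Override
	public void afterAll(ExtensionContext context) {
		// Tear the mock server down once the test class is finished.
		server.stop();
	}
}
```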
idpDelegatingAccessTokenCreator = new IdpDelegatingAccessTokenCreator(CONFIG); @@ -57,16 +57,14 @@ private static void initRealmConfig() { CONFIG.setRealm(OIDCMockServer.REALM_NAME); CONFIG.setResource("test_cred"); CONFIG.setCredentials(Map.of(CONFIDENTIAL_CREDENTIAL, "test_cred")); - CONFIG.setAuthServerUrl(OIDCMockServer.MOCK_SERVER_URL); + CONFIG.setAuthServerUrl(OIDC_SERVER.baseUrl()); ValidatorHelper.failOnError(log, VALIDATOR.validate(CONFIG)); } - private static void initOIDCServer() { - - OIDC_SERVER.init( (server) -> { - + private static void initOIDCServer(ClientAndServer clientAndServer) { + OIDCMockServer.init(clientAndServer, (server) -> { // Mock username-password-for-token exchange server.when( request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME)) @@ -81,9 +79,9 @@ private static void initOIDCServer() { .withBody(JsonBody.json( new Object() { @Getter - String token_type = "Bearer"; + final String token_type = "Bearer"; @Getter - String access_token = USER_1_TOKEN; + final String access_token = USER_1_TOKEN; } ))); // Block other exchange requests (this has a lower prio than the above) @@ -112,10 +110,4 @@ public void invaildUsernamePassword() { } - @AfterAll - public static void afterAll() { - OIDC_SERVER.deinit(); - } - - } diff --git a/backend/src/test/java/com/bakdata/conquery/models/auth/IntrospectionDelegatingRealmTest.java b/backend/src/test/java/com/bakdata/conquery/models/auth/IntrospectionDelegatingRealmTest.java index 82c65ab045..768b20c8f7 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/auth/IntrospectionDelegatingRealmTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/auth/IntrospectionDelegatingRealmTest.java @@ -13,13 +13,11 @@ import java.util.Map; import java.util.Set; import java.util.UUID; - import jakarta.validation.Validator; import com.auth0.jwt.JWT; import com.auth0.jwt.algorithms.Algorithm; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.oidc.IntrospectionDelegatingRealm; import com.bakdata.conquery.models.auth.oidc.keycloak.KeycloakApi; @@ -28,22 +26,33 @@ import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; import com.bakdata.conquery.models.identifiable.ids.specific.UserId; -import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.bakdata.conquery.util.extensions.GroupExtension; +import com.bakdata.conquery.util.extensions.MetaStorageExtension; +import com.bakdata.conquery.util.extensions.MockServerExtension; +import com.bakdata.conquery.util.extensions.UserExtension; +import com.codahale.metrics.MetricRegistry; import io.dropwizard.validation.BaseValidator; import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpStatus; import org.apache.shiro.authc.AuthenticationInfo; import org.apache.shiro.authc.BearerToken; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockserver.integration.ClientAndServer; import org.mockserver.model.MediaType; @Slf4j public class IntrospectionDelegatingRealmTest { + @RegisterExtension + private static final MockServerExtension OIDC_SERVER = new 
MockServerExtension(ClientAndServer.startClientAndServer(), IntrospectionDelegatingRealmTest::initOIDCServer); - private static final MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); + @RegisterExtension + @Order(0) + private static final MetaStorageExtension STORAGE_EXTENTION = new MetaStorageExtension(new MetricRegistry()); + private static final MetaStorage STORAGE = STORAGE_EXTENTION.getMetaStorage(); private static final IntrospectionDelegatingRealmFactory CONFIG = new IntrospectionDelegatingRealmFactory(); private static final Validator VALIDATOR = BaseValidator.newValidator(); @@ -51,7 +60,8 @@ public class IntrospectionDelegatingRealmTest { // User 1 private static final String USER_1_NAME = "test_name1"; - private static final User USER_1 = new User(USER_1_NAME, USER_1_NAME, STORAGE); + @RegisterExtension + private static final UserExtension USER_1_EXTENSION = new UserExtension(STORAGE, USER_1_NAME); private static final String USER_1_PASSWORD = "test_password1"; public static final String BACKEND_AUD = "backend"; public static final String SOME_SECRET = "secret"; @@ -64,8 +74,9 @@ public class IntrospectionDelegatingRealmTest { // User 2 private static final String USER_2_NAME = "test_name2"; - private static final User USER_2 = new User(USER_2_NAME, USER_2_NAME, STORAGE); private static final String USER_2_LABEL = "test_label2"; + @RegisterExtension + private static final UserExtension USER_2_EXTENSION = new UserExtension(STORAGE, USER_2_NAME, USER_2_LABEL); private static final String USER_2_TOKEN = JWT.create() .withSubject(USER_2_NAME) .withAudience(BACKEND_AUD) @@ -75,8 +86,9 @@ public class IntrospectionDelegatingRealmTest { // User 3 existing private static final String USER_3_NAME = "test_name3"; - private static final User USER_3 = new User(USER_3_NAME, USER_3_NAME, STORAGE); private static final String USER_3_LABEL = "test_label3"; + @RegisterExtension + private static final UserExtension USER_3_EXTENSION = new UserExtension(STORAGE, USER_3_NAME, USER_3_LABEL); private static final String USER_3_TOKEN = JWT.create() .withSubject(USER_3_NAME) .withAudience(BACKEND_AUD) @@ -86,23 +98,23 @@ public class IntrospectionDelegatingRealmTest { // Groups private static final String GROUPNAME_1 = "group1"; - private static final Group GROUP_1_EXISTING = new Group(GROUPNAME_1, GROUPNAME_1, STORAGE); - public static final KeycloakGroup - KEYCLOAK_GROUP_1 = - new KeycloakGroup(UUID.randomUUID().toString(), "Group1", "g1", Map.of(GROUP_ID_ATTRIBUTE, GROUP_1_EXISTING.getId().toString()), Set.of()); + @RegisterExtension + private static final GroupExtension GROUP_1_EXISTING_EXTENSION = new GroupExtension(STORAGE, GROUPNAME_1); + public static KeycloakGroup KEYCLOAK_GROUP_1; + private static final String GROUPNAME_2 = "group2"; // Group is created during test - public static final KeycloakGroup - KEYCLOAK_GROUP_2 = - new KeycloakGroup(UUID.randomUUID().toString(), "Group2", "g2", Map.of(GROUP_ID_ATTRIBUTE, new GroupId(GROUPNAME_2).toString()), Set.of()); - public static final URI FRONT_CHANNEL_LOGOUT = URI.create("http://localhost:1080/realms/test_realm/protocol/openid-connect/logout"); + public static KeycloakGroup KEYCLOAK_GROUP_2; + public static final URI FRONT_CHANNEL_LOGOUT = URI.create("http://localhost:%d/realms/test_realm/protocol/openid-connect/logout".formatted(OIDC_SERVER.getPort())); - private static OIDCMockServer OIDC_SERVER; private static TestRealm REALM; private static KeycloakApi KEYCLOAK_API; @BeforeAll public static void beforeAll() { + 
KEYCLOAK_GROUP_1 = new KeycloakGroup(UUID.randomUUID().toString(), "Group1", "g1", Map.of(GROUP_ID_ATTRIBUTE, GROUP_1_EXISTING_EXTENSION.getGroup().getId().toString()), Set.of()); + KEYCLOAK_GROUP_2 = new KeycloakGroup(UUID.randomUUID().toString(), "Group2", "g2", Map.of(GROUP_ID_ATTRIBUTE, new GroupId(GROUPNAME_2).toString()), Set.of()); + KEYCLOAK_API = mock(KeycloakApi.class); doAnswer(invocation -> Set.of(KEYCLOAK_GROUP_1, KEYCLOAK_GROUP_2)).when(KEYCLOAK_API) .getGroupHierarchy(); @@ -116,21 +128,20 @@ public static void beforeAll() { } ).when(KEYCLOAK_API).getUserGroups(any(String.class)); - initOIDCServer(); initRealm(); } - + @BeforeEach public void beforeEach() { // clear storage underlying data structures STORAGE.clear(); - + // Clear Token Cache REALM.getTokenCache().invalidateAll(); - - // add existing group to storage - STORAGE.addGroup(GROUP_1_EXISTING); + + // add existing group to storage + STORAGE.addGroup(GROUP_1_EXISTING_EXTENSION.getGroup()); } @@ -139,131 +150,130 @@ private static void initRealm() { CONFIG.setResource("backend"); CONFIG.setGroupIdAttribute(GROUP_ID_ATTRIBUTE); CONFIG.setCredentials(Map.of(CONFIDENTIAL_CREDENTIAL, "test_cred")); - CONFIG.setAuthServerUrl(OIDCMockServer.MOCK_SERVER_URL); + CONFIG.setAuthServerUrl(OIDC_SERVER.baseUrl()); ValidatorHelper.failOnError(log, VALIDATOR.validate(CONFIG)); REALM = new TestRealm(STORAGE, CONFIG); } - private static void initOIDCServer() { - OIDC_SERVER = new OIDCMockServer(); - - OIDC_SERVER.init( (server) -> { - - + private static void initOIDCServer(ClientAndServer mockServer) { // Mock username-password-for-token exchange - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME)) - .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")).withBody( - params( - param("password", USER_1_PASSWORD), - param("grant_type", "password"), - param("username", USER_1_NAME), - param("scope", "openid")))) - .respond( - response().withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"token_type\" : \"Bearer\",\"access_token\" : \"" + USER_1_TOKEN + "\"}")); - // Block other exchange requests (this has a lower prio than the above) - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME))) - .respond( - response().withStatusCode(HttpStatus.SC_FORBIDDEN).withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"error\" : \"Wrong username or password\"")); - - // Mock token introspection - // For USER 1 - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) - .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) - .withBody(params(param("token_type_hint", "access_token"), param("token", USER_1_TOKEN)))) - .respond( - response().withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"username\" : \"" + USER_1_NAME + "\", \"active\": true}")); - // For USER 2 - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) - .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) - .withBody(params(param("token_type_hint", "access_token"), param("token", USER_2_TOKEN)))) - .respond( - response().withContentType(MediaType.APPLICATION_JSON_UTF_8) - 
.withBody("{\"username\" : \"" + USER_2_NAME + "\",\"name\" : \"" + USER_2_LABEL + "\", \"active\": true}")); - // For USER 3 - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) - .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) - .withBody(params(param("token_type_hint", "access_token"), param("token", USER_3_TOKEN)))) - .respond( - response().withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"username\" : \"" + USER_3_NAME + "\",\"name\" : \"" + USER_3_LABEL + "\", \"active\": true}")); - - }); + OIDCMockServer.init( + mockServer, + (server) -> { + server.when( + request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME)) + .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")).withBody( + params( + param("password", USER_1_PASSWORD), + param("grant_type", "password"), + param("username", USER_1_NAME), + param("scope", "openid") + ))) + .respond( + response().withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"token_type\" : \"Bearer\",\"access_token\" : \"" + USER_1_TOKEN + "\"}")); + + // Block other exchange requests (this has a lower prio than the above) + server.when( + request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME))) + .respond( + response().withStatusCode(HttpStatus.SC_FORBIDDEN).withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"error\" : \"Wrong username or password\"")); + + // Mock token introspection + // For USER 1 + server.when( + request().withMethod("POST") + .withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) + .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) + .withBody(params(param("token_type_hint", "access_token"), param("token", USER_1_TOKEN)))) + .respond( + response().withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"username\" : \"" + USER_1_NAME + "\", \"active\": true}")); + // For USER 2 + server.when( + request().withMethod("POST") + .withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) + .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) + .withBody(params(param("token_type_hint", "access_token"), param("token", USER_2_TOKEN)))) + .respond( + response().withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"username\" : \"" + USER_2_NAME + "\",\"name\" : \"" + USER_2_LABEL + "\", \"active\": true}")); + // For USER 3 + server.when( + request().withMethod("POST") + .withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) + .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) + .withBody(params(param("token_type_hint", "access_token"), param("token", USER_3_TOKEN)))) + .respond( + response().withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"username\" : \"" + USER_3_NAME + "\",\"name\" : \"" + USER_3_LABEL + "\", \"active\": true}")); + } + ); } @Test public void tokenIntrospectionSimpleUserNew() { AuthenticationInfo info = REALM.doGetAuthenticationInfo(USER1_TOKEN_WRAPPED); - + assertThat(info) .usingRecursiveComparison() + .usingOverriddenEquals() 
.ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) .ignoringFieldsOfTypes(User.ShiroUserAdapter.class) - .isEqualTo(new ConqueryAuthenticationInfo(USER_1, USER1_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); - assertThat(STORAGE.getAllUsers()).containsOnly(new User(USER_1_NAME, USER_1_NAME, STORAGE)); + .isEqualTo(new ConqueryAuthenticationInfo(USER_1_EXTENSION.getUser(), USER1_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); + assertThat(STORAGE.getAllUsers()).containsOnly(new User(USER_1_NAME, USER_1_NAME, STORAGE_EXTENTION.getMetaStorage())); } - + @Test public void tokenIntrospectionSimpleUserExisting() { - STORAGE.addUser(USER_1); - + AuthenticationInfo info = REALM.doGetAuthenticationInfo(USER1_TOKEN_WRAPPED); - + assertThat(info) - .usingRecursiveComparison() - .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) - .isEqualTo(new ConqueryAuthenticationInfo(USER_1, USER1_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); - assertThat(STORAGE.getAllUsers()).containsOnly(USER_1); + .usingRecursiveComparison() + .usingOverriddenEquals() + .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) + .isEqualTo(new ConqueryAuthenticationInfo(USER_1_EXTENSION.getUser(), USER1_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); + assertThat(STORAGE.getAllUsers()).containsOnly(USER_1_EXTENSION.getUser()); } - + @Test public void tokenIntrospectionGroupedUser() { - STORAGE.addUser(USER_2); AuthenticationInfo info = REALM.doGetAuthenticationInfo(USER_2_TOKEN_WRAPPED); - final ConqueryAuthenticationInfo expected = new ConqueryAuthenticationInfo(USER_2, USER_2_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT); + final ConqueryAuthenticationInfo expected = new ConqueryAuthenticationInfo(USER_2_EXTENSION.getUser(), USER_2_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT); assertThat(info) - .usingRecursiveComparison() - .isEqualTo(expected); - assertThat(STORAGE.getAllUsers()).containsOnly(USER_2); + .usingRecursiveComparison() + .usingOverriddenEquals() + .isEqualTo(expected); + assertThat(STORAGE.getAllUsers()).containsOnly(USER_2_EXTENSION.getUser()); assertThat(STORAGE.getAllGroups()).hasSize(2); // Pre-existing group and a second group that has been added in the process assertThat(STORAGE.getGroup(new GroupId(GROUPNAME_1)).getMembers()).contains(new UserId(USER_2_NAME)); assertThat(STORAGE.getGroup(new GroupId(GROUPNAME_2)).getMembers()).contains(new UserId(USER_2_NAME)); } - + @Test public void tokenIntrospectionGroupedUserRemoveGroupMapping() { - STORAGE.addUser(USER_3); - GROUP_1_EXISTING.addMember(USER_3); - + GROUP_1_EXISTING_EXTENSION.getGroup().addMember(USER_3_EXTENSION.getUser()); + assertThat(STORAGE.getGroup(new GroupId(GROUPNAME_1)).getMembers()).contains(new UserId(USER_3_NAME)); - + AuthenticationInfo info = REALM.doGetAuthenticationInfo(USER_3_TOKEN_WRAPPED); - + assertThat(info) - .usingRecursiveComparison() - .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) - .isEqualTo(new ConqueryAuthenticationInfo(USER_3, USER_3_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); - assertThat(STORAGE.getAllUsers()).containsOnly(USER_3); + .usingRecursiveComparison() + .usingOverriddenEquals() + .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) + .isEqualTo(new ConqueryAuthenticationInfo(USER_3_EXTENSION.getUser(), USER_3_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); + assertThat(STORAGE.getAllUsers()).containsOnly(USER_3_EXTENSION.getUser()); assertThat(STORAGE.getAllGroups()).hasSize(1); // Pre-existing group 
assertThat(STORAGE.getGroup(new GroupId(GROUPNAME_1)).getMembers()).doesNotContain(new UserId(USER_3_NAME)); } - @AfterAll - public static void afterAll() { - OIDC_SERVER.deinit(); - } - private static class TestRealm extends IntrospectionDelegatingRealm { public TestRealm(MetaStorage storage, IntrospectionDelegatingRealmFactory config) { diff --git a/backend/src/test/java/com/bakdata/conquery/models/auth/OIDCMockServer.java b/backend/src/test/java/com/bakdata/conquery/models/auth/OIDCMockServer.java index d610877011..6df41067e0 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/auth/OIDCMockServer.java +++ b/backend/src/test/java/com/bakdata/conquery/models/auth/OIDCMockServer.java @@ -1,13 +1,13 @@ package com.bakdata.conquery.models.auth; import static org.junit.Assert.fail; -import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.request; import static org.mockserver.model.HttpResponse.response; import java.util.function.Consumer; import lombok.Getter; +import lombok.experimental.UtilityClass; import lombok.extern.slf4j.Slf4j; import org.mockserver.integration.ClientAndServer; import org.mockserver.mock.action.ExpectationResponseCallback; @@ -16,42 +16,36 @@ import org.mockserver.model.JsonBody; @Slf4j +@UtilityClass public class OIDCMockServer { - public static final int MOCK_SERVER_PORT = 1080; - public static final String MOCK_SERVER_URL = "http://localhost:" + MOCK_SERVER_PORT; public static final String REALM_NAME = "test_realm"; - private final ClientAndServer OIDC_SERVER; - - public OIDCMockServer() { - OIDC_SERVER = startClientAndServer(MOCK_SERVER_PORT); - } - - public OIDCMockServer(int port) { - OIDC_SERVER = startClientAndServer(port); + public static void init(ClientAndServer server) { + init(server, (_server) -> {}); } + public static void init(ClientAndServer server, Consumer testMappings) { - public void init(Consumer testMappings) { + String mockServerUrl = "http://localhost:%d".formatted(server.getPort()); // Mock well-known discovery endpoint (this is actually the output of keycloak) - OIDC_SERVER.when(request().withMethod("GET").withPath("/realms/" + REALM_NAME + "/.well-known/uma2-configuration")) + server.when(request().withMethod("GET").withPath("/realms/" + REALM_NAME + "/.well-known/uma2-configuration")) .respond( response().withBody( JsonBody.json( new Object() { @Getter - final String issuer = MOCK_SERVER_URL; + final String issuer = mockServerUrl; @Getter - final String authorization_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/auth"; + final String authorization_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/auth"; @Getter - final String token_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/token"; + final String token_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/token"; @Getter - final String introspection_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/token/introspect"; + final String introspection_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/token/introspect"; @Getter - final String end_session_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/logout"; + final String end_session_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/logout"; @Getter - final String jwks_uri = MOCK_SERVER_URL + "/realms/" + REALM_NAME + 
"/protocol/openid-connect/certs"; + final String jwks_uri = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/certs"; @Getter final String[] grant_types_supported = {"authorization_code", "implicit", "refresh_token", "password", "client_credentials"}; @Getter @@ -59,7 +53,7 @@ public void init(Consumer testMappings) { @Getter final String[] response_modes_supported = {"query", "fragment", "form_post"}; @Getter - final String registration_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/clients-registrations/openid-connect"; + final String registration_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/clients-registrations/openid-connect"; @Getter final String[] token_endpoint_auth_methods_supported = {"private_key_jwt", "client_secret_basic", "client_secret_post", "tls_client_auth", "client_secret_jwt"}; @Getter @@ -67,20 +61,20 @@ public void init(Consumer testMappings) { @Getter final String[] scopes_supported = {"openid", "address", "email", "microprofile-jwt", "offline_access", "phone", "profile", "roles", "web-origins"}; @Getter - final String resource_registration_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/authz/protection/resource_set"; + final String resource_registration_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/authz/protection/resource_set"; @Getter - final String permission_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/authz/protection/permission"; + final String permission_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/authz/protection/permission"; @Getter - final String policy_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/authz/protection/uma-policy"; + final String policy_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/authz/protection/uma-policy"; } ) )); // Register test provided mappings - testMappings.accept(OIDC_SERVER); + testMappings.accept(server); // At last (so it has the lowest priority): initialize a trap for debugging, that captures all unmapped requests - OIDC_SERVER.when(request()).respond(new ExpectationResponseCallback() { + server.when(request()).respond(new ExpectationResponseCallback() { @Override public HttpResponse handle(HttpRequest httpRequest) throws Exception { @@ -96,8 +90,4 @@ public HttpResponse handle(HttpRequest httpRequest) throws Exception { } }); } - - public void deinit() { - OIDC_SERVER.stop(); - } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/auth/oidc/IdpConfigRetrievalTest.java b/backend/src/test/java/com/bakdata/conquery/models/auth/oidc/IdpConfigRetrievalTest.java index f12774d576..cf95a1e09b 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/auth/oidc/IdpConfigRetrievalTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/auth/oidc/IdpConfigRetrievalTest.java @@ -5,29 +5,29 @@ import static org.mockserver.model.HttpResponse.response; import java.net.URI; - import jakarta.ws.rs.client.Client; import com.bakdata.conquery.models.auth.OIDCMockServer; import com.bakdata.conquery.models.config.auth.JwtPkceVerifyingRealmFactory; +import com.bakdata.conquery.util.extensions.MockServerExtension; import io.dropwizard.client.JerseyClientBuilder; import io.dropwizard.core.setup.Environment; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockserver.integration.ClientAndServer; import org.mockserver.model.JsonBody; public class IdpConfigRetrievalTest { + @RegisterExtension + 
private static final MockServerExtension OIDC_SERVER = new MockServerExtension(ClientAndServer.startClientAndServer(), IdpConfigRetrievalTest::init); - private static final OIDCMockServer OIDC_MOCK_SERVER = new OIDCMockServer(); private static final JwtPkceVerifyingRealmFactory REALM_FACTORY = new JwtPkceVerifyingRealmFactory(); private static final Client CLIENT = new JerseyClientBuilder(new Environment("oidc-test")).build("oidc-test-client"); - @BeforeAll - static void init() { - OIDC_MOCK_SERVER.init((server) -> { + private static void init(ClientAndServer mockServer) { + OIDCMockServer.init(mockServer, (server) -> { // MOCK JWK Endpoint (1 signing + 1 encryption key) server.when(request().withMethod("GET").withPath("/realms/" + OIDCMockServer.REALM_NAME + "/protocol/openid-connect/certs")) .respond( @@ -36,15 +36,10 @@ static void init() { ))); }); - REALM_FACTORY.setWellKnownEndpoint(URI.create(OIDCMockServer.MOCK_SERVER_URL + REALM_FACTORY.setWellKnownEndpoint(URI.create(OIDC_SERVER.baseUrl() + "/realms/" + OIDCMockServer.REALM_NAME + "/.well-known/uma2-configuration")); } - @AfterAll - static void deinit() { - OIDC_MOCK_SERVER.deinit(); - } - @Test void getConfig() { assertThatCode(() -> REALM_FACTORY.retrieveIdpConfiguration(CLIENT)).doesNotThrowAnyException(); diff --git a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java index 7814dad2f6..c179713dce 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java +++ b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java @@ -5,6 +5,8 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.LongStream; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotEmpty; import com.bakdata.conquery.apiv1.frontend.FrontendFilterConfiguration; import com.bakdata.conquery.apiv1.frontend.FrontendFilterType; @@ -18,8 +20,6 @@ import com.fasterxml.jackson.annotation.JsonView; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.type.TypeFactory; -import jakarta.validation.constraints.Min; -import jakarta.validation.constraints.NotEmpty; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.experimental.FieldNameConstants; @@ -36,7 +36,7 @@ public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig @Override public FilterNode createFilterNode(GroupFilterValue compoundFilterValue) { - return new MultiSelectFilterNode(getColumn(), Set.of(compoundFilterValue.getResolvedValues())); + return new MultiSelectFilterNode(getColumn().resolve(), Set.of(compoundFilterValue.getResolvedValues())); } private Map getFEFilter() { diff --git a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java index d6b263ec37..baa15d8444 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java @@ -6,6 +6,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendTable; import com.bakdata.conquery.apiv1.frontend.FrontendValue; +import com.bakdata.conquery.io.storage.NamespacedStorage; import 
com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; @@ -15,43 +16,50 @@ import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.util.extensions.NamespaceStorageExtension; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; public class FilterSearchItemTest { + @RegisterExtension + private static final NamespaceStorageExtension NAMESPACE_STORAGE_EXTENSION = new NamespaceStorageExtension(); + private static final NamespacedStorage NAMESPACED_STORAGE = NAMESPACE_STORAGE_EXTENSION.getStorage(); + @Test public void sortedValidityDates() { Dataset dataset = new Dataset(); dataset.setName("testDataset"); + dataset.setNamespacedStorageProvider(NAMESPACED_STORAGE); + NAMESPACED_STORAGE.updateDataset(dataset); Table table = new Table(); - table.setDataset(dataset); + table.setDataset(dataset.getId()); table.setName("testTable"); + NAMESPACED_STORAGE.addTable(table); Column column = new Column(); column.setName("testColumn"); column.setTable(table); Column dateColumn1 = new Column(); - column.setName("dateColumn1"); - column.setType(MajorTypeId.DATE); - column.setTable(table); + dateColumn1.setName("dateColumn1"); + dateColumn1.setType(MajorTypeId.DATE); + dateColumn1.setTable(table); Column dateColumn2 = new Column(); - column.setName("dateColumn2"); - column.setType(MajorTypeId.DATE); - column.setTable(table); - + dateColumn2.setName("dateColumn2"); + dateColumn2.setType(MajorTypeId.DATE); + dateColumn2.setTable(table); + TreeConcept concept = new TreeConcept(); + concept.setDataset(dataset.getId()); + concept.setName("testConcept"); ConceptTreeConnector connector = new ConceptTreeConnector(); connector.setName("testConnector"); - TreeConcept concept = new TreeConcept(); - concept.setDataset(dataset); - concept.setName("testConcept"); - ValidityDate val0 = ValidityDate.create(dateColumn1); val0.setName("val0"); val0.setConnector(connector); @@ -65,11 +73,12 @@ public void sortedValidityDates() { val2.setConnector(connector); List validityDates = List.of(val0, val1, val2); - connector.setColumn(column); + connector.setColumn(column.getId()); connector.setConcept(concept); connector.setValidityDates(validityDates); + FrontendTable feTable = new FrontEndConceptBuilder(new ConqueryConfig()).createTable(connector); - + assertThat(feTable.getDateColumn().getOptions()).containsExactly( new FrontendValue(val0.getId().toString(), "val0"), new FrontendValue(val1.getId().toString(), "val1"), diff --git a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/tree/GroovyIndexedTest.java b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/tree/GroovyIndexedTest.java index 50783215aa..768688bb00 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/tree/GroovyIndexedTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/tree/GroovyIndexedTest.java @@ -8,26 +8,27 @@ import java.util.Random; import java.util.function.Supplier; import java.util.stream.Stream; +import jakarta.validation.Validator; import com.bakdata.conquery.io.jackson.Injectable; import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import 
com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.exceptions.ConfigurationException; import com.bakdata.conquery.models.exceptions.JSONException; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; import com.bakdata.conquery.util.CalculatedValue; +import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.node.ObjectNode; import com.github.powerlibraries.io.In; import io.dropwizard.jersey.validation.Validators; -import jakarta.validation.Validator; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.parallel.Execution; @@ -48,10 +49,10 @@ public static Stream getTestKeys() { ); return Stream.of( - "A13B", "I43A", "H41B", "B05Z", "L02C", "L12Z", "H08A", "I56B", "I03A", "E79C", "B80Z", "I47A", "N13A", "G08B", "F43B", "P04A", "T36Z", "T36Z", "N11A", "D13A", "R01D", "F06A", "F24A", "O03Z", "P01Z", "R63D", "A13A", "O05A", "G29B", "I18A", "J08A", "E74Z", "D06C", "H36Z", "H05Z", "P65B", "I09A", "A66Z", "F12E", "Q60E", "I46B", "I97Z", "I78Z", "T01B", "J24C", "A62Z", "Q01Z", "N25Z", "A01B", "G02A" - , "ZULU" // This may not fail, but return null on both sides - ) - .map(v -> Arguments.of(v, rowMap.get())); + "A13B", "I43A", "H41B", "B05Z", "L02C", "L12Z", "H08A", "I56B", "I03A", "E79C", "B80Z", "I47A", "N13A", "G08B", "F43B", "P04A", "T36Z", "T36Z", "N11A", "D13A", "R01D", "F06A", "F24A", "O03Z", "P01Z", "R63D", "A13A", "O05A", "G29B", "I18A", "J08A", "E74Z", "D06C", "H36Z", "H05Z", "P65B", "I09A", "A66Z", "F12E", "Q60E", "I46B", "I97Z", "I78Z", "T01B", "J24C", "A62Z", "Q01Z", "N25Z", "A01B", "G02A" + , "ZULU" // This may not fail, but return null on both sides + ) + .map(v -> Arguments.of(v, rowMap.get())); } private static TreeConcept indexedConcept; @@ -60,21 +61,23 @@ public static Stream getTestKeys() { @BeforeAll public static void init() throws IOException, JSONException, ConfigurationException { - ObjectNode node = Jackson.MAPPER.readerFor(ObjectNode.class).readValue(In.resource(GroovyIndexedTest.class, CONCEPT_SOURCE).asStream()); + final ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); + ObjectNode node = mapper.readerFor(ObjectNode.class).readValue(In.resource(GroovyIndexedTest.class, CONCEPT_SOURCE).asStream()); // load concept tree from json - CentralRegistry registry = new CentralRegistry(); - + final NamespaceStorage storage = new NamespaceStorage(new NonPersistentStoreFactory(), "GroovyIndexedTest"); + storage.openStores(mapper, new MetricRegistry()); Table table = new Table(); table.setName("the_table"); Dataset dataset = new Dataset(); dataset.setName("the_dataset"); + dataset.injectInto(mapper); - registry.register(dataset); + storage.updateDataset(dataset); - table.setDataset(dataset); + table.setDataset(dataset.getId()); Column column = new Column(); column.setName("the_column"); @@ -83,25 +86,27 @@ public static void init() throws IOException, JSONException, 
ConfigurationExcept table.setColumns(new Column[]{column}); column.setTable(table); - registry.register(table); - registry.register(column); - + storage.addTable(table); // Prepare Serdes injections - ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); - ((Injectable) values -> values.add(Validator.class, Validators.newValidator())).injectInto(mapper); - new SingletonNamespaceCollection(registry).injectInto(mapper); - dataset.injectInto(mapper); - final ObjectReader conceptReader = mapper.readerFor(Concept.class); - - // load tree twice to avoid references + final Validator validator = Validators.newValidator(); + final ObjectReader conceptReader = new Injectable(){ + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(Validator.class, validator); + } + }.injectInto(mapper).readerFor(Concept.class); + + // load tree twice to avoid references indexedConcept = conceptReader.readValue(node); - indexedConcept.setDataset(dataset); + indexedConcept.setDataset(dataset.getId()); + indexedConcept.initElements(); oldConcept = conceptReader.readValue(node); - oldConcept.setDataset(dataset); + oldConcept.setDataset(dataset.getId()); + oldConcept.initElements(); } @@ -110,11 +115,11 @@ public void basic(String key, CalculatedValue<Map<String, Object>> rowMap) throws JSONException { log.trace("Searching for {}", key); - ConceptElement<?> idxResult = indexedConcept.findMostSpecificChild(key, rowMap); - ConceptElement<?> oldResult = oldConcept.findMostSpecificChild(key, rowMap); + ConceptTreeChild idxResult = indexedConcept.findMostSpecificChild(key, rowMap); + ConceptTreeChild oldResult = oldConcept.findMostSpecificChild(key, rowMap); assertThat(oldResult.getId()).describedAs("%s hierarchical name", key).isEqualTo(idxResult.getId()); } -} +} \ No newline at end of file diff --git a/backend/src/test/java/com/bakdata/conquery/models/events/stores/types/ColumnStoreSerializationTests.java b/backend/src/test/java/com/bakdata/conquery/models/events/stores/types/ColumnStoreSerializationTests.java index 6811691944..d411065ba6 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/events/stores/types/ColumnStoreSerializationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/models/events/stores/types/ColumnStoreSerializationTests.java @@ -11,6 +11,8 @@ import com.bakdata.conquery.io.cps.CPSTypeIdResolver; import com.bakdata.conquery.io.jackson.serializer.SerializationTestUtil; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; @@ -33,8 +35,8 @@ import com.bakdata.conquery.models.events.stores.specific.RebasingIntegerStore; import com.bakdata.conquery.models.events.stores.specific.ScaledDecimalStore; import com.bakdata.conquery.models.exceptions.JSONException; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.worker.ShardWorkers; +import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import io.dropwizard.jersey.validation.Validators; @@ -51,18 +53,21 @@ public class ColumnStoreSerializationTests { */ private static final Set<Class<? extends ColumnStore>> EXCLUDING = 
Set.of(CompoundDateRangeStore.class); - private static final CentralRegistry CENTRAL_REGISTRY = new CentralRegistry(); + private static final NamespaceStorage STORAGE = new NamespaceStorage(new NonPersistentStoreFactory(), "ColumnStoreSerializationTests"); private static ObjectMapper shardInternalMapper; + private static ConqueryConfig config; @BeforeAll public static void setupRegistry() { - CENTRAL_REGISTRY.register(Dataset.PLACEHOLDER); + STORAGE.openStores(null, new MetricRegistry()); + STORAGE.updateDataset(Dataset.PLACEHOLDER); // Prepare shard node internal mapper - InternalMapperFactory internalMapperFactory = new InternalMapperFactory(new ConqueryConfig(), Validators.newValidator()); - shardInternalMapper = internalMapperFactory.createWorkerPersistenceMapper(mock(ShardWorkers.class)); + config = new ConqueryConfig(); + InternalMapperFactory internalMapperFactory = new InternalMapperFactory(config, Validators.newValidator()); + shardInternalMapper = internalMapperFactory.createWorkerPersistenceMapper(mock(WorkerStorageImpl.class)); } @Test @@ -88,7 +93,7 @@ public static List createCTypes() { return Arrays.asList( new ScaledDecimalStore(13, IntArrayStore.create(10)), - new MoneyIntStore(IntArrayStore.create(10)), + new MoneyIntStore(IntArrayStore.create(10), 2).config(config), new DirectDateRangeStore(IntegerDateStore.create(10), IntegerDateStore.create(10)), new QuarterDateRangeStore(LongArrayStore.create(10)), new IntegerDateStore(LongArrayStore.create(10)), @@ -113,7 +118,6 @@ public void testSerialization(ColumnStore type) throws IOException, JSONExceptio SerializationTestUtil .forType(ColumnStore.class) .objectMappers(shardInternalMapper) - .registry(CENTRAL_REGISTRY) .test(type); } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java b/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java index 34919dd389..ae7ca64fbe 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java @@ -2,8 +2,6 @@ import static com.bakdata.conquery.models.execution.ManagedExecution.AUTO_LABEL_SUFFIX; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; import java.time.LocalDateTime; import java.util.List; @@ -12,54 +10,57 @@ import com.bakdata.conquery.apiv1.forms.export_form.ExportForm; import com.bakdata.conquery.apiv1.query.ConceptQuery; -import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.concept.specific.CQAnd; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery; import com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.forms.managed.ManagedForm; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; import 
com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; +import org.mockito.Mockito; public class DefaultLabelTest { + public static final ConqueryConfig CONFIG = new ConqueryConfig(); private final static MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); - - private static final Namespace NAMESPACE = mock(LocalNamespace.class); + private final static NamespaceStorage NS_ID_RESOLVER = new NonPersistentStoreFactory().createNamespaceStorage(); + private static final Namespace NAMESPACE = Mockito.mock(LocalNamespace.class); private static final Dataset DATASET = new Dataset("dataset"); - private static final User user = new User("user","user", STORAGE); - - private static final TreeConcept CONCEPT = new TreeConcept() { - { - setDataset(DATASET); - setName("defaultconcept"); - setLabel("Default Concept"); - } - }; - public static final ConqueryConfig CONFIG = new ConqueryConfig(); + private static final User user = new User("user", "user", STORAGE); + private static final TreeConcept CONCEPT = new TreeConcept(); @BeforeAll public static void beforeAll() { + DATASET.setNamespacedStorageProvider(NS_ID_RESOLVER); + NS_ID_RESOLVER.updateDataset(DATASET); + + // no mapper required - STORAGE.openStores(null); + STORAGE.openStores(null, new MetricRegistry()); - I18n.init(); + CONCEPT.setDataset(DATASET.getId()); + CONCEPT.setName("defaultconcept"); + CONCEPT.setLabel("Default Concept"); + + NS_ID_RESOLVER.updateConcept(CONCEPT); - doAnswer((invocation -> CONCEPT)).when(NAMESPACE) - .resolve(CONCEPT.getId()); + I18n.init(); } @ParameterizedTest @@ -70,9 +71,9 @@ public static void beforeAll() { void autoLabelConceptQuery(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); - CQConcept concept = makeCQConcept("Concept"); + CQConcept concept = makeCQConceptWithLabel("Concept"); ConceptQuery cq = new ConceptQuery(concept); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -80,19 +81,19 @@ void autoLabelConceptQuery(Locale locale, String autoLabel) { assertThat(mQuery.getLabelWithoutAutoLabelSuffix()).isEqualTo(autoLabel); } - @NotNull - private PrintSettings getPrintSettings(Locale locale) { - return new PrintSettings(true, locale, NAMESPACE, CONFIG, null, null); - } - - private static CQConcept makeCQConcept(String label) { + private static CQConcept makeCQConceptWithLabel(String label) { CQConcept concept = new CQConcept(); concept.setLabel(label); - concept.setElements(List.of(CONCEPT)); + concept.setElements(List.of(CONCEPT.getId())); return concept; } + @NotNull + private PrintSettings getPrintSettings(Locale locale) { + return new PrintSettings(true, locale, NAMESPACE, CONFIG, null, null); + } + @ParameterizedTest @CsvSource({ "de,Default-Concept", @@ -103,9 +104,9 @@ void autoLabelConceptQueryFallback(Locale locale, String autoLabel) { CQConcept concept = new CQConcept(); concept.setLabel(null); - concept.setElements(List.of(CONCEPT)); + concept.setElements(List.of(CONCEPT.getId())); ConceptQuery cq = new ConceptQuery(concept); - ManagedQuery mQuery 
= cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); UUID uuid = UUID.randomUUID(); mQuery.setQueryId(uuid); @@ -124,12 +125,12 @@ void autoLabelConceptQueryFallback(Locale locale, String autoLabel) { void autoLabelReusedQuery(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); - final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), mock(User.class), DATASET, STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(null, new UserId("test"), DATASET.getId(), STORAGE, null); managedQuery.setQueryId(UUID.randomUUID()); CQReusedQuery reused = new CQReusedQuery(managedQuery.getId()); ConceptQuery cq = new ConceptQuery(reused); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -148,7 +149,7 @@ void autoLabelUploadQuery(Locale locale, String autoLabel) { CQExternal external = new CQExternal(List.of(), new String[0][0], false); ConceptQuery cq = new ConceptQuery(external); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -164,13 +165,13 @@ void autoLabelUploadQuery(Locale locale, String autoLabel) { void autoLabelComplexQuery(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); - final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), mock(User.class), DATASET, STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(null, new UserId("test"), DATASET.getId(), STORAGE, null); managedQuery.setQueryId(UUID.randomUUID()); CQAnd and = new CQAnd(); - CQConcept concept1 = makeCQConcept("Concept1"); - CQConcept concept2 = makeCQConcept("Concept2"); - CQConcept concept3 = makeCQConcept("Concept3veryveryveryveryveryveryveryverylooooooooooooooooooooonglabel"); + CQConcept concept1 = makeCQConceptWithLabel("Concept1"); + CQConcept concept2 = makeCQConceptWithLabel("Concept2"); + CQConcept concept3 = makeCQConceptWithLabel("Concept3veryveryveryveryveryveryveryverylooooooooooooooooooooonglabel"); and.setChildren(List.of( new CQExternal(List.of(), new String[0][0], false), @@ -180,7 +181,7 @@ void autoLabelComplexQuery(Locale locale, String autoLabel) { concept3 )); ConceptQuery cq = new ConceptQuery(and); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -197,15 +198,15 @@ void autoLabelComplexQuery(Locale locale, String autoLabel) { void autoLabelComplexQueryNullLabels(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); - final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), mock(User.class), DATASET, STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(null, new UserId("test"), DATASET.getId(), STORAGE, null); managedQuery.setQueryId(UUID.randomUUID()); CQAnd and = new CQAnd(); CQConcept concept1 = new CQConcept(); concept1.setLabel(null); - concept1.setElements(List.of(CONCEPT)); - CQConcept concept2 = makeCQConcept("Concept2"); - CQConcept concept3 = makeCQConcept("Concept3"); + concept1.setElements(List.of(CONCEPT.getId())); + CQConcept concept2 = 
makeCQConceptWithLabel("Concept2"); + CQConcept concept3 = makeCQConceptWithLabel("Concept3"); and.setChildren(List.of( new CQExternal(List.of(), new String[0][0], false), new CQReusedQuery(managedQuery.getId()), @@ -214,7 +215,7 @@ void autoLabelComplexQueryNullLabels(Locale locale, String autoLabel) { concept3 )); ConceptQuery cq = new ConceptQuery(and); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -231,7 +232,7 @@ void autoLabelExportForm(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); ExportForm form = new ExportForm(); - ManagedForm mForm = form.toManagedExecution(user, DATASET, STORAGE, null); + ManagedForm mForm = form.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mForm.setCreationTime(LocalDateTime.of(2020, 10, 30, 12, 37)); mForm.setLabel(mForm.makeAutoLabel(getPrintSettings(locale))); diff --git a/backend/src/test/java/com/bakdata/conquery/models/identifiable/IdMapSerialisationTest.java b/backend/src/test/java/com/bakdata/conquery/models/identifiable/IdMapSerialisationTest.java index ed9e8d76b9..7ba606498d 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/identifiable/IdMapSerialisationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/identifiable/IdMapSerialisationTest.java @@ -1,13 +1,14 @@ package com.bakdata.conquery.models.identifiable; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.identifiable.mapping.EntityPrintId; import com.bakdata.conquery.models.identifiable.mapping.ExternalId; public class IdMapSerialisationTest { - public static EntityIdMap createTestPersistentMap() { - EntityIdMap entityIdMap = new EntityIdMap(); + public static EntityIdMap createTestPersistentMap(NamespaceStorage namespaceStorage) { + EntityIdMap entityIdMap = new EntityIdMap(namespaceStorage); entityIdMap.addInputMapping("test1", new ExternalId("id", "a")); diff --git a/backend/src/test/java/com/bakdata/conquery/models/identifiable/ids/IdTests.java b/backend/src/test/java/com/bakdata/conquery/models/identifiable/ids/IdTests.java index 3f5bb9ca82..e070ecc5cd 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/identifiable/ids/IdTests.java +++ b/backend/src/test/java/com/bakdata/conquery/models/identifiable/ids/IdTests.java @@ -2,30 +2,76 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; import java.io.IOException; import java.lang.reflect.Modifier; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - import com.bakdata.conquery.io.cps.CPSTypeIdResolver; import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.mode.cluster.InternalMapperFactory; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.IdUtil.Parser; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptTreeChildId; import 
com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonMappingException; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectReader; +import io.dropwizard.jersey.validation.Validators; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; public class IdTests { + public static Stream<Arguments> reflectionTest() { + return CPSTypeIdResolver + .SCAN_RESULT + .getClassesImplementing(Identifiable.class.getName()).loadClasses() + .stream() + .filter(cl -> !cl.isInterface()) + .filter(cl -> !Modifier.isAbstract(cl.getModifiers())) + //filter test classes + .filter(cl -> !cl.toString().toLowerCase().contains("test")) + .map(cl -> { + + Class<?> idClazz = null; + // Try to get the specific Id + try { + idClazz = cl.getMethod("getId").getReturnType(); + + } + catch (NoSuchMethodException e) { + return fail(cl.getName() + " does not implement the method 'getId()'"); + } + + if (Modifier.isAbstract(idClazz.getModifiers())) { + try { + idClazz = cl.getMethod("createId").getReturnType(); + + } + catch (NoSuchMethodException e) { + return fail(cl.getName() + " does not implement the method 'createId()' unable to retrieve specific id class"); + } + } + + String packageString = "com.bakdata.conquery.models.identifiable.ids.specific."; + if (!idClazz.getName().startsWith(packageString)) { + return fail("The id class " + idClazz + " is not located in the package " + packageString + ". 
Please clean that up."); + } + + return Arguments.of( + cl, + idClazz + ); + }); + } + @Test public void testEquals() { ConceptTreeChildId idA = new ConceptTreeChildId( @@ -38,7 +84,7 @@ public void testEquals() { ), "4" ); - + ConceptTreeChildId idB = new ConceptTreeChildId( new ConceptTreeChildId( new ConceptId( @@ -49,7 +95,7 @@ public void testEquals() { ), "4" ); - + assertThat(idA).isEqualTo(idB); assertThat(idA).hasSameHashCodeAs(idB); assertThat(idA.toString()).isEqualTo(idB.toString()); @@ -67,16 +113,16 @@ public void testStringSerialization() { ), "4" ); - + ConceptTreeChildId copy = ConceptTreeChildId.Parser.INSTANCE.parse(id.toString()); - + assertThat(copy).isEqualTo(id); assertThat(copy).hasSameHashCodeAs(id); assertThat(copy.toString()).isEqualTo(id.toString()); } @Test - public void testJacksonSerialization() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException { + public void testJacksonSerialization() throws IOException { ConceptTreeChildId id = new ConceptTreeChildId( new ConceptTreeChildId( new ConceptId( @@ -87,22 +133,29 @@ public void testJacksonSerialization() throws JsonParseException, JsonMappingExc ), "4" ); - + ObjectMapper mapper = Jackson.MAPPER; ConceptTreeChildId copy = mapper.readValue(mapper.writeValueAsBytes(id), ConceptTreeChildId.class); - + assertThat(copy).isEqualTo(id); assertThat(copy).hasSameHashCodeAs(id); assertThat(copy.toString()).isEqualTo(id.toString()); } @Test - public void testInterning() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException { - String raw = "1.concepts.2.3.4"; - - ConceptTreeChildId id1 = ConceptTreeChildId.Parser.INSTANCE.parse(raw); - ConceptTreeChildId id2 = ConceptTreeChildId.Parser.INSTANCE.parse(raw); - + public void testInterning() throws IOException { + + InternalMapperFactory internalMapperFactory = new InternalMapperFactory(new ConqueryConfig(), Validators.newValidator()); + ObjectMapper objectMapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); + internalMapperFactory.customizeApiObjectMapper(objectMapper, mock(DatasetRegistry.class), new NonPersistentStoreFactory().createMetaStorage()); + + ObjectReader objectReader = objectMapper.readerFor(ConceptTreeChildId.class); + + String raw = "\"1.concepts.2.3.4\""; + + ConceptTreeChildId id1 = objectReader.readValue(raw); + ConceptTreeChildId id2 = objectReader.readValue(raw); + assertThat(id1).isSameAs(id2); assertThat(id1.getParent()).isSameAs(id2.getParent()); assertThat(id1.findConcept()).isSameAs(id2.findConcept()); @@ -110,7 +163,7 @@ public void testInterning() throws JsonParseException, JsonMappingException, Jso } @Test - public void testJacksonBinarySerialization() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException { + public void testJacksonBinarySerialization() throws IOException { ConceptTreeChildId id = new ConceptTreeChildId( new ConceptTreeChildId( new ConceptId( @@ -121,57 +174,14 @@ public void testJacksonBinarySerialization() throws JsonParseException, JsonMapp ), "4" ); - + ObjectMapper mapper = Jackson.BINARY_MAPPER; ConceptTreeChildId copy = mapper.readValue(mapper.writeValueAsBytes(id), ConceptTreeChildId.class); - + assertThat(copy).isEqualTo(id); assertThat(copy).hasSameHashCodeAs(id); assertThat(copy.toString()).isEqualTo(id.toString()); } - - public static Stream reflectionTest() { - return CPSTypeIdResolver - .SCAN_RESULT - .getClassesImplementing(Identifiable.class.getName()).loadClasses() - .stream() - .filter(cl -> !cl.isInterface()) 
- .filter(cl -> !Modifier.isAbstract(cl.getModifiers())) - //filter test classes - .filter(cl -> !cl.toString().toLowerCase().contains("test")) - .map(cl -> { - - Class idClazz = null; - // Try to get the specific Id - try { - idClazz = cl.getMethod("getId").getReturnType(); - - } - catch (NoSuchMethodException e) { - return fail(cl.getName() + " does not implement the method 'getId()'"); - } - - if (Modifier.isAbstract(idClazz.getModifiers())) { - try { - idClazz = cl.getMethod("createId").getReturnType(); - - } - catch (NoSuchMethodException e) { - return fail(cl.getName() + " does not implement the method 'createId()' unable to retrieve specific id class"); - } - } - - String packageString = "com.bakdata.conquery.models.identifiable.ids.specific."; - if (!idClazz.getName().startsWith(packageString)) { - return fail("The id class " + idClazz + " is not located in the package " + packageString + ". Please clean that up."); - } - - return Arguments.of( - cl, - idClazz - ); - }); - } @ParameterizedTest @MethodSource diff --git a/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java b/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java index 8782a45152..45d17e41bf 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java @@ -1,8 +1,6 @@ package com.bakdata.conquery.models.query; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import java.util.ArrayList; @@ -11,12 +9,15 @@ import java.util.List; import java.util.Locale; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; import java.util.stream.Collectors; import java.util.stream.Stream; +import jakarta.validation.Validator; import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; @@ -28,15 +29,16 @@ import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.exceptions.ValidatorHelper; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorSelectId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.util.NonPersistentStoreFactory; import io.dropwizard.jersey.validation.Validators; -import jakarta.validation.Validator; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.params.ParameterizedTest; @@ -49,17 +51,18 @@ public class DefaultColumnNameTest { private static final 
PrintSettings SETTINGS = new PrintSettings(false, Locale.ENGLISH, NAMESPACE, new ConqueryConfig(), null, null); private static final Validator VALIDATOR = Validators.newValidator(); + private static final BiFunction CONCEPT_SELECT_SELECTOR = (concept, cq) -> { final UniversalSelect select = concept.getSelects().get(0); - cq.setSelects(List.of(select)); + cq.setSelects(List.of(select.getId())); return select; }; private static final BiFunction CONNECTOR_SELECT_SELECTOR = (concept, cq) -> { final Select select = concept.getConnectors().get(0).getSelects().get(0); - cq.getTables().get(0).setSelects(List.of(select)); + cq.getTables().get(0).setSelects(List.of((ConnectorSelectId) select.getId())); return select; }; @@ -157,26 +160,17 @@ private static Stream provideCombinations() { @ParameterizedTest @MethodSource("provideCombinations") void checkCombinations(TestConcept concept, boolean hasCQConceptLabel, String expectedColumnName) { - - doAnswer(invocation -> { - final ConceptId id = invocation.getArgument(0); - if (!concept.getId().equals(id)) { - throw new IllegalStateException("Expected the id " + concept.getId() + " but got " + id); - } - return concept; - }).when(NAMESPACE).resolve(any()); - final CQConcept cqConcept = concept.createCQConcept(hasCQConceptLabel); final UniqueNamer uniqNamer = new UniqueNamer(SETTINGS); - SelectResultInfo info = new SelectResultInfo(concept.extractSelect(cqConcept), cqConcept, Collections.emptySet(), SETTINGS); + SelectResultInfo info = new SelectResultInfo(concept.extractSelect(cqConcept), cqConcept, Collections.emptySet()); - assertThat(uniqNamer.getUniqueName(info)).isEqualTo(expectedColumnName); + assertThat(uniqNamer.getUniqueName(info, SETTINGS)).isEqualTo(expectedColumnName); } private static class TestCQConcept extends CQConcept { - private static CQConcept create(boolean withLabel, TestConcept concept) { + private static CQConcept create(boolean withLabel, TestConcept concept) { CQConcept cqConcept = new CQConcept(); if (withLabel) { cqConcept.setLabel("TestCQLabel"); @@ -190,14 +184,15 @@ private static CQConcept create(boolean withLabel, TestConcept concept) { if (elements.isEmpty()) { elements = List.of(concept); } + final List> list = (List>) elements.stream().map(ConceptElement::getId).toList(); cqConcept.setElements( - elements + list ); List tables = concept.getConnectors().stream() .map(con -> { CQTable table = new CQTable(); - table.setConnector(con); + table.setConnector(con.getId()); table.setConcept(cqConcept); return table; }) @@ -213,36 +208,41 @@ private static CQConcept create(boolean withLabel, TestConcept concept) { private static class TestConcept extends TreeConcept { - private static final Dataset DATASET = new Dataset() { - { - setName("test"); - } - }; + /** + * We use a different dataset for each concept/test. Otherwise, the concepts override each other in the + * NamespacedStorageProvider map during test parameter creation. 
+ */ + private static final AtomicInteger DATASET_COUNTER = new AtomicInteger(0); + private final BiFunction selectExtractor; private TestConcept(BiFunction selectExtractor) { + final NamespaceStorage NS_ID_RESOLVER = new NonPersistentStoreFactory().createNamespaceStorage(); this.selectExtractor = selectExtractor; setName("TestConceptName"); setLabel("TestConceptLabel"); - setDataset(DATASET); - setSelects(List.of(new TestUniversalSelect(this))); - } + Dataset DATASET = new Dataset() { + { + setName("test_" + DATASET_COUNTER.getAndIncrement()); + setNamespacedStorageProvider(NS_ID_RESOLVER); + NS_ID_RESOLVER.updateDataset(this); + } + }; + setDataset(DATASET.getId()); - public Select extractSelect(CQConcept cq) { - return selectExtractor.apply(this, cq); - } + NS_ID_RESOLVER.updateConcept(this); - public CQConcept createCQConcept(boolean hasCQConceptLabel) { - return TestCQConcept.create(hasCQConceptLabel, this); + setSelects(List.of(new TestUniversalSelect(this))); } - @SneakyThrows public static TestConcept create(int countConnectors, BiFunction selectExtractor, int countIds, String overwriteLabel) { TestConcept concept = new TestConcept(selectExtractor); if (overwriteLabel != null) { concept.setLabel(overwriteLabel); } + + List connectors = new ArrayList<>(); concept.setConnectors(connectors); for (; countConnectors > 0; countConnectors--) { @@ -263,15 +263,24 @@ public static TestConcept create(int countConnectors, BiFunction holder) { } @Override - public ResultType getResultType() { - return ResultType.Primitive.STRING; + public Aggregator createAggregator() { + return null; } @Override - public Aggregator createAggregator() { - return null; + public ResultType getResultType() { + return ResultType.Primitive.STRING; } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/query/UniqueNameTest.java b/backend/src/test/java/com/bakdata/conquery/models/query/UniqueNameTest.java index 441b6ef2bf..c95a6d3a01 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/query/UniqueNameTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/query/UniqueNameTest.java @@ -16,14 +16,14 @@ void testNameCollision() { PrintSettings settings = new PrintSettings(true, Locale.ROOT, null, new ConqueryConfig(), null, null); final UniqueNamer uniqueNamer = new UniqueNamer(settings); - final ExternalResultInfo info1 = new ExternalResultInfo("test", ResultType.Primitive.STRING, settings); - final ExternalResultInfo info2 = new ExternalResultInfo("test", ResultType.Primitive.STRING, settings); - final ExternalResultInfo info3 = new ExternalResultInfo("test_1", ResultType.Primitive.STRING, settings); - final ExternalResultInfo info4 = new ExternalResultInfo("test", ResultType.Primitive.STRING, settings); + final ExternalResultInfo info1 = new ExternalResultInfo("test", ResultType.Primitive.STRING); + final ExternalResultInfo info2 = new ExternalResultInfo("test", ResultType.Primitive.STRING); + final ExternalResultInfo info3 = new ExternalResultInfo("test_1", ResultType.Primitive.STRING); + final ExternalResultInfo info4 = new ExternalResultInfo("test", ResultType.Primitive.STRING); - assertThat(uniqueNamer.getUniqueName(info1)).isEqualTo("test"); - assertThat(uniqueNamer.getUniqueName(info2)).isEqualTo("test_1"); - assertThat(uniqueNamer.getUniqueName(info3)).isEqualTo("test_1_1"); - assertThat(uniqueNamer.getUniqueName(info4)).isEqualTo("test_2"); + assertThat(uniqueNamer.getUniqueName(info1, settings)).isEqualTo("test"); + assertThat(uniqueNamer.getUniqueName(info2, 
settings)).isEqualTo("test_1"); + assertThat(uniqueNamer.getUniqueName(info3, settings)).isEqualTo("test_1_1"); + assertThat(uniqueNamer.getUniqueName(info4, settings)).isEqualTo("test_2"); } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/types/ResultTypeTest.java b/backend/src/test/java/com/bakdata/conquery/models/types/ResultTypeTest.java index 45888ecd27..f66f7fff5b 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/types/ResultTypeTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/types/ResultTypeTest.java @@ -11,14 +11,16 @@ import java.util.Map; import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.forms.util.Resolution; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.ExternalResultInfo; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; -import com.bakdata.conquery.models.query.resultinfo.printers.ResultPrinters; -import com.google.common.collect.ImmutableMap; +import com.bakdata.conquery.models.query.resultinfo.printers.Printer; +import com.bakdata.conquery.models.query.resultinfo.printers.PrinterFactory; +import com.bakdata.conquery.models.query.resultinfo.printers.StringResultPrinters; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -30,6 +32,8 @@ public class ResultTypeTest { private static final PrintSettings PRETTY_DE = new PrintSettings(true, Locale.GERMANY, null, CONFIG, null, null); private static final PrintSettings PLAIN = new PrintSettings(false, Locale.ENGLISH, null, CONFIG, null, null); + private static PrinterFactory PRINTERS = new StringResultPrinters(); + static { // Initialization of the internationalization I18n.init(); @@ -38,83 +42,100 @@ public class ResultTypeTest { CONFIG.getLocale().setDateFormatMapping(Map.of(Locale.GERMAN, "dd.MM.yyyy")); } + @SuppressWarnings("unused") public static List testData() { - return List.of( - //TODO Arguments.of(PRETTY, ConqueryConstants.formResolutionInfo(PLAIN), Resolution.COMPLETE.name(), "complete"), -//TODO Arguments.of(PRETTY_DE, ConqueryConstants.formResolutionInfo(PLAIN), Resolution.COMPLETE.name(), "Gesamt"), - - Arguments.of(PRETTY, ResultType.Primitive.BOOLEAN, true, "Yes"), - Arguments.of(PRETTY, ResultType.Primitive.BOOLEAN, false, "No"), - Arguments.of(PRETTY, ResultType.Primitive.STRING, "test", "test"), - Arguments.of(PRETTY, ResultType.Primitive.DATE, LocalDate.of(2013, 7, 12).toEpochDay(), "2013-07-12"), - Arguments.of(PRETTY_DE, ResultType.Primitive.DATE, LocalDate.of(2013, 7, 12).toEpochDay(), "12.07.2013"), - Arguments.of(PRETTY, ResultType.Primitive.DATE_RANGE, List.of(Long.valueOf(LocalDate.of(2013, 7, 12).toEpochDay()) - .intValue(), Long.valueOf(LocalDate.of(2013, 7, 12).toEpochDay()) - .intValue()), "2013-07-12"), - Arguments.of(PRETTY_DE, ResultType.Primitive.DATE_RANGE, List.of(Long.valueOf(LocalDate.of(2013, 7, 12).toEpochDay()) - .intValue(), Long.valueOf(LocalDate.of(2013, 7, 12).toEpochDay()) - .intValue()), "12.07.2013"), - Arguments.of(PRETTY, ResultType.Primitive.DATE_RANGE, List.of(Long.valueOf(LocalDate.of(2013, 7, 12).toEpochDay()) - .intValue(), Long.valueOf(LocalDate.of(2014, 7, 12).toEpochDay()) - .intValue()), "2013-07-12/2014-07-12"), - Arguments.of(PRETTY_DE, 
ResultType.Primitive.DATE_RANGE, List.of(Long.valueOf(LocalDate.of(2013, 7, 12).toEpochDay()) - .intValue(), Long.valueOf(LocalDate.of(2014, 7, 12).toEpochDay()) - .intValue()), "12.07.2013 - 12.07.2014"), - Arguments.of(PRETTY, ResultType.Primitive.INTEGER, 51839274, "51,839,274"), - Arguments.of(PRETTY_DE, ResultType.Primitive.INTEGER, 51839274, "51.839.274"), - Arguments.of(PRETTY, ResultType.Primitive.MONEY, 51839274L, "€518,392.74"), - Arguments.of(PRETTY_DE, ResultType.Primitive.MONEY, 51839274L, "518.392,74 €"), - Arguments.of(PRETTY, ResultType.Primitive.NUMERIC, 0.2, "0.2"), - Arguments.of(PRETTY_DE, ResultType.Primitive.NUMERIC, 0.2, "0,2"), - Arguments.of(PRETTY, ResultType.Primitive.NUMERIC, new BigDecimal("716283712389817246892743124.12312"), "716,283,712,389,817,246,892,743,124.12312"), - Arguments.of(PRETTY_DE, ResultType.Primitive.NUMERIC, new BigDecimal("716283712389817246892743124.12312"), "716.283.712.389.817.246.892.743.124,12312"), - Arguments.of(PRETTY, ResultType.Primitive.STRING, "test", "test"), - - Arguments.of(PLAIN, ResultType.Primitive.BOOLEAN, true, "1"), - Arguments.of(PLAIN, ResultType.Primitive.BOOLEAN, false, "0"), - Arguments.of(PLAIN, ResultType.Primitive.STRING, "test", "test"), - Arguments.of(PLAIN, ResultType.Primitive.DATE, LocalDate.of(2013, 7, 12).toEpochDay(), "2013-07-12"), - Arguments.of(PLAIN, ResultType.Primitive.INTEGER, 51839274, "51839274"), - Arguments.of(PLAIN, ResultType.Primitive.MONEY, 51839274L, "51839274"), - Arguments.of(PLAIN, ResultType.Primitive.NUMERIC, 0.2, "0.2"), - Arguments.of(PLAIN, ResultType.Primitive.NUMERIC, new BigDecimal("716283712389817246892743124.12312"), "716283712389817246892743124.12312"), - Arguments.of(PLAIN, ResultType.Primitive.STRING, "test", "test"), - Arguments.of(PLAIN, ResultType.Primitive.STRING, Resolution.COMPLETE.name(), "COMPLETE"), // TODO fk: is supposed not to test the mapping? 
- Arguments.of(PLAIN, ResultType.Primitive.STRING, ImmutableMap.of("a", 2, "c", 1), "{a=2, c=1}") + return List.of(Arguments.of(PRETTY, ResultType.Primitive.BOOLEAN, true, "Yes"), + Arguments.of(PRETTY, ResultType.Primitive.BOOLEAN, false, "No"), + Arguments.of(PRETTY, ResultType.Primitive.STRING, "test", "test"), + Arguments.of(PRETTY, ResultType.Primitive.DATE, CDate.ofLocalDate(LocalDate.of(2013, 7, 12)), "2013-07-12"), + Arguments.of(PRETTY_DE, ResultType.Primitive.DATE, CDate.ofLocalDate(LocalDate.of(2013, 7, 12)), "12.07.2013"), + Arguments.of(PRETTY, + ResultType.Primitive.DATE_RANGE, + List.of(CDate.ofLocalDate(LocalDate.of(2013, 7, 12)), CDate.ofLocalDate(LocalDate.of(2013, 7, 12))), + "2013-07-12" + ), + Arguments.of(PRETTY_DE, + ResultType.Primitive.DATE_RANGE, + List.of(CDate.ofLocalDate(LocalDate.of(2013, 7, 12)), CDate.ofLocalDate(LocalDate.of(2013, 7, 12))), + "12.07.2013" + ), + Arguments.of(PRETTY, + ResultType.Primitive.DATE_RANGE, + List.of(CDate.ofLocalDate(LocalDate.of(2013, 7, 12)), CDate.ofLocalDate(LocalDate.of(2014, 7, 12))), + "2013-07-12/2014-07-12" + ), + Arguments.of(PRETTY_DE, + ResultType.Primitive.DATE_RANGE, + List.of(CDate.ofLocalDate(LocalDate.of(2013, 7, 12)), CDate.ofLocalDate(LocalDate.of(2014, 7, 12))), + "12.07.2013 - 12.07.2014" + ), + Arguments.of(PRETTY, ResultType.Primitive.INTEGER, 51839274L, "51,839,274"), + Arguments.of(PRETTY_DE, ResultType.Primitive.INTEGER, 51839274L, "51.839.274"), + Arguments.of(PRETTY, ResultType.Primitive.MONEY, new BigDecimal("518392.74"), "€518,392.74"), + Arguments.of(PRETTY_DE, ResultType.Primitive.MONEY, new BigDecimal("518392.74"), "518.392,74\u00A0€"), + Arguments.of(PRETTY, ResultType.Primitive.NUMERIC, 0.2, "0.2"), + Arguments.of(PRETTY_DE, ResultType.Primitive.NUMERIC, 0.2, "0,2"), + Arguments.of(PRETTY, + ResultType.Primitive.NUMERIC, + new BigDecimal("716283712389817246892743124.12312"), + "716,283,712,389,817,246,892,743,124.12312" + ), + Arguments.of(PRETTY_DE, + ResultType.Primitive.NUMERIC, + new BigDecimal("716283712389817246892743124.12312"), + "716.283.712.389.817.246.892.743.124,12312" + ), + Arguments.of(PRETTY, ResultType.Primitive.STRING, "test", "test"), + + Arguments.of(PLAIN, ResultType.Primitive.BOOLEAN, true, "1"), + Arguments.of(PLAIN, ResultType.Primitive.BOOLEAN, false, "0"), + Arguments.of(PLAIN, ResultType.Primitive.STRING, "test", "test"), + Arguments.of(PLAIN, ResultType.Primitive.DATE, LocalDate.of(2013, 7, 12).toEpochDay(), "2013-07-12"), + Arguments.of(PLAIN, ResultType.Primitive.INTEGER, 51839274L, "51839274"), + Arguments.of(PLAIN, ResultType.Primitive.MONEY, new BigDecimal(51839274L), "51839274"), + Arguments.of(PLAIN, ResultType.Primitive.NUMERIC, 0.2, "0.2"), + Arguments.of(PLAIN, + ResultType.Primitive.NUMERIC, + new BigDecimal("716283712389817246892743124.12312"), + "716283712389817246892743124.12312" + ), + Arguments.of(PLAIN, ResultType.Primitive.STRING, "test", "test"), + Arguments.of(PLAIN, ResultType.Primitive.STRING, Resolution.COMPLETE.name(), "COMPLETE") ); } - public static ResultInfo info(ResultType type, PrintSettings settings) { - return new ExternalResultInfo("col", type, settings); - } - @ParameterizedTest(name = "{0} {1}: {2} -> {3}") @MethodSource("testData") - public void testPrinting(PrintSettings cfg, ResultType type, Object value, String expected) throws IOException { - ResultInfo info = info(type, cfg); + public void testPrinting(PrintSettings printSettings, ResultType type, Object value, String expected) throws IOException { + ResultInfo info = 
info(type); - final ResultPrinters.Printer printer = info.getPrinter(); - assertThat(printer.print(value)).isEqualTo(expected); + final Printer printer = info.createPrinter(PRINTERS, printSettings); + + assertThat(printer.apply(value)).isEqualTo(expected); final String str = Jackson.MAPPER.writeValueAsString(value); final Object copy = Jackson.MAPPER.readValue(str, Object.class); - assertThat(printer.print(copy)).isEqualTo(expected); + assertThat(printer.apply(copy)).isEqualTo(expected); + } + + public static ResultInfo info(ResultType type) { + return new ExternalResultInfo("col", type); } @ParameterizedTest(name = "{1}: {2}") @MethodSource("testData") - public void testBinaryPrinting(PrintSettings cfg, ResultType type, Object value, String expected) throws IOException { - ResultInfo info = info(type, cfg); + public void testBinaryPrinting(PrintSettings printSettings, ResultType type, Object value, String expected) throws IOException { + ResultInfo info = info(type); - final ResultPrinters.Printer printer = info.getPrinter(); - assertThat(printer.print(value)).isEqualTo(expected); + final Printer printer = info.createPrinter(PRINTERS, printSettings); + assertThat(printer.apply(value)).isEqualTo(expected); final byte[] bytes = Jackson.BINARY_MAPPER.writeValueAsBytes(value); final Object copy = Jackson.BINARY_MAPPER.readValue(bytes, Object.class); - assertThat(printer.print(copy)).isEqualTo(expected); + assertThat(printer.apply(copy)).isEqualTo(expected); } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java b/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java deleted file mode 100644 index d2e9409287..0000000000 --- a/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java +++ /dev/null @@ -1,129 +0,0 @@ -package com.bakdata.conquery.models.types; - -import java.time.LocalDate; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; - -import com.bakdata.conquery.apiv1.forms.export_form.AbsoluteMode; -import com.bakdata.conquery.apiv1.forms.export_form.ExportForm; -import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.common.Range; -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; -import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; -import com.bakdata.conquery.models.events.MajorTypeId; -import com.bakdata.conquery.models.forms.util.ResolutionShortNames; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; -import com.fasterxml.jackson.databind.node.TextNode; -import lombok.experimental.UtilityClass; -import org.jetbrains.annotations.NotNull; - -/** - * Helper class for nested objects needed in {@link com.bakdata.conquery.models.SerializationTests} - */ -@UtilityClass -public class SerialisationObjectsUtil { - - - @NotNull - public static Dataset createDataset(CentralRegistry registry) { - final Dataset dataset = new Dataset("test-dataset"); - registry.register(dataset); 
- return dataset; - } - - @NotNull - public static TreeConcept createConcept(CentralRegistry registry, Dataset dataset) { - TreeConcept concept = new TreeConcept(); - concept.setDataset(dataset); - concept.setLabel("conceptLabel"); - concept.setName("conceptName"); - - Table table = new Table(); - - Column column = new Column(); - column.setLabel("colLabel"); - column.setName("colName"); - column.setType(MajorTypeId.STRING); - column.setTable(table); - - Column dateColumn = new Column(); - dateColumn.setLabel("colLabel2"); - dateColumn.setName("colName2"); - dateColumn.setType(MajorTypeId.DATE); - dateColumn.setTable(table); - - - table.setColumns(new Column[]{column, dateColumn}); - table.setDataset(dataset); - table.setLabel("tableLabel"); - table.setName("tableName"); - - column.setTable(table); - - ConceptTreeConnector connector = new ConceptTreeConnector(); - connector.setConcept(concept); - connector.setLabel("connLabel"); - connector.setName("connName"); - connector.setColumn(column); - - concept.setConnectors(List.of(connector)); - - ValidityDate valDate = ValidityDate.create(dateColumn); - valDate.setConnector(connector); - valDate.setLabel("valLabel"); - valDate.setName("valName"); - connector.setValidityDates(List.of(valDate)); - - registry.register(concept); - registry.register(column); - registry.register(dateColumn); - registry.register(table); - registry.register(connector); - registry.register(valDate); - return concept; - } - - @NotNull - public static ExportForm createExportForm(CentralRegistry registry, Dataset dataset) { - final TreeConcept concept = createConcept(registry, dataset); - final ExportForm exportForm = new ExportForm(); - final AbsoluteMode mode = new AbsoluteMode(); - mode.setDateRange(new Range<>(LocalDate.of(2200, 6, 1), LocalDate.of(2200, 6, 2))); - mode.setForm(exportForm); - - final CQConcept cqConcept = new CQConcept(); - - final CQTable table = new CQTable(); - table.setConcept(cqConcept); - table.setConnector(concept.getConnectors().get(0)); - - // Use ArrayList instead of ImmutalbeList here because they use different hash code implementations - cqConcept.setTables(new ArrayList<>(List.of(table))); - cqConcept.setElements(new ArrayList<>(List.of(concept))); - - exportForm.setTimeMode(mode); - exportForm.setFeatures(new ArrayList<>(List.of(cqConcept))); - exportForm.setValues(new TextNode("Some Node")); - exportForm.setQueryGroupId(new ManagedExecutionId(dataset.getId(), UUID.randomUUID())); - exportForm.setResolution(new ArrayList<>(List.of(ResolutionShortNames.COMPLETE))); - return exportForm; - } - - @NotNull - public static User createUser(CentralRegistry registry, MetaStorage storage) { - final User user = new User("test-user", "test-user", storage); - registry.register(user); - - user.updateStorage(); - return user; - } -} diff --git a/backend/src/test/java/com/bakdata/conquery/service/FilterSearchTest.java b/backend/src/test/java/com/bakdata/conquery/service/FilterSearchTest.java index 42f9c71e3c..231e9bfd61 100644 --- a/backend/src/test/java/com/bakdata/conquery/service/FilterSearchTest.java +++ b/backend/src/test/java/com/bakdata/conquery/service/FilterSearchTest.java @@ -5,24 +5,30 @@ import java.util.List; import java.util.Map; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.config.IndexConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; -import 
com.bakdata.conquery.models.datasets.concepts.Searchable; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SelectFilter; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SingleSelectFilter; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.index.IndexCreationException; import com.bakdata.conquery.models.query.FilterSearch; +import com.bakdata.conquery.util.extensions.NamespaceStorageExtension; import com.google.common.collect.ImmutableBiMap; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; public class FilterSearchTest { + @RegisterExtension + private static final NamespaceStorageExtension NAMESPACE_STORAGE_EXTENSION = new NamespaceStorageExtension(); + private static final NamespacedStorage NAMESPACED_STORAGE = NAMESPACE_STORAGE_EXTENSION.getStorage(); + @Test - public void totals() throws IndexCreationException { + public void totals() { final IndexConfig indexConfig = new IndexConfig(); FilterSearch search = new FilterSearch(indexConfig); @@ -33,10 +39,13 @@ public void totals() throws IndexCreationException { Column column = new Column(); Table table = new Table(); Dataset dataset = new Dataset("test_dataset"); + dataset.setNamespacedStorageProvider(NAMESPACED_STORAGE); + NAMESPACED_STORAGE.updateDataset(dataset); table.setName("test_table"); - table.setDataset(dataset); - concept.setDataset(dataset); + table.setDataset(dataset.getId()); + table.setColumns(new Column[]{column}); + concept.setDataset(dataset.getId()); concept.setName("test_concept"); concept.setConnectors(List.of(connector)); connector.setName("test_connector"); @@ -44,7 +53,8 @@ public void totals() throws IndexCreationException { connector.setConcept(concept); column.setTable(table); column.setName("test_column"); - filter.setColumn(column); + NAMESPACED_STORAGE.addTable(table); + filter.setColumn(column.getId()); filter.setConnector(connector); @@ -55,9 +65,14 @@ public void totals() throws IndexCreationException { )); // Register - for (Searchable searchable : filter.getSearchReferences()) { - search.addSearches(Map.of(searchable, searchable.createTrieSearch(indexConfig))); - } + filter.getSearchReferences().forEach(searchable -> { + try { + search.addSearches(Map.of(searchable, searchable.createTrieSearch(indexConfig))); + } + catch (IndexCreationException e) { + throw new RuntimeException(e); + } + }); search.registerValues(column, List.of( "a", @@ -71,7 +86,7 @@ public void totals() throws IndexCreationException { } @Test - public void totalsEmptyFiler() throws IndexCreationException { + public void totalsEmptyFiler() { final IndexConfig indexConfig = new IndexConfig(); FilterSearch search = new FilterSearch(indexConfig); @@ -82,10 +97,13 @@ public void totalsEmptyFiler() throws IndexCreationException { Column column = new Column(); Table table = new Table(); Dataset dataset = new Dataset("test_dataset"); + dataset.setNamespacedStorageProvider(NAMESPACED_STORAGE); + NAMESPACED_STORAGE.updateDataset(dataset); table.setName("test_table"); - table.setDataset(dataset); - concept.setDataset(dataset); + table.setDataset(dataset.getId()); + table.setColumns(new Column[]{column}); + concept.setDataset(dataset.getId()); concept.setName("test_concept"); concept.setConnectors(List.of(connector)); connector.setName("test_connector"); @@ -94,14 +112,20 @@ public void totalsEmptyFiler() throws 
IndexCreationException { column.setTable(table); column.setName("test_column"); column.setSearchDisabled(true); - filter.setColumn(column); + NAMESPACED_STORAGE.addTable(table); + + filter.setColumn(column.getId()); filter.setConnector(connector); // Register - for (Searchable searchable : filter.getSearchReferences()) { - search.addSearches(Map.of(searchable, searchable.createTrieSearch(indexConfig))); - } - + filter.getSearchReferences().forEach(searchable -> { + try { + search.addSearches(Map.of(searchable, searchable.createTrieSearch(indexConfig))); + } + catch (IndexCreationException e) { + throw new RuntimeException(e); + } + }); search.shrinkSearch(column); assertThat(search.getTotal(filter)).isEqualTo(0); diff --git a/backend/src/test/java/com/bakdata/conquery/models/index/IndexServiceTest.java b/backend/src/test/java/com/bakdata/conquery/service/IndexServiceTest.java similarity index 77% rename from backend/src/test/java/com/bakdata/conquery/models/index/IndexServiceTest.java rename to backend/src/test/java/com/bakdata/conquery/service/IndexServiceTest.java index 828c76cb7c..50c301a5c3 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/index/IndexServiceTest.java +++ b/backend/src/test/java/com/bakdata/conquery/service/IndexServiceTest.java @@ -1,7 +1,7 @@ -package com.bakdata.conquery.models.index; +package com.bakdata.conquery.service; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.awaitility.Awaitility.await; import static org.mockserver.model.HttpRequest.request; import java.io.IOException; @@ -10,22 +10,26 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; -import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.index.IndexService; +import com.bakdata.conquery.models.index.MapIndex; +import com.bakdata.conquery.models.index.MapInternToExternMapper; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.bakdata.conquery.util.extensions.MockServerExtension; import com.github.powerlibraries.io.In; import com.univocity.parsers.csv.CsvParserSettings; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestMethodOrder; +import org.junit.jupiter.api.extension.RegisterExtension; import org.mockserver.integration.ClientAndServer; import org.mockserver.model.HttpResponse; import org.mockserver.model.MediaType; @@ -33,39 +37,41 @@ @TestMethodOrder(MethodOrderer.OrderAnnotation.class) @Slf4j public class IndexServiceTest { + @RegisterExtension + private static final MockServerExtension REF_SERVER = new MockServerExtension(ClientAndServer.startClientAndServer(), IndexServiceTest::initRefServer); private static final NamespaceStorage NAMESPACE_STORAGE = new NamespaceStorage(new NonPersistentStoreFactory(), IndexServiceTest.class.getName()); private static final Dataset DATASET = new Dataset("dataset"); private static final ConqueryConfig CONFIG = new ConqueryConfig(); - private static final ClientAndServer REF_SERVER = 
ClientAndServer.startClientAndServer(); private final IndexService indexService = new IndexService(new CsvParserSettings(), "emptyDefaultLabel"); + @SneakyThrows(IOException.class) + private static void initRefServer(ClientAndServer mockServer) { + log.info("Test loading of mapping"); + + try (InputStream inputStream = In.resource("/tests/aggregator/FIRST_MAPPED_AGGREGATOR/mapping.csv").asStream()) { + mockServer.when(request().withPath("/mapping.csv")) + .respond(HttpResponse.response().withContentType(new MediaType("text", "csv")).withBody(inputStream.readAllBytes())); + } + + } + @BeforeAll @SneakyThrows public static void beforeAll() { - NAMESPACE_STORAGE.openStores(Jackson.MAPPER); - - NAMESPACE_STORAGE.updateDataset(DATASET); CONFIG.getIndex().setBaseUrl(new URI(String.format("http://localhost:%d/", REF_SERVER.getPort()))); - } + NAMESPACE_STORAGE.openStores(null, null); + + DATASET.setNamespacedStorageProvider(NAMESPACE_STORAGE); + NAMESPACE_STORAGE.updateDataset(DATASET); - @AfterAll - @SneakyThrows - public static void afterAll() { - REF_SERVER.stop(); } @Test @Order(0) - void testLoading() throws NoSuchFieldException, IllegalAccessException, URISyntaxException, IOException, ExecutionException, InterruptedException { - log.info("Test loading of mapping"); - - try (InputStream inputStream = In.resource("/tests/aggregator/FIRST_MAPPED_AGGREGATOR/mapping.csv").asStream()) { - REF_SERVER.when(request().withPath("/mapping.csv")) - .respond(HttpResponse.response().withContentType(new MediaType("text", "csv")).withBody(inputStream.readAllBytes())); - } + void testLoading() throws NoSuchFieldException, IllegalAccessException, URISyntaxException, IOException { final MapInternToExternMapper mapper = new MapInternToExternMapper( "test1", @@ -74,6 +80,7 @@ void testLoading() throws NoSuchFieldException, IllegalAccessException, URISynta "{{external}}" ); + final MapInternToExternMapper mapperUrlAbsolute = new MapInternToExternMapper( "testUrlAbsolute", new URI(String.format("http://localhost:%d/mapping.csv", REF_SERVER.getPort())), @@ -97,17 +104,15 @@ void testLoading() throws NoSuchFieldException, IllegalAccessException, URISynta mapperUrlAbsolute.init(); mapperUrlRelative.init(); - // Wait for future - mapper.getInt2ext().get(); - mapperUrlAbsolute.getInt2ext().get(); - mapperUrlRelative.getInt2ext().get(); - + await().timeout(5, TimeUnit.SECONDS).until(mapper::initialized); assertThat(mapper.external("int1")).as("Internal Value").isEqualTo("hello"); assertThat(mapper.external("int2")).as("Internal Value").isEqualTo("int2"); + await().timeout(5, TimeUnit.SECONDS).until(mapperUrlAbsolute::initialized); assertThat(mapperUrlAbsolute.external("int1")).as("Internal Value").isEqualTo("hello"); assertThat(mapperUrlAbsolute.external("int2")).as("Internal Value").isEqualTo("int2"); + await().timeout(5, TimeUnit.SECONDS).until(mapperUrlRelative::initialized); assertThat(mapperUrlRelative.external("int1")).as("Internal Value").isEqualTo("hello"); assertThat(mapperUrlRelative.external("int2")).as("Internal Value").isEqualTo("int2"); @@ -124,7 +129,7 @@ private static void injectComponents(MapInternToExternMapper mapInternToExternMa final Field configField = MapInternToExternMapper.class.getDeclaredField(MapInternToExternMapper.Fields.config); configField.setAccessible(true); - configField.set(mapInternToExternMapper, IndexServiceTest.CONFIG); + configField.set(mapInternToExternMapper, CONFIG); } @@ -143,10 +148,8 @@ void testEvictOnMapper() injectComponents(mapInternToExternMapper, indexService); 
mapInternToExternMapper.init(); - // Wait for future - mapInternToExternMapper.getInt2ext().get(); - // Before eviction the result should be the same + await().timeout(5, TimeUnit.SECONDS).until(mapInternToExternMapper::initialized); assertThat(mapInternToExternMapper.external("int1")).as("Internal Value").isEqualTo("hello"); @@ -165,25 +168,4 @@ void testEvictOnMapper() assertThat(mappingBeforeEvict).as("Mapping before and after eviction") .isNotSameAs(mappingAfterEvict); } - - @Test - void testFailedLoading() throws NoSuchFieldException, IllegalAccessException, URISyntaxException { - final MapInternToExternMapper mapInternToExternMapper = new MapInternToExternMapper( - "test1", - new URI("classpath:/tests/aggregator/FIRST_MAPPED_AGGREGATOR/not_existing_mapping.csv"), - "internal", - "{{external}}" - ); - - injectComponents(mapInternToExternMapper, indexService); - mapInternToExternMapper.init(); - - // Wait for future - assertThatThrownBy(() -> mapInternToExternMapper.getInt2ext().get()).as("Not existent CSV").hasCauseInstanceOf(IllegalStateException.class); - - - // Before eviction the result should be the same - assertThat(mapInternToExternMapper.external("int1")).as("Internal Value").isEqualTo("int1"); - } - } diff --git a/backend/src/test/java/com/bakdata/conquery/tasks/PermissionCleanupTaskTest.java b/backend/src/test/java/com/bakdata/conquery/tasks/PermissionCleanupTaskTest.java index 5d1cd5674a..91b5e718b4 100644 --- a/backend/src/test/java/com/bakdata/conquery/tasks/PermissionCleanupTaskTest.java +++ b/backend/src/test/java/com/bakdata/conquery/tasks/PermissionCleanupTaskTest.java @@ -3,7 +3,6 @@ import static com.bakdata.conquery.tasks.PermissionCleanupTask.deletePermissionsOfOwnedInstances; import static com.bakdata.conquery.tasks.PermissionCleanupTask.deleteQueryPermissionsWithMissingRef; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; import java.time.Instant; import java.time.LocalDateTime; @@ -21,6 +20,7 @@ import com.bakdata.conquery.models.auth.permissions.WildcardPermission; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.util.NonPersistentStoreFactory; import org.junit.jupiter.api.AfterEach; @@ -42,7 +42,7 @@ private ManagedQuery createManagedQuery() { ConceptQuery query = new ConceptQuery(root); - final ManagedQuery managedQuery = new ManagedQuery(query, mock(User.class), new Dataset("test"), STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(query, new UserId("test_user"), new Dataset("test").getId(), STORAGE, null); managedQuery.setCreationTime(LocalDateTime.now().minusDays(1)); @@ -121,7 +121,7 @@ void doDeletePermissionsOfOwnedReference() { STORAGE.updateUser(user); user.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR, managedQueryOwned.getId())); - managedQueryOwned.setOwner(user); + managedQueryOwned.setOwner(user.getId()); STORAGE.updateExecution(managedQueryOwned); // Created not owned execution @@ -130,7 +130,7 @@ void doDeletePermissionsOfOwnedReference() { user.addPermission(ExecutionPermission.onInstance(Ability.READ, managedQueryNotOwned.getId())); // Set owner - managedQueryNotOwned.setOwner(user2); + managedQueryNotOwned.setOwner(user2.getId()); STORAGE.updateExecution(managedQueryNotOwned); 
deletePermissionsOfOwnedInstances(STORAGE, ExecutionPermission.DOMAIN.toLowerCase(), ManagedExecutionId.Parser.INSTANCE, STORAGE::getExecution); diff --git a/backend/src/test/java/com/bakdata/conquery/tasks/QueryCleanupTaskTest.java b/backend/src/test/java/com/bakdata/conquery/tasks/QueryCleanupTaskTest.java index 05e23edad3..28691377ad 100644 --- a/backend/src/test/java/com/bakdata/conquery/tasks/QueryCleanupTaskTest.java +++ b/backend/src/test/java/com/bakdata/conquery/tasks/QueryCleanupTaskTest.java @@ -1,7 +1,6 @@ package com.bakdata.conquery.tasks; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; import java.time.Duration; import java.time.LocalDateTime; @@ -14,8 +13,8 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQAnd; import com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.util.NonPersistentStoreFactory; import org.junit.jupiter.api.AfterEach; @@ -25,6 +24,7 @@ @TestInstance(Lifecycle.PER_CLASS) class QueryCleanupTaskTest { + private static final MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); private final Duration queryExpiration = Duration.ofDays(30); @@ -36,16 +36,16 @@ private ManagedQuery createManagedQuery() { ConceptQuery query = new ConceptQuery(root); - final ManagedQuery managedQuery = new ManagedQuery(query, mock(User.class), new Dataset("test"), STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(query, new UserId("test"), new Dataset("test").getId(), STORAGE, null); managedQuery.setCreationTime(LocalDateTime.now().minus(queryExpiration).minusDays(1)); STORAGE.addExecution(managedQuery); + managedQuery.setMetaStorage(STORAGE); return managedQuery; } - private static final MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); @AfterEach @@ -77,7 +77,8 @@ void singleNamed() throws Exception { managedQuery.setLabel("test"); - new QueryCleanupTask(STORAGE, queryExpiration).execute(Map.of(), null); + QueryCleanupTask queryCleanupTask = new QueryCleanupTask(STORAGE, queryExpiration); + queryCleanupTask.execute(Map.of(), null); assertThat(STORAGE.getAllExecutions()).containsExactlyInAnyOrder(managedQuery); } diff --git a/backend/src/test/java/com/bakdata/conquery/util/FailingMetaStorage.java b/backend/src/test/java/com/bakdata/conquery/util/FailingMetaStorage.java new file mode 100644 index 0000000000..039daac7f5 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/FailingMetaStorage.java @@ -0,0 +1,180 @@ +package com.bakdata.conquery.util; + +import java.util.stream.Stream; + +import com.bakdata.conquery.io.storage.ManagedStore; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.entities.Group; +import com.bakdata.conquery.models.auth.entities.Role; +import com.bakdata.conquery.models.auth.entities.User; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.forms.configs.FormConfig; +import com.bakdata.conquery.models.identifiable.ids.Id; +import com.bakdata.conquery.models.identifiable.ids.MetaId; +import com.bakdata.conquery.models.identifiable.ids.specific.FormConfigId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.GroupId; +import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.RoleId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; + +/** + * A meta storage that can be injected in to deserialization in environments where no MetaStorage exists, e.g. in tests on the client side. + * During debugging this can help to identify where an object was deserialized. + */ +public class FailingMetaStorage extends MetaStorage { + + public final static FailingMetaStorage INSTANCE = new FailingMetaStorage(); + public static final String ERROR_MSG = "Cannot be used in this environment. The real metastore exists only on the manager node."; + + private FailingMetaStorage() { + super(null); + } + + @Override + public void openStores(ObjectMapper mapper, MetricRegistry metricRegistry) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public ImmutableList getStores() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public void clear() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public void addExecution(ManagedExecution query) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public ManagedExecution getExecution(ManagedExecutionId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllExecutions() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateExecution(ManagedExecution query) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void removeExecution(ManagedExecutionId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void addGroup(Group group) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Group getGroup(GroupId groupId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllGroups() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public void removeGroup(GroupId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateGroup(Group group) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void addUser(User user) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public User getUser(UserId userId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllUsers() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void removeUser(UserId userId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateUser(User user) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void addRole(Role role) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Role getRole(RoleId roleId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllRoles() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void removeRole(RoleId 
roleId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateRole(Role role) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FormConfig getFormConfig(FormConfigId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllFormConfigs() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void removeFormConfig(FormConfigId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateFormConfig(FormConfig formConfig) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void addFormConfig(FormConfig formConfig) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public & MetaId, VALUE> VALUE get(ID id) { + throw new UnsupportedOperationException(ERROR_MSG); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStore.java b/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStore.java index 216d858144..ac8c06a21e 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStore.java +++ b/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStore.java @@ -1,9 +1,9 @@ package com.bakdata.conquery.util; import java.io.IOException; -import java.util.Collection; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.stream.Stream; import com.bakdata.conquery.io.storage.Store; import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore; @@ -46,23 +46,28 @@ public void remove(KEY key) { } @Override - public void loadData() { + public int count() { + return map.size(); + } + @Override + public Stream getAll() { + return map.values().stream(); } @Override - public int count() { - return map.size(); + public Stream getAllKeys() { + return map.keySet().stream(); } @Override - public Collection getAll() { - return map.values(); + public void loadData() { + } @Override - public Collection getAllKeys() { - return map.keySet(); + public void close() throws IOException { + // Nothing to close } @Override @@ -74,9 +79,4 @@ public void removeStore() { public void clear() { map.clear(); } - - @Override - public void close() throws IOException { - // Nothing to close - } } diff --git a/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStoreFactory.java b/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStoreFactory.java index ae13a560f6..9b079ae62d 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStoreFactory.java +++ b/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStoreFactory.java @@ -6,27 +6,35 @@ import java.util.concurrent.ConcurrentHashMap; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.storage.*; -import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; +import com.bakdata.conquery.io.storage.IdentifiableStore; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.Store; +import com.bakdata.conquery.io.storage.StoreMappings; +import com.bakdata.conquery.io.storage.WorkerStorage; import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.Role; import com.bakdata.conquery.models.auth.entities.User; 
import com.bakdata.conquery.models.config.StoreFactory; -import com.bakdata.conquery.models.datasets.*; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.PreviewConfig; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.StructureNode; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; import com.bakdata.conquery.models.worker.WorkerInformation; import com.bakdata.conquery.models.worker.WorkerToBucketsMap; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; @CPSType(id = "NON_PERSISTENT", base = StoreFactory.class) @@ -63,7 +71,7 @@ public Collection discoverNamespaceStorages() { } @Override - public Collection discoverWorkerStorages() { + public Collection discoverWorkerStorages() { return Collections.emptyList(); } @@ -73,99 +81,99 @@ public SingletonStore createDatasetStore(String pathName, ObjectMapper } @Override - public IdentifiableStore createSecondaryIdDescriptionStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(secondaryIdDescriptionStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore createSecondaryIdDescriptionStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(secondaryIdDescriptionStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createInternToExternMappingStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.identifiable(internToExternStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore
createTableStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(tableStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createSearchIndexStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.identifiable(searchIndexStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore> createConceptStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(conceptStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createPreviewStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.singleton(previewStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); - + public IdentifiableStore createImportStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(importStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public CachedStore createEntity2BucketStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.cached(entity2Bucket.computeIfAbsent(pathName, ignored -> new NonPersistentStore<>())); + public IdentifiableStore createCBlockStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(cBlockStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore
createTableStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(tableStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore createBucketStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(bucketStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore> createConceptStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(conceptStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createWorkerInformationStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(workerStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createImportStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(importStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createIdMappingStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(idMappingStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createCBlockStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(cBlockStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createWorkerToBucketsStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(workerToBucketStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createBucketStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(bucketStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createStructureStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(structureStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createWorkerInformationStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.singleton(workerStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); + public IdentifiableStore createExecutionsStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(executionStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createIdMappingStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.singleton(idMappingStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); + public IdentifiableStore createFormConfigStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(formConfigStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createWorkerToBucketsStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.singleton(workerToBucketStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); + public IdentifiableStore createUserStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(userStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } 
	@Override
-	public SingletonStore createStructureStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) {
-		return StoreMappings.singleton(structureStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()));
+	public IdentifiableStore createRoleStore(String pathName, ObjectMapper objectMapper) {
+		return StoreMappings.identifiable(roleStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()));
	}

	@Override
-	public IdentifiableStore createExecutionsStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) {
-		return StoreMappings.identifiable(executionStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry);
+	public IdentifiableStore createGroupStore(String pathName, ObjectMapper objectMapper) {
+		return StoreMappings.identifiable(groupStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()));
	}

	@Override
-	public IdentifiableStore createFormConfigStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) {
-		return StoreMappings.identifiable(formConfigStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry);
+	public IdentifiableStore createInternToExternMappingStore(String pathName, ObjectMapper objectMapper) {
+		return StoreMappings.identifiable(internToExternStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()));
	}

	@Override
-	public IdentifiableStore createUserStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) {
-		return StoreMappings.identifiable(userStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry);
+	public IdentifiableStore createSearchIndexStore(String pathName, ObjectMapper objectMapper) {
+		return StoreMappings.identifiable(searchIndexStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()));
	}

	@Override
-	public IdentifiableStore createRoleStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) {
-		return StoreMappings.identifiable(roleStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry);
+	public SingletonStore createPreviewStore(String pathName, ObjectMapper objectMapper) {
+		return StoreMappings.singleton(previewStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()));
+	}

	@Override
-	public IdentifiableStore createGroupStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) {
-		return StoreMappings.identifiable(groupStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry);
+	public Store createEntity2BucketStore(String pathName, ObjectMapper objectMapper) {
+		return entity2Bucket.computeIfAbsent(pathName, ignored -> new NonPersistentStore<>());
	}

	/**
@@ -173,7 +181,16 @@ public IdentifiableStore createGroupStore(CentralRegistry centralRegistry
	 */
	public MetaStorage createMetaStorage() {
		final MetaStorage metaStorage = new MetaStorage(this);
-		metaStorage.openStores(null);
+		metaStorage.openStores(null, new MetricRegistry());
		return metaStorage;
	}
+
+	/**
+	 * @implNote intended for Unit-tests
+	 */
+	public NamespaceStorage createNamespaceStorage() {
+		final NamespaceStorage storage = new NamespaceStorage(this, "_");
+		storage.openStores(null, new MetricRegistry());
+		return storage;
+	}
 }
diff --git a/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java b/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java
index
de4137ac1f..2555aecde9 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java @@ -10,17 +10,18 @@ import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespacedStorageImpl; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.forms.util.ResolutionShortNames; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.fasterxml.jackson.databind.node.TextNode; import lombok.experimental.UtilityClass; @@ -34,19 +35,63 @@ public class SerialisationObjectsUtil { @NotNull - public static Dataset createDataset(CentralRegistry registry) { - final Dataset dataset = new Dataset("test-dataset"); - registry.register(dataset); + public static Dataset createDataset(NamespacedStorageImpl storage) { + return createDataset("test-dataset", storage); + } + + @NotNull + public static Dataset createDataset(String name, NamespacedStorageImpl storage) { + Dataset dataset = new Dataset(name); + dataset.setNamespacedStorageProvider(storage); + storage.updateDataset(dataset); return dataset; } @NotNull - public static TreeConcept createConcept(CentralRegistry registry, Dataset dataset) { + public static ExportForm createExportForm(Dataset dataset, NamespacedStorageImpl storage) { + final TreeConcept concept = createConcept(dataset, storage); + final ExportForm exportForm = new ExportForm(); + final AbsoluteMode mode = new AbsoluteMode(); + mode.setDateRange(new Range<>(LocalDate.of(2200, 6, 1), LocalDate.of(2200, 6, 2))); + mode.setForm(exportForm); + + final CQConcept cqConcept = new CQConcept(); + + final CQTable table = new CQTable(); + table.setConcept(cqConcept); + table.setConnector(concept.getConnectors().get(0).getId()); + + // Use ArrayList instead of ImmutableList here because they use different hash code implementations + cqConcept.setTables(new ArrayList<>(List.of(table))); + cqConcept.setElements(new ArrayList<>(List.of(concept.getId()))); + + exportForm.setTimeMode(mode); + exportForm.setFeatures(new ArrayList<>(List.of(cqConcept))); + exportForm.setValues(new TextNode("Some Node")); + exportForm.setQueryGroupId(new ManagedExecutionId(dataset.getId(), UUID.randomUUID())); + exportForm.setResolution(new ArrayList<>(List.of(ResolutionShortNames.COMPLETE))); + + storage.updateConcept(concept); + + return exportForm; + } + + /** + * Does not add the produced concept to a store, only dependencies. 
+ * Otherwise, it might clash during serdes because init was not executed + */ + @NotNull + public static TreeConcept createConcept(Dataset dataset, NamespacedStorageImpl storage) { TreeConcept concept = new TreeConcept(); - concept.setDataset(dataset); + + concept.setDataset(dataset.getId()); concept.setLabel("conceptLabel"); concept.setName("conceptName"); + final SecondaryIdDescription secondaryIdDescription = new SecondaryIdDescription(); + secondaryIdDescription.setDataset(dataset.getId()); + secondaryIdDescription.setName("sid"); + Table table = new Table(); Column column = new Column(); @@ -63,9 +108,9 @@ public static TreeConcept createConcept(CentralRegistry registry, Dataset datase table.setColumns(new Column[]{column, dateColumn}); - table.setDataset(dataset); table.setLabel("tableLabel"); table.setName("tableName"); + table.setDataset(dataset.getId()); column.setTable(table); @@ -73,56 +118,33 @@ public static TreeConcept createConcept(CentralRegistry registry, Dataset datase connector.setConcept(concept); connector.setLabel("connLabel"); connector.setName("connName"); - connector.setColumn(column); concept.setConnectors(List.of(connector)); + storage.updateDataset(dataset); + storage.addSecondaryId(secondaryIdDescription); + storage.addTable(table); + + // Set/Create ids after setting id resolver + connector.setColumn(column.getId()); + column.setSecondaryId(secondaryIdDescription.getId()); + ValidityDate valDate = ValidityDate.create(dateColumn); valDate.setConnector(connector); valDate.setLabel("valLabel"); valDate.setName("valName"); connector.setValidityDates(List.of(valDate)); - registry.register(concept); - registry.register(column); - registry.register(dateColumn); - registry.register(table); - registry.register(connector); - registry.register(valDate); - return concept; - } - - @NotNull - public static ExportForm createExportForm(CentralRegistry registry, Dataset dataset) { - final TreeConcept concept = createConcept(registry, dataset); - final ExportForm exportForm = new ExportForm(); - final AbsoluteMode mode = new AbsoluteMode(); - mode.setDateRange(new Range<>(LocalDate.of(2200, 6, 1), LocalDate.of(2200, 6, 2))); - mode.setForm(exportForm); - - final CQConcept cqConcept = new CQConcept(); - - final CQTable table = new CQTable(); - table.setConcept(cqConcept); - table.setConnector(concept.getConnectors().get(0)); + // Initialize Concept + concept = new TreeConcept.Initializer().convert(concept); - // Use ArrayList instead of ImmutalbeList here because they use different hash code implementations - cqConcept.setTables(new ArrayList<>(List.of(table))); - cqConcept.setElements(new ArrayList<>(List.of(concept))); - - exportForm.setTimeMode(mode); - exportForm.setFeatures(new ArrayList<>(List.of(cqConcept))); - exportForm.setValues(new TextNode("Some Node")); - exportForm.setQueryGroupId(new ManagedExecutionId(dataset.getId(), UUID.randomUUID())); - exportForm.setResolution(new ArrayList<>(List.of(ResolutionShortNames.COMPLETE))); - return exportForm; + return concept; } @NotNull - public static User createUser(CentralRegistry registry, MetaStorage storage) { - final User user = new User("test-user", "test-user", storage); - registry.register(user); - + public static User createUser(MetaStorage metaStorage) { + final User user = new User("test-user", "test-user", metaStorage); + user.setMetaStorage(metaStorage); user.updateStorage(); return user; } diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/GroupExtension.java 
b/backend/src/test/java/com/bakdata/conquery/util/extensions/GroupExtension.java
new file mode 100644
index 0000000000..8c0c2a890e
--- /dev/null
+++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/GroupExtension.java
@@ -0,0 +1,22 @@
+package com.bakdata.conquery.util.extensions;
+
+import com.bakdata.conquery.io.storage.MetaStorage;
+import com.bakdata.conquery.models.auth.entities.Group;
+import lombok.Getter;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+
+@Getter
+public class GroupExtension implements BeforeAllCallback {
+
+	private final Group group;
+
+	public GroupExtension(MetaStorage metaStorage, String name) {
+		group = new Group(name, name, metaStorage);
+
+	}
+	@Override
+	public void beforeAll(ExtensionContext context) throws Exception {
+		group.updateStorage();
+	}
+}
diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/MetaStorageExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/MetaStorageExtension.java
new file mode 100644
index 0000000000..10fc15d3b5
--- /dev/null
+++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/MetaStorageExtension.java
@@ -0,0 +1,23 @@
+package com.bakdata.conquery.util.extensions;
+
+import com.bakdata.conquery.io.storage.MetaStorage;
+import com.bakdata.conquery.util.NonPersistentStoreFactory;
+import com.codahale.metrics.MetricRegistry;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+
+
+@RequiredArgsConstructor
+@Getter
+public class MetaStorageExtension implements BeforeAllCallback {
+	private final MetricRegistry metricRegistry;
+
+	private final MetaStorage metaStorage = new MetaStorage(new NonPersistentStoreFactory());
+
+	@Override
+	public void beforeAll(ExtensionContext extensionContext) throws Exception {
+		metaStorage.openStores(null, metricRegistry);
+	}
+}
diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/MockServerExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/MockServerExtension.java
new file mode 100644
index 0000000000..8315c044d4
--- /dev/null
+++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/MockServerExtension.java
@@ -0,0 +1,32 @@
+package com.bakdata.conquery.util.extensions;
+
+import java.util.function.Consumer;
+
+import lombok.RequiredArgsConstructor;
+import lombok.experimental.Delegate;
+import org.junit.jupiter.api.extension.AfterAllCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.mockserver.integration.ClientAndServer;
+
+@RequiredArgsConstructor
+public class MockServerExtension implements BeforeAllCallback, AfterAllCallback {
+
+	@Delegate
+	private final ClientAndServer server;
+	private final Consumer setup;
+
+	@Override
+	public void beforeAll(ExtensionContext context) throws Exception {
+		setup.accept(server);
+	}
+
+	@Override
+	public void afterAll(ExtensionContext context) throws Exception {
+		server.stop();
+	}
+
+	public String baseUrl(){
+		return "http://localhost:%d".formatted(server.getPort());
+	}
+}
diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/NamespaceStorageExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/NamespaceStorageExtension.java
index b01d28f2ca..bf6f5dd8c0 100644
---
a/backend/src/test/java/com/bakdata/conquery/util/extensions/NamespaceStorageExtension.java +++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/NamespaceStorageExtension.java @@ -3,6 +3,7 @@ import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import lombok.Getter; import org.junit.jupiter.api.extension.BeforeAllCallback; import org.junit.jupiter.api.extension.BeforeEachCallback; @@ -15,7 +16,7 @@ public class NamespaceStorageExtension implements BeforeAllCallback, BeforeEachC @Override public void beforeAll(ExtensionContext context) throws Exception { - storage.openStores(Jackson.MAPPER); + storage.openStores(Jackson.MAPPER, new MetricRegistry()); } @Override diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/UserExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/UserExtension.java new file mode 100644 index 0000000000..86da936696 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/UserExtension.java @@ -0,0 +1,28 @@ +package com.bakdata.conquery.util.extensions; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.entities.User; +import lombok.Getter; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + +public class UserExtension implements BeforeAllCallback { + private final MetaStorage metaStorage; + + @Getter + private final User user; + + public UserExtension(MetaStorage metaStorage, String id, String label) { + this.metaStorage = metaStorage; + user = new User(id, label, metaStorage); + } + + public UserExtension(MetaStorage metaStorage, String id) { + this(metaStorage, id, id); + } + + @Override + public void beforeAll(ExtensionContext extensionContext) throws Exception { + metaStorage.addUser(user); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/ConfigOverride.java b/backend/src/test/java/com/bakdata/conquery/util/support/ConfigOverride.java index 3d5dfa73a1..929716ec59 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/support/ConfigOverride.java +++ b/backend/src/test/java/com/bakdata/conquery/util/support/ConfigOverride.java @@ -2,6 +2,7 @@ import java.io.File; import java.net.ServerSocket; +import java.nio.file.Path; import java.util.Collection; import com.bakdata.conquery.models.config.ConqueryConfig; @@ -53,4 +54,11 @@ public static void configureRandomPorts(ConqueryConfig config) { config.getCluster().setPort(s.getLocalPort()); } } + + + public static void configureWorkdir(XodusStoreFactory storageConfig, Path workdir) { + + // Create new storage path to prevent xodus lock conflicts + storageConfig.setDirectory(workdir); + } } diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/ConqueryAuthenticationFilter.java b/backend/src/test/java/com/bakdata/conquery/util/support/ConqueryAuthenticationFilter.java new file mode 100644 index 0000000000..aef66c7c2a --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/support/ConqueryAuthenticationFilter.java @@ -0,0 +1,25 @@ +package com.bakdata.conquery.util.support; + +import java.util.function.Supplier; +import jakarta.ws.rs.client.ClientRequestContext; +import jakarta.ws.rs.client.ClientRequestFilter; +import jakarta.ws.rs.core.HttpHeaders; + +/** + * Simple filter for http client in test to provide authentication 
information. + * Skips, if the request had an {@link HttpHeaders#AUTHORIZATION} already set. + * @param tokenSupplier Supplier that provides a (fresh) token for each request. + */ +record ConqueryAuthenticationFilter(Supplier tokenSupplier) implements ClientRequestFilter { + + @Override + public void filter(ClientRequestContext requestContext) { + // If none set to provided token + if (requestContext.getHeaders().containsKey(HttpHeaders.AUTHORIZATION)) { + return; + } + + String token = tokenSupplier.get(); + requestContext.getHeaders().add(HttpHeaders.AUTHORIZATION, "Bearer " + token); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java b/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java index 8f3410c085..84fc8ca08b 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java +++ b/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java @@ -1,46 +1,32 @@ package com.bakdata.conquery.util.support; import java.io.File; -import java.io.IOException; import java.util.List; import java.util.Map; -import jakarta.validation.Validator; import jakarta.ws.rs.client.Client; -import jakarta.ws.rs.client.ClientRequestContext; -import jakarta.ws.rs.client.ClientRequestFilter; import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.commands.PreprocessorCommand; -import com.bakdata.conquery.commands.ShardNode; import com.bakdata.conquery.integration.json.TestDataImporter; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.auth.AuthorizationController; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; -import com.bakdata.conquery.resources.admin.rest.AdminDatasetProcessor; -import com.bakdata.conquery.resources.admin.rest.AdminProcessor; import com.google.common.util.concurrent.MoreExecutors; import io.dropwizard.core.setup.Environment; -import lombok.Data; import lombok.Getter; import lombok.RequiredArgsConstructor; +import lombok.experimental.Delegate; import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor public class StandaloneSupport implements TestSupport { - public enum Mode {WORKER, SQL} - @Getter private final Mode mode; + @Delegate private final TestConquery testConquery; @Getter private final Namespace namespace; @@ -51,18 +37,8 @@ public enum Mode {WORKER, SQL} @Getter private final ConqueryConfig config; @Getter - private final AdminProcessor metaProcessor; - @Getter - private final AdminDatasetProcessor datasetsProcessor; - @Getter - private final User testUser; - @Getter private final TestDataImporter testImporter; - public AuthorizationController getAuthorizationController() { - return testConquery.getStandaloneCommand().getManagerNode().getAuthController(); - } - public void waitUntilWorkDone() { testConquery.waitUntilWorkDone(); } @@ -90,58 +66,23 @@ public void run(Environment environment, net.sourceforge.argparse4j.inf.Namespac .run(env, namespace, config); } - - public Validator getValidator() { - return 
testConquery.getStandaloneCommand().getManagerNode().getValidator(); - } - - public MetaStorage getMetaStorage() { - return testConquery.getStandaloneCommand().getManagerNode().getMetaStorage(); - } - public NamespaceStorage getNamespaceStorage() { - return testConquery.getStandaloneCommand().getManagerNode().getDatasetRegistry().get(dataset.getId()).getStorage(); + return getStandaloneCommand().getManagerNode().getDatasetRegistry().get(dataset.getId()).getStorage(); } - public DatasetRegistry getDatasetRegistry() { - return testConquery.getStandaloneCommand().getManagerNode().getDatasetRegistry(); - } - - public List getShardNodes() { - return testConquery.getStandaloneCommand().getShardNodes(); - } - - /** - * Retrieves the port of the admin API. - * - * @return The port. - */ - public int getAdminPort() { - return testConquery.getDropwizard().getAdminPort(); + public AuthorizationController getAuthorizationController() { + return testConquery.getStandaloneCommand().getManagerNode().getAuthController(); } public Client getClient() { - return testConquery.getClient() - .register(new ConqueryAuthenticationFilter(getAuthorizationController().getConqueryTokenRealm().createTokenForUser(getTestUser().getId()))); - } - - public & NamespacedId, VALUE extends Identifiable> VALUE resolve(ID id) { - return getDatasetRegistry().resolve(id); + return testConquery.getClient(); } - @Data - private static class ConqueryAuthenticationFilter implements ClientRequestFilter { - private final String token; - - @Override - public void filter(ClientRequestContext requestContext) throws IOException { - // If none set to provided token - if(requestContext.getHeaders().containsKey("Authorization")){ - return; - } - - requestContext.getHeaders().add("Authorization", "Bearer " + getToken()); - } + public UriBuilder defaultApiURIBuilder() { + return UriBuilder.fromPath("api") + .host("localhost") + .scheme("http") + .port(getLocalPort()); } /** @@ -153,17 +94,23 @@ public int getLocalPort() { return testConquery.getDropwizard().getLocalPort(); } - public UriBuilder defaultApiURIBuilder() { - return UriBuilder.fromPath("api") - .host("localhost") - .scheme("http") - .port(getLocalPort()); - } - public UriBuilder defaultAdminURIBuilder() { return UriBuilder.fromPath("admin") .host("localhost") .scheme("http") .port(getAdminPort()); } + + /** + * Retrieves the port of the admin API. + * + * @return The port. 
+ */ + public int getAdminPort() { + return testConquery.getDropwizard().getAdminPort(); + } + + public enum Mode {WORKER, SQL} + + } diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java b/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java index c451a4d7e0..90b6f61be1 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java +++ b/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java @@ -1,36 +1,44 @@ package com.bakdata.conquery.util.support; +import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.time.Duration; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import jakarta.validation.Validator; import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.Conquery; import com.bakdata.conquery.commands.DistributedStandaloneCommand; import com.bakdata.conquery.commands.ShardNode; import com.bakdata.conquery.commands.StandaloneCommand; import com.bakdata.conquery.integration.IntegrationTests; +import com.bakdata.conquery.integration.common.LoadingUtil; import com.bakdata.conquery.integration.json.TestDataImporter; import com.bakdata.conquery.integration.sql.SqlStandaloneCommand; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.mode.cluster.ClusterManager; import com.bakdata.conquery.mode.cluster.ClusterState; +import com.bakdata.conquery.models.auth.AuthorizationController; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.resources.admin.rest.AdminDatasetProcessor; +import com.bakdata.conquery.resources.admin.rest.AdminProcessor; import com.bakdata.conquery.util.io.Cloner; import com.google.common.util.concurrent.Uninterruptibles; import io.dropwizard.client.JerseyClientBuilder; @@ -63,6 +71,7 @@ public class TestConquery { private Client client; // Initial user which is set before each test from the config. 
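The JUnit 5 extensions added above (MetaStorageExtension, UserExtension, GroupExtension, MockServerExtension, NamespaceStorageExtension) are meant to be registered per test class. A minimal sketch of such a wiring; the test class, @Order values and assertion are illustrative, only the extension constructors and getters come from the classes above:

import static org.assertj.core.api.Assertions.assertThat;

import com.bakdata.conquery.io.storage.MetaStorage;
import com.bakdata.conquery.util.extensions.MetaStorageExtension;
import com.bakdata.conquery.util.extensions.UserExtension;
import com.codahale.metrics.MetricRegistry;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

class ExampleStorageTest {

	// Opens fully in-memory stores (NonPersistentStoreFactory) before all tests.
	@Order(1)
	@RegisterExtension
	static final MetaStorageExtension STORAGE_EXTENSION = new MetaStorageExtension(new MetricRegistry());

	// Adds a user to that storage; must run after the stores have been opened.
	@Order(2)
	@RegisterExtension
	static final UserExtension USER_EXTENSION = new UserExtension(STORAGE_EXTENSION.getMetaStorage(), "user.test");

	@Test
	void userIsPersisted() {
		MetaStorage storage = STORAGE_EXTENSION.getMetaStorage();
		assertThat(storage.getUser(USER_EXTENSION.getUser().getId())).isNotNull();
	}
}
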
+ @Getter private User testUser; public synchronized StandaloneSupport openDataset(DatasetId datasetId) { @@ -75,6 +84,62 @@ public synchronized StandaloneSupport openDataset(DatasetId datasetId) { } } + private synchronized StandaloneSupport createSupport(DatasetId datasetId, String name) { + if (config.getSqlConnectorConfig().isEnabled()) { + return buildSupport(datasetId, name, StandaloneSupport.Mode.SQL); + } + return buildDistributedSupport(datasetId, name); + } + + private StandaloneSupport buildSupport(DatasetId datasetId, String name, StandaloneSupport.Mode mode) { + + DatasetRegistry datasets = standaloneCommand.getManager().getDatasetRegistry(); + Namespace ns = datasets.get(datasetId); + + // make tmp subdir and change cfg accordingly + File localTmpDir = new File(tmpDir, "tmp_" + name); + + if (!localTmpDir.exists()) { + if (!localTmpDir.mkdir()) { + throw new IllegalStateException("Could not create directory for Support"); + } + } + else { + log.info("Reusing existing folder {} for Support", localTmpDir.getPath()); + } + + ConqueryConfig + localCfg = + Cloner.clone(config, Map.of(Validator.class, standaloneCommand.getManagerNode().getEnvironment().getValidator()), IntegrationTests.MAPPER); + + StandaloneSupport support = new StandaloneSupport( + mode, + this, + ns, + ns.getStorage().getDataset(), + localTmpDir, + localCfg, + // Getting the User from AuthorizationConfig + testDataImporter + ); + + support.waitUntilWorkDone(); + openSupports.add(support); + return support; + } + + private synchronized StandaloneSupport buildDistributedSupport(DatasetId datasetId, String name) { + + ClusterManager manager = (ClusterManager) standaloneCommand.getManager(); + ClusterState clusterState = manager.getConnectionManager().getClusterState(); + assertThat(clusterState.getShardNodes()).hasSize(2); + + await().atMost(10, TimeUnit.SECONDS) + .until(() -> clusterState.getWorkerHandlers().get(datasetId).getWorkers().size() == clusterState.getShardNodes().size()); + + return buildSupport(datasetId, name, StandaloneSupport.Mode.WORKER); + } + public synchronized StandaloneSupport getSupport(String name) { try { log.info("Setting up dataset"); @@ -83,14 +148,73 @@ public synchronized StandaloneSupport getSupport(String name) { name += "[" + count + "]"; } Dataset dataset = new Dataset(name); - standaloneCommand.getManagerNode().getAdmin().getAdminDatasetProcessor().addDataset(dataset); - return createSupport(dataset.getId(), name); + waitUntilWorkDone(); + LoadingUtil.importDataset(getClient(), defaultAdminURIBuilder(), dataset); + + // Little detour here, but this way we get the correctly initialized dataset id + DatasetId datasetId = getDatasetRegistry().get(new DatasetId(dataset.getName())).getDataset().getId(); + waitUntilWorkDone(); + + return createSupport(datasetId, name); } catch (Exception e) { return fail("Failed to create a support for " + name, e); } } + public void waitUntilWorkDone() { + log.info("Waiting for jobs to finish"); + //sample multiple times from the job queues to make sure we are done with everything and don't miss late arrivals + long started = System.nanoTime(); + for (int i = 0; i < 5; i++) { + do { + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.MILLISECONDS); + + if (!isBusy()) { + break; + } + + + if (Duration.ofNanos(System.nanoTime() - started).toSeconds() > 10) { + started = System.nanoTime(); + log.warn("waiting for done work for a long time", new Exception()); + } + + } while (true); + } + log.trace("all jobs finished"); + } + + public UriBuilder 
defaultAdminURIBuilder() { + return UriBuilder.fromPath("admin") + .host("localhost") + .scheme("http") + .port(dropwizard.getAdminPort()); + } + + public DatasetRegistry getDatasetRegistry() { + return getStandaloneCommand().getManagerNode().getDatasetRegistry(); + } + + private boolean isBusy() { + boolean busy; + busy = standaloneCommand.getManagerNode().getJobManager().isSlowWorkerBusy(); + busy |= standaloneCommand.getManager().getDatasetRegistry().getDatasets().stream() + .map(Namespace::getExecutionManager) + .flatMap(e -> e.getExecutionStates().asMap().values().stream()) + .map(ExecutionManager.State::getState) + .anyMatch(ExecutionState.RUNNING::equals); + + for (Namespace namespace : standaloneCommand.getManagerNode().getDatasetRegistry().getDatasets()) { + busy |= namespace.getJobManager().isSlowWorkerBusy(); + } + + for (ShardNode shard : standaloneCommand.getShardNodes()) { + busy |= shard.isBusy(); + } + return busy; + } + @SneakyThrows public synchronized void shutdown() { //stop dropwizard directly so ConquerySupport does not delete the tmp directory @@ -98,7 +222,6 @@ public synchronized void shutdown() { openSupports.clear(); } - public void beforeAll() throws Exception { log.info("Working in temporary directory {}", tmpDir); @@ -117,19 +240,28 @@ public void beforeAll() throws Exception { // start server dropwizard.before(); - - if (!config.getSqlConnectorConfig().isEnabled()) { - // Wait for shards to be connected - ClusterManager manager = (ClusterManager) standaloneCommand.getManager(); - ClusterState clusterState = manager.getConnectionManager().getClusterState(); - await().atMost(10, TimeUnit.SECONDS).until(() -> clusterState.getShardNodes().size() == 2); - } - // create HTTP client for api tests client = new JerseyClientBuilder(this.getDropwizard().getEnvironment()) .withProperty(ClientProperties.CONNECT_TIMEOUT, 10000) .withProperty(ClientProperties.READ_TIMEOUT, 10000) .build("test client"); + + + + // The test user is recreated after each test, in the storage, but its id stays the same. + // Here we register the client filter once for that test user id. 
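Because the token supplier below is registered once on the shared test client, integration tests can hit authenticated endpoints without handling tokens themselves. A rough sketch of what a test can then do, assuming the usual jakarta.ws.rs and AssertJ imports; `conquery` stands for a running TestConquery instance and the "datasets" admin path is only an example:

	// The Authorization header is added transparently by ConqueryAuthenticationFilter.
	URI datasetsUri = conquery.defaultAdminURIBuilder().path("datasets").build();
	try (Response response = conquery.getClient().target(datasetsUri).request(MediaType.APPLICATION_JSON).get()) {
		assertThat(response.getStatus()).isNotEqualTo(401);
	}
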
+ UserId testUserId = config.getAuthorizationRealms().getInitialUsers().get(0).createId(); + client.register(new ConqueryAuthenticationFilter(() -> getAuthorizationController().getConqueryTokenRealm().createTokenForUser(testUserId))); + + testUser = getMetaStorage().getUser(testUserId); + } + + public AuthorizationController getAuthorizationController() { + return getStandaloneCommand().getManagerNode().getAuthController(); + } + + public MetaStorage getMetaStorage() { + return getStandaloneCommand().getManagerNode().getMetaStorage(); } public void afterAll() { @@ -162,109 +294,36 @@ public void removeSupport(StandaloneSupport support) { } } - public void waitUntilWorkDone() { - log.info("Waiting for jobs to finish"); - //sample multiple times from the job queues to make sure we are done with everything and don't miss late arrivals - long started = System.nanoTime(); - for (int i = 0; i < 5; i++) { - do { - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.MILLISECONDS); - - if (!isBusy()) { - break; - } - + public void beforeEach() { - if (Duration.ofNanos(System.nanoTime() - started).toSeconds() > 10) { - started = System.nanoTime(); - log.warn("waiting for done work for a long time", new Exception()); - } + // Because Shiro works with a static Security manager + getStandaloneCommand().getManagerNode().getAuthController().registerStaticSecurityManager(); - } while (true); - } - log.trace("all jobs finished"); - } - - public void beforeEach() { + // MetaStorage is cleared after each test, so we need to add the test user again final MetaStorage storage = standaloneCommand.getManagerNode().getMetaStorage(); testUser = standaloneCommand.getManagerNode().getConfig().getAuthorizationRealms().getInitialUsers().get(0).createOrOverwriteUser(storage); - storage.updateUser(testUser); } - private synchronized StandaloneSupport createSupport(DatasetId datasetId, String name) { - if (config.getSqlConnectorConfig().isEnabled()) { - return buildSupport(datasetId, name, StandaloneSupport.Mode.SQL); - } - return buildDistributedSupport(datasetId, name); + public Validator getValidator() { + return getStandaloneCommand().getManagerNode().getValidator(); } - private synchronized StandaloneSupport buildDistributedSupport(DatasetId datasetId, String name) { - - ClusterManager manager = (ClusterManager) standaloneCommand.getManager(); - ClusterState clusterState = manager.getConnectionManager().getClusterState(); - - await().atMost(10, TimeUnit.SECONDS) - .until(() -> clusterState.getWorkerHandlers().get(datasetId).getWorkers().size() == clusterState.getShardNodes().size()); - - return buildSupport(datasetId, name, StandaloneSupport.Mode.WORKER); + public List getShardNodes() { + return getStandaloneCommand().getShardNodes(); } - private StandaloneSupport buildSupport(DatasetId datasetId, String name, StandaloneSupport.Mode mode) { - - DatasetRegistry datasets = standaloneCommand.getManager().getDatasetRegistry(); - Namespace ns = datasets.get(datasetId); - - // make tmp subdir and change cfg accordingly - File localTmpDir = new File(tmpDir, "tmp_" + name); - - if (!localTmpDir.exists()) { - if (!localTmpDir.mkdir()) { - throw new IllegalStateException("Could not create directory for Support"); - } - } - else { - log.info("Reusing existing folder {} for Support", localTmpDir.getPath()); - } - - ConqueryConfig - localCfg = - Cloner.clone(config, Map.of(Validator.class, standaloneCommand.getManagerNode().getEnvironment().getValidator()), IntegrationTests.MAPPER); - - StandaloneSupport support = new 
StandaloneSupport( - mode, - this, - ns, - ns.getStorage().getDataset(), - localTmpDir, - localCfg, - standaloneCommand.getManagerNode().getAdmin().getAdminProcessor(), - standaloneCommand.getManagerNode().getAdmin().getAdminDatasetProcessor(), - // Getting the User from AuthorizationConfig - testUser, - testDataImporter - ); - - support.waitUntilWorkDone(); - openSupports.add(support); - return support; + public AdminProcessor getAdminProcessor() { + return standaloneCommand.getManagerNode().getAdmin().getAdminProcessor(); } - private boolean isBusy() { - boolean busy; - busy = standaloneCommand.getManagerNode().getJobManager().isSlowWorkerBusy(); - busy |= standaloneCommand.getManager().getDatasetRegistry().getDatasets().stream() - .map(Namespace::getExecutionManager) - .flatMap(e -> e.getExecutionStates().asMap().values().stream()) - .map(ExecutionManager.State::getState) - .anyMatch(ExecutionState.RUNNING::equals); - - for (Namespace namespace : standaloneCommand.getManagerNode().getDatasetRegistry().getDatasets()) { - busy |= namespace.getJobManager().isSlowWorkerBusy(); - } + public AdminDatasetProcessor getAdminDatasetsProcessor() { + return standaloneCommand.getManagerNode().getAdmin().getAdminDatasetProcessor(); + } - for (ShardNode shard : standaloneCommand.getShardNodes()) { - busy |= shard.isBusy(); - } - return busy; + public UriBuilder defaultApiURIBuilder() { + return UriBuilder.fromPath("api") + .host("localhost") + .scheme("http") + .port(dropwizard.getLocalPort()); } } diff --git a/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/SIMPLE_VIRTUAL_CONCEPT_Query.test.json b/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/SIMPLE_VIRTUAL_CONCEPT_Query.test.json deleted file mode 100644 index 8c624cf0e2..0000000000 --- a/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/SIMPLE_VIRTUAL_CONCEPT_Query.test.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "type": "QUERY_TEST", - "label": "MULTI_SELECT_AGGREGATOR Test", - "expectedCsv": "tests/aggregator/MULTI_SELECT_AGGREGATOR/expected.csv", - "query": { - "type": "CONCEPT_QUERY", - "root": { - "ids": [ - "concept" - ], - "type": "CONCEPT", - "tables": [ - { - "id": "concept.connector", - "selects": [ - "concept.connector.select" - ] - } - ] - } - }, - "concepts": [ - { - "name": "concept", - "type": "TREE", - "connectors": [ - { - "label": "connector", - "table": "table", - "validityDates": { - "label": "datum", - "column": "table.datum" - }, - "selects": { - "type": "COUNT_OCCURENCES", - "name" : "select", - "column": "table.value", - "selection": [ - "f", - "m" - ] - } - } - ] - } - ], - "content": { - "tables": [ - { - "csv": "tests/aggregator/MULTI_SELECT_AGGREGATOR/content.csv", - "name": "table", - "primaryColumn": { - "name": "pid", - "type": "STRING" - }, - "columns": [ - { - "name": "datum", - "type": "DATE" - }, - { - "name": "value", - "type": "STRING" - } - ] - } - ] - } -} diff --git a/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/content.csv b/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/content.csv deleted file mode 100644 index 5358e7b61a..0000000000 --- a/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/content.csv +++ /dev/null @@ -1,10 +0,0 @@ -pid,datum,value -1,2012-01-01,"f" -1,2012-01-02,"f" -2,2010-07-15, -3,2013-11-10,"f" -4,2012-11-11,"m" -5,2012-11-11,"x" -5,2012-11-11,"f" -6,2012-11-11,"m" -6,2012-11-11,"f" diff --git 
a/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/expected.csv b/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/expected.csv deleted file mode 100644 index 15ec609372..0000000000 --- a/backend/src/test/resources/tests/aggregator/MULTI_SELECT_AGGREGATOR/expected.csv +++ /dev/null @@ -1,7 +0,0 @@ -result,dates,concept select -2,{2010-07-15/2010-07-15}, -3,{2013-11-10/2013-11-10},{f=1} -6,{2012-11-11/2012-11-11},"{f=1, m=1}" -1,{2012-01-01/2012-01-02},{f=2} -4,{2012-11-11/2012-11-11},{m=1} -5,{2012-11-11/2012-11-11},{f=1} \ No newline at end of file diff --git a/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/SIMPLE_VIRTUAL_CONCEPT_Query.test.json b/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/SIMPLE_VIRTUAL_CONCEPT_Query.test.json deleted file mode 100644 index 59a83c0f6e..0000000000 --- a/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/SIMPLE_VIRTUAL_CONCEPT_Query.test.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "type": "QUERY_TEST", - "label": "SELECT_AGGREGATOR Test", - "expectedCsv": "tests/aggregator/SELECT_AGGREGATOR/expected.csv", - "query": { - "type": "CONCEPT_QUERY", - "root": { - "ids": [ - "concept" - ], - "type": "CONCEPT", - "tables": [ - { - "id": "concept.connector", - "selects": [ - "concept.connector.select" - ] - } - ] - } - }, - "concepts": [ - { - "name": "concept", - "type": "TREE", - "connectors": [ - { - "label": "connector", - "table": "table", - "validityDates": { - "label": "datum", - "column": "table.datum" - }, - "selects": { - "type": "COUNT_OCCURENCES", - "name": "select", - "column": "table.value", - "selection": "f" - } - } - ] - } - ], - "content": { - "tables": [ - { - "csv": "tests/aggregator/SELECT_AGGREGATOR/content.csv", - "name": "table", - "primaryColumn": { - "name": "pid", - "type": "STRING" - }, - "columns": [ - { - "name": "datum", - "type": "DATE" - }, - { - "name": "value", - "type": "STRING" - } - ] - } - ] - } -} diff --git a/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/content.csv b/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/content.csv deleted file mode 100644 index cadb60f0f0..0000000000 --- a/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/content.csv +++ /dev/null @@ -1,6 +0,0 @@ -pid,datum,value -1,2012-01-01,"f" -1,2012-01-02,"f" -2,2010-07-15, -3,2013-11-10,"f" -4,2012-11-11,"m" diff --git a/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/expected.csv b/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/expected.csv deleted file mode 100644 index f01e356de2..0000000000 --- a/backend/src/test/resources/tests/aggregator/SELECT_AGGREGATOR/expected.csv +++ /dev/null @@ -1,5 +0,0 @@ -result,dates,concept select -1,{2012-01-01/2012-01-02},2 -2,{2010-07-15/2010-07-15}, -3,{2013-11-10/2013-11-10},1 -4,{2012-11-11/2012-11-11}, \ No newline at end of file diff --git a/backend/src/test/resources/tests/filter/GROUP/GROUP.test.json b/backend/src/test/resources/tests/filter/GROUP/GROUP.test.json deleted file mode 100644 index 5c9888f4ef..0000000000 --- a/backend/src/test/resources/tests/filter/GROUP/GROUP.test.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "type": "FILTER_TEST", - "label": "GROUP Test", - "expectedCsv": "tests/filter/GROUP/expected.csv", - "content": { - "tables": { - "csv": "tests/filter/GROUP/content.csv", - "primaryColumn": { - "name": "pid", - "type": "STRING" - }, - "columns": [ - { - "name": "datum", - "type": "DATE" - }, - { - "name": "value", - "type": "STRING" - } - ] - } - 
}, - "connector": { - "validityDates": { - "label": "Datum", - "column": "table.datum" - }, - "filters": { - "label": "test filter", - "type": "TEST_GROUP_FILTER", - "column": "table.value" - } - }, - "filterValue": { - "type": "GROUP", - "value": { - "strings": [ - "a", - "ab" - ], - "repetitions": 2 - } - }, - "expectedFrontendConfig": { - "label": "test filter", - "type": "GROUP", - "filters": { - "strings": { - "type": "MULTI_SELECT", - "label": "Elements", - "options": [] - }, - "repetitions": { - "type": "INTEGER", - "label": "Maximum Repetitions", - "options": [] - } - }, - "options": [] - } -} diff --git a/backend/src/test/resources/tests/filter/GROUP/content.csv b/backend/src/test/resources/tests/filter/GROUP/content.csv deleted file mode 100644 index a3527700c6..0000000000 --- a/backend/src/test/resources/tests/filter/GROUP/content.csv +++ /dev/null @@ -1,15 +0,0 @@ -pid,datum,value -1,2015-03-17,a - -2,2015-03-17,ab - -3,2015-03-17,abab - -4,2015-03-17,aaa -4,2015-03-18,ababab - -5,2015-03-18,abab -5,2015-03-18,aaa - -6,2015-03-18, - diff --git a/backend/src/test/resources/tests/filter/GROUP/expected.csv b/backend/src/test/resources/tests/filter/GROUP/expected.csv deleted file mode 100644 index 0ec1de07b3..0000000000 --- a/backend/src/test/resources/tests/filter/GROUP/expected.csv +++ /dev/null @@ -1,5 +0,0 @@ -result,dates -1,{2015-03-17/2015-03-17} -2,{2015-03-17/2015-03-17} -3,{2015-03-17/2015-03-17} -5,{2015-03-18/2015-03-18} diff --git a/backend/src/test/resources/tests/filter/NUMBER_MONEY/NUMBER.test.json b/backend/src/test/resources/tests/filter/NUMBER_MONEY/NUMBER.test.json index 2b72f879ff..b435f19155 100644 --- a/backend/src/test/resources/tests/filter/NUMBER_MONEY/NUMBER.test.json +++ b/backend/src/test/resources/tests/filter/NUMBER_MONEY/NUMBER.test.json @@ -32,7 +32,7 @@ } }, "filterValue": { - "type": "INTEGER_RANGE", + "type": "MONEY_RANGE", "value": { "min": 10000, "max": 20000 diff --git a/backend/src/test/resources/tests/sql/filter/number_money/integer_range/number_money.spec.json b/backend/src/test/resources/tests/sql/filter/number_money/integer_range/number_money.spec.json index f65241485a..1349c3df9d 100644 --- a/backend/src/test/resources/tests/sql/filter/number_money/integer_range/number_money.spec.json +++ b/backend/src/test/resources/tests/sql/filter/number_money/integer_range/number_money.spec.json @@ -1,5 +1,5 @@ { - "label": "Single Number-Real-Range Filter Query with INTEGER RANGE on column with MONEY type", + "label": "Single Number-Real-Range Filter Query with MONEY RANGE on column with MONEY type", "type": "QUERY_TEST", "sqlSpec": { "isEnabled": true @@ -22,7 +22,7 @@ "filters": [ { "filter": "number.number_connector.money_number_filter", - "type": "INTEGER_RANGE", + "type": "MONEY_RANGE", "value": { "min": 10000, "max": 20000 diff --git a/backend/src/test/resources/tests/sql/filter/number_money/real_range/content.csv b/backend/src/test/resources/tests/sql/filter/number_money/real_range/content.csv deleted file mode 100644 index 1cb67fbcf4..0000000000 --- a/backend/src/test/resources/tests/sql/filter/number_money/real_range/content.csv +++ /dev/null @@ -1,19 +0,0 @@ -pid,nr -1,50 -2,250 -3,150 -3,150 -4,50 -4,150 -5,250 -5,150 -6,50 -6,250 -7,150.01 -8,200.01 -9,99.99 -10, -11, -11,300 -12,150 -12, diff --git a/backend/src/test/resources/tests/sql/filter/number_money/real_range/expected.csv b/backend/src/test/resources/tests/sql/filter/number_money/real_range/expected.csv deleted file mode 100644 index 235c68aa46..0000000000 --- 
a/backend/src/test/resources/tests/sql/filter/number_money/real_range/expected.csv +++ /dev/null @@ -1,6 +0,0 @@ -result,dates -3,{} -4,{} -5,{} -9,{} -12,{} diff --git a/backend/src/test/resources/tests/sql/filter/number_money/real_range/number_money.spec.json b/backend/src/test/resources/tests/sql/filter/number_money/real_range/number_money.spec.json deleted file mode 100644 index 9c869eaf1b..0000000000 --- a/backend/src/test/resources/tests/sql/filter/number_money/real_range/number_money.spec.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "label": "Single Number-Real-Range Filter Query with REAL RANGE on column with MONEY type", - "type": "QUERY_TEST", - "sqlSpec": { - "isEnabled": true - }, - "expectedCsv": "tests/sql/filter/number_money/real_range/expected.csv", - "query": { - "type": "CONCEPT_QUERY", - "root": { - "type": "AND", - "children": [ - { - "ids": [ - "number" - ], - "type": "CONCEPT", - "label": "vs", - "tables": [ - { - "id": "number.number_connector", - "filters": [ - { - "filter": "number.number_connector.money_number_filter", - "type": "REAL_RANGE", - "value": { - "min": 99.00, - "max": 150.00 - } - } - ] - } - ] - } - ] - } - }, - "concepts": [ - { - "label": "number", - "type": "TREE", - "connectors": [ - { - "label": "number_connector", - "table": "table1", - "filters": { - "name": "money_number_filter", - "column": "table1.money", - "type": "NUMBER" - } - } - ] - } - ], - "content": { - "tables": [ - { - "csv": "tests/sql/filter/number_money/real_range/content.csv", - "name": "table1", - "primaryColumn": { - "name": "pid", - "type": "STRING" - }, - "columns": [ - { - "name": "money", - "type": "MONEY" - } - ] - } - ] - } -} diff --git a/cypress/e2e/backend-admin-ui/test_2_dataset.cy.js b/cypress/e2e/backend-admin-ui/test_2_dataset.cy.js index a57f95ea41..746335e4f4 100644 --- a/cypress/e2e/backend-admin-ui/test_2_dataset.cy.js +++ b/cypress/e2e/backend-admin-ui/test_2_dataset.cy.js @@ -139,7 +139,7 @@ context("Admin UI Single Dataset", () => { it("Counts are right", () => { visitAdminUI(`datasets/${testDSID}/connectors/${testDSID}.concept1.column`); cy.get('[data-test-id="accordion-Filters"] > .card-header').contains("20 entries"); - cy.get('[data-test-id="accordion-Selects"] > .card-header').contains("17 entries"); + cy.get('[data-test-id="accordion-Selects"] > .card-header').contains("16 entries"); }); }); diff --git a/cypress/e2e/backend-admin-ui/test_3_smoketest.cy.js b/cypress/e2e/backend-admin-ui/test_3_smoketest.cy.js index 1d548877e4..98414be290 100644 --- a/cypress/e2e/backend-admin-ui/test_3_smoketest.cy.js +++ b/cypress/e2e/backend-admin-ui/test_3_smoketest.cy.js @@ -63,7 +63,7 @@ context("Simple UI Render Smoke Tests", () => { cy.get('#roles > .table-responsive').contains('Unresolvable Role') }) - + it("Delete faulty member and role", () => { visitAdminUI("groups/group.faulty_group"); @@ -80,6 +80,53 @@ context("Simple UI Render Smoke Tests", () => { cy.get('#roles > .table-responsive').should('not.contain.text', 'Unresolvable Role') }) - }) + }); + + describe("Dataset pages render", () => { + + it("Datasets", () => { + + visitAdminUI("datasets"); + + cy.root().should('not.contain.text', 'FreeMarker template error') + + cy.get('[data-cy=datasets-dataset1]') + .find('a') + .contains('dataset1') + .click() + + + cy.get('[data-test-id="accordion-Mappings"]').click() + cy.get('[data-test-id="accordion-SearchIndices"]').click() + cy.get('[data-test-id="accordion-Tables"]').click() + cy.get('[data-test-id="accordion-Concepts"]').click() + 
cy.get('[data-test-id="accordion-SecondaryIds"]').click() + + cy.root().should('not.contain.text', 'FreeMarker template error') -}) \ No newline at end of file + // Table page + cy.get('[data-test-id="accordion-Tables"]') + .find('a') + .contains('table') + .click() + + cy.get('[data-test-id="accordion-Tags"]').click() + cy.get('[data-test-id="accordion-Concepts"]').click() + cy.get('[data-test-id="accordion-Columns"]').click() + + cy.root().should('not.contain.text', 'FreeMarker template error') + + // Import page + cy.get('[data-test-id="accordion-Tags"]') + .find('a') + .contains('table') + .click() + + cy.get('[data-test-id="accordion-Columns"]').click() + + cy.root().should('not.contain.text', 'FreeMarker template error') + + + }); + }); +}) \ No newline at end of file diff --git a/cypress/support/test_data/all_types.concept.json b/cypress/support/test_data/all_types.concept.json index 36e7d64ab8..c629b0c7d7 100644 --- a/cypress/support/test_data/all_types.concept.json +++ b/cypress/support/test_data/all_types.concept.json @@ -159,12 +159,6 @@ "type": "RANDOM", "column": "table.INTEGER" }, - { - "label": "COUNT_OCCURENCES", - "type": "COUNT_OCCURENCES", - "column": "table.STRING", - "selection": [] - }, { "label": "FIRST", "type": "FIRST", diff --git a/cypress/support/test_data/data.csv b/cypress/support/test_data/data.csv new file mode 100644 index 0000000000..15345c9730 --- /dev/null +++ b/cypress/support/test_data/data.csv @@ -0,0 +1,2 @@ +id,STRING,INTEGER,BOOLEAN,REAL,DECIMAL,MONEY,DATE,DATE_RANGE +1,abc,1,true,1.1,1.1111111111111111111111111111111,1.11,2023-03-23,2023-03-23/2023-03-25 \ No newline at end of file diff --git a/cypress/support/test_data/data.import.json b/cypress/support/test_data/data.import.json new file mode 100644 index 0000000000..42392b4228 --- /dev/null +++ b/cypress/support/test_data/data.import.json @@ -0,0 +1,73 @@ +{ + "inputs": [ + { + "output": [ + { + "inputColumn": "id", + "name": "id", + "inputType": "STRING", + "operation": "COPY" + }, + { + "inputColumn": "STRING", + "name": "STRING", + "inputType": "STRING", + "operation": "COPY" + }, + { + "inputColumn": "INTEGER", + "name": "INTEGER", + "inputType": "INTEGER", + "operation": "COPY" + }, + { + "inputColumn": "BOOLEAN", + "name": "BOOLEAN", + "inputType": "BOOLEAN", + "operation": "COPY" + }, + { + "inputColumn": "REAL", + "name": "REAL", + "inputType": "REAL", + "operation": "COPY" + }, + { + "inputColumn": "DECIMAL", + "name": "DECIMAL", + "inputType": "DECIMAL", + "operation": "COPY" + }, + { + "inputColumn": "MONEY", + "name": "MONEY", + "inputType": "MONEY", + "operation": "COPY" + }, + { + "inputColumn": "DATE", + "name": "DATE", + "inputType": "DATE", + "operation": "COPY" + }, + { + "inputColumn": "DATE_RANGE", + "name": "DATE_RANGE", + "inputType": "DATE_RANGE", + "operation": "COPY" + } + ], + "primary": { + "inputColumn": "id", + "inputType": "STRING", + "name": "id", + "operation": "COPY", + "required": true + }, + "sourceFile": "data.csv" + } + ], + "label": "table", + "name": "table", + "table": "table" +} \ No newline at end of file diff --git a/frontend/.storybook/main.cjs b/frontend/.storybook/main.cjs deleted file mode 100644 index 6eba201346..0000000000 --- a/frontend/.storybook/main.cjs +++ /dev/null @@ -1,25 +0,0 @@ -const path = require("path"); -const { mergeConfig } = require("vite"); -const toPath = (_path) => path.resolve(path.join(__dirname, _path)); - -module.exports = { - stories: ["../src/**/*.mdx", "../src/**/*.stories.@(js|jsx|ts|tsx)"], - addons: [ - 
"@storybook/addon-links", - "@storybook/addon-essentials", - "@storybook/addon-interactions", - ], - framework: { - name: "@storybook/react-vite", - options: {}, - }, - features: { - storyStoreV7: true, - }, - core: { - disableTelemetry: true, - }, - docs: { - autodocs: true, - }, -}; diff --git a/frontend/.storybook/main.ts b/frontend/.storybook/main.ts new file mode 100644 index 0000000000..5329599c30 --- /dev/null +++ b/frontend/.storybook/main.ts @@ -0,0 +1,18 @@ +import type { StorybookConfig } from "@storybook/react-vite"; + +const config: StorybookConfig = { + stories: ["../src/**/*.mdx", "../src/**/*.stories.@(js|jsx|mjs|ts|tsx)"], + addons: [ + "@storybook/addon-links", + "@storybook/addon-essentials", + "@storybook/addon-interactions", + ], + framework: { + name: "@storybook/react-vite", + options: {}, + }, + core: { + disableTelemetry: true, + }, +}; +export default config; diff --git a/frontend/.storybook/manager.js b/frontend/.storybook/manager.js new file mode 100644 index 0000000000..5128367956 --- /dev/null +++ b/frontend/.storybook/manager.js @@ -0,0 +1,5 @@ +import { addons } from "@storybook/manager-api"; + +addons.setConfig({ + panelPosition: "right", +}); diff --git a/frontend/.storybook/preview.tsx b/frontend/.storybook/preview.tsx index 4b56d0c0aa..90e7ef34a3 100644 --- a/frontend/.storybook/preview.tsx +++ b/frontend/.storybook/preview.tsx @@ -1,4 +1,5 @@ import { ThemeProvider } from "@emotion/react"; +import type { Preview } from "@storybook/react"; import { theme } from "../src/app-theme"; import GlobalStyles from "../src/js/GlobalStyles"; @@ -9,23 +10,26 @@ import translationsDe from "../src/localization/de.json"; i18next.addResourceBundle("de", "translation", translationsDe, true, true); i18next.changeLanguage("de"); -export const parameters = { - actions: { argTypesRegex: "^on[A-Z].*" }, - controls: { - matchers: { - color: /(background|color)$/i, - date: /Date$/, +const Decorator = (Story: any) => ( + + + + + + +); + +const preview: Preview = { + decorators: [Decorator], + parameters: { + actions: { argTypesRegex: "^on[A-Z].*" }, + controls: { + matchers: { + color: /(background|color)$/i, + date: /Date$/i, + }, }, }, }; -export const decorators = [ - (Story) => ( - - - - - - - ), -]; +export default preview; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 24e8a89e3f..7c3e355e2c 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -13,19 +13,19 @@ "@emotion/is-prop-valid": "^1.2.1", "@emotion/react": "^11.11.1", "@emotion/styled": "^11.11.0", - "@fortawesome/fontawesome-svg-core": "^6.4.2", - "@fortawesome/free-regular-svg-icons": "^6.4.2", - "@fortawesome/free-solid-svg-icons": "^6.4.2", - "@fortawesome/react-fontawesome": "^0.2.0", + "@fortawesome/fontawesome-svg-core": "^6.6.0", + "@fortawesome/free-regular-svg-icons": "^6.6.0", + "@fortawesome/free-solid-svg-icons": "^6.6.0", + "@fortawesome/react-fontawesome": "^0.2.2", "@paralleldrive/cuid2": "^2.2.2", "@react-keycloak-fork/web": "^4.0.3", "@tippyjs/react": "^4.2.6", - "@vitejs/plugin-react": "^4.1.1", - "apache-arrow": "^13.0.0", + "@vitejs/plugin-react": "^4.3.2", + "apache-arrow": "^17.0.0", "autoprefixer": "^10.4.19", - "axios": "^1.6.0", + "axios": "^1.7.7", "chance": "^1.1.11", - "chart.js": "^4.4.0", + "chart.js": "^4.4.4", "compression": "^1.7.4", "date-fns": "^2.30.0", "downshift": "^7.4.1", @@ -41,28 +41,28 @@ "nodemon": "^3.0.1", "postcss": "^8.4.38", "prettier-plugin-organize-imports": "^3.2.3", - "rc-table": "^7.35.2", - "react": "^18.2.0", 
+ "rc-table": "^7.48.0", + "react": "^18.3.1", "react-chartjs-2": "^5.2.0", "react-datepicker": "^4.21.0", "react-dnd": "^16.0.1", "react-dnd-html5-backend": "^16.0.1", "react-dnd-multi-backend": "^8.0.3", "react-dnd-touch-backend": "^16.0.1", - "react-dom": "^18.2.0", + "react-dom": "^18.3.1", "react-error-boundary": "^3.1.4", "react-highlight-words": "^0.20.0", - "react-hook-form": "^7.48.2", - "react-hotkeys-hook": "^4.4.1", + "react-hook-form": "^7.53.0", + "react-hotkeys-hook": "^4.5.1", "react-i18next": "^12.2.0", "react-list": "^0.8.16", "react-markdown": "^8.0.0", "react-merge-refs": "^2.1.1", - "react-number-format": "^5.3.1", + "react-number-format": "^5.4.2", "react-redux": "^8.1.3", "react-resizable-panels": "^0.0.55", - "react-router-dom": "^6.18.0", - "react-window": "^1.8.9", + "react-router-dom": "^6.26.2", + "react-window": "^1.8.10", "redux": "^4.1.2", "redux-devtools-extension": "^2.13.9", "remark-flexible-markers": "^1.0.3", @@ -70,16 +70,16 @@ "resize-observer-polyfill": "^1.5.1", "tailwindcss": "^3.4.3", "typesafe-actions": "^5.1.0", - "vite": "^4.5.0" + "vite": "^5.4.8" }, "devDependencies": { "@babel/core": "^7.23.2", - "@storybook/addon-actions": "7.5.3", - "@storybook/addon-essentials": "7.5.3", - "@storybook/addon-interactions": "7.5.3", - "@storybook/addon-links": "7.5.3", - "@storybook/react": "7.5.3", - "@storybook/react-vite": "7.5.3", + "@storybook/addon-actions": "8.3.4", + "@storybook/addon-essentials": "8.3.4", + "@storybook/addon-interactions": "8.3.4", + "@storybook/addon-links": "8.3.4", + "@storybook/react": "8.3.4", + "@storybook/react-vite": "8.3.4", "@storybook/testing-library": "^0.2.2", "@swc/core": "^1.3.96", "@testing-library/react": "^14.0.0", @@ -95,13 +95,13 @@ "@types/mustache": "^4.2.4", "@types/node": "^18.15.3", "@types/papaparse": "^5.3.10", - "@types/react": "^18.2.79", + "@types/react": "^18.3.10", "@types/react-datepicker": "^4.19.1", - "@types/react-dom": "^18.2.14", + "@types/react-dom": "^18.3.0", "@types/react-highlight-words": "^0.16.6", "@types/react-list": "^0.8.9", "@types/react-router-dom": "^5.3.3", - "@types/react-window": "^1.8.7", + "@types/react-window": "^1.8.8", "@types/redux": "^3.6.0", "@typescript-eslint/eslint-plugin": "^6.10.0", "@typescript-eslint/parser": "^6.10.0", @@ -114,7 +114,7 @@ "jest-environment-jsdom": "^29.7.0", "papaparse": "^5.4.1", "prettier": "^3.0.3", - "storybook": "7.5.3", + "storybook": "8.3.4", "tailwind-styled-components": "^2.2.0", "terser": "^5.24.0", "ts-jest": "^29.1.1", @@ -153,6 +153,12 @@ "node": ">=0.10.0" } }, + "node_modules/@adobe/css-tools": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.0.tgz", + "integrity": "sha512-Ff9+ksdQQB3rMncgqDK78uLznstjyfIf2Arnh22pW8kBpLs6rpKDwgnZT46hin5Hl1WzazzK64DOrhSwYpS7bQ==", + "dev": true + }, "node_modules/@alloc/quick-lru": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", @@ -175,89 +181,41 @@ "node": ">=6.0.0" } }, - "node_modules/@aw-web-design/x-default-browser": { - "version": "1.4.126", - "dev": true, - "license": "MIT", - "dependencies": { - "default-browser-id": "3.0.0" - }, - "bin": { - "x-default-browser": "bin/x-default-browser.js" - } - }, "node_modules/@babel/code-frame": { - "version": "7.22.13", - "license": "MIT", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", + "integrity": 
"sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", "dependencies": { - "@babel/highlight": "^7.22.13", - "chalk": "^2.4.2" + "@babel/highlight": "^7.24.7", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/ansi-styles": { - "version": "3.2.1", - "license": "MIT", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/chalk": { - "version": "2.4.2", - "license": "MIT", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/color-convert": { - "version": "1.9.3", - "license": "MIT", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/code-frame/node_modules/color-name": { - "version": "1.1.3", - "license": "MIT" - }, - "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { - "version": "1.0.5", - "license": "MIT", - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/@babel/compat-data": { - "version": "7.23.2", - "license": "MIT", + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.4.tgz", + "integrity": "sha512-+LGRog6RAsCJrrrg/IO6LGmpphNe5DiK30dGjCoxxeGv49B10/3XYGxPsAwrDlMFcFEvdAUavDT8r9k/hSyQqQ==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.23.2", - "license": "MIT", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.2.tgz", + "integrity": "sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.22.13", - "@babel/generator": "^7.23.0", - "@babel/helper-compilation-targets": "^7.22.15", - "@babel/helper-module-transforms": "^7.23.0", - "@babel/helpers": "^7.23.2", - "@babel/parser": "^7.23.0", - "@babel/template": "^7.22.15", - "@babel/traverse": "^7.23.2", - "@babel/types": "^7.23.0", + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-module-transforms": "^7.25.2", + "@babel/helpers": "^7.25.0", + "@babel/parser": "^7.25.0", + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.2", + "@babel/types": "^7.25.2", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -280,47 +238,27 @@ } }, "node_modules/@babel/generator": { - "version": "7.23.0", - "license": "MIT", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.6.tgz", + "integrity": "sha512-VPC82gr1seXOpkjAAKoLhP50vx4vGNlF4msF64dSFq1P8RfB+QAuJWGHPXXPc8QyfVWwwB/TNNU4+ayZmHNbZw==", "dependencies": { - "@babel/types": "^7.23.0", - "@jridgewell/gen-mapping": "^0.3.2", - "@jridgewell/trace-mapping": "^0.3.17", + "@babel/types": "^7.25.6", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^2.5.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { - "version": "7.22.15", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": 
"^7.22.15" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.22.15", - "license": "MIT", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz", + "integrity": "sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==", "dependencies": { - "@babel/compat-data": "^7.22.9", - "@babel/helper-validator-option": "^7.22.15", - "browserslist": "^4.21.9", + "@babel/compat-data": "^7.25.2", + "@babel/helper-validator-option": "^7.24.8", + "browserslist": "^4.23.1", "lru-cache": "^5.1.1", "semver": "^6.3.1" }, @@ -330,149 +268,41 @@ }, "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { "version": "5.1.1", - "license": "ISC", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dependencies": { "yallist": "^3.0.2" } }, "node_modules/@babel/helper-compilation-targets/node_modules/semver": { "version": "6.3.1", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { - "version": "3.1.1", - "license": "ISC" - }, - "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.22.15", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-function-name": "^7.22.5", - "@babel/helper-member-expression-to-functions": "^7.22.15", - "@babel/helper-optimise-call-expression": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.9", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.6", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-create-class-features-plugin/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin": { - "version": "7.22.15", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "regexpu-core": "^5.3.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } }, - "node_modules/@babel/helper-define-polyfill-provider": { - "version": "0.4.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.22.6", - "@babel/helper-plugin-utils": "^7.22.5", - "debug": "^4.1.1", - "lodash.debounce": "^4.0.8", - "resolve": "^1.14.2" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.22.20", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.23.0", - "license": 
"MIT", - "dependencies": { - "@babel/template": "^7.22.15", - "@babel/types": "^7.23.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.22.5", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.23.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.23.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-module-imports": { - "version": "7.22.15", - "license": "MIT", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", + "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", "dependencies": { - "@babel/types": "^7.22.15" + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.23.0", - "license": "MIT", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz", + "integrity": "sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==", "dependencies": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-module-imports": "^7.22.15", - "@babel/helper-simple-access": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/helper-validator-identifier": "^7.22.20" + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-simple-access": "^7.24.7", + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.2" }, "engines": { "node": ">=6.9.0" @@ -481,140 +311,71 @@ "@babel/core": "^7.0.0" } }, - "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.22.5", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.22.20", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-wrap-function": "^7.22.20" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-replace-supers": { - "version": "7.22.20", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-member-expression-to-functions": "^7.22.15", - "@babel/helper-optimise-call-expression": "^7.22.5" - }, + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz", + "integrity": "sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==", "engines": { "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-simple-access": { - "version": "7.22.5", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.22.5", - "dev": true, - 
"license": "MIT", - "dependencies": { - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.22.6", - "license": "MIT", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", + "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", "dependencies": { - "@babel/types": "^7.22.5" + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.22.5", - "license": "MIT", + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", + "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.22.20", - "license": "MIT", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.22.15", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-wrap-function": { - "version": "7.22.20", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-function-name": "^7.22.5", - "@babel/template": "^7.22.15", - "@babel/types": "^7.22.19" - }, + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz", + "integrity": "sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.23.2", - "license": "MIT", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.6.tgz", + "integrity": "sha512-Xg0tn4HcfTijTwfDwYlvVCl43V6h4KyVVX2aEm4qdO/PC6L2YvzLHFdmxhoeSA3eslcE6+ZVXHgWwopXYLNq4Q==", "dependencies": { - "@babel/template": "^7.22.15", - "@babel/traverse": "^7.23.2", - "@babel/types": "^7.23.0" + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.22.20", - "license": "MIT", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", + "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", "dependencies": { - "@babel/helper-validator-identifier": "^7.22.20", + "@babel/helper-validator-identifier": "^7.24.7", "chalk": "^2.4.2", - "js-tokens": "^4.0.0" + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" @@ -622,7 +383,8 @@ }, "node_modules/@babel/highlight/node_modules/ansi-styles": { "version": "3.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dependencies": { "color-convert": "^1.9.0" }, @@ -632,7 +394,8 @@ }, "node_modules/@babel/highlight/node_modules/chalk": { "version": "2.4.2", - "license": "MIT", 
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -644,25 +407,32 @@ }, "node_modules/@babel/highlight/node_modules/color-convert": { "version": "1.9.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dependencies": { "color-name": "1.1.3" } }, "node_modules/@babel/highlight/node_modules/color-name": { "version": "1.1.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, "node_modules/@babel/highlight/node_modules/escape-string-regexp": { "version": "1.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "engines": { "node": ">=0.8.0" } }, "node_modules/@babel/parser": { - "version": "7.23.0", - "license": "MIT", + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.6.tgz", + "integrity": "sha512-trGdfBdbD0l1ZPmcJ83eNxB9rbEax4ALFTF7fN386TMYbeCQbyme5cOEXQhbGXKebwGaB/J52w1mrklMcbgy6Q==", + "dependencies": { + "@babel/types": "^7.25.6" + }, "bin": { "parser": "bin/babel-parser.js" }, @@ -670,211 +440,46 @@ "node": ">=6.0.0" } }, - "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.22.15", + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" + "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { - "@babel/core": "^7.0.0" + "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.22.15", + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", - "@babel/plugin-transform-optional-chaining": "^7.22.15" - }, - "engines": { - "node": ">=6.9.0" + "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { - "@babel/core": "^7.13.0" + "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-proposal-class-properties": { - "version": "7.18.6", + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.18.6", - "@babel/helper-plugin-utils": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" + "@babel/helper-plugin-utils": "^7.12.13" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-proposal-nullish-coalescing-operator": { - "version": "7.18.6", + "node_modules/@babel/plugin-syntax-import-meta": { + 
"version": "7.10.4", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.18.6", - "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-optional-chaining": { - "version": "7.21.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.20.2", - "@babel/helper-skip-transparent-expression-wrappers": "^7.20.0", - "@babel/plugin-syntax-optional-chaining": "^7.8.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-private-property-in-object": { - "version": "7.21.0-placeholder-for-preset-env.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-async-generators": { - "version": "7.8.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-bigint": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", - "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-class-properties": { - "version": "7.12.13", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.12.13" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-class-static-block": { - "version": "7.14.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-dynamic-import": { - "version": "7.8.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-export-namespace-from": { - "version": "7.8.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.3" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-flow": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-assertions": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-meta": { - "version": 
"7.10.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" + "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -971,20 +576,6 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-syntax-private-property-in-object": { - "version": "7.14.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, "node_modules/@babel/plugin-syntax-top-level-await": { "version": "7.14.5", "dev": true, @@ -1013,27 +604,26 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-syntax-unicode-sets-regex": { - "version": "7.18.6", - "dev": true, - "license": "MIT", + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.24.7.tgz", + "integrity": "sha512-fOPQYbGSgH0HUp4UJO4sMBFjY6DuWq+2i8rixyUMb3CdGixs/gccURvYOAhajBdKDoGajFr3mUq5rH3phtkGzw==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.18.6", - "@babel/helper-plugin-utils": "^7.18.6" + "@babel/helper-plugin-utils": "^7.24.7" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { - "@babel/core": "^7.0.0" + "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.22.5", - "dev": true, - "license": "MIT", + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.24.7.tgz", + "integrity": "sha512-J2z+MWzZHVOemyLweMqngXrgGC42jQ//R0KdxqkIz/OrbVIIlhFI3WigZ5fO+nwFvBlncr4MGapd8vTyc7RPNQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.7" }, "engines": { "node": ">=6.9.0" @@ -1042,985 +632,822 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-async-generator-functions": { + "node_modules/@babel/runtime": { "version": "7.23.2", - "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-remap-async-to-generator": "^7.22.20", - "@babel/plugin-syntax-async-generators": "^7.8.4" + "regenerator-runtime": "^0.14.0" }, "engines": { "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.22.5", - "dev": true, - "license": "MIT", + "node_modules/@babel/template": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", + "integrity": "sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==", "dependencies": { - "@babel/helper-module-imports": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-remap-async-to-generator": "^7.22.5" + "@babel/code-frame": "^7.24.7", + "@babel/parser": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "node_modules/@babel/traverse": { + "version": "7.25.6", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.6.tgz", + "integrity": "sha512-9Vrcx5ZW6UwK5tvqsj0nGpp/XzqthkT0dqIc9g1AdtygFToNtTF67XzYS//dm+SAK9cp3B9R4ZO/46p63SCjlQ==", + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.6", + "@babel/parser": "^7.25.6", + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.6", + "debug": "^4.3.1", + "globals": "^11.1.0" }, "engines": { "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.23.0", - "dev": true, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=4" } }, - "node_modules/@babel/plugin-transform-class-properties": { - "version": "7.22.5", - "dev": true, - "license": "MIT", + "node_modules/@babel/types": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.6.tgz", + "integrity": "sha512-/l42B1qxpG6RdfYf343Uw1vmDjeNhneUXtzhojE7pDgfpEypmRhI6j1kr17XCVv4Cgl9HdAiQY2x0GwKm7rWCw==", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-string-parser": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", + "to-fast-properties": "^2.0.0" }, "engines": { "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-class-static-block": { - "version": "7.22.11", + "node_modules/@base2/pretty-print-object": { + "version": "1.0.1", "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.22.11", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-class-static-block": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.12.0" - } + "license": "BSD-2-Clause" }, - "node_modules/@babel/plugin-transform-classes": { - "version": "7.22.15", - "dev": true, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "devOptional": true, "license": "MIT", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-compilation-targets": "^7.22.15", - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-function-name": "^7.22.5", - "@babel/helper-optimise-call-expression": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.9", - "@babel/helper-split-export-declaration": "^7.22.6", - "globals": "^11.1.0" - }, - "engines": { - "node": ">=6.9.0" + "@jridgewell/trace-mapping": "0.3.9" }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-classes/node_modules/globals": { - "version": "11.12.0", - "dev": true, - "license": "MIT", "engines": { - "node": ">=4" + "node": ">=12" } }, - "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.22.5", - "dev": true, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "devOptional": true, 
"license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/template": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" } }, - "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.23.0", - "dev": true, + "node_modules/@emotion/babel-plugin": { + "version": "11.11.0", "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "@babel/helper-module-imports": "^7.16.7", + "@babel/runtime": "^7.18.3", + "@emotion/hash": "^0.9.1", + "@emotion/memoize": "^0.8.1", + "@emotion/serialize": "^1.1.2", + "babel-plugin-macros": "^3.1.0", + "convert-source-map": "^1.5.0", + "escape-string-regexp": "^4.0.0", + "find-root": "^1.1.0", + "source-map": "^0.5.7", + "stylis": "4.2.0" + } + }, + "node_modules/@emotion/babel-plugin/node_modules/convert-source-map": { + "version": "1.9.0", + "license": "MIT" + }, + "node_modules/@emotion/babel-plugin/node_modules/source-map": { + "version": "0.5.7", + "license": "BSD-3-Clause", "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=0.10.0" } }, - "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.22.5", - "dev": true, + "node_modules/@emotion/cache": { + "version": "11.11.0", "license": "MIT", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "@emotion/memoize": "^0.8.1", + "@emotion/sheet": "^1.2.2", + "@emotion/utils": "^1.2.1", + "@emotion/weak-memoize": "^0.3.1", + "stylis": "4.2.0" } }, - "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.22.5", - "dev": true, + "node_modules/@emotion/hash": { + "version": "0.9.1", + "license": "MIT" + }, + "node_modules/@emotion/is-prop-valid": { + "version": "1.2.1", "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "@emotion/memoize": "^0.8.1" } }, - "node_modules/@babel/plugin-transform-dynamic-import": { - "version": "7.22.11", - "dev": true, + "node_modules/@emotion/memoize": { + "version": "0.8.1", + "license": "MIT" + }, + "node_modules/@emotion/react": { + "version": "11.11.1", "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-dynamic-import": "^7.8.3" - }, - "engines": { - "node": ">=6.9.0" + "@babel/runtime": "^7.18.3", + "@emotion/babel-plugin": "^11.11.0", + "@emotion/cache": "^11.11.0", + "@emotion/serialize": "^1.1.2", + "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1", + "@emotion/utils": "^1.2.1", + "@emotion/weak-memoize": "^0.3.1", + "hoist-non-react-statics": "^3.3.1" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "react": ">=16.8.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } } }, - "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.22.5", - "dev": true, + "node_modules/@emotion/serialize": { + "version": "1.1.2", "license": "MIT", "dependencies": { - "@babel/helper-builder-binary-assignment-operator-visitor": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "@emotion/hash": "^0.9.1", 
+ "@emotion/memoize": "^0.8.1", + "@emotion/unitless": "^0.8.1", + "@emotion/utils": "^1.2.1", + "csstype": "^3.0.2" } }, - "node_modules/@babel/plugin-transform-export-namespace-from": { - "version": "7.22.11", - "dev": true, + "node_modules/@emotion/sheet": { + "version": "1.2.2", + "license": "MIT" + }, + "node_modules/@emotion/styled": { + "version": "11.11.0", "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-export-namespace-from": "^7.8.3" - }, - "engines": { - "node": ">=6.9.0" + "@babel/runtime": "^7.18.3", + "@emotion/babel-plugin": "^11.11.0", + "@emotion/is-prop-valid": "^1.2.1", + "@emotion/serialize": "^1.1.2", + "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1", + "@emotion/utils": "^1.2.1" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "@emotion/react": "^11.0.0-rc.0", + "react": ">=16.8.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } } }, - "node_modules/@babel/plugin-transform-flow-strip-types": { - "version": "7.22.5", - "dev": true, + "node_modules/@emotion/unitless": { + "version": "0.8.1", + "license": "MIT" + }, + "node_modules/@emotion/use-insertion-effect-with-fallbacks": { + "version": "1.0.1", "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-flow": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "react": ">=16.8.0" } }, - "node_modules/@babel/plugin-transform-for-of": { - "version": "7.22.15", + "node_modules/@emotion/utils": { + "version": "1.2.1", + "license": "MIT" + }, + "node_modules/@emotion/weak-memoize": { + "version": "0.3.1", + "license": "MIT" + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz", + "integrity": "sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==", + "cpu": [ + "ppc64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "aix" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-function-name": { - "version": "7.22.5", + "node_modules/@esbuild/android-arm": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.1.tgz", + "integrity": "sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==", + "cpu": [ + "arm" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.22.5", - "@babel/helper-function-name": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "android" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-json-strings": { - "version": "7.22.11", + "node_modules/@esbuild/android-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.1.tgz", + "integrity": "sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-json-strings": "^7.8.3" - }, + "optional": true, + "os": [ 
+ "android" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-literals": { - "version": "7.22.5", + "node_modules/@esbuild/android-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.1.tgz", + "integrity": "sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "android" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-logical-assignment-operators": { - "version": "7.22.11", + "node_modules/@esbuild/darwin-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.1.tgz", + "integrity": "sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" - }, + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.22.5", + "node_modules/@esbuild/darwin-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.1.tgz", + "integrity": "sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.23.0", + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.1.tgz", + "integrity": "sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.23.0", - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "freebsd" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.23.0", + "node_modules/@esbuild/freebsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.1.tgz", + "integrity": "sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.23.0", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-simple-access": "^7.22.5" - }, + "optional": true, + "os": [ + "freebsd" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - 
"node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.23.0", + "node_modules/@esbuild/linux-arm": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.1.tgz", + "integrity": "sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==", + "cpu": [ + "arm" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-module-transforms": "^7.23.0", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.20" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.22.5", + "node_modules/@esbuild/linux-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.1.tgz", + "integrity": "sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.22.5", + "node_modules/@esbuild/linux-ia32": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.1.tgz", + "integrity": "sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==", + "cpu": [ + "ia32" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-new-target": { - "version": "7.22.5", + "node_modules/@esbuild/linux-loong64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.1.tgz", + "integrity": "sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==", + "cpu": [ + "loong64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { - "version": "7.22.11", + "node_modules/@esbuild/linux-mips64el": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.1.tgz", + "integrity": "sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==", + "cpu": [ + "mips64el" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - 
"node_modules/@babel/plugin-transform-numeric-separator": { - "version": "7.22.11", + "node_modules/@esbuild/linux-ppc64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.1.tgz", + "integrity": "sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==", + "cpu": [ + "ppc64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-numeric-separator": "^7.10.4" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-object-rest-spread": { - "version": "7.22.15", + "node_modules/@esbuild/linux-riscv64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.1.tgz", + "integrity": "sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==", + "cpu": [ + "riscv64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.22.9", - "@babel/helper-compilation-targets": "^7.22.15", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-transform-parameters": "^7.22.15" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-object-super": { - "version": "7.22.5", + "node_modules/@esbuild/linux-s390x": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.1.tgz", + "integrity": "sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==", + "cpu": [ + "s390x" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.5" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-optional-catch-binding": { - "version": "7.22.11", + "node_modules/@esbuild/linux-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.23.1.tgz", + "integrity": "sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-optional-chaining": { - "version": "7.23.0", + "node_modules/@esbuild/netbsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz", + "integrity": "sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", - "@babel/plugin-syntax-optional-chaining": "^7.8.3" - }, + "optional": true, + "os": [ + "netbsd" + ], "engines": { - 
"node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-parameters": { - "version": "7.22.15", + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.1.tgz", + "integrity": "sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "openbsd" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-private-methods": { - "version": "7.22.5", + "node_modules/@esbuild/openbsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.1.tgz", + "integrity": "sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "openbsd" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-private-property-in-object": { - "version": "7.22.11", + "node_modules/@esbuild/sunos-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz", + "integrity": "sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-create-class-features-plugin": "^7.22.11", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-private-property-in-object": "^7.14.5" - }, + "optional": true, + "os": [ + "sunos" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.22.5", + "node_modules/@esbuild/win32-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.1.tgz", + "integrity": "sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-react-jsx-self": { - "version": "7.22.5", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.1.tgz", + "integrity": "sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - 
"node_modules/@babel/plugin-transform-react-jsx-source": { - "version": "7.22.5", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "node_modules/@esbuild/win32-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.1.tgz", + "integrity": "sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=18" } }, - "node_modules/@babel/plugin-transform-regenerator": { - "version": "7.22.10", - "dev": true, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "regenerator-transform": "^0.15.2" + "eslint-visitor-keys": "^3.3.0" }, "engines": { - "node": ">=6.9.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, - "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.22.5", - "dev": true, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } }, - "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.22.5", - "dev": true, + "node_modules/@eslint/eslintrc": { + "version": "2.1.3", "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" }, "engines": { - "node": ">=6.9.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "funding": { + "url": "https://opencollective.com/eslint" } }, - "node_modules/@babel/plugin-transform-spread": { - "version": "7.22.5", - "dev": true, + "node_modules/@eslint/js": { + "version": "8.53.0", "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" - }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, + "node_modules/@fortawesome/fontawesome-common-types": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-6.6.0.tgz", + "integrity": "sha512-xyX0X9mc0kyz9plIyryrRbl7ngsA9jz77mCZJsUkLl+ZKs0KWObgaEBoSgQiYWAsSmjz/yjl0F++Got0Mdp4Rw==", "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=6" } }, - "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.22.5", - "dev": true, - "license": "MIT", + "node_modules/@fortawesome/fontawesome-svg-core": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-svg-core/-/fontawesome-svg-core-6.6.0.tgz", + "integrity": 
"sha512-KHwPkCk6oRT4HADE7smhfsKudt9N/9lm6EJ5BVg0tD1yPA5hht837fB87F8pn15D8JfTqQOjhKTktwmLMiD7Kg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@fortawesome/fontawesome-common-types": "6.6.0" }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=6" } }, - "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.22.5", - "dev": true, - "license": "MIT", + "node_modules/@fortawesome/free-regular-svg-icons": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/@fortawesome/free-regular-svg-icons/-/free-regular-svg-icons-6.6.0.tgz", + "integrity": "sha512-Yv9hDzL4aI73BEwSEh20clrY8q/uLxawaQ98lekBx6t9dQKDHcDzzV1p2YtBGTtolYtNqcWdniOnhzB+JPnQEQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@fortawesome/fontawesome-common-types": "6.6.0" }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=6" } }, - "node_modules/@babel/plugin-transform-typescript": { - "version": "7.22.15", - "dev": true, - "license": "MIT", + "node_modules/@fortawesome/free-solid-svg-icons": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-6.6.0.tgz", + "integrity": "sha512-IYv/2skhEDFc2WGUcqvFJkeK39Q+HyPf5GHUrT/l2pKbtgEIv1al1TKd6qStR5OIwQdN1GZP54ci3y4mroJWjA==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-create-class-features-plugin": "^7.22.15", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-typescript": "^7.22.5" + "@fortawesome/fontawesome-common-types": "6.6.0" }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=6" } }, - "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.22.10", - "dev": true, - "license": "MIT", + "node_modules/@fortawesome/react-fontawesome": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/@fortawesome/react-fontawesome/-/react-fontawesome-0.2.2.tgz", + "integrity": "sha512-EnkrprPNqI6SXJl//m29hpaNzOp1bruISWaOiRtkMi/xSvHJlzc2j2JAYS7egxt/EbjSNV/k6Xy0AQI6vB2+1g==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" + "prop-types": "^15.8.1" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "@fortawesome/fontawesome-svg-core": "~1 || ~6", + "react": ">=16.3" } }, - "node_modules/@babel/plugin-transform-unicode-property-regex": { - "version": "7.22.5", - "dev": true, - "license": "MIT", + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.13", + "license": "Apache-2.0", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" + "@humanwhocodes/object-schema": "^2.0.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=10.10.0" } }, - "node_modules/@babel/plugin-transform-unicode-regex": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" - }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "license": "Apache-2.0", "engines": { - "node": ">=6.9.0" + "node": ">=12.22" }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" } }, - 
"node_modules/@babel/plugin-transform-unicode-sets-regex": { - "version": "7.22.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.22.5", - "@babel/helper-plugin-utils": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.1", + "license": "BSD-3-Clause" }, - "node_modules/@babel/preset-env": { - "version": "7.23.2", - "dev": true, - "license": "MIT", + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "license": "ISC", "dependencies": { - "@babel/compat-data": "^7.23.2", - "@babel/helper-compilation-targets": "^7.22.15", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-validator-option": "^7.22.15", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.22.15", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.22.15", - "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", - "@babel/plugin-syntax-async-generators": "^7.8.4", - "@babel/plugin-syntax-class-properties": "^7.12.13", - "@babel/plugin-syntax-class-static-block": "^7.14.5", - "@babel/plugin-syntax-dynamic-import": "^7.8.3", - "@babel/plugin-syntax-export-namespace-from": "^7.8.3", - "@babel/plugin-syntax-import-assertions": "^7.22.5", - "@babel/plugin-syntax-import-attributes": "^7.22.5", - "@babel/plugin-syntax-import-meta": "^7.10.4", - "@babel/plugin-syntax-json-strings": "^7.8.3", - "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", - "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", - "@babel/plugin-syntax-numeric-separator": "^7.10.4", - "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", - "@babel/plugin-syntax-optional-chaining": "^7.8.3", - "@babel/plugin-syntax-private-property-in-object": "^7.14.5", - "@babel/plugin-syntax-top-level-await": "^7.14.5", - "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", - "@babel/plugin-transform-arrow-functions": "^7.22.5", - "@babel/plugin-transform-async-generator-functions": "^7.23.2", - "@babel/plugin-transform-async-to-generator": "^7.22.5", - "@babel/plugin-transform-block-scoped-functions": "^7.22.5", - "@babel/plugin-transform-block-scoping": "^7.23.0", - "@babel/plugin-transform-class-properties": "^7.22.5", - "@babel/plugin-transform-class-static-block": "^7.22.11", - "@babel/plugin-transform-classes": "^7.22.15", - "@babel/plugin-transform-computed-properties": "^7.22.5", - "@babel/plugin-transform-destructuring": "^7.23.0", - "@babel/plugin-transform-dotall-regex": "^7.22.5", - "@babel/plugin-transform-duplicate-keys": "^7.22.5", - "@babel/plugin-transform-dynamic-import": "^7.22.11", - "@babel/plugin-transform-exponentiation-operator": "^7.22.5", - "@babel/plugin-transform-export-namespace-from": "^7.22.11", - "@babel/plugin-transform-for-of": "^7.22.15", - "@babel/plugin-transform-function-name": "^7.22.5", - "@babel/plugin-transform-json-strings": "^7.22.11", - "@babel/plugin-transform-literals": "^7.22.5", - "@babel/plugin-transform-logical-assignment-operators": "^7.22.11", - "@babel/plugin-transform-member-expression-literals": "^7.22.5", - "@babel/plugin-transform-modules-amd": "^7.23.0", - "@babel/plugin-transform-modules-commonjs": "^7.23.0", - "@babel/plugin-transform-modules-systemjs": "^7.23.0", - "@babel/plugin-transform-modules-umd": "^7.22.5", - 
"@babel/plugin-transform-named-capturing-groups-regex": "^7.22.5", - "@babel/plugin-transform-new-target": "^7.22.5", - "@babel/plugin-transform-nullish-coalescing-operator": "^7.22.11", - "@babel/plugin-transform-numeric-separator": "^7.22.11", - "@babel/plugin-transform-object-rest-spread": "^7.22.15", - "@babel/plugin-transform-object-super": "^7.22.5", - "@babel/plugin-transform-optional-catch-binding": "^7.22.11", - "@babel/plugin-transform-optional-chaining": "^7.23.0", - "@babel/plugin-transform-parameters": "^7.22.15", - "@babel/plugin-transform-private-methods": "^7.22.5", - "@babel/plugin-transform-private-property-in-object": "^7.22.11", - "@babel/plugin-transform-property-literals": "^7.22.5", - "@babel/plugin-transform-regenerator": "^7.22.10", - "@babel/plugin-transform-reserved-words": "^7.22.5", - "@babel/plugin-transform-shorthand-properties": "^7.22.5", - "@babel/plugin-transform-spread": "^7.22.5", - "@babel/plugin-transform-sticky-regex": "^7.22.5", - "@babel/plugin-transform-template-literals": "^7.22.5", - "@babel/plugin-transform-typeof-symbol": "^7.22.5", - "@babel/plugin-transform-unicode-escapes": "^7.22.10", - "@babel/plugin-transform-unicode-property-regex": "^7.22.5", - "@babel/plugin-transform-unicode-regex": "^7.22.5", - "@babel/plugin-transform-unicode-sets-regex": "^7.22.5", - "@babel/preset-modules": "0.1.6-no-external-plugins", - "@babel/types": "^7.23.0", - "babel-plugin-polyfill-corejs2": "^0.4.6", - "babel-plugin-polyfill-corejs3": "^0.8.5", - "babel-plugin-polyfill-regenerator": "^0.5.3", - "core-js-compat": "^3.31.0", - "semver": "^6.3.1" + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=12" } }, - "node_modules/@babel/preset-env/node_modules/semver": { - "version": "6.3.1", + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", "dev": true, "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/preset-flow": { - "version": "7.22.15", - "dev": true, - "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-validator-option": "^7.22.15", - "@babel/plugin-transform-flow-strip-types": "^7.22.5" + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-modules": { - "version": "0.1.6-no-external-plugins", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/types": "^7.4.4", - "esutils": "^2.0.2" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" + "node": ">=8" } }, - "node_modules/@babel/preset-typescript": { - "version": "7.23.2", + "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { + "version": "1.0.10", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-validator-option": "^7.22.15", - "@babel/plugin-syntax-jsx": "^7.22.5", - "@babel/plugin-transform-modules-commonjs": "^7.23.0", - "@babel/plugin-transform-typescript": "^7.22.15" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "sprintf-js": 
"~1.0.2" } }, - "node_modules/@babel/register": { - "version": "7.22.15", + "node_modules/@istanbuljs/load-nyc-config/node_modules/camelcase": { + "version": "5.3.1", "dev": true, "license": "MIT", - "dependencies": { - "clone-deep": "^4.0.1", - "find-cache-dir": "^2.0.0", - "make-dir": "^2.1.0", - "pirates": "^4.0.5", - "source-map-support": "^0.5.16" - }, "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=6" } }, - "node_modules/@babel/register/node_modules/find-cache-dir": { - "version": "2.1.0", + "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { + "version": "4.1.0", "dev": true, "license": "MIT", "dependencies": { - "commondir": "^1.0.1", - "make-dir": "^2.0.0", - "pkg-dir": "^3.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, - "node_modules/@babel/register/node_modules/find-up": { - "version": "3.0.0", + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.1", "dev": true, "license": "MIT", "dependencies": { - "locate-path": "^3.0.0" + "argparse": "^1.0.7", + "esprima": "^4.0.0" }, - "engines": { - "node": ">=6" + "bin": { + "js-yaml": "bin/js-yaml.js" } }, - "node_modules/@babel/register/node_modules/locate-path": { - "version": "3.0.0", + "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { + "version": "5.0.0", "dev": true, "license": "MIT", "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, - "node_modules/@babel/register/node_modules/p-limit": { + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { "version": "2.3.0", "dev": true, "license": "MIT", @@ -2034,2964 +1461,1333 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@babel/register/node_modules/p-locate": { - "version": "3.0.0", + "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { + "version": "4.1.0", "dev": true, "license": "MIT", "dependencies": { - "p-limit": "^2.0.0" + "p-limit": "^2.2.0" }, "engines": { - "node": ">=6" + "node": ">=8" } }, - "node_modules/@babel/register/node_modules/path-exists": { - "version": "3.0.0", + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", "dev": true, "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, - "node_modules/@babel/register/node_modules/pkg-dir": { - "version": "3.0.0", + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", "dev": true, - "license": "MIT", "dependencies": { - "find-up": "^3.0.0" + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" }, "engines": { - "node": ">=6" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@babel/regjsgen": { - "version": "0.8.0", - "dev": true, - "license": "MIT" - }, - "node_modules/@babel/runtime": { - "version": "7.23.2", - "license": "MIT", + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, "dependencies": { - "regenerator-runtime": "^0.14.0" + "@jest/console": "^29.7.0", + 
"@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" }, "engines": { - "node": ">=6.9.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } } }, - "node_modules/@babel/template": { - "version": "7.22.15", - "license": "MIT", + "node_modules/@jest/core/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "dependencies": { - "@babel/code-frame": "^7.22.13", - "@babel/parser": "^7.22.15", - "@babel/types": "^7.22.15" + "ansi-regex": "^5.0.1" }, "engines": { - "node": ">=6.9.0" + "node": ">=8" } }, - "node_modules/@babel/traverse": { - "version": "7.23.2", - "license": "MIT", + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, "dependencies": { - "@babel/code-frame": "^7.22.13", - "@babel/generator": "^7.23.0", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.23.0", - "@babel/types": "^7.23.0", - "debug": "^4.1.0", - "globals": "^11.1.0" + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" }, "engines": { - "node": ">=6.9.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@babel/traverse/node_modules/globals": { - "version": "11.12.0", - "license": "MIT", + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, "engines": { - "node": ">=4" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@babel/types": { - "version": "7.23.0", - "license": "MIT", + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, "dependencies": { - "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.20", - "to-fast-properties": "^2.0.0" + "jest-get-type": 
"^29.6.3" }, "engines": { - "node": ">=6.9.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@base2/pretty-print-object": { - "version": "1.0.1", + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/@bcoe/v8-coverage": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", - "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", - "dev": true + "dependencies": { + "@jest/types": "^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } }, - "node_modules/@colors/colors": { - "version": "1.5.0", + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", "dev": true, - "license": "MIT", - "optional": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, "engines": { - "node": ">=0.1.90" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@cspotcode/source-map-support": { - "version": "0.8.1", - "devOptional": true, - "license": "MIT", + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, "dependencies": { - "@jridgewell/trace-mapping": "0.3.9" + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" }, "engines": { - "node": ">=12" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } } }, - "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.9", - "devOptional": true, - "license": "MIT", + "node_modules/@jest/reporters/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" + 
"ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" } }, - "node_modules/@discoveryjs/json-ext": { - "version": "0.5.7", + "node_modules/@jest/schemas": { + "version": "29.6.3", "dev": true, "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, "engines": { - "node": ">=10.0.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@emotion/babel-plugin": { - "version": "11.11.0", - "license": "MIT", + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, "dependencies": { - "@babel/helper-module-imports": "^7.16.7", - "@babel/runtime": "^7.18.3", - "@emotion/hash": "^0.9.1", - "@emotion/memoize": "^0.8.1", - "@emotion/serialize": "^1.1.2", - "babel-plugin-macros": "^3.1.0", - "convert-source-map": "^1.5.0", - "escape-string-regexp": "^4.0.0", - "find-root": "^1.1.0", - "source-map": "^0.5.7", - "stylis": "4.2.0" + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@emotion/babel-plugin/node_modules/convert-source-map": { - "version": "1.9.0", - "license": "MIT" - }, - "node_modules/@emotion/babel-plugin/node_modules/source-map": { - "version": "0.5.7", - "license": "BSD-3-Clause", + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@emotion/cache": { - "version": "11.11.0", - "license": "MIT", + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, "dependencies": { - "@emotion/memoize": "^0.8.1", - "@emotion/sheet": "^1.2.2", - "@emotion/utils": "^1.2.1", - "@emotion/weak-memoize": "^0.3.1", - "stylis": "4.2.0" + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@emotion/hash": { - "version": "0.9.1", - "license": "MIT" - }, - "node_modules/@emotion/is-prop-valid": { - "version": "1.2.1", + "node_modules/@jest/transform": { + "version": "29.7.0", + "dev": true, "license": "MIT", "dependencies": { - "@emotion/memoize": "^0.8.1" + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - 
"node_modules/@emotion/memoize": { - "version": "0.8.1", - "license": "MIT" + "node_modules/@jest/transform/node_modules/write-file-atomic": { + "version": "4.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } }, - "node_modules/@emotion/react": { - "version": "11.11.1", + "node_modules/@jest/types": { + "version": "29.6.3", + "dev": true, "license": "MIT", "dependencies": { - "@babel/runtime": "^7.18.3", - "@emotion/babel-plugin": "^11.11.0", - "@emotion/cache": "^11.11.0", - "@emotion/serialize": "^1.1.2", - "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1", - "@emotion/utils": "^1.2.1", - "@emotion/weak-memoize": "^0.3.1", - "hoist-non-react-statics": "^3.3.1" - }, - "peerDependencies": { - "react": ">=16.8.0" + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/@emotion/serialize": { - "version": "1.1.2", + "node_modules/@jest/types/node_modules/@types/node": { + "version": "20.8.10", + "dev": true, "license": "MIT", "dependencies": { - "@emotion/hash": "^0.9.1", - "@emotion/memoize": "^0.8.1", - "@emotion/unitless": "^0.8.1", - "@emotion/utils": "^1.2.1", - "csstype": "^3.0.2" + "undici-types": "~5.26.4" } }, - "node_modules/@emotion/sheet": { - "version": "1.2.2", - "license": "MIT" - }, - "node_modules/@emotion/styled": { - "version": "11.11.0", + "node_modules/@joshwooding/vite-plugin-react-docgen-typescript": { + "version": "0.3.0", + "dev": true, "license": "MIT", "dependencies": { - "@babel/runtime": "^7.18.3", - "@emotion/babel-plugin": "^11.11.0", - "@emotion/is-prop-valid": "^1.2.1", - "@emotion/serialize": "^1.1.2", - "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1", - "@emotion/utils": "^1.2.1" + "glob": "^7.2.0", + "glob-promise": "^4.2.0", + "magic-string": "^0.27.0", + "react-docgen-typescript": "^2.2.2" }, "peerDependencies": { - "@emotion/react": "^11.0.0-rc.0", - "react": ">=16.8.0" + "typescript": ">= 4.3.x", + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0" }, "peerDependenciesMeta": { - "@types/react": { + "typescript": { "optional": true } } }, - "node_modules/@emotion/unitless": { - "version": "0.8.1", - "license": "MIT" + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } }, - "node_modules/@emotion/use-insertion-effect-with-fallbacks": { - "version": "1.0.1", + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.1", "license": "MIT", - "peerDependencies": { - "react": ">=16.8.0" + "engines": { + "node": ">=6.0.0" } }, - "node_modules/@emotion/utils": { + "node_modules/@jridgewell/set-array": { "version": "1.2.1", - "license": "MIT" - }, - "node_modules/@emotion/weak-memoize": { - "version": "0.3.1", - "license": "MIT" - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.18.20", - "cpu": [ - "arm64" - ], - "license": "MIT", - 
"optional": true, - "os": [ - "darwin" - ], + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", "engines": { - "node": ">=12" + "node": ">=6.0.0" } }, - "node_modules/@eslint-community/eslint-utils": { - "version": "4.4.0", + "node_modules/@jridgewell/source-map": { + "version": "0.3.5", + "devOptional": true, "license": "MIT", "dependencies": { - "eslint-visitor-keys": "^3.3.0" + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@kurkle/color": { + "version": "0.3.2", + "license": "MIT" + }, + "node_modules/@mdx-js/react": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.0.1.tgz", + "integrity": "sha512-9ZrPIU4MGf6et1m1ov3zKf+q9+deetI51zprKB1D/z3NOb+rUxxtEl3mCjW5wTGh6VhRdwPueh1oRzi6ezkA8A==", + "dev": true, + "dependencies": { + "@types/mdx": "^2.0.0" }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + "@types/react": ">=16", + "react": ">=16" } }, - "node_modules/@eslint-community/regexpp": { - "version": "4.10.0", + "node_modules/@noble/hashes": { + "version": "1.3.2", "license": "MIT", "engines": { - "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + "node": ">= 16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" } }, - "node_modules/@eslint/eslintrc": { - "version": "2.1.3", + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", "license": "MIT", "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.6.0", - "globals": "^13.19.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" + "node": ">= 8" } }, - "node_modules/@eslint/js": { - "version": "8.53.0", + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", "license": "MIT", "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": ">= 8" } }, - "node_modules/@fal-works/esbuild-plugin-global-externals": { - "version": "2.1.2", - "dev": true, - "license": "MIT" - }, - "node_modules/@floating-ui/core": { - "version": "1.5.0", - "dev": true, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", "license": "MIT", "dependencies": { - "@floating-ui/utils": "^0.1.3" + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" } }, - "node_modules/@floating-ui/dom": { - "version": "1.5.3", - "dev": true, + 
"node_modules/@paralleldrive/cuid2": { + "version": "2.2.2", "license": "MIT", "dependencies": { - "@floating-ui/core": "^1.4.2", - "@floating-ui/utils": "^0.1.3" + "@noble/hashes": "^1.1.5" } }, - "node_modules/@floating-ui/react-dom": { - "version": "2.0.2", - "dev": true, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", "license": "MIT", - "dependencies": { - "@floating-ui/dom": "^1.5.1" - }, - "peerDependencies": { - "react": ">=16.8.0", - "react-dom": ">=16.8.0" + "optional": true, + "engines": { + "node": ">=14" } }, - "node_modules/@floating-ui/utils": { - "version": "0.1.6", - "dev": true, - "license": "MIT" - }, - "node_modules/@fortawesome/fontawesome-common-types": { - "version": "6.4.2", - "hasInstallScript": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/fontawesome-svg-core": { - "version": "6.4.2", - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "@fortawesome/fontawesome-common-types": "6.4.2" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/free-regular-svg-icons": { - "version": "6.4.2", - "hasInstallScript": true, - "license": "(CC-BY-4.0 AND MIT)", - "dependencies": { - "@fortawesome/fontawesome-common-types": "6.4.2" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/free-solid-svg-icons": { - "version": "6.4.2", - "hasInstallScript": true, - "license": "(CC-BY-4.0 AND MIT)", - "dependencies": { - "@fortawesome/fontawesome-common-types": "6.4.2" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/react-fontawesome": { - "version": "0.2.0", - "license": "MIT", - "dependencies": { - "prop-types": "^15.8.1" - }, - "peerDependencies": { - "@fortawesome/fontawesome-svg-core": "~1 || ~6", - "react": ">=16.3" - } - }, - "node_modules/@humanwhocodes/config-array": { - "version": "0.11.13", - "license": "Apache-2.0", - "dependencies": { - "@humanwhocodes/object-schema": "^2.0.1", - "debug": "^4.1.1", - "minimatch": "^3.0.5" - }, - "engines": { - "node": ">=10.10.0" - } - }, - "node_modules/@humanwhocodes/module-importer": { - "version": "1.0.1", - "license": "Apache-2.0", - "engines": { - "node": ">=12.22" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@humanwhocodes/object-schema": { - "version": "2.0.1", - "license": "BSD-3-Clause" - }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@istanbuljs/load-nyc-config": { - "version": "1.1.0", - "dev": true, - "license": "ISC", - "dependencies": { - "camelcase": "^5.3.1", - "find-up": "^4.1.0", - "get-package-type": "^0.1.0", - "js-yaml": "^3.13.1", - "resolve-from": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { - "version": "1.0.10", - "dev": true, - "license": "MIT", - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/camelcase": { - "version": "5.3.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "dependencies": 
{ - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { - "version": "3.14.1", - "dev": true, - "license": "MIT", - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/schema": { - "version": "0.1.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/console": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", - "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", - "dev": true, - "dependencies": { - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/core": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", - "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", - "dev": true, - "dependencies": { - "@jest/console": "^29.7.0", - "@jest/reporters": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-changed-files": "^29.7.0", - "jest-config": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-resolve-dependencies": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "jest-watcher": "^29.7.0", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/@jest/core/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/environment": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", - "integrity": 
"sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", - "dev": true, - "dependencies": { - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "jest-mock": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", - "dev": true, - "dependencies": { - "expect": "^29.7.0", - "jest-snapshot": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/expect-utils": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", - "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", - "dev": true, - "dependencies": { - "jest-get-type": "^29.6.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/fake-timers": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", - "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", - "dev": true, - "dependencies": { - "@jest/types": "^29.6.3", - "@sinonjs/fake-timers": "^10.0.2", - "@types/node": "*", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/globals": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", - "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", - "dev": true, - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/types": "^29.6.3", - "jest-mock": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/reporters": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", - "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", - "dev": true, - "dependencies": { - "@bcoe/v8-coverage": "^0.2.3", - "@jest/console": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", - "@types/node": "*", - "chalk": "^4.0.0", - "collect-v8-coverage": "^1.0.0", - "exit": "^0.1.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "istanbul-lib-coverage": "^3.0.0", - "istanbul-lib-instrument": "^6.0.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-lib-source-maps": "^4.0.0", - "istanbul-reports": "^3.1.3", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "jest-worker": "^29.7.0", - "slash": "^3.0.0", - "string-length": "^4.0.1", - "strip-ansi": "^6.0.0", - "v8-to-istanbul": "^9.0.1" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/@jest/reporters/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - 
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/schemas": { - "version": "29.6.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@sinclair/typebox": "^0.27.8" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/source-map": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", - "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", - "dev": true, - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.18", - "callsites": "^3.0.0", - "graceful-fs": "^4.2.9" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/test-result": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", - "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", - "dev": true, - "dependencies": { - "@jest/console": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "collect-v8-coverage": "^1.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/test-sequencer": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", - "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", - "dev": true, - "dependencies": { - "@jest/test-result": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/transform": { - "version": "29.7.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", - "babel-plugin-istanbul": "^6.1.1", - "chalk": "^4.0.0", - "convert-source-map": "^2.0.0", - "fast-json-stable-stringify": "^2.1.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - "micromatch": "^4.0.4", - "pirates": "^4.0.4", - "slash": "^3.0.0", - "write-file-atomic": "^4.0.2" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/transform/node_modules/write-file-atomic": { - "version": "4.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.7" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/@jest/types": { - "version": "29.6.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/schemas": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^3.0.0", - "@types/node": "*", - "@types/yargs": "^17.0.8", - "chalk": "^4.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/types/node_modules/@types/node": { - "version": "20.8.10", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@joshwooding/vite-plugin-react-docgen-typescript": { - "version": "0.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "glob": "^7.2.0", - "glob-promise": "^4.2.0", - 
"magic-string": "^0.27.0", - "react-docgen-typescript": "^2.2.2" - }, - "peerDependencies": { - "typescript": ">= 4.3.x", - "vite": "^3.0.0 || ^4.0.0 || ^5.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.3", - "license": "MIT", - "dependencies": { - "@jridgewell/set-array": "^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.1", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/source-map": { - "version": "0.3.5", - "devOptional": true, - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.0", - "@jridgewell/trace-mapping": "^0.3.9" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.20", - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@juggle/resize-observer": { - "version": "3.4.0", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/@kurkle/color": { - "version": "0.3.2", - "license": "MIT" - }, - "node_modules/@mdx-js/react": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/mdx": "^2.0.0", - "@types/react": ">=16" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - }, - "peerDependencies": { - "react": ">=16" - } - }, - "node_modules/@ndelangen/get-tarball": { - "version": "3.0.9", - "dev": true, - "license": "MIT", - "dependencies": { - "gunzip-maybe": "^1.4.2", - "pump": "^3.0.0", - "tar-fs": "^2.1.1" - } - }, - "node_modules/@noble/hashes": { - "version": "1.3.2", - "license": "MIT", - "engines": { - "node": ">= 16" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@paralleldrive/cuid2": { - "version": "2.2.2", - "license": "MIT", - "dependencies": { - "@noble/hashes": "^1.1.5" - } - }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "license": "MIT", - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@popperjs/core": { - "version": "2.11.8", - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/popperjs" - } - }, - "node_modules/@radix-ui/number": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - } - }, - "node_modules/@radix-ui/primitive": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - } - }, - "node_modules/@radix-ui/react-arrow": { - "version": "1.0.3", - "dev": true, - 
"license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-collection": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-slot": "1.0.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-compose-refs": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-context": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-direction": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-dismissable-layer": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-escape-keydown": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-focus-guards": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-focus-scope": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - 
} - }, - "node_modules/@radix-ui/react-id": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-popper": { - "version": "1.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@floating-ui/react-dom": "^2.0.0", - "@radix-ui/react-arrow": "1.0.3", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-layout-effect": "1.0.1", - "@radix-ui/react-use-rect": "1.0.1", - "@radix-ui/react-use-size": "1.0.1", - "@radix-ui/rect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-portal": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-primitive": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "1.0.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-roving-focus": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-collection": "1.0.3", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-direction": "1.0.1", - "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-controllable-state": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-select": { - "version": "1.2.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/number": "1.0.1", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-collection": "1.0.3", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-direction": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", - "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.3", - "@radix-ui/react-id": "1.0.1", - 
"@radix-ui/react-popper": "1.1.2", - "@radix-ui/react-portal": "1.0.3", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-slot": "1.0.2", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-controllable-state": "1.0.1", - "@radix-ui/react-use-layout-effect": "1.0.1", - "@radix-ui/react-use-previous": "1.0.1", - "@radix-ui/react-visually-hidden": "1.0.3", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "2.5.5" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-separator": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-slot": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-toggle": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-controllable-state": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-toggle-group": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-direction": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-roving-focus": "1.0.4", - "@radix-ui/react-toggle": "1.0.3", - "@radix-ui/react-use-controllable-state": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-toolbar": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-direction": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-roving-focus": "1.0.4", - "@radix-ui/react-separator": "1.0.3", - "@radix-ui/react-toggle-group": "1.0.4" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - 
"@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-callback-ref": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-controllable-state": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-escape-keydown": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-layout-effect": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-previous": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-rect": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/rect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-size": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-visually-hidden": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/rect": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.13.10" - } - }, - "node_modules/@rc-component/context": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@rc-component/context/-/context-1.4.0.tgz", - "integrity": "sha512-kFcNxg9oLRMoL3qki0OMxK+7g5mypjgaaJp/pkOis/6rVxma9nJBF/8kCIuTYHUQNr0ii7MxqE33wirPZLJQ2w==", - "dependencies": { - "@babel/runtime": "^7.10.1", - "rc-util": "^5.27.0" - }, - 
"peerDependencies": { - "react": ">=16.9.0", - "react-dom": ">=16.9.0" - } - }, - "node_modules/@react-dnd/asap": { - "version": "5.0.2", - "license": "MIT" - }, - "node_modules/@react-dnd/invariant": { - "version": "4.0.2", - "license": "MIT" - }, - "node_modules/@react-dnd/shallowequal": { - "version": "4.0.2", - "license": "MIT" - }, - "node_modules/@react-keycloak-fork/core": { - "version": "4.0.3", - "license": "MIT", - "dependencies": { - "react-fast-compare": "^3.2.0" - }, - "funding": { - "type": "patreon", - "url": "https://www.patreon.com/reactkeycloak" - }, - "peerDependencies": { - "react": ">=16" - } - }, - "node_modules/@react-keycloak-fork/web": { - "version": "4.0.3", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.17.9", - "@react-keycloak-fork/core": "^4.0.3", - "hoist-non-react-statics": "^3.3.2" - }, - "funding": { - "type": "patreon", - "url": "https://www.patreon.com/reactkeycloak" - }, - "peerDependencies": { - "keycloak-js": ">=17.0.0", - "react": ">=16.0", - "react-dom": ">=16.0", - "typescript": ">=3.8" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@remix-run/router": { - "version": "1.11.0", - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@rollup/pluginutils": { - "version": "4.2.1", - "dev": true, - "license": "MIT", - "dependencies": { - "estree-walker": "^2.0.1", - "picomatch": "^2.2.2" - }, - "engines": { - "node": ">= 8.0.0" - } - }, - "node_modules/@sinclair/typebox": { - "version": "0.27.8", - "dev": true, - "license": "MIT" - }, - "node_modules/@sinonjs/commons": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.0.tgz", - "integrity": "sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==", - "dev": true, - "dependencies": { - "type-detect": "4.0.8" - } - }, - "node_modules/@sinonjs/fake-timers": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", - "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", - "dev": true, - "dependencies": { - "@sinonjs/commons": "^3.0.0" - } - }, - "node_modules/@storybook/addon-actions": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/theming": "7.5.3", - "@storybook/types": "7.5.3", - "dequal": "^2.0.2", - "lodash": "^4.17.21", - "polished": "^4.2.2", - "prop-types": "^15.7.2", - "react-inspector": "^6.0.0", - "telejson": "^7.2.0", - "ts-dedent": "^2.0.0", - "uuid": "^9.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { - "optional": true - } - } - }, - "node_modules/@storybook/addon-backgrounds": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": "7.5.3", - 
"@storybook/theming": "7.5.3", - "@storybook/types": "7.5.3", - "memoizerific": "^1.11.3", - "ts-dedent": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { - "optional": true - } - } - }, - "node_modules/@storybook/addon-controls": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/blocks": "7.5.3", - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/core-common": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/manager-api": "7.5.3", - "@storybook/node-logger": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/theming": "7.5.3", - "@storybook/types": "7.5.3", - "lodash": "^4.17.21", - "ts-dedent": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { - "optional": true - } - } - }, - "node_modules/@storybook/addon-docs": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/transform": "^29.3.1", - "@mdx-js/react": "^2.1.5", - "@storybook/blocks": "7.5.3", - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/csf-plugin": "7.5.3", - "@storybook/csf-tools": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/mdx2-csf": "^1.0.0", - "@storybook/node-logger": "7.5.3", - "@storybook/postinstall": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/react-dom-shim": "7.5.3", - "@storybook/theming": "7.5.3", - "@storybook/types": "7.5.3", - "fs-extra": "^11.1.0", - "remark-external-links": "^8.0.0", - "remark-slug": "^6.0.0", - "ts-dedent": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - } - }, - "node_modules/@storybook/addon-essentials": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/addon-actions": "7.5.3", - "@storybook/addon-backgrounds": "7.5.3", - "@storybook/addon-controls": "7.5.3", - "@storybook/addon-docs": "7.5.3", - "@storybook/addon-highlight": "7.5.3", - "@storybook/addon-measure": "7.5.3", - "@storybook/addon-outline": "7.5.3", - "@storybook/addon-toolbars": "7.5.3", - "@storybook/addon-viewport": "7.5.3", - "@storybook/core-common": "7.5.3", - "@storybook/manager-api": "7.5.3", - "@storybook/node-logger": "7.5.3", - "@storybook/preview-api": "7.5.3", - "ts-dedent": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - } - }, - "node_modules/@storybook/addon-highlight": { - "version": "7.5.3", - "dev": true, + "node_modules/@popperjs/core": { + "version": "2.11.8", "license": "MIT", - "dependencies": { - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/preview-api": "7.5.3" - }, "funding": { "type": "opencollective", - "url": "https://opencollective.com/storybook" + 
"url": "https://opencollective.com/popperjs" } }, - "node_modules/@storybook/addon-interactions": { - "version": "7.5.3", - "dev": true, - "license": "MIT", + "node_modules/@rc-component/context": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@rc-component/context/-/context-1.4.0.tgz", + "integrity": "sha512-kFcNxg9oLRMoL3qki0OMxK+7g5mypjgaaJp/pkOis/6rVxma9nJBF/8kCIuTYHUQNr0ii7MxqE33wirPZLJQ2w==", "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/core-common": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/instrumenter": "7.5.3", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/theming": "7.5.3", - "@storybook/types": "7.5.3", - "jest-mock": "^27.0.6", - "polished": "^4.2.2", - "ts-dedent": "^2.2.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "@babel/runtime": "^7.10.1", + "rc-util": "^5.27.0" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { - "optional": true - } - } - }, - "node_modules/@storybook/addon-interactions/node_modules/@jest/types": { - "version": "27.5.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^3.0.0", - "@types/node": "*", - "@types/yargs": "^16.0.0", - "chalk": "^4.0.0" - }, - "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" - } - }, - "node_modules/@storybook/addon-interactions/node_modules/@types/node": { - "version": "20.8.10", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@storybook/addon-interactions/node_modules/@types/yargs": { - "version": "16.0.7", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/@storybook/addon-interactions/node_modules/jest-mock": { - "version": "27.5.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^27.5.1", - "@types/node": "*" - }, - "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "react": ">=16.9.0", + "react-dom": ">=16.9.0" } }, - "node_modules/@storybook/addon-links": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/csf": "^0.1.0", - "@storybook/global": "^5.0.0", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/router": "7.5.3", - "@storybook/types": "7.5.3", - "prop-types": "^15.7.2", - "ts-dedent": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { - "optional": true - } - } + "node_modules/@react-dnd/asap": { + "version": "5.0.2", + "license": "MIT" }, - "node_modules/@storybook/addon-measure": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": 
"7.5.3", - "@storybook/types": "7.5.3", - "tiny-invariant": "^1.3.1" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { - "optional": true - } - } + "node_modules/@react-dnd/invariant": { + "version": "4.0.2", + "license": "MIT" }, - "node_modules/@storybook/addon-outline": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/types": "7.5.3", - "ts-dedent": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { - "optional": true - } - } + "node_modules/@react-dnd/shallowequal": { + "version": "4.0.2", + "license": "MIT" }, - "node_modules/@storybook/addon-toolbars": { - "version": "7.5.3", - "dev": true, + "node_modules/@react-keycloak-fork/core": { + "version": "4.0.3", "license": "MIT", "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/theming": "7.5.3" + "react-fast-compare": "^3.2.0" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "type": "patreon", + "url": "https://www.patreon.com/reactkeycloak" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { - "optional": true - } + "react": ">=16" } }, - "node_modules/@storybook/addon-viewport": { - "version": "7.5.3", - "dev": true, + "node_modules/@react-keycloak-fork/web": { + "version": "4.0.3", "license": "MIT", "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/theming": "7.5.3", - "memoizerific": "^1.11.3", - "prop-types": "^15.7.2" + "@babel/runtime": "^7.17.9", + "@react-keycloak-fork/core": "^4.0.3", + "hoist-non-react-statics": "^3.3.2" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "type": "patreon", + "url": "https://www.patreon.com/reactkeycloak" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + "keycloak-js": ">=17.0.0", + "react": ">=16.0", + "react-dom": ">=16.0", + "typescript": ">=3.8" }, "peerDependenciesMeta": { - "react": { - "optional": true - }, - "react-dom": { + "typescript": { "optional": true } } }, - "node_modules/@storybook/blocks": { - "version": "7.5.3", + "node_modules/@remix-run/router": { + "version": "1.19.2", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.19.2.tgz", + "integrity": "sha512-baiMx18+IMuD1yyvOGaHM9QrVUPGGG0jC+z+IPHnRJWUAUvaKuWKyE8gjDj2rzv3sz9zOGoRSPgeBVHRhZnBlA==", + "engines": { + 
"node": ">=14.0.0" + } + }, + "node_modules/@rollup/pluginutils": { + "version": "4.2.1", "dev": true, "license": "MIT", "dependencies": { - "@storybook/channels": "7.5.3", - "@storybook/client-logger": "7.5.3", - "@storybook/components": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/csf": "^0.1.0", - "@storybook/docs-tools": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/manager-api": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/theming": "7.5.3", - "@storybook/types": "7.5.3", - "@types/lodash": "^4.14.167", - "color-convert": "^2.0.1", - "dequal": "^2.0.2", - "lodash": "^4.17.21", - "markdown-to-jsx": "^7.1.8", - "memoizerific": "^1.11.3", - "polished": "^4.2.2", - "react-colorful": "^5.1.2", - "telejson": "^7.2.0", - "tocbot": "^4.20.1", - "ts-dedent": "^2.0.0", - "util-deprecate": "^1.0.2" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "estree-walker": "^2.0.1", + "picomatch": "^2.2.2" }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.24.0.tgz", + "integrity": "sha512-Q6HJd7Y6xdB48x8ZNVDOqsbh2uByBhgK8PiQgPhwkIw/HC/YX5Ghq2mQY5sRMZWHb3VsFkWooUVOZHKr7DmDIA==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.24.0.tgz", + "integrity": "sha512-ijLnS1qFId8xhKjT81uBHuuJp2lU4x2yxa4ctFPtG+MqEE6+C5f/+X/bStmxapgmwLwiL3ih122xv8kVARNAZA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.24.0.tgz", + "integrity": "sha512-bIv+X9xeSs1XCk6DVvkO+S/z8/2AMt/2lMqdQbMrmVpgFvXlmde9mLcbQpztXm1tajC3raFDqegsH18HQPMYtA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.24.0.tgz", + "integrity": "sha512-X6/nOwoFN7RT2svEQWUsW/5C/fYMBe4fnLK9DQk4SX4mgVBiTA9h64kjUYPvGQ0F/9xwJ5U5UfTbl6BEjaQdBQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.24.0.tgz", + "integrity": "sha512-0KXvIJQMOImLCVCz9uvvdPgfyWo93aHHp8ui3FrtOP57svqrF/roSSR5pjqL2hcMp0ljeGlU4q9o/rQaAQ3AYA==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.24.0.tgz", + "integrity": "sha512-it2BW6kKFVh8xk/BnHfakEeoLPv8STIISekpoF+nBgWM4d55CZKc7T4Dx1pEbTnYm/xEKMgy1MNtYuoA8RFIWw==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.24.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.24.0.tgz", + "integrity": "sha512-i0xTLXjqap2eRfulFVlSnM5dEbTVque/3Pi4g2y7cxrs7+a9De42z4XxKLYJ7+OhE3IgxvfQM7vQc43bwTgPwA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.24.0.tgz", + "integrity": "sha512-9E6MKUJhDuDh604Qco5yP/3qn3y7SLXYuiC0Rpr89aMScS2UAmK1wHP2b7KAa1nSjWJc/f/Lc0Wl1L47qjiyQw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.24.0.tgz", + "integrity": "sha512-2XFFPJ2XMEiF5Zi2EBf4h73oR1V/lycirxZxHZNc93SqDN/IWhYYSYj8I9381ikUFXZrz2v7r2tOVk2NBwxrWw==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.24.0.tgz", + "integrity": "sha512-M3Dg4hlwuntUCdzU7KjYqbbd+BLq3JMAOhCKdBE3TcMGMZbKkDdJ5ivNdehOssMCIokNHFOsv7DO4rlEOfyKpg==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.24.0.tgz", + "integrity": "sha512-mjBaoo4ocxJppTorZVKWFpy1bfFj9FeCMJqzlMQGjpNPY9JwQi7OuS1axzNIk0nMX6jSgy6ZURDZ2w0QW6D56g==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.24.0.tgz", + "integrity": "sha512-ZXFk7M72R0YYFN5q13niV0B7G8/5dcQ9JDp8keJSfr3GoZeXEoMHP/HlvqROA3OMbMdfr19IjCeNAnPUG93b6A==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.24.0.tgz", + "integrity": "sha512-w1i+L7kAXZNdYl+vFvzSZy8Y1arS7vMgIy8wusXJzRrPyof5LAb02KGr1PD2EkRcl73kHulIID0M501lN+vobQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.24.0.tgz", + "integrity": "sha512-VXBrnPWgBpVDCVY6XF3LEW0pOU51KbaHhccHw6AS6vBWIC60eqsH19DAeeObl+g8nKAz04QFdl/Cefta0xQtUQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.24.0.tgz", + "integrity": "sha512-xrNcGDU0OxVcPTH/8n/ShH4UevZxKIO6HJFK0e15XItZP2UcaiLFd5kiX7hJnqCbSztUF8Qot+JWBC/QXRPYWQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.24.0.tgz", + "integrity": 
"sha512-fbMkAF7fufku0N2dE5TBXcNlg0pt0cJue4xBRE2Qc5Vqikxr4VCgKj/ht6SMdFcOacVA9rqF70APJ8RN/4vMJw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "dev": true, + "license": "MIT" + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.0.tgz", + "integrity": "sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" } }, - "node_modules/@storybook/builder-manager": { - "version": "7.5.3", + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", "dev": true, - "license": "MIT", "dependencies": { - "@fal-works/esbuild-plugin-global-externals": "^2.1.2", - "@storybook/core-common": "7.5.3", - "@storybook/manager": "7.5.3", - "@storybook/node-logger": "7.5.3", - "@types/ejs": "^3.1.1", - "@types/find-cache-dir": "^3.2.1", - "@yarnpkg/esbuild-plugin-pnp": "^3.0.0-rc.10", - "browser-assert": "^1.2.1", - "ejs": "^3.1.8", - "esbuild": "^0.18.0", - "esbuild-plugin-alias": "^0.2.1", - "express": "^4.17.3", - "find-cache-dir": "^3.0.0", - "fs-extra": "^11.1.0", - "process": "^0.11.10", - "util": "^0.12.4" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "@sinonjs/commons": "^3.0.0" } }, - "node_modules/@storybook/builder-vite": { - "version": "7.5.3", + "node_modules/@storybook/addon-actions": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-8.3.4.tgz", + "integrity": "sha512-1y0yD3upKcyzNwwA6loAGW2cRDqExwl4oAT7GJQA4tmabI+fNwmANSgU/ezLvvSUf4Qo0eJHg2Zcn8y+Apq2eA==", "dev": true, - "license": "MIT", "dependencies": { - "@storybook/channels": "7.5.3", - "@storybook/client-logger": "7.5.3", - "@storybook/core-common": "7.5.3", - "@storybook/csf-plugin": "7.5.3", - "@storybook/node-logger": "7.5.3", - "@storybook/preview": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/types": "7.5.3", - "@types/find-cache-dir": "^3.2.1", - "browser-assert": "^1.2.1", - "es-module-lexer": "^0.9.3", - "express": "^4.17.3", - "find-cache-dir": "^3.0.0", - "fs-extra": "^11.1.0", - "magic-string": "^0.30.0", - "rollup": "^2.25.0 || ^3.3.0" + "@storybook/global": "^5.0.0", + "@types/uuid": "^9.0.1", + "dequal": "^2.0.2", + "polished": "^4.2.2", + "uuid": "^9.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "@preact/preset-vite": "*", - "typescript": ">= 4.3.x", - "vite": "^3.0.0 || ^4.0.0 || ^5.0.0", - "vite-plugin-glimmerx": "*" - }, - "peerDependenciesMeta": { - "@preact/preset-vite": { - "optional": true - }, - "typescript": { - "optional": true - }, - "vite-plugin-glimmerx": { - "optional": true - } - } - }, - "node_modules/@storybook/builder-vite/node_modules/es-module-lexer": { - "version": "0.9.3", - "dev": true, - "license": "MIT" - }, - "node_modules/@storybook/builder-vite/node_modules/magic-string": { - "version": "0.30.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.4.15" - }, - "engines": { - "node": ">=12" + "storybook": "^8.3.4" } }, - "node_modules/@storybook/channels": { - "version": "7.5.3", + 
"node_modules/@storybook/addon-backgrounds": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-8.3.4.tgz", + "integrity": "sha512-o3nl7cN3x8erJNxLEv8YptanEQAnbqnaseOAsvSC6/nnSAcRYBSs3BvekKvo4CcpS2mxn7F5NJTBFYnCXzy8EA==", "dev": true, - "license": "MIT", "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "qs": "^6.10.0", - "telejson": "^7.2.0", - "tiny-invariant": "^1.3.1" + "@storybook/global": "^5.0.0", + "memoizerific": "^1.11.3", + "ts-dedent": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/channels/node_modules/qs": { - "version": "6.11.2", + "node_modules/@storybook/addon-controls": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-8.3.4.tgz", + "integrity": "sha512-qQcaK6dczsb6wXkzGZKOjUYNA7FfKBewRv6NvoVKYY6LfhllGOkmUAtYpdtQG8adsZWTSoZaAOJS2vP2uM67lw==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" + "@storybook/global": "^5.0.0", + "dequal": "^2.0.2", + "lodash": "^4.17.21", + "ts-dedent": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/cli": { - "version": "7.5.3", + "node_modules/@storybook/addon-docs": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-8.3.4.tgz", + "integrity": "sha512-TWauhqF/gJgfwPuWeM6KM3LwC+ErCOM+K2z16w3vgao9s67sij8lnrdAoQ0hjA+kw2/KAdCakFS6FyciG81qog==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/core": "^7.22.9", - "@babel/preset-env": "^7.22.9", - "@babel/types": "^7.22.5", - "@ndelangen/get-tarball": "^3.0.7", - "@storybook/codemod": "7.5.3", - "@storybook/core-common": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/core-server": "7.5.3", - "@storybook/csf-tools": "7.5.3", - "@storybook/node-logger": "7.5.3", - "@storybook/telemetry": "7.5.3", - "@storybook/types": "7.5.3", - "@types/semver": "^7.3.4", - "@yarnpkg/fslib": "2.10.3", - "@yarnpkg/libzip": "2.3.0", - "chalk": "^4.1.0", - "commander": "^6.2.1", - "cross-spawn": "^7.0.3", - "detect-indent": "^6.1.0", - "envinfo": "^7.7.3", - "execa": "^5.0.0", - "express": "^4.17.3", - "find-up": "^5.0.0", + "@mdx-js/react": "^3.0.0", + "@storybook/blocks": "8.3.4", + "@storybook/csf-plugin": "8.3.4", + "@storybook/global": "^5.0.0", + "@storybook/react-dom-shim": "8.3.4", + "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", "fs-extra": "^11.1.0", - "get-npm-tarball-url": "^2.0.3", - "get-port": "^5.1.1", - "giget": "^1.0.0", - "globby": "^11.0.2", - "jscodeshift": "^0.14.0", - "leven": "^3.1.0", - "ora": "^5.4.1", - "prettier": "^2.8.0", - "prompts": "^2.4.0", - "puppeteer-core": "^2.1.1", - "read-pkg-up": "^7.0.1", - "semver": "^7.3.7", - "simple-update-notifier": "^2.0.0", - "strip-json-comments": "^3.0.1", - "tempy": "^1.0.1", - "ts-dedent": "^2.0.0", - "util-deprecate": "^1.0.2" - }, - "bin": { - "getstorybook": "bin/index.js", - "sb": "bin/index.js" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0", + "rehype-external-links": "^3.0.0", + "rehype-slug": "^6.0.0", + "ts-dedent": "^2.0.0" }, 
"funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/cli/node_modules/commander": { - "version": "6.2.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/@storybook/cli/node_modules/prettier": { - "version": "2.8.8", - "dev": true, - "license": "MIT", - "bin": { - "prettier": "bin-prettier.js" - }, - "engines": { - "node": ">=10.13.0" + "node_modules/@storybook/addon-essentials": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-8.3.4.tgz", + "integrity": "sha512-C3+3hpmSn/8zdx5sXEP0eE6zMzxgRosHVZYfe9nBcMiEDp6UKVUyHVetWxEULOEgN46ysjcpllZ0bUkRYxi2IQ==", + "dev": true, + "dependencies": { + "@storybook/addon-actions": "8.3.4", + "@storybook/addon-backgrounds": "8.3.4", + "@storybook/addon-controls": "8.3.4", + "@storybook/addon-docs": "8.3.4", + "@storybook/addon-highlight": "8.3.4", + "@storybook/addon-measure": "8.3.4", + "@storybook/addon-outline": "8.3.4", + "@storybook/addon-toolbars": "8.3.4", + "@storybook/addon-viewport": "8.3.4", + "ts-dedent": "^2.0.0" }, "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/client-logger": { - "version": "7.5.3", + "node_modules/@storybook/addon-highlight": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-highlight/-/addon-highlight-8.3.4.tgz", + "integrity": "sha512-rxZTeuZyZ7RnU+xmRhS01COFLbGnVEmlUNxBw8ArsrTEZKW5PbKpIxNLTj9F0zdH8H0MfryJGP+Aadcm0oHWlw==", "dev": true, - "license": "MIT", "dependencies": { "@storybook/global": "^5.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/codemod": { - "version": "7.5.3", + "node_modules/@storybook/addon-interactions": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-interactions/-/addon-interactions-8.3.4.tgz", + "integrity": "sha512-ORxqe35wUmF7EDHo45mdDHiju3Ryk2pZ1vO9PyvW6ZItNlHt/IxAr7T/TysGejZ/eTBg6tMZR3ExGky3lTg/CQ==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/core": "^7.22.9", - "@babel/preset-env": "^7.22.9", - "@babel/types": "^7.22.5", - "@storybook/csf": "^0.1.0", - "@storybook/csf-tools": "7.5.3", - "@storybook/node-logger": "7.5.3", - "@storybook/types": "7.5.3", - "@types/cross-spawn": "^6.0.2", - "cross-spawn": "^7.0.3", - "globby": "^11.0.2", - "jscodeshift": "^0.14.0", - "lodash": "^4.17.21", - "prettier": "^2.8.0", - "recast": "^0.23.1" + "@storybook/global": "^5.0.0", + "@storybook/instrumenter": "8.3.4", + "@storybook/test": "8.3.4", + "polished": "^4.2.2", + "ts-dedent": "^2.2.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" - } - }, - "node_modules/@storybook/codemod/node_modules/prettier": { - "version": "2.8.8", - "dev": true, - "license": "MIT", - "bin": { - "prettier": "bin-prettier.js" }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/components": { - "version": "7.5.3", + "node_modules/@storybook/addon-links": { + "version": "8.3.4", + "resolved": 
"https://registry.npmjs.org/@storybook/addon-links/-/addon-links-8.3.4.tgz", + "integrity": "sha512-R1DjARmxRIKJDGIG6uxmQ1yFNyoQbb+QIPUFjgWCak8+AdLJbC7W+Esvo9F5hQfh6czyy0piiM3qj5hpQJVh3A==", "dev": true, - "license": "MIT", "dependencies": { - "@radix-ui/react-select": "^1.2.2", - "@radix-ui/react-toolbar": "^1.0.4", - "@storybook/client-logger": "7.5.3", - "@storybook/csf": "^0.1.0", + "@storybook/csf": "^0.1.11", "@storybook/global": "^5.0.0", - "@storybook/theming": "7.5.3", - "@storybook/types": "7.5.3", - "memoizerific": "^1.11.3", - "use-resize-observer": "^9.1.0", - "util-deprecate": "^1.0.2" + "ts-dedent": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.3.4" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + } } }, - "node_modules/@storybook/core-client": { - "version": "7.5.3", + "node_modules/@storybook/addon-measure": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-8.3.4.tgz", + "integrity": "sha512-IJ6WKEbqmG+r7sukFjo+bVmPB2Zry04sylGx/OGyOh7zIhhqAqpwOwMHP0uQrc3tLNnUM6qB/o83UyYX79ql+A==", "dev": true, - "license": "MIT", "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/preview-api": "7.5.3" + "@storybook/global": "^5.0.0", + "tiny-invariant": "^1.3.1" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/core-common": { - "version": "7.5.3", + "node_modules/@storybook/addon-outline": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-8.3.4.tgz", + "integrity": "sha512-kRRJTTLKM8gMfeh/e83djN5XLlc0hFtr9zKWxuZxaXt9Hmr+9tH/PRFtVK/S4SgqnBDoXk49Wgv6raiwj5/e3A==", "dev": true, - "license": "MIT", "dependencies": { - "@storybook/core-events": "7.5.3", - "@storybook/node-logger": "7.5.3", - "@storybook/types": "7.5.3", - "@types/find-cache-dir": "^3.2.1", - "@types/node": "^18.0.0", - "@types/node-fetch": "^2.6.4", - "@types/pretty-hrtime": "^1.0.0", - "chalk": "^4.1.0", - "esbuild": "^0.18.0", - "esbuild-register": "^3.5.0", - "file-system-cache": "2.3.0", - "find-cache-dir": "^3.0.0", - "find-up": "^5.0.0", - "fs-extra": "^11.1.0", - "glob": "^10.0.0", - "handlebars": "^4.7.7", - "lazy-universal-dotenv": "^4.0.0", - "node-fetch": "^2.0.0", - "picomatch": "^2.3.0", - "pkg-dir": "^5.0.0", - "pretty-hrtime": "^1.0.3", - "resolve-from": "^5.0.0", + "@storybook/global": "^5.0.0", "ts-dedent": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" - } - }, - "node_modules/@storybook/core-common/node_modules/glob": { - "version": "10.3.10", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.3.5", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/core-common/node_modules/minimatch": { - "version": "9.0.3", + "node_modules/@storybook/addon-toolbars": { + "version": "8.3.4", + "resolved": 
"https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-8.3.4.tgz", + "integrity": "sha512-Km1YciVIxqluDbd1xmHjANNFyMonEOtnA6e4MrnBnC9XkPXSigeFlj0JvxyI/zjBsLBoFRmQiwq55W6l3hQ9sA==", "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@storybook/core-common/node_modules/minipass": { - "version": "7.0.4", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/@storybook/core-common/node_modules/pkg-dir": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "find-up": "^5.0.0" + "type": "opencollective", + "url": "https://opencollective.com/storybook" }, - "engines": { - "node": ">=10" + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/core-events": { - "version": "7.5.3", + "node_modules/@storybook/addon-viewport": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-8.3.4.tgz", + "integrity": "sha512-fU4LdXSSqIOLbCEh2leq/tZUYlFliXZBWr/+igQHdUoU7HY8RIImXqVUaR9wlCaTb48WezAWT60vJtwNijyIiQ==", "dev": true, - "license": "MIT", "dependencies": { - "ts-dedent": "^2.0.0" + "memoizerific": "^1.11.3" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/core-server": { - "version": "7.5.3", + "node_modules/@storybook/blocks": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/blocks/-/blocks-8.3.4.tgz", + "integrity": "sha512-1g4aCrd5CcN+pVhF2ATu9ZRVvAIgBMb2yF9KkCuTpdvqKDuDNK3sGb0CxjS7jp3LOvyjJr9laTOQsz8v8MQc5A==", "dev": true, - "license": "MIT", "dependencies": { - "@aw-web-design/x-default-browser": "1.4.126", - "@discoveryjs/json-ext": "^0.5.3", - "@storybook/builder-manager": "7.5.3", - "@storybook/channels": "7.5.3", - "@storybook/core-common": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/csf": "^0.1.0", - "@storybook/csf-tools": "7.5.3", - "@storybook/docs-mdx": "^0.1.0", + "@storybook/csf": "^0.1.11", "@storybook/global": "^5.0.0", - "@storybook/manager": "7.5.3", - "@storybook/node-logger": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/telemetry": "7.5.3", - "@storybook/types": "7.5.3", - "@types/detect-port": "^1.3.0", - "@types/node": "^18.0.0", - "@types/pretty-hrtime": "^1.0.0", - "@types/semver": "^7.3.4", - "better-opn": "^3.0.2", - "chalk": "^4.1.0", - "cli-table3": "^0.6.1", - "compression": "^1.7.4", - "detect-port": "^1.3.0", - "express": "^4.17.3", - "fs-extra": "^11.1.0", - "globby": "^11.0.2", - "ip": "^2.0.0", + "@storybook/icons": "^1.2.10", + "@types/lodash": "^4.14.167", + "color-convert": "^2.0.1", + "dequal": "^2.0.2", "lodash": "^4.17.21", - "open": "^8.4.0", - "pretty-hrtime": "^1.0.3", - "prompts": "^2.4.0", - "read-pkg-up": "^7.0.1", - "semver": "^7.3.7", + "markdown-to-jsx": "^7.4.5", + "memoizerific": "^1.11.3", + "polished": "^4.2.2", + "react-colorful": "^5.1.2", "telejson": "^7.2.0", - "tiny-invariant": "^1.3.1", "ts-dedent": "^2.0.0", - "util": "^0.12.4", - "util-deprecate": "^1.0.2", - "watchpack": "^2.2.0", - "ws": "^8.2.3" + "util-deprecate": "^1.0.2" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" - } - }, - "node_modules/@storybook/csf": { - "version": "0.1.1", - "dev": true, - "license": "MIT", - 
"dependencies": { - "type-fest": "^2.19.0" - } - }, - "node_modules/@storybook/csf-plugin": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/csf-tools": "7.5.3", - "unplugin": "^1.3.1" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.3.4" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } } }, - "node_modules/@storybook/csf-tools": { - "version": "7.5.3", + "node_modules/@storybook/builder-vite": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/builder-vite/-/builder-vite-8.3.4.tgz", + "integrity": "sha512-Sa6SZ7LeHpkrnuvua8P8MR8e8a+MPKbyMmr9TqCCy8Ud/t4AM4kHY3JpJGtrgeK9l43fBnBwfdZYoRl5J6oWeA==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/generator": "^7.22.9", - "@babel/parser": "^7.22.7", - "@babel/traverse": "^7.22.8", - "@babel/types": "^7.22.5", - "@storybook/csf": "^0.1.0", - "@storybook/types": "7.5.3", + "@storybook/csf-plugin": "8.3.4", + "@types/find-cache-dir": "^3.2.1", + "browser-assert": "^1.2.1", + "es-module-lexer": "^1.5.0", + "express": "^4.19.2", + "find-cache-dir": "^3.0.0", "fs-extra": "^11.1.0", - "recast": "^0.23.1", + "magic-string": "^0.30.0", "ts-dedent": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "@preact/preset-vite": "*", + "storybook": "^8.3.4", + "typescript": ">= 4.3.x", + "vite": "^4.0.0 || ^5.0.0", + "vite-plugin-glimmerx": "*" + }, + "peerDependenciesMeta": { + "@preact/preset-vite": { + "optional": true + }, + "typescript": { + "optional": true + }, + "vite-plugin-glimmerx": { + "optional": true + } } }, - "node_modules/@storybook/docs-mdx": { - "version": "0.1.0", + "node_modules/@storybook/builder-vite/node_modules/magic-string": { + "version": "0.30.11", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.11.tgz", + "integrity": "sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==", "dev": true, - "license": "MIT" + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } }, - "node_modules/@storybook/docs-tools": { - "version": "7.5.3", + "node_modules/@storybook/components": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/components/-/components-8.3.4.tgz", + "integrity": "sha512-iQzLJd87uGbFBbYNqlrN/ABrnx3dUrL0tjPCarzglzshZoPCNOsllJeJx5TJwB9kCxSZ8zB9TTOgr7NXl+oyVA==", "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/core-common": "7.5.3", - "@storybook/preview-api": "7.5.3", - "@storybook/types": "7.5.3", - "@types/doctrine": "^0.0.3", - "doctrine": "^3.0.0", - "lodash": "^4.17.21" - }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/global": { - "version": "5.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/@storybook/instrumenter": { - "version": "7.5.3", + "node_modules/@storybook/core": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/core/-/core-8.3.4.tgz", + "integrity": "sha512-4PZB91JJpuKfcjeOR2LXj3ABaPLLSd2P/SfYOKNCygrDstsQa/yay3/yN5Z9yi1cIG84KRr6/sUW+0x8HsGLPg==", "dev": true, - "license": "MIT", "dependencies": { - 
"@storybook/channels": "7.5.3", - "@storybook/client-logger": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/global": "^5.0.0", - "@storybook/preview-api": "7.5.3" + "@storybook/csf": "^0.1.11", + "@types/express": "^4.17.21", + "better-opn": "^3.0.2", + "browser-assert": "^1.2.1", + "esbuild": "^0.18.0 || ^0.19.0 || ^0.20.0 || ^0.21.0 || ^0.22.0 || ^0.23.0", + "esbuild-register": "^3.5.0", + "express": "^4.19.2", + "jsdoc-type-pratt-parser": "^4.0.0", + "process": "^0.11.10", + "recast": "^0.23.5", + "semver": "^7.6.2", + "util": "^0.12.5", + "ws": "^8.2.3" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" } }, - "node_modules/@storybook/manager": { - "version": "7.5.3", + "node_modules/@storybook/csf": { + "version": "0.1.11", + "resolved": "https://registry.npmjs.org/@storybook/csf/-/csf-0.1.11.tgz", + "integrity": "sha512-dHYFQH3mA+EtnCkHXzicbLgsvzYjcDJ1JWsogbItZogkPHgSJM/Wr71uMkcvw8v9mmCyP4NpXJuu6bPoVsOnzg==", "dev": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "dependencies": { + "type-fest": "^2.19.0" } }, - "node_modules/@storybook/manager-api": { - "version": "7.5.3", + "node_modules/@storybook/csf-plugin": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/csf-plugin/-/csf-plugin-8.3.4.tgz", + "integrity": "sha512-ZMFWYxeTN4GxCn8dyIH4roECyLDy29yv/QKM+pHM3AC5Ny2HWI35SohWao4fGBAFxPQFbR5hPN8xa6ofHPSSTg==", "dev": true, - "license": "MIT", "dependencies": { - "@storybook/channels": "7.5.3", - "@storybook/client-logger": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/csf": "^0.1.0", - "@storybook/global": "^5.0.0", - "@storybook/router": "7.5.3", - "@storybook/theming": "7.5.3", - "@storybook/types": "7.5.3", - "dequal": "^2.0.2", - "lodash": "^4.17.21", - "memoizerific": "^1.11.3", - "semver": "^7.3.7", - "store2": "^2.14.2", - "telejson": "^7.2.0", - "ts-dedent": "^2.0.0" + "unplugin": "^1.3.1" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + "storybook": "^8.3.4" } }, - "node_modules/@storybook/mdx2-csf": { - "version": "1.1.0", + "node_modules/@storybook/global": { + "version": "5.0.0", "dev": true, "license": "MIT" }, - "node_modules/@storybook/node-logger": { - "version": "7.5.3", + "node_modules/@storybook/icons": { + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/@storybook/icons/-/icons-1.2.12.tgz", + "integrity": "sha512-UxgyK5W3/UV4VrI3dl6ajGfHM4aOqMAkFLWe2KibeQudLf6NJpDrDMSHwZj+3iKC4jFU7dkKbbtH2h/al4sW3Q==", "dev": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, - "node_modules/@storybook/postinstall": { - "version": "7.5.3", + "node_modules/@storybook/instrumenter": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/instrumenter/-/instrumenter-8.3.4.tgz", + "integrity": "sha512-jVhfNOPekOyJmta0BTkQl9Z6rgRbFHlc0eV4z1oSrzaawSlc9TFzAeDCtCP57vg3FuBX8ydDYAvyZ7s4xPpLyg==", "dev": true, - "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "@vitest/utils": "^2.0.5", + "util": "^0.12.4" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + 
}, + "peerDependencies": { + "storybook": "^8.3.4" } }, - "node_modules/@storybook/preview": { - "version": "7.5.3", + "node_modules/@storybook/manager-api": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/manager-api/-/manager-api-8.3.4.tgz", + "integrity": "sha512-tBx7MBfPUrKSlD666zmVjtIvoNArwCciZiW/UJ8IWmomrTJRfFBnVvPVM2gp1lkDIzRHYmz5x9BHbYaEDNcZWQ==", "dev": true, - "license": "MIT", "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.3.4" } }, "node_modules/@storybook/preview-api": { - "version": "7.5.3", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/preview-api/-/preview-api-8.3.4.tgz", + "integrity": "sha512-/YKQ3QDVSHmtFXXCShf5w0XMlg8wkfTpdYxdGv1CKFV8DU24f3N7KWulAgeWWCWQwBzZClDa9kzxmroKlQqx3A==", "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/channels": "7.5.3", - "@storybook/client-logger": "7.5.3", - "@storybook/core-events": "7.5.3", - "@storybook/csf": "^0.1.0", - "@storybook/global": "^5.0.0", - "@storybook/types": "7.5.3", - "@types/qs": "^6.9.5", - "dequal": "^2.0.2", - "lodash": "^4.17.21", - "memoizerific": "^1.11.3", - "qs": "^6.10.0", - "synchronous-promise": "^2.0.15", - "ts-dedent": "^2.0.0", - "util-deprecate": "^1.0.2" - }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" - } - }, - "node_modules/@storybook/preview-api/node_modules/qs": { - "version": "6.11.2", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.4" }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "peerDependencies": { + "storybook": "^8.3.4" } }, "node_modules/@storybook/react": { - "version": "7.5.3", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/react/-/react-8.3.4.tgz", + "integrity": "sha512-PA7iQL4/9X2/iLrv+AUPNtlhTHJWhDao9gQIT1Hef39FtFk+TU9lZGbv+g29R1H9V3cHP5162nG2aTu395kmbA==", "dev": true, - "license": "MIT", "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/core-client": "7.5.3", - "@storybook/docs-tools": "7.5.3", + "@storybook/components": "^8.3.4", "@storybook/global": "^5.0.0", - "@storybook/preview-api": "7.5.3", - "@storybook/react-dom-shim": "7.5.3", - "@storybook/types": "7.5.3", + "@storybook/manager-api": "^8.3.4", + "@storybook/preview-api": "^8.3.4", + "@storybook/react-dom-shim": "8.3.4", + "@storybook/theming": "^8.3.4", "@types/escodegen": "^0.0.6", "@types/estree": "^0.0.51", - "@types/node": "^18.0.0", + "@types/node": "^22.0.0", "acorn": "^7.4.1", "acorn-jsx": "^5.3.1", "acorn-walk": "^7.2.0", "escodegen": "^2.1.0", "html-tags": "^3.1.0", - "lodash": "^4.17.21", "prop-types": "^15.7.2", "react-element-to-jsx-string": "^15.0.0", + "semver": "^7.3.7", "ts-dedent": "^2.0.0", "type-fest": "~2.19", "util-deprecate": "^1.0.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0", - "typescript": "*" + "@storybook/test": "8.3.4", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.3.4", + "typescript": ">= 4.2.x" }, "peerDependenciesMeta": { + "@storybook/test": { + "optional": true + }, "typescript": { "optional": true } } }, 
"node_modules/@storybook/react-dom-shim": { - "version": "7.5.3", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/react-dom-shim/-/react-dom-shim-8.3.4.tgz", + "integrity": "sha512-L4llDvjaAzqPx6h4ddZMh36wPr75PrI2S8bXy+flLqAeVRYnRt4WNKGuxqH0t0U6MwId9+vlCZ13JBfFuY7eQQ==", "dev": true, - "license": "MIT", "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.3.4" } }, "node_modules/@storybook/react-vite": { - "version": "7.5.3", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/react-vite/-/react-vite-8.3.4.tgz", + "integrity": "sha512-0Xm8eTH+jQ7SV4moLkPN4G6U2IDrqXPXUqsZdXaccepIMcD4G75foQFm2LOrFJuY+IMySPspKeTqf8OLskPppw==", "dev": true, - "license": "MIT", "dependencies": { "@joshwooding/vite-plugin-react-docgen-typescript": "0.3.0", "@rollup/pluginutils": "^5.0.2", - "@storybook/builder-vite": "7.5.3", - "@storybook/react": "7.5.3", - "@vitejs/plugin-react": "^3.0.1", + "@storybook/builder-vite": "8.3.4", + "@storybook/react": "8.3.4", + "find-up": "^5.0.0", "magic-string": "^0.30.0", - "react-docgen": "^6.0.2" + "react-docgen": "^7.0.0", + "resolve": "^1.22.8", + "tsconfig-paths": "^4.2.0" }, "engines": { - "node": ">=16" + "node": ">=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0", - "vite": "^3.0.0 || ^4.0.0 || ^5.0.0" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.3.4", + "vite": "^4.0.0 || ^5.0.0" } }, "node_modules/@storybook/react-vite/node_modules/@rollup/pluginutils": { @@ -5015,35 +2811,6 @@ } } }, - "node_modules/@storybook/react-vite/node_modules/@vitejs/plugin-react": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.20.12", - "@babel/plugin-transform-react-jsx-self": "^7.18.6", - "@babel/plugin-transform-react-jsx-source": "^7.19.6", - "magic-string": "^0.27.0", - "react-refresh": "^0.14.0" - }, - "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "peerDependencies": { - "vite": "^4.1.0-beta.0" - } - }, - "node_modules/@storybook/react-vite/node_modules/@vitejs/plugin-react/node_modules/magic-string": { - "version": "0.27.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.4.13" - }, - "engines": { - "node": ">=12" - } - }, "node_modules/@storybook/react-vite/node_modules/magic-string": { "version": "0.30.5", "dev": true, @@ -5060,6 +2827,15 @@ "dev": true, "license": "MIT" }, + "node_modules/@storybook/react/node_modules/@types/node": { + "version": "22.7.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.4.tgz", + "integrity": "sha512-y+NPi1rFzDs1NdQHHToqeiX2TIS79SWEAw9GYhkkx8bD0ChpfqC+n2j5OXOCpzfojBEBt6DnEnnG9MY0zk1XLg==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, "node_modules/@storybook/react/node_modules/acorn": { "version": "7.4.1", "dev": true, @@ -5079,57 +2855,96 @@ "node": ">=0.4.0" } }, - "node_modules/@storybook/router": { - "version": "7.5.3", + "node_modules/@storybook/react/node_modules/undici-types": { + "version": "6.19.8", + 
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true + }, + "node_modules/@storybook/test": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/test/-/test-8.3.4.tgz", + "integrity": "sha512-HRiUenitln8QPHu6DEWUg9s9cEoiGN79lMykzXzw9shaUvdEIhWCsh82YKtmB3GJPj6qcc6dZL/Aio8srxyGAg==", "dev": true, - "license": "MIT", "dependencies": { - "@storybook/client-logger": "7.5.3", - "memoizerific": "^1.11.3", - "qs": "^6.10.0" + "@storybook/csf": "^0.1.11", + "@storybook/global": "^5.0.0", + "@storybook/instrumenter": "8.3.4", + "@testing-library/dom": "10.4.0", + "@testing-library/jest-dom": "6.5.0", + "@testing-library/user-event": "14.5.2", + "@vitest/expect": "2.0.5", + "@vitest/spy": "2.0.5", + "util": "^0.12.4" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + "storybook": "^8.3.4" } }, - "node_modules/@storybook/router/node_modules/qs": { - "version": "6.11.2", + "node_modules/@storybook/test/node_modules/@testing-library/dom": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.0.tgz", + "integrity": "sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { - "side-channel": "^1.0.4" + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + "aria-query": "5.3.0", + "chalk": "^4.1.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "pretty-format": "^27.0.2" }, "engines": { - "node": ">=0.6" + "node": ">=18" + } + }, + "node_modules/@storybook/test/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@storybook/telemetry": { - "version": "7.5.3", + "node_modules/@storybook/test/node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", "dev": true, - "license": "MIT", "dependencies": { - "@storybook/client-logger": "7.5.3", - "@storybook/core-common": "7.5.3", - "@storybook/csf-tools": "7.5.3", - "chalk": "^4.1.0", - "detect-package-manager": "^2.0.1", - "fetch-retry": "^5.0.2", - "fs-extra": "^11.1.0", - "read-pkg-up": "^7.0.1" + "dequal": "^2.0.3" + } + }, + "node_modules/@storybook/test/node_modules/pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" 
} }, + "node_modules/@storybook/test/node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true + }, "node_modules/@storybook/testing-library": { "version": "0.2.2", "dev": true, @@ -5141,37 +2956,16 @@ } }, "node_modules/@storybook/theming": { - "version": "7.5.3", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/@storybook/theming/-/theming-8.3.4.tgz", + "integrity": "sha512-D4XVsQgTtpHEHLhwkx59aGy1GBwOedVr/mNns7hFrH8FjEpxrrWCuZQASq1ZpCl8LXlh7uvmT5sM2rOdQbGuGg==", "dev": true, - "license": "MIT", - "dependencies": { - "@emotion/use-insertion-effect-with-fallbacks": "^1.0.0", - "@storybook/client-logger": "7.5.3", - "@storybook/global": "^5.0.0", - "memoizerific": "^1.11.3" - }, "funding": { "type": "opencollective", "url": "https://opencollective.com/storybook" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - } - }, - "node_modules/@storybook/types": { - "version": "7.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@storybook/channels": "7.5.3", - "@types/babel__core": "^7.0.0", - "@types/express": "^4.7.0", - "file-system-cache": "2.3.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/storybook" + "storybook": "^8.3.4" } }, "node_modules/@swc/core": { @@ -5231,6 +3025,14 @@ "dev": true, "license": "Apache-2.0" }, + "node_modules/@swc/helpers": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.13.tgz", + "integrity": "sha512-UoKGxQ3r5kYI9dALKJapMmuK+1zWM/H17Z1+iwnNmzcJRnfFuevZs375TA5rW31pu4BS4NoSy1fRsexDXfWn5w==", + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@swc/types": { "version": "0.1.5", "dev": true, @@ -5283,6 +3085,57 @@ "dev": true, "license": "MIT" }, + "node_modules/@testing-library/jest-dom": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.5.0.tgz", + "integrity": "sha512-xGGHpBXYSHUUr6XsKBfs85TWlYKpTc37cSBBVrXcib2MkHLboWlkClhWF37JKlDb9KEq3dHs+f2xR7XJEWGBxA==", + "dev": true, + "dependencies": { + "@adobe/css-tools": "^4.4.0", + "aria-query": "^5.0.0", + "chalk": "^3.0.0", + "css.escape": "^1.5.1", + "dom-accessibility-api": "^0.6.3", + "lodash": "^4.17.21", + "redent": "^3.0.0" + }, + "engines": { + "node": ">=14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/dom-accessibility-api": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz", + "integrity": "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==", + "dev": true + }, + "node_modules/@testing-library/jest-dom/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/@testing-library/react": { "version": "14.0.0", "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-14.0.0.tgz", @@ -5302,9 +3155,10 @@ } }, "node_modules/@testing-library/user-event": { - "version": "14.5.1", + "version": "14.5.2", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.5.2.tgz", + "integrity": "sha512-YAh82Wh4TIrxYLmfGcixwD18oIjyC1pFQC2Y01F2lzV2HTMiYrI0nze0FD0ocB//CKS/7jIUgae+adPqxK5yCQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=12", "npm": ">=6" @@ -5367,8 +3221,9 @@ } }, "node_modules/@types/babel__core": { - "version": "7.20.3", - "license": "MIT", + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", @@ -5431,14 +3286,14 @@ } }, "node_modules/@types/command-line-args": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.2.0.tgz", - "integrity": "sha512-UuKzKpJJ/Ief6ufIaIzr3A/0XnluX7RvFgwkV89Yzvm77wCh1kFaFmqN8XEnGcN62EuHdedQjEMb8mYxFLGPyA==" + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.2.3.tgz", + "integrity": "sha512-uv0aG6R0Y8WHZLTamZwtfsDLVRnOa+n+n5rEvFWL5Na5gZ8V2Teab/duDPFzIIIhs9qizDpcavCusCLJZu62Kw==" }, "node_modules/@types/command-line-usage": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/@types/command-line-usage/-/command-line-usage-5.0.2.tgz", - "integrity": "sha512-n7RlEEJ+4x4TS7ZQddTmNSxP+zziEG0TNsMfiRIxcIVXt71ENJ9ojeXmGO3wPoTdn7pJcU2xc3CJYMktNT6DPg==" + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/command-line-usage/-/command-line-usage-5.0.4.tgz", + "integrity": "sha512-BwR5KP3Es/CSht0xqBcUXS3qCAUVXwpRKsV2+arxeb65atasuXG9LykC9Ab10Cw3s2raH92ZqOeILaQbsB2ACg==" }, "node_modules/@types/compression": { "version": "1.7.4", @@ -5480,22 +3335,6 @@ "undici-types": "~5.26.4" } }, - "node_modules/@types/cross-spawn": { - "version": "6.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/cross-spawn/node_modules/@types/node": { - "version": "20.8.10", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, "node_modules/@types/debug": { "version": "4.1.10", "license": "MIT", @@ -5503,25 +3342,11 @@ "@types/ms": "*" } }, - "node_modules/@types/detect-port": { - "version": "1.3.4", - "dev": true, - "license": "MIT" - }, "node_modules/@types/doctrine": { - "version": "0.0.3", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/ejs": { - "version": "3.1.4", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/emscripten": { - "version": "1.39.9", - "dev": true, - "license": "MIT" + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/@types/doctrine/-/doctrine-0.0.9.tgz", + "integrity": "sha512-eOIHzCUSH7SMfonMG1LsC2f8vxBFtho6NGBznK41R84YzPuvSBzrhEps33IsQiOW9+VL6NQ9DbjQJznk/S4uRA==", + "dev": true }, "node_modules/@types/escodegen": { "version": "0.0.6", @@ -5538,14 +3363,15 @@ } }, "node_modules/@types/estree": { - "version": "1.0.4", - "dev": true, - "license": 
"MIT" + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==" }, "node_modules/@types/express": { - "version": "4.17.20", + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", "dev": true, - "license": "MIT", "dependencies": { "@types/body-parser": "*", "@types/express-serve-static-core": "^4.17.33", @@ -5579,8 +3405,9 @@ }, "node_modules/@types/find-cache-dir": { "version": "3.2.1", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/find-cache-dir/-/find-cache-dir-3.2.1.tgz", + "integrity": "sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw==", + "dev": true }, "node_modules/@types/glob": { "version": "7.2.0", @@ -5718,9 +3545,10 @@ "license": "MIT" }, "node_modules/@types/lodash": { - "version": "4.14.200", - "dev": true, - "license": "MIT" + "version": "4.17.9", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.9.tgz", + "integrity": "sha512-w9iWudx1XWOHW5lQRS9iKpK/XuRhnN+0T7HvdCCd802FYkT1AMTnxndJHGrNJwRoRHkslGr4S29tjm1cT7x/7w==", + "dev": true }, "node_modules/@types/mdast": { "version": "3.0.14", @@ -5730,17 +3558,13 @@ } }, "node_modules/@types/mdx": { - "version": "2.0.9", - "dev": true, - "license": "MIT" + "version": "2.0.13", + "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.13.tgz", + "integrity": "sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==", + "dev": true }, "node_modules/@types/mime": { - "version": "3.0.3", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/mime-types": { - "version": "2.1.3", + "version": "3.0.3", "dev": true, "license": "MIT" }, @@ -5766,33 +3590,6 @@ "undici-types": "~5.26.4" } }, - "node_modules/@types/node-fetch": { - "version": "2.6.8", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*", - "form-data": "^4.0.0" - } - }, - "node_modules/@types/node-fetch/node_modules/@types/node": { - "version": "20.8.10", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.3", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/pad-left": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@types/pad-left/-/pad-left-2.1.1.tgz", - "integrity": "sha512-Xd22WCRBydkGSApl5Bw0PhAOHKSVjNL3E3AwzKaps96IMraPqy5BvZIsBVK6JLwdybUzjHnuWVwpDd0JjTfHXA==" - }, "node_modules/@types/papaparse": { "version": "5.3.10", "dev": true, @@ -5813,11 +3610,6 @@ "version": "4.0.1", "license": "MIT" }, - "node_modules/@types/pretty-hrtime": { - "version": "1.0.2", - "dev": true, - "license": "MIT" - }, "node_modules/@types/prop-types": { "version": "15.7.9", "license": "MIT" @@ -5833,9 +3625,9 @@ "license": "MIT" }, "node_modules/@types/react": { - "version": "18.2.79", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.79.tgz", - "integrity": "sha512-RwGAGXPl9kSXwdNTafkOEuFrTBD5SA2B3iEB96xi8+xu5ddUa/cpvyVCSNn+asgLCTHkb5ZxN8gbuibYJi4s1w==", + "version": "18.3.10", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.10.tgz", + "integrity": 
"sha512-02sAAlBnP39JgXwkAq3PeU9DVaaGpZyF3MGcC0MKgQVkZor5IiiDAipVaxQHtDJAmO4GIy/rVBy/LzVj76Cyqg==", "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -5853,9 +3645,10 @@ } }, "node_modules/@types/react-dom": { - "version": "18.2.14", + "version": "18.3.0", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", + "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", "devOptional": true, - "license": "MIT", "dependencies": { "@types/react": "*" } @@ -5896,9 +3689,10 @@ } }, "node_modules/@types/react-window": { - "version": "1.8.7", + "version": "1.8.8", + "resolved": "https://registry.npmjs.org/@types/react-window/-/react-window-1.8.8.tgz", + "integrity": "sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==", "dev": true, - "license": "MIT", "dependencies": { "@types/react": "*" } @@ -5912,9 +3706,10 @@ } }, "node_modules/@types/resolve": { - "version": "1.20.4", - "dev": true, - "license": "MIT" + "version": "1.20.6", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.6.tgz", + "integrity": "sha512-A4STmOXPhMUtHH+S6ymgE2GiBSMqf4oTvcQZMcHzokuTLVYzXTB8ttjcgxOVaAp2lGwEdzZ0J+cRbbeevQj1UQ==", + "dev": true }, "node_modules/@types/semver": { "version": "7.5.4", @@ -5981,6 +3776,12 @@ "version": "0.0.3", "license": "MIT" }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "dev": true + }, "node_modules/@types/yargs": { "version": "17.0.29", "dev": true, @@ -6180,69 +3981,111 @@ "license": "ISC" }, "node_modules/@vitejs/plugin-react": { - "version": "4.1.1", - "license": "MIT", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.3.2.tgz", + "integrity": "sha512-hieu+o05v4glEBucTcKMK3dlES0OeJlD9YVOAPraVMOInBCwzumaIFiUjr4bHK7NPgnAHgiskUoceKercrN8vg==", "dependencies": { - "@babel/core": "^7.23.2", - "@babel/plugin-transform-react-jsx-self": "^7.22.5", - "@babel/plugin-transform-react-jsx-source": "^7.22.5", - "@types/babel__core": "^7.20.3", - "react-refresh": "^0.14.0" + "@babel/core": "^7.25.2", + "@babel/plugin-transform-react-jsx-self": "^7.24.7", + "@babel/plugin-transform-react-jsx-source": "^7.24.7", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.14.2" }, "engines": { "node": "^14.18.0 || >=16.0.0" }, "peerDependencies": { - "vite": "^4.2.0" + "vite": "^4.2.0 || ^5.0.0" } }, - "node_modules/@yarnpkg/esbuild-plugin-pnp": { - "version": "3.0.0-rc.15", + "node_modules/@vitest/expect": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.0.5.tgz", + "integrity": "sha512-yHZtwuP7JZivj65Gxoi8upUN2OzHTi3zVfjwdpu2WrvCZPLwsJ2Ey5ILIPccoW23dd/zQBlJ4/dhi7DWNyXCpA==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { - "tslib": "^2.4.0" + "@vitest/spy": "2.0.5", + "@vitest/utils": "2.0.5", + "chai": "^5.1.1", + "tinyrainbow": "^1.2.0" }, - "engines": { - "node": ">=14.15.0" + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/expect/node_modules/@vitest/pretty-format": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.0.5.tgz", + "integrity": "sha512-h8k+1oWHfwTkyTkb9egzwNMfJAEx4veaPSnMeKbVSjp4euqGSbQlm5+6VHwTr7u4FJslVVsUG5nopCaAYdOmSQ==", + "dev": 
true, + "dependencies": { + "tinyrainbow": "^1.2.0" }, - "peerDependencies": { - "esbuild": ">=0.10.0" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "node_modules/@yarnpkg/fslib": { - "version": "2.10.3", + "node_modules/@vitest/expect/node_modules/@vitest/utils": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.0.5.tgz", + "integrity": "sha512-d8HKbqIcya+GR67mkZbrzhS5kKhtp8dQLcmRZLGTscGVg7yImT82cIrhtn2L8+VujWcy6KZweApgNmPsTAO/UQ==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { - "@yarnpkg/libzip": "^2.3.0", - "tslib": "^1.13.0" + "@vitest/pretty-format": "2.0.5", + "estree-walker": "^3.0.3", + "loupe": "^3.1.1", + "tinyrainbow": "^1.2.0" }, - "engines": { - "node": ">=12 <14 || 14.2 - 14.9 || >14.10.0" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "node_modules/@yarnpkg/fslib/node_modules/tslib": { - "version": "1.14.1", + "node_modules/@vitest/expect/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", "dev": true, - "license": "0BSD" + "dependencies": { + "@types/estree": "^1.0.0" + } }, - "node_modules/@yarnpkg/libzip": { - "version": "2.3.0", + "node_modules/@vitest/pretty-format": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.1.tgz", + "integrity": "sha512-SjxPFOtuINDUW8/UkElJYQSFtnWX7tMksSGW0vfjxMneFqxVr8YJ979QpMbDW7g+BIiq88RAGDjf7en6rvLPPQ==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { - "@types/emscripten": "^1.39.6", - "tslib": "^1.13.0" + "tinyrainbow": "^1.2.0" }, - "engines": { - "node": ">=12 <14 || 14.2 - 14.9 || >14.10.0" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "node_modules/@yarnpkg/libzip/node_modules/tslib": { - "version": "1.14.1", + "node_modules/@vitest/spy": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.0.5.tgz", + "integrity": "sha512-c/jdthAhvJdpfVuaexSrnawxZz6pywlTPe84LUB2m/4t3rl2fTo9NFGBG4oWgaD+FTgDDV8hJ/nibT7IfH3JfA==", "dev": true, - "license": "0BSD" + "dependencies": { + "tinyspy": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.1.tgz", + "integrity": "sha512-Y6Q9TsI+qJ2CC0ZKj6VBb+T8UPz593N113nnUykqwANqhgf3QkZeHFlusgKLTqrnVHbj/XDKZcDHol+dxVT+rQ==", + "dev": true, + "dependencies": { + "@vitest/pretty-format": "2.1.1", + "loupe": "^3.1.1", + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } }, "node_modules/abab": { "version": "2.0.6", @@ -6266,8 +4109,9 @@ } }, "node_modules/acorn": { - "version": "8.11.2", - "license": "MIT", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", "bin": { "acorn": "bin/acorn" }, @@ -6300,14 +4144,6 @@ "node": ">=0.4.0" } }, - "node_modules/address": { - "version": "1.2.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", @@ -6320,18 +4156,6 @@ "node": ">= 6.0.0" } }, - 
"node_modules/aggregate-error": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/ajv": { "version": "6.12.6", "license": "MIT", @@ -6410,34 +4234,36 @@ } }, "node_modules/apache-arrow": { - "version": "13.0.0", - "resolved": "https://registry.npmjs.org/apache-arrow/-/apache-arrow-13.0.0.tgz", - "integrity": "sha512-3gvCX0GDawWz6KFNC28p65U+zGh/LZ6ZNKWNu74N6CQlKzxeoWHpi4CgEQsgRSEMuyrIIXi1Ea2syja7dwcHvw==", - "dependencies": { - "@types/command-line-args": "5.2.0", - "@types/command-line-usage": "5.0.2", - "@types/node": "20.3.0", - "@types/pad-left": "2.1.1", - "command-line-args": "5.2.1", - "command-line-usage": "7.0.1", - "flatbuffers": "23.5.26", + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/apache-arrow/-/apache-arrow-17.0.0.tgz", + "integrity": "sha512-X0p7auzdnGuhYMVKYINdQssS4EcKec9TCXyez/qtJt32DrIMGbzqiaMiQ0X6fQlQpw8Fl0Qygcv4dfRAr5Gu9Q==", + "dependencies": { + "@swc/helpers": "^0.5.11", + "@types/command-line-args": "^5.2.3", + "@types/command-line-usage": "^5.0.4", + "@types/node": "^20.13.0", + "command-line-args": "^5.2.1", + "command-line-usage": "^7.0.1", + "flatbuffers": "^24.3.25", "json-bignum": "^0.0.3", - "pad-left": "^2.1.0", - "tslib": "^2.5.3" + "tslib": "^2.6.2" }, "bin": { - "arrow2csv": "bin/arrow2csv.js" + "arrow2csv": "bin/arrow2csv.cjs" } }, "node_modules/apache-arrow/node_modules/@types/node": { - "version": "20.3.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.3.0.tgz", - "integrity": "sha512-cumHmIAf6On83X7yP+LrsEyUOf/YlociZelmpRYaGFydoaPdxdt80MAbu6vWerQT2COCp2nPvHdsbD7tHn/YlQ==" + "version": "20.16.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.16.10.tgz", + "integrity": "sha512-vQUKgWTjEIRFCvK6CyriPH3MZYiYlNy0fKiEYHWbcoWLEgs4opurGGKlebrTLqdSMIbXImH6XExNiIyNUv3WpA==", + "dependencies": { + "undici-types": "~6.19.2" + } }, - "node_modules/app-root-dir": { - "version": "1.0.2", - "dev": true, - "license": "MIT" + "node_modules/apache-arrow/node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" }, "node_modules/arg": { "version": "4.1.3", @@ -6448,17 +4274,6 @@ "version": "2.0.1", "license": "Python-2.0" }, - "node_modules/aria-hidden": { - "version": "1.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/aria-query": { "version": "5.1.3", "dev": true, @@ -6499,22 +4314,20 @@ "node": ">=8" } }, - "node_modules/assert": { - "version": "2.1.0", + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "is-nan": "^1.3.2", - "object-is": "^1.1.5", - "object.assign": "^4.1.4", - "util": "^0.12.5" + "engines": { + "node": ">=12" } }, "node_modules/ast-types": { "version": "0.16.1", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.16.1.tgz", + "integrity": "sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==", "dev": true, - "license": "MIT", 
"dependencies": { "tslib": "^2.0.1" }, @@ -6522,16 +4335,6 @@ "node": ">=4" } }, - "node_modules/async": { - "version": "3.2.5", - "dev": true, - "license": "MIT" - }, - "node_modules/async-limiter": { - "version": "1.0.1", - "dev": true, - "license": "MIT" - }, "node_modules/asynckit": { "version": "0.4.0", "license": "MIT" @@ -6584,22 +4387,15 @@ } }, "node_modules/axios": { - "version": "1.6.0", - "license": "MIT", + "version": "1.7.7", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz", + "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==", "dependencies": { - "follow-redirects": "^1.15.0", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, - "node_modules/babel-core": { - "version": "7.0.0-bridge.0", - "dev": true, - "license": "MIT", - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, "node_modules/babel-jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", @@ -6687,50 +4483,6 @@ "npm": ">=6" } }, - "node_modules/babel-plugin-polyfill-corejs2": { - "version": "0.4.6", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.22.6", - "@babel/helper-define-polyfill-provider": "^0.4.3", - "semver": "^6.3.1" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/babel-plugin-polyfill-corejs3": { - "version": "0.8.6", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.4.3", - "core-js-compat": "^3.33.1" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-regenerator": { - "version": "0.5.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.4.3" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, "node_modules/babel-preset-current-node-syntax": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", @@ -6802,8 +4554,9 @@ }, "node_modules/better-opn": { "version": "3.0.2", + "resolved": "https://registry.npmjs.org/better-opn/-/better-opn-3.0.2.tgz", + "integrity": "sha512-aVNobHnJqLiUelTaHat9DZ1qM2w0C0Eym4LPI/3JxOnSokGVdsl1T1kN7TFvsEAD8G47A6VKQ0TVHqbBnYMJlQ==", "dev": true, - "license": "MIT", "dependencies": { "open": "^8.0.4" }, @@ -6811,14 +4564,6 @@ "node": ">=12.0.0" } }, - "node_modules/big-integer": { - "version": "1.6.51", - "dev": true, - "license": "Unlicense", - "engines": { - "node": ">=0.6" - } - }, "node_modules/binary-extensions": { "version": "2.2.0", "license": "MIT", @@ -6826,56 +4571,6 @@ "node": ">=8" } }, - "node_modules/bl": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/bl/node_modules/readable-stream": { - "version": "3.6.2", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/bl/node_modules/safe-buffer": { - "version": "5.2.1", - "dev": true, - "funding": [ - { - "type": "github", - "url": 
"https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/bl/node_modules/string_decoder": { - "version": "1.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, "node_modules/body-parser": { "version": "1.20.2", "license": "MIT", @@ -6909,17 +4604,6 @@ "version": "2.0.0", "license": "MIT" }, - "node_modules/bplist-parser": { - "version": "0.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "big-integer": "^1.6.44" - }, - "engines": { - "node": ">= 5.10.0" - } - }, "node_modules/brace-expansion": { "version": "2.0.1", "license": "MIT", @@ -6939,20 +4623,14 @@ }, "node_modules/browser-assert": { "version": "1.2.1", + "resolved": "https://registry.npmjs.org/browser-assert/-/browser-assert-1.2.1.tgz", + "integrity": "sha512-nfulgvOR6S4gt9UKCeGJOuSGBPGiFT6oQ/2UBnvTY/5aQ1PnksW72fhZkM30DzoRRv2WpwZf1vHHEr3mtuXIWQ==", "dev": true }, - "node_modules/browserify-zlib": { - "version": "0.1.4", - "dev": true, - "license": "MIT", - "dependencies": { - "pako": "~0.2.0" - } - }, "node_modules/browserslist": { - "version": "4.23.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", - "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": "sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", "funding": [ { "type": "opencollective", @@ -6968,10 +4646,10 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001587", - "electron-to-chromium": "^1.4.668", - "node-releases": "^2.0.14", - "update-browserslist-db": "^1.0.13" + "caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" }, "bin": { "browserslist": "cli.js" @@ -6980,55 +4658,24 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, - "node_modules/bs-logger": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", - "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", - "dev": true, - "dependencies": { - "fast-json-stable-stringify": "2.x" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/bser": { - "version": "2.1.1", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "node-int64": "^0.4.0" - } - }, - "node_modules/buffer": { - "version": "5.7.1", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" } }, - "node_modules/buffer-crc32": { - "version": "0.2.13", + "node_modules/bser": { + "version": "2.1.1", "dev": true, - "license": "MIT", - "engines": { - 
"node": "*" + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" } }, "node_modules/buffer-from": { @@ -7083,9 +4730,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001610", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001610.tgz", - "integrity": "sha512-QFutAY4NgaelojVMjY63o6XlZyORPaLfyMnsl3HgnWdJUcX6K0oaJymHjH8PT5Gk7sTm8rvC/c5COUQKXqmOMA==", + "version": "1.0.30001666", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001666.tgz", + "integrity": "sha512-gD14ICmoV5ZZM1OdzPWmpx+q4GyefaK06zi8hmfHV5xe4/2nOQX3+Dw5o+fSqOws2xVwL9j+anOPFwHzdEdV4g==", "funding": [ { "type": "opencollective", @@ -7109,6 +4756,22 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/chai": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.1.tgz", + "integrity": "sha512-pT1ZgP8rPNqUgieVaEY+ryQr6Q4HXNg8Ei9UnLUrjN4IA7dvQC5JB+/kxVcPNDHyBcc/26CXPkbNzq3qwrOEKA==", + "dev": true, + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/chalk": { "version": "4.1.2", "license": "MIT", @@ -7169,13 +4832,23 @@ } }, "node_modules/chart.js": { - "version": "4.4.0", - "license": "MIT", + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.4.tgz", + "integrity": "sha512-emICKGBABnxhMjUjlYRR12PmOXhJ2eJjEHL2/dZlWjxRAZT1D8xplLFq5M0tMQK8ja+wBS/tuVEJB5C6r7VxJA==", "dependencies": { "@kurkle/color": "^0.3.0" }, "engines": { - "pnpm": ">=7" + "pnpm": ">=8" + } + }, + "node_modules/check-error": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", + "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", + "dev": true, + "engines": { + "node": ">= 16" } }, "node_modules/chokidar": { @@ -7203,11 +4876,6 @@ "fsevents": "~2.3.2" } }, - "node_modules/chownr": { - "version": "1.1.4", - "dev": true, - "license": "ISC" - }, "node_modules/ci-info": { "version": "3.9.0", "dev": true, @@ -7232,76 +4900,6 @@ "version": "2.3.2", "license": "MIT" }, - "node_modules/clean-stack": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/cli-cursor": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "restore-cursor": "^3.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-spinners": { - "version": "2.9.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-table3": { - "version": "0.6.3", - "dev": true, - "license": "MIT", - "dependencies": { - "string-width": "^4.2.0" - }, - "engines": { - "node": "10.* || >= 12.*" - }, - "optionalDependencies": { - "@colors/colors": "1.5.0" - } - }, - "node_modules/cli-table3/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-table3/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/cliui": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", @@ -7359,38 +4957,6 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/clone": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8" - } - }, - "node_modules/clone-deep": { - "version": "4.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "is-plain-object": "^2.0.4", - "kind-of": "^6.0.2", - "shallow-clone": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/clone-deep/node_modules/is-plain-object": { - "version": "2.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -7421,11 +4987,6 @@ "version": "1.1.4", "license": "MIT" }, - "node_modules/colorette": { - "version": "2.0.20", - "dev": true, - "license": "MIT" - }, "node_modules/combined-stream": { "version": "1.0.8", "license": "MIT", @@ -7495,8 +5056,9 @@ }, "node_modules/commondir": { "version": "1.0.1", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", + "dev": true }, "node_modules/compressible": { "version": "2.0.18", @@ -7550,20 +5112,6 @@ "version": "0.0.1", "license": "MIT" }, - "node_modules/concat-stream": { - "version": "1.6.2", - "dev": true, - "engines": [ - "node >= 0.8" - ], - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^2.2.2", - "typedarray": "^0.0.6" - } - }, "node_modules/content-disposition": { "version": "0.5.4", "license": "MIT", @@ -7615,23 +5163,6 @@ "version": "1.0.6", "license": "MIT" }, - "node_modules/core-js-compat": { - "version": "3.33.2", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.22.1" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/core-util-is": { - "version": "1.0.3", - "dev": true, - "license": "MIT" - }, "node_modules/cors": { "version": "2.8.5", "dev": true, @@ -7696,13 +5227,11 @@ "node": ">= 8" } }, - "node_modules/crypto-random-string": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } + "node_modules/css.escape": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", + "integrity": "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==", + "dev": true }, "node_modules/cssesc": { "version": "3.0.0", @@ -7821,6 +5350,15 @@ } } }, + "node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/deep-equal": { "version": "2.2.2", "dev": true, @@ -7862,32 +5400,6 @@ "node": ">=0.10.0" } }, - "node_modules/default-browser-id": { 
- "version": "3.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "bplist-parser": "^0.2.0", - "untildify": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/defaults": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "clone": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/define-data-property": { "version": "1.1.1", "license": "MIT", @@ -7902,8 +5414,9 @@ }, "node_modules/define-lazy-prop": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } @@ -7924,32 +5437,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/defu": { - "version": "6.1.3", - "dev": true, - "license": "MIT" - }, - "node_modules/del": { - "version": "6.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "globby": "^11.0.1", - "graceful-fs": "^4.2.4", - "is-glob": "^4.0.1", - "is-path-cwd": "^2.2.0", - "is-path-inside": "^3.0.2", - "p-map": "^4.0.0", - "rimraf": "^3.0.2", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/delayed-stream": { "version": "1.0.0", "license": "MIT", @@ -7979,14 +5466,6 @@ "npm": "1.2.8000 || >= 1.4.16" } }, - "node_modules/detect-indent": { - "version": "6.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -7996,35 +5475,6 @@ "node": ">=8" } }, - "node_modules/detect-node-es": { - "version": "1.1.0", - "dev": true, - "license": "MIT" - }, - "node_modules/detect-package-manager": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "execa": "^5.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/detect-port": { - "version": "1.5.1", - "dev": true, - "license": "MIT", - "dependencies": { - "address": "^1.0.1", - "debug": "4" - }, - "bin": { - "detect": "bin/detect-port.js", - "detect-port": "bin/detect-port.js" - } - }, "node_modules/didyoumean": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", @@ -8110,25 +5560,6 @@ "node": ">=12" } }, - "node_modules/dotenv": { - "version": "16.3.1", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/motdotla/dotenv?sponsor=1" - } - }, - "node_modules/dotenv-expand": { - "version": "10.0.0", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - } - }, "node_modules/downshift": { "version": "7.6.2", "license": "MIT", @@ -8147,17 +5578,6 @@ "version": "17.0.2", "license": "MIT" }, - "node_modules/duplexify": { - "version": "3.7.1", - "dev": true, - "license": "MIT", - "dependencies": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", - "stream-shift": "^1.0.0" - } - }, "node_modules/eastasianwidth": { "version": "0.2.0", "license": "MIT" @@ -8166,25 +5586,10 @@ "version": "1.1.1", "license": "MIT" }, - "node_modules/ejs": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", - "integrity": 
"sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", - "dev": true, - "dependencies": { - "jake": "^10.8.5" - }, - "bin": { - "ejs": "bin/cli.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/electron-to-chromium": { - "version": "1.4.736", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.736.tgz", - "integrity": "sha512-Rer6wc3ynLelKNM4lOCg7/zPQj8tPOCB2hzD32PX9wd3hgRRi9MxEbmkFCokzcEhRVMiOVLjnL9ig9cefJ+6+Q==" + "version": "1.5.31", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.31.tgz", + "integrity": "sha512-QcDoBbQeYt0+3CWcK/rEbuHvwpbT/8SV9T3OSgs6cX1FlcUAkgrkqbg9zLnDrMM/rLamzQwal4LYFCiWk861Tg==" }, "node_modules/emittery": { "version": "0.13.1", @@ -8209,14 +5614,6 @@ "node": ">= 0.8" } }, - "node_modules/end-of-stream": { - "version": "1.4.4", - "dev": true, - "license": "MIT", - "dependencies": { - "once": "^1.4.0" - } - }, "node_modules/entities": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", @@ -8229,17 +5626,6 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/envinfo": { - "version": "7.11.0", - "dev": true, - "license": "MIT", - "bin": { - "envinfo": "dist/cli.js" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/error-ex": { "version": "1.3.2", "license": "MIT", @@ -8266,50 +5652,56 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es-module-lexer": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.5.4.tgz", + "integrity": "sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw==", + "dev": true + }, "node_modules/esbuild": { - "version": "0.18.20", + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.23.1.tgz", + "integrity": "sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==", + "dev": true, "hasInstallScript": true, - "license": "MIT", "bin": { "esbuild": "bin/esbuild" }, "engines": { - "node": ">=12" + "node": ">=18" }, "optionalDependencies": { - "@esbuild/android-arm": "0.18.20", - "@esbuild/android-arm64": "0.18.20", - "@esbuild/android-x64": "0.18.20", - "@esbuild/darwin-arm64": "0.18.20", - "@esbuild/darwin-x64": "0.18.20", - "@esbuild/freebsd-arm64": "0.18.20", - "@esbuild/freebsd-x64": "0.18.20", - "@esbuild/linux-arm": "0.18.20", - "@esbuild/linux-arm64": "0.18.20", - "@esbuild/linux-ia32": "0.18.20", - "@esbuild/linux-loong64": "0.18.20", - "@esbuild/linux-mips64el": "0.18.20", - "@esbuild/linux-ppc64": "0.18.20", - "@esbuild/linux-riscv64": "0.18.20", - "@esbuild/linux-s390x": "0.18.20", - "@esbuild/linux-x64": "0.18.20", - "@esbuild/netbsd-x64": "0.18.20", - "@esbuild/openbsd-x64": "0.18.20", - "@esbuild/sunos-x64": "0.18.20", - "@esbuild/win32-arm64": "0.18.20", - "@esbuild/win32-ia32": "0.18.20", - "@esbuild/win32-x64": "0.18.20" - } - }, - "node_modules/esbuild-plugin-alias": { - "version": "0.2.1", - "dev": true, - "license": "MIT" + "@esbuild/aix-ppc64": "0.23.1", + "@esbuild/android-arm": "0.23.1", + "@esbuild/android-arm64": "0.23.1", + "@esbuild/android-x64": "0.23.1", + "@esbuild/darwin-arm64": "0.23.1", + "@esbuild/darwin-x64": "0.23.1", + "@esbuild/freebsd-arm64": "0.23.1", + "@esbuild/freebsd-x64": "0.23.1", + "@esbuild/linux-arm": "0.23.1", + "@esbuild/linux-arm64": "0.23.1", + "@esbuild/linux-ia32": "0.23.1", + "@esbuild/linux-loong64": 
"0.23.1", + "@esbuild/linux-mips64el": "0.23.1", + "@esbuild/linux-ppc64": "0.23.1", + "@esbuild/linux-riscv64": "0.23.1", + "@esbuild/linux-s390x": "0.23.1", + "@esbuild/linux-x64": "0.23.1", + "@esbuild/netbsd-x64": "0.23.1", + "@esbuild/openbsd-arm64": "0.23.1", + "@esbuild/openbsd-x64": "0.23.1", + "@esbuild/sunos-x64": "0.23.1", + "@esbuild/win32-arm64": "0.23.1", + "@esbuild/win32-ia32": "0.23.1", + "@esbuild/win32-x64": "0.23.1" + } }, "node_modules/esbuild-register": { - "version": "3.5.0", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/esbuild-register/-/esbuild-register-3.6.0.tgz", + "integrity": "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==", "dev": true, - "license": "MIT", "dependencies": { "debug": "^4.3.4" }, @@ -8318,8 +5710,9 @@ } }, "node_modules/escalade": { - "version": "3.1.1", - "license": "MIT", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "engines": { "node": ">=6" } @@ -8683,33 +6076,6 @@ "version": "3.0.2", "license": "MIT" }, - "node_modules/extract-zip": { - "version": "1.7.0", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "concat-stream": "^1.6.2", - "debug": "^2.6.9", - "mkdirp": "^0.5.4", - "yauzl": "^2.10.0" - }, - "bin": { - "extract-zip": "cli.js" - } - }, - "node_modules/extract-zip/node_modules/debug": { - "version": "2.6.9", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/extract-zip/node_modules/ms": { - "version": "2.0.0", - "dev": true, - "license": "MIT" - }, "node_modules/fast-deep-equal": { "version": "3.1.3", "license": "MIT" @@ -8751,19 +6117,6 @@ "bser": "2.1.1" } }, - "node_modules/fd-slicer": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "pend": "~1.2.0" - } - }, - "node_modules/fetch-retry": { - "version": "5.0.6", - "dev": true, - "license": "MIT" - }, "node_modules/file-entry-cache": { "version": "6.0.1", "license": "MIT", @@ -8778,34 +6131,6 @@ "version": "2.0.5", "license": "MIT" }, - "node_modules/file-system-cache": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "fs-extra": "11.1.1", - "ramda": "0.29.0" - } - }, - "node_modules/filelist": { - "version": "1.0.4", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "minimatch": "^5.0.1" - } - }, - "node_modules/filelist/node_modules/minimatch": { - "version": "5.1.6", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/fill-range": { "version": "7.0.1", "license": "MIT", @@ -8845,8 +6170,9 @@ }, "node_modules/find-cache-dir": { "version": "3.3.2", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", + "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", "dev": true, - "license": "MIT", "dependencies": { "commondir": "^1.0.1", "make-dir": "^3.0.2", @@ -8859,28 +6185,6 @@ "url": "https://github.com/avajs/find-cache-dir?sponsor=1" } }, - "node_modules/find-cache-dir/node_modules/make-dir": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - 
"node_modules/find-cache-dir/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/find-replace": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz", @@ -8923,22 +6227,14 @@ } }, "node_modules/flatbuffers": { - "version": "23.5.26", - "resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-23.5.26.tgz", - "integrity": "sha512-vE+SI9vrJDwi1oETtTIFldC/o9GsVKRM+s6EL0nQgxXlYV1Vc4Tk30hj4xGICftInKQKj1F3up2n8UbIVobISQ==" + "version": "24.3.25", + "resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-24.3.25.tgz", + "integrity": "sha512-3HDgPbgiwWMI9zVB7VYBHaMrbOO7Gm0v+yD2FV/sCKj+9NDeVL7BOBYUuhWAQGKWOzBo8S9WdMvV0eixO233XQ==" }, "node_modules/flatted": { "version": "3.2.9", "license": "ISC" }, - "node_modules/flow-parser": { - "version": "0.220.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/follow-redirects": { "version": "1.15.6", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", @@ -9028,15 +6324,11 @@ "node": ">= 0.6" } }, - "node_modules/fs-constants": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, "node_modules/fs-extra": { - "version": "11.1.1", + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", + "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", "dev": true, - "license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", @@ -9046,24 +6338,15 @@ "node": ">=14.14" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/fs.realpath": { "version": "1.0.0", "license": "ISC" }, "node_modules/fsevents": { - "version": "2.3.2", - "license": "MIT", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, "optional": true, "os": [ "darwin" @@ -9103,6 +6386,15 @@ "node": "6.* || 8.* || >= 10.*" } }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + "engines": { + "node": "*" + } + }, "node_modules/get-intrinsic": { "version": "1.2.2", "license": "MIT", @@ -9116,22 +6408,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/get-nonce": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/get-npm-tarball-url": { - "version": "2.0.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.17" - } - }, "node_modules/get-package-type": { "version": "0.1.0", "dev": true, @@ -9140,17 +6416,6 @@ "node": ">=8.0.0" } }, - "node_modules/get-port": { - "version": "5.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/get-stream": { "version": "6.0.1", "dev": true, @@ -9162,50 +6427,11 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/giget": { - "version": "1.1.3", - 
"dev": true, - "license": "MIT", - "dependencies": { - "colorette": "^2.0.20", - "defu": "^6.1.2", - "https-proxy-agent": "^7.0.2", - "mri": "^1.2.0", - "node-fetch-native": "^1.4.0", - "pathe": "^1.1.1", - "tar": "^6.2.0" - }, - "bin": { - "giget": "dist/cli.mjs" - } - }, - "node_modules/giget/node_modules/agent-base": { - "version": "7.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/giget/node_modules/https-proxy-agent": { - "version": "7.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.0.2", - "debug": "4" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/github-slugger": { - "version": "1.5.0", - "dev": true, - "license": "ISC" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz", + "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==", + "dev": true }, "node_modules/glob": { "version": "7.2.3", @@ -9253,11 +6479,6 @@ "glob": "^7.1.6" } }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "dev": true, - "license": "BSD-2-Clause" - }, "node_modules/globals": { "version": "13.23.0", "license": "MIT", @@ -9319,42 +6540,6 @@ "version": "1.4.0", "license": "MIT" }, - "node_modules/gunzip-maybe": { - "version": "1.4.2", - "dev": true, - "license": "MIT", - "dependencies": { - "browserify-zlib": "^0.1.4", - "is-deflate": "^1.0.0", - "is-gzip": "^1.0.0", - "peek-stream": "^1.1.0", - "pumpify": "^1.3.3", - "through2": "^2.0.3" - }, - "bin": { - "gunzip-maybe": "bin.js" - } - }, - "node_modules/handlebars": { - "version": "4.7.8", - "dev": true, - "license": "MIT", - "dependencies": { - "minimist": "^1.2.5", - "neo-async": "^2.6.2", - "source-map": "^0.6.1", - "wordwrap": "^1.0.0" - }, - "bin": { - "handlebars": "bin/handlebars" - }, - "engines": { - "node": ">=0.4.7" - }, - "optionalDependencies": { - "uglify-js": "^3.1.4" - } - }, "node_modules/has-bigints": { "version": "1.0.2", "dev": true, @@ -9400,28 +6585,94 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-tostringtag": { - "version": "1.0.0", + "node_modules/has-tostringtag": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.0", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hast-util-heading-rank": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-heading-rank/-/hast-util-heading-rank-3.0.0.tgz", + "integrity": "sha512-EJKb8oMUXVHcWZTDepnr+WNbfnXKFNf9duMesmr4S8SXTJBJ9M4Yok08pu9vxdJwdlGRhVumk9mEhkEvKGifwA==", + "dev": true, + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-heading-rank/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dev": true, + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-is-element": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz", + "integrity": "sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==", + "dev": true, + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-is-element/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dev": true, + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/hast-util-to-string": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-3.0.1.tgz", + "integrity": "sha512-XelQVTDWvqcl3axRfI0xSeoVKzyIFPwsAGSLIsKdJKQMXDYJS4WYrBNF/8J7RdhIcFI2BOHgAifggsvsxp/3+A==", "dev": true, - "license": "MIT", "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" + "@types/hast": "^3.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/hasown": { - "version": "2.0.0", - "license": "MIT", + "node_modules/hast-util-to-string/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dev": true, "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" + "@types/unist": "*" } }, "node_modules/hast-util-whitespace": { @@ -9454,11 +6705,6 @@ "version": "16.13.1", "license": "MIT" }, - "node_modules/hosted-git-info": { - "version": "2.8.9", - "dev": true, - "license": "ISC" - }, "node_modules/html-encoding-sniffer": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", @@ -9582,25 +6828,6 @@ "node": ">=0.10.0" } }, - "node_modules/ieee754": { - "version": "1.2.1", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "BSD-3-Clause" - }, "node_modules/ignore": { "version": "5.2.4", "license": "MIT", @@ -9661,8 +6888,9 @@ }, "node_modules/indent-string": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } @@ -9696,20 +6924,6 @@ "node": ">= 0.4" } }, - "node_modules/invariant": { - "version": "2.2.4", - "dev": true, - "license": "MIT", - "dependencies": { - "loose-envify": "^1.0.0" - } - }, - "node_modules/ip": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.1.tgz", - "integrity": "sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==", - "dev": true - }, "node_modules/ipaddr.js": { "version": "1.9.1", "license": "MIT", @@ -9718,11 +6932,15 @@ } }, "node_modules/is-absolute-url": { - "version": "3.0.3", + "version": "4.0.1", + "resolved": 
"https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-4.0.1.tgz", + "integrity": "sha512-/51/TKE88Lmm7Gc4/8btclNXWS+g50wXhYJq8HWIBAGUBnoAdRu1aXeh364t/O7wXDAcTJDP8PNuNKWUDWie+A==", "dev": true, - "license": "MIT", "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/is-arguments": { @@ -9849,15 +7067,11 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-deflate": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, "node_modules/is-docker": { "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", "dev": true, - "license": "MIT", "bin": { "is-docker": "cli.js" }, @@ -9893,8 +7107,9 @@ }, "node_modules/is-generator-function": { "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", "dev": true, - "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -9915,22 +7130,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-gzip": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-interactive": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/is-map": { "version": "2.0.2", "dev": true, @@ -9939,21 +7138,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-nan": { - "version": "1.3.2", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.0", - "define-properties": "^1.1.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-number": { "version": "7.0.0", "license": "MIT", @@ -9975,14 +7159,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-path-cwd": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/is-path-inside": { "version": "3.0.3", "license": "MIT", @@ -10101,17 +7277,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-unicode-supported": { - "version": "0.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-weakmap": { "version": "2.0.1", "dev": true, @@ -10134,8 +7299,9 @@ }, "node_modules/is-wsl": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", "dev": true, - "license": "MIT", "dependencies": { "is-docker": "^2.0.0" }, @@ -10152,14 +7318,6 @@ "version": "2.0.0", "license": "ISC" }, - "node_modules/isobject": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/istanbul-lib-coverage": { "version": "3.2.1", "dev": true, @@ -10268,23 +7426,6 @@ "@pkgjs/parseargs": "^0.11.0" } }, - "node_modules/jake": { - "version": "10.8.7", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "async": "^3.2.3", - "chalk": "^4.0.2", - "filelist": "^1.0.4", - "minimatch": "^3.1.2" - }, - "bin": { - "jake": "bin/cli.js" - 
}, - "engines": { - "node": ">=10" - } - }, "node_modules/jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", @@ -10910,61 +8051,13 @@ "js-yaml": "bin/js-yaml.js" } }, - "node_modules/jscodeshift": { - "version": "0.14.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.13.16", - "@babel/parser": "^7.13.16", - "@babel/plugin-proposal-class-properties": "^7.13.0", - "@babel/plugin-proposal-nullish-coalescing-operator": "^7.13.8", - "@babel/plugin-proposal-optional-chaining": "^7.13.12", - "@babel/plugin-transform-modules-commonjs": "^7.13.8", - "@babel/preset-flow": "^7.13.13", - "@babel/preset-typescript": "^7.13.0", - "@babel/register": "^7.13.16", - "babel-core": "^7.0.0-bridge.0", - "chalk": "^4.1.2", - "flow-parser": "0.*", - "graceful-fs": "^4.2.4", - "micromatch": "^4.0.4", - "neo-async": "^2.5.0", - "node-dir": "^0.1.17", - "recast": "^0.21.0", - "temp": "^0.8.4", - "write-file-atomic": "^2.3.0" - }, - "bin": { - "jscodeshift": "bin/jscodeshift.js" - }, - "peerDependencies": { - "@babel/preset-env": "^7.1.6" - } - }, - "node_modules/jscodeshift/node_modules/ast-types": { - "version": "0.15.2", - "dev": true, - "license": "MIT", - "dependencies": { - "tslib": "^2.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/jscodeshift/node_modules/recast": { - "version": "0.21.5", + "node_modules/jsdoc-type-pratt-parser": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-4.1.0.tgz", + "integrity": "sha512-Hicd6JK5Njt2QB6XYFS7ok9e37O8AYk3jTcppG4YVQnYjOemymvTcmc7OWsmq/Qqj5TdRFO5/x/tIPmBeRtGHg==", "dev": true, - "license": "MIT", - "dependencies": { - "ast-types": "0.15.2", - "esprima": "~4.0.0", - "source-map": "~0.6.1", - "tslib": "^2.0.1" - }, "engines": { - "node": ">= 4" + "node": ">=12.0.0" } }, "node_modules/jsdom": { @@ -11058,8 +8151,9 @@ }, "node_modules/jsonfile": { "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "dev": true, - "license": "MIT", "dependencies": { "universalify": "^2.0.0" }, @@ -11082,14 +8176,6 @@ "json-buffer": "3.0.1" } }, - "node_modules/kind-of": { - "version": "6.0.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/kleur": { "version": "3.0.3", "dev": true, @@ -11098,19 +8184,6 @@ "node": ">=6" } }, - "node_modules/lazy-universal-dotenv": { - "version": "4.0.0", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "app-root-dir": "^1.0.2", - "dotenv": "^16.0.0", - "dotenv-expand": "^10.0.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, "node_modules/leven": { "version": "3.1.0", "dev": true, @@ -11157,8 +8230,9 @@ }, "node_modules/lodash": { "version": "4.17.21", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true }, "node_modules/lodash.assignwith": { "version": "4.2.0", @@ -11170,11 +8244,6 @@ "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "dev": true, - "license": "MIT" - }, 
"node_modules/lodash.difference": { "version": "4.5.0", "license": "MIT" @@ -11189,21 +8258,6 @@ "version": "4.6.2", "license": "MIT" }, - "node_modules/log-symbols": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.1.0", - "is-unicode-supported": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/longest-streak": { "version": "3.1.0", "license": "MIT", @@ -11222,14 +8276,13 @@ "loose-envify": "cli.js" } }, - "node_modules/lru-cache": { - "version": "6.0.0", - "license": "ISC", + "node_modules/loupe": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.1.tgz", + "integrity": "sha512-edNu/8D5MKVfGVFRhFf8aAxiTM6Wumfz5XsaatSxlD3w4R1d/WEKUTydCdPGbl9K7QG/Ca3GnDV2sIKIpXRQcw==", + "dev": true, "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" + "get-func-name": "^2.0.1" } }, "node_modules/lz-string": { @@ -11252,23 +8305,27 @@ } }, "node_modules/make-dir": { - "version": "2.1.0", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dev": true, - "license": "MIT", "dependencies": { - "pify": "^4.0.1", - "semver": "^5.6.0" + "semver": "^6.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/make-dir/node_modules/semver": { - "version": "5.7.2", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, - "license": "ISC", "bin": { - "semver": "bin/semver" + "semver": "bin/semver.js" } }, "node_modules/make-error": { @@ -11286,8 +8343,9 @@ }, "node_modules/map-or-similar": { "version": "1.5.0", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/map-or-similar/-/map-or-similar-1.5.0.tgz", + "integrity": "sha512-0aF7ZmVon1igznGI4VS30yugpduQW3y3GkcgGJOp7d8x8QrizhigUxjI/m2UojsXXto+jLAH3KSz+xOJTiORjg==", + "dev": true }, "node_modules/markdown-table": { "version": "3.0.3", @@ -11298,62 +8356,15 @@ } }, "node_modules/markdown-to-jsx": { - "version": "7.3.2", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-7.5.0.tgz", + "integrity": "sha512-RrBNcMHiFPcz/iqIj0n3wclzHXjwS7mzjBNWecKKVhNTIxQepIix6Il/wZCn2Cg5Y1ow2Qi84+eJrryFRWBEWw==", "dev": true, - "license": "MIT", "engines": { - "node": ">= 10" - }, - "peerDependencies": { - "react": ">= 0.14.0" - } - }, - "node_modules/mdast-util-definitions": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "unist-util-visit": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-definitions/node_modules/unist-util-is": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-definitions/node_modules/unist-util-visit": { - "version": "2.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": 
"https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents": { - "version": "3.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0" + "node": ">= 10" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "peerDependencies": { + "react": ">= 0.14.0" } }, "node_modules/mdast-util-find-and-replace": { @@ -11569,8 +8580,9 @@ }, "node_modules/memoizerific": { "version": "1.11.3", + "resolved": "https://registry.npmjs.org/memoizerific/-/memoizerific-1.11.3.tgz", + "integrity": "sha512-/EuHYwAPdLtXwAwSZkh/Gutery6pD2KYd44oQLhAvQp/50mpyduZh8Q7PYHXTCJ+wuXxt7oij2LXyIJOOYFPog==", "dev": true, - "license": "MIT", "dependencies": { "map-or-similar": "^1.5.0" } @@ -12153,8 +9165,9 @@ }, "node_modules/min-indent": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", "dev": true, - "license": "MIT", "engines": { "node": ">=4" } @@ -12179,51 +9192,13 @@ }, "node_modules/minimist": { "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", "dev": true, - "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/minipass": { - "version": "3.3.6", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/mkdirp": { - "version": "0.5.6", - "dev": true, - "license": "MIT", - "dependencies": { - "minimist": "^1.2.6" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, - "node_modules/mkdirp-classic": { - "version": "0.5.3", - "dev": true, - "license": "MIT" - }, "node_modules/moment": { "version": "2.29.4", "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", @@ -12288,74 +9263,15 @@ "node": ">= 0.6" } }, - "node_modules/neo-async": { - "version": "2.6.2", - "dev": true, - "license": "MIT" - }, - "node_modules/node-dir": { - "version": "0.1.17", - "dev": true, - "license": "MIT", - "dependencies": { - "minimatch": "^3.0.2" - }, - "engines": { - "node": ">= 0.10.5" - } - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "dev": true, - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/node-fetch-native": { - "version": "1.4.1", - "dev": true, - "license": "MIT" - }, - "node_modules/node-fetch/node_modules/tr46": { - "version": "0.0.3", - "dev": true, - "license": "MIT" - }, - "node_modules/node-fetch/node_modules/webidl-conversions": { - "version": "3.0.1", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/node-fetch/node_modules/whatwg-url": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, "node_modules/node-int64": { "version": "0.4.0", "dev": true, "license": "MIT" }, "node_modules/node-releases": { - 
"version": "2.0.14", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", - "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==" + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==" }, "node_modules/nodemon": { "version": "3.0.1", @@ -12400,25 +9316,6 @@ "nopt": "bin/nopt.js" } }, - "node_modules/normalize-package-data": { - "version": "2.5.0", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "node_modules/normalize-package-data/node_modules/semver": { - "version": "5.7.2", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver" - } - }, "node_modules/normalize-path": { "version": "3.0.0", "license": "MIT", @@ -12553,8 +9450,9 @@ }, "node_modules/open": { "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", "dev": true, - "license": "MIT", "dependencies": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", @@ -12582,40 +9480,6 @@ "node": ">= 0.8.0" } }, - "node_modules/ora": { - "version": "5.4.1", - "dev": true, - "license": "MIT", - "dependencies": { - "bl": "^4.1.0", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-spinners": "^2.5.0", - "is-interactive": "^1.0.0", - "is-unicode-supported": "^0.1.0", - "log-symbols": "^4.1.0", - "strip-ansi": "^6.0.0", - "wcwidth": "^1.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/p-limit": { "version": "3.1.0", "license": "MIT", @@ -12642,20 +9506,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/p-map": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/p-try": { "version": "2.2.0", "dev": true, @@ -12664,22 +9514,6 @@ "node": ">=6" } }, - "node_modules/pad-left": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/pad-left/-/pad-left-2.1.0.tgz", - "integrity": "sha512-HJxs9K9AztdIQIAIa/OIazRAUW/L6B9hbQDxO4X07roW3eo9XqZc2ur9bn1StH9CnbbI9EgvejHQX7CBpCF1QA==", - "dependencies": { - "repeat-string": "^1.5.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pako": { - "version": "0.2.9", - "dev": true, - "license": "MIT" - }, "node_modules/papaparse": { "version": "5.4.1", "dev": true, @@ -12796,29 +9630,19 @@ "node": ">=8" } }, - "node_modules/pathe": { - "version": "1.1.1", - "dev": true, - "license": "MIT" - }, - "node_modules/peek-stream": { - "version": "1.1.3", + "node_modules/pathval": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", + 
"integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "duplexify": "^3.5.0", - "through2": "^2.0.3" + "engines": { + "node": ">= 14.16" } }, - "node_modules/pend": { - "version": "1.2.0", - "dev": true, - "license": "MIT" - }, "node_modules/picocolors": { - "version": "1.0.0", - "license": "ISC" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==" }, "node_modules/picomatch": { "version": "2.3.1", @@ -12830,14 +9654,6 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/pify": { - "version": "4.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/pirates": { "version": "4.0.6", "license": "MIT", @@ -12916,9 +9732,9 @@ } }, "node_modules/postcss": { - "version": "8.4.38", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", - "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", "funding": [ { "type": "opencollective", @@ -12935,8 +9751,8 @@ ], "dependencies": { "nanoid": "^3.3.7", - "picocolors": "^1.0.0", - "source-map-js": "^1.2.0" + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" }, "engines": { "node": "^10 || ^12 || >=14" @@ -13132,35 +9948,15 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/pretty-hrtime": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, "node_modules/process": { "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", "dev": true, - "license": "MIT", "engines": { "node": ">= 0.6.0" } }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/progress": { - "version": "2.0.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/prompts": { "version": "2.4.2", "dev": true, @@ -13219,34 +10015,6 @@ "version": "1.1.8", "license": "MIT" }, - "node_modules/pump": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "node_modules/pumpify": { - "version": "1.5.1", - "dev": true, - "license": "MIT", - "dependencies": { - "duplexify": "^3.6.0", - "inherits": "^2.0.3", - "pump": "^2.0.0" - } - }, - "node_modules/pumpify/node_modules/pump": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, "node_modules/punycode": { "version": "2.3.1", "license": "MIT", @@ -13254,77 +10022,6 @@ "node": ">=6" } }, - "node_modules/puppeteer-core": { - "version": "2.1.1", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@types/mime-types": "^2.1.0", - "debug": "^4.1.0", - "extract-zip": "^1.6.6", - "https-proxy-agent": "^4.0.0", - "mime": "^2.0.3", - "mime-types": "^2.1.25", - "progress": "^2.0.1", - "proxy-from-env": "^1.0.0", - 
"rimraf": "^2.6.1", - "ws": "^6.1.0" - }, - "engines": { - "node": ">=8.16.0" - } - }, - "node_modules/puppeteer-core/node_modules/agent-base": { - "version": "5.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/puppeteer-core/node_modules/https-proxy-agent": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "5", - "debug": "4" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/puppeteer-core/node_modules/mime": { - "version": "2.6.0", - "dev": true, - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/puppeteer-core/node_modules/rimraf": { - "version": "2.7.1", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - } - }, - "node_modules/puppeteer-core/node_modules/ws": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.3.tgz", - "integrity": "sha512-jmTjYU0j60B+vHey6TfR3Z7RD61z/hmxBS3VMSGIrroOWXQEneK1zNuotOUrGyBHQj0yrpsLHPWtigEFd13ndA==", - "dev": true, - "dependencies": { - "async-limiter": "~1.0.0" - } - }, "node_modules/pure-rand": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.0.4.tgz", @@ -13378,15 +10075,6 @@ ], "license": "MIT" }, - "node_modules/ramda": { - "version": "0.29.0", - "dev": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/ramda" - } - }, "node_modules/range-parser": { "version": "1.2.1", "license": "MIT", @@ -13423,16 +10111,16 @@ } }, "node_modules/rc-table": { - "version": "7.36.0", - "resolved": "https://registry.npmjs.org/rc-table/-/rc-table-7.36.0.tgz", - "integrity": "sha512-3xVcdCC5OLeOOhaCg+5Lps2oPreM/GWXmUXWTSX4p6vF7F76ABM4dfPpMJ9Dnf5yGRyh+8pe7FRyhRVnWw2H/w==", + "version": "7.48.0", + "resolved": "https://registry.npmjs.org/rc-table/-/rc-table-7.48.0.tgz", + "integrity": "sha512-LC0ojl/SYjeQa2eqBCvnbIFn/ebefkUm8TR67dTpuBo9t7ya0ucOPC9rMARjC4KeimyOGyOyCYlX++eg0Udk5g==", "dependencies": { "@babel/runtime": "^7.10.1", "@rc-component/context": "^1.4.0", "classnames": "^2.2.5", "rc-resize-observer": "^1.1.0", - "rc-util": "^5.37.0", - "rc-virtual-list": "^3.11.1" + "rc-util": "^5.41.0", + "rc-virtual-list": "^3.14.2" }, "engines": { "node": ">=8.x" @@ -13443,9 +10131,9 @@ } }, "node_modules/rc-util": { - "version": "5.38.1", - "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.38.1.tgz", - "integrity": "sha512-e4ZMs7q9XqwTuhIK7zBIVFltUtMSjphuPPQXHoHlzRzNdOwUxDejo0Zls5HYaJfRKNURcsS/ceKVULlhjBrxng==", + "version": "5.43.0", + "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.43.0.tgz", + "integrity": "sha512-AzC7KKOXFqAdIBqdGWepL9Xn7cm3vnAmjlHqUnoQaTMZYhM4VlXGLkkHHxj/BZ7Td0+SOPKB4RGPboBVKT9htw==", "dependencies": { "@babel/runtime": "^7.18.3", "react-is": "^18.2.0" @@ -13456,9 +10144,9 @@ } }, "node_modules/rc-virtual-list": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.11.3.tgz", - "integrity": "sha512-tu5UtrMk/AXonHwHxUogdXAWynaXsrx1i6dsgg+lOo/KJSF8oBAcprh1z5J3xgnPJD5hXxTL58F8s8onokdt0Q==", + "version": "3.14.8", + "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.14.8.tgz", + "integrity": "sha512-8D0KfzpRYi6YZvlOWIxiOm9BGt4Wf2hQyEaM6RXlDDiY2NhLheuYI+RA+7ZaZj1lq+XQqy3KHlaeeXQfzI5fGg==", "dependencies": { "@babel/runtime": "^7.20.0", "classnames": "^2.2.6", @@ -13469,13 +10157,14 @@ "node": ">=8.x" }, 
"peerDependencies": { - "react": "*", - "react-dom": "*" + "react": ">=16.9.0", + "react-dom": ">=16.9.0" } }, "node_modules/react": { - "version": "18.2.0", - "license": "MIT", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", "dependencies": { "loose-envify": "^1.1.0" }, @@ -13493,8 +10182,9 @@ }, "node_modules/react-colorful": { "version": "5.6.1", + "resolved": "https://registry.npmjs.org/react-colorful/-/react-colorful-5.6.1.tgz", + "integrity": "sha512-1exovf0uGTGyq5mXQT0zgQ80uvj2PCwvF8zY1RN9/vbJVSjSo3fsB/4L3ObbF7u70NduSiK4xu4Y6q1MHoUGEw==", "dev": true, - "license": "MIT", "peerDependencies": { "react": ">=16.8.0", "react-dom": ">=16.8.0" @@ -13590,23 +10280,24 @@ } }, "node_modules/react-docgen": { - "version": "6.0.4", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/react-docgen/-/react-docgen-7.0.3.tgz", + "integrity": "sha512-i8aF1nyKInZnANZ4uZrH49qn1paRgBZ7wZiCNBMnenlPzEv0mRl+ShpTVEI6wZNl8sSc79xZkivtgLKQArcanQ==", "dev": true, - "license": "MIT", "dependencies": { "@babel/core": "^7.18.9", "@babel/traverse": "^7.18.9", "@babel/types": "^7.18.9", "@types/babel__core": "^7.18.0", "@types/babel__traverse": "^7.18.0", - "@types/doctrine": "^0.0.6", + "@types/doctrine": "^0.0.9", "@types/resolve": "^1.20.2", "doctrine": "^3.0.0", "resolve": "^1.22.1", "strip-indent": "^4.0.0" }, "engines": { - "node": ">=14.18.0" + "node": ">=16.14.0" } }, "node_modules/react-docgen-typescript": { @@ -13617,20 +10308,16 @@ "typescript": ">= 4.3.x" } }, - "node_modules/react-docgen/node_modules/@types/doctrine": { - "version": "0.0.6", - "dev": true, - "license": "MIT" - }, "node_modules/react-dom": { - "version": "18.2.0", - "license": "MIT", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", "dependencies": { "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" + "scheduler": "^0.23.2" }, "peerDependencies": { - "react": "^18.2.0" + "react": "^18.3.1" } }, "node_modules/react-element-to-jsx-string": { @@ -13687,22 +10374,24 @@ "license": "MIT" }, "node_modules/react-hook-form": { - "version": "7.48.2", - "license": "MIT", + "version": "7.53.0", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.53.0.tgz", + "integrity": "sha512-M1n3HhqCww6S2hxLxciEXy2oISPnAzxY7gvwVPrtlczTM/1dDadXgUxDpHMrMTblDOcm/AXtXxHwZ3jpg1mqKQ==", "engines": { - "node": ">=12.22.0" + "node": ">=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/react-hook-form" }, "peerDependencies": { - "react": "^16.8.0 || ^17 || ^18" + "react": "^16.8.0 || ^17 || ^18 || ^19" } }, "node_modules/react-hotkeys-hook": { - "version": "4.4.1", - "license": "MIT", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/react-hotkeys-hook/-/react-hotkeys-hook-4.5.1.tgz", + "integrity": "sha512-scAEJOh3Irm0g95NIn6+tQVf/OICCjsQsC9NBHfQws/Vxw4sfq1tDQut5fhTEvPraXhu/sHxRd9lOtxzyYuNAg==", "peerDependencies": { "react": ">=16.8.1", "react-dom": ">=16.8.1" @@ -13728,14 +10417,6 @@ } } }, - "node_modules/react-inspector": { - "version": "6.0.2", - "dev": true, - "license": "MIT", - "peerDependencies": { - "react": "^16.8.4 || ^17.0.0 || ^18.0.0" - } - }, "node_modules/react-is": { "version": "18.2.0", "license": "MIT" @@ -13788,11 +10469,9 @@ } }, 
"node_modules/react-number-format": { - "version": "5.3.1", - "license": "MIT", - "dependencies": { - "prop-types": "^15.7.2" - }, + "version": "5.4.2", + "resolved": "https://registry.npmjs.org/react-number-format/-/react-number-format-5.4.2.tgz", + "integrity": "sha512-cg//jVdS49PYDgmcYoBnMMHl4XNTMuV723ZnHD2aXYtWWWqbVF3hjQ8iB+UZEuXapLbeA8P8H+1o6ZB1lcw3vg==", "peerDependencies": { "react": "^0.14 || ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0", "react-dom": "^0.14 || ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0" @@ -13861,57 +10540,13 @@ } }, "node_modules/react-refresh": { - "version": "0.14.0", - "license": "MIT", + "version": "0.14.2", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.2.tgz", + "integrity": "sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==", "engines": { "node": ">=0.10.0" } }, - "node_modules/react-remove-scroll": { - "version": "2.5.5", - "dev": true, - "license": "MIT", - "dependencies": { - "react-remove-scroll-bar": "^2.3.3", - "react-style-singleton": "^2.2.1", - "tslib": "^2.1.0", - "use-callback-ref": "^1.3.0", - "use-sidecar": "^1.1.2" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/react-remove-scroll-bar": { - "version": "2.3.4", - "dev": true, - "license": "MIT", - "dependencies": { - "react-style-singleton": "^2.2.1", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/react-resizable-panels": { "version": "0.0.55", "resolved": "https://registry.npmjs.org/react-resizable-panels/-/react-resizable-panels-0.0.55.tgz", @@ -13922,10 +10557,11 @@ } }, "node_modules/react-router": { - "version": "6.18.0", - "license": "MIT", + "version": "6.26.2", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.26.2.tgz", + "integrity": "sha512-tvN1iuT03kHgOFnLPfLJ8V95eijteveqdOSk+srqfePtQvqCExB8eHOYnlilbOcyJyKnYkr1vJvf7YqotAJu1A==", "dependencies": { - "@remix-run/router": "1.11.0" + "@remix-run/router": "1.19.2" }, "engines": { "node": ">=14.0.0" @@ -13935,11 +10571,12 @@ } }, "node_modules/react-router-dom": { - "version": "6.18.0", - "license": "MIT", + "version": "6.26.2", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.26.2.tgz", + "integrity": "sha512-z7YkaEW0Dy35T3/QKPYB1LjMK2R1fxnHO8kWpUMTBdfVzZrWOiY9a7CtN8HqdWtDUWd5FY6Dl8HFsqVwH4uOtQ==", "dependencies": { - "@remix-run/router": "1.11.0", - "react-router": "6.18.0" + "@remix-run/router": "1.19.2", + "react-router": "6.26.2" }, "engines": { "node": ">=14.0.0" @@ -13949,31 +10586,10 @@ "react-dom": ">=16.8" } }, - "node_modules/react-style-singleton": { - "version": "2.2.1", - "dev": true, - "license": "MIT", - "dependencies": { - "get-nonce": "^1.0.0", - "invariant": "^2.2.4", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/react-window": { - "version": "1.8.9", - "license": "MIT", + "version": "1.8.10", + "resolved": 
"https://registry.npmjs.org/react-window/-/react-window-1.8.10.tgz", + "integrity": "sha512-Y0Cx+dnU6NLa5/EvoHukUD0BklJ8qITCtVEPY1C/nL8wwoZ0b5aEw8Ff1dOVHw7fCzMt55XfJDd8S8W8LCaUCg==", "dependencies": { "@babel/runtime": "^7.0.0", "memoize-one": ">=3.1.1 <6" @@ -14002,144 +10618,57 @@ "node": ">=0.10.0" } }, - "node_modules/read-pkg": { - "version": "5.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^2.5.0", - "parse-json": "^5.0.0", - "type-fest": "^0.6.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg-up": { - "version": "7.0.1", - "dev": true, + "node_modules/readdirp": { + "version": "3.6.0", "license": "MIT", "dependencies": { - "find-up": "^4.1.0", - "read-pkg": "^5.2.0", - "type-fest": "^0.8.1" + "picomatch": "^2.2.1" }, "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=8.10.0" } }, - "node_modules/read-pkg-up/node_modules/find-up": { - "version": "4.1.0", + "node_modules/recast": { + "version": "0.23.9", + "resolved": "https://registry.npmjs.org/recast/-/recast-0.23.9.tgz", + "integrity": "sha512-Hx/BGIbwj+Des3+xy5uAtAbdCyqK9y9wbBcDFDYanLS9JnMqf7OeF87HQwUimE87OEc72mr6tkKUKMBBL+hF9Q==", "dev": true, - "license": "MIT", "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" + "ast-types": "^0.16.1", + "esprima": "~4.0.0", + "source-map": "~0.6.1", + "tiny-invariant": "^1.3.3", + "tslib": "^2.0.1" }, "engines": { - "node": ">=8" + "node": ">= 4" } }, - "node_modules/read-pkg-up/node_modules/locate-path": { - "version": "5.0.0", + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", "dev": true, - "license": "MIT", "dependencies": { - "p-locate": "^4.1.0" + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/read-pkg-up/node_modules/p-limit": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg-up/node_modules/p-locate": { - "version": "4.1.0", + "node_modules/redent/node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", "dev": true, - "license": "MIT", "dependencies": { - "p-limit": "^2.2.0" + "min-indent": "^1.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/read-pkg-up/node_modules/type-fest": { - "version": "0.8.1", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg/node_modules/type-fest": { - "version": "0.6.0", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=8" - } - }, - "node_modules/readable-stream": { - "version": "2.3.8", - "dev": true, - "license": "MIT", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/readable-stream/node_modules/isarray": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, 
- "node_modules/readdirp": { - "version": "3.6.0", - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/recast": { - "version": "0.23.4", - "dev": true, - "license": "MIT", - "dependencies": { - "assert": "^2.0.0", - "ast-types": "^0.16.1", - "esprima": "~4.0.0", - "source-map": "~0.6.1", - "tslib": "^2.0.1" - }, - "engines": { - "node": ">= 4" - } - }, "node_modules/redux": { "version": "4.2.1", "license": "MIT", @@ -14154,34 +10683,10 @@ "redux": "^3.1.0 || ^4.0.0" } }, - "node_modules/regenerate": { - "version": "1.4.2", - "dev": true, - "license": "MIT" - }, - "node_modules/regenerate-unicode-properties": { - "version": "10.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "regenerate": "^1.4.2" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/regenerator-runtime": { "version": "0.14.0", "license": "MIT" }, - "node_modules/regenerator-transform": { - "version": "0.15.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.8.4" - } - }, "node_modules/regexp.prototype.flags": { "version": "1.5.1", "dev": true, @@ -14198,95 +10703,149 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/regexpu-core": { - "version": "5.3.2", + "node_modules/rehype-external-links": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/rehype-external-links/-/rehype-external-links-3.0.0.tgz", + "integrity": "sha512-yp+e5N9V3C6bwBeAC4n796kc86M4gJCdlVhiMTxIrJG5UHDMh+PJANf9heqORJbt1nrCbDwIlAZKjANIaVBbvw==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/regjsgen": "^0.8.0", - "regenerate": "^1.4.2", - "regenerate-unicode-properties": "^10.1.0", - "regjsparser": "^0.9.1", - "unicode-match-property-ecmascript": "^2.0.0", - "unicode-match-property-value-ecmascript": "^2.1.0" + "@types/hast": "^3.0.0", + "@ungap/structured-clone": "^1.0.0", + "hast-util-is-element": "^3.0.0", + "is-absolute-url": "^4.0.0", + "space-separated-tokens": "^2.0.0", + "unist-util-visit": "^5.0.0" }, - "engines": { - "node": ">=4" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/regjsparser": { - "version": "0.9.1", + "node_modules/rehype-external-links/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dev": true, + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/rehype-external-links/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "dev": true + }, + "node_modules/rehype-external-links/node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { - "jsesc": "~0.5.0" + "@types/unist": "^3.0.0" }, - "bin": { - "regjsparser": "bin/parser" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/regjsparser/node_modules/jsesc": { - "version": "0.5.0", + "node_modules/rehype-external-links/node_modules/unist-util-visit": { + 
"version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", "dev": true, - "bin": { - "jsesc": "bin/jsesc" + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/remark-external-links": { - "version": "8.0.0", + "node_modules/rehype-external-links/node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", "dev": true, - "license": "MIT", "dependencies": { - "extend": "^3.0.0", - "is-absolute-url": "^3.0.0", - "mdast-util-definitions": "^4.0.0", - "space-separated-tokens": "^1.0.0", - "unist-util-visit": "^2.0.0" + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/remark-external-links/node_modules/space-separated-tokens": { - "version": "1.1.5", + "node_modules/rehype-slug": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/rehype-slug/-/rehype-slug-6.0.0.tgz", + "integrity": "sha512-lWyvf/jwu+oS5+hL5eClVd3hNdmwM1kAC0BUvEGD19pajQMIzcNUd/k9GsfQ+FfECvX+JE+e9/btsKH0EjJT6A==", "dev": true, - "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "github-slugger": "^2.0.0", + "hast-util-heading-rank": "^3.0.0", + "hast-util-to-string": "^3.0.0", + "unist-util-visit": "^5.0.0" + }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/remark-external-links/node_modules/unist-util-is": { - "version": "4.1.0", + "node_modules/rehype-slug/node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", "dev": true, - "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/rehype-slug/node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "dev": true + }, + "node_modules/rehype-slug/node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "dev": true, + "dependencies": { + "@types/unist": "^3.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/remark-external-links/node_modules/unist-util-visit": { - "version": "2.0.3", + "node_modules/rehype-slug/node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", "dev": true, - "license": "MIT", "dependencies": { - 
"@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/remark-external-links/node_modules/unist-util-visit-parents": { - "version": "3.1.1", + "node_modules/rehype-slug/node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", "dev": true, - "license": "MIT", "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0" + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" }, "funding": { "type": "opencollective", @@ -14343,73 +10902,6 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/remark-slug": { - "version": "6.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "github-slugger": "^1.0.0", - "mdast-util-to-string": "^1.0.0", - "unist-util-visit": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-slug/node_modules/mdast-util-to-string": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-slug/node_modules/unist-util-is": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-slug/node_modules/unist-util-visit": { - "version": "2.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-slug/node_modules/unist-util-visit-parents": { - "version": "3.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", - "engines": { - "node": ">=0.10" - } - }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -14473,18 +10965,6 @@ "node": ">=10" } }, - "node_modules/restore-cursor": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/reusify": { "version": "1.0.4", "license": "MIT", @@ -14507,16 +10987,36 @@ } }, "node_modules/rollup": { - "version": "3.29.4", - "license": "MIT", + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.24.0.tgz", + "integrity": "sha512-DOmrlGSXNk1DM0ljiQA+i+o0rSLhtii1je5wgk60j49d1jHT5YYttBv1iWOnYSTG+fZZESUOSNiAl89SIet+Cg==", + "dependencies": { + "@types/estree": "1.0.6" + }, "bin": { "rollup": "dist/bin/rollup" }, "engines": { - "node": ">=14.18.0", 
+ "node": ">=18.0.0", "npm": ">=8.0.0" }, "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.24.0", + "@rollup/rollup-android-arm64": "4.24.0", + "@rollup/rollup-darwin-arm64": "4.24.0", + "@rollup/rollup-darwin-x64": "4.24.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.24.0", + "@rollup/rollup-linux-arm-musleabihf": "4.24.0", + "@rollup/rollup-linux-arm64-gnu": "4.24.0", + "@rollup/rollup-linux-arm64-musl": "4.24.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.24.0", + "@rollup/rollup-linux-riscv64-gnu": "4.24.0", + "@rollup/rollup-linux-s390x-gnu": "4.24.0", + "@rollup/rollup-linux-x64-gnu": "4.24.0", + "@rollup/rollup-linux-x64-musl": "4.24.0", + "@rollup/rollup-win32-arm64-msvc": "4.24.0", + "@rollup/rollup-win32-ia32-msvc": "4.24.0", + "@rollup/rollup-win32-x64-msvc": "4.24.0", "fsevents": "~2.3.2" } }, @@ -14572,18 +11072,17 @@ } }, "node_modules/scheduler": { - "version": "0.23.0", - "license": "MIT", + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", "dependencies": { "loose-envify": "^1.1.0" } }, "node_modules/semver": { - "version": "7.5.4", - "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "bin": { "semver": "bin/semver.js" }, @@ -14667,17 +11166,6 @@ "version": "1.2.0", "license": "ISC" }, - "node_modules/shallow-clone": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/shebang-command": { "version": "2.0.0", "license": "MIT", @@ -14744,9 +11232,9 @@ } }, "node_modules/source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "engines": { "node": ">=0.10.0" } @@ -14768,34 +11256,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.3.0", - "dev": true, - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.16", - "dev": true, - "license": "CC0-1.0" - }, "node_modules/sprintf-js": { "version": "1.0.3", "dev": true, @@ -14840,21 +11300,18 @@ "node": ">= 0.4" } }, - "node_modules/store2": { - "version": "2.14.2", - "dev": true, - "license": "(MIT OR GPL-3.0)" - }, "node_modules/storybook": { - "version": "7.5.3", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/storybook/-/storybook-8.3.4.tgz", + "integrity": "sha512-nzvuK5TsEgJwcWGLGgafabBOxKn37lfJVv7ZoUVPgJIjk2mNRyJDFwYRJzUZaD37eiR/c/lQ6MoaeqlGwiXoxw==", "dev": 
true, - "license": "MIT", "dependencies": { - "@storybook/cli": "7.5.3" + "@storybook/core": "8.3.4" }, "bin": { - "sb": "index.js", - "storybook": "index.js" + "getstorybook": "bin/index.cjs", + "sb": "bin/index.cjs", + "storybook": "bin/index.cjs" }, "funding": { "type": "opencollective", @@ -14869,19 +11326,6 @@ "node": ">=10" } }, - "node_modules/stream-shift": { - "version": "1.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/string_decoder": { - "version": "1.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/string-length": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", @@ -15003,8 +11447,9 @@ }, "node_modules/strip-indent": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz", + "integrity": "sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==", "dev": true, - "license": "MIT", "dependencies": { "min-indent": "^1.0.1" }, @@ -15141,11 +11586,6 @@ "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", "dev": true }, - "node_modules/synchronous-promise": { - "version": "2.0.17", - "dev": true, - "license": "BSD-3-Clause" - }, "node_modules/table-layout": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/table-layout/-/table-layout-3.0.2.tgz", @@ -15234,204 +11674,36 @@ "sucrase": "^3.32.0" }, "bin": { - "tailwind": "lib/cli.js", - "tailwindcss": "lib/cli.js" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/tailwindcss/node_modules/arg": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", - "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==" - }, - "node_modules/tailwindcss/node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dev": true, - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tar-fs": { - "version": "2.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "chownr": "^1.1.1", - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^2.1.4" - } - }, - "node_modules/tar-stream": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "bl": "^4.0.3", - "end-of-stream": "^1.4.1", - "fs-constants": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/tar-stream/node_modules/readable-stream": { - "version": "3.6.2", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/tar-stream/node_modules/safe-buffer": { - "version": "5.2.1", - "dev": true, - "funding": 
[ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/tar-stream/node_modules/string_decoder": { - "version": "1.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/tar/node_modules/chownr": { - "version": "2.0.0", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/tar/node_modules/mkdirp": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/telejson": { - "version": "7.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "memoizerific": "^1.11.3" - } - }, - "node_modules/temp": { - "version": "0.8.4", - "dev": true, - "license": "MIT", - "dependencies": { - "rimraf": "~2.6.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/temp-dir": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/temp/node_modules/rimraf": { - "version": "2.6.3", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" } }, - "node_modules/tempy": { - "version": "1.0.1", - "dev": true, - "license": "MIT", + "node_modules/tailwindcss/node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==" + }, + "node_modules/tailwindcss/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dependencies": { - "del": "^6.0.0", - "is-stream": "^2.0.0", - "temp-dir": "^2.0.0", - "type-fest": "^0.16.0", - "unique-string": "^2.0.0" + "is-glob": "^4.0.3" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=10.13.0" } }, - "node_modules/tempy/node_modules/type-fest": { - "version": "0.16.0", + "node_modules/telejson": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/telejson/-/telejson-7.2.0.tgz", + "integrity": "sha512-1QTEcJkJEhc8OnStBx/ILRu5J2p0GjvWsBx56bmZRqnrkdBMUe+nX92jxV+p3dB4CP6PZCdJMQJwCggkNBMzkQ==", "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "dependencies": { + "memoizerific": "^1.11.3" } }, "node_modules/terser": { @@ -15487,19 +11759,29 @@ "node": ">=0.8" } }, - "node_modules/through2": { - "version": "2.0.5", + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", + "dev": true + }, + "node_modules/tinyrainbow": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz", + "integrity": "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==", "dev": true, - "license": "MIT", - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" + "engines": { + "node": ">=14.0.0" } }, - "node_modules/tiny-invariant": { - "version": "1.3.1", + "node_modules/tinyspy": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", + "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", "dev": true, - "license": "MIT" + "engines": { + "node": ">=14.0.0" + } }, "node_modules/tippy.js": { "version": "6.3.7", @@ -15530,11 +11812,6 @@ "node": ">=8.0" } }, - "node_modules/tocbot": { - "version": "4.21.6", - "dev": true, - "license": "MIT" - }, "node_modules/toidentifier": { "version": "1.0.1", "license": "MIT", @@ -15713,6 +11990,29 @@ } } }, + "node_modules/tsconfig-paths": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz", + "integrity": "sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==", + "dev": true, + "dependencies": { + "json5": "^2.2.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tsconfig-paths/node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/tslib": { "version": "2.6.2", "license": "0BSD" @@ -15758,11 +12058,6 @@ "node": ">= 0.6" } }, - "node_modules/typedarray": { - "version": "0.0.6", - "dev": true, - "license": "MIT" - }, "node_modules/typesafe-actions": { "version": "5.1.0", "license": "MIT", @@ -15789,18 +12084,6 @@ "node": ">=8" } }, - "node_modules/uglify-js": { - "version": "3.17.4", - "dev": true, - "license": "BSD-2-Clause", - "optional": true, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/undefsafe": { "version": "2.0.5", "license": "MIT" @@ -15810,42 +12093,6 @@ "devOptional": true, "license": "MIT" }, - "node_modules/unicode-canonical-property-names-ecmascript": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-ecmascript": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "unicode-canonical-property-names-ecmascript": "^2.0.0", - "unicode-property-aliases-ecmascript": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-value-ecmascript": { - "version": "2.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-property-aliases-ecmascript": { - "version": "2.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, "node_modules/unified": { "version": "10.1.2", "license": "MIT", @@ -15863,17 +12110,6 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/unique-string": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "crypto-random-string": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/unist-builder": { "version": "3.0.1", "license": "MIT", @@ -15953,8 +12189,9 @@ }, 
"node_modules/universalify": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", "dev": true, - "license": "MIT", "engines": { "node": ">= 10.0.0" } @@ -15967,26 +12204,30 @@ } }, "node_modules/unplugin": { - "version": "1.5.0", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-1.14.1.tgz", + "integrity": "sha512-lBlHbfSFPToDYp9pjXlUEFVxYLaue9f9T1HC+4OHlmj+HnMDdz9oZY+erXfoCe/5V/7gKUSY2jpXPb9S7f0f/w==", "dev": true, - "license": "MIT", "dependencies": { - "acorn": "^8.10.0", - "chokidar": "^3.5.3", - "webpack-sources": "^3.2.3", - "webpack-virtual-modules": "^0.5.0" - } - }, - "node_modules/untildify": { - "version": "4.0.0", - "dev": true, - "license": "MIT", + "acorn": "^8.12.1", + "webpack-virtual-modules": "^0.6.2" + }, "engines": { - "node": ">=8" + "node": ">=14.0.0" + }, + "peerDependencies": { + "webpack-sources": "^3" + }, + "peerDependenciesMeta": { + "webpack-sources": { + "optional": true + } } }, "node_modules/update-browserslist-db": { - "version": "1.0.13", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "funding": [ { "type": "opencollective", @@ -16001,10 +12242,9 @@ "url": "https://github.com/sponsors/ai" } ], - "license": "MIT", "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" }, "bin": { "update-browserslist-db": "cli.js" @@ -16013,289 +12253,614 @@ "browserslist": ">= 4.21.0" } }, - "node_modules/uri-js": { - "version": "4.4.1", - "license": "BSD-2-Clause", + "node_modules/uri-js": { + "version": "4.4.1", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, + "node_modules/use-sync-external-store": { + "version": "1.2.0", + "license": "MIT", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "license": "MIT" + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "9.0.1", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/uvu": { + "version": "0.5.6", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0", + "diff": "^5.0.0", + "kleur": "^4.0.3", + "sade": "^1.7.3" + }, + "bin": { + "uvu": "bin.js" + }, 
+ "engines": { + "node": ">=8" + } + }, + "node_modules/uvu/node_modules/diff": { + "version": "5.1.0", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/uvu/node_modules/kleur": { + "version": "4.1.5", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "devOptional": true, + "license": "MIT" + }, + "node_modules/v8-to-istanbul": { + "version": "9.1.3", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.3.tgz", + "integrity": "sha512-9lDD+EVI2fjFsMWXc6dy5JJzBsVTcQ2fVkfBvncZ6xJWG9wtBhOldG+mHkSL0+V1K/xgZz0JDO5UT5hFwHUghg==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/vfile": { + "version": "5.3.7", + "license": "MIT", "dependencies": { - "punycode": "^2.1.0" + "@types/unist": "^2.0.0", + "is-buffer": "^2.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/url-parse": { - "version": "1.5.10", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", - "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", - "dev": true, + "node_modules/vfile-message": { + "version": "3.1.4", + "license": "MIT", "dependencies": { - "querystringify": "^2.1.1", - "requires-port": "^1.0.0" + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/use-callback-ref": { - "version": "1.3.0", - "dev": true, - "license": "MIT", + "node_modules/vite": { + "version": "5.4.8", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.8.tgz", + "integrity": "sha512-FqrItQ4DT1NC4zCUqMB4c4AZORMKIa0m8/URVCZ77OZ/QSNeJ54bU1vrFADbDsuwfIPcgknRkmqakQcgnL4GiQ==", "dependencies": { - "tslib": "^2.0.0" + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" }, "engines": { - "node": ">=10" + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" }, "peerDependenciesMeta": { - "@types/react": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { "optional": true } } }, - "node_modules/use-resize-observer": { - "version": "9.1.0", + "node_modules/vite-plugin-eslint": { + "version": "1.8.1", "dev": true, "license": "MIT", "dependencies": { - "@juggle/resize-observer": "^3.3.1" + "@rollup/pluginutils": "^4.2.1", + "@types/eslint": "^8.4.5", + "rollup": "^2.77.2" }, "peerDependencies": { - 
"react": "16.8.0 - 18", - "react-dom": "16.8.0 - 18" + "eslint": ">=7", + "vite": ">=2" } }, - "node_modules/use-sidecar": { - "version": "1.1.2", + "node_modules/vite-plugin-eslint/node_modules/rollup": { + "version": "2.79.1", "dev": true, "license": "MIT", - "dependencies": { - "detect-node-es": "^1.1.0", - "tslib": "^2.0.0" + "bin": { + "rollup": "dist/bin/rollup" }, "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.9.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + "node": ">=10.0.0" }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } + "optionalDependencies": { + "fsevents": "~2.3.2" } }, - "node_modules/use-sync-external-store": { - "version": "1.2.0", - "license": "MIT", - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + "node_modules/vite/node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" } }, - "node_modules/util": { - "version": "0.12.5", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "is-arguments": "^1.0.4", - "is-generator-function": "^1.0.7", - "is-typed-array": "^1.1.3", - "which-typed-array": "^1.1.2" + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" } }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "license": "MIT" + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } }, - "node_modules/utils-merge": { - "version": "1.0.1", - "license": "MIT", + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "android" + ], "engines": { - "node": ">= 0.4.0" + "node": ">=12" } }, - "node_modules/uuid": { - "version": "9.0.1", - "dev": true, - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" } }, - "node_modules/uvu": { - "version": "0.5.6", - "license": "MIT", - "dependencies": { - "dequal": "^2.0.0", - 
"diff": "^5.0.0", - "kleur": "^4.0.3", - "sade": "^1.7.3" - }, - "bin": { - "uvu": "bin.js" - }, + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=8" + "node": ">=12" } }, - "node_modules/uvu/node_modules/diff": { - "version": "5.1.0", - "license": "BSD-3-Clause", + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=0.3.1" + "node": ">=12" } }, - "node_modules/uvu/node_modules/kleur": { - "version": "4.1.5", - "license": "MIT", + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6" + "node": ">=12" } }, - "node_modules/v8-compile-cache-lib": { - "version": "3.0.1", - "devOptional": true, - "license": "MIT" + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } }, - "node_modules/v8-to-istanbul": { - "version": "9.1.3", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.3.tgz", - "integrity": "sha512-9lDD+EVI2fjFsMWXc6dy5JJzBsVTcQ2fVkfBvncZ6xJWG9wtBhOldG+mHkSL0+V1K/xgZz0JDO5UT5hFwHUghg==", - "dev": true, - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.12", - "@types/istanbul-lib-coverage": "^2.0.1", - "convert-source-map": "^2.0.0" - }, + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=10.12.0" + "node": ">=12" } }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" + "node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" } }, - "node_modules/vary": { - "version": "1.1.2", - "license": "MIT", + "node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "openbsd" + ], "engines": { - "node": ">= 0.8" + "node": ">=12" } }, - "node_modules/vfile": { - "version": "5.3.7", - "license": "MIT", - "dependencies": { - "@types/unist": "^2.0.0", - "is-buffer": "^2.0.0", - "unist-util-stringify-position": "^3.0.0", - "vfile-message": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + 
"node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" } }, - "node_modules/vfile-message": { - "version": "3.1.4", - "license": "MIT", - "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-stringify-position": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" } }, - "node_modules/vite": { - "version": "4.5.3", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.3.tgz", - "integrity": "sha512-kQL23kMeX92v3ph7IauVkXkikdDRsYMGTVl5KY2E9OY4ONLvkHf04MDTbnfo6NKxZiDLWzVpP5oTa8hQD8U3dg==", - "dependencies": { - "esbuild": "^0.18.10", - "postcss": "^8.4.27", - "rollup": "^3.27.1" - }, - "bin": { - "vite": "bin/vite.js" - }, + "node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - }, - "peerDependencies": { - "@types/node": ">= 14", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - } + "node": ">=12" } }, - "node_modules/vite-plugin-eslint": { - "version": "1.8.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^4.2.1", - "@types/eslint": "^8.4.5", - "rollup": "^2.77.2" - }, - "peerDependencies": { - "eslint": ">=7", - "vite": ">=2" + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" } }, - "node_modules/vite-plugin-eslint/node_modules/rollup": { - "version": "2.79.1", - "dev": true, - "license": "MIT", + "node_modules/vite/node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "hasInstallScript": true, "bin": { - "rollup": "dist/bin/rollup" + "esbuild": "bin/esbuild" }, 
"engines": { - "node": ">=10.0.0" + "node": ">=12" }, "optionalDependencies": { - "fsevents": "~2.3.2" + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" } }, "node_modules/void-elements": { @@ -16332,26 +12897,6 @@ "loose-envify": "^1.0.0" } }, - "node_modules/watchpack": { - "version": "2.4.0", - "dev": true, - "license": "MIT", - "dependencies": { - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.1.2" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/wcwidth": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "defaults": "^1.0.3" - } - }, "node_modules/webidl-conversions": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", @@ -16361,18 +12906,11 @@ "node": ">=12" } }, - "node_modules/webpack-sources": { - "version": "3.2.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.13.0" - } - }, "node_modules/webpack-virtual-modules": { - "version": "0.5.0", - "dev": true, - "license": "MIT" + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.2.tgz", + "integrity": "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ==", + "dev": true }, "node_modules/whatwg-encoding": { "version": "2.0.0", @@ -16480,11 +13018,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/wordwrap": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, "node_modules/wordwrapjs": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-5.1.0.tgz", @@ -16562,16 +13095,6 @@ "version": "1.0.2", "license": "ISC" }, - "node_modules/write-file-atomic": { - "version": "2.4.3", - "dev": true, - "license": "ISC", - "dependencies": { - "graceful-fs": "^4.1.11", - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.2" - } - }, "node_modules/ws": { "version": "8.17.1", "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", @@ -16608,14 +13131,6 @@ "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", "dev": true }, - "node_modules/xtend": { - "version": "4.0.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.4" - } - }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", @@ -16626,8 +13141,9 @@ } }, "node_modules/yallist": { - "version": "4.0.0", - "license": "ISC" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "node_modules/yaml": { "version": "1.10.2", @@ -16689,15 +13205,6 @@ "node": ">=8" } }, 
- "node_modules/yauzl": { - "version": "2.10.0", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - }, "node_modules/yn": { "version": "3.1.1", "devOptional": true, diff --git a/frontend/package.json b/frontend/package.json index 0d165804c0..f0a11d8f72 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -27,19 +27,19 @@ "@emotion/is-prop-valid": "^1.2.1", "@emotion/react": "^11.11.1", "@emotion/styled": "^11.11.0", - "@fortawesome/fontawesome-svg-core": "^6.4.2", - "@fortawesome/free-regular-svg-icons": "^6.4.2", - "@fortawesome/free-solid-svg-icons": "^6.4.2", - "@fortawesome/react-fontawesome": "^0.2.0", + "@fortawesome/fontawesome-svg-core": "^6.6.0", + "@fortawesome/free-regular-svg-icons": "^6.6.0", + "@fortawesome/free-solid-svg-icons": "^6.6.0", + "@fortawesome/react-fontawesome": "^0.2.2", "@paralleldrive/cuid2": "^2.2.2", "@react-keycloak-fork/web": "^4.0.3", "@tippyjs/react": "^4.2.6", - "@vitejs/plugin-react": "^4.1.1", - "apache-arrow": "^13.0.0", + "@vitejs/plugin-react": "^4.3.2", + "apache-arrow": "^17.0.0", "autoprefixer": "^10.4.19", - "axios": "^1.6.0", + "axios": "^1.7.7", "chance": "^1.1.11", - "chart.js": "^4.4.0", + "chart.js": "^4.4.4", "compression": "^1.7.4", "date-fns": "^2.30.0", "downshift": "^7.4.1", @@ -55,28 +55,28 @@ "nodemon": "^3.0.1", "postcss": "^8.4.38", "prettier-plugin-organize-imports": "^3.2.3", - "rc-table": "^7.35.2", - "react": "^18.2.0", + "rc-table": "^7.48.0", + "react": "^18.3.1", "react-chartjs-2": "^5.2.0", "react-datepicker": "^4.21.0", "react-dnd": "^16.0.1", "react-dnd-html5-backend": "^16.0.1", "react-dnd-multi-backend": "^8.0.3", "react-dnd-touch-backend": "^16.0.1", - "react-dom": "^18.2.0", + "react-dom": "^18.3.1", "react-error-boundary": "^3.1.4", "react-highlight-words": "^0.20.0", - "react-hook-form": "^7.48.2", - "react-hotkeys-hook": "^4.4.1", + "react-hook-form": "^7.53.0", + "react-hotkeys-hook": "^4.5.1", "react-i18next": "^12.2.0", "react-list": "^0.8.16", "react-markdown": "^8.0.0", "react-merge-refs": "^2.1.1", - "react-number-format": "^5.3.1", + "react-number-format": "^5.4.2", "react-redux": "^8.1.3", "react-resizable-panels": "^0.0.55", - "react-router-dom": "^6.18.0", - "react-window": "^1.8.9", + "react-router-dom": "^6.26.2", + "react-window": "^1.8.10", "redux": "^4.1.2", "redux-devtools-extension": "^2.13.9", "remark-flexible-markers": "^1.0.3", @@ -84,16 +84,16 @@ "resize-observer-polyfill": "^1.5.1", "tailwindcss": "^3.4.3", "typesafe-actions": "^5.1.0", - "vite": "^4.5.0" + "vite": "^5.4.8" }, "devDependencies": { "@babel/core": "^7.23.2", - "@storybook/addon-actions": "7.5.3", - "@storybook/addon-essentials": "7.5.3", - "@storybook/addon-interactions": "7.5.3", - "@storybook/addon-links": "7.5.3", - "@storybook/react": "7.5.3", - "@storybook/react-vite": "7.5.3", + "@storybook/addon-actions": "8.3.4", + "@storybook/addon-essentials": "8.3.4", + "@storybook/addon-interactions": "8.3.4", + "@storybook/addon-links": "8.3.4", + "@storybook/react": "8.3.4", + "@storybook/react-vite": "8.3.4", "@storybook/testing-library": "^0.2.2", "@swc/core": "^1.3.96", "@testing-library/react": "^14.0.0", @@ -109,13 +109,13 @@ "@types/mustache": "^4.2.4", "@types/node": "^18.15.3", "@types/papaparse": "^5.3.10", - "@types/react": "^18.2.79", + "@types/react": "^18.3.10", "@types/react-datepicker": "^4.19.1", - "@types/react-dom": "^18.2.14", + "@types/react-dom": "^18.3.0", "@types/react-highlight-words": "^0.16.6", "@types/react-list": "^0.8.9", 
"@types/react-router-dom": "^5.3.3", - "@types/react-window": "^1.8.7", + "@types/react-window": "^1.8.8", "@types/redux": "^3.6.0", "@typescript-eslint/eslint-plugin": "^6.10.0", "@typescript-eslint/parser": "^6.10.0", @@ -128,7 +128,7 @@ "jest-environment-jsdom": "^29.7.0", "papaparse": "^5.4.1", "prettier": "^3.0.3", - "storybook": "7.5.3", + "storybook": "8.3.4", "tailwind-styled-components": "^2.2.0", "terser": "^5.24.0", "ts-jest": "^29.1.1", diff --git a/frontend/src/js/concept-trees/ConceptTreeFolder.tsx b/frontend/src/js/concept-trees/ConceptTreeFolder.tsx index 6462d09643..952e0f31d4 100644 --- a/frontend/src/js/concept-trees/ConceptTreeFolder.tsx +++ b/frontend/src/js/concept-trees/ConceptTreeFolder.tsx @@ -1,5 +1,5 @@ import styled from "@emotion/styled"; -import { FC } from "react"; +import { useMemo } from "react"; import type { ConceptIdT, ConceptT } from "../api/types"; import { useOpenableConcept } from "../concept-trees-open/useOpenableConcept"; @@ -7,24 +7,13 @@ import { useOpenableConcept } from "../concept-trees-open/useOpenableConcept"; import ConceptTree from "./ConceptTree"; import ConceptTreeNodeTextContainer from "./ConceptTreeNodeTextContainer"; import { getConceptById } from "./globalTreeStoreHelper"; -import type { SearchT, TreesT } from "./reducer"; +import type { LoadedConcept, SearchT, TreesT } from "./reducer"; import { isNodeInSearchResult } from "./selectors"; const Root = styled("div")` font-size: ${({ theme }) => theme.font.sm}; `; -interface PropsT { - depth: number; - trees: TreesT; - tree: ConceptT; - conceptId: ConceptIdT; - active?: boolean; - openInitially?: boolean; - search: SearchT; - onLoadTree: (id: string) => void; -} - const sumMatchingEntities = (children: string[], initSum: number) => { return children.reduce((sum, treeId) => { const rootConcept = getConceptById(treeId); @@ -43,7 +32,26 @@ const sumMatchingEntries = (children: string[], initSum: number) => { }, initSum); }; -const ConceptTreeFolder: FC = ({ +export const getNonFolderChildren = ( + trees: TreesT, + node: LoadedConcept, + conceptId: ConceptIdT, +): string[] => { + if (node.detailsAvailable) return [conceptId, ...(node.children || [])]; + + if (!node.children) return [conceptId]; + + // collect all non-folder children, recursively + return node.children.reduce( + (acc, childId) => { + const child = trees[childId]; + return acc.concat(getNonFolderChildren(trees, child, childId)); + }, + [conceptId], + ); +}; + +const ConceptTreeFolder = ({ trees, tree, conceptId, @@ -52,17 +60,34 @@ const ConceptTreeFolder: FC = ({ active, onLoadTree, openInitially, +}: { + depth: number; + trees: TreesT; + tree: ConceptT; + conceptId: ConceptIdT; + active?: boolean; + openInitially?: boolean; + search: SearchT; + onLoadTree: (id: string) => void; }) => { const { open, onToggleOpen } = useOpenableConcept({ conceptId, openInitially, }); - if (!search.showMismatches) { - const shouldRender = isNodeInSearchResult(conceptId, search, tree.children); + const nonFolderChildren = useMemo( + () => + tree.detailsAvailable + ? 
tree.children + : getNonFolderChildren(trees, tree, conceptId), + [trees, tree, conceptId], + ); - if (!shouldRender) return null; - } + if ( + !search.showMismatches && + !isNodeInSearchResult(conceptId, search, nonFolderChildren) + ) + return null; const matchingEntries = !tree.children || !tree.matchingEntries diff --git a/frontend/src/js/concept-trees/ConceptTreeListItem.tsx b/frontend/src/js/concept-trees/ConceptTreeListItem.tsx index b5f4bda0cb..e0668a91c3 100644 --- a/frontend/src/js/concept-trees/ConceptTreeListItem.tsx +++ b/frontend/src/js/concept-trees/ConceptTreeListItem.tsx @@ -1,7 +1,8 @@ +import { useMemo } from "react"; import type { ConceptIdT } from "../api/types"; import ConceptTree from "./ConceptTree"; -import ConceptTreeFolder from "./ConceptTreeFolder"; +import ConceptTreeFolder, { getNonFolderChildren } from "./ConceptTreeFolder"; import { getConceptById } from "./globalTreeStoreHelper"; import type { SearchT, TreesT } from "./reducer"; import { isNodeInSearchResult } from "./selectors"; @@ -19,7 +20,15 @@ const ConceptTreeListItem = ({ }) => { const tree = trees[conceptId]; - if (!isNodeInSearchResult(conceptId, search, tree.children)) return null; + const nonFolderChildren = useMemo( + () => + tree.detailsAvailable + ? tree.children + : getNonFolderChildren(trees, tree, conceptId), + [trees, tree, conceptId], + ); + + if (!isNodeInSearchResult(conceptId, search, nonFolderChildren)) return null; const rootConcept = getConceptById(conceptId); diff --git a/frontend/src/js/concept-trees/ConceptTreeNodeTextContainer.tsx b/frontend/src/js/concept-trees/ConceptTreeNodeTextContainer.tsx index d951846f63..15007c2e03 100644 --- a/frontend/src/js/concept-trees/ConceptTreeNodeTextContainer.tsx +++ b/frontend/src/js/concept-trees/ConceptTreeNodeTextContainer.tsx @@ -1,4 +1,4 @@ -import { FC, useRef } from "react"; +import { useRef } from "react"; import { useDrag } from "react-dnd"; import type { ConceptIdT, ConceptT } from "../api/types"; @@ -15,19 +15,6 @@ import AdditionalInfoHoverable from "../tooltip/AdditionalInfoHoverable"; import ConceptTreeNodeText from "./ConceptTreeNodeText"; import type { SearchT } from "./reducer"; -interface PropsT { - conceptId: ConceptIdT; - node: ConceptT; - root: ConceptT; - open: boolean; - depth: number; - active?: boolean; - onTextClick?: () => void; - createQueryElement?: () => ConceptQueryNodeType; - search: SearchT; - isStructFolder?: boolean; -} - function getResultCount( search: SearchT, node: ConceptT, @@ -44,7 +31,7 @@ function getResultCount( : null; } -const ConceptTreeNodeTextContainer: FC = ({ +const ConceptTreeNodeTextContainer = ({ conceptId, node, root, @@ -55,11 +42,24 @@ const ConceptTreeNodeTextContainer: FC = ({ onTextClick, isStructFolder, createQueryElement, +}: { + conceptId: ConceptIdT; + node: ConceptT; + root: ConceptT; + open: boolean; + depth: number; + active?: boolean; + onTextClick?: () => void; + createQueryElement?: () => ConceptQueryNodeType; + search: SearchT; + isStructFolder?: boolean; }) => { const ref = useRef(null); const red = exists(node.matchingEntries) && node.matchingEntries === 0; - const resultCount = getResultCount(search, node, conceptId); + const resultCount = isStructFolder + ? 
null + : getResultCount(search, node, conceptId); const hasChildren = !!node.children && node.children.length > 0; const item: DragItemConceptTreeNode = { diff --git a/frontend/src/js/concept-trees/globalTreeStoreHelper.ts b/frontend/src/js/concept-trees/globalTreeStoreHelper.ts index d16409b7dd..3f5ceee48e 100644 --- a/frontend/src/js/concept-trees/globalTreeStoreHelper.ts +++ b/frontend/src/js/concept-trees/globalTreeStoreHelper.ts @@ -181,9 +181,8 @@ export const globalSearch = async (trees: TreesT, query: string) => { // TODO: Refactor the state and keep both root trees as well as concept trees in a single format // Then simply use that here const formattedTrees = Object.fromEntries( - Object.keys(trees).map((key) => [key, { [key]: trees[key] }]), + Object.entries(trees).map(([key, value]) => [key, { [key]: value }]), ); - const combinedTrees = Object.assign({}, formattedTrees, window.conceptTrees); const result = Object.keys(combinedTrees) diff --git a/frontend/src/js/concept-trees/search.ts b/frontend/src/js/concept-trees/search.ts index f8d5db083c..6dc35ada9a 100644 --- a/frontend/src/js/concept-trees/search.ts +++ b/frontend/src/js/concept-trees/search.ts @@ -58,17 +58,17 @@ export const findConcepts = ( // Count node as 1 already, if it matches let sum = isNodeIncluded ? 1 : 0; - for (const child of node.children) { + for (const childId of node.children) { const result = findConcepts( trees, treeId, - child, - trees[treeId][child], + childId, + trees[treeId][childId], query, intermediateResult, ); - sum += result[child] || 0; + sum += result[childId] || 0; } if (sum !== 0) { diff --git a/frontend/src/js/concept-trees/selectors.ts b/frontend/src/js/concept-trees/selectors.ts index 853c7d3871..5d02b79201 100644 --- a/frontend/src/js/concept-trees/selectors.ts +++ b/frontend/src/js/concept-trees/selectors.ts @@ -15,7 +15,7 @@ const isChildWithinResults = (children: ConceptIdT[], search: SearchT) => { export const isNodeInSearchResult = ( id: ConceptIdT, search: SearchT, - children?: ConceptIdT[], + children?: ConceptIdT[], // actual concept tree ids, not folder ids ) => { if (!search.result) return true; diff --git a/frontend/src/js/dataset/actions.ts b/frontend/src/js/dataset/actions.ts index f0f8b27f77..fb127723c1 100644 --- a/frontend/src/js/dataset/actions.ts +++ b/frontend/src/js/dataset/actions.ts @@ -112,7 +112,7 @@ export const useSelectDataset = () => { dispatch(selectDatasetInput({ id: datasetId })); - dispatch(resetHistory()); + dispatch(resetHistory({ includingDefaultParams: true })); dispatch(queryResultReset({ queryType: "standard" })); dispatch(queryResultReset({ queryType: "timebased" })); dispatch(queryResultReset({ queryType: "editorV2" })); diff --git a/frontend/src/js/entity-history/Navigation.tsx b/frontend/src/js/entity-history/Navigation.tsx index 3b7139feee..8043d3d65e 100644 --- a/frontend/src/js/entity-history/Navigation.tsx +++ b/frontend/src/js/entity-history/Navigation.tsx @@ -133,7 +133,7 @@ export const Navigation = memo( const onReset = useCallback(() => { onResetHistory(); - dispatch(resetHistory()); + dispatch(resetHistory({ includingDefaultParams: false })); }, [dispatch, onResetHistory]); useHotkeys("shift+up", goToPrev, [goToPrev]); diff --git a/frontend/src/js/entity-history/actions.ts b/frontend/src/js/entity-history/actions.ts index f237e8d6cc..e96338535f 100644 --- a/frontend/src/js/entity-history/actions.ts +++ b/frontend/src/js/entity-history/actions.ts @@ -82,7 +82,9 @@ export const useLoadDefaultHistoryParams = () => { export const 
resetCurrentEntity = createAction( "history/RESET_CURRENT_ENTITY", )(); -export const resetHistory = createAction("history/RESET")(); +export const resetHistory = createAction("history/RESET")<{ + includingDefaultParams?: boolean; +}>(); export const loadHistoryData = createAsyncAction( "history/LOAD_START", diff --git a/frontend/src/js/entity-history/reducer.ts b/frontend/src/js/entity-history/reducer.ts index f34b67cb05..16f94c0594 100644 --- a/frontend/src/js/entity-history/reducer.ts +++ b/frontend/src/js/entity-history/reducer.ts @@ -113,7 +113,11 @@ export default function reducer( case getType(resetHistory): return { ...state, - defaultParams: initialState.defaultParams, + ...(action.payload.includingDefaultParams + ? { + defaultParams: initialState.defaultParams, + } + : {}), label: "", columns: {}, columnDescriptions: [], diff --git a/frontend/src/js/entity-history/timeline/EventCard.tsx b/frontend/src/js/entity-history/timeline/EventCard.tsx index ce0d50c8b9..9fed38d023 100644 --- a/frontend/src/js/entity-history/timeline/EventCard.tsx +++ b/frontend/src/js/entity-history/timeline/EventCard.tsx @@ -161,7 +161,7 @@ const EventCard = ({ {...currencyConfig} suffix={" " + currencyConfig.unit} displayType="text" - value={parseInt(row[column.label] as string) / 100} + value={parseFloat(row[column.label] as string)} /> diff --git a/frontend/src/js/entity-history/timeline/GroupedContent.tsx b/frontend/src/js/entity-history/timeline/GroupedContent.tsx index 9f16b1368a..2c1758ba94 100644 --- a/frontend/src/js/entity-history/timeline/GroupedContent.tsx +++ b/frontend/src/js/entity-history/timeline/GroupedContent.tsx @@ -172,7 +172,7 @@ const Cell = memo( ); } diff --git a/frontend/src/js/header/Header.tsx b/frontend/src/js/header/Header.tsx index 6fbaa852d9..ff4387ad59 100644 --- a/frontend/src/js/header/Header.tsx +++ b/frontend/src/js/header/Header.tsx @@ -1,5 +1,4 @@ import styled from "@emotion/styled"; -import { FC } from "react"; import { useTranslation } from "react-i18next"; import { useSelector } from "react-redux"; @@ -8,6 +7,7 @@ import { HistoryButton } from "../button/HistoryButton"; import DatasetSelector from "../dataset/DatasetSelector"; import { canViewEntityPreview, useHideLogoutButton } from "../user/selectors"; +import { useTheme } from "@emotion/react"; import { HelpMenu } from "./HelpMenu"; import LogoutButton from "./LogoutButton"; @@ -51,7 +51,6 @@ const Spacer = styled("span")` const Logo = styled("div")` height: 40px; width: ${({ theme }) => theme.img.logoWidth}; - background-image: url(${({ theme }) => theme.img.logo}); background-repeat: no-repeat; background-position-y: 50%; background-size: ${({ theme }) => theme.img.logoBackgroundSize}; @@ -68,7 +67,7 @@ const Headline = styled("h1")` color: ${({ theme }) => theme.col.blueGrayDark}; `; -const Header: FC = () => { +const Header = () => { const { t } = useTranslation(); const canViewHistory = useSelector(canViewEntityPreview); const hideLogoutButton = useHideLogoutButton(); @@ -77,10 +76,17 @@ const Header: FC = () => { StateT["startup"]["config"] >((state) => state.startup.config); + const theme = useTheme(); + const logo = theme.img.logo; + return ( - + {t("headline")} diff --git a/frontend/src/js/symbols/FormSymbol.stories.tsx b/frontend/src/js/symbols/FormSymbol.stories.tsx index cda6318768..517079bde8 100644 --- a/frontend/src/js/symbols/FormSymbol.stories.tsx +++ b/frontend/src/js/symbols/FormSymbol.stories.tsx @@ -1,16 +1,12 @@ -import { ComponentMeta, Story } from "@storybook/react"; -import { 
ComponentProps } from "react"; +import { Meta, StoryFn } from "@storybook/react"; import FormSymbol from "./FormSymbol"; -export default { +const meta = { title: "Symbols/FormSymbol", component: FormSymbol, -} as ComponentMeta; +} as Meta; -const Template: Story> = () => { - return ; -}; +export default meta; -export const Default = Template.bind({}); -Default.args = {}; +export const Default: StoryFn = () => ; diff --git a/frontend/src/js/symbols/QuerySymbol.stories.tsx b/frontend/src/js/symbols/QuerySymbol.stories.tsx index d3aa1e7ece..57286489aa 100644 --- a/frontend/src/js/symbols/QuerySymbol.stories.tsx +++ b/frontend/src/js/symbols/QuerySymbol.stories.tsx @@ -1,16 +1,11 @@ -import { ComponentMeta, Story } from "@storybook/react"; -import { ComponentProps } from "react"; +import { Meta, StoryFn } from "@storybook/react"; import QuerySymbol from "./QuerySymbol"; -export default { +const meta = { title: "Symbols/QuerySymbol", component: QuerySymbol, -} as ComponentMeta; +} as Meta; +export default meta; -const Template: Story> = () => { - return ; -}; - -export const Default = Template.bind({}); -Default.args = {}; +export const Default: StoryFn = () => ; diff --git a/frontend/src/js/ui-components/InputMultiSelect/InputMultiSelect.stories.tsx b/frontend/src/js/ui-components/InputMultiSelect/InputMultiSelect.stories.tsx index c46acdfafd..5332b312c6 100644 --- a/frontend/src/js/ui-components/InputMultiSelect/InputMultiSelect.stories.tsx +++ b/frontend/src/js/ui-components/InputMultiSelect/InputMultiSelect.stories.tsx @@ -1,4 +1,4 @@ -import { ComponentMeta, Story } from "@storybook/react"; +import { Meta, StoryObj } from "@storybook/react"; import { ComponentProps, useState } from "react"; import wordslist from "../../../fixtures/words.json"; @@ -15,11 +15,13 @@ export default { argTypes: { backgroundColor: { control: "#fafafa" }, }, -} as ComponentMeta; +} as Meta; -const Template: Story< - ComponentProps & { passOnResolve?: boolean } -> = ({ passOnResolve, ...args }) => { +type Props = ComponentProps & { + passOnResolve?: boolean; +}; + +const Render = ({ passOnResolve, ...args }: Props) => { const [loading, setLoading] = useState(false); const [options, setOptions] = useState( wl.map((w) => ({ label: w, value: w, disabled: Math.random() < 0.1 })), @@ -67,22 +69,26 @@ const Template: Story< ); }; -export const Default = Template.bind({}); -Default.args = { - indexPrefix: 5, - label: "This is a nice label", - tooltip: - "And here goes some tooltip that really helps the user understand what's going on", - disabled: false, - passOnResolve: true, - creatable: true, - loading: false, -}; -Default.argTypes = { - passOnResolve: { - type: { name: "boolean" }, +type Story = StoryObj; + +export const Default: Story = { + args: { + indexPrefix: 5, + label: "This is a nice label", + tooltip: + "And here goes some tooltip that really helps the user understand what's going on", + disabled: false, + passOnResolve: true, + creatable: true, + loading: false, }, - indexPrefix: { - type: { name: "number", required: false }, + argTypes: { + passOnResolve: { + type: { name: "boolean" }, + }, + indexPrefix: { + type: { name: "number", required: false }, + }, }, + render: Render, }; diff --git a/frontend/src/js/ui-components/InputPlain/InputPlain.stories.tsx b/frontend/src/js/ui-components/InputPlain/InputPlain.stories.tsx index 6214be05a4..0d8c73ce26 100644 --- a/frontend/src/js/ui-components/InputPlain/InputPlain.stories.tsx +++ b/frontend/src/js/ui-components/InputPlain/InputPlain.stories.tsx @@ -1,17 
+1,21 @@ -import { ComponentMeta, Story } from "@storybook/react"; +import { Meta, StoryObj } from "@storybook/react"; import { ComponentProps, useState } from "react"; import InputPlain from "./InputPlain"; -export default { +const meta = { title: "FormComponents/InputPlain", component: InputPlain, argTypes: { backgroundColor: { control: "#fafafa" }, }, -} as ComponentMeta; +} as Meta; -const TemplateString: Story> = (args) => { +export default meta; + +type Story = StoryObj; + +const RenderWithString = (args: ComponentProps) => { const [value, setValue] = useState(""); console.log(value); @@ -25,20 +29,22 @@ const TemplateString: Story> = (args) => { ); }; -export const WithString = TemplateString.bind({}); -WithString.args = { - label: "This is a nice label", - tooltip: - "And here goes some tooltip that really helps the user understand what's going on", - indexPrefix: 5, -}; -WithString.argTypes = { - indexPrefix: { - type: { name: "number", required: false }, +export const WithString: Story = { + args: { + label: "This is a nice label", + tooltip: + "And here goes some tooltip that really helps the user understand what's going on", + indexPrefix: 5, + }, + argTypes: { + indexPrefix: { + type: { name: "number", required: false }, + }, }, + render: RenderWithString, }; -const TemplateNumber: Story> = (args) => { +const RenderWithNumber = (args: ComponentProps) => { const [value, setValue] = useState(null); console.log(value); @@ -53,15 +59,17 @@ const TemplateNumber: Story> = (args) => { ); }; -export const WithNumber = TemplateNumber.bind({}); -WithNumber.args = { - label: "This is a nice label", - tooltip: - "And here goes some tooltip that really helps the user understand what's going on", - indexPrefix: 5, -}; -WithNumber.argTypes = { - indexPrefix: { - type: { name: "number", required: false }, +export const WithNumber: Story = { + args: { + label: "This is a nice label", + tooltip: + "And here goes some tooltip that really helps the user understand what's going on", + indexPrefix: 5, + }, + argTypes: { + indexPrefix: { + type: { name: "number", required: false }, + }, }, + render: RenderWithNumber, }; diff --git a/frontend/src/js/ui-components/InputSelect/InputSelect.stories.tsx b/frontend/src/js/ui-components/InputSelect/InputSelect.stories.tsx index 1d9aa6969f..446bb3583e 100644 --- a/frontend/src/js/ui-components/InputSelect/InputSelect.stories.tsx +++ b/frontend/src/js/ui-components/InputSelect/InputSelect.stories.tsx @@ -1,4 +1,4 @@ -import { ComponentMeta, Story } from "@storybook/react"; +import { Meta, StoryObj } from "@storybook/react"; import { ComponentProps, useState } from "react"; import wordslist from "../../../fixtures/words.json"; @@ -14,9 +14,11 @@ export default { argTypes: { backgroundColor: { control: "#fafafa" }, }, -} as ComponentMeta; +} as Meta; -const Template: Story> = (args) => { +type Story = StoryObj; + +const RenderDefault = (args: ComponentProps) => { const [options] = useState( wl.map((w) => ({ label: w, value: w, disabled: Math.random() < 0.1 })), ); @@ -37,17 +39,19 @@ const Template: Story> = (args) => { ); }; -export const Default = Template.bind({}); -Default.args = { - label: "This is a nice label", - smallMenu: false, - tooltip: - "And here goes some tooltip that really helps the user understand what's going on", - disabled: false, - indexPrefix: 5, -}; -Default.argTypes = { - indexPrefix: { - type: { name: "number", required: false }, +export const Default: Story = { + args: { + label: "This is a nice label", + smallMenu: false, + tooltip: 
+ "And here goes some tooltip that really helps the user understand what's going on", + disabled: false, + indexPrefix: 5, + }, + argTypes: { + indexPrefix: { + type: { name: "number", required: false }, + }, }, + render: RenderDefault, }; diff --git a/frontend/src/localization/de.json b/frontend/src/localization/de.json index 12150190c1..db7b92e579 100644 --- a/frontend/src/localization/de.json +++ b/frontend/src/localization/de.json @@ -274,7 +274,7 @@ "mustBePositiveNumber": "Muss positive Zahl sein", "isRequired": "Erforderlich", "invalidDateRange": "Enddatum liegt vor Startdatum", - "validSelectRequired": "Kompatibler Ausgabewert erforderlich" + "validSelectRequired": "Für folgende Konzepte ist ein kompatibler Ausgabewert erforderlich" }, "default": { "conceptDropzoneLabel": "Füge ein Konzept oder eine Konzeptliste hinzu", diff --git a/frontend/src/localization/en.json b/frontend/src/localization/en.json index e5e556ffc3..f62ef246f6 100644 --- a/frontend/src/localization/en.json +++ b/frontend/src/localization/en.json @@ -274,7 +274,7 @@ "mustBePositiveNumber": "Must be a positive number", "isRequired": "Required", "invalidDateRange": "End date is smaller than start date", - "validSelectRequired": "Valid select required" + "validSelectRequired": "Valid selects required for these concepts" }, "default": { "conceptDropzoneLabel": "Add a concept or a concept list", diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 8d376b9839..a5937f003c 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -2,7 +2,7 @@ "compilerOptions": { "jsx": "react-jsx", "jsxImportSource": "@emotion/react", - "moduleResolution": "node", + "moduleResolution": "bundler", "sourceMap": true, "inlineSourceMap": false, "inlineSources": false, @@ -34,7 +34,10 @@ }, "include": [ "src", - "node_modules/**/*/*.d.ts" + "node_modules/**/*/*.d.ts", + ".storybook/main.ts", + ".storybook/manager.js", + ".storybook/preview.tsx" ], "exclude": [ "node_modules", diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index 5ba892a6fd..267ddde100 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -8,6 +8,7 @@ export default defineConfig({ build: { sourcemap: true, minify: "terser", + assetsInlineLimit: 0, }, envPrefix: "REACT_APP_", plugins: [ @@ -27,8 +28,10 @@ export default defineConfig({ __BUILD_TIMESTAMP__: JSON.stringify( new Date().toISOString().split(".")[0].split("T").join(" "), ), - __BUILD_GIT_DESCRIBE__: fs.existsSync("./git_describe.txt") - ? fs.readFileSync("./git_describe.txt", "utf-8").trim() - : '"__BUILD_GIT_DESCRIBE__"', + __BUILD_GIT_DESCRIBE__: JSON.stringify( + fs.existsSync("./git_describe.txt") + ? 
fs.readFileSync("./git_describe.txt", "utf-8").trim() + : "__BUILD_GIT_DESCRIBE__", + ), }, }); diff --git a/scripts/load_e2e_data.sh b/scripts/load_e2e_data.sh index fa9118d131..ae813e230a 100755 --- a/scripts/load_e2e_data.sh +++ b/scripts/load_e2e_data.sh @@ -13,7 +13,10 @@ until $(curl --output /dev/null --silent --head -H "$h_auth" --fail $admin_api/u sleep 5 done -# create users +echo "Preprocess test data" +java -jar ./executable/target/executable*.jar preprocess --in cypress/support/test_data/ --out cypress/support/test_data/ --desc cypress/support/test_data/data.import.json + +# Create users echo "Creating users and permissions" curl --fail -X POST "$admin_api/users/" -H "$h_ct" -H "$h_auth" -d '{"name": "user1", "label": "User1"}' @@ -37,4 +40,7 @@ sleep 3 echo "Creating concepts" curl --fail -X POST "$admin_api/datasets/dataset1/concepts" -H "$h_ct" -H "$h_auth" -d "@./cypress/support/test_data/all_types.concept.json" +echo "Upload test data" +curl --fail -X POST --compressed "$admin_api/datasets/dataset1/cqpp" -H "content-type:application/octet-stream" -H "$h_auth" --data-binary "@./cypress/support/test_data/table.cqpp" + echo "Done loading data"