Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/common/interfaces/common.interface.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ export interface ILimitsFilter {

export interface IFilters<T, Y = null> {
where?: FilterQuery<T>;
include?: { relation: string }[];
include?: { relation: string; scope?: IFilters<T, Y> }[];
fields?: Y;
limits?: ILimitsFilter;
}
Expand Down
14 changes: 10 additions & 4 deletions src/common/pipes/include-validation.pipe.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,17 @@ import { OrigDatablockLookupKeysEnum } from "src/origdatablocks/types/origdatabl

@Injectable()
export class IncludeValidationPipe
implements PipeTransform<string | string[], string | string[]>
implements
PipeTransform<string | string[], string | string[] | { relation: string }[]>
{
constructor(
private lookupFields:
| Record<DatasetLookupKeysEnum, PipelineStage.Lookup | undefined>
| Record<OrigDatablockLookupKeysEnum, PipelineStage.Lookup | undefined>,
) {}
transform(inValue: string | string[]): string[] | string {
transform(
inValue: string | string[] | { relation: string }[],
): string[] | string | { relation: string }[] {
if (!inValue) {
return inValue;
}
Expand All @@ -27,11 +30,14 @@ export class IncludeValidationPipe
: Array(inValue);

includeValueParsed?.map((field) => {
if (Object.keys(this.lookupFields).includes(field)) {
let relationField = field;
if (typeof field === "object" && "relation" in field)
relationField = (field as { relation: string }).relation;
if (Object.keys(this.lookupFields).includes(relationField)) {
return field;
} else {
throw new BadRequestException(
`Provided include field ${JSON.stringify(field)} is not part of the dataset relations`,
`Provided include field ${JSON.stringify(relationField)} is not part of the dataset relations`,
);
}
});
Expand Down
17 changes: 17 additions & 0 deletions src/common/pipes/json-validation.pipe.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import { PipeTransform, Injectable, BadRequestException } from "@nestjs/common";

@Injectable()
export class JsonValidationPipe
  implements PipeTransform<string, string | undefined>
{
  /**
   * Validates that a raw query-string filter is well-formed JSON.
   *
   * The value is only parsed to check syntax; the original string is
   * returned unchanged so downstream pipes/handlers can parse it themselves.
   *
   * @param inValue raw filter string from the request query
   * @returns the untouched input string, or `undefined` when the value is
   *          empty/absent (no filter supplied is not an error)
   * @throws BadRequestException when the string is not parseable as JSON
   */
  transform(inValue: string): string | undefined {
    if (!inValue) {
      // Missing/empty filter is allowed; signal "no filter" to the handler.
      return undefined;
    }
    try {
      JSON.parse(inValue);
    } catch (err) {
      // Narrow the unknown catch variable instead of asserting `as Error`.
      const message = err instanceof Error ? err.message : String(err);
      throw new BadRequestException(`Invalid JSON in filter: ${message}`);
    }
    return inValue;
  }
}
160 changes: 52 additions & 108 deletions src/datasets/datasets.controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,6 @@ import {
datasetsFullQueryDescriptionFields,
datasetsFullQueryExampleFields,
filterDescription,
filterExample,
fullQueryDescriptionLimits,
fullQueryExampleLimits,
replaceLikeOperator,
Expand Down Expand Up @@ -113,6 +112,10 @@ import { LifecycleClass } from "./schemas/lifecycle.schema";
import { RelationshipClass } from "./schemas/relationship.schema";
import { TechniqueClass } from "./schemas/technique.schema";
import { DatasetType } from "./types/dataset-type.enum";
import { getSwaggerDatasetFilterContent } from "./types/dataset-filter-content";
import { DATASET_LOOKUP_FIELDS } from "./types/dataset-lookup";
import { IncludeValidationPipe } from "src/common/pipes/include-validation.pipe";
import { JsonValidationPipe } from "src/common/pipes/json-validation.pipe";

@ApiBearerAuth()
@ApiExtraModels(
Expand Down Expand Up @@ -235,7 +238,10 @@ export class DatasetsController {
ownerGroup: { $in: user.currentGroups },
};
} else if (canViewPublic) {
mergedFilters.where = { isPublished: true };
mergedFilters.where = {
...mergedFilters.where,
isPublished: true,
};
}
}

Expand Down Expand Up @@ -883,7 +889,7 @@ export class DatasetsController {
filterDescription,
required: false,
type: String,
example: filterExample,
content: getSwaggerDatasetFilterContent(),
})
@ApiResponse({
status: HttpStatus.OK,
Expand All @@ -894,66 +900,25 @@ export class DatasetsController {
async findAll(
@Req() request: Request,
@Headers() headers: Record<string, string>,
@Query(new FilterPipe()) queryFilter: { filter?: string },
@Query(
"filter",
new JsonValidationPipe(),
new IncludeValidationPipe(DATASET_LOOKUP_FIELDS),
)
queryFilter: string,
): Promise<OutputDatasetObsoleteDto[]> {
const mergedFilters = replaceLikeOperator(
this.updateMergedFiltersForList(
request,
this.getFilters(headers, queryFilter),
this.getFilters(headers, { filter: queryFilter }),
) as Record<string, unknown>,
) as IFilters<DatasetDocument, IDatasetFields>;

// this should be implemented at database level
const datasets = await this.datasetsService.findAll(mergedFilters);
let outputDatasets: OutputDatasetObsoleteDto[] = [];
if (datasets && datasets.length > 0) {
const includeFilters = mergedFilters.include ?? [];
outputDatasets = datasets.map((dataset) =>
this.convertCurrentToObsoleteSchema(dataset),
);
await Promise.all(
outputDatasets.map(async (dataset) => {
if (includeFilters) {
await Promise.all(
includeFilters.map(async ({ relation }) => {
switch (relation) {
case "attachments": {
dataset.attachments = await this.attachmentsService.findAll(
{
datasetId: dataset.pid,
},
);
break;
}
case "origdatablocks": {
dataset.origdatablocks =
await this.origDatablocksService.findAll({
where: {
datasetId: dataset.pid,
},
});
break;
}
case "datablocks": {
dataset.datablocks = await this.datablocksService.findAll({
where: {
datasetId: dataset.pid,
},
});
break;
}
}
}),
);
} else {
/* eslint-disable @typescript-eslint/no-unused-expressions */
// TODO: check the eslint error "Expected an assignment or function call and instead saw an expression"
dataset;
}
}),
);
}
return outputDatasets as OutputDatasetObsoleteDto[];
const datasets = (await this.datasetsService.findAllComplete(
mergedFilters,
"v3",
)) as DatasetClass[];
return datasets.map(this.convertCurrentToObsoleteSchema);
}

// GET /datasets/fullquery
Expand Down Expand Up @@ -1216,7 +1181,7 @@ export class DatasetsController {
filterDescription,
required: false,
type: String,
example: filterExample,
content: getSwaggerDatasetFilterContent(),
})
@ApiResponse({
status: HttpStatus.OK,
Expand All @@ -1228,51 +1193,12 @@ export class DatasetsController {
@Headers() headers: Record<string, string>,
@Query(new FilterPipe()) queryFilter: { filter?: string },
): Promise<OutputDatasetObsoleteDto | null> {
const mergedFilters = replaceLikeOperator(
this.updateMergedFiltersForList(
request,
this.getFilters(headers, queryFilter),
) as Record<string, unknown>,
) as IFilters<DatasetDocument, IDatasetFields>;

const databaseDataset = await this.datasetsService.findOne(mergedFilters);

const outputDataset =
await this.convertCurrentToObsoleteSchema(databaseDataset);

if (outputDataset) {
const includeFilters = mergedFilters.include ?? [];
await Promise.all(
includeFilters.map(async ({ relation }) => {
switch (relation) {
case "attachments": {
outputDataset.attachments = await this.attachmentsService.findAll(
{
where: {
datasetId: outputDataset.pid,
},
},
);
break;
}
case "origdatablocks": {
outputDataset.origdatablocks =
await this.origDatablocksService.findAll({
where: { datasetId: outputDataset.pid },
});
break;
}
case "datablocks": {
outputDataset.datablocks = await this.datablocksService.findAll({
where: { datasetId: outputDataset.pid },
});
break;
}
}
}),
);
}
return outputDataset;
const dataset = await this.findAll(
request,
headers,
queryFilter.filter || "{}",
);
return dataset[0] as OutputDatasetObsoleteDto;
}

// GET /datasets/count
Expand Down Expand Up @@ -1319,7 +1245,6 @@ export class DatasetsController {
}

// GET /datasets/:id
//@UseGuards(PoliciesGuard)
@UseGuards(PoliciesGuard)
@CheckPolicies(
"datasets",
Expand All @@ -1343,12 +1268,31 @@ export class DatasetsController {
status: HttpStatus.NOT_FOUND,
description: "Dataset not found",
})
async findById(@Req() request: Request, @Param("pid") id: string) {
const dataset = this.convertCurrentToObsoleteSchema(
await this.checkPermissionsForDatasetObsolete(request, id),
@ApiQuery({
name: "filter",
description:
"Database filters to apply when retrieving datasets\n" +
filterDescription,
required: false,
type: String,
content: getSwaggerDatasetFilterContent(),
})
async findById(
@Req() request: Request,
@Param("pid") id: string,
@Headers() headers: Record<string, string>,
@Query(new FilterPipe()) queryFilter: { filter?: string },
) {
const filterObj = JSON.parse(queryFilter.filter ?? "{}");
filterObj.where = filterObj.where ?? {};
filterObj.where.pid = id;
const dataset = await this.findAll(
request,
headers,
JSON.stringify(filterObj),
);

return dataset as OutputDatasetObsoleteDto;
if (dataset.length == 0) throw new ForbiddenException();
return dataset[0] as OutputDatasetObsoleteDto;
}

// PATCH /datasets/:id
Expand Down
47 changes: 36 additions & 11 deletions src/datasets/datasets.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ export class DatasetsService {
addLookupFields(
pipeline: PipelineStage[],
datasetLookupFields?: DatasetLookupKeysEnum[],
addRelationPermissions = true,
) {
if (datasetLookupFields?.includes(DatasetLookupKeysEnum.all)) {
datasetLookupFields = Object.keys(DATASET_LOOKUP_FIELDS).filter(
Expand All @@ -73,12 +74,35 @@ export class DatasetsService {
}

datasetLookupFields?.forEach((field) => {
let scope: IFilters<unknown> = {};
if (typeof field === "object" && "relation" in field) {
scope = (field as { scope: typeof scope }).scope ?? {};
field = (field as { relation: string })
.relation as DatasetLookupKeysEnum;
}
const fieldValue = structuredClone(DATASET_LOOKUP_FIELDS[field]);

if (fieldValue) {
fieldValue.$lookup.as = field;

this.datasetsAccessService.addRelationFieldAccess(fieldValue);
if (addRelationPermissions)
this.datasetsAccessService.addRelationFieldAccess(fieldValue);

const includePipeline = [];
if (scope.where) includePipeline.push({ $match: scope.where });
if (scope.fields)
includePipeline.push({
$project: parsePipelineProjection(scope.fields),
});
if (scope?.limits?.skip)
includePipeline.push({ $skip: scope.limits.skip });
if (scope?.limits?.limit)
includePipeline.push({ $limit: scope.limits.limit });

if (includePipeline.length > 0)
fieldValue.$lookup.pipeline =
fieldValue.$lookup.pipeline?.concat(includePipeline) ??
includePipeline;

pipeline.push(fieldValue);
}
Expand Down Expand Up @@ -120,37 +144,38 @@ export class DatasetsService {

async findAllComplete(
filter: FilterQuery<DatasetDocument>,
datasetVersion = "v4",
): Promise<PartialOutputDatasetDto[]> {
const whereFilter: FilterQuery<DatasetDocument> = filter.where ?? {};
const fieldsProjection: string[] = filter.fields ?? {};
const limits: QueryOptions<DatasetDocument> = filter.limits ?? {
limit: 10,
skip: 0,
sort: { createdAt: "desc" },
};
const limits: QueryOptions<DatasetDocument> | undefined =
datasetVersion === "v4"
? (filter.limits ?? { limit: 10, skip: 0, sort: { createdAt: "desc" } })
: filter.limits;

const pipeline: PipelineStage[] = [{ $match: whereFilter }];
this.addLookupFields(pipeline, filter.include);
this.addLookupFields(pipeline, filter.include, datasetVersion === "v4");

if (!isEmpty(fieldsProjection)) {
const projection = parsePipelineProjection(fieldsProjection);
pipeline.push({ $project: projection });
}

if (!isEmpty(limits.sort)) {
if (limits?.sort && !isEmpty(limits.sort)) {
const sort = parsePipelineSort(limits.sort);
pipeline.push({ $sort: sort });
}

pipeline.push({ $skip: limits.skip || 0 });
if (limits?.skip) pipeline.push({ $skip: limits.skip });

pipeline.push({ $limit: limits.limit || 10 });
if (limits?.limit) pipeline.push({ $limit: limits.limit });

const data = await this.datasetModel
.aggregate<PartialOutputDatasetDto>(pipeline)
.exec();

return data;
if (datasetVersion === "v4") return data;
return data.map((d) => this.datasetModel.hydrate(d));
}

async fullquery(
Expand Down
Loading