diff --git a/src/core/domain/useCases/validators/NewResourceValidator.ts b/src/core/domain/useCases/validators/NewResourceValidator.ts new file mode 100644 index 00000000..2abb70ae --- /dev/null +++ b/src/core/domain/useCases/validators/NewResourceValidator.ts @@ -0,0 +1,6 @@ +import { ResourceValidationError } from './errors/ResourceValidationError'; + +export interface NewResourceValidator { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + validate(...args: any[]): Promise<void | ResourceValidationError>; +} diff --git a/src/core/domain/useCases/validators/errors/ResourceValidationError.ts b/src/core/domain/useCases/validators/errors/ResourceValidationError.ts new file mode 100644 index 00000000..4d4f7cbb --- /dev/null +++ b/src/core/domain/useCases/validators/errors/ResourceValidationError.ts @@ -0,0 +1,5 @@ +export class ResourceValidationError extends Error { + constructor(message: string) { + super(message); + } +} diff --git a/src/datasets/domain/dtos/NewDatasetDTO.ts b/src/datasets/domain/dtos/NewDatasetDTO.ts new file mode 100644 index 00000000..5d6cdaff --- /dev/null +++ b/src/datasets/domain/dtos/NewDatasetDTO.ts @@ -0,0 +1,21 @@ +import { DatasetLicense } from '../models/Dataset'; + +export interface NewDatasetDTO { + license?: DatasetLicense; + metadataBlockValues: NewDatasetMetadataBlockValuesDTO[]; +} + +export interface NewDatasetMetadataBlockValuesDTO { + name: string; + fields: NewDatasetMetadataFieldsDTO; +} + +export type NewDatasetMetadataFieldsDTO = Record<string, NewDatasetMetadataFieldValueDTO>; + +export type NewDatasetMetadataFieldValueDTO = + | string
  | string[]
  | NewDatasetMetadataChildFieldValueDTO
  | NewDatasetMetadataChildFieldValueDTO[]; +export type NewDatasetMetadataChildFieldValueDTO = Record<string, string>; diff --git a/src/datasets/domain/models/CreatedDatasetIdentifiers.ts b/src/datasets/domain/models/CreatedDatasetIdentifiers.ts new file mode 100644 index 00000000..c4dc825a --- /dev/null +++ b/src/datasets/domain/models/CreatedDatasetIdentifiers.ts @@ -0,0 +1,4 @@ +export interface 
CreatedDatasetIdentifiers { + persistentId: string; + numericId: number; +} diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index bc16c3f7..fa3c697d 100644 --- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -2,6 +2,9 @@ import { Dataset } from '../models/Dataset'; import { DatasetUserPermissions } from '../models/DatasetUserPermissions'; import { DatasetLock } from '../models/DatasetLock'; import { DatasetPreviewSubset } from '../models/DatasetPreviewSubset'; +import { NewDatasetDTO } from '../dtos/NewDatasetDTO'; +import { MetadataBlock } from '../../../metadataBlocks'; +import { CreatedDatasetIdentifiers } from '../models/CreatedDatasetIdentifiers'; export interface IDatasetsRepository { getDatasetSummaryFieldNames(): Promise<string[]>; @@ -12,4 +15,9 @@ export interface IDatasetsRepository { getDatasetUserPermissions(datasetId: number | string): Promise<DatasetUserPermissions>; getDatasetLocks(datasetId: number | string): Promise<DatasetLock[]>; getAllDatasetPreviews(limit?: number, offset?: number): Promise<DatasetPreviewSubset>; + createDataset( + newDataset: NewDatasetDTO, + datasetMetadataBlocks: MetadataBlock[], + collectionId: string, + ): Promise<CreatedDatasetIdentifiers>; } diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts new file mode 100644 index 00000000..ca352d82 --- /dev/null +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -0,0 +1,39 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase'; +import { IDatasetsRepository } from '../repositories/IDatasetsRepository'; +import { NewDatasetDTO, NewDatasetMetadataBlockValuesDTO } from '../dtos/NewDatasetDTO'; +import { NewResourceValidator } from '../../../core/domain/useCases/validators/NewResourceValidator'; +import { IMetadataBlocksRepository } from '../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; +import { MetadataBlock } from 
'../../../metadataBlocks'; +import { CreatedDatasetIdentifiers } from '../models/CreatedDatasetIdentifiers'; + +export class CreateDataset implements UseCase<CreatedDatasetIdentifiers> { + private datasetsRepository: IDatasetsRepository; + private metadataBlocksRepository: IMetadataBlocksRepository; + private newDatasetValidator: NewResourceValidator; + + constructor( + datasetsRepository: IDatasetsRepository, + metadataBlocksRepository: IMetadataBlocksRepository, + newDatasetValidator: NewResourceValidator, + ) { + this.datasetsRepository = datasetsRepository; + this.metadataBlocksRepository = metadataBlocksRepository; + this.newDatasetValidator = newDatasetValidator; + } + + async execute(newDataset: NewDatasetDTO, collectionId: string = 'root'): Promise<CreatedDatasetIdentifiers> { + const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset); + this.newDatasetValidator.validate(newDataset, metadataBlocks); + return this.datasetsRepository.createDataset(newDataset, metadataBlocks, collectionId); + } + + async getNewDatasetMetadataBlocks(newDataset: NewDatasetDTO): Promise<MetadataBlock[]> { + let metadataBlocks: MetadataBlock[] = []; + await Promise.all( + newDataset.metadataBlockValues.map(async (metadataBlockValue: NewDatasetMetadataBlockValuesDTO) => { + metadataBlocks.push(await this.metadataBlocksRepository.getMetadataBlockByName(metadataBlockValue.name)); + }), + ); + return metadataBlocks; + } +} diff --git a/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts new file mode 100644 index 00000000..717f2b4c --- /dev/null +++ b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts @@ -0,0 +1,29 @@ +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { FieldValidationError } from './errors/FieldValidationError'; +import { MetadataFieldInfo } from '../../../../metadataBlocks'; + +export interface NewDatasetMetadataFieldAndValueInfo { + metadataFieldInfo: MetadataFieldInfo; + 
metadataFieldKey: string; + metadataFieldValue: NewDatasetMetadataFieldValueDTO; + metadataBlockName: string; + metadataParentFieldKey?: string; + metadataFieldPosition?: number; +} + +export abstract class BaseMetadataFieldValidator { + abstract validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void; + + protected createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + reason: string, + ): FieldValidationError { + return new FieldValidationError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + reason, + ); + } +} diff --git a/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts new file mode 100644 index 00000000..35111901 --- /dev/null +++ b/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts @@ -0,0 +1,51 @@ +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; +import { MultipleMetadataFieldValidator } from './MultipleMetadataFieldValidator'; +import { SingleMetadataFieldValidator } from './SingleMetadataFieldValidator'; +import { EmptyFieldError } from './errors/EmptyFieldError'; +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; + +export class MetadataFieldValidator extends BaseMetadataFieldValidator { + constructor( + private singleMetadataFieldValidator: SingleMetadataFieldValidator, + private multipleMetadataFieldValidator: MultipleMetadataFieldValidator, + ) { + super(); + } + + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = 
newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if ( + metadataFieldValue == undefined || + metadataFieldValue == null || + this.isEmptyString(metadataFieldValue) || + this.isEmptyArray(metadataFieldValue) + ) { + if (metadataFieldInfo.isRequired) { + throw new EmptyFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } else { + return; + } + } + if (newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.multiple) { + this.multipleMetadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); + } else { + this.singleMetadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); + } + } + + private isEmptyString(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { + return typeof metadataFieldValue == 'string' && metadataFieldValue.trim() === ''; + } + + private isEmptyArray(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { + return ( + Array.isArray(metadataFieldValue) && (metadataFieldValue as Array<unknown>).length == 0 + ); + } +} diff --git a/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts new file mode 100644 index 00000000..ae0be7fa --- /dev/null +++ b/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts @@ -0,0 +1,55 @@ +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { SingleMetadataFieldValidator } from './SingleMetadataFieldValidator'; + +export class MultipleMetadataFieldValidator extends BaseMetadataFieldValidator { + constructor(private singleMetadataFieldValidator: SingleMetadataFieldValidator) { + super(); + } + + 
validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (!Array.isArray(metadataFieldValue)) { + throw this.createGeneralValidationError(newDatasetMetadataFieldAndValueInfo, 'Expecting an array of values.'); + } + if (this.isValidArrayType(metadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting an array of child fields, not strings.', + ); + } else if (this.isValidArrayType(metadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting an array of strings, not child fields.', + ); + } else if ( + !this.isValidArrayType(metadataFieldValue, 'object') && + !this.isValidArrayType(metadataFieldValue, 'string') + ) { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'The provided array of values is not valid.', + ); + } + + const fieldValues = metadataFieldValue as NewDatasetMetadataFieldValueDTO[]; + fieldValues.forEach((value, metadataFieldPosition) => { + this.singleMetadataFieldValidator.validate({ + metadataFieldInfo: metadataFieldInfo, + metadataFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldValue: value, + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: metadataFieldPosition, + }); + }); + } + + private isValidArrayType( + metadataFieldValue: Array<string | NewDatasetMetadataFieldValueDTO>, + expectedType: 'string' | 'object', + ): boolean { + return metadataFieldValue.every((item: string | NewDatasetMetadataFieldValueDTO) => typeof item === expectedType); + } +} diff --git 
a/src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts new file mode 100644 index 00000000..bb07efe6 --- /dev/null +++ b/src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts @@ -0,0 +1,33 @@ +import { NewDatasetDTO, NewDatasetMetadataBlockValuesDTO } from '../../dtos/NewDatasetDTO'; +import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; +import { MetadataBlock } from '../../../../metadataBlocks'; +import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; +import { BaseMetadataFieldValidator } from './BaseMetadataFieldValidator'; + +export class NewDatasetResourceValidator implements NewResourceValidator { + constructor(private metadataFieldValidator: BaseMetadataFieldValidator) {} + + async validate(resource: NewDatasetDTO, metadataBlocks: MetadataBlock[]): Promise<void | ResourceValidationError> { + for (const metadataBlockValues of resource.metadataBlockValues) { + await this.validateMetadataBlock(metadataBlockValues, metadataBlocks); + } + } + + private async validateMetadataBlock( + metadataBlockValues: NewDatasetMetadataBlockValuesDTO, + metadataBlocks: MetadataBlock[], + ) { + const metadataBlockName = metadataBlockValues.name; + const metadataBlock: MetadataBlock = metadataBlocks.find( + (metadataBlock) => metadataBlock.name === metadataBlockName, + ); + for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { + this.metadataFieldValidator.validate({ + metadataFieldInfo: metadataBlock.metadataFields[metadataFieldKey], + metadataFieldKey: metadataFieldKey, + metadataFieldValue: metadataBlockValues.fields[metadataFieldKey], + metadataBlockName: metadataBlockName, + }); + } + } +} diff --git a/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts new file mode 100644 
index 00000000..e338457c --- /dev/null +++ b/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts @@ -0,0 +1,95 @@ +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; +import { ControlledVocabularyFieldError } from './errors/ControlledVocabularyFieldError'; +import { DateFormatFieldError } from './errors/DateFormatFieldError'; +import { MetadataFieldValidator } from './MetadataFieldValidator'; +import { NewDatasetMetadataChildFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { MultipleMetadataFieldValidator } from './MultipleMetadataFieldValidator'; + +export class SingleMetadataFieldValidator extends BaseMetadataFieldValidator { + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (Array.isArray(metadataFieldValue)) { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting a single field, not an array.', + ); + } + if (typeof metadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting a string, not child fields.', + ); + } + if (typeof metadataFieldValue === 'string' && metadataFieldInfo.type === 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting child fields, not a string.', + ); + } + this.validateFieldValue(newDatasetMetadataFieldAndValueInfo); + } + + private validateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (metadataFieldInfo.isControlledVocabulary) { + this.validateControlledVocabularyFieldValue(newDatasetMetadataFieldAndValueInfo); + } 
+ + if (metadataFieldInfo.type == 'DATE') { + this.validateDateFieldValue(newDatasetMetadataFieldAndValueInfo); + } + + if (metadataFieldInfo.childMetadataFields != undefined) { + this.validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo); + } + } + + private validateControlledVocabularyFieldValue( + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + ) { + if ( + !newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.controlledVocabularyValues.includes( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string, + ) + ) { + throw new ControlledVocabularyFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } + } + + private validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; + if (!dateFormatRegex.test(newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string)) { + throw new DateFormatFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } + } + + private validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); + const metadataFieldValidator = new MetadataFieldValidator(this, new MultipleMetadataFieldValidator(this)); + for (const childMetadataFieldKey of childMetadataFieldKeys) { + const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; + 
metadataFieldValidator.validate({ + metadataFieldInfo: childMetadataFieldInfo, + metadataFieldKey: childMetadataFieldKey, + metadataFieldValue: ( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValueDTO + )[childMetadataFieldKey], + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + }); + } + } +} diff --git a/src/datasets/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts b/src/datasets/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts new file mode 100644 index 00000000..b628f53f --- /dev/null +++ b/src/datasets/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts @@ -0,0 +1,18 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class ControlledVocabularyFieldError extends FieldValidationError { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + fieldPosition, + 'The field does not have a valid controlled vocabulary value.', + ); + } +} diff --git a/src/datasets/domain/useCases/validators/errors/DateFormatFieldError.ts b/src/datasets/domain/useCases/validators/errors/DateFormatFieldError.ts new file mode 100644 index 00000000..a6b36fa5 --- /dev/null +++ b/src/datasets/domain/useCases/validators/errors/DateFormatFieldError.ts @@ -0,0 +1,18 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class DateFormatFieldError extends FieldValidationError { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + 
fieldPosition, + 'The field requires a valid date format (YYYY-MM-DD).', + ); + } +} diff --git a/src/datasets/domain/useCases/validators/errors/EmptyFieldError.ts b/src/datasets/domain/useCases/validators/errors/EmptyFieldError.ts new file mode 100644 index 00000000..e1ca1d7a --- /dev/null +++ b/src/datasets/domain/useCases/validators/errors/EmptyFieldError.ts @@ -0,0 +1,18 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class EmptyFieldError extends FieldValidationError { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + fieldPosition, + 'The field should not be empty.', + ); + } +} diff --git a/src/datasets/domain/useCases/validators/errors/FieldValidationError.ts b/src/datasets/domain/useCases/validators/errors/FieldValidationError.ts new file mode 100644 index 00000000..2c3c6a2e --- /dev/null +++ b/src/datasets/domain/useCases/validators/errors/FieldValidationError.ts @@ -0,0 +1,32 @@ +import { ResourceValidationError } from '../../../../../core/domain/useCases/validators/errors/ResourceValidationError'; + +export class FieldValidationError extends ResourceValidationError { + citationBlockName: string; + metadataFieldName: string; + parentMetadataFieldName?: string; + fieldPosition?: number; + + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + reason?: string, + ) { + let message = `There was an error when validating the field ${metadataFieldName} from metadata block ${citationBlockName}`; + if (parentMetadataFieldName) { + message += ` with parent field ${parentMetadataFieldName}`; + } + if (fieldPosition) { + message += ` in position ${fieldPosition}`; + } + if (reason) { + message += `. 
Reason was: ${reason}`; + } + super(message); + this.citationBlockName = citationBlockName; + this.metadataFieldName = metadataFieldName; + this.parentMetadataFieldName = parentMetadataFieldName; + this.fieldPosition = fieldPosition; + } +} diff --git a/src/datasets/index.ts b/src/datasets/index.ts index 07a1574a..b75784b6 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -7,6 +7,12 @@ import { GetPrivateUrlDatasetCitation } from './domain/useCases/GetPrivateUrlDat import { GetDatasetUserPermissions } from './domain/useCases/GetDatasetUserPermissions'; import { GetDatasetLocks } from './domain/useCases/GetDatasetLocks'; import { GetAllDatasetPreviews } from './domain/useCases/GetAllDatasetPreviews'; +import { NewDatasetResourceValidator } from './domain/useCases/validators/NewDatasetResourceValidator'; +import { MetadataBlocksRepository } from '../metadataBlocks/infra/repositories/MetadataBlocksRepository'; +import { CreateDataset } from './domain/useCases/CreateDataset'; +import { MetadataFieldValidator } from './domain/useCases/validators/MetadataFieldValidator'; +import { SingleMetadataFieldValidator } from './domain/useCases/validators/SingleMetadataFieldValidator'; +import { MultipleMetadataFieldValidator } from './domain/useCases/validators/MultipleMetadataFieldValidator'; const datasetsRepository = new DatasetsRepository(); @@ -18,6 +24,16 @@ const getPrivateUrlDatasetCitation = new GetPrivateUrlDatasetCitation(datasetsRe const getDatasetUserPermissions = new GetDatasetUserPermissions(datasetsRepository); const getDatasetLocks = new GetDatasetLocks(datasetsRepository); const getAllDatasetPreviews = new GetAllDatasetPreviews(datasetsRepository); +const singleMetadataFieldValidator = new SingleMetadataFieldValidator(); +const metadataFieldValidator = new MetadataFieldValidator( + new SingleMetadataFieldValidator(), + new MultipleMetadataFieldValidator(singleMetadataFieldValidator), +); +const createDataset = new CreateDataset( + 
datasetsRepository, + new MetadataBlocksRepository(), + new NewDatasetResourceValidator(metadataFieldValidator), +); export { getDatasetSummaryFieldNames, @@ -28,6 +44,7 @@ export { getDatasetUserPermissions, getDatasetLocks, getAllDatasetPreviews, + createDataset, }; export { DatasetNotNumberedVersion } from './domain/models/DatasetNotNumberedVersion'; export { DatasetUserPermissions } from './domain/models/DatasetUserPermissions'; @@ -45,3 +62,11 @@ export { } from './domain/models/Dataset'; export { DatasetPreview } from './domain/models/DatasetPreview'; export { DatasetPreviewSubset } from './domain/models/DatasetPreviewSubset'; +export { + NewDatasetDTO as NewDataset, + NewDatasetMetadataBlockValuesDTO as NewDatasetMetadataBlockValues, + NewDatasetMetadataFieldsDTO as NewDatasetMetadataFields, + NewDatasetMetadataFieldValueDTO as NewDatasetMetadataFieldValue, + NewDatasetMetadataChildFieldValueDTO as NewDatasetMetadataChildFieldValue, +} from './domain/dtos/NewDatasetDTO'; +export { CreatedDatasetIdentifiers } from './domain/models/CreatedDatasetIdentifiers'; diff --git a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index 541f1698..3323c429 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -8,6 +8,10 @@ import { DatasetLock } from '../../domain/models/DatasetLock'; import { transformDatasetLocksResponseToDatasetLocks } from './transformers/datasetLocksTransformers'; import { transformDatasetPreviewsResponseToDatasetPreviewSubset } from './transformers/datasetPreviewsTransformers'; import { DatasetPreviewSubset } from '../../domain/models/DatasetPreviewSubset'; +import { NewDatasetDTO } from '../../domain/dtos/NewDatasetDTO'; +import { MetadataBlock } from '../../../metadataBlocks'; +import { transformNewDatasetModelToRequestPayload } from './transformers/newDatasetTransformers'; +import { CreatedDatasetIdentifiers } 
from '../../domain/models/CreatedDatasetIdentifiers'; export interface GetAllDatasetPreviewsQueryParams { per_page?: number; @@ -106,4 +110,25 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi throw error; }); } + + public async createDataset( + newDataset: NewDatasetDTO, + datasetMetadataBlocks: MetadataBlock[], + collectionId: string, + ): Promise<CreatedDatasetIdentifiers> { + return this.doPost( + `/dataverses/${collectionId}/datasets`, + transformNewDatasetModelToRequestPayload(newDataset, datasetMetadataBlocks), + ) + .then((response) => { + const responseData = response.data.data; + return { + persistentId: responseData.persistentId, + numericId: responseData.id, + }; + }) + .catch((error) => { + throw error; + }); + } } diff --git a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts new file mode 100644 index 00000000..5ae104b5 --- /dev/null +++ b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts @@ -0,0 +1,140 @@ +import { + NewDatasetDTO, + NewDatasetMetadataBlockValuesDTO, + NewDatasetMetadataFieldsDTO, + NewDatasetMetadataFieldValueDTO, + NewDatasetMetadataChildFieldValueDTO, +} from '../../../domain/dtos/NewDatasetDTO'; +import { DatasetLicense } from '../../../domain/models/Dataset'; +import { MetadataBlock, MetadataFieldInfo } from '../../../../metadataBlocks'; + +export interface NewDatasetRequestPayload { + datasetVersion: { + license?: DatasetLicense; + metadataBlocks: Record<string, MetadataBlockRequestPayload>; + }; +} + +export interface MetadataBlockRequestPayload { + fields: MetadataFieldRequestPayload[]; + displayName: string; +} + +export interface MetadataFieldRequestPayload { + value: MetadataFieldValueRequestPayload; + typeClass: string; + multiple: boolean; + typeName: string; +} + +export type MetadataFieldValueRequestPayload = + | string + | string[] + | Record<string, MetadataFieldRequestPayload> + | Record<string, MetadataFieldRequestPayload>[]; + +export const transformNewDatasetModelToRequestPayload = ( + newDataset: 
NewDatasetDTO, + metadataBlocks: MetadataBlock[], +): NewDatasetRequestPayload => { + return { + datasetVersion: { + ...(newDataset.license && { license: newDataset.license }), + metadataBlocks: transformMetadataBlockModelsToRequestPayload(newDataset.metadataBlockValues, metadataBlocks), + }, + }; +}; + +export const transformMetadataBlockModelsToRequestPayload = ( + newDatasetMetadataBlocksValues: NewDatasetMetadataBlockValuesDTO[], + metadataBlocks: MetadataBlock[], +): Record<string, MetadataBlockRequestPayload> => { + let metadataBlocksRequestPayload: Record<string, MetadataBlockRequestPayload> = {}; + newDatasetMetadataBlocksValues.forEach(function (newDatasetMetadataBlockValues: NewDatasetMetadataBlockValuesDTO) { + const metadataBlock: MetadataBlock = metadataBlocks.find( + (metadataBlock) => metadataBlock.name == newDatasetMetadataBlockValues.name, + ); + metadataBlocksRequestPayload[newDatasetMetadataBlockValues.name] = { + fields: transformMetadataFieldModelsToRequestPayload( + newDatasetMetadataBlockValues.fields, + metadataBlock.metadataFields, + ), + displayName: metadataBlock.displayName, + }; + }); + return metadataBlocksRequestPayload; +}; + +export const transformMetadataFieldModelsToRequestPayload = ( + newDatasetMetadataFields: NewDatasetMetadataFieldsDTO, + metadataBlockFields: Record<string, MetadataFieldInfo>, +): MetadataFieldRequestPayload[] => { + let metadataFieldsRequestPayload: MetadataFieldRequestPayload[] = []; + for (const metadataFieldKey of Object.keys(newDatasetMetadataFields)) { + const newDatasetMetadataChildFieldValue: NewDatasetMetadataFieldValueDTO = + newDatasetMetadataFields[metadataFieldKey]; + metadataFieldsRequestPayload.push({ + value: transformMetadataFieldValueToRequestPayload( + newDatasetMetadataChildFieldValue, + metadataBlockFields[metadataFieldKey], + ), + typeClass: metadataBlockFields[metadataFieldKey].typeClass, + multiple: metadataBlockFields[metadataFieldKey].multiple, + typeName: metadataFieldKey, + }); + } + return metadataFieldsRequestPayload; +}; + +export const 
transformMetadataFieldValueToRequestPayload = ( + newDatasetMetadataFieldValue: NewDatasetMetadataFieldValueDTO, + metadataBlockFieldInfo: MetadataFieldInfo, +): MetadataFieldValueRequestPayload => { + let value: MetadataFieldValueRequestPayload; + if (metadataBlockFieldInfo.multiple) { + const newDatasetMetadataChildFieldValues = newDatasetMetadataFieldValue as + | string[] + | NewDatasetMetadataChildFieldValueDTO[]; + if (typeof newDatasetMetadataChildFieldValues[0] == 'string') { + value = newDatasetMetadataFieldValue as string[]; + } else { + value = []; + (newDatasetMetadataChildFieldValues as NewDatasetMetadataChildFieldValueDTO[]).forEach(function ( + childMetadataFieldValue: NewDatasetMetadataChildFieldValueDTO, + ) { + (value as Record<string, MetadataFieldRequestPayload>[]).push( + transformMetadataChildFieldValueToRequestPayload(childMetadataFieldValue, metadataBlockFieldInfo), + ); + }); + } + } else { + if (typeof newDatasetMetadataFieldValue == 'string') { + value = newDatasetMetadataFieldValue; + } else { + value = transformMetadataChildFieldValueToRequestPayload( + newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValueDTO, + metadataBlockFieldInfo, + ); + } + } + return value; +}; + +export const transformMetadataChildFieldValueToRequestPayload = ( + newDatasetMetadataChildFieldValue: NewDatasetMetadataChildFieldValueDTO, + metadataBlockFieldInfo: MetadataFieldInfo, +): Record<string, MetadataFieldRequestPayload> => { + let metadataChildFieldRequestPayload: Record<string, MetadataFieldRequestPayload> = {}; + for (const metadataChildFieldKey of Object.keys(newDatasetMetadataChildFieldValue)) { + const childMetadataFieldInfo: MetadataFieldInfo = metadataBlockFieldInfo.childMetadataFields[metadataChildFieldKey]; + const value: string = newDatasetMetadataChildFieldValue[metadataChildFieldKey] as unknown as string; + metadataChildFieldRequestPayload[metadataChildFieldKey] = { + value: value, + typeClass: childMetadataFieldInfo.typeClass, + multiple: childMetadataFieldInfo.multiple, + typeName: metadataChildFieldKey, + }; + } + + return 
metadataChildFieldRequestPayload; +}; diff --git a/src/metadataBlocks/domain/models/MetadataBlock.ts b/src/metadataBlocks/domain/models/MetadataBlock.ts index b95bf799..834d7908 100644 --- a/src/metadataBlocks/domain/models/MetadataBlock.ts +++ b/src/metadataBlocks/domain/models/MetadataBlock.ts @@ -10,10 +10,14 @@ export interface MetadataFieldInfo { displayName: string; title: string; type: string; + typeClass: string; watermark: string; description: string; multiple: boolean; isControlledVocabulary: boolean; + controlledVocabularyValues?: string[]; displayFormat: string; childMetadataFields?: Record<string, MetadataFieldInfo>; + isRequired: boolean; + displayOrder: number; } diff --git a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts index ad47afdc..00d40262 100644 --- a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts +++ b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts @@ -28,6 +28,9 @@ const transformPayloadMetadataFieldInfo = (metadataFieldInfoPayload: any, isChil multiple: metadataFieldInfoPayload.multiple, isControlledVocabulary: metadataFieldInfoPayload.isControlledVocabulary, displayFormat: metadataFieldInfoPayload.displayFormat, + isRequired: metadataFieldInfoPayload.isRequired, + displayOrder: metadataFieldInfoPayload.displayOrder, + typeClass: metadataFieldInfoPayload.typeClass, }; if (!isChild && metadataFieldInfoPayload.hasOwnProperty('childFields')) { const childMetadataFieldsPayload = metadataFieldInfoPayload.childFields; diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index a3064195..d1c1d602 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -12,6 +12,9 @@ import { DatasetNotNumberedVersion, DatasetLockType, DatasetPreviewSubset } from import { fail 
} from 'assert'; import { ApiConfig } from '../../../src'; import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig'; +import { NewDatasetDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; +import { MetadataBlocksRepository } from '../../../src/metadataBlocks/infra/repositories/MetadataBlocksRepository'; +import { Author, DatasetContact, DatasetDescription } from '../../../src/datasets/domain/models/Dataset'; describe('DatasetsRepository', () => { const sut: DatasetsRepository = new DatasetsRepository(); @@ -243,4 +246,77 @@ describe('DatasetsRepository', () => { expect(typeof actualDatasetCitation).toBe('string'); }); }); + + describe('createDataset', () => { + test('should create a dataset with the provided dataset citation fields', async () => { + const testTitle = 'Dataset created using the createDataset use case'; + const testAuthorName1 = 'Admin, Dataverse'; + const testAuthorName2 = 'Owner, Dataverse'; + const testAuthorAffiliation1 = 'Dataverse.org'; + const testAuthorAffiliation2 = 'Dataversedemo.org'; + const testContactEmail = 'finch@mailinator.com'; + const testContactName = 'Finch, Fiona'; + const testDescription = 'This is the description of the dataset.'; + const testSubject = ['Medicine, Health and Life Sciences']; + + const testNewDataset: NewDatasetDTO = { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: testTitle, + author: [ + { + authorName: testAuthorName1, + authorAffiliation: testAuthorAffiliation1, + }, + { + authorName: testAuthorName2, + authorAffiliation: testAuthorAffiliation2, + }, + ], + datasetContact: [ + { + datasetContactEmail: testContactEmail, + datasetContactName: testContactName, + }, + ], + dsDescription: [ + { + dsDescriptionValue: testDescription, + }, + ], + subject: testSubject, + }, + }, + ], + }; + + const metadataBlocksRepository = new MetadataBlocksRepository(); + const citationMetadataBlock = await 
metadataBlocksRepository.getMetadataBlockByName('citation'); + const createdDataset = await sut.createDataset(testNewDataset, [citationMetadataBlock], 'root'); + const actualCreatedDataset = await sut.getDataset(createdDataset.numericId, latestVersionId, false); + + expect(actualCreatedDataset.metadataBlocks[0].fields.title).toBe(testTitle); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[0] as Author).authorName).toBe(testAuthorName1); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[0] as Author).authorAffiliation).toBe( + testAuthorAffiliation1, + ); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[1] as Author).authorName).toBe(testAuthorName2); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[1] as Author).authorAffiliation).toBe( + testAuthorAffiliation2, + ); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.datasetContact[0] as DatasetContact).datasetContactEmail, + ).toBe(testContactEmail); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.datasetContact[0] as DatasetContact).datasetContactName, + ).toBe(testContactName); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.dsDescription[0] as DatasetDescription).dsDescriptionValue, + ).toBe(testDescription); + expect(actualCreatedDataset.metadataBlocks[0].fields.subject[0]).toBe(testSubject[0]); + expect(actualCreatedDataset.metadataBlocks[0].fields.subject[1]).toBe(testSubject[1]); + }); + }); }); diff --git a/test/integration/environment/setup.js b/test/integration/environment/setup.js index 57cf5c06..c618ce63 100644 --- a/test/integration/environment/setup.js +++ b/test/integration/environment/setup.js @@ -51,12 +51,11 @@ async function setupTestFixtures() { console.log('Creating test datasets...'); await createDatasetViaApi(datasetJson1) .then() - .catch((error) => { + .catch(() => { console.error('Tests setup: Error while creating test Dataset 1'); }); await createDatasetViaApi(datasetJson2) - .then() - .catch((error) => 
{ + .catch(() => { console.error('Tests setup: Error while creating test Dataset 2'); }); console.log('Test datasets created'); diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts new file mode 100644 index 00000000..4ff08439 --- /dev/null +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -0,0 +1,312 @@ +import { NewDatasetDTO, NewDatasetMetadataFieldValueDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; +import { MetadataBlock } from '../../../src'; +import { NewDatasetRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; + +export const createNewDatasetDTO = ( + titleFieldValue?: NewDatasetMetadataFieldValueDTO, + authorFieldValue?: NewDatasetMetadataFieldValueDTO, + alternativeRequiredTitleValue?: NewDatasetMetadataFieldValueDTO, + timePeriodCoveredStartValue?: NewDatasetMetadataFieldValueDTO, + contributorTypeValue?: NewDatasetMetadataFieldValueDTO, +): NewDatasetDTO => { + const validTitle = 'test dataset'; + const validAuthorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + ]; + const validAlternativeRequiredTitleValue = ['alternative1', 'alternative2']; + return { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: titleFieldValue !== undefined ? titleFieldValue : validTitle, + author: authorFieldValue !== undefined ? authorFieldValue : validAuthorFieldValue, + alternativeRequiredTitle: + alternativeRequiredTitleValue !== undefined + ? 
alternativeRequiredTitleValue + : validAlternativeRequiredTitleValue, + ...(timePeriodCoveredStartValue && { timePeriodCoveredStart: timePeriodCoveredStartValue }), + ...(contributorTypeValue && { + contributor: [ + { + contributorName: 'Admin, Dataverse', + contributorType: contributorTypeValue as string, + }, + ], + }), + }, + }, + ], + }; +}; + +export const createNewDatasetDTOWithoutFirstLevelRequiredField = (): NewDatasetDTO => { + return { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'test dataset', + }, + }, + ], + }; +}; + +export const createNewDatasetDTOWithoutSecondLevelRequiredField = (): NewDatasetDTO => { + return { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'test dataset', + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorAffiliation: 'Dataverse.org', + }, + ], + }, + }, + ], + }; +}; + +/** + * + * This method creates a simplified and altered version of the Citation Metadata Block, only for testing purposes. + * For this reason some of the metadata fields do not correspond to the real ones. + * + * @returns {MetadataBlock} A MetadataBlock testing instance. 
+ * + **/ +export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { + return { + id: 1, + name: 'citation', + displayName: 'Citation Metadata', + metadataFields: { + title: { + name: 'title', + displayName: 'title', + title: 'title', + type: 'DatasetField', + watermark: 'watermark', + description: 'description', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', + }, + author: { + name: 'author', + displayName: 'author', + title: 'author', + type: 'NONE', + watermark: 'watermark', + description: 'description', + multiple: true, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 1, + typeClass: 'compound', + childMetadataFields: { + authorName: { + name: 'authorName', + displayName: 'author name', + title: 'author name', + type: 'TEXT', + watermark: 'watermark', + description: 'description', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 2, + typeClass: 'primitive', + }, + authorAffiliation: { + name: 'authorAffiliation', + displayName: 'author affiliation', + title: 'author affiliation', + type: 'TEXT', + watermark: 'watermark', + description: 'descriprion', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: false, + displayOrder: 3, + typeClass: 'primitive', + }, + }, + }, + alternativeRequiredTitle: { + name: 'alternativeRequiredTitle', + displayName: 'Alternative Required Title', + title: 'Alternative Title', + type: 'TEXT', + watermark: '', + description: 'Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title', + multiple: true, + isControlledVocabulary: false, + displayFormat: '', + isRequired: true, + displayOrder: 4, + typeClass: 'primitive', + }, + timePeriodCoveredStart: { + name: 'timePeriodCoveredStart', + displayName: 'Time Period Start Date', + title: 
'Start Date', + type: 'DATE', + watermark: 'YYYY-MM-DD', + description: 'The start date of the time period that the data refer to', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#NAME: #VALUE ', + isRequired: false, + displayOrder: 5, + typeClass: 'primitive', + }, + contributor: { + name: 'contributor', + displayName: 'Contributor', + title: 'Contributor', + type: 'NONE', + watermark: '', + description: + 'The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset', + multiple: true, + isControlledVocabulary: false, + displayFormat: ':', + isRequired: false, + displayOrder: 6, + typeClass: 'compound', + childMetadataFields: { + contributorType: { + name: 'contributorType', + displayName: 'Contributor Type', + title: 'Type', + type: 'TEXT', + watermark: '', + description: 'Indicates the type of contribution made to the dataset', + multiple: false, + isControlledVocabulary: true, + displayFormat: '#VALUE ', + isRequired: false, + displayOrder: 7, + controlledVocabularyValues: [ + 'Data Collector', + 'Data Curator', + 'Data Manager', + 'Editor', + 'Funder', + 'Hosting Institution', + 'Project Leader', + 'Project Manager', + 'Project Member', + 'Related Person', + 'Researcher', + 'Research Group', + 'Rights Holder', + 'Sponsor', + 'Supervisor', + 'Work Package Leader', + 'Other', + ], + typeClass: 'controlledVocabulary', + }, + contributorName: { + name: 'contributorName', + displayName: 'Contributor Name', + title: 'Name', + type: 'TEXT', + watermark: '1) FamilyName, GivenName or 2) Organization', + description: "The name of the contributor, e.g. 
the person's name or the name of an organization", + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 8, + typeClass: 'primitive', + }, + }, + }, + }, + }; +}; + +export const createNewDatasetRequestPayload = (): NewDatasetRequestPayload => { + return { + datasetVersion: { + metadataBlocks: { + citation: { + fields: [ + { + value: 'test dataset', + typeClass: 'primitive', + multiple: false, + typeName: 'title', + }, + { + value: [ + { + authorName: { + value: 'Admin, Dataverse', + typeClass: 'primitive', + multiple: false, + typeName: 'authorName', + }, + authorAffiliation: { + value: 'Dataverse.org', + typeClass: 'primitive', + multiple: false, + typeName: 'authorAffiliation', + }, + }, + { + authorName: { + value: 'Owner, Dataverse', + typeClass: 'primitive', + multiple: false, + typeName: 'authorName', + }, + authorAffiliation: { + value: 'Dataverse.org', + typeClass: 'primitive', + multiple: false, + typeName: 'authorAffiliation', + }, + }, + ], + typeClass: 'compound', + multiple: true, + typeName: 'author', + }, + { + value: ['alternative1', 'alternative2'], + typeClass: 'primitive', + multiple: true, + typeName: 'alternativeRequiredTitle', + }, + ], + displayName: 'Citation Metadata', + }, + }, + }, + }; +}; diff --git a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts index 54b12cfe..28b013be 100644 --- a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts +++ b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts @@ -16,6 +16,9 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, testField2: { name: 'testName2', @@ -27,6 +30,9 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: true, isControlledVocabulary: false, displayFormat: '', + isRequired: true, 
+ displayOrder: 0, + typeClass: 'compound', childMetadataFields: { testField3: { name: 'testName3', @@ -38,6 +44,9 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, testField4: { name: 'testName4', @@ -49,6 +58,9 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, }, }, @@ -72,6 +84,9 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, testField2: { name: 'testName2', @@ -83,6 +98,9 @@ export const createMetadataBlockPayload = (): any => { multiple: true, isControlledVocabulary: false, displayFormat: '', + isRequired: true, + displayOrder: 0, + typeClass: 'compound', childFields: { testField3: { name: 'testName3', @@ -94,6 +112,9 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, testField4: { name: 'testName4', @@ -105,6 +126,9 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, }, }, diff --git a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts new file mode 100644 index 00000000..f3f91388 --- /dev/null +++ b/test/unit/datasets/CreateDataset.test.ts @@ -0,0 +1,125 @@ +import { CreateDataset } from '../../../src/datasets/domain/useCases/CreateDataset'; +import { CreatedDatasetIdentifiers } from '../../../src/datasets/domain/models/CreatedDatasetIdentifiers'; +import { IDatasetsRepository } 
from '../../../src/datasets/domain/repositories/IDatasetsRepository'; +import { assert, createSandbox, SinonSandbox } from 'sinon'; +import { NewResourceValidator } from '../../../src/core/domain/useCases/validators/NewResourceValidator'; +import { createNewDatasetDTO, createNewDatasetMetadataBlockModel } from '../../testHelpers/datasets/newDatasetHelper'; +import { ResourceValidationError } from '../../../src/core/domain/useCases/validators/errors/ResourceValidationError'; +import { WriteError, ReadError } from '../../../src'; +import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; + +describe('execute', () => { + const sandbox: SinonSandbox = createSandbox(); + const testDataset = createNewDatasetDTO(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + + afterEach(() => { + sandbox.restore(); + }); + + test('should return new dataset identifiers when validation is successful and repository call is successful', async () => { + const testCreatedDatasetIdentifiers: CreatedDatasetIdentifiers = { + persistentId: 'test', + numericId: 1, + }; + + const datasetsRepositoryStub = {}; + const createDatasetStub = sandbox.stub().returns(testCreatedDatasetIdentifiers); + datasetsRepositoryStub.createDataset = createDatasetStub; + + const newDatasetValidatorStub = {}; + const validateStub = sandbox.stub().resolves(); + newDatasetValidatorStub.validate = validateStub; + + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryStub, metadataBlocksRepositoryStub, newDatasetValidatorStub); + + const actual = await sut.execute(testDataset); + + assert.match(actual, testCreatedDatasetIdentifiers); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + 
assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root'); + + assert.callOrder(validateStub, createDatasetStub); + }); + + test('should throw ResourceValidationError and not call repository when validation is unsuccessful', async () => { + const datasetsRepositoryMock = {}; + const createDatasetMock = sandbox.stub(); + datasetsRepositoryMock.createDataset = createDatasetMock; + + const newDatasetValidatorStub = {}; + const testValidationError = new ResourceValidationError('Test error'); + const validateStub = sandbox.stub().throwsException(testValidationError); + newDatasetValidatorStub.validate = validateStub; + + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryMock, metadataBlocksRepositoryStub, newDatasetValidatorStub); + let actualError: ResourceValidationError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testValidationError); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); + assert.notCalled(createDatasetMock); + }); + + test('should throw WriteError when validation is successful and repository raises an error', async () => { + const datasetsRepositoryStub = {}; + const testWriteError = new WriteError('Test error'); + const createDatasetStub = sandbox.stub().throwsException(testWriteError); + datasetsRepositoryStub.createDataset = createDatasetStub; + + const newDatasetValidatorStub = {}; + const validateMock = sandbox.stub().resolves(); + newDatasetValidatorStub.validate = validateMock; + + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = 
sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryStub, metadataBlocksRepositoryStub, newDatasetValidatorStub); + let actualError: WriteError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testWriteError); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + assert.calledWithExactly(validateMock, testDataset, testMetadataBlocks); + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root'); + + assert.callOrder(validateMock, createDatasetStub); + }); + + test('should throw ReadError when metadata blocks repository raises an error', async () => { + const datasetsRepositoryMock = {}; + const createDatasetMock = sandbox.stub(); + datasetsRepositoryMock.createDataset = createDatasetMock; + + const newDatasetValidatorMock = {}; + const validateMock = sandbox.stub().resolves(); + newDatasetValidatorMock.validate = validateMock; + + const metadataBlocksRepositoryStub = {}; + const testReadError = new ReadError('Test error'); + const getMetadataBlockByNameStub = sandbox.stub().throwsException(testReadError); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryMock, metadataBlocksRepositoryStub, newDatasetValidatorMock); + let actualError: ReadError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testReadError); + + assert.notCalled(validateMock); + assert.notCalled(createDatasetMock); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + }); +}); diff --git a/test/unit/datasets/DatasetsRepository.test.ts b/test/unit/datasets/DatasetsRepository.test.ts index 261df777..c33bbeb4 100644 --- a/test/unit/datasets/DatasetsRepository.test.ts +++ 
b/test/unit/datasets/DatasetsRepository.test.ts @@ -17,6 +17,12 @@ import { createDatasetPreviewModel, createDatasetPreviewPayload, } from '../../testHelpers/datasets/datasetPreviewHelper'; +import { + createNewDatasetDTO, + createNewDatasetMetadataBlockModel, + createNewDatasetRequestPayload, +} from '../../testHelpers/datasets/newDatasetHelper'; +import { WriteError } from '../../../src'; describe('DatasetsRepository', () => { const sandbox: SinonSandbox = createSandbox(); @@ -600,4 +606,73 @@ describe('DatasetsRepository', () => { expect(error).to.be.instanceOf(Error); }); }); + + describe('createDataset', () => { + const testNewDataset = createNewDatasetDTO(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + const testCollectionName = 'test'; + const expectedNewDatasetRequestPayloadJson = JSON.stringify(createNewDatasetRequestPayload()); + + const testCreatedDatasetIdentifiers = { + persistentId: 'test', + numericId: 1, + }; + + const testCreateDatasetResponse = { + data: { + status: 'OK', + data: { + id: testCreatedDatasetIdentifiers.numericId, + persistentId: testCreatedDatasetIdentifiers.persistentId, + }, + }, + }; + + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/dataverses/${testCollectionName}/datasets`; + + test('should call the API with a correct request payload', async () => { + const axiosPostStub = sandbox.stub(axios, 'post').resolves(testCreateDatasetResponse); + + // API Key auth + let actual = await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); + + assert.calledWithExactly( + axiosPostStub, + expectedApiEndpoint, + expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + + assert.match(actual, testCreatedDatasetIdentifiers); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + actual = await sut.createDataset(testNewDataset, testMetadataBlocks, 
testCollectionName); + + assert.calledWithExactly( + axiosPostStub, + expectedApiEndpoint, + expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, + ); + + assert.match(actual, testCreatedDatasetIdentifiers); + }); + + test('should return error result on error response', async () => { + const axiosPostStub = sandbox.stub(axios, 'post').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: WriteError = undefined; + await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName).catch((e) => (error = e)); + + assert.calledWithExactly( + axiosPostStub, + expectedApiEndpoint, + expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + expect(error).to.be.instanceOf(Error); + }); + }); }); diff --git a/test/unit/datasets/NewDatasetResourceValidator.test.ts b/test/unit/datasets/NewDatasetResourceValidator.test.ts new file mode 100644 index 00000000..3c0e1c59 --- /dev/null +++ b/test/unit/datasets/NewDatasetResourceValidator.test.ts @@ -0,0 +1,196 @@ +import { NewDatasetResourceValidator } from '../../../src/datasets/domain/useCases/validators/NewDatasetResourceValidator'; +import { assert } from 'sinon'; +import { + createNewDatasetDTO, + createNewDatasetMetadataBlockModel, + createNewDatasetDTOWithoutFirstLevelRequiredField, +} from '../../testHelpers/datasets/newDatasetHelper'; +import { fail } from 'assert'; +import { EmptyFieldError } from '../../../src/datasets/domain/useCases/validators/errors/EmptyFieldError'; +import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError'; +import { NewDatasetDTO, NewDatasetMetadataFieldValueDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; +import { SingleMetadataFieldValidator } from '../../../src/datasets/domain/useCases/validators/SingleMetadataFieldValidator'; +import { MetadataFieldValidator } from 
'../../../src/datasets/domain/useCases/validators/MetadataFieldValidator'; +import { MultipleMetadataFieldValidator } from '../../../src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator'; + +describe('validate', () => { + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + + const singleMetadataFieldValidator = new SingleMetadataFieldValidator(); + const metadataFieldValidator = new MetadataFieldValidator( + new SingleMetadataFieldValidator(), + new MultipleMetadataFieldValidator(singleMetadataFieldValidator), + ); + const sut = new NewDatasetResourceValidator(metadataFieldValidator); + + async function runValidateExpectingFieldValidationError( + newDataset: NewDatasetDTO, + expectedMetadataFieldName: string, + expectedErrorMessage: string, + expectedParentMetadataFieldName?: string, + expectedPosition?: number, + ): Promise { + await sut + .validate(newDataset, testMetadataBlocks) + .then(() => { + fail('Validation should fail'); + }) + .catch((error) => { + const fieldValidationError = error as T; + assert.match(fieldValidationError.citationBlockName, 'citation'); + assert.match(fieldValidationError.metadataFieldName, expectedMetadataFieldName); + assert.match(fieldValidationError.parentMetadataFieldName, expectedParentMetadataFieldName); + assert.match(fieldValidationError.fieldPosition, expectedPosition); + assert.match(fieldValidationError.message, expectedErrorMessage); + }); + } + + test('should not raise a validation error when a new dataset with only the required fields is valid', async () => { + const testNewDataset = createNewDatasetDTO(); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); + }); + + test('should raise an empty field error when a first level required string field is missing', async () => { + await runValidateExpectingFieldValidationError( + createNewDatasetDTOWithoutFirstLevelRequiredField(), + 'author', + 'There was an error when validating the field author from metadata 
block citation. Reason was: The field should not be empty.', + ); + }); + + test('should raise an empty field error when a first level required array field is empty', async () => { + const invalidAuthorFieldValue: NewDatasetMetadataFieldValueDTO = []; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'author', + 'There was an error when validating the field author from metadata block citation. Reason was: The field should not be empty.', + ); + }); + + test('should raise an error when the provided field value for an unique field is an array', async () => { + const invalidTitleFieldValue = ['title1', 'title2']; + const testNewDataset = createNewDatasetDTO(invalidTitleFieldValue, undefined, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'title', + 'There was an error when validating the field title from metadata block citation. Reason was: Expecting a single field, not an array.', + ); + }); + + test('should raise an error when the provided field value is an object and the field expects a string', async () => { + const invalidTitleFieldValue = { + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', + }; + const testNewDataset = createNewDatasetDTO(invalidTitleFieldValue, undefined, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'title', + 'There was an error when validating the field title from metadata block citation. 
Reason was: Expecting a string, not child fields.', + ); + }); + + test('should raise an error when the provided field value for a multiple field is a string', async () => { + const invalidAuthorFieldValue = 'invalidValue'; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'author', + 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of values.', + ); + }); + + test('should raise an error when the provided field value is an array of strings and the field expects an array of objects', async () => { + const invalidAuthorFieldValue = ['invalidValue1', 'invalidValue2']; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'author', + 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of child fields, not strings', + ); + }); + + test('should raise an error when the provided field value is an array of objects and the field expects an array of strings', async () => { + const invalidAlternativeTitleFieldValue = [ + { + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', + }, + { + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', + }, + ]; + const testNewDataset = createNewDatasetDTO(undefined, undefined, invalidAlternativeTitleFieldValue); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'alternativeRequiredTitle', + 'There was an error when validating the field alternativeRequiredTitle from metadata block citation. 
Reason was: Expecting an array of strings, not child fields', + ); + }); + + test('should raise an empty field error when a required child field is missing', async () => { + const invalidAuthorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorAffiliation: 'Dataverse.org', + }, + ]; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'authorName', + 'There was an error when validating the field authorName from metadata block citation with parent field author in position 1. Reason was: The field should not be empty.', + 'author', + 1, + ); + }); + + test('should not raise an empty field error when a not required child field is missing', async () => { + const authorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorName: 'John, Doe', + }, + ]; + const testNewDataset = createNewDatasetDTO(undefined, authorFieldValue, undefined); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); + }); + + test('should raise a date format validation error when a date field has an invalid format', async () => { + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, '1-1-2020'); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'timePeriodCoveredStart', + 'There was an error when validating the field timePeriodCoveredStart from metadata block citation. 
Reason was: The field requires a valid date format (YYYY-MM-DD).', + ); + }); + + test('should not raise a date format validation error when a date field has a valid format', async () => { + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, '2020-01-01'); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); + }); + + test('should raise a controlled vocabulary error when a controlled vocabulary field has an invalid format', async () => { + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, undefined, 'Wrong Value'); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'contributorType', + 'There was an error when validating the field contributorType from metadata block citation with parent field contributor. Reason was: The field does not have a valid controlled vocabulary value.', + 'contributor', + 0, + ); + }); + + test('should not raise a controlled vocabulary error when the value for a controlled vocabulary field is correct', async () => { + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, undefined, 'Project Member'); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); + }); +}); diff --git a/test/unit/datasets/newDatasetTransformers.test.ts b/test/unit/datasets/newDatasetTransformers.test.ts new file mode 100644 index 00000000..93e19d9b --- /dev/null +++ b/test/unit/datasets/newDatasetTransformers.test.ts @@ -0,0 +1,17 @@ +import { assert } from 'sinon'; +import { + createNewDatasetMetadataBlockModel, + createNewDatasetDTO, + createNewDatasetRequestPayload, +} from '../../testHelpers/datasets/newDatasetHelper'; +import { transformNewDatasetModelToRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; + +describe('transformNewDatasetModelToRequestPayload', () => { + test('should correctly transform a new dataset model to a new dataset request payload', async () => { 
+ const testNewDataset = createNewDatasetDTO(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + const expectedNewDatasetRequestPayload = createNewDatasetRequestPayload(); + const actual = transformNewDatasetModelToRequestPayload(testNewDataset, testMetadataBlocks); + assert.match(actual, expectedNewDatasetRequestPayload); + }); +});