diff --git a/openapi3.yaml b/openapi3.yaml index 420ba5c9..57e3ea19 100644 --- a/openapi3.yaml +++ b/openapi3.yaml @@ -233,6 +233,65 @@ paths: schema: $ref: >- ./Schema/ingestionTrigger/responses/ingestionTriggerResponses.yaml#/components/schemas/errorMessage + /ingestion/{jobId}/bypass-validation-errors: + post: + operationId: bypassValidationErrors + tags: + - ingestion + summary: bypass an active validation job with certain allowed validation errors + description: Bypasses validation errors for a specific ingestion job + parameters: + - name: jobId + in: path + description: The id of the job to bypass validation errors for + required: true + schema: + type: string + format: uuid + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BypassValidationErrorsRequest' + responses: + '200': + description: OK + '400': + description: Bad request - Validation task is valid or not suspended + content: + application/json: + schema: + $ref: >- + ./Schema/ingestionTrigger/responses/ingestionTriggerResponses.yaml#/components/schemas/errorMessage + '404': + description: Not Found - Job tasks not found + content: + application/json: + schema: + $ref: >- + ./Schema/ingestionTrigger/responses/ingestionTriggerResponses.yaml#/components/schemas/errorMessage + '409': + description: Conflict + content: + application/json: + schema: + $ref: >- + ./Schema/ingestionTrigger/responses/ingestionTriggerResponses.yaml#/components/schemas/errorMessage + '422': + description: Unprocessable Content + content: + application/json: + schema: + $ref: >- + ./Schema/ingestionTrigger/responses/ingestionTriggerResponses.yaml#/components/schemas/errorMessage + '500': + description: Internal Server Error + content: + application/json: + schema: + $ref: >- + ./Schema/ingestionTrigger/responses/ingestionTriggerResponses.yaml#/components/schemas/errorMessage /validate/gpkgs: post: operationId: validateGpkgs @@ -329,6 +388,22 @@ paths: 
./Schema/ingestionTrigger/responses/ingestionTriggerResponses.yaml#/components/schemas/errorMessage components: schemas: + BypassValidationErrorsRequest: + type: object + required: + - allowedValidationErrors + - approver + properties: + allowedValidationErrors: + type: array + items: + type: string + enum: + - resolution + description: list of validation errors to bypass + approver: + type: string + description: user trying to bypass the errors CallbackUrls: type: array minItems: 1 diff --git a/package-lock.json b/package-lock.json index 979ad8c5..52cf2fa0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ "@map-colonies/mc-priority-queue": "^9.1.0", "@map-colonies/mc-utils": "^5.1.0", "@map-colonies/openapi-express-viewer": "^3.0.0", - "@map-colonies/raster-shared": "^7.10.2", + "@map-colonies/raster-shared": "^8.0.0-alpha", "@map-colonies/read-pkg": "0.0.1", "@map-colonies/shapefile-reader": "^1.0.1", "@map-colonies/storage-explorer-middleware": "^1.3.0", @@ -60,7 +60,7 @@ "@types/config": "^3.3.5", "@types/express": "^4.17.17", "@types/geojson": "^7946.0.16", - "@types/jest": "^29.5.2", + "@types/jest": "^29.5.14", "@types/lodash.get": "^4.4.9", "@types/lodash.has": "^4.5.9", "@types/lodash.merge": "^4.6.2", @@ -7275,9 +7275,9 @@ "license": "ISC" }, "node_modules/@map-colonies/raster-shared": { - "version": "7.10.2", - "resolved": "https://registry.npmjs.org/@map-colonies/raster-shared/-/raster-shared-7.10.2.tgz", - "integrity": "sha512-EKPtETrKPWOZTpVQ0CEs2CFaC2cU9FlXa+Ylss+Jg7wmFEaN4BKR2hUw5kjtsgD69lqL9fyFCWQysBFc7h4iTQ==", + "version": "8.0.0-alpha", + "resolved": "https://registry.npmjs.org/@map-colonies/raster-shared/-/raster-shared-8.0.0-alpha.tgz", + "integrity": "sha512-W3qFta4ecvvLiZS+RCu/9YTErhBwODdcPJ6oy3v+OFPwGGr7Ztk7eyUE5+x8qXY1U0KH7CNT4sKE4StkJsYDJA==", "license": "ISC", "dependencies": { "@map-colonies/mc-priority-queue": "^9.1.0", diff --git a/package.json b/package.json index 0555df97..9b27560c 100644 --- a/package.json 
+++ b/package.json @@ -53,7 +53,7 @@ "@map-colonies/mc-priority-queue": "^9.1.0", "@map-colonies/mc-utils": "^5.1.0", "@map-colonies/openapi-express-viewer": "^3.0.0", - "@map-colonies/raster-shared": "^7.10.2", + "@map-colonies/raster-shared": "^8.0.0-alpha", "@map-colonies/read-pkg": "0.0.1", "@map-colonies/shapefile-reader": "^1.0.1", "@map-colonies/storage-explorer-middleware": "^1.3.0", @@ -94,7 +94,7 @@ "@types/config": "^3.3.5", "@types/express": "^4.17.17", "@types/geojson": "^7946.0.16", - "@types/jest": "^29.5.2", + "@types/jest": "^29.5.14", "@types/lodash.get": "^4.4.9", "@types/lodash.has": "^4.5.9", "@types/lodash.merge": "^4.6.2", diff --git a/src/ingestion/controllers/ingestionController.ts b/src/ingestion/controllers/ingestionController.ts index 9a725460..adfebfba 100644 --- a/src/ingestion/controllers/ingestionController.ts +++ b/src/ingestion/controllers/ingestionController.ts @@ -1,4 +1,4 @@ -import { ConflictError, NotFoundError } from '@map-colonies/error-types'; +import { BadRequestError, ConflictError, NotFoundError } from '@map-colonies/error-types'; import { RequestHandler } from 'express'; import { HttpError } from 'express-openapi-validator/dist/framework/types'; import { StatusCodes } from 'http-status-codes'; @@ -6,13 +6,21 @@ import { inject, injectable } from 'tsyringe'; import { GpkgError } from '../../serviceClients/database/errors'; import { INGESTION_SCHEMAS_VALIDATOR_SYMBOL, SchemasValidator } from '../../utils/validation/schemasValidator'; import { FileNotFoundError, UnsupportedEntityError, ValidationError } from '../errors/ingestionErrors'; -import type { IRetryRequestParams, IRecordRequestParams, IAbortRequestParams, ResponseId } from '../interfaces'; +import type { + IRetryRequestParams, + IRecordRequestParams, + IAbortRequestParams, + ResponseId, + IBypassValidationErrorsRequestBody, + IBypassValidationErrorsParams, +} from '../interfaces'; import { IngestionManager } from '../models/ingestionManager'; type NewLayerHandler 
= RequestHandler; type RetryIngestionHandler = RequestHandler; type AbortIngestionHandler = RequestHandler; type UpdateLayerHandler = RequestHandler; +type BypassValidationErrorsHandler = RequestHandler; @injectable() export class IngestionController { @@ -107,4 +115,25 @@ export class IngestionController { next(error); } }; + + public bypassValidationErrors: BypassValidationErrorsHandler = async (req, res, next) => { + try { + await this.ingestionManager.bypassValidationErrors(req.body, req.params.jobId); + res.status(StatusCodes.OK).send(); + } catch (error) { + if (error instanceof BadRequestError) { + (error as HttpError).status = StatusCodes.BAD_REQUEST; //400 + } + if (error instanceof NotFoundError) { + (error as HttpError).status = StatusCodes.NOT_FOUND; //404 + } + if (error instanceof ConflictError) { + (error as HttpError).status = StatusCodes.CONFLICT; //409 + } + if (error instanceof UnsupportedEntityError) { + (error as HttpError).status = StatusCodes.UNPROCESSABLE_ENTITY; //422 + } + next(error); + } + }; } diff --git a/src/ingestion/interfaces.ts b/src/ingestion/interfaces.ts index 7d706b2e..485a9a92 100644 --- a/src/ingestion/interfaces.ts +++ b/src/ingestion/interfaces.ts @@ -25,6 +25,15 @@ export interface IAbortRequestParams { jobId: string; } +export interface IBypassValidationErrorsParams { + jobId: string; +} + +export interface IBypassValidationErrorsRequestBody { + allowedValidationErrors: string[]; + approver: string; +} + export enum IngestionOperation { RETRY = 'retry', ABORT = 'abort', diff --git a/src/ingestion/models/ingestionManager.ts b/src/ingestion/models/ingestionManager.ts index 80f1ec01..bf502dd5 100644 --- a/src/ingestion/models/ingestionManager.ts +++ b/src/ingestion/models/ingestionManager.ts @@ -1,6 +1,6 @@ import { relative } from 'node:path'; import { randomUUID } from 'node:crypto'; -import { ConflictError, NotFoundError } from '@map-colonies/error-types'; +import { BadRequestError, ConflictError, NotFoundError } from 
'@map-colonies/error-types'; import { Logger } from '@map-colonies/js-logger'; import { IFindJobsByCriteriaBody, @@ -38,15 +38,16 @@ import { getAbsolutePathInputFiles } from '../../utils/paths'; import { getShapefileFiles } from '../../utils/shapefile'; import { ZodValidator } from '../../utils/validation/zodValidator'; import { ValidateManager } from '../../validate/models/validateManager'; -import { ChecksumError, throwInvalidJobStatusError } from '../errors/ingestionErrors'; +import { ChecksumError, throwInvalidJobStatusError, UnsupportedEntityError } from '../errors/ingestionErrors'; import { IngestionOperation } from '../interfaces'; -import type { IngestionBaseJobParams, ResponseId } from '../interfaces'; +import type { IngestionBaseJobParams, ResponseId, IBypassValidationErrorsRequestBody } from '../interfaces'; import type { RasterLayerMetadata } from '../schemas/layerCatalogSchema'; import type { IngestionNewLayer } from '../schemas/newLayerSchema'; import type { IngestionUpdateLayer } from '../schemas/updateLayerSchema'; import { GeoValidator } from '../validators/geoValidator'; import { SourceValidator } from '../validators/sourceValidator'; import { PolygonPartsManagerClient } from '../../serviceClients/polygonPartsManagerClient'; +import { JobTrackerClient } from '../../serviceClients/jobTrackerClient'; import { ProductManager } from './productManager'; type ReplaceValuesOfKey, Key extends keyof T, Value> = { @@ -85,6 +86,7 @@ export class IngestionManager { private readonly catalogClient: CatalogClient, private readonly jobManagerWrapper: JobManagerWrapper, private readonly mapProxyClient: MapProxyClient, + private readonly jobTrackerClient: JobTrackerClient, private readonly productManager: ProductManager, private readonly zodValidator: ZodValidator ) { @@ -225,6 +227,57 @@ export class IngestionManager { } } + @withSpanAsyncV4 + public async bypassValidationErrors(body: IBypassValidationErrorsRequestBody, jobId: string): Promise { + const logCtx: 
LogContext = { ...this.logContext, function: this.bypassValidationErrors.name }; + const { allowedValidationErrors, approver } = body; + const job: IJobResponse = await this.jobManagerWrapper.getJob(jobId); + const validationTask = await this.getValidationTask(jobId, { ...logCtx }); + + if (validationTask.parameters.isValid === true) { + return; + } + if (job.status !== OperationStatus.SUSPENDED) { + throw new BadRequestError('cannot bypass validation errors when the job is not suspended'); + } + if (validationTask.parameters.errorsSummary === undefined) { + throw new UnsupportedEntityError('cannot bypass validation errors when there are no validation errors in task params'); + } + + const errorsSummary = validationTask.parameters.errorsSummary; + const exceededResolutionThreshold = errorsSummary.thresholds.resolution.exceeded; + + for (const [errorType, errorCount] of Object.entries(errorsSummary.errorsCount)) { + if (errorCount > 0 && !allowedValidationErrors.includes(errorType)) { + throw new UnsupportedEntityError('validation task has additional errors that are not in the allowed list'); + } + } + if (exceededResolutionThreshold) { + throw new UnsupportedEntityError('cannot bypass validation error of type: resolution, because the resolution exceeded threshold'); + } + + const existingChecksums = validationTask.parameters.checksums; + const metadataShapefilePath = await this.validateAndGetAbsoluteInputFiles(job.parameters.inputFiles); + const newChecksums = await this.getChecksum(metadataShapefilePath.metadataShapefilePath); + if (this.isChecksumChanged(existingChecksums, newChecksums)) { + throw new ConflictError( + 'cannot bypass validation errors because the metadata shapefile has been changed since the validation was performed, re-run the process' + ); + } + + await this.makeValidationTaskCompleted(validationTask); + await this.jobManagerWrapper.updateJob(jobId, { + status: OperationStatus.IN_PROGRESS, + parameters: { + ...job.parameters, + 
allowedValidationErrors, + approver, + }, + }); + + await this.jobTrackerClient.notify(validationTask); + } + @withSpanV4 private parseAndValidateJobIdentifiers( resourceId: string | undefined, @@ -729,4 +782,21 @@ export class IngestionManager { fileName: relative(this.sourceMount, checksum.fileName), })); } + + private async makeValidationTaskCompleted(task: ITaskResponse): Promise { + try { + await this.jobManagerWrapper.updateTask(task.jobId, task.id, { + parameters: { ...task.parameters, isValid: true }, + }); + } catch (err) { + this.logger.error({ + msg: `failed to update validation task to valid for jobId: ${task.jobId} taskId: ${task.id}`, + logContext: this.logContext, + jobId: task.jobId, + taskId: task.id, + error: err instanceof Error ? err.message : 'Unknown error', + }); + } + return; + } } diff --git a/src/ingestion/routes/ingestionRouter.ts b/src/ingestion/routes/ingestionRouter.ts index ff942eda..2981ca24 100644 --- a/src/ingestion/routes/ingestionRouter.ts +++ b/src/ingestion/routes/ingestionRouter.ts @@ -10,6 +10,7 @@ const ingestionRouterFactory: FactoryFunction = (dependencyContainer) => router.put('/:id', controller.updateLayer.bind(controller)); router.put('/:jobId/retry', controller.retryIngestion.bind(controller)); router.put('/:jobId/abort', controller.abortIngestion.bind(controller)); + router.post('/:jobId/bypass-validation-errors', controller.bypassValidationErrors.bind(controller)); return router; }; diff --git a/src/serviceClients/jobTrackerClient.ts b/src/serviceClients/jobTrackerClient.ts new file mode 100644 index 00000000..f819a245 --- /dev/null +++ b/src/serviceClients/jobTrackerClient.ts @@ -0,0 +1,52 @@ +import type { Logger } from '@map-colonies/js-logger'; +import { ITaskResponse } from '@map-colonies/mc-priority-queue'; +import { context, SpanStatusCode, trace } from '@opentelemetry/api'; +import type { Tracer } from '@opentelemetry/api'; +import { HttpClient } from '@map-colonies/mc-utils'; +import type { IHttpRetryConfig 
} from '@map-colonies/mc-utils'; +import { inject, injectable } from 'tsyringe'; +import { SERVICES } from '../common/constants'; +import type { IConfig } from '../common/interfaces'; + +@injectable() +export class JobTrackerClient extends HttpClient { + public constructor( + @inject(SERVICES.CONFIG) private readonly config: IConfig, + @inject(SERVICES.LOGGER) protected readonly logger: Logger, + @inject(SERVICES.TRACER) private readonly tracer: Tracer + ) { + const serviceName = 'JobTracker'; + const baseUrl = config.get('services.jobTrackerServiceURL'); + const httpRetryConfig = config.get('httpRetry'); + const disableHttpClientLogs = config.get('disableHttpClientLogs'); + super(logger, baseUrl, serviceName, httpRetryConfig, disableHttpClientLogs); + } + + public async notify(task: ITaskResponse): Promise { + await context.with(trace.setSpan(context.active(), this.tracer.startSpan(`${JobTrackerClient.name}.${this.notify.name}`)), async () => { + const activeSpan = trace.getActiveSpan(); + const monitorAttributes = { taskId: task.id, status: task.status, type: task.type }; + const logger = this.logger.child(monitorAttributes); + activeSpan?.setAttributes(monitorAttributes); + + try { + const url = `tasks/${task.id}/notify`; + logger.info({ msg: 'Notifying job tracker', url, ...monitorAttributes }); + activeSpan?.addEvent('notify.sending', { url }); + await this.post(url); + activeSpan?.setStatus({ code: SpanStatusCode.OK, message: 'Notification sent successfully' }); + } catch (err) { + if (err instanceof Error) { + const message = 'Failed to notify job tracker'; + const error = new Error(`${message}: ${err.message}`); + logger.error({ msg: message, error: err }); + activeSpan?.setStatus({ code: SpanStatusCode.ERROR, message: error.message }); + activeSpan?.recordException(error); + throw error; + } + } finally { + activeSpan?.end(); + } + }); + } +} diff --git a/tests/integration/ingestion/helpers/ingestionRequestSender.ts 
b/tests/integration/ingestion/helpers/ingestionRequestSender.ts index 24294824..45a1cdb9 100644 --- a/tests/integration/ingestion/helpers/ingestionRequestSender.ts +++ b/tests/integration/ingestion/helpers/ingestionRequestSender.ts @@ -1,4 +1,5 @@ import supertest from 'supertest'; +import type { IBypassValidationErrorsRequestBody } from '../../../../src/ingestion/interfaces'; import type { IngestionNewLayer } from '../../../../src/ingestion/schemas/newLayerSchema'; import type { IngestionUpdateLayer } from '../../../../src/ingestion/schemas/updateLayerSchema'; @@ -20,4 +21,8 @@ export class IngestionRequestSender { public async abortIngestion(jobId: string): Promise { return supertest.agent(this.app).put(`/ingestion/${jobId}/abort`).set('Content-Type', 'application/json'); } + + public async bypassValidationErrors(jobId: string, body: IBypassValidationErrorsRequestBody): Promise { + return supertest.agent(this.app).post(`/ingestion/${jobId}/bypass-validation-errors`).set('Content-Type', 'application/json').send(body); + } } diff --git a/tests/integration/ingestion/ingestion.spec.ts b/tests/integration/ingestion/ingestion.spec.ts index 7a5bb41c..ac391eff 100644 --- a/tests/integration/ingestion/ingestion.spec.ts +++ b/tests/integration/ingestion/ingestion.spec.ts @@ -1,6 +1,6 @@ import fs from 'node:fs'; import { faker } from '@faker-js/faker'; -import { IJobResponse, OperationStatus, type ICreateJobResponse } from '@map-colonies/mc-priority-queue'; +import { IJobResponse, OperationStatus, ICreateJobResponse } from '@map-colonies/mc-priority-queue'; import { CORE_VALIDATIONS, getMapServingLayerName, RasterProductTypes } from '@map-colonies/raster-shared'; import { SqliteError } from 'better-sqlite3'; import httpStatusCodes from 'http-status-codes'; @@ -1669,8 +1669,8 @@ describe('Ingestion', () => { nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, retryJob); 
nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); nock(polygonPartsManagerURL).delete('/polygonParts/validate').query({ productType, productId }).reply(httpStatusCodes.NO_CONTENT); - nock(jobManagerURL).patch(`/jobs/${jobId}/tasks/${taskId}`).reply(httpStatusCodes.OK); - nock(jobManagerURL).patch(`/jobs/${jobId}`).reply(httpStatusCodes.OK); + nock(jobManagerURL).put(`/jobs/${jobId}/tasks/${taskId}`).reply(httpStatusCodes.OK); + nock(jobManagerURL).put(`/jobs/${jobId}`).reply(httpStatusCodes.OK); // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-explicit-any nock(jobManagerURL) // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-explicit-any @@ -1943,7 +1943,7 @@ describe('Ingestion', () => { nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, retryJob); nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); nock(polygonPartsManagerURL).delete('/polygonParts/validate').query({ productType, productId }).reply(httpStatusCodes.NO_CONTENT); - nock(jobManagerURL).patch(`/jobs/${jobId}/tasks/${taskId}`).reply(httpStatusCodes.INTERNAL_SERVER_ERROR); + nock(jobManagerURL).put(`/jobs/${jobId}/tasks/${taskId}`).reply(httpStatusCodes.INTERNAL_SERVER_ERROR); const response = await requestSender.retryIngestion(jobId); @@ -1973,7 +1973,7 @@ describe('Ingestion', () => { nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); nock(polygonPartsManagerURL).delete('/polygonParts/validate').query({ productType, productId }).reply(httpStatusCodes.NO_CONTENT); nock(jobManagerURL) - .patch( + .put( `/jobs/${jobId}/tasks/${taskId}`, matches((body: { parameters?: { checksums?: unknown[]; isValid?: boolean; report?: unknown } }) => { return ( @@ -1986,7 +1986,7 @@ describe('Ingestion', () => { }) ) .reply(httpStatusCodes.OK); - 
nock(jobManagerURL).patch(`/jobs/${jobId}`).reply(httpStatusCodes.INTERNAL_SERVER_ERROR); + nock(jobManagerURL).put(`/jobs/${jobId}`).reply(httpStatusCodes.INTERNAL_SERVER_ERROR); const response = await requestSender.retryIngestion(jobId); @@ -2196,4 +2196,327 @@ describe('Ingestion', () => { }); }); }); + + describe('POST /ingestion/:jobId/bypass-validation-errors', () => { + // Format input files paths for storage (as they would appear in stored job parameters) + const storedInputFiles = { + gpkgFilesPath: [`gpkg/${validInputFiles.inputFiles.gpkgFilesPath[0]}`], + metadataShapefilePath: `metadata/${validInputFiles.inputFiles.metadataShapefilePath}/ShapeMetadata.shp`, + productShapefilePath: `product/${validInputFiles.inputFiles.productShapefilePath}/Product.shp`, + }; + + const createBypassJob = (options: { + jobId: string; + productId?: string; + productType?: RasterProductTypes; + status?: OperationStatus; + inputFiles?: unknown; + }): IJobResponse => { + const { + jobId, + productId = rasterLayerMetadataGenerators.productId(), + productType = rasterLayerMetadataGenerators.productType(), + status = OperationStatus.SUSPENDED, + inputFiles = storedInputFiles, + } = options; + return generateMockJob({ + id: jobId, + resourceId: productId, + productType, + status, + parameters: { + inputFiles, + }, + }); + }; + + describe('Happy Path', () => { + it('should return 200 status code when successfully bypassing validation errors', async () => { + const jobId = faker.string.uuid(); + const taskId = faker.string.uuid(); + const bypassJob = createBypassJob({ jobId }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + const errorsSummary = { + errorsCount: { resolution: 1 }, + thresholds: { resolution: { exceeded: false } }, + }; + const validationTask = { + id: taskId, + jobId, + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: 
validInputFiles.checksums, + errorsSummary, + }, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); + nock(jobManagerURL).put(`/jobs/${jobId}/tasks/${taskId}`).reply(httpStatusCodes.OK); + nock(jobManagerURL).put(`/jobs/${jobId}`).reply(httpStatusCodes.OK); + nock(configMock.get('services.jobTrackerServiceURL')).post(`/tasks/${taskId}/notify`).reply(httpStatusCodes.OK); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.OK); + }); + + it('should return 200 when task is valid', async () => { + const jobId = faker.string.uuid(); + const taskId = faker.string.uuid(); + const bypassJob = createBypassJob({ jobId }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + const validationTask = { + id: taskId, + jobId, + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: true, + checksums: validInputFiles.checksums, + errorsSummary: { + errorsCount: { resolution: 1 }, + thresholds: { resolution: { exceeded: false } }, + }, + }, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.OK); + }); + }); + + describe('Bad Path', () => { + it('should return 400 BAD_REQUEST when job is not suspended', async () => { + const jobId = faker.string.uuid(); + const taskId = faker.string.uuid(); + const bypassJob = 
createBypassJob({ jobId, status: OperationStatus.PENDING }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + const validationTask = { + id: taskId, + jobId, + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.FAILED, + parameters: { + isValid: false, + checksums: validInputFiles.checksums, + errorsSummary: { + errorsCount: { resolution: 1 }, + thresholds: { resolution: { exceeded: false } }, + }, + }, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.BAD_REQUEST); + }); + + it('should return 422 UNPROCESSABLE_ENTITY when there are no validation errors in task params', async () => { + const jobId = faker.string.uuid(); + const taskId = faker.string.uuid(); + const bypassJob = createBypassJob({ jobId }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + const validationTask = { + id: taskId, + jobId, + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: validInputFiles.checksums, + }, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.UNPROCESSABLE_ENTITY); + }); + + it('should return 422 UNPROCESSABLE_ENTITY when there are additional errors that are not in the allowed list', async () 
=> { + const jobId = faker.string.uuid(); + const taskId = faker.string.uuid(); + const bypassJob = createBypassJob({ jobId }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + const validationTask = { + id: taskId, + jobId, + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: validInputFiles.checksums, + errorsSummary: { + errorsCount: { resolution: 0, unallowedError: 1 }, + thresholds: { resolution: { exceeded: false } }, + }, + }, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.UNPROCESSABLE_ENTITY); + }); + + it('should return 422 UNPROCESSABLE_ENTITY when exceeded resolution threshold', async () => { + const jobId = faker.string.uuid(); + const taskId = faker.string.uuid(); + const bypassJob = createBypassJob({ jobId }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + const validationTask = { + id: taskId, + jobId, + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: validInputFiles.checksums, + errorsSummary: { + errorsCount: { resolution: 1 }, + thresholds: { resolution: { exceeded: true } }, + }, + }, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + 
expect(response.status).toBe(httpStatusCodes.UNPROCESSABLE_ENTITY); + }); + + it('should return 500 when job tracker notify fails', async () => { + const jobId = faker.string.uuid(); + const taskId = faker.string.uuid(); + const bypassJob = createBypassJob({ jobId }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + const errorsSummary = { + errorsCount: { resolution: 1 }, + thresholds: { resolution: { exceeded: false } }, + }; + const validationTask = { + id: taskId, + jobId, + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: validInputFiles.checksums, + errorsSummary, + }, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); + nock(jobManagerURL).put(`/jobs/${jobId}/tasks/${taskId}`).reply(httpStatusCodes.OK); + nock(jobManagerURL).put(`/jobs/${jobId}`).reply(httpStatusCodes.OK); + nock(configMock.get('services.jobTrackerServiceURL')).post(`/tasks/${taskId}/notify`).reply(httpStatusCodes.INTERNAL_SERVER_ERROR); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.INTERNAL_SERVER_ERROR); + }); + }); + + describe('Sad Path', () => { + it('should return 409 CONFLICT when checksums have changed', async () => { + const jobId = faker.string.uuid(); + const taskId = faker.string.uuid(); + const bypassJob = createBypassJob({ jobId }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + // Simulating different checksums + const modifiedChecksums = [...validInputFiles.checksums]; + modifiedChecksums[0] = { ...modifiedChecksums[0], checksum: 'different-checksum' }; + + const validationTask = { + id: taskId, + 
jobId, + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: modifiedChecksums, + errorsSummary: { + errorsCount: { resolution: 1 }, + thresholds: { resolution: { exceeded: false } }, + }, + }, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [validationTask]); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.CONFLICT); + }); + + it('should return 404 NOT_FOUND when job does not exist', async () => { + const jobId = faker.string.uuid(); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.NOT_FOUND); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.NOT_FOUND); + }); + + it('should return 404 NOT_FOUND when validation task does not exist', async () => { + const jobId = faker.string.uuid(); + const bypassJob = createBypassJob({ jobId }); + const requestBody = { allowedValidationErrors: ['resolution'], approver: 'approverName' }; + + const otherTask = { + id: faker.string.uuid(), + jobId, + type: 'some-other-task', + status: OperationStatus.SUSPENDED, + parameters: {}, + }; + + nock(jobManagerURL).get(`/jobs/${jobId}`).query({ shouldReturnTasks: false }).reply(httpStatusCodes.OK, bypassJob); + nock(jobManagerURL).get(`/jobs/${jobId}/tasks`).reply(httpStatusCodes.OK, [otherTask]); + + const response = await requestSender.bypassValidationErrors(jobId, requestBody); + + expect(response).toSatisfyApiSpec(); + 
expect(response.status).toBe(httpStatusCodes.NOT_FOUND); + }); + }); + }); }); diff --git a/tests/tsconfig.json b/tests/tsconfig.json index 38ca0b13..6056e76e 100644 --- a/tests/tsconfig.json +++ b/tests/tsconfig.json @@ -1,3 +1,6 @@ { - "extends": "../tsconfig.test.json" + "extends": "../tsconfig.test.json", + "compilerOptions": { + "types": ["node", "jest"] + }, } diff --git a/tests/unit/ingestion/models/ingestionManager.spec.ts b/tests/unit/ingestion/models/ingestionManager.spec.ts index 55ebc0df..3addc867 100644 --- a/tests/unit/ingestion/models/ingestionManager.spec.ts +++ b/tests/unit/ingestion/models/ingestionManager.spec.ts @@ -30,6 +30,7 @@ import { ChecksumProcessor } from '../../../../src/utils/hash/interfaces'; import { CHECKSUM_PROCESSOR } from '../../../../src/utils/hash/constants'; import { SourceValidator } from '../../../../src/ingestion/validators/sourceValidator'; import { PolygonPartsManagerClient } from '../../../../src/serviceClients/polygonPartsManagerClient'; +import { JobTrackerClient } from '../../../../src/serviceClients/jobTrackerClient'; describe('IngestionManager', () => { let ingestionManager: IngestionManager; @@ -63,6 +64,10 @@ describe('IngestionManager', () => { deleteValidationEntity: jest.fn(), } satisfies Partial; + const mockJobTrackerClient = { + notify: jest.fn(), + } satisfies Partial; + let createIngestionJobSpy: jest.SpyInstance; let findJobsSpy: jest.SpyInstance; let existsMapproxySpy: jest.SpyInstance; @@ -114,6 +119,7 @@ describe('IngestionManager', () => { catalogClient, jobManagerWrapper, mapProxyClient, + mockJobTrackerClient as unknown as JobTrackerClient, productManager as unknown as ProductManager, zodValidator as unknown as ZodValidator ); @@ -122,6 +128,7 @@ describe('IngestionManager', () => { afterEach(() => { clearConfig(); jest.restoreAllMocks(); // Restore original implementations + jest.clearAllMocks(); }); describe('newLayer', () => { @@ -1117,4 +1124,367 @@ describe('IngestionManager', () => { 
expect(mockPolygonPartsManagerClient.deleteValidationEntity).not.toHaveBeenCalled(); }); }); + describe('bypassValidationErrors', () => { + let getJobSpy: jest.SpyInstance; + let getTasksForJobSpy: jest.SpyInstance; + let updateJobSpy: jest.SpyInstance; + let updateTaskSpy: jest.SpyInstance; + + beforeEach(() => { + getJobSpy = jest.spyOn(JobManagerWrapper.prototype, 'getJob'); + getTasksForJobSpy = jest.spyOn(JobManagerWrapper.prototype, 'getTasksForJob'); + updateJobSpy = jest.spyOn(JobManagerWrapper.prototype, 'updateJob'); + updateTaskSpy = jest.spyOn(JobManagerWrapper.prototype, 'updateTask'); + }); + + it('should bypass validation errors successfully', async () => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.SUSPENDED }); + const mockChecksum = { fileName: 'some/path.shp', checksum: '123' }; + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: [mockChecksum], + errorsSummary: { + thresholds: { resolution: { exceeded: false } }, + errorsCount: { errorType1: 1 }, + }, + }, + jobId: mockJobId, + }; + + const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + zodValidator.validate.mockResolvedValue(undefined); + jest.spyOn(ingestionManager as unknown as { getChecksum: jest.Mock }, 'getChecksum').mockResolvedValue([mockChecksum]); + + jest + .spyOn(ingestionManager as unknown as { validateAndGetAbsoluteInputFiles: jest.Mock }, 'validateAndGetAbsoluteInputFiles') + .mockResolvedValue({ + gpkgFilesPath: [], + metadataShapefilePath: 'some/path', + productShapefilePath: 'some/path', + }); + + updateJobSpy.mockResolvedValue(undefined); + updateTaskSpy.mockResolvedValue(undefined); + mockJobTrackerClient.notify.mockResolvedValue(undefined); 
+ + await ingestionManager.bypassValidationErrors(body, mockJobId); + + expect(updateTaskSpy).toHaveBeenCalledWith( + mockJobId, + mockTask.id, + expect.objectContaining({ + parameters: { + isValid: true, + checksums: [mockChecksum], + errorsSummary: { + thresholds: { resolution: { exceeded: false } }, + errorsCount: { errorType1: 1 }, + }, + }, + }) + ); + + expect(updateJobSpy).toHaveBeenCalledWith( + mockJobId, + expect.objectContaining({ + parameters: expect.objectContaining({ + allowedValidationErrors: ['errorType1'], + approver: 'admin', + }) as unknown, + }) + ); + + expect(mockJobTrackerClient.notify).toHaveBeenCalledWith(mockTask); + }); + + it('should return and do nothing when task is valid', async () => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.SUSPENDED }); + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: true, + errorsSummary: { + thresholds: { resolution: { exceeded: false } }, + errorsCount: { errorType1: 1 }, + }, + }, + jobId: mockJobId, + }; + + const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + + await ingestionManager.bypassValidationErrors(body, mockJobId); + expect(mockJobTrackerClient.notify).not.toHaveBeenCalled(); + }); + + it('should throw BadRequestError if job is not suspended', async () => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.PENDING }); + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + errorsSummary: { + thresholds: { resolution: { exceeded: false } }, + errorsCount: { errorType1: 1 }, + }, + }, + jobId: mockJobId, + }; + 
+ const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + + await expect(ingestionManager.bypassValidationErrors(body, mockJobId)).rejects.toThrow(BadRequestError); + }); + + it('should throw UnsupportedEntityError if task has unallowed errors', async () => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.SUSPENDED }); + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + errorsSummary: { + thresholds: { resolution: { exceeded: false } }, + errorsCount: { errorType1: 1, unallowedError: 1 }, + }, + }, + jobId: mockJobId, + }; + + const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + + await expect(ingestionManager.bypassValidationErrors(body, mockJobId)).rejects.toThrow(UnsupportedEntityError); + }); + + it('should throw UnsupportedEntityError when errorsSummary is undefined', async () => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.SUSPENDED }); + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + }, + jobId: mockJobId, + }; + + const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + + await expect(ingestionManager.bypassValidationErrors(body, mockJobId)).rejects.toThrow(UnsupportedEntityError); + }); + + it('should throw UnsupportedEntityError when resolution threshold exceeded', async 
() => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.SUSPENDED }); + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + errorsSummary: { + thresholds: { resolution: { exceeded: true } }, + errorsCount: { errorType1: 1 }, + }, + }, + jobId: mockJobId, + }; + + const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + + await expect(ingestionManager.bypassValidationErrors(body, mockJobId)).rejects.toThrow(UnsupportedEntityError); + }); + + it('should throw ConflictError when checksums have changed', async () => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.SUSPENDED }); + const mockChecksum = { fileName: 'some/path.shp', checksum: '123' }; + const newMockChecksum = { fileName: 'some/path.shp', checksum: '456' }; + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: [mockChecksum], + errorsSummary: { + thresholds: { resolution: { exceeded: false } }, + errorsCount: { errorType1: 1 }, + }, + }, + jobId: mockJobId, + }; + + const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + zodValidator.validate.mockResolvedValue(undefined); + jest.spyOn(ingestionManager as unknown as { getChecksum: jest.Mock }, 'getChecksum').mockResolvedValue([newMockChecksum]); + + jest + .spyOn(ingestionManager as unknown as { validateAndGetAbsoluteInputFiles: jest.Mock }, 'validateAndGetAbsoluteInputFiles') + .mockResolvedValue({ + gpkgFilesPath: 
[], + metadataShapefilePath: 'some/path', + productShapefilePath: 'some/path', + }); + + await expect(ingestionManager.bypassValidationErrors(body, mockJobId)).rejects.toThrow(ConflictError); + }); + + it('should catch error when makeValidationTaskCompleted fails', async () => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.SUSPENDED }); + const mockChecksum = { fileName: 'some/path.shp', checksum: '123' }; + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: [mockChecksum], + errorsSummary: { + thresholds: { resolution: { exceeded: false } }, + errorsCount: { errorType1: 1 }, + }, + }, + jobId: mockJobId, + }; + + const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + zodValidator.validate.mockResolvedValue(undefined); + jest.spyOn(ingestionManager as unknown as { getChecksum: jest.Mock }, 'getChecksum').mockResolvedValue([mockChecksum]); + + jest + .spyOn(ingestionManager as unknown as { validateAndGetAbsoluteInputFiles: jest.Mock }, 'validateAndGetAbsoluteInputFiles') + .mockResolvedValue({ + gpkgFilesPath: [], + metadataShapefilePath: 'some/path', + productShapefilePath: 'some/path', + }); + + updateTaskSpy.mockRejectedValue(new Error('Update failed')); + updateJobSpy.mockResolvedValue(undefined); + mockJobTrackerClient.notify.mockResolvedValue(undefined); + + await ingestionManager.bypassValidationErrors(body, mockJobId); + + expect(updateTaskSpy).toHaveBeenCalled(); + }); + + it('should catch error when makeValidationTaskCompleted fails with non-Error', async () => { + const mockJobId = faker.string.uuid(); + const mockJob = generateMockJob({ status: OperationStatus.SUSPENDED }); + const mockChecksum = { fileName: 'some/path.shp', 
checksum: '123' }; + const mockTask = { + id: faker.string.uuid(), + type: configMock.get('jobManager.validationTaskType'), + status: OperationStatus.SUSPENDED, + parameters: { + isValid: false, + checksums: [mockChecksum], + errorsSummary: { + thresholds: { resolution: { exceeded: false } }, + errorsCount: { errorType1: 1 }, + }, + }, + jobId: mockJobId, + }; + + const body = { + allowedValidationErrors: ['errorType1'], + approver: 'admin', + jobId: mockJobId, + }; + + getJobSpy.mockResolvedValue(mockJob); + getTasksForJobSpy.mockResolvedValue([mockTask]); + zodValidator.validate.mockResolvedValue(undefined); + jest.spyOn(ingestionManager as unknown as { getChecksum: jest.Mock }, 'getChecksum').mockResolvedValue([mockChecksum]); + + jest + .spyOn(ingestionManager as unknown as { validateAndGetAbsoluteInputFiles: jest.Mock }, 'validateAndGetAbsoluteInputFiles') + .mockResolvedValue({ + gpkgFilesPath: [], + metadataShapefilePath: 'some/path', + productShapefilePath: 'some/path', + }); + + updateTaskSpy.mockRejectedValue('Update failed'); + updateJobSpy.mockResolvedValue(undefined); + mockJobTrackerClient.notify.mockResolvedValue(undefined); + + await ingestionManager.bypassValidationErrors(body, mockJobId); + + expect(updateTaskSpy).toHaveBeenCalled(); + }); + }); }); diff --git a/tests/unit/serverClients/jobTrackerWrapper.spec.ts b/tests/unit/serverClients/jobTrackerWrapper.spec.ts new file mode 100644 index 00000000..05a1140c --- /dev/null +++ b/tests/unit/serverClients/jobTrackerWrapper.spec.ts @@ -0,0 +1,47 @@ +import jsLogger from '@map-colonies/js-logger'; +import { trace } from '@opentelemetry/api'; +import nock from 'nock'; +import { HttpClient } from '@map-colonies/mc-utils'; +import { InternalServerError } from '@map-colonies/error-types'; +import { ITaskResponse } from '@map-colonies/mc-priority-queue'; +import { clear as clearConfig, configMock, registerDefaultConfig } from '../../mocks/configMock'; +import { JobTrackerClient } from 
'../../../src/serviceClients/jobTrackerClient'; + +describe('JobTrackerClient', () => { + let jobTrackerClient: JobTrackerClient; + let postSpy: jest.SpyInstance; + + beforeEach(() => { + registerDefaultConfig(); + + jobTrackerClient = new JobTrackerClient(configMock, jsLogger({ enabled: false }), trace.getTracer('testTracer')); + postSpy = jest.spyOn(HttpClient.prototype as unknown as { post: jest.Mock }, 'post'); + }); + + afterEach(() => { + nock.cleanAll(); + clearConfig(); + jest.resetAllMocks(); + jest.restoreAllMocks(); + }); + + describe('notify', () => { + it('should call post with the correct url', async () => { + const task = { id: 'taskId1', status: 'In-Progress', type: 'type1' } as unknown as ITaskResponse; + postSpy.mockResolvedValue({}); + + await jobTrackerClient.notify(task); + + expect(postSpy).toHaveBeenCalledWith('tasks/taskId1/notify'); + }); + + it('should throw an error when post throws an error', async () => { + const task = { id: 'taskId1', status: 'In-Progress', type: 'type1' } as unknown as ITaskResponse; + postSpy.mockRejectedValue(new InternalServerError('Internal Server Error')); + + const action = async () => jobTrackerClient.notify(task); + + await expect(action()).rejects.toThrow('Failed to notify job tracker: Internal Server Error'); + }); + }); +}); diff --git a/tsconfig.test.json b/tsconfig.test.json index 9149dc03..20be7db0 100644 --- a/tsconfig.test.json +++ b/tsconfig.test.json @@ -1,7 +1,8 @@ { "extends": "./tsconfig.json", "compilerOptions": { - "sourceMap": true + "sourceMap": true, + "types": ["node", "jest"] }, "include": ["src", "tests"],