diff --git a/tests/configurations/unit/jest.config.js b/tests/configurations/unit/jest.config.js index f3b49718..e6790a3a 100644 --- a/tests/configurations/unit/jest.config.js +++ b/tests/configurations/unit/jest.config.js @@ -16,6 +16,7 @@ module.exports = { '!/src/*', '!/src/serviceClients/database/SQLiteClient.ts', '!/src/ingestion/errors/ingestionErrors.ts', + '!/src/utils/stringCapitalizationPermutations.ts', '!/src/**/interfaces.ts', '!/src/utils/hash/constants.ts', ], diff --git a/tests/integration/info/info.spec.ts b/tests/integration/info/info.spec.ts index b36e240f..aa81befe 100644 --- a/tests/integration/info/info.spec.ts +++ b/tests/integration/info/info.spec.ts @@ -10,10 +10,10 @@ import type { DeepPartial, DeepRequired, FlattenKeyTupleUnion } from '../../util import { getTestContainerConfig, resetContainer } from './helpers/containerConfig'; import { InfoRequestSender } from './helpers/infoRequestSender'; -describe('Info', function () { +describe('Info', () => { let requestSender: InfoRequestSender; - beforeEach(function () { + beforeEach(() => { const [app] = getApp({ override: [...getTestContainerConfig()], }); @@ -21,7 +21,7 @@ describe('Info', function () { requestSender = new InfoRequestSender(app); }); - afterEach(function () { + afterEach(() => { resetContainer(); jest.restoreAllMocks(); nock.cleanAll(); @@ -58,6 +58,36 @@ describe('Info', function () { expect(response.body).toStrictEqual(expectedResponseBody); }); + //Added this test to make sure that pixelSize is not a rounded number but the exact number resolution + it('should return 200 status code and sources info from gpkg file with zoom level 21', async () => { + const request = { gpkgFilesPath: getGpkgsFilesLocalPath(['zoom21.gpkg']) }; + const expectedResponseBody = [ + { + crs: 4326, + fileFormat: 'GPKG', + pixelSize: 0.000000335276126861572, + extentPolygon: { + type: 'Polygon', + coordinates: [ + [ + [34.4870513, 31.5316438], + [34.4870513, 31.5297716], + [34.4892373, 31.5297716], + 
[34.4892373, 31.5316438], + [34.4870513, 31.5316438], + ], + ], + }, + fileName: 'tests/mocks/testFiles/gpkg/zoom21.gpkg', + }, + ]; + + const response = await requestSender.getGpkgsInfo(request); + + expect(response.status).toBe(httpStatusCodes.OK); + expect(response.body).toStrictEqual(expectedResponseBody); + }); + it('should return 200 status code and sources info invalid response - unsupported CRS', async () => { const badRequest = { gpkgFilesPath: getGpkgsFilesLocalPath(['invalidCrs-3857.gpkg']) }; const expectedResponseBody = [ diff --git a/tests/integration/ingestion/helpers/containerConfig.ts b/tests/integration/ingestion/helpers/containerConfig.ts index 58ecb37a..5f1e27a2 100644 --- a/tests/integration/ingestion/helpers/containerConfig.ts +++ b/tests/integration/ingestion/helpers/containerConfig.ts @@ -4,10 +4,16 @@ import { container, instancePerContainerCachingFactory } from 'tsyringe'; import { SERVICES } from '../../../../src/common/constants'; import { InjectionObject } from '../../../../src/common/dependencyRegistration'; import { GDAL_INFO_MANAGER_SYMBOL, GdalInfoManager } from '../../../../src/info/models/gdalInfoManager'; +import { CHECKSUM_PROCESSOR } from '../../../../src/utils/hash/constants'; +import type { ChecksumProcessor } from '../../../../src/utils/hash/interfaces'; import { INGESTION_SCHEMAS_VALIDATOR_SYMBOL, schemasValidationsFactory } from '../../../../src/utils/validation/schemasValidator'; import { configMock, getMock, hasMock, registerDefaultConfig } from '../../../mocks/configMock'; -function getTestContainerConfig(): InjectionObject[] { +interface ContainerConfigOptions { + checksumProcessor: () => () => Promise; +} + +function getTestContainerConfig({ checksumProcessor }: ContainerConfigOptions): InjectionObject[] { registerDefaultConfig(); return [ @@ -16,6 +22,14 @@ function getTestContainerConfig(): InjectionObject[] { { token: SERVICES.TRACER, provider: { useValue: trace.getTracer('testTracer') } }, { token: 
INGESTION_SCHEMAS_VALIDATOR_SYMBOL, provider: { useFactory: instancePerContainerCachingFactory(schemasValidationsFactory) } }, { token: GDAL_INFO_MANAGER_SYMBOL, provider: { useClass: GdalInfoManager } }, + { + token: CHECKSUM_PROCESSOR, + provider: { + useFactory: (): (() => Promise) => { + return checksumProcessor(); + }, + }, + }, ]; } diff --git a/tests/integration/ingestion/ingestion.spec.ts b/tests/integration/ingestion/ingestion.spec.ts index f93bca7f..701af1ef 100644 --- a/tests/integration/ingestion/ingestion.spec.ts +++ b/tests/integration/ingestion/ingestion.spec.ts @@ -1,18 +1,21 @@ import fs from 'node:fs'; import { faker } from '@faker-js/faker'; import { OperationStatus, type ICreateJobResponse } from '@map-colonies/mc-priority-queue'; -import { CORE_VALIDATIONS, getMapServingLayerName, RasterProductTypes } from '@map-colonies/raster-shared'; +import { ShapefileChunkReader } from '@map-colonies/mc-utils'; +import { CORE_VALIDATIONS, getMapServingLayerName, RasterProductTypes, SHAPEFILE_EXTENSIONS_LIST } from '@map-colonies/raster-shared'; import { SqliteError } from 'better-sqlite3'; import httpStatusCodes from 'http-status-codes'; import { matches, merge, set, unset } from 'lodash'; import nock from 'nock'; import { randexp } from 'randexp'; +import xxhashFactory from 'xxhash-wasm'; import { getApp } from '../../../src/app'; import { type ResponseId } from '../../../src/ingestion/interfaces'; import type { IngestionNewLayer } from '../../../src/ingestion/schemas/newLayerSchema'; import type { IngestionUpdateLayer } from '../../../src/ingestion/schemas/updateLayerSchema'; import { SQLiteClient } from '../../../src/serviceClients/database/SQLiteClient'; import { Checksum } from '../../../src/utils/hash/checksum'; +import type { ChecksumProcessor, HashAlgorithm } from '../../../src/utils/hash/interfaces'; import { configMock } from '../../mocks/configMock'; import { createCatalogLayerResponse, @@ -37,9 +40,39 @@ describe('Ingestion', () => { let 
jobResponse: ICreateJobResponse; let requestSender: IngestionRequestSender; + const mocksChecksumUpdate = Array.from({ length: SHAPEFILE_EXTENSIONS_LIST.length }, () => + jest.fn, Parameters>() + ); + const mocksChecksumDigest = Array.from({ length: SHAPEFILE_EXTENSIONS_LIST.length }, () => + jest.fn, Parameters>() + ); beforeEach(() => { + let mockedFileIndex = 0; + + const defaultOptions = { + checksumProcessor: (): (() => Promise) => { + const result = async () => { + const xxhash = await xxhashFactory(); + const xx64hash = xxhash.create64(); + + const mockProcessor = { + algorithm: 'XXH64', + update: mocksChecksumUpdate[mockedFileIndex].mockImplementation((...args) => { + return xx64hash.update(...args); + }), + digest: mocksChecksumDigest[mockedFileIndex].mockImplementation((...args) => { + return xx64hash.digest(...args); + }), + } satisfies ChecksumProcessor; + mockedFileIndex++; + return Object.assign(mockProcessor, { algorithm: 'XXH64' as const satisfies HashAlgorithm }); + }; + return result; + }, + }; + const [app] = getApp({ - override: [...getTestContainerConfig()], + override: [...getTestContainerConfig(defaultOptions)], }); jobResponse = { id: faker.string.uuid(), @@ -101,7 +134,7 @@ describe('Ingestion', () => { it('should return 200 status code when product shapefile is multipolygon', async () => { const layerRequest = createNewLayerRequest({ - inputFiles: { ...validInputFiles.inputFiles, productShapefilePath: 'validIndexedMultiPolygon' }, + inputFiles: { ...validInputFiles.inputFiles, productShapefilePath: 'validMultiPolygon' }, }); const newLayerName = getMapServingLayerName(layerRequest.metadata.productId, layerRequest.metadata.productType); const findJobsParams = createFindJobsParams({ @@ -579,11 +612,62 @@ describe('Ingestion', () => { expect(scope.isDone()).toBe(false); }); + it('should return 400 status code when product shapefile has 0 features', async () => { + const layerRequest = createNewLayerRequest({ + inputFiles: { + gpkgFilesPath: 
['validIndexed.gpkg'], + metadataShapefilePath: 'valid', + productShapefilePath: 'empty', + }, + }); + + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + const response = await requestSender.ingestNewLayer(layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.BAD_REQUEST); + expect(scope.isDone()).toBe(false); + }); + + it('should return 400 status code when product shapefile has more than 1 feature', async () => { + const layerRequest = createNewLayerRequest({ + inputFiles: { + gpkgFilesPath: ['validIndexed.gpkg'], + metadataShapefilePath: 'valid', + productShapefilePath: 'multiple', + }, + }); + + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + const response = await requestSender.ingestNewLayer(layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.BAD_REQUEST); + expect(scope.isDone()).toBe(false); + }); + + it('should return 400 status code when product shapefile is not polygon or multipolygon', async () => { + const layerRequest = createNewLayerRequest({ + inputFiles: { + gpkgFilesPath: ['validIndexed.gpkg'], + metadataShapefilePath: 'valid', + productShapefilePath: 'point', + }, + }); + + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + const response = await requestSender.ingestNewLayer(layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.BAD_REQUEST); + expect(scope.isDone()).toBe(false); + }); + it('should return 400 status code when product shapefile is not contained within gpkg extent', async () => { const layerRequest = createNewLayerRequest({ inputFiles: { gpkgFilesPath: ['validIndexed.gpkg'], - metadataShapefilePath: 'validIndexed', + metadataShapefilePath: 'valid', productShapefilePath: 'blueMarble', }, }); @@ -600,7 +684,7 @@ describe('Ingestion', () => { describe('Sad Path', () => 
{ it('should return 422 status code when invalid gdal info', async () => { const layerRequest = createNewLayerRequest({ - inputFiles: { gpkgFilesPath: ['invalidCrs-3857.gpkg'], metadataShapefilePath: 'validIndexed', productShapefilePath: 'validIndexed' }, + inputFiles: { gpkgFilesPath: ['invalidCrs-3857.gpkg'], metadataShapefilePath: 'valid', productShapefilePath: 'valid' }, }); const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); @@ -612,6 +696,21 @@ describe('Ingestion', () => { expect(scope.isDone()).toBe(false); }); + it('should return 422 status code when failed to read and process product shapefile', async () => { + const layerRequest = createNewLayerRequest({ + inputFiles: { gpkgFilesPath: ['validIndexed.gpkg'], metadataShapefilePath: 'valid', productShapefilePath: 'valid' }, + }); + + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + jest.spyOn(ShapefileChunkReader.prototype, 'readAndProcess').mockRejectedValueOnce(new Error()); + + const response = await requestSender.ingestNewLayer(layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.UNPROCESSABLE_ENTITY); + expect(scope.isDone()).toBe(false); + }); + it('should return 422 status code when failed to calculate checksum for input file - cannot create read stream', async () => { const layerRequest = createNewLayerRequest({ inputFiles: validInputFiles.inputFiles }); const newLayerName = getMapServingLayerName(layerRequest.metadata.productId, layerRequest.metadata.productType); @@ -698,7 +797,7 @@ describe('Ingestion', () => { expect(scope.isDone()).toBe(false); }); - it('should return 500 status code when failed to calculate checksum for input file', async () => { + it('should return 422 status code when failed to calculate checksum for input file - processing chunk', async () => { const layerRequest = createNewLayerRequest({ inputFiles: validInputFiles.inputFiles }); const newLayerName 
= getMapServingLayerName(layerRequest.metadata.productId, layerRequest.metadata.productType); @@ -720,12 +819,47 @@ describe('Ingestion', () => { nock(mapProxyApiServiceUrl) .get(`/layer/${encodeURIComponent(newLayerName)}`) .reply(httpStatusCodes.NOT_FOUND); - jest.spyOn(Checksum.prototype, 'calculate').mockRejectedValueOnce(new Error()); + mocksChecksumUpdate[0].mockImplementationOnce(() => { + throw new Error(); + }); const response = await requestSender.ingestNewLayer(layerRequest); expect(response).toSatisfyApiSpec(); - expect(response.status).toBe(httpStatusCodes.INTERNAL_SERVER_ERROR); + expect(response.status).toBe(httpStatusCodes.UNPROCESSABLE_ENTITY); + expect(scope.isDone()).toBe(false); + }); + + it('should return 422 status code when failed to calculate checksum for input file - digesting chunk', async () => { + const layerRequest = createNewLayerRequest({ inputFiles: validInputFiles.inputFiles }); + const newLayerName = getMapServingLayerName(layerRequest.metadata.productId, layerRequest.metadata.productType); + + const findJobsParams = createFindJobsParams({ + resourceId: layerRequest.metadata.productId, + productType: layerRequest.metadata.productType, + }); + + nock(jobManagerURL).post('/jobs/find', matches(findJobsParams)).reply(httpStatusCodes.OK, []); + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + nock(catalogServiceURL) + .post('/records/find', { + metadata: { + productId: layerRequest.metadata.productId, + productType: layerRequest.metadata.productType, + }, + }) + .reply(httpStatusCodes.OK, []); + nock(mapProxyApiServiceUrl) + .get(`/layer/${encodeURIComponent(newLayerName)}`) + .reply(httpStatusCodes.NOT_FOUND); + mocksChecksumDigest[0].mockImplementationOnce(() => { + throw new Error(); + }); + + const response = await requestSender.ingestNewLayer(layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.UNPROCESSABLE_ENTITY); 
expect(scope.isDone()).toBe(false); }); @@ -801,7 +935,7 @@ describe('Ingestion', () => { it('should return 200 status code with update request when product shapefile is multipolygon', async () => { const layerRequest = createUpdateLayerRequest({ - inputFiles: { ...validInputFiles.inputFiles, productShapefilePath: 'validIndexedMultiPolygon' }, + inputFiles: { ...validInputFiles.inputFiles, productShapefilePath: 'validMultiPolygon' }, callbackUrls: undefined, }); const updatedLayer = createCatalogLayerResponse(); @@ -879,7 +1013,7 @@ describe('Ingestion', () => { it('should return 200 status code with swap update request when product shapefile is multipolygon', async () => { const layerRequest = createUpdateLayerRequest({ - inputFiles: { ...validInputFiles.inputFiles, productShapefilePath: 'validIndexedMultiPolygon' }, + inputFiles: { ...validInputFiles.inputFiles, productShapefilePath: 'validMultiPolygon' }, }); const catalogLayerResponse = createCatalogLayerResponse({ metadata: { @@ -1144,11 +1278,74 @@ describe('Ingestion', () => { expect(scope.isDone()).toBe(false); }); + it('should return 400 status code when product shapefile has 0 features', async () => { + const layerRequest = createUpdateLayerRequest({ + inputFiles: { + gpkgFilesPath: ['validIndexed.gpkg'], + metadataShapefilePath: 'valid', + productShapefilePath: 'empty', + }, + }); + const updatedLayer = createCatalogLayerResponse(); + const updatedLayerMetadata = updatedLayer.metadata; + + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + nock(catalogServiceURL).post('/records/find', { id: updatedLayerMetadata.id }).reply(httpStatusCodes.OK, [updatedLayer]); + + const response = await requestSender.updateLayer(updatedLayerMetadata.id, layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.BAD_REQUEST); + expect(scope.isDone()).toBe(false); + }); + + it('should return 400 status code when product shapefile has more 
than 1 feature', async () => { + const layerRequest = createUpdateLayerRequest({ + inputFiles: { + gpkgFilesPath: ['validIndexed.gpkg'], + metadataShapefilePath: 'valid', + productShapefilePath: 'multiple', + }, + }); + const updatedLayer = createCatalogLayerResponse(); + const updatedLayerMetadata = updatedLayer.metadata; + + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + nock(catalogServiceURL).post('/records/find', { id: updatedLayerMetadata.id }).reply(httpStatusCodes.OK, [updatedLayer]); + + const response = await requestSender.updateLayer(updatedLayerMetadata.id, layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.BAD_REQUEST); + expect(scope.isDone()).toBe(false); + }); + + it('should return 400 status code when product shapefile is not polygon or multipolygon', async () => { + const layerRequest = createUpdateLayerRequest({ + inputFiles: { + gpkgFilesPath: ['validIndexed.gpkg'], + metadataShapefilePath: 'valid', + productShapefilePath: 'point', + }, + }); + const updatedLayer = createCatalogLayerResponse(); + const updatedLayerMetadata = updatedLayer.metadata; + + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + nock(catalogServiceURL).post('/records/find', { id: updatedLayerMetadata.id }).reply(httpStatusCodes.OK, [updatedLayer]); + + const response = await requestSender.updateLayer(updatedLayerMetadata.id, layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.BAD_REQUEST); + expect(scope.isDone()).toBe(false); + }); + it('should return 400 status code when product shapefile is not contained within gpkg extent', async () => { const layerRequest = createUpdateLayerRequest({ inputFiles: { gpkgFilesPath: ['validIndexed.gpkg'], - metadataShapefilePath: 'validIndexed', + metadataShapefilePath: 'valid', productShapefilePath: 'blueMarble', }, }); @@ -1321,7 +1518,7 @@ 
describe('Ingestion', () => { it('should return 422 status code when invalid gdal info', async () => { const layerRequest = createUpdateLayerRequest({ - inputFiles: { gpkgFilesPath: ['invalidCrs-3857.gpkg'], metadataShapefilePath: 'validIndexed', productShapefilePath: 'validIndexed' }, + inputFiles: { gpkgFilesPath: ['invalidCrs-3857.gpkg'], metadataShapefilePath: 'valid', productShapefilePath: 'valid' }, }); const updatedLayer = createCatalogLayerResponse(); const updatedLayerMetadata = updatedLayer.metadata; @@ -1338,7 +1535,7 @@ describe('Ingestion', () => { it('should return 422 status code when gpkg is invalid gpkg', async () => { const layerRequest = createUpdateLayerRequest({ - inputFiles: { gpkgFilesPath: ['invalid.gpkg'], metadataShapefilePath: 'validIndexed', productShapefilePath: 'validIndexed' }, + inputFiles: { gpkgFilesPath: ['invalid.gpkg'], metadataShapefilePath: 'valid', productShapefilePath: 'valid' }, }); const updatedLayer = createCatalogLayerResponse(); const updatedLayerMetadata = updatedLayer.metadata; @@ -1354,7 +1551,7 @@ describe('Ingestion', () => { it('should return 422 status code when gpkg index is missing', async () => { const layerRequest = createUpdateLayerRequest({ - inputFiles: { gpkgFilesPath: ['withoutGpkgIndex.gpkg'], metadataShapefilePath: 'validIndexed', productShapefilePath: 'validIndexed' }, + inputFiles: { gpkgFilesPath: ['withoutGpkgIndex.gpkg'], metadataShapefilePath: 'valid', productShapefilePath: 'valid' }, }); const updatedLayer = createCatalogLayerResponse(); const updatedLayerMetadata = updatedLayer.metadata; @@ -1371,7 +1568,7 @@ describe('Ingestion', () => { it('should return 422 status code when gpkg grid is not a supported tile matrix grid', async () => { const layerRequest = createUpdateLayerRequest({ - inputFiles: { gpkgFilesPath: ['unsupportedGridMatrix.gpkg'], metadataShapefilePath: 'validIndexed', productShapefilePath: 'validIndexed' }, + inputFiles: { gpkgFilesPath: ['unsupportedGridMatrix.gpkg'], 
metadataShapefilePath: 'valid', productShapefilePath: 'valid' }, }); const updatedLayer = createCatalogLayerResponse(); const updatedLayerMetadata = updatedLayer.metadata; @@ -1390,8 +1587,8 @@ describe('Ingestion', () => { const layerRequest = createUpdateLayerRequest({ inputFiles: { gpkgFilesPath: ['unsupportedTileSize-height-512.gpkg'], - metadataShapefilePath: 'validIndexed', - productShapefilePath: 'validIndexed', + metadataShapefilePath: 'valid', + productShapefilePath: 'valid', }, }); const updatedLayer = createCatalogLayerResponse(); @@ -1411,8 +1608,29 @@ describe('Ingestion', () => { const layerRequest = createUpdateLayerRequest({ inputFiles: { gpkgFilesPath: ['unsupportedTileSize-width-512.gpkg'], - metadataShapefilePath: 'validIndexed', - productShapefilePath: 'validIndexed', + metadataShapefilePath: 'valid', + productShapefilePath: 'valid', + }, + }); + const updatedLayer = createCatalogLayerResponse(); + const updatedLayerMetadata = updatedLayer.metadata; + + const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); + nock(catalogServiceURL).post('/records/find', { id: updatedLayerMetadata.id }).reply(httpStatusCodes.OK, [updatedLayer]); + + const response = await requestSender.updateLayer(updatedLayerMetadata.id, layerRequest); + + expect(response).toSatisfyApiSpec(); + expect(response.status).toBe(httpStatusCodes.UNPROCESSABLE_ENTITY); + expect(scope.isDone()).toBe(false); + }); + + it('should return 422 status code when failed to read and process product shapefile', async () => { + const layerRequest = createUpdateLayerRequest({ + inputFiles: { + gpkgFilesPath: ['validIndexed.gpkg'], + metadataShapefilePath: 'valid', + productShapefilePath: 'valid', }, }); const updatedLayer = createCatalogLayerResponse(); @@ -1420,6 +1638,7 @@ describe('Ingestion', () => { const scope = nock(jobManagerURL).post('/jobs').reply(httpStatusCodes.OK, jobResponse); nock(catalogServiceURL).post('/records/find', { id: updatedLayerMetadata.id 
}).reply(httpStatusCodes.OK, [updatedLayer]); + jest.spyOn(ShapefileChunkReader.prototype, 'readAndProcess').mockRejectedValueOnce(new Error()); const response = await requestSender.updateLayer(updatedLayerMetadata.id, layerRequest); @@ -1491,8 +1710,8 @@ describe('Ingestion', () => { const layerRequest = createUpdateLayerRequest({ inputFiles: { gpkgFilesPath: ['validIndexed.gpkg'], - metadataShapefilePath: 'validIndexed', - productShapefilePath: 'validIndexed', + metadataShapefilePath: 'valid', + productShapefilePath: 'valid', }, }); const updatedLayer = createCatalogLayerResponse(); diff --git a/tests/integration/validate/validate.spec.ts b/tests/integration/validate/validate.spec.ts index 89dea298..0c66eaed 100644 --- a/tests/integration/validate/validate.spec.ts +++ b/tests/integration/validate/validate.spec.ts @@ -17,13 +17,13 @@ import type { DeepPartial, DeepRequired, FlattenKeyTupleUnion } from '../../util import { getTestContainerConfig, resetContainer } from './helpers/containerConfig'; import { ValidateRequestSender } from './helpers/validateRequestSender'; -describe('Validate', function () { +describe('Validate', () => { let requestSender: ValidateRequestSender; let validateFilesExistSpy: jest.SpyInstance; let validateGdalInfoSpy: jest.SpyInstance; let validateGpkgFilesSpy: jest.SpyInstance; - beforeEach(function () { + beforeEach(() => { const [app] = getApp({ override: [...getTestContainerConfig()], }); @@ -35,14 +35,14 @@ describe('Validate', function () { validateGpkgFilesSpy = jest.spyOn(SourceValidator.prototype, 'validateGpkgFiles'); }); - afterEach(function () { + afterEach(() => { resetContainer(); jest.restoreAllMocks(); nock.cleanAll(); }); - describe('POST /validate/gpkgs', function () { - describe('Happy Path', function () { + describe('POST /validate/gpkgs', () => { + describe('Happy Path', () => { it('should return 200 status code and sources is valid response', async () => { const validateGpkgsRequest = { gpkgFilesPath: 
getGpkgsFilesLocalPath(validInputFiles.inputFiles.gpkgFilesPath) }; @@ -181,12 +181,12 @@ describe('Validate', function () { }); }); - describe('Bad Path', function () { + describe('Bad Path', () => { let zodValidatorSpy: jest.SpyInstance; - beforeEach(function () { + beforeEach(() => { zodValidatorSpy = jest.spyOn(ZodValidator.prototype, 'validate'); }); - afterEach(function () { + afterEach(() => { zodValidatorSpy.mockClear(); }); @@ -240,8 +240,8 @@ describe('Validate', function () { }); }); - describe('Sad Path', function () { - beforeEach(function () { + describe('Sad Path', () => { + beforeEach(() => { jest.spyOn(SQLiteClient.prototype, 'getDB').mockImplementation(() => { throw new SqliteError('failed read sqlite file', 'SQLITE_ERROR'); }); diff --git a/tests/mocks/static/exampleData.ts b/tests/mocks/static/exampleData.ts index 75a96e31..90088ce2 100644 --- a/tests/mocks/static/exampleData.ts +++ b/tests/mocks/static/exampleData.ts @@ -4,14 +4,14 @@ import type { ValidationTaskParameters } from '../../../src/ingestion/interfaces export const validInputFiles: Pick & { inputFiles: InputFiles } = { inputFiles: { gpkgFilesPath: ['validIndexed.gpkg'], - productShapefilePath: 'validIndexed', - metadataShapefilePath: 'validIndexed', + productShapefilePath: 'valid', + metadataShapefilePath: 'valid', }, checksums: [ - { algorithm: 'XXH64', checksum: 'a0915c78be995614', fileName: 'metadata/validIndexed/ShapeMetadata.cpg' }, - { algorithm: 'XXH64', checksum: '1c4047022f216b6f', fileName: 'metadata/validIndexed/ShapeMetadata.dbf' }, - { algorithm: 'XXH64', checksum: '691fb87c5aeebb48', fileName: 'metadata/validIndexed/ShapeMetadata.prj' }, - { algorithm: 'XXH64', checksum: '5e371a633204f7eb', fileName: 'metadata/validIndexed/ShapeMetadata.shp' }, - { algorithm: 'XXH64', checksum: '89abcaac2015beff', fileName: 'metadata/validIndexed/ShapeMetadata.shx' }, + { algorithm: 'XXH64', checksum: 'a0915c78be995614', fileName: 'metadata/valid/ShapeMetadata.cpg' }, + { algorithm: 
'XXH64', checksum: '1c4047022f216b6f', fileName: 'metadata/valid/ShapeMetadata.dbf' }, + { algorithm: 'XXH64', checksum: '691fb87c5aeebb48', fileName: 'metadata/valid/ShapeMetadata.prj' }, + { algorithm: 'XXH64', checksum: '5e371a633204f7eb', fileName: 'metadata/valid/ShapeMetadata.shp' }, + { algorithm: 'XXH64', checksum: '89abcaac2015beff', fileName: 'metadata/valid/ShapeMetadata.shx' }, ], }; diff --git a/tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.cpg b/tests/mocks/testFiles/metadata/valid/ShapeMetadata.cpg similarity index 100% rename from tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.cpg rename to tests/mocks/testFiles/metadata/valid/ShapeMetadata.cpg diff --git a/tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.dbf b/tests/mocks/testFiles/metadata/valid/ShapeMetadata.dbf similarity index 100% rename from tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.dbf rename to tests/mocks/testFiles/metadata/valid/ShapeMetadata.dbf diff --git a/tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.prj b/tests/mocks/testFiles/metadata/valid/ShapeMetadata.prj similarity index 100% rename from tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.prj rename to tests/mocks/testFiles/metadata/valid/ShapeMetadata.prj diff --git a/tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.shp b/tests/mocks/testFiles/metadata/valid/ShapeMetadata.shp similarity index 100% rename from tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.shp rename to tests/mocks/testFiles/metadata/valid/ShapeMetadata.shp diff --git a/tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.shx b/tests/mocks/testFiles/metadata/valid/ShapeMetadata.shx similarity index 100% rename from tests/mocks/testFiles/metadata/validIndexed/ShapeMetadata.shx rename to tests/mocks/testFiles/metadata/valid/ShapeMetadata.shx diff --git a/tests/mocks/testFiles/product/validIndexed/Product.cpg b/tests/mocks/testFiles/product/empty/Product.cpg similarity index 
100% rename from tests/mocks/testFiles/product/validIndexed/Product.cpg rename to tests/mocks/testFiles/product/empty/Product.cpg diff --git a/tests/mocks/testFiles/product/empty/Product.dbf b/tests/mocks/testFiles/product/empty/Product.dbf new file mode 100644 index 00000000..63b40d47 Binary files /dev/null and b/tests/mocks/testFiles/product/empty/Product.dbf differ diff --git a/tests/mocks/testFiles/product/validIndexed/Product.prj b/tests/mocks/testFiles/product/empty/Product.prj similarity index 100% rename from tests/mocks/testFiles/product/validIndexed/Product.prj rename to tests/mocks/testFiles/product/empty/Product.prj diff --git a/tests/mocks/testFiles/product/empty/Product.shp b/tests/mocks/testFiles/product/empty/Product.shp new file mode 100644 index 00000000..3d95e38a Binary files /dev/null and b/tests/mocks/testFiles/product/empty/Product.shp differ diff --git a/tests/mocks/testFiles/product/empty/Product.shx b/tests/mocks/testFiles/product/empty/Product.shx new file mode 100644 index 00000000..3d95e38a Binary files /dev/null and b/tests/mocks/testFiles/product/empty/Product.shx differ diff --git a/tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.cpg b/tests/mocks/testFiles/product/multiple/Product.cpg similarity index 100% rename from tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.cpg rename to tests/mocks/testFiles/product/multiple/Product.cpg diff --git a/tests/mocks/testFiles/product/multiple/Product.dbf b/tests/mocks/testFiles/product/multiple/Product.dbf new file mode 100644 index 00000000..f4330e08 Binary files /dev/null and b/tests/mocks/testFiles/product/multiple/Product.dbf differ diff --git a/tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.prj b/tests/mocks/testFiles/product/multiple/Product.prj similarity index 100% rename from tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.prj rename to tests/mocks/testFiles/product/multiple/Product.prj diff --git 
a/tests/mocks/testFiles/product/multiple/Product.shp b/tests/mocks/testFiles/product/multiple/Product.shp new file mode 100644 index 00000000..40f20b64 Binary files /dev/null and b/tests/mocks/testFiles/product/multiple/Product.shp differ diff --git a/tests/mocks/testFiles/product/multiple/Product.shx b/tests/mocks/testFiles/product/multiple/Product.shx new file mode 100644 index 00000000..e7d33ae8 Binary files /dev/null and b/tests/mocks/testFiles/product/multiple/Product.shx differ diff --git a/tests/mocks/testFiles/product/point/Product.cpg b/tests/mocks/testFiles/product/point/Product.cpg new file mode 100644 index 00000000..3ad133c0 --- /dev/null +++ b/tests/mocks/testFiles/product/point/Product.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/tests/mocks/testFiles/product/point/Product.dbf b/tests/mocks/testFiles/product/point/Product.dbf new file mode 100644 index 00000000..3587ca13 Binary files /dev/null and b/tests/mocks/testFiles/product/point/Product.dbf differ diff --git a/tests/mocks/testFiles/product/point/Product.prj b/tests/mocks/testFiles/product/point/Product.prj new file mode 100644 index 00000000..f45cbadf --- /dev/null +++ b/tests/mocks/testFiles/product/point/Product.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/tests/mocks/testFiles/product/point/Product.shp b/tests/mocks/testFiles/product/point/Product.shp new file mode 100644 index 00000000..0112cd74 Binary files /dev/null and b/tests/mocks/testFiles/product/point/Product.shp differ diff --git a/tests/mocks/testFiles/product/point/Product.shx b/tests/mocks/testFiles/product/point/Product.shx new file mode 100644 index 00000000..cc0b42ae Binary files /dev/null and b/tests/mocks/testFiles/product/point/Product.shx differ diff --git a/tests/mocks/testFiles/product/valid/Product.cpg 
b/tests/mocks/testFiles/product/valid/Product.cpg new file mode 100644 index 00000000..3ad133c0 --- /dev/null +++ b/tests/mocks/testFiles/product/valid/Product.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/tests/mocks/testFiles/product/validIndexed/Product.dbf b/tests/mocks/testFiles/product/valid/Product.dbf similarity index 100% rename from tests/mocks/testFiles/product/validIndexed/Product.dbf rename to tests/mocks/testFiles/product/valid/Product.dbf diff --git a/tests/mocks/testFiles/product/valid/Product.prj b/tests/mocks/testFiles/product/valid/Product.prj new file mode 100644 index 00000000..f45cbadf --- /dev/null +++ b/tests/mocks/testFiles/product/valid/Product.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/tests/mocks/testFiles/product/validIndexed/Product.shp b/tests/mocks/testFiles/product/valid/Product.shp similarity index 100% rename from tests/mocks/testFiles/product/validIndexed/Product.shp rename to tests/mocks/testFiles/product/valid/Product.shp diff --git a/tests/mocks/testFiles/product/validIndexed/Product.shx b/tests/mocks/testFiles/product/valid/Product.shx similarity index 100% rename from tests/mocks/testFiles/product/validIndexed/Product.shx rename to tests/mocks/testFiles/product/valid/Product.shx diff --git a/tests/mocks/testFiles/product/validMultiPolygon/Product.cpg b/tests/mocks/testFiles/product/validMultiPolygon/Product.cpg new file mode 100644 index 00000000..3ad133c0 --- /dev/null +++ b/tests/mocks/testFiles/product/validMultiPolygon/Product.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.dbf b/tests/mocks/testFiles/product/validMultiPolygon/Product.dbf similarity index 100% rename from tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.dbf rename to 
tests/mocks/testFiles/product/validMultiPolygon/Product.dbf diff --git a/tests/mocks/testFiles/product/validMultiPolygon/Product.prj b/tests/mocks/testFiles/product/validMultiPolygon/Product.prj new file mode 100644 index 00000000..f45cbadf --- /dev/null +++ b/tests/mocks/testFiles/product/validMultiPolygon/Product.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.shp b/tests/mocks/testFiles/product/validMultiPolygon/Product.shp similarity index 100% rename from tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.shp rename to tests/mocks/testFiles/product/validMultiPolygon/Product.shp diff --git a/tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.shx b/tests/mocks/testFiles/product/validMultiPolygon/Product.shx similarity index 100% rename from tests/mocks/testFiles/product/validIndexedMultiPolygon/Product.shx rename to tests/mocks/testFiles/product/validMultiPolygon/Product.shx diff --git a/tests/mocks/testFiles/unsupportedFormats/test.ecw b/tests/mocks/testFiles/unsupportedFormats/test.ecw deleted file mode 100644 index 69447c78..00000000 Binary files a/tests/mocks/testFiles/unsupportedFormats/test.ecw and /dev/null differ diff --git a/tests/mocks/testFiles/unsupportedFormats/world.jp2 b/tests/mocks/testFiles/unsupportedFormats/world.jp2 deleted file mode 100644 index 0f84bc50..00000000 Binary files a/tests/mocks/testFiles/unsupportedFormats/world.jp2 and /dev/null differ diff --git a/tests/unit/info/models/gdalInfoManager.spec.ts b/tests/unit/info/models/gdalInfoManager.spec.ts index 98414b3f..5f3863e6 100644 --- a/tests/unit/info/models/gdalInfoManager.spec.ts +++ b/tests/unit/info/models/gdalInfoManager.spec.ts @@ -1,37 +1,34 @@ import { BadRequestError } from '@map-colonies/error-types'; -import { container } from 
'tsyringe'; -import { getApp } from '../../../../src/app'; -import { GDAL_INFO_MANAGER_SYMBOL, GdalInfoManager } from '../../../../src/info/models/gdalInfoManager'; +import jsLogger from '@map-colonies/js-logger'; +import { trace } from '@opentelemetry/api'; +import { GdalInfoManager } from '../../../../src/info/models/gdalInfoManager'; import { GdalInfoError } from '../../../../src/ingestion/errors/ingestionErrors'; import { GdalUtilities } from '../../../../src/utils/gdal/gdalUtilities'; -import { INGESTION_SCHEMAS_VALIDATOR_SYMBOL, SchemasValidator } from '../../../../src/utils/validation/schemasValidator'; -import { getTestContainerConfig } from '../../../integration/ingestion/helpers/containerConfig'; -import { registerDefaultConfig } from '../../../mocks/configMock'; +import { SchemasValidator } from '../../../../src/utils/validation/schemasValidator'; import { mockGdalInfoDataWithFile } from '../../../mocks/gdalInfoMock'; import { generateInputFiles } from '../../../mocks/mockFactory'; +const mockSchemaValidator = { + validateInfoData: jest.fn(), +} satisfies Partial; + +const mockGdalUtilities = { getInfoData: jest.fn() } satisfies Partial; + describe('GdalInfoManager', () => { let gdalInfoManager: GdalInfoManager; - let schemaValidator: SchemasValidator; beforeEach(() => { - const [, container] = getApp({ - override: [...getTestContainerConfig()], - useChild: true, - }); - schemaValidator = container.resolve(INGESTION_SCHEMAS_VALIDATOR_SYMBOL); - gdalInfoManager = container.resolve(GDAL_INFO_MANAGER_SYMBOL); - registerDefaultConfig(); + const testTracer = trace.getTracer('testTracer'); + + gdalInfoManager = new GdalInfoManager( + jsLogger({ enabled: false }), + testTracer, + mockSchemaValidator as unknown as SchemasValidator, + mockGdalUtilities as unknown as GdalUtilities + ); }); afterEach(() => { - container.clearInstances(); - jest.clearAllMocks(); - jest.restoreAllMocks(); - }); - - afterAll(() => { - container.clearInstances(); jest.clearAllMocks(); 
jest.restoreAllMocks(); }); @@ -39,55 +36,60 @@ describe('GdalInfoManager', () => { describe('validateInfoData', () => { it('should succesfuly validate gdal info data according to number of gpkg source files', async () => { const { gpkgFilesPath } = generateInputFiles(); + mockSchemaValidator.validateInfoData.mockResolvedValue(mockGdalInfoDataWithFile); - const schemaValidatorSpy = jest.spyOn(schemaValidator, 'validateInfoData').mockResolvedValue(mockGdalInfoDataWithFile); + const promise = gdalInfoManager.validateInfoData([mockGdalInfoDataWithFile]); - expect(await gdalInfoManager.validateInfoData([mockGdalInfoDataWithFile])).toBeUndefined(); - expect(schemaValidatorSpy).toHaveBeenCalledTimes(gpkgFilesPath.length); + await expect(promise).resolves.not.toThrow(); + expect(mockSchemaValidator.validateInfoData).toHaveBeenCalledTimes(gpkgFilesPath.length); }); it('should throw gdal info error - Unsupported CRS', async () => { const invalidGdalInfo = { ...mockGdalInfoDataWithFile, crs: 3857 }; + mockSchemaValidator.validateInfoData.mockRejectedValue(new BadRequestError('Unsupported CRS')); + + const promise = gdalInfoManager.validateInfoData([invalidGdalInfo]); - jest.spyOn(schemaValidator, 'validateInfoData').mockRejectedValue(new BadRequestError('Unsupported CRS')); - await expect(gdalInfoManager.validateInfoData([invalidGdalInfo])).rejects.toThrow(/Unsupported CRS/); + await expect(promise).rejects.toThrow(/Unsupported CRS/); }); it('should throw gdal info error - Unsupported pixel size', async () => { const invalidGdalInfo = { ...mockGdalInfoDataWithFile, pixelSize: 0.9 }; + mockSchemaValidator.validateInfoData.mockRejectedValue(new BadRequestError('Unsupported pixel size')); + + const promise = gdalInfoManager.validateInfoData([invalidGdalInfo]); - jest.spyOn(schemaValidator, 'validateInfoData').mockRejectedValue(new BadRequestError('Unsupported pixel size')); - await expect(gdalInfoManager.validateInfoData([invalidGdalInfo])).rejects.toThrow(/Unsupported pixel 
size/); + await expect(promise).rejects.toThrow(/Unsupported pixel size/); }); it('should throw gdal info error - Unsupported file format', async () => { const invalidGdalInfo = { ...mockGdalInfoDataWithFile, fileFormat: 'TIFF' }; + mockSchemaValidator.validateInfoData.mockRejectedValue(new BadRequestError('Unsupported file format')); - jest.spyOn(schemaValidator, 'validateInfoData').mockRejectedValue(new BadRequestError('Unsupported file format')); - await expect(gdalInfoManager.validateInfoData([invalidGdalInfo])).rejects.toThrow(/Unsupported file format/); + const promise = gdalInfoManager.validateInfoData([invalidGdalInfo]); + + await expect(promise).rejects.toThrow(/Unsupported file format/); }); }); describe('getInfoData', () => { it('should return gdal info data array', async () => { const gpkgFilesPath: string[] = [mockGdalInfoDataWithFile.fileName]; - const managerGdalInfoSpy = jest.spyOn(GdalInfoManager.prototype, 'getInfoData'); - const utilityGdalInfoSpy = jest.spyOn(GdalUtilities.prototype, 'getInfoData'); - utilityGdalInfoSpy.mockResolvedValue(mockGdalInfoDataWithFile); + mockGdalUtilities.getInfoData.mockResolvedValue(mockGdalInfoDataWithFile); const result = await gdalInfoManager.getInfoData(gpkgFilesPath); expect(result).toEqual([mockGdalInfoDataWithFile]); - expect(managerGdalInfoSpy).toHaveBeenCalledTimes(1); - expect(utilityGdalInfoSpy).toHaveBeenCalledTimes(gpkgFilesPath.length); - expect(managerGdalInfoSpy).toHaveBeenCalledWith(gpkgFilesPath); + expect(mockGdalUtilities.getInfoData).toHaveBeenCalledTimes(1); }); - it('should throw an GdalError when error occur', async () => { + it('should throw a GdalError when error occur', async () => { const gpkgFilesPath: string[] = [mockGdalInfoDataWithFile.fileName]; + mockGdalUtilities.getInfoData.mockRejectedValue(new Error('Unknown Error')); + + const promise = gdalInfoManager.getInfoData(gpkgFilesPath); - jest.spyOn(GdalUtilities.prototype, 'getInfoData').mockRejectedValue(new Error('Unknown 
Error')); - await expect(gdalInfoManager.getInfoData(gpkgFilesPath)).rejects.toThrow(GdalInfoError); + await expect(promise).rejects.toThrow(GdalInfoError); }); }); }); diff --git a/tests/unit/ingestion/models/ingestionManager.spec.ts b/tests/unit/ingestion/models/ingestionManager.spec.ts index 5940237f..65f2f5c2 100644 --- a/tests/unit/ingestion/models/ingestionManager.spec.ts +++ b/tests/unit/ingestion/models/ingestionManager.spec.ts @@ -5,8 +5,6 @@ import { ICreateJobResponse, OperationStatus } from '@map-colonies/mc-priority-q import { getMapServingLayerName } from '@map-colonies/raster-shared'; import { trace } from '@opentelemetry/api'; import { container } from 'tsyringe'; -import xxhashFactory from 'xxhash-wasm'; -import { SERVICES } from '../../../../src/common/constants'; import { InfoManager } from '../../../../src/info/models/infoManager'; import { ChecksumError, FileNotFoundError, UnsupportedEntityError } from '../../../../src/ingestion/errors/ingestionErrors'; import { IngestionManager } from '../../../../src/ingestion/models/ingestionManager'; @@ -16,8 +14,6 @@ import { CatalogClient } from '../../../../src/serviceClients/catalogClient'; import { JobManagerWrapper } from '../../../../src/serviceClients/jobManagerWrapper'; import { MapProxyClient } from '../../../../src/serviceClients/mapProxyClient'; import { Checksum } from '../../../../src/utils/hash/checksum'; -import { CHECKSUM_PROCESSOR } from '../../../../src/utils/hash/constants'; -import type { ChecksumProcessor } from '../../../../src/utils/hash/interfaces'; import type { ValidateManager } from '../../../../src/validate/models/validateManager'; import { clear as clearConfig, configMock, registerDefaultConfig } from '../../../mocks/configMock'; import { generateCatalogLayerResponse, generateChecksum, generateNewLayerRequest, generateUpdateLayerRequest } from '../../../mocks/mockFactory'; @@ -30,7 +26,7 @@ describe('IngestionManager', () => { validateShapefiles: jest.fn(), } satisfies Partial; 
- const productManager = { read: jest.fn() } satisfies Partial; + const mockProductManager = { read: jest.fn() } satisfies Partial; const mockInfoManager = { getGpkgsInformation: jest.fn(), @@ -38,13 +34,16 @@ describe('IngestionManager', () => { const mockGeoValidator = { validate: jest.fn(), - }; + } satisfies Partial; + + const mockChecksum = { + calculate: jest.fn(), + } satisfies Partial; let createIngestionJobSpy: jest.SpyInstance; let findJobsSpy: jest.SpyInstance; let existsMapproxySpy: jest.SpyInstance; let existsCatalogSpy: jest.SpyInstance; - let calcualteChecksumSpy: jest.SpyInstance; let findByIdSpy: jest.SpyInstance; let catalogClient: CatalogClient; @@ -56,19 +55,6 @@ describe('IngestionManager', () => { beforeEach(() => { registerDefaultConfig(); - // Reset container for a clean test - container.reset(); - container.register(SERVICES.TRACER, { useValue: testTracer }); - container.register(SERVICES.LOGGER, { useValue: testLogger }); - container.register(CHECKSUM_PROCESSOR, { - useFactory: (): (() => Promise) => { - return async () => { - const xxhash = await xxhashFactory(); - return { ...xxhash.create64(), algorithm: 'XXH64' }; - }; - }, - }); - mapProxyClient = new MapProxyClient(configMock, testLogger, testTracer); catalogClient = new CatalogClient(configMock, testLogger, testTracer); jobManagerWrapper = new JobManagerWrapper(configMock, testLogger, testTracer); @@ -77,7 +63,7 @@ describe('IngestionManager', () => { existsMapproxySpy = jest.spyOn(MapProxyClient.prototype, 'exists'); existsCatalogSpy = jest.spyOn(CatalogClient.prototype, 'exists'); findByIdSpy = jest.spyOn(CatalogClient.prototype, 'findById'); - calcualteChecksumSpy = jest.spyOn(Checksum.prototype, 'calculate'); + jest.spyOn(container, 'resolve').mockReturnValue(mockChecksum); ingestionManager = new IngestionManager( testLogger, @@ -89,7 +75,7 @@ describe('IngestionManager', () => { catalogClient, jobManagerWrapper, mapProxyClient, - productManager as unknown as ProductManager + 
mockProductManager as unknown as ProductManager ); }); @@ -111,12 +97,12 @@ describe('IngestionManager', () => { mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(false); existsCatalogSpy.mockResolvedValue(false); findJobsSpy.mockResolvedValue([]); - calcualteChecksumSpy.mockResolvedValue(generateChecksum()); + mockChecksum.calculate.mockResolvedValue(generateChecksum()); createIngestionJobSpy.mockResolvedValue(createJobResponse); const expectedResponse = { jobId: createJobResponse.id, taskId: createJobResponse.taskIds[0] }; @@ -130,6 +116,7 @@ describe('IngestionManager', () => { const layerRequest = generateNewLayerRequest(); const expectedErrorMessage = 'error message'; mockValidateManager.validateShapefiles.mockRejectedValue(new FileNotFoundError(expectedErrorMessage)); + mockValidateManager.validateShapefiles.mockRejectedValue(new FileNotFoundError(expectedErrorMessage)); const promise = ingestionManager.newLayer(layerRequest); @@ -142,6 +129,8 @@ describe('IngestionManager', () => { const expectedErrorMessage = 'errror message'; mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockRejectedValue(new Error(expectedErrorMessage)); + mockValidateManager.validateShapefiles.mockResolvedValue(undefined); + mockValidateManager.validateGpkgsSources.mockRejectedValue(new Error(expectedErrorMessage)); const promise = ingestionManager.newLayer(layerRequest); @@ -154,6 +143,9 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); 
mockInfoManager.getGpkgsInformation.mockRejectedValue(new Error()); + mockValidateManager.validateShapefiles.mockResolvedValue(undefined); + mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); + mockInfoManager.getGpkgsInformation.mockRejectedValue(new Error()); const promise = ingestionManager.newLayer(layerRequest); @@ -166,7 +158,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockRejectedValue(new Error()); + mockProductManager.read.mockRejectedValue(new Error()); const promise = ingestionManager.newLayer(layerRequest); @@ -179,7 +171,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockImplementation(() => { throw new Error(); }); @@ -197,7 +189,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(true); @@ -212,7 +204,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - 
productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockRejectedValue(new Error()); @@ -228,7 +220,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(false); existsCatalogSpy.mockResolvedValue(true); @@ -244,7 +236,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(false); existsCatalogSpy.mockRejectedValue(new Error()); @@ -261,7 +253,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(false); existsCatalogSpy.mockResolvedValue(false); @@ -278,7 +270,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); 
mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(false); existsCatalogSpy.mockResolvedValue(false); @@ -297,12 +289,12 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(false); existsCatalogSpy.mockResolvedValue(false); findJobsSpy.mockResolvedValue([]); - calcualteChecksumSpy.mockRejectedValue(new ChecksumError(expectedErrorMessage)); + mockChecksum.calculate.mockRejectedValue(new ChecksumError(expectedErrorMessage)); const promise = ingestionManager.newLayer(layerRequest); @@ -315,12 +307,12 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(false); existsCatalogSpy.mockResolvedValue(false); findJobsSpy.mockResolvedValue([]); - calcualteChecksumSpy.mockResolvedValue(generateChecksum()); + mockChecksum.calculate.mockResolvedValue(generateChecksum()); createIngestionJobSpy.mockRejectedValue(new Error()); const promise = ingestionManager.newLayer(layerRequest); @@ -350,11 +342,11 @@ describe('IngestionManager', () => { 
mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(true); findJobsSpy.mockResolvedValue([]); - calcualteChecksumSpy.mockResolvedValue(generateChecksum()); + mockChecksum.calculate.mockResolvedValue(generateChecksum()); createIngestionJobSpy.mockResolvedValue(createJobResponse); const expectedResponse = { jobId: createJobResponse.id, taskId: createJobResponse.taskIds[0] }; @@ -379,10 +371,10 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(true); - calcualteChecksumSpy.mockResolvedValue(generateChecksum()); + mockChecksum.calculate.mockResolvedValue(generateChecksum()); findJobsSpy.mockResolvedValue([]); createIngestionJobSpy.mockResolvedValue(createJobResponse); const expectedResponse = { jobId: createJobResponse.id, taskId: createJobResponse.taskIds[0] }; @@ -426,7 +418,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(false); @@ -444,7 
+436,7 @@ describe('IngestionManager', () => { mockValidateManager.validateShapefiles.mockResolvedValue(undefined); mockValidateManager.validateGpkgsSources.mockResolvedValue(undefined); mockInfoManager.getGpkgsInformation.mockResolvedValue(undefined); - productManager.read.mockResolvedValue(undefined); + mockProductManager.read.mockResolvedValue(undefined); mockGeoValidator.validate.mockResolvedValue(undefined); existsMapproxySpy.mockResolvedValue(true); findJobsSpy.mockResolvedValue([{ status: OperationStatus.IN_PROGRESS }]); diff --git a/tests/unit/ingestion/models/productManager.spec.ts b/tests/unit/ingestion/models/productManager.spec.ts index 9ee64eec..33ab68fe 100644 --- a/tests/unit/ingestion/models/productManager.spec.ts +++ b/tests/unit/ingestion/models/productManager.spec.ts @@ -11,6 +11,27 @@ import type { SchemasValidator } from '../../../../src/utils/validation/schemasV import { registerDefaultConfig } from '../../../mocks/configMock'; import { generateInputFiles } from '../../../mocks/mockFactory'; +const mockReadShapefile = jest + .fn<{ done: boolean; value?: Feature }, unknown[]>() + .mockReturnValueOnce({ done: false, value: { type: 'Feature', properties: {}, geometry: { type: 'Polygon', coordinates: [[[]]] } } }) + .mockReturnValueOnce({ done: true, value: undefined }) + .mockReturnValueOnce({ done: false, value: { type: 'Feature', properties: {}, geometry: { type: 'Polygon', coordinates: [[[]]] } } }) + .mockReturnValueOnce({ done: true, value: undefined }); +const mockOpenShapefile = jest.fn<{ read: () => unknown }, unknown[]>().mockReturnValue({ + read: () => { + return mockReadShapefile(); + }, +}); +jest.mock('shapefile', () => { + return { + // eslint-disable-next-line @typescript-eslint/naming-convention + __esmodule: true, + open: () => { + return mockOpenShapefile(); + }, + }; +}); + describe('ProductManager', () => { let productManager: ProductManager; const mockSchemaValidator = { @@ -33,6 +54,35 @@ describe('ProductManager', () => { 
}); describe('read', () => { + describe('deep mock (white-box) - test private processor', () => { + it('should successfully read product shapefile and return product geometry', async () => { + const { productShapefilePath } = generateInputFiles(); + const featurePolygon: Feature = { + type: 'Feature', + properties: {}, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [0, 0], + [1, 0], + [1, 1], + [0, 0], + ], + ], + }, + }; + mockSchemaValidator.validateProductFeature.mockResolvedValue(featurePolygon.geometry); + + const response = await productManager.read(productShapefilePath); + + expect(response).toStrictEqual(featurePolygon.geometry); + expect(mockSchemaValidator.validateProductFeature).toHaveBeenCalledTimes(1); + expect(mockOpenShapefile).toHaveBeenCalledTimes(2); + expect(mockReadShapefile).toHaveBeenCalledTimes(4); + }); + }); + it('should successfully read product shapefile and return product geometry - polygon', async () => { const { productShapefilePath } = generateInputFiles(); const featurePolygon: Feature = { @@ -115,6 +165,18 @@ describe('ProductManager', () => { ); }); + it('should throw an error when reading and processing product shapefile throws an unexpected error', async () => { + const { productShapefilePath } = generateInputFiles(); + const errorMessage = `Shapefile ${productShapefilePath} has no valid features or vertices`; + jest.spyOn(ShapefileChunkReader.prototype, 'readAndProcess').mockRejectedValue(new Error(errorMessage)); + + const promise = productManager.read(productShapefilePath); + + await expect(promise).rejects.toThrow( + new ValidationError(`Failed to read product shapefile of file: ${productShapefilePath}: ${errorMessage}`) + ); + }); + it('should throw an error when product shapefile has 0 features', async () => { const { productShapefilePath } = generateInputFiles(); const errorMessage = 'error'; diff --git a/tests/unit/ingestion/validators/sourceValidator.spec.ts 
b/tests/unit/ingestion/validators/sourceValidator.spec.ts index 3a4b0fdd..ba166d74 100644 --- a/tests/unit/ingestion/validators/sourceValidator.spec.ts +++ b/tests/unit/ingestion/validators/sourceValidator.spec.ts @@ -11,33 +11,68 @@ import { generateInputFiles } from '../../../mocks/mockFactory'; describe('SourceValidator', () => { let sourceValidator: SourceValidator; - let mockGdalInfoManager: GdalInfoManager; - let mockGpkgManager: GpkgManager; - let fspAccessSpy: jest.SpyInstance; + const mockGpkgManager = { validateGpkgFiles: jest.fn() } satisfies Partial; + const mockGdalInfoManager = { getInfoData: jest.fn(), validateInfoData: jest.fn() } satisfies Partial; + const fspAccessSpy = jest.spyOn(fsp, 'access'); beforeEach(() => { - mockGdalInfoManager = { getInfoData: jest.fn, validateInfoData: jest.fn } as unknown as GdalInfoManager; - mockGpkgManager = { validateGpkgFiles: jest.fn } as unknown as GpkgManager; sourceValidator = new SourceValidator( jsLogger({ enabled: false }), configMock, trace.getTracer('testTracer'), - mockGdalInfoManager, - mockGpkgManager + mockGdalInfoManager as unknown as GdalInfoManager, + mockGpkgManager as unknown as GpkgManager ); - fspAccessSpy = jest.spyOn(fsp, 'access'); }); + afterEach(() => { - jest.clearAllMocks(); + jest.resetAllMocks(); + }); + + describe('validateGdalInfo', () => { + it('should succesfully validate gdal info with no errors', async () => { + const { gpkgFilesPath } = generateInputFiles(); + mockGdalInfoManager.getInfoData.mockResolvedValue([mockGdalInfoDataWithFile]); + + await expect(sourceValidator.validateGdalInfo(gpkgFilesPath)).resolves.not.toThrow(); + + expect(mockGdalInfoManager.validateInfoData).toHaveBeenCalledWith([mockGdalInfoDataWithFile]); + expect(mockGdalInfoManager.validateInfoData).toHaveBeenCalledTimes(gpkgFilesPath.length); + }); + }); + + describe('validateGpkgFiles', () => { + it('should succesfully validate gpkg with no errors', () => { + const { gpkgFilesPath } = generateInputFiles(); 
+ mockGpkgManager.validateGpkgFiles.mockReturnValue(undefined); + + const action = () => sourceValidator.validateGpkgFiles(gpkgFilesPath); + + expect(action).not.toThrow(); + expect(mockGpkgManager.validateGpkgFiles).toHaveBeenCalledTimes(gpkgFilesPath.length); + }); + + it('should throw error when gpkg validation fails', () => { + const { gpkgFilesPath } = generateInputFiles(); + mockGpkgManager.validateGpkgFiles.mockImplementation(() => { + throw new Error(); + }); + + const action = () => sourceValidator.validateGpkgFiles(gpkgFilesPath); + + expect(action).toThrow(); + expect(mockGpkgManager.validateGpkgFiles).toHaveBeenCalledTimes(gpkgFilesPath.length); + }); }); describe('validateFilesExist', () => { - it('should validate that all files exist', async () => { + it('should successfully validate that all files exist', async () => { const { gpkgFilesPath } = generateInputFiles(); fspAccessSpy.mockResolvedValue(undefined); - await sourceValidator.validateFilesExist(gpkgFilesPath); + const promise = sourceValidator.validateFilesExist(gpkgFilesPath); + await expect(promise).resolves.not.toThrow(); expect(fspAccessSpy).toHaveBeenCalledTimes(gpkgFilesPath.length); gpkgFilesPath.forEach((filePath) => { expect(fspAccessSpy).toHaveBeenNthCalledWith(1, filePath, fsConstants.F_OK); @@ -57,17 +92,4 @@ describe('SourceValidator', () => { }); }); }); - - describe('validateGdalInfo', () => { - it('should succesfully validate gdal info with no errors', async () => { - const { gpkgFilesPath } = generateInputFiles(); - jest.spyOn(mockGdalInfoManager, 'getInfoData').mockResolvedValue([mockGdalInfoDataWithFile]); - const gdalInfoValidatorSpy = jest.spyOn(mockGdalInfoManager, 'validateInfoData'); - - await expect(sourceValidator.validateGdalInfo(gpkgFilesPath)).resolves.not.toThrow(); - - expect(gdalInfoValidatorSpy).toHaveBeenCalledWith([mockGdalInfoDataWithFile]); - expect(gdalInfoValidatorSpy).toHaveBeenCalledTimes(gpkgFilesPath.length); - }); - }); }); diff --git 
a/tests/unit/serverClients/db/sqliteClient.spec.ts b/tests/unit/serverClients/db/sqliteClient.spec.ts index 010f39ad..6925740f 100644 --- a/tests/unit/serverClients/db/sqliteClient.spec.ts +++ b/tests/unit/serverClients/db/sqliteClient.spec.ts @@ -120,7 +120,7 @@ describe('SQLClient', () => { expect(result).toBe(false); }); - it('should throw SqliteError error - isGpkgIndexExist', function () { + it('should throw SqliteError error - isGpkgIndexExist', () => { jest.spyOn(SQLiteClient.prototype, 'getDB').mockImplementation(() => { throw sqlLiteError; }); @@ -136,7 +136,7 @@ describe('SQLClient', () => { expect(handleErrorSpy).toHaveBeenCalled(); }); - it('should throw Unknown error - isGpkgIndexExist', function () { + it('should throw Unknown error - isGpkgIndexExist', () => { jest.spyOn(SQLiteClient.prototype, 'getDB').mockImplementation(() => { throw new Error('Unexpected Error'); }); diff --git a/tests/unit/utils/gdalUtilities.spec.ts b/tests/unit/utils/gdalUtilities.spec.ts index b72a5d6d..c71a3044 100644 --- a/tests/unit/utils/gdalUtilities.spec.ts +++ b/tests/unit/utils/gdalUtilities.spec.ts @@ -1,33 +1,67 @@ +import { faker } from '@faker-js/faker'; import jsLogger from '@map-colonies/js-logger'; import { trace } from '@opentelemetry/api'; -import { getApp } from '../../../src/app'; +import { Dataset } from 'gdal-async'; +import type { GdalInfo } from '../../../src/ingestion/schemas/gdalDataSchema'; import { InfoData } from '../../../src/ingestion/schemas/infoDataSchema'; import { GdalUtilities } from '../../../src/utils/gdal/gdalUtilities'; -import { INGESTION_SCHEMAS_VALIDATOR_SYMBOL, SchemasValidator } from '../../../src/utils/validation/schemasValidator'; +import type { SchemasValidator } from '../../../src/utils/validation/schemasValidator'; import { registerDefaultConfig } from '../../mocks/configMock'; +import { generateInputFiles } from '../../mocks/mockFactory'; -let gdalUtilities: GdalUtilities; +// eslint-disable-next-line 
@typescript-eslint/no-explicit-any +type OverloadedReturnAndParamsType<T extends (...args: any[]) => any> = T extends { + (...args: infer P1): infer R1; + (...args: infer P2): infer R2; +} + ? [[P1, R1], [P2, R2]] + : T extends (...args: infer P) => infer R + ? [[P, R]] + : never; + +type GdalAsync = typeof import('gdal-async'); +type GdalOpenAsyncReturn = OverloadedReturnAndParamsType<GdalAsync['openAsync']>[1][1]; +type GdalOpenAsyncParameters = OverloadedReturnAndParamsType<GdalAsync['openAsync']>[1][0]; +const mockGdalOpenAsync = jest.fn<GdalOpenAsyncReturn, GdalOpenAsyncParameters>(); +const mockGdalInfoAsync = jest.fn<ReturnType<GdalAsync['infoAsync']>, Parameters<GdalAsync['infoAsync']>>(); + +jest.mock('gdal-async', () => { + const originalModule = jest.requireActual('gdal-async'); + return { + ...originalModule, + openAsync: jest.fn<GdalOpenAsyncReturn, GdalOpenAsyncParameters>().mockImplementation(async (...args) => { + return mockGdalOpenAsync(...args); + }), + infoAsync: jest.fn<ReturnType<GdalAsync['infoAsync']>, Parameters<GdalAsync['infoAsync']>>().mockImplementation(async (...args) => { + return mockGdalInfoAsync(...args); + }), + } as unknown as GdalAsync; +}); describe('gdalUtilities', () => { - beforeEach(function () { - const [, container] = getApp(); - const schemasValidator = container.resolve(INGESTION_SCHEMAS_VALIDATOR_SYMBOL); - jest.resetAllMocks(); - gdalUtilities = new GdalUtilities(jsLogger({ enabled: false }), trace.getTracer('testTracer'), schemasValidator); + let gdalUtilities: GdalUtilities; + const schemasValidator = { validateGdalInfo: jest.fn<Promise<GdalInfo>, [unknown]>() } satisfies Partial<SchemasValidator>; + + beforeEach(() => { + gdalUtilities = new GdalUtilities(jsLogger({ enabled: false }), trace.getTracer('testTracer'), schemasValidator as unknown as SchemasValidator); + registerDefaultConfig(); }); afterEach(() => { - jest.resetAllMocks(); + jest.restoreAllMocks(); }); describe('getInfoData', () => { it('should extract CRS, fileFormat, pixelSize and footprint from gpkg file', async () => { - const filePath = 'tests/mocks/testFiles/gpkg/validIndexed.gpkg'; - const result = await gdalUtilities.getInfoData(filePath); - const expected: InfoData = { - crs: 4326, - extentPolygon: { + const { gpkgFilesPath } = generateInputFiles(); + const 
mockedGeotransform = faker.helpers.multiple(() => faker.number.float(), { count: 6 }); + const mockedDataset = { geoTransform: mockedGeotransform, close: () => {} }; + const infoData = { + driverShortName: 'GPKG', + geoTransform: mockedGeotransform, + // eslint-disable-next-line @typescript-eslint/naming-convention + stac: { 'proj:epsg': 4326 }, + wgs84Extent: { coordinates: [ [ [34.61517, 34.10156], @@ -37,57 +71,144 @@ describe('gdalUtilities', () => { [34.61517, 34.10156], ], ], - type: 'Polygon', + type: 'Polygon' as const, }, - fileFormat: 'GPKG', - pixelSize: 0.001373291015625, }; + const expected: InfoData = { + crs: infoData.stac['proj:epsg'], + extentPolygon: infoData.wgs84Extent, + fileFormat: infoData.driverShortName, + pixelSize: mockedGeotransform[1], + }; + mockGdalOpenAsync.mockResolvedValue(mockedDataset as unknown as Dataset); + mockGdalInfoAsync.mockResolvedValue(JSON.stringify(infoData)); + schemasValidator.validateGdalInfo.mockResolvedValue(infoData); + + const response = await gdalUtilities.getInfoData(gpkgFilesPath[0]); - expect(result).toStrictEqual(expected); + expect(response).toStrictEqual(expected); + expect(mockGdalOpenAsync).toHaveBeenCalledTimes(1); + expect(mockGdalInfoAsync).toHaveBeenCalledTimes(1); + expect(schemasValidator.validateGdalInfo).toHaveBeenCalledTimes(1); }); - //Added this test to make sure that pixelSize is not a rounded number but the exact number resolution - it('should extract CRS, fileFormat, pixelSize and footprint from gpkg file with zoom level 21', async () => { - const filePath = 'tests/mocks/testFiles/gpkg/zoom21.gpkg'; - const result = await gdalUtilities.getInfoData(filePath); - const expected: InfoData = { - crs: 4326, - extentPolygon: { - type: 'Polygon', + it('should throw error when fails to open dataset', async () => { + const { gpkgFilesPath } = generateInputFiles(); + const errorMessage = 'error'; + mockGdalOpenAsync.mockRejectedValue(new Error(errorMessage)); + + const promise = 
gdalUtilities.getInfoData(gpkgFilesPath[0]); + + await expect(promise).rejects.toThrow(new Error(`failed to get gdal info on file: ${gpkgFilesPath[0]}: ${errorMessage}`)); + }); + + it('should throw error when fails to read info from dataset', async () => { + const { gpkgFilesPath } = generateInputFiles(); + const errorMessage = 'error'; + const mockedGeotransform = faker.helpers.multiple(() => faker.number.float(), { count: 6 }); + const mockedDataset = { geoTransform: mockedGeotransform, close: () => {} }; + mockGdalOpenAsync.mockResolvedValue(mockedDataset as unknown as Dataset); + mockGdalInfoAsync.mockRejectedValue(new Error(errorMessage)); + + const promise = gdalUtilities.getInfoData(gpkgFilesPath[0]); + + await expect(promise).rejects.toThrow(new Error(`failed to get gdal info on file: ${gpkgFilesPath[0]}: ${errorMessage}`)); + }); + + it('should throw error when fails to parse info from dataset', async () => { + const { gpkgFilesPath } = generateInputFiles(); + const errorMessage = 'error'; + const mockedGeotransform = faker.helpers.multiple(() => faker.number.float(), { count: 6 }); + const mockedDataset = { geoTransform: null, close: () => {} }; + const infoData = { + driverShortName: 'GPKG', + geoTransform: mockedGeotransform, + // eslint-disable-next-line @typescript-eslint/naming-convention + stac: { 'proj:epsg': 4326 }, + wgs84Extent: { coordinates: [ [ - [34.4870513, 31.5316438], - [34.4870513, 31.5297716], - [34.4892373, 31.5297716], - [34.4892373, 31.5316438], - [34.4870513, 31.5316438], + [34.61517, 34.10156], + [34.61517, 32.242124], + [36.4361539, 32.242124], + [36.4361539, 34.10156], + [34.61517, 34.10156], ], ], + type: 'Polygon' as const, }, - fileFormat: 'GPKG', - pixelSize: 0.000000335276126861572, }; + mockGdalOpenAsync.mockResolvedValue(mockedDataset as unknown as Dataset); + mockGdalInfoAsync.mockResolvedValue(JSON.stringify(infoData)); + jest.spyOn(JSON, 'parse').mockImplementation(() => { + throw new Error(errorMessage); + }); + const 
promise = gdalUtilities.getInfoData(gpkgFilesPath[0]); - expect(result).toStrictEqual(expected); + await expect(promise).rejects.toThrow(new Error(`failed to get gdal info on file: ${gpkgFilesPath[0]}: ${errorMessage}`)); }); - it('should throw error when fails to create dataset', async () => { - const filePath = 'tests/mocks/testFiles/gpkg/invalidFile.gpkg'; - const action = async () => gdalUtilities.getInfoData(filePath); - await expect(action).rejects.toThrow(Error); - }); + it('should throw error when fails to validate info from dataset', async () => { + const { gpkgFilesPath } = generateInputFiles(); + const errorMessage = 'error'; + const mockedGeotransform = faker.helpers.multiple(() => faker.number.float(), { count: 6 }); + const mockedDataset = { geoTransform: null, close: () => {} }; + const infoData = { + driverShortName: 'GPKG', + geoTransform: mockedGeotransform, + // eslint-disable-next-line @typescript-eslint/naming-convention + stac: { 'proj:epsg': 4326 }, + wgs84Extent: { + coordinates: [ + [ + [34.61517, 34.10156], + [34.61517, 32.242124], + [36.4361539, 32.242124], + [36.4361539, 34.10156], + [34.61517, 34.10156], + ], + ], + type: 'Polygon' as const, + }, + }; + mockGdalOpenAsync.mockResolvedValue(mockedDataset as unknown as Dataset); + mockGdalInfoAsync.mockResolvedValue(JSON.stringify(infoData)); + schemasValidator.validateGdalInfo.mockRejectedValue(new Error(errorMessage)); + + const promise = gdalUtilities.getInfoData(gpkgFilesPath[0]); - it('should throw error when fails to extract data', async () => { - const filePath = 'tests/mocks/files/unsupportedFormats/world.jp2'; - const action = async () => gdalUtilities.getInfoData(filePath); - await expect(action).rejects.toThrow(Error); + await expect(promise).rejects.toThrow(new Error(`failed to get gdal info on file: ${gpkgFilesPath[0]}: ${errorMessage}`)); }); - //TODO: This test should pass when we have appropriate GDAL version with ECW licence - it('should throw error when recieves ecw 
file', async () => { - const filePath = 'tests/mocks/files/unsupportedFormats/test.ecw'; - const action = async () => gdalUtilities.getInfoData(filePath); - await expect(action).rejects.toThrow(Error); + it('should throw error when fails to read geoTransform info from dataset', async () => { + const { gpkgFilesPath } = generateInputFiles(); + const mockedGeotransform = faker.helpers.multiple(() => faker.number.float(), { count: 6 }); + const mockedDataset = { geoTransform: null, close: () => {} }; + const infoData = { + driverShortName: 'GPKG', + geoTransform: mockedGeotransform, + // eslint-disable-next-line @typescript-eslint/naming-convention + stac: { 'proj:epsg': 4326 }, + wgs84Extent: { + coordinates: [ + [ + [34.61517, 34.10156], + [34.61517, 32.242124], + [36.4361539, 32.242124], + [36.4361539, 34.10156], + [34.61517, 34.10156], + ], + ], + type: 'Polygon' as const, + }, + }; + mockGdalOpenAsync.mockResolvedValue(mockedDataset as unknown as Dataset); + mockGdalInfoAsync.mockResolvedValue(JSON.stringify(infoData)); + schemasValidator.validateGdalInfo.mockResolvedValue(infoData as unknown as GdalInfo); + + const promise = gdalUtilities.getInfoData(gpkgFilesPath[0]); + + await expect(promise).rejects.toThrow(new Error(`failed to get gdal info on file: ${gpkgFilesPath[0]}: dataset.geoTransform is null`)); }); }); }); diff --git a/tests/unit/validate/models/validateManager.spec.ts b/tests/unit/validate/models/validateManager.spec.ts index 17fc94ba..a4224b03 100644 --- a/tests/unit/validate/models/validateManager.spec.ts +++ b/tests/unit/validate/models/validateManager.spec.ts @@ -1,7 +1,6 @@ +import { faker } from '@faker-js/faker'; import jsLogger from '@map-colonies/js-logger'; import { trace } from '@opentelemetry/api'; -import { container } from 'tsyringe'; -import { SERVICES } from '../../../../src/common/constants'; import { FileNotFoundError, GdalInfoError } from '../../../../src/ingestion/errors/ingestionErrors'; import { SourceValidator } from 
'../../../../src/ingestion/validators/sourceValidator'; import { GpkgError } from '../../../../src/serviceClients/database/errors'; @@ -23,10 +22,6 @@ describe('ValidateManager', () => { beforeEach(() => { registerDefaultConfig(); - // Reset container for a clean test - container.reset(); - container.register(SERVICES.TRACER, { useValue: testTracer }); - container.register(SERVICES.LOGGER, { useValue: testLogger }); validateManager = new ValidateManager(testLogger, configMock, testTracer, sourceValidator as unknown as SourceValidator); }); @@ -38,8 +33,8 @@ describe('ValidateManager', () => { describe('validateGpkgs', () => { it('should return successfully validation response when all validations pass', async () => { - sourceValidator.validateFilesExist.mockImplementation(async () => Promise.resolve()); - sourceValidator.validateGdalInfo.mockImplementation(async () => Promise.resolve()); + sourceValidator.validateFilesExist.mockResolvedValue(undefined); + sourceValidator.validateGdalInfo.mockResolvedValue(undefined); sourceValidator.validateGpkgFiles.mockReturnValue(undefined); const response = await validateManager.validateGpkgs({ gpkgFilesPath: generateInputFiles().gpkgFilesPath }); @@ -58,24 +53,86 @@ describe('ValidateManager', () => { }); it('should return failed validation response when gdal info validation throws an error', async () => { + const validateGpkgRequest = { gpkgFilesPath: generateInputFiles().gpkgFilesPath }; + const expectedError = 'Error while validating gdal info'; sourceValidator.validateFilesExist.mockResolvedValue(undefined); - sourceValidator.validateGdalInfo.mockRejectedValue(new GdalInfoError(expectedError)); - const response = await validateManager.validateGpkgs({ gpkgFilesPath: generateInputFiles().gpkgFilesPath }); + const response = await validateManager.validateGpkgs(validateGpkgRequest); - expect(response).toStrictEqual({ 
isValid: false, message: 'Error while validating gdal info' }); + expect(response).toStrictEqual({ isValid: false, message: expectedError }); }); it('should return failed validation response when gpkg validation throws an error', async () => { - sourceValidator.validateFilesExist.mockImplementation(async () => Promise.resolve()); - sourceValidator.validateGdalInfo.mockImplementation(async () => Promise.resolve()); + const validateGpkgRequest = { gpkgFilesPath: generateInputFiles().gpkgFilesPath }; + const expectedError = 'Error while validating gpkg files'; + sourceValidator.validateFilesExist.mockResolvedValue(undefined); + sourceValidator.validateGdalInfo.mockResolvedValue(undefined); sourceValidator.validateGpkgFiles.mockImplementation(() => { - throw new GpkgError('Error while validating gpkg files'); + throw new GpkgError(expectedError); }); - const response = await validateManager.validateGpkgs({ gpkgFilesPath: generateInputFiles().gpkgFilesPath }); + const response = await validateManager.validateGpkgs(validateGpkgRequest); + + expect(response).toStrictEqual({ isValid: false, message: expectedError }); + }); + + it('should return failed validation response when gpkg validation throws unexpected error', async () => { + const validateGpkgRequest = { gpkgFilesPath: generateInputFiles().gpkgFilesPath }; + const expectedError = 'error'; + sourceValidator.validateFilesExist.mockResolvedValue(undefined); + sourceValidator.validateGdalInfo.mockResolvedValue(undefined); + sourceValidator.validateGpkgFiles.mockImplementation(() => { + // eslint-disable-next-line @typescript-eslint/no-throw-literal + throw { error: expectedError }; + }); + + const promise = validateManager.validateGpkgs(validateGpkgRequest); + + await expect(promise).rejects.toEqual({ error: expectedError }); + }); + }); + + describe('validateShapefiles', () => { + it('should return successful validation response when all validations pass', async () => { + const shapefilePaths = 
[generateInputFiles().metadataShapefilePath]; + sourceValidator.validateFilesExist.mockResolvedValue(undefined); + + const promise = validateManager.validateShapefiles(shapefilePaths); + + await expect(promise).resolves.not.toThrow(); + }); + + it('should throw file not found error when file does not exist', async () => { + const shapefilePaths = faker.helpers.multiple(() => generateInputFiles().metadataShapefilePath); + sourceValidator.validateFilesExist.mockRejectedValue(new FileNotFoundError(shapefilePaths)); + + const promise = validateManager.validateShapefiles(shapefilePaths); + + await expect(promise).rejects.toThrow(new FileNotFoundError(shapefilePaths)); + }); + + it('should throw error when file is invalid', async () => { + const shapefilePaths = faker.helpers.multiple(() => generateInputFiles().metadataShapefilePath); + const expectedError = 'error'; + sourceValidator.validateFilesExist.mockRejectedValue(new Error(expectedError)); + + const promise = validateManager.validateShapefiles(shapefilePaths); + + await expect(promise).rejects.toThrow(new Error(expectedError)); + }); + + it('should throw error when unexpected error occurs', async () => { + const shapefilePaths = faker.helpers.multiple(() => generateInputFiles().metadataShapefilePath); + const expectedError = 'error'; + sourceValidator.validateFilesExist.mockImplementation(() => { + // eslint-disable-next-line @typescript-eslint/no-throw-literal + throw { error: expectedError }; + }); + + const promise = validateManager.validateShapefiles(shapefilePaths); - expect(response).toStrictEqual({ isValid: false, message: 'Error while validating gpkg files' }); + await expect(promise).rejects.toEqual({ error: expectedError }); }); }); });