diff --git a/.eslintrc.js b/.eslintrc.js
index 6d9ac42..3b5bd29 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -22,7 +22,7 @@ module.exports = {
     'no-unused-vars': ['error', { ignoreRestSiblings: true }],
     indent: ['error', 2, { SwitchCase: 1 }],
     'linebreak-style': ['error', 'unix'],
-    quotes: ['error', 'single'],
+    // quotes: ['error', 'single'],
     semi: ['error', 'always'],
     'no-only-tests/no-only-tests': 'error',
   },
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..95d07c5
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,20 @@
+version: 2
+updates:
+  - package-ecosystem: "npm"
+    directory: "/"
+    target-branch: "main"
+    versioning-strategy: "increase"
+    schedule:
+      interval: "weekly"
+    groups:
+      production-dependencies:
+        dependency-type: "production"
+        update-types:
+          - "minor"
+          - "patch"
+      development-dependencies:
+        dependency-type: "development"
+        update-types:
+          - "minor"
+          - "patch"
+    rebase-strategy: "auto"
diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml
new file mode 100644
index 0000000..ae1eac6
--- /dev/null
+++ b/.github/workflows/ci-tests.yml
@@ -0,0 +1,41 @@
+name: CI tests
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches:
+      - main
+    paths:
+      - ".github/**.yml"
+      - "**/**.js"
+      - "package.json"
+
+jobs:
+  pr-tests:
+    name: Install, lint, test
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        node-version: [18.x]
+        os: [ubuntu-latest, windows-latest]
+        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3
+        with:
+          node-version: ${{ matrix.node-version }}
+
+      - name: Install npm 7
+        run: npm i -g npm@7 --registry=https://registry.npmjs.org
+
+      - name: Install
+        run: npm ci
+
+      - name: Lint
+        run: npm run lint:ci
+
+      - name: Unit tests
+        run: npm test
+
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 0000000..25a17c3
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,73 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+  pull_request:
+    branches: [ "main" ]
+    paths:
+      - "**/*.js"
+  schedule:
+    - cron: '35 13 * * 6'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'javascript' ]
+        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      # Initializes the CodeQL tools for scanning.
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v2
+        with:
+          languages: ${{ matrix.language }}
+          # If you wish to specify custom queries, you can do so here or in a config file.
+          # By default, queries listed here will override any specified in a config file.
+          # Prefix the list here with "+" to use these queries and those in the config file.
+
+          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+          # queries: security-extended,security-and-quality
+
+
+      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+      # If this step fails, then you should remove it and run the build manually (see below)
+      - name: Autobuild
+        uses: github/codeql-action/autobuild@v2
+
+      # ℹī¸ Command-line programs to run using the OS shell.
+      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+
+      # If the Autobuild fails above, remove it and uncomment the following three lines,
+      # modifying them (or adding more) to build your code; refer to the example below for guidance.
+
+      # - run: |
+      #     echo "Run, Build Application using script"
+      #     ./location_of_script_within_repo/buildscript.sh
+
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v2
+        with:
+          category: "/language:${{matrix.language}}"
diff --git a/jest.config.js b/jest.config.js
new file mode 100644
index 0000000..38ca438
--- /dev/null
+++ b/jest.config.js
@@ -0,0 +1,37 @@
+/**
+ * For a detailed explanation regarding each configuration property, visit:
+ * https://jestjs.io/docs/configuration
+ */
+
+/** @type {import('jest').Config} */
+const config = {
+  // Indicates which provider should be used to instrument code for coverage
+  coverageProvider: 'v8',
+
+  // A list of reporter names that Jest uses when writing coverage reports
+  // coverageReporters: [
+  //   "json",
+  //   "text",
+  //   "lcov",
+  //   "clover"
+  // ],
+
+  roots: ['src'],
+
+  // The glob patterns Jest uses to detect test files
+  testMatch: ['**/*.spec.js'],
+
+  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
+  testPathIgnorePatterns: ['/node_modules/'],
+
+
+  coverageDirectory: './coverage',
+  testEnvironment: 'node',
+  // The regexp pattern or array of patterns that Jest uses to detect test files
+  // testRegex: [],
+
+  // This option allows the use of a custom results processor
+  // testResultsProcessor: undefined,
+};
+
+module.exports = config;
diff --git a/src/index.js b/src/index.js
index f38a9e4..2b803f4 100644
--- a/src/index.js
+++ b/src/index.js
@@ -3,7 +3,7 @@ const packageInfo = require('../package.json');
 const provider = {
   type: 'provider',
   version: packageInfo.version,
-  name: 'elastic',
+  name: 'elastic-sql',
   Model: require('./model'),
 };
 
diff --git a/src/index.spec.js b/src/index.spec.js
new file mode 100644
index 0000000..b6b05b2
--- /dev/null
+++ b/src/index.spec.js
@@ -0,0 +1,10 @@
+const { name, type, Model, version } = require('./index');
+
+describe('registration object', () => {
+  test('has expected properties', () => {
+    expect(name).toBe('elastic-sql');
+    expect(type).toBe('provider');
+    expect(version).toBeDefined();
+    expect(Model).toBeInstanceOf(Function);
+  });
+});
\ No newline at end of file
diff --git a/src/model.js b/src/model.js
index 0bc4884..5802de3 100644
--- a/src/model.js
+++ b/src/model.js
@@ -20,7 +20,7 @@ class Model {
   #geometryFieldMap;
   #idFieldMap;
 
-  constructor({ logger }, { conn, geometryFieldMap, idFieldMap }) {
+  constructor({ logger }, { conn, geometryFieldMap, idFieldMap } = {}) {
     this.#logger = logger;
 
     // Validate registration options
@@ -85,6 +85,7 @@
     geojson.metadata = { idField };
 
     geojson.filtersApplied = generateFiltersApplied({
+      ...geoserviceParams,
       idField,
       geometryField,
       geometry,
@@ -99,15 +100,14 @@
   #handleError(error) {
     const messagePrefix = 'Provider error:';
-    const statusCode = error?.body?.status || 500;
 
     if (error.name === 'ResponseError') {
       this.#logger.error(
         `${messagePrefix} data-store query failure, ${error.message}`,
       );
 
-      const message = statusCode === 400 ? 'invalid input' : error.message;
+      const message = error?.body?.status === 400 ? 'invalid input' : error.message;
       const err = new Error(message);
-      err.code = statusCode;
+      err.code = error?.body?.status || 500;
       return err;
     }
 
@@ -171,7 +171,7 @@ function buildGeoshapeFilter({ geometryField, geometry, inSR, spatialRel }) {
   const { geometry: geometryFilter, relation } = standardizeGeometryFilter({
     geometry,
     inSR,
-    reprojecitonSR: 4326,
+    reprojectionSR: 4326,
     spatialRel,
   });
 
@@ -213,17 +213,22 @@ function convertRowToFeature(featureAttributeKeys, geometryField, row) {
   };
 }
 
-function generateFiltersApplied({ idField, geometry }) {
-  const filtersApplied = {
-    where: true,
-    orderByFields: true,
-  };
+function generateFiltersApplied({ where, objectIds, orderByFields, idField, geometry, geometryField }) {
+  const filtersApplied = {};
 
-  if (idField) {
+  if (where) {
+    filtersApplied.where = true;
+  }
+
+  if (objectIds && idField) {
     filtersApplied.objectIds = true;
   }
 
-  if (geometry) {
+  if (orderByFields) {
+    filtersApplied.orderByFields = true;
+  }
+
+  if (geometry && geometryField) {
     filtersApplied.geometry = true;
   }
 
diff --git a/src/model.spec.js b/src/model.spec.js
new file mode 100644
index 0000000..c9c542a
--- /dev/null
+++ b/src/model.spec.js
@@ -0,0 +1,529 @@
+const { promisify } = require('util');
+const Provider = require('./model');
+const elastic = require('@elastic/elasticsearch');
+
+jest.mock('@elastic/elasticsearch');
+const elasticQueryMock = jest.fn(() => {
+  return {
+    rows: [
+      ['AK64', 'POINT (-144.67 64.48)', 'GLACIER CREEK'],
+      ['SD43', 'POINT (-102.84 43.08)', 'SANDOZ'],
+    ],
+    columns: [{ name: 'id' }, { name: 'location' }, { name: 'name' }],
+  };
+});
+
+elastic.Client.mockImplementation(function () {
+  return {
+    sql: {
+      query: elasticQueryMock,
+    },
+  };
+});
+
+const logger = {
+  error: () => {},
+};
+
+describe('Model', () => {
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  describe('constructor', () => {
+    test('instantiate without options', () => {
+      const provider = new Provider(
+        { logger },
+        { conn: { node: 'http://localhost' } },
+      );
+      expect(provider.getData).toBeInstanceOf(Function);
+    });
+
+    test('instantiate with options', () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      expect(provider.getData).toBeInstanceOf(Function);
+    });
+
+    test('instantiate with invalid options', () => {
+      try {
+        new Provider(
+          { logger },
+          {
+            conn: 'test',
+          },
+        );
+        throw new Error('should have thrown');
+      } catch (error) {
+        expect(error.message).toBe(
+          'invalid "conn", must be of type object',
+        );
+      }
+    });
+  });
+
+  describe('getData', () => {
+    test('with field maps, no query options', async () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      const result = await getData({ query: {}, params: { id: 'my-index' } });
+      expect(result).toEqual({
+        type: 'FeatureCollection',
+        features: [
+          {
+            geometry: { type: 'Point', coordinates: [-144.67, 64.48] },
+            properties: { id: 'AK64', name: 'GLACIER CREEK' },
+          },
+          {
+            geometry: { type: 'Point', coordinates: [-102.84, 43.08] },
+            properties: { id: 'SD43', name: 'SANDOZ' },
+          },
+        ],
+        metadata: { idField: 'id' },
+        filtersApplied: {},
+      });
+      expect(elasticQueryMock.mock.calls[0]).toEqual([
+        {
+          fetch_size: 1000,
+          query: 'SELECT *, location FROM my-index',
+        },
+      ]);
+    });
+
+    test('with field maps, with geometry filter', async () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      const result = await getData({
+        query: {
+          geometry: '-123,45,-120,49',
+        },
+        params: { id: 'my-index' },
+      });
+
+      expect(result).toEqual({
+        type: 'FeatureCollection',
+        features: [
+          {
+            geometry: { type: 'Point', coordinates: [-144.67, 64.48] },
+            properties: { id: 'AK64', name: 'GLACIER CREEK' },
+          },
+          {
+            geometry: { type: 'Point', coordinates: [-102.84, 43.08] },
+            properties: { id: 'SD43', name: 'SANDOZ' },
+          },
+        ],
+        metadata: { idField: 'id' },
+        filtersApplied: {
+          geometry: true,
+        },
+      });
+      expect(elasticQueryMock.mock.calls[0]).toEqual([
+        {
+          fetch_size: 1000,
+          query: 'SELECT *, location FROM my-index',
+          filter: {
+            geo_shape: {
+              location: {
+                relation: 'intersects',
+                shape: {
+                  coordinates: [
+                    [
+                      [-123, 45],
+                      [-120, 45],
+                      [-120, 49],
+                      [-123, 49],
+                      [-123, 45],
+                    ],
+                  ],
+                  type: 'polygon',
+                },
+              },
+            },
+          },
+        },
+      ]);
+    });
+
+    test('with field maps, with geometry filter w/ relation', async () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      const result = await getData({
+        query: {
+          geometry: '-123,45,-120,49',
+          spatialRel: 'esriSpatialRelUnknown'
+        },
+        params: { id: 'my-index' },
+      });
+
+      expect(result).toEqual({
+        type: 'FeatureCollection',
+        features: [
+          {
+            geometry: { type: 'Point', coordinates: [-144.67, 64.48] },
+            properties: { id: 'AK64', name: 'GLACIER CREEK' },
+          },
+          {
+            geometry: { type: 'Point', coordinates: [-102.84, 43.08] },
+            properties: { id: 'SD43', name: 'SANDOZ' },
+          },
+        ],
+        metadata: { idField: 'id' },
+        filtersApplied: {
+          geometry: true,
+        },
+      });
+
+      expect(elasticQueryMock.mock.calls[0]).toEqual([
+        {
+          fetch_size: 1000,
+          query: 'SELECT *, location FROM my-index',
+          filter: {
+            geo_shape: {
+              location: {
+                relation: 'intersects',
+                shape: {
+                  coordinates: [
+                    [
+                      [-123, 45],
+                      [-120, 45],
+                      [-120, 49],
+                      [-123, 49],
+                      [-123, 45],
+                    ],
+                  ],
+                  type: 'polygon',
+                },
+              },
+            },
+          },
+        },
+      ]);
+    });
+
+    test('with field maps, with where filter', async () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      const result = await getData({
+        query: {
+          where: 'foo=\'bar\'',
+        },
+        params: { id: 'my-index' },
+      });
+
+      expect(result).toEqual({
+        type: 'FeatureCollection',
+        features: [
+          {
+            geometry: { type: 'Point', coordinates: [-144.67, 64.48] },
+            properties: { id: 'AK64', name: 'GLACIER CREEK' },
+          },
+          {
+            geometry: { type: 'Point', coordinates: [-102.84, 43.08] },
+            properties: { id: 'SD43', name: 'SANDOZ' },
+          },
+        ],
+        metadata: { idField: 'id' },
+        filtersApplied: {
+          where: true,
+        },
+      });
+      expect(elasticQueryMock.mock.calls[0]).toEqual([
+        {
+          fetch_size: 1000,
+          query: "SELECT *, location FROM my-index WHERE foo='bar'",
+        },
+      ]);
+    });
+
+    test('with field maps, with objectIds filter', async () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      const result = await getData({
+        query: {
+          objectIds: 'a,b,c',
+        },
+        params: { id: 'my-index' },
+      });
+
+      expect(result).toEqual({
+        type: 'FeatureCollection',
+        features: [
+          {
+            geometry: { type: 'Point', coordinates: [-144.67, 64.48] },
+            properties: { id: 'AK64', name: 'GLACIER CREEK' },
+          },
+          {
+            geometry: { type: 'Point', coordinates: [-102.84, 43.08] },
+            properties: { id: 'SD43', name: 'SANDOZ' },
+          },
+        ],
+        metadata: { idField: 'id' },
+        filtersApplied: {
+          objectIds: true,
+        },
+      });
+      expect(elasticQueryMock.mock.calls[0]).toEqual([
+        {
+          fetch_size: 1000,
+          query: 'SELECT *, location FROM my-index WHERE id IN (\'a\',\'b\',\'c\')',
+        },
+      ]);
+    });
+
+    test('with field maps, with numeric objectIds filter', async () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      const result = await getData({
+        query: {
+          objectIds: '1,2',
+        },
+        params: { id: 'my-index' },
+      });
+
+      expect(result).toEqual({
+        type: 'FeatureCollection',
+        features: [
+          {
+            geometry: { type: 'Point', coordinates: [-144.67, 64.48] },
+            properties: { id: 'AK64', name: 'GLACIER CREEK' },
+          },
+          {
+            geometry: { type: 'Point', coordinates: [-102.84, 43.08] },
+            properties: { id: 'SD43', name: 'SANDOZ' },
+          },
+        ],
+        metadata: { idField: 'id' },
+        filtersApplied: {
+          objectIds: true,
+        },
+      });
+      expect(elasticQueryMock.mock.calls[0]).toEqual([
+        {
+          fetch_size: 1000,
+          query: 'SELECT *, location FROM my-index WHERE id IN (1,2)',
+        },
+      ]);
+    });
+
+    test('with field maps, with orderByFields sorting', async () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      const result = await getData({
+        query: {
+          orderByFields: 'name',
+        },
+        params: { id: 'my-index' },
+      });
+
+      expect(result).toEqual({
+        type: 'FeatureCollection',
+        features: [
+          {
+            geometry: { type: 'Point', coordinates: [-144.67, 64.48] },
+            properties: { id: 'AK64', name: 'GLACIER CREEK' },
+          },
+          {
+            geometry: { type: 'Point', coordinates: [-102.84, 43.08] },
+            properties: { id: 'SD43', name: 'SANDOZ' },
+          },
+        ],
+        metadata: { idField: 'id' },
+        filtersApplied: {
+          orderByFields: true,
+        },
+      });
+      expect(elasticQueryMock.mock.calls[0]).toEqual([
+        {
+          fetch_size: 1000,
+          query: 'SELECT *, location FROM my-index ORDER BY name',
+        },
+      ]);
+    });
+
+    test('without field maps, no query options', async () => {
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' }
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      const result = await getData({ query: {}, params: { id: 'my-index' } });
+      expect(result).toEqual({
+        type: 'FeatureCollection',
+        features: [
+          {
+            geometry: undefined,
+            properties: { id: 'AK64', location: 'POINT (-144.67 64.48)', name: 'GLACIER CREEK' },
+          },
+          {
+            geometry: undefined,
+            properties: { id: 'SD43', location: 'POINT (-102.84 43.08)', name: 'SANDOZ' },
+          },
+        ],
+        metadata: { idField: undefined },
+        filtersApplied: {},
+      });
+      expect(elasticQueryMock.mock.calls[0]).toEqual([
+        {
+          fetch_size: 1000,
+          query: 'SELECT * FROM my-index',
+        },
+      ]);
+    });
+
+    test('handle elastic 400 error', async () => {
+      elastic.Client.mockImplementationOnce(function () {
+        return {
+          sql: {
+            query: () => {
+              const error = new Error('bad input');
+              error.name = 'ResponseError';
+              error.body = { status: 400 };
+              throw error;
+            },
+          },
+        };
+      });
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      try {
+        await getData({ query: {}, params: { id: 'my-index' } });
+        throw new Error('should have thrown');
+      } catch (error) {
+        expect(error.message).toBe('invalid input');
+        expect(error.code).toBe(400);
+      }
+    });
+
+    test('handle elastic 500 error', async () => {
+      elastic.Client.mockImplementationOnce(function () {
+        return {
+          sql: {
+            query: () => {
+              const error = new Error('something went wrong');
+              error.name = 'ResponseError';
+              throw error;
+            },
+          },
+        };
+      });
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      try {
+        await getData({ query: {}, params: { id: 'my-index' } });
+        throw new Error('should have thrown');
+      } catch (error) {
+        expect(error.message).toBe('something went wrong');
+        expect(error.code).toBe(500);
+      }
+    });
+
+    test('handle other error', async () => {
+      elastic.Client.mockImplementationOnce(function () {
+        return {
+          sql: {
+            query: () => {
+              throw new Error('something went wrong');
+            },
+          },
+        };
+      });
+      const provider = new Provider(
+        { logger },
+        {
+          conn: { node: 'http://localhost' },
+          idFieldMap: { 'my-index': 'id' },
+          geometryFieldMap: { 'my-index': 'location' },
+        },
+      );
+      const getData = promisify(provider.getData).bind(provider);
+
+      try {
+        await getData({ query: {}, params: { id: 'my-index' } });
+        throw new Error('should have thrown');
+      } catch (error) {
+        expect(error.message).toBe('something went wrong');
+      }
+    });
+  });
+});
diff --git a/src/validate.js b/src/validate.js
index d9e0238..1329d18 100644
--- a/src/validate.js
+++ b/src/validate.js
@@ -8,9 +8,13 @@ const geometryFieldMapSchema = joi.object().required();
 
 function validateConn(conn) {
   const { error } = connSchema.validate(conn);
+  if (error?.message === '"value" is required') {
+    throw new Error('client connection configuration object is required');
+  }
+
   if (error) {
     throw new Error(
-      `invalid client connection config, ${error.details[0].message}`,
+      `invalid "conn", ${error.details[0].message.replace('"value" ', '')}`,
     );
   }
 }
@@ -18,20 +22,14 @@
 function validateIdFieldMap(idFieldMap) {
   const { error } = idFieldMapSchema.validate(idFieldMap);
   if (error) {
-    if (error.message === '"value" must be of type object') {
-      throw new Error('invalid "idFieldMap", must be a key/value object');
-    }
-    throw new Error(`invalid "idFieldMap", ${error.details[0].message}`);
+    throw new Error(`invalid "idFieldMap", ${error.details[0].message.replace('"value" ', '')}`);
   }
 }
 
 function validateGeometryFieldMap(geometryFieldMap) {
   const { error } = geometryFieldMapSchema.validate(geometryFieldMap);
   if (error) {
-    if (error.message === '"value" must be of type object') {
-      throw new Error('invalid "geometryFieldMap", must be a key/value object');
-    }
-    throw new Error(`invalid "geometryFieldMap", ${error.details[0].message}`);
+    throw new Error(`invalid "geometryFieldMap", ${error.details[0].message.replace('"value" ', '')}`);
   }
 }
 
diff --git a/src/validate.spec.js b/src/validate.spec.js
new file mode 100644
index 0000000..6c9a776
--- /dev/null
+++ b/src/validate.spec.js
@@ -0,0 +1,57 @@
+const { validateConn, validateGeometryFieldMap, validateIdFieldMap } = require('./validate');
+
+describe('validation functions', () => {
+  describe('validateConn', () => {
+    test('validateConn, success', () => {
+      expect(validateConn({})).toBe(undefined);
+    });
+
+    test('validateConn, failure when missing', () => {
+      try {
+        validateConn();
+        throw new Error('should have thrown');
+      } catch (error) {
+        expect(error.message).toBe('client connection configuration object is required');
+      }
+    });
+
+    test('validateConn, failure when wrong data type', () => {
+      try {
+        validateConn('string');
+        throw new Error('should have thrown');
+      } catch (error) {
+        expect(error.message).toBe('invalid "conn", must be of type object');
+      }
+    });
+  });
+
+  describe('validateIdFieldMap', () => {
+    test('validateIdFieldMap, success', () => {
+      expect(validateIdFieldMap({ index: 'field' })).toBe(undefined);
+    });
+
+    test('validateIdFieldMap, failure when wrong data type', () => {
+      try {
+        validateIdFieldMap('string');
+        throw new Error('should have thrown');
+      } catch (error) {
+        expect(error.message).toBe('invalid "idFieldMap", must be of type object');
+      }
+    });
+  });
+
+  describe('validateGeometryFieldMap', () => {
+    test('validateGeometryFieldMap, success', () => {
+      expect(validateGeometryFieldMap({ index: 'field' })).toBe(undefined);
+    });
+
+    test('validateGeometryFieldMap, failure when wrong data type', () => {
+      try {
+        validateGeometryFieldMap('string');
+        throw new Error('should have thrown');
+      } catch (error) {
+        expect(error.message).toBe('invalid "geometryFieldMap", must be of type object');
+      }
+    });
+  });
+});
\ No newline at end of file
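
For context on how the pieces of this patch fit together, here is a minimal registration sketch. It assumes the Koop framework and the @koopjs/koop-core package, which the registration object's shape ({ type: 'provider', name, version, Model }) and the Model constructor's ({ logger }, options) signature appear to target; the require path, node URL, port, index name, and field names below are placeholders rather than values taken from this repository.

// registration-sketch.js (hypothetical usage example, not part of this patch)
const Koop = require('@koopjs/koop-core');
const provider = require('./src'); // exports { type: 'provider', name: 'elastic-sql', version, Model }

const koop = new Koop();

// "conn" is required and must be an object; "idFieldMap" and "geometryFieldMap"
// stay optional now that the constructor defaults its options argument to {}.
koop.register(provider, {
  conn: { node: 'http://localhost:9200' },
  idFieldMap: { 'my-index': 'id' },
  geometryFieldMap: { 'my-index': 'location' },
});

koop.server.listen(8080);

With a registration like this, a request whose :id route parameter is 'my-index' is translated by Model#getData into Elasticsearch SQL (for example SELECT *, location FROM my-index), and filtersApplied reports only the filters the provider actually pushed down (where, objectIds, orderByFields, geometry), matching the expectations in src/model.spec.js.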