diff --git a/.eslintignore b/.eslintignore
index f8e66fae..a4fd9ee9 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -4,12 +4,3 @@ coverage
**/*.js
**/*.d.ts
**/*.js.map
-
-engines/config-query-sparql-incremental/lib/index.ts
-engines/query-sparql-incremental/bin/query.ts
-engines/query-sparql-incremental/lib/QueryEngine.ts
-**/test/**
-**/test-browser/**
-
-packages/dev-tools/*
-packages/incremental-jest/*
diff --git a/.eslintrc.js b/.eslintrc.js
deleted file mode 100644
index 44eaee68..00000000
--- a/.eslintrc.js
+++ /dev/null
@@ -1,185 +0,0 @@
-module.exports = {
- root: true,
- parser: '@typescript-eslint/parser',
- parserOptions: {
- tsconfigRootDir: __dirname, // this is the reason this is a .js file
- project: ['./tsconfig.eslint.json'],
- },
- plugins: [
- 'eslint-plugin-tsdoc',
- 'eslint-plugin-import',
- 'eslint-plugin-jest',
- 'eslint-plugin-unused-imports'
- ],
- extends: [
- 'es/node',
- 'plugin:import/errors',
- 'plugin:import/warnings',
- 'plugin:import/typescript'
- ],
- settings: {
- 'import/parsers': {
- '@typescript-eslint/parser': ['.ts', '.tsx']
- },
- 'import/resolver': {
- 'typescript': {
- 'alwaysTryTypes': true // always try to resolve incremental-types under `@incremental-types` directory even it doesn't contain any source code, like `@incremental-types/rdf-js`
- },
- }
- },
- globals: {
- window: false,
- fetch: false,
- Headers: false,
- Request: false,
- XMLHttpRequest: false,
- },
- rules: {
- // Default
- 'import/group-exports': 'off',
- 'class-methods-use-this': 'off', // Conflicts with functions from interfaces that sometimes don't require `this`
- 'comma-dangle': ['error', 'always-multiline'],
- 'dot-location': ['error', 'property'],
- 'lines-between-class-members': ['error', 'always', { exceptAfterSingleLine: true }],
- 'no-underscore-dangle': 'off', // Conflicts with external libraries
- 'padding-line-between-statements': 'off',
- 'no-param-reassign': 'off',
- 'func-style': 'off',
- 'new-cap': 'off',
- 'lines-around-comment': ['error', {
- beforeBlockComment: false,
- afterBlockComment: false,
- beforeLineComment: false,
- afterLineComment: false,
- }],
- 'no-multi-assign': 'off',
- 'no-plusplus': 'off',
- 'guard-for-in': 'off',
- 'sort-imports': 'off', // Disabled in favor of eslint-plugin-import
- 'prefer-named-capture-group': 'off',
- 'max-len': ['error', {
- code: 120,
- ignoreTemplateLiterals: true,
- }],
- 'unicorn/consistent-function-scoping': 'off',
- 'no-warning-comments': 'off',
- 'no-mixed-operators': 'off',
- 'prefer-destructuring': 'off',
- 'default-case': 'off', // TSC already takes care of these checks
- 'no-loop-func': 'off',
- 'unicorn/no-fn-reference-in-iterator': 'off',
- 'extended/consistent-err-names': 'off',
- 'unicorn/prefer-replace-all': 'off',
- 'unicorn/catch-error-name': ['error', { name: 'error' }],
- 'unicorn/no-reduce': 'off',
- 'no-duplicate-imports': 'off', // Incompatible with type imports
- 'unicorn/consistent-destructuring': 'off',
- 'unicorn/no-array-callback-reference': 'off',
- 'unicorn/no-new-array': 'off',
-
- // TS
- '@typescript-eslint/lines-between-class-members': ['error', { exceptAfterSingleLine: true }],
- '@typescript-eslint/no-invalid-void-type': 'off', // breaks with default void in Asynchandler 2nd generic
- '@typescript-eslint/array-type': ['error', { default: 'array' }],
- '@typescript-eslint/generic-type-naming': 'off',
- '@typescript-eslint/no-empty-interface': 'off',
- '@typescript-eslint/no-unnecessary-condition': 'off', // Problems with optional parameters
- '@typescript-eslint/space-before-function-paren': ['error', 'never'],
- '@typescript-eslint/promise-function-async': 'off',
- '@typescript-eslint/consistent-type-assertions': ['error', { assertionStyle: 'angle-bracket' }],
- '@typescript-eslint/member-naming': 'off',
- '@typescript-eslint/naming-convention': [
- 'error',
- {
- 'selector': 'interface',
- 'format': ['PascalCase'],
- 'custom': {
- 'regex': '^I[A-Z]',
- 'match': true
- }
- }
- ],
- '@typescript-eslint/no-dynamic-delete': 'off',
- '@typescript-eslint/explicit-function-return-type': ['error', {
- allowExpressions: true,
- allowTypedFunctionExpressions: true,
- allowHigherOrderFunctions: true,
- allowConciseArrowFunctionExpressionsStartingWithVoid: true,
- }],
- '@typescript-eslint/no-use-before-define': 'off',
- '@typescript-eslint/prefer-nullish-coalescing': 'off',
- '@typescript-eslint/consistent-type-imports': ['error', { prefer: 'type-imports' }],
-
- // Import
- 'import/order': ['error', {
- alphabetize: {
- order: 'asc',
- caseInsensitive: true
- }
- }],
- 'import/no-unused-modules': 'off',
- 'unused-imports/no-unused-imports-ts': 'error',
- 'import/no-extraneous-dependencies': 'error',
-
- // TODO: Try to re-enable the following rules in the future
- 'global-require': 'off',
- '@typescript-eslint/no-require-imports': 'off',
- '@typescript-eslint/no-var-requires': 'off',
- '@typescript-eslint/no-unused-vars': 'off',
- 'tsdoc/syntax': 'off',
- 'unicorn/expiring-todo-comments': 'off',
- 'unicorn/import-style': 'off',
- 'unicorn/prefer-at': 'off',
- 'unicorn/prefer-string-replace-all': 'off',
- },
- overrides: [
- {
- // Specific rules for bin files
- files: ['**/bin/*.ts'],
- rules: {
- 'unicorn/filename-case': ['error', {
- 'case': 'kebabCase'
- }],
- 'no-process-env': 'off',
- 'unicorn/no-process-exit': 'off',
- }
- },
- {
- // Specific rules for test files
- files: ['**/test/**/*.ts'],
- env: {
- 'jest/globals': true,
- },
- globals: {
- 'spyOn': false,
- 'fail': false,
- },
- rules: {
- 'mocha/no-synchronous-tests': 'off',
- 'mocha/valid-test-description': 'off',
- 'mocha/no-sibling-hooks': 'off',
-
- 'max-statements-per-line': 'off',
- 'id-length': 'off',
- 'arrow-body-style': 'off',
- 'line-comment-position': 'off',
- 'no-inline-comments': 'off',
- 'unicorn/filename-case': 'off',
- 'no-new': 'off',
- 'unicorn/no-nested-ternary': 'off',
- 'no-return-assign': 'off',
- 'no-useless-call': 'off',
- 'no-sync': 'off',
-
- '@typescript-eslint/brace-style': 'off',
- '@typescript-eslint/ban-ts-comment': 'off',
- '@typescript-eslint/ban-ts-ignore': 'off',
- '@typescript-eslint/explicit-function-return-type': 'off',
- '@typescript-eslint/unbound-method': 'off',
- '@typescript-eslint/no-extra-parens': 'off',
- '@typescript-eslint/restrict-plus-operands': 'off',
- 'import/no-extraneous-dependencies': 'off',
- }
- }
- ],
-};
diff --git a/README.md b/README.md
index 0db16006..2068470a 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@
-This is a monorepo that builds upon the core comunica packages to allow for incremental query evaluation.
+This is a monorepo that builds upon the core comunica packages to allow for incremental query evaluation.
## Querying with Incremunica
diff --git a/engines/config-query-sparql-incremental/config/query-source-identify-hypermedia/actors.json b/engines/config-query-sparql-incremental/config/query-source-identify-hypermedia/actors.json
index f7afd9d0..b2ffd0e0 100644
--- a/engines/config-query-sparql-incremental/config/query-source-identify-hypermedia/actors.json
+++ b/engines/config-query-sparql-incremental/config/query-source-identify-hypermedia/actors.json
@@ -10,7 +10,7 @@
{
"@id": "urn:comunica:default:query-source-identify-hypermedia/actors#stream-none",
"@type": "ActorQuerySourceIdentifyHypermediaStreamNone",
- "mediatorGuard": { "@id": "urn:comunica:default:guard/mediators#main" },
+ "mediatorGuard": { "@id": "urn:comunica:default:guard/mediators#main" },
"mediatorMergeBindingsContext": { "@id": "urn:comunica:default:merge-bindings-context/mediators#main" }
}
]
diff --git a/engines/config-query-sparql-incremental/config/rdf-metadata-extract/actors.json b/engines/config-query-sparql-incremental/config/rdf-metadata-extract/actors.json
index ac21a4a8..9efba124 100644
--- a/engines/config-query-sparql-incremental/config/rdf-metadata-extract/actors.json
+++ b/engines/config-query-sparql-incremental/config/rdf-metadata-extract/actors.json
@@ -9,7 +9,7 @@
"https://linkedsoftwaredependencies.org/bundles/npm/@comunica/actor-rdf-metadata-extract-request-time/^3.0.0/components/context.jsonld",
"https://linkedsoftwaredependencies.org/bundles/npm/@comunica/actor-rdf-metadata-extract-allow-http-methods/^3.0.0/components/context.jsonld",
"https://linkedsoftwaredependencies.org/bundles/npm/@comunica/actor-rdf-metadata-extract-put-accepted/^3.0.0/components/context.jsonld",
- "https://linkedsoftwaredependencies.org/bundles/npm/@comunica/actor-rdf-metadata-extract-patch-sparql-update/^3.0.0/components/context.jsonld",
+ "https://linkedsoftwaredependencies.org/bundles/npm/@comunica/actor-rdf-metadata-extract-patch-sparql-update/^3.0.0/components/context.jsonld"
],
"@id": "urn:comunica:default:Runner",
"@type": "Runner",
diff --git a/engines/config-query-sparql-incremental/lib/index.ts b/engines/config-query-sparql-incremental/lib/index.ts
index d15de7d9..e69de29b 100644
--- a/engines/config-query-sparql-incremental/lib/index.ts
+++ b/engines/config-query-sparql-incremental/lib/index.ts
@@ -1 +0,0 @@
-/* eslint-disable */
diff --git a/engines/config-query-sparql-incremental/package.json b/engines/config-query-sparql-incremental/package.json
index 831dc8bb..ae0085d1 100644
--- a/engines/config-query-sparql-incremental/package.json
+++ b/engines/config-query-sparql-incremental/package.json
@@ -3,17 +3,16 @@
"version": "1.3.0",
"description": "default configuration files for Comunica SPARQL Incremental",
"lsd:module": true,
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica",
"directory": "engines/config-query-sparql-incremental"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"comunica",
"sparql",
@@ -23,11 +22,12 @@
"config",
"incremental"
],
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
},
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
"components",
"config",
diff --git a/engines/query-sparql-incremental/bin/http.ts b/engines/query-sparql-incremental/bin/http.ts
deleted file mode 100644
index 49380f1e..00000000
--- a/engines/query-sparql-incremental/bin/http.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env node
-import { HttpServiceSparqlEndpoint } from '@comunica/actor-init-query';
-
-const defaultConfigPath = `${__dirname}/../config/config-default.json`;
-
-HttpServiceSparqlEndpoint.runArgsInProcess(process.argv.slice(2), process.stdout, process.stderr, `${__dirname}/../`, process.env, defaultConfigPath, code => process.exit(code))
- .catch(error => process.stderr.write(`${error.message}/n`));
diff --git a/engines/query-sparql-incremental/bin/query-dynamic.ts b/engines/query-sparql-incremental/bin/query-dynamic.ts
index 3290a83e..82815e38 100644
--- a/engines/query-sparql-incremental/bin/query-dynamic.ts
+++ b/engines/query-sparql-incremental/bin/query-dynamic.ts
@@ -1,4 +1,5 @@
#!/usr/bin/env node
import { runArgsInProcess } from '@comunica/runner-cli';
+// eslint-disable-next-line node/no-path-concat
runArgsInProcess(`${__dirname}/../`, `${__dirname}/../config/config-default.json`);
diff --git a/engines/query-sparql-incremental/bin/query.ts b/engines/query-sparql-incremental/bin/query.ts
index 64147a19..a7fe01ed 100644
--- a/engines/query-sparql-incremental/bin/query.ts
+++ b/engines/query-sparql-incremental/bin/query.ts
@@ -1,6 +1,6 @@
#!/usr/bin/env node
-/* eslint-disable */
import { runArgsInProcessStatic } from '@comunica/runner-cli';
+// eslint-disable-next-line ts/no-require-imports,ts/no-var-requires,import/extensions
runArgsInProcessStatic(require('../engine-default.js'));
diff --git a/engines/query-sparql-incremental/lib/QueryEngine.ts b/engines/query-sparql-incremental/lib/QueryEngine.ts
index 46597978..05220cc3 100644
--- a/engines/query-sparql-incremental/lib/QueryEngine.ts
+++ b/engines/query-sparql-incremental/lib/QueryEngine.ts
@@ -2,6 +2,7 @@ import { QueryEngineBase } from '@comunica/actor-init-query';
import type { ActorInitQueryBase } from '@comunica/actor-init-query';
import type { IQueryContextCommon } from '@comunica/types';
+// eslint-disable-next-line ts/no-require-imports,ts/no-var-requires,import/extensions
const engineDefault = require('../engine-default.js');
/**
diff --git a/engines/query-sparql-incremental/lib/QueryEngineFactory.ts b/engines/query-sparql-incremental/lib/QueryEngineFactory.ts
index d8b1a304..5365a0b6 100644
--- a/engines/query-sparql-incremental/lib/QueryEngineFactory.ts
+++ b/engines/query-sparql-incremental/lib/QueryEngineFactory.ts
@@ -1,3 +1,4 @@
+/* eslint-disable node/no-path-concat */
import { QueryEngineFactoryBase } from '@comunica/actor-init-query';
import { QueryEngine } from './QueryEngine';
diff --git a/engines/query-sparql-incremental/package.json b/engines/query-sparql-incremental/package.json
index e63fed66..62b12c87 100644
--- a/engines/query-sparql-incremental/package.json
+++ b/engines/query-sparql-incremental/package.json
@@ -2,17 +2,16 @@
"name": "@incremunica/query-sparql-incremental",
"version": "1.3.0",
"description": "A SPARQL query engine for incremental querying over decentralized RDF knowledge graphs on the Web",
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica.git",
"directory": "engines/query-sparql-incremental"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"comunica",
"sparql",
@@ -24,27 +23,44 @@
"sparql update",
"sparql 1.1"
],
- "license": "MIT",
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
+ },
"bin": {
"comunica-sparql-incremental": "bin/query.js",
"comunica-sparql-http-incremental": "bin/http.js",
"comunica-dynamic-sparql-incremental": "bin/query-dynamic.js"
},
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
- },
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
+ "bin/**/*.d.ts",
+ "bin/**/*.js",
"components",
"config",
+ "engine-default.js",
"lib/**/*.d.ts",
- "lib/**/*.js",
- "bin/**/*.d.ts",
- "bin/**/*.js",
- "engine-default.js"
+ "lib/**/*.js"
],
- "devDependencies": {
- "arrayify-stream": "^2.0.1"
+ "scripts": {
+ "build": "npm run build:ts",
+ "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
+ "prepare": "comunica-compile-config config/config-default.json > engine-default.js",
+ "browser": "npm run prepare && \"../../node_modules/webpack/bin/webpack.js\" --config webpack.config.js --mode production",
+ "browser-profile": "\"../../node_modules/webpack/bin/webpack.js\" --config webpack.config.js --profile --mode production --json > stats.json && webpack-bundle-analyzer stats.json && rm stats.json",
+ "spec:base": "node ../../node_modules/rdf-test-suite/bin/Runner.js spec/sparql-engine.js http://w3c.github.io/rdf-tests/sparql11/data-sparql11/manifest-all.ttl -c ../../.rdf-test-suite-cache/",
+ "spec:query": "yarn run spec:base -s http://www.w3.org/TR/sparql11-query/",
+ "spec:update": "yarn run spec:base -s http://www.w3.org/TR/sparql11-update/ -e",
+ "spec:csv-tsv": "yarn run spec:base -s http://www.w3.org/TR/sparql11-results-csv-tsv/",
+ "spec:json": "yarn run spec:base -s http://www.w3.org/TR/sparql11-results-json/",
+ "spec:fed": "yarn run spec:base -s http://www.w3.org/TR/sparql11-federated-query/",
+ "spec:sd": "yarn run spec:base -s http://www.w3.org/TR/sparql11-service-description/",
+ "spec:prot": "yarn run spec:base -s http://www.w3.org/TR/sparql11-protocol/",
+ "spec:graphstore": "yarn run spec:base -s http://www.w3.org/TR/sparql11-http-rdf-update/",
+ "spec": "yarn run spec:query && yarn run spec:update",
+ "spec-earl": "yarn run spec:query -o earl -p spec/earl-meta.json > earl.ttl",
+ "integration": "rdf-test-suite-ldf spec/sparql-engine.js https://comunica.github.io/manifest-ldf-tests/next/sparql/sparql-manifest.ttl -d 200000 -c ../../.rdf-test-suite-ldf-cache/"
},
"dependencies": {
"@comunica/actor-context-preprocess-convert-shortcuts": "^3.2.1",
@@ -157,6 +173,7 @@
"@comunica/actor-rdf-serialize-shaclc": "^3.2.1",
"@comunica/bus-http-invalidate": "^3.2.1",
"@comunica/bus-query-operation": "^3.2.2",
+ "@comunica/config-query-sparql": "^3.2.1",
"@comunica/core": "^3.2.1",
"@comunica/logger-void": "^3.2.1",
"@comunica/mediator-all": "^3.2.1",
@@ -168,9 +185,7 @@
"@comunica/runner": "^3.2.1",
"@comunica/runner-cli": "^3.2.1",
"@comunica/types": "^3.2.1",
- "@comunica/config-query-sparql": "^3.2.1",
- "@incremunica/config-query-sparql-incremental": "^1.3.0",
"@incremunica/actor-guard-naive": "^1.3.0",
"@incremunica/actor-merge-bindings-context-is-addition": "^1.3.0",
"@incremunica/actor-query-operation-incremental-distinct-hash": "^1.3.0",
@@ -185,30 +200,21 @@
"@incremunica/actor-resource-watch-solid-notification-websockets": "^1.3.0",
"@incremunica/bus-guard": "^1.3.0",
"@incremunica/bus-resource-watch": "^1.3.0",
+ "@incremunica/config-query-sparql-incremental": "^1.3.0",
"@incremunica/context-entries": "^1.3.0",
"@incremunica/hash-bindings": "^1.3.0",
"@incremunica/incremental-inner-join": "^1.3.0",
"@incremunica/incremental-rdf-streaming-store": "^1.3.0",
"@incremunica/incremental-types": "^1.3.0"
},
- "scripts": {
- "build": "npm run build:ts",
- "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
- "prepare": "comunica-compile-config config/config-default.json > engine-default.js",
- "browser": "npm run prepare && \"../../node_modules/webpack/bin/webpack.js\" --config webpack.config.js --mode production",
- "browser-profile": "\"../../node_modules/webpack/bin/webpack.js\" --config webpack.config.js --profile --mode production --json > stats.json && webpack-bundle-analyzer stats.json && rm stats.json",
- "spec:base": "node ../../node_modules/rdf-test-suite/bin/Runner.js spec/sparql-engine.js http://w3c.github.io/rdf-tests/sparql11/data-sparql11/manifest-all.ttl -c ../../.rdf-test-suite-cache/",
- "spec:query": "yarn run spec:base -s http://www.w3.org/TR/sparql11-query/",
- "spec:update": "yarn run spec:base -s http://www.w3.org/TR/sparql11-update/ -e",
- "spec:csv-tsv": "yarn run spec:base -s http://www.w3.org/TR/sparql11-results-csv-tsv/",
- "spec:json": "yarn run spec:base -s http://www.w3.org/TR/sparql11-results-json/",
- "spec:fed": "yarn run spec:base -s http://www.w3.org/TR/sparql11-federated-query/",
- "spec:sd": "yarn run spec:base -s http://www.w3.org/TR/sparql11-service-description/",
- "spec:prot": "yarn run spec:base -s http://www.w3.org/TR/sparql11-protocol/",
- "spec:graphstore": "yarn run spec:base -s http://www.w3.org/TR/sparql11-http-rdf-update/",
- "spec": "yarn run spec:query && yarn run spec:update",
- "spec-earl": "yarn run spec:query -o earl -p spec/earl-meta.json > earl.ttl",
- "integration": "rdf-test-suite-ldf spec/sparql-engine.js https://comunica.github.io/manifest-ldf-tests/next/sparql/sparql-manifest.ttl -d 200000 -c ../../.rdf-test-suite-ldf-cache/"
+ "devDependencies": {
+ "@incremunica/dev-tools": "^1.3.0",
+ "@incremunica/incremental-jest": "^1.3.0",
+ "@playwright/test": "^1.47.2",
+ "arrayify-stream": "^2.0.1",
+ "jest-rdf": "^1.7.1",
+ "rdf-data-factory": "^1.1.2",
+ "rdf-quad": "^1.5.0"
},
"browser": {
"./lib/index.js": "./lib/index-browser.js"
diff --git a/engines/query-sparql-incremental/test-browser/QuerySparql-test.ts b/engines/query-sparql-incremental/test-browser/QuerySparql-test.ts
index ae45de0e..1ff6df59 100644
--- a/engines/query-sparql-incremental/test-browser/QuerySparql-test.ts
+++ b/engines/query-sparql-incremental/test-browser/QuerySparql-test.ts
@@ -3,22 +3,22 @@
// Needed to undo automock from actor-http-native, cleaner workarounds do not appear to be working.
import 'jest-rdf';
import '@incremunica/incremental-jest';
+import type { EventEmitter } from 'node:events';
+import type { BindingsFactory } from '@comunica/bindings-factory';
+import type { BindingsStream, QueryStringContext } from '@comunica/types';
+import { ActionContextKeyIsAddition } from '@incremunica/actor-merge-bindings-context-is-addition';
+import { DevTools } from '@incremunica/dev-tools';
+import { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
+import type { Quad } from '@incremunica/incremental-types';
+import { expect } from '@playwright/test';
import { DataFactory } from 'rdf-data-factory';
-import type { BindingsStream, QueryStringContext} from '@comunica/types';
-import {Factory} from 'sparqlalgebrajs';
-import {QueryEngine} from '../lib/QueryEngine';
-import {usePolly} from '../test/util';
-import {EventEmitter} from "events";
-import {StreamingStore} from "@incremunica/incremental-rdf-streaming-store";
-import {Quad} from "@incremunica/incremental-types";
-import {BindingsFactory} from "@comunica/bindings-factory";
-import {ActionContextKeyIsAddition} from "@incremunica/actor-merge-bindings-context-is-addition";
-import {DevTools} from "@incremunica/dev-tools";
+import { QueryEngine } from '../lib/QueryEngine';
+import { usePolly } from '../test/util';
async function partialArrayifyStream(stream: EventEmitter, num: number): Promise {
- let array: any[] = [];
+ const array: any[] = [];
for (let i = 0; i < num; i++) {
- await new Promise((resolve) => stream.once("data", (bindings: any) => {
+ await new Promise(resolve => stream.once('data', (bindings: any) => {
array.push(bindings);
resolve();
}));
@@ -38,107 +38,107 @@ describe('System test: QuerySparql (without polly)', () => {
let BF: BindingsFactory;
let engine: QueryEngine;
- beforeEach(async () => {
+ beforeEach(async() => {
engine = new QueryEngine();
BF = await DevTools.createBindingsFactory(DF);
});
- describe("using Streaming Store", () => {
+ describe('using Streaming Store', () => {
let streamingStore: StreamingStore;
- beforeEach(async () => {
+ beforeEach(async() => {
streamingStore = new StreamingStore();
- })
+ });
- it('simple query', async () => {
- streamingStore.addQuad(quad("s1", "p1", "o1"));
- streamingStore.addQuad(quad("s2", "p2", "o2"));
+ it('simple query', async() => {
+ streamingStore.addQuad(quad('s1', 'p1', 'o1'));
+ streamingStore.addQuad(quad('s2', 'p2', 'o2'));
- let bindingStream = await engine.queryBindings(`SELECT * WHERE {
+ const bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s ?p ?o.
}`, {
- sources: [streamingStore]
+ sources: [ streamingStore ],
});
expect(await partialArrayifyStream(bindingStream, 2)).toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s'), DF.namedNode('s1')],
- [DF.variable('p'), DF.namedNode('p1')],
- [DF.variable('o'), DF.namedNode('o1')],
+ [ DF.variable('s'), DF.namedNode('s1') ],
+ [ DF.variable('p'), DF.namedNode('p1') ],
+ [ DF.variable('o'), DF.namedNode('o1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('s'), DF.namedNode('s2')],
- [DF.variable('p'), DF.namedNode('p2')],
- [DF.variable('o'), DF.namedNode('o2')],
+ [ DF.variable('s'), DF.namedNode('s2') ],
+ [ DF.variable('p'), DF.namedNode('p2') ],
+ [ DF.variable('o'), DF.namedNode('o2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- streamingStore.addQuad(quad("s3", "p3", "o3"));
+ streamingStore.addQuad(quad('s3', 'p3', 'o3'));
expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s'), DF.namedNode('s3')],
- [DF.variable('p'), DF.namedNode('p3')],
- [DF.variable('o'), DF.namedNode('o3')],
- ])
+ [ DF.variable('s'), DF.namedNode('s3') ],
+ [ DF.variable('p'), DF.namedNode('p3') ],
+ [ DF.variable('o'), DF.namedNode('o3') ],
+ ]),
]);
- streamingStore.removeQuad(quad("s3", "p3", "o3"));
+ streamingStore.removeQuad(quad('s3', 'p3', 'o3'));
expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s'), DF.namedNode('s3')],
- [DF.variable('p'), DF.namedNode('p3')],
- [DF.variable('o'), DF.namedNode('o3')],
- ]).setContextEntry(new ActionContextKeyIsAddition(), false)
+ [ DF.variable('s'), DF.namedNode('s3') ],
+ [ DF.variable('p'), DF.namedNode('p3') ],
+ [ DF.variable('o'), DF.namedNode('o3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
]);
streamingStore.end();
});
- it('query with joins', async () => {
- streamingStore.addQuad(quad("s1", "p1", "o1"));
- streamingStore.addQuad(quad("o1", "p2", "o2"));
+ it('query with joins', async() => {
+ streamingStore.addQuad(quad('s1', 'p1', 'o1'));
+ streamingStore.addQuad(quad('o1', 'p2', 'o2'));
- let bindingStream = await engine.queryBindings(`SELECT * WHERE {
+ const bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s1 ?p1 ?o1.
?o1 ?p2 ?o2.
}`, {
- sources: [streamingStore]
+ sources: [ streamingStore ],
});
expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s1'), DF.namedNode('s1')],
- [DF.variable('p1'), DF.namedNode('p1')],
- [DF.variable('o1'), DF.namedNode('o1')],
- [DF.variable('p2'), DF.namedNode('p2')],
- [DF.variable('o2'), DF.namedNode('o2')],
+ [ DF.variable('s1'), DF.namedNode('s1') ],
+ [ DF.variable('p1'), DF.namedNode('p1') ],
+ [ DF.variable('o1'), DF.namedNode('o1') ],
+ [ DF.variable('p2'), DF.namedNode('p2') ],
+ [ DF.variable('o2'), DF.namedNode('o2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- streamingStore.addQuad(quad("o1", "p3", "o3"));
+ streamingStore.addQuad(quad('o1', 'p3', 'o3'));
expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s1'), DF.namedNode('s1')],
- [DF.variable('p1'), DF.namedNode('p1')],
- [DF.variable('o1'), DF.namedNode('o1')],
- [DF.variable('p2'), DF.namedNode('p3')],
- [DF.variable('o2'), DF.namedNode('o3')],
- ])
+ [ DF.variable('s1'), DF.namedNode('s1') ],
+ [ DF.variable('p1'), DF.namedNode('p1') ],
+ [ DF.variable('o1'), DF.namedNode('o1') ],
+ [ DF.variable('p2'), DF.namedNode('p3') ],
+ [ DF.variable('o2'), DF.namedNode('o3') ],
+ ]),
]);
- streamingStore.removeQuad(quad("o1", "p3", "o3"));
+ streamingStore.removeQuad(quad('o1', 'p3', 'o3'));
expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s1'), DF.namedNode('s1')],
- [DF.variable('p1'), DF.namedNode('p1')],
- [DF.variable('o1'), DF.namedNode('o1')],
- [DF.variable('p2'), DF.namedNode('p3')],
- [DF.variable('o2'), DF.namedNode('o3')],
- ]).setContextEntry(new ActionContextKeyIsAddition(), false)
+ [ DF.variable('s1'), DF.namedNode('s1') ],
+ [ DF.variable('p1'), DF.namedNode('p1') ],
+ [ DF.variable('o1'), DF.namedNode('o1') ],
+ [ DF.variable('p2'), DF.namedNode('p3') ],
+ [ DF.variable('o2'), DF.namedNode('o3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
]);
streamingStore.end();
@@ -151,30 +151,22 @@ describe('System test: QuerySparql (with polly)', () => {
let bindingStream: BindingsStream;
let engine: QueryEngine;
- beforeEach(() => {
+ beforeEach(async() => {
engine = new QueryEngine();
- engine.invalidateHttpCache();
+ await engine.invalidateHttpCache();
});
afterEach(() => {
bindingStream.destroy();
- })
+ });
describe('simple SPO on a raw RDF document', () => {
it('with results', async() => {
bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s ?p ?o.
}`, { sources: [ 'https://www.rubensworks.net/' ]});
- let count = 0;
-
- await new Promise((resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
});
it('repeated with the same engine', async() => {
@@ -183,71 +175,23 @@ describe('System test: QuerySparql (with polly)', () => {
}`;
const context: QueryStringContext = { sources: [ 'https://www.rubensworks.net/' ]};
- let count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
-
- count = 0;
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
+
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
-
- count = 0;
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
+
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
-
- count = 0;
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
+
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
-
- count = 0;
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
+
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
-
- count = 0;
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
+
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
});
it('repeated with the same engine and wait a bit until the polling is removed', async() => {
@@ -256,29 +200,13 @@ describe('System test: QuerySparql (with polly)', () => {
}`;
const context: QueryStringContext = { sources: [ 'https://www.rubensworks.net/' ]};
- let count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
-
- await new Promise((resolve) => setTimeout(()=>resolve(),10000));
-
- count = 0;
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
+
+ await new Promise(resolve => setTimeout(() => resolve(), 10000));
+
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
});
describe('simple SPS', () => {
@@ -287,11 +215,7 @@ describe('System test: QuerySparql (with polly)', () => {
?s ?p ?s.
}`, { sources: [ 'https://www.rubensworks.net/' ]});
- await new Promise((resolve) => bindingStream.on("data", async () => {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }));
+ expect((await partialArrayifyStream(bindingStream, 1)).length).toBeGreaterThan(0);
});
});
@@ -302,15 +226,7 @@ describe('System test: QuerySparql (with polly)', () => {
?v0 ?name.
}`, { sources: [ 'https://www.rubensworks.net/' ]});
- let count = 0;
- await new Promise((resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 20) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 20)).length).toBeGreaterThan(20);
});
it('for the single source entry', async() => {
@@ -319,37 +235,21 @@ describe('System test: QuerySparql (with polly)', () => {
?v0 ?name.
}`, { sources: [ 'https://www.rubensworks.net/' ]});
- let count = 0;
- await new Promise((resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 20) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 20)).length).toBeGreaterThan(20);
});
describe('SHACL Compact Syntax Serialisation', () => {
- it('handles the query with SHACL compact syntax as a source', async () => {
+ it('handles the query with SHACL compact syntax as a source', async() => {
bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s a .
}`, {
sources: [
'https://raw.githubusercontent.com/w3c/data-shapes/gh-pages/shacl-compact-syntax/' +
'tests/valid/basic-shape-iri.shaclc',
- ]
+ ],
});
- let count = 0;
- await new Promise((resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 0) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 1)).length).toBeGreaterThan(0);
});
});
});
diff --git a/engines/query-sparql-incremental/test-browser/util-browser.js b/engines/query-sparql-incremental/test-browser/util-browser.js
index fc493c8d..68363de3 100644
--- a/engines/query-sparql-incremental/test-browser/util-browser.js
+++ b/engines/query-sparql-incremental/test-browser/util-browser.js
@@ -1,3 +1,3 @@
export function usePolly() {
- // No-op
+ // No-op
}
diff --git a/engines/query-sparql-incremental/test/QuerySparql-test.ts b/engines/query-sparql-incremental/test/QuerySparql-test.ts
index d550222d..f4301dbc 100644
--- a/engines/query-sparql-incremental/test/QuerySparql-test.ts
+++ b/engines/query-sparql-incremental/test/QuerySparql-test.ts
@@ -3,22 +3,23 @@
// Needed to undo automock from actor-http-native, cleaner workarounds do not appear to be working.
import 'jest-rdf';
import '@incremunica/incremental-jest';
+import type { EventEmitter } from 'node:events';
+import * as http from 'node:http';
+import type { BindingsFactory } from '@comunica/bindings-factory';
+import type { Bindings, BindingsStream, QueryStringContext } from '@comunica/types';
+import { ActionContextKeyIsAddition } from '@incremunica/actor-merge-bindings-context-is-addition';
+import { DevTools } from '@incremunica/dev-tools';
+import { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
+import type { Quad } from '@incremunica/incremental-types';
+import { expect } from '@playwright/test';
import { DataFactory } from 'rdf-data-factory';
-import type {Bindings, BindingsStream, QueryStringContext} from '@comunica/types';
-import {QueryEngine} from '../lib/QueryEngine';
-import {usePolly} from './util';
-import {EventEmitter} from "events";
-import * as http from "http";
-import {StreamingStore} from "@incremunica/incremental-rdf-streaming-store";
-import {Quad} from "@incremunica/incremental-types";
-import {BindingsFactory} from "@comunica/bindings-factory";
-import {ActionContextKeyIsAddition} from "@incremunica/actor-merge-bindings-context-is-addition";
-import {DevTools} from "@incremunica/dev-tools";
+import { QueryEngine } from '../lib/QueryEngine';
+import { usePolly } from './util';
async function partialArrayifyStream(stream: EventEmitter, num: number): Promise {
- let array: any[] = [];
+ const array: any[] = [];
for (let i = 0; i < num; i++) {
- await new Promise((resolve) => stream.once("data", (bindings: any) => {
+ await new Promise(resolve => stream.once('data', (bindings: any) => {
array.push(bindings);
resolve();
}));
@@ -38,107 +39,107 @@ describe('System test: QuerySparql (without polly)', () => {
let BF: BindingsFactory;
let engine: QueryEngine;
- beforeEach(async () => {
+ beforeEach(async() => {
engine = new QueryEngine();
BF = await DevTools.createBindingsFactory(DF);
});
- describe("using Streaming Store", () => {
+ describe('using Streaming Store', () => {
let streamingStore: StreamingStore;
- beforeEach(async () => {
+ beforeEach(async() => {
streamingStore = new StreamingStore();
- })
+ });
- it('simple query', async () => {
- streamingStore.addQuad(quad("s1", "p1", "o1"));
- streamingStore.addQuad(quad("s2", "p2", "o2"));
+ it('simple query', async() => {
+ streamingStore.addQuad(quad('s1', 'p1', 'o1'));
+ streamingStore.addQuad(quad('s2', 'p2', 'o2'));
- let bindingStream = await engine.queryBindings(`SELECT * WHERE {
+ const bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s ?p ?o.
}`, {
- sources: [streamingStore]
+ sources: [ streamingStore ],
});
- expect(await partialArrayifyStream(bindingStream, 2)).toBeIsomorphicBindingsArray([
+ await expect(partialArrayifyStream(bindingStream, 2)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s'), DF.namedNode('s1')],
- [DF.variable('p'), DF.namedNode('p1')],
- [DF.variable('o'), DF.namedNode('o1')],
+ [ DF.variable('s'), DF.namedNode('s1') ],
+ [ DF.variable('p'), DF.namedNode('p1') ],
+ [ DF.variable('o'), DF.namedNode('o1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('s'), DF.namedNode('s2')],
- [DF.variable('p'), DF.namedNode('p2')],
- [DF.variable('o'), DF.namedNode('o2')],
+ [ DF.variable('s'), DF.namedNode('s2') ],
+ [ DF.variable('p'), DF.namedNode('p2') ],
+ [ DF.variable('o'), DF.namedNode('o2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- streamingStore.addQuad(quad("s3", "p3", "o3"));
+ streamingStore.addQuad(quad('s3', 'p3', 'o3'));
- expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
+ await expect(partialArrayifyStream(bindingStream, 1)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s'), DF.namedNode('s3')],
- [DF.variable('p'), DF.namedNode('p3')],
- [DF.variable('o'), DF.namedNode('o3')],
- ]).setContextEntry(new ActionContextKeyIsAddition(), true)
+ [ DF.variable('s'), DF.namedNode('s3') ],
+ [ DF.variable('p'), DF.namedNode('p3') ],
+ [ DF.variable('o'), DF.namedNode('o3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- streamingStore.removeQuad(quad("s3", "p3", "o3"));
+ streamingStore.removeQuad(quad('s3', 'p3', 'o3'));
- expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
+ await expect(partialArrayifyStream(bindingStream, 1)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s'), DF.namedNode('s3')],
- [DF.variable('p'), DF.namedNode('p3')],
- [DF.variable('o'), DF.namedNode('o3')],
- ]).setContextEntry(new ActionContextKeyIsAddition(), false)
+ [ DF.variable('s'), DF.namedNode('s3') ],
+ [ DF.variable('p'), DF.namedNode('p3') ],
+ [ DF.variable('o'), DF.namedNode('o3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
]);
streamingStore.end();
});
- it('query with joins', async () => {
- streamingStore.addQuad(quad("s1", "p1", "o1"));
- streamingStore.addQuad(quad("o1", "p2", "o2"));
+ it('query with joins', async() => {
+ streamingStore.addQuad(quad('s1', 'p1', 'o1'));
+ streamingStore.addQuad(quad('o1', 'p2', 'o2'));
- let bindingStream = await engine.queryBindings(`SELECT * WHERE {
+ const bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s1 ?p1 ?o1.
?o1 ?p2 ?o2.
}`, {
- sources: [streamingStore]
+ sources: [ streamingStore ],
});
- expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
+ await expect(partialArrayifyStream(bindingStream, 1)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s1'), DF.namedNode('s1')],
- [DF.variable('p1'), DF.namedNode('p1')],
- [DF.variable('o1'), DF.namedNode('o1')],
- [DF.variable('p2'), DF.namedNode('p2')],
- [DF.variable('o2'), DF.namedNode('o2')],
+ [ DF.variable('s1'), DF.namedNode('s1') ],
+ [ DF.variable('p1'), DF.namedNode('p1') ],
+ [ DF.variable('o1'), DF.namedNode('o1') ],
+ [ DF.variable('p2'), DF.namedNode('p2') ],
+ [ DF.variable('o2'), DF.namedNode('o2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- streamingStore.addQuad(quad("o1", "p3", "o3"));
+ streamingStore.addQuad(quad('o1', 'p3', 'o3'));
- expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
+ await expect(partialArrayifyStream(bindingStream, 1)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s1'), DF.namedNode('s1')],
- [DF.variable('p1'), DF.namedNode('p1')],
- [DF.variable('o1'), DF.namedNode('o1')],
- [DF.variable('p2'), DF.namedNode('p3')],
- [DF.variable('o2'), DF.namedNode('o3')],
- ]).setContextEntry(new ActionContextKeyIsAddition(), true)
+ [ DF.variable('s1'), DF.namedNode('s1') ],
+ [ DF.variable('p1'), DF.namedNode('p1') ],
+ [ DF.variable('o1'), DF.namedNode('o1') ],
+ [ DF.variable('p2'), DF.namedNode('p3') ],
+ [ DF.variable('o2'), DF.namedNode('o3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- streamingStore.removeQuad(quad("o1", "p3", "o3"));
+ streamingStore.removeQuad(quad('o1', 'p3', 'o3'));
- expect(await partialArrayifyStream(bindingStream, 1)).toBeIsomorphicBindingsArray([
+ await expect(partialArrayifyStream(bindingStream, 1)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s1'), DF.namedNode('s1')],
- [DF.variable('p1'), DF.namedNode('p1')],
- [DF.variable('o1'), DF.namedNode('o1')],
- [DF.variable('p2'), DF.namedNode('p3')],
- [DF.variable('o2'), DF.namedNode('o3')],
- ]).setContextEntry(new ActionContextKeyIsAddition(), false)
+ [ DF.variable('s1'), DF.namedNode('s1') ],
+ [ DF.variable('p1'), DF.namedNode('p1') ],
+ [ DF.variable('o1'), DF.namedNode('o1') ],
+ [ DF.variable('p2'), DF.namedNode('p3') ],
+ [ DF.variable('o2'), DF.namedNode('o3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
]);
streamingStore.end();
@@ -146,29 +147,29 @@ describe('System test: QuerySparql (without polly)', () => {
});
describe('simple hypermedia queries', () => {
- let fetchData = {
- dataString: "",
- etag: "0",
- "cache-control": "max-age=2",
- age: "1"
- }
+ const fetchData = {
+ dataString: '',
+ etag: '0',
+ 'cache-control': 'max-age=2',
+ age: '1',
+ };
let server: http.Server;
let bindingStream: BindingsStream;
- beforeEach(async () => {
+ beforeEach(async() => {
server = http.createServer((req, res) => {
- if (req.method == "HEAD") {
- res.writeHead(200, "OK", {
- "etag": fetchData.etag,
- "content-type": "text/turtle",
- "cache-control": fetchData["cache-control"],
- age: fetchData.age
+ if (req.method === 'HEAD') {
+ res.writeHead(200, 'OK', {
+ etag: fetchData.etag,
+ 'content-type': 'text/turtle',
+ 'cache-control': fetchData['cache-control'],
+ age: fetchData.age,
});
} else {
- res.setHeader("etag", fetchData.etag);
- res.setHeader("content-type", "text/turtle");
- res.setHeader("cache-control", fetchData["cache-control"]);
- res.setHeader("age", fetchData.age);
+ res.setHeader('etag', fetchData.etag);
+ res.setHeader('content-type', 'text/turtle');
+ res.setHeader('cache-control', fetchData['cache-control']);
+ res.setHeader('age', fetchData.age);
res.write(fetchData.dataString);
}
res.end();
@@ -179,121 +180,121 @@ describe('System test: QuerySparql (without polly)', () => {
}));
});
- afterEach(async () => {
+ afterEach(async() => {
bindingStream.destroy();
await new Promise(resolve => server.close(() => {
resolve();
}));
await new Promise(resolve => setTimeout(() => resolve(), 500));
- })
+ });
- it('simple query', async () => {
- fetchData.dataString = " .";
- fetchData.etag = "0";
+ it('simple query', async() => {
+ fetchData.dataString = ' .';
+ fetchData.etag = '0';
bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s ?p ?o.
}`, {
- sources: ['http://localhost:8787']
+ sources: [ 'http://localhost:8787' ],
});
- await expect(new Promise((resolve) => bindingStream.once("data", (bindings) => {
+ await expect(new Promise(resolve => bindingStream.once('data', (bindings) => {
resolve(bindings);
}))).resolves.toEqualBindings(BF.bindings([
- [DF.variable('s'), DF.namedNode('http://localhost:8787/s1')],
- [DF.variable('p'), DF.namedNode('http://localhost:8787/p1')],
- [DF.variable('o'), DF.namedNode('http://localhost:8787/o1')],
+ [ DF.variable('s'), DF.namedNode('http://localhost:8787/s1') ],
+ [ DF.variable('p'), DF.namedNode('http://localhost:8787/p1') ],
+ [ DF.variable('o'), DF.namedNode('http://localhost:8787/o1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true));
});
- it('simple addition update query', async () => {
- fetchData.dataString = " .";
- fetchData.etag = "0";
+ it('simple addition update query', async() => {
+ fetchData.dataString = ' .';
+ fetchData.etag = '0';
bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s ?p ?o.
}`, {
- sources: ['http://localhost:8787']
+ sources: [ 'http://localhost:8787' ],
});
- await expect(new Promise((resolve) => bindingStream.once("data", (bindings) => {
+ await expect(new Promise(resolve => bindingStream.once('data', (bindings) => {
resolve(bindings);
}))).resolves.toEqualBindings(BF.bindings([
- [DF.variable('s'), DF.namedNode('http://localhost:8787/s1')],
- [DF.variable('p'), DF.namedNode('http://localhost:8787/p1')],
- [DF.variable('o'), DF.namedNode('http://localhost:8787/o1')],
+ [ DF.variable('s'), DF.namedNode('http://localhost:8787/s1') ],
+ [ DF.variable('p'), DF.namedNode('http://localhost:8787/p1') ],
+ [ DF.variable('o'), DF.namedNode('http://localhost:8787/o1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true));
- fetchData.dataString += "\n .";
- fetchData.etag = "1";
+ fetchData.dataString += '\n .';
+ fetchData.etag = '1';
- await expect(new Promise((resolve) => bindingStream.once("data", (bindings) => {
+ await expect(new Promise(resolve => bindingStream.once('data', (bindings) => {
resolve(bindings);
}))).resolves.toEqualBindings(BF.bindings([
- [DF.variable('s'), DF.namedNode('http://localhost:8787/s2')],
- [DF.variable('p'), DF.namedNode('http://localhost:8787/p2')],
- [DF.variable('o'), DF.namedNode('http://localhost:8787/o2')],
+ [ DF.variable('s'), DF.namedNode('http://localhost:8787/s2') ],
+ [ DF.variable('p'), DF.namedNode('http://localhost:8787/p2') ],
+ [ DF.variable('o'), DF.namedNode('http://localhost:8787/o2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true));
});
- it('simple deletion update query', async () => {
- fetchData.dataString = " .";
- fetchData.etag = "0";
+ it('simple deletion update query', async() => {
+ fetchData.dataString = ' .';
+ fetchData.etag = '0';
bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s ?p ?o.
}`, {
- sources: ['http://localhost:8787']
+ sources: [ 'http://localhost:8787' ],
});
- await expect(new Promise((resolve) => bindingStream.once("data", (bindings) => {
+ await expect(new Promise(resolve => bindingStream.once('data', (bindings) => {
resolve(bindings);
}))).resolves.toEqualBindings(BF.bindings([
- [DF.variable('s'), DF.namedNode('http://localhost:8787/s1')],
- [DF.variable('p'), DF.namedNode('http://localhost:8787/p1')],
- [DF.variable('o'), DF.namedNode('http://localhost:8787/o1')],
+ [ DF.variable('s'), DF.namedNode('http://localhost:8787/s1') ],
+ [ DF.variable('p'), DF.namedNode('http://localhost:8787/p1') ],
+ [ DF.variable('o'), DF.namedNode('http://localhost:8787/o1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true));
- fetchData.dataString = "";
- fetchData.etag = "1";
+ fetchData.dataString = '';
+ fetchData.etag = '1';
- await expect(new Promise((resolve) => bindingStream.once("data", (bindings) => {
+ await expect(new Promise(resolve => bindingStream.once('data', (bindings) => {
resolve(bindings);
}))).resolves.toEqualBindings(BF.bindings([
- [DF.variable('s'), DF.namedNode('http://localhost:8787/s1')],
- [DF.variable('p'), DF.namedNode('http://localhost:8787/p1')],
- [DF.variable('o'), DF.namedNode('http://localhost:8787/o1')],
+ [ DF.variable('s'), DF.namedNode('http://localhost:8787/s1') ],
+ [ DF.variable('p'), DF.namedNode('http://localhost:8787/p1') ],
+ [ DF.variable('o'), DF.namedNode('http://localhost:8787/o1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false));
});
- /*
- it('simple addition update query with optional', async () => {
- fetchData.dataString = " .";
- fetchData.etag = "0";
-
- bindingStream = await engine.queryBindings(`SELECT * WHERE {
- ?s ?o .
- OPTIONAL { ?s ?o . }
- }`, {
- sources: ['http://localhost:6565']
- });
-
- await new Promise((resolve) => bindingStream.once("data", async (bindings) => {
- console.log("s: ", bindings.get("s"), "p: ", bindings.get("p"), "o: ", bindings.get("o"));
- expect(true).toEqual(true);
- resolve();
- }));
-
- fetchData.dataString = " . .";
- fetchData.etag = "1";
-
- await new Promise((resolve) => bindingStream.once("data", async (bindings) => {
- console.log("s: ", bindings.get("s"), "p: ", bindings.get("p"), "o: ", bindings.get("o"));
- expect(true).toEqual(true);
- resolve();
- }));
- });
- */
+ // eslint-disable-next-line jest/no-commented-out-tests
+ // it('simple addition update query with optional', async () => {
+ // fetchData.dataString = " .";
+ // fetchData.etag = "0";
+ //
+ // bindingStream = await engine.queryBindings(`SELECT * WHERE {
+ // ?s ?o .
+ // OPTIONAL { ?s ?o . }
+ // }`, {
+ // sources: ['http://localhost:6565']
+ // });
+ //
+ // await new Promise((resolve) => bindingStream.once("data", async (bindings) => {
+ // console.log("s: ", bindings.get("s"), "p: ", bindings.get("p"), "o: ", bindings.get("o"));
+ // expect(true).toEqual(true);
+ // resolve();
+ // }));
+ //
+ // fetchData.dataString = " . .";
+ // fetchData.etag = "1";
+ //
+ // await new Promise((resolve) => bindingStream.once("data", async (bindings) => {
+ // console.log("s: ", bindings.get("s"), "p: ", bindings.get("p"), "o: ", bindings.get("o"));
+ // expect(true).toEqual(true);
+ // resolve();
+ // }));
+ // });
+ //
});
});
@@ -302,30 +303,22 @@ describe('System test: QuerySparql (with polly)', () => {
let bindingStream: BindingsStream;
let engine: QueryEngine;
- beforeEach(() => {
+ beforeEach(async() => {
engine = new QueryEngine();
- engine.invalidateHttpCache();
+ await engine.invalidateHttpCache();
});
afterEach(() => {
bindingStream.destroy();
- })
+ });
describe('simple SPO on a raw RDF document', () => {
it('with results', async() => {
bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s ?p ?o.
}`, { sources: [ 'https://www.rubensworks.net/' ]});
- let count = 0;
-
- await new Promise((resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
});
it('repeated with the same engine', async() => {
@@ -334,71 +327,23 @@ describe('System test: QuerySparql (with polly)', () => {
}`;
const context: QueryStringContext = { sources: [ 'https://www.rubensworks.net/' ]};
- let count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
- count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
- count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
- count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
- count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
- count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
});
it('repeated with the same engine and wait a bit until the polling is removed', async() => {
@@ -407,29 +352,13 @@ describe('System test: QuerySparql (with polly)', () => {
}`;
const context: QueryStringContext = { sources: [ 'https://www.rubensworks.net/' ]};
- let count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
- await new Promise((resolve) => setTimeout(()=>resolve(),10000));
+ await new Promise(resolve => setTimeout(() => resolve(), 10000));
- count = 0;
bindingStream = await engine.queryBindings(query, context);
- await new Promise(async (resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 100) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 100)).length).toBeGreaterThan(100);
});
describe('simple SPS', () => {
@@ -438,11 +367,7 @@ describe('System test: QuerySparql (with polly)', () => {
?s ?p ?s.
}`, { sources: [ 'https://www.rubensworks.net/' ]});
- await new Promise((resolve) => bindingStream.on("data", async () => {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }));
+ expect((await partialArrayifyStream(bindingStream, 1)).length).toBeGreaterThan(0);
});
});
@@ -453,15 +378,7 @@ describe('System test: QuerySparql (with polly)', () => {
?v0 ?name.
}`, { sources: [ 'https://www.rubensworks.net/' ]});
- let count = 0;
- await new Promise((resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 20) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 20)).length).toBeGreaterThan(20);
});
it('for the single source entry', async() => {
@@ -470,37 +387,21 @@ describe('System test: QuerySparql (with polly)', () => {
?v0 ?name.
}`, { sources: [ 'https://www.rubensworks.net/' ]});
- let count = 0;
- await new Promise((resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 20) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 20)).length).toBeGreaterThan(20);
});
describe('SHACL Compact Syntax Serialisation', () => {
- it('handles the query with SHACL compact syntax as a source', async () => {
+ it('handles the query with SHACL compact syntax as a source', async() => {
bindingStream = await engine.queryBindings(`SELECT * WHERE {
?s a .
}`, {
sources: [
'https://raw.githubusercontent.com/w3c/data-shapes/gh-pages/shacl-compact-syntax/' +
'tests/valid/basic-shape-iri.shaclc',
- ]
+ ],
});
- let count = 0;
- await new Promise((resolve) => bindingStream.on("data", async () => {
- count++;
- if (count > 0) {
- expect(true).toEqual(true);
- bindingStream.destroy();
- resolve();
- }
- }));
+ expect((await partialArrayifyStream(bindingStream, 1)).length).toBeGreaterThan(0);
});
});
});
diff --git a/engines/query-sparql-incremental/test/util.ts b/engines/query-sparql-incremental/test/util.ts
index 4832d284..4a88a14f 100644
--- a/engines/query-sparql-incremental/test/util.ts
+++ b/engines/query-sparql-incremental/test/util.ts
@@ -1,4 +1,4 @@
-import { resolve } from 'path';
+import { resolve } from 'node:path';
import { Polly } from '@pollyjs/core';
import { setupPolly } from 'setup-polly-jest';
@@ -11,25 +11,23 @@ Polly.register(FSPersister);
Polly.register(NodeHttpAdapter);
// Configure everything related to PollyJS
-// eslint-disable-next-line mocha/no-exports
export function usePolly() {
const pollyContext = mockHttp();
- // eslint-disable-next-line mocha/no-top-level-hooks
+ // eslint-disable-next-line jest/require-top-level-describe
beforeEach(() => {
pollyContext.polly.server.any().on('beforePersist', (req, recording) => {
recording.request.headers = recording.request.headers.filter(({ name }: any) => name !== 'user-agent');
});
});
- // eslint-disable-next-line mocha/no-top-level-hooks
+ // eslint-disable-next-line jest/require-top-level-describe
afterEach(async() => {
await pollyContext.polly.flush();
});
}
// Mocks HTTP requests using Polly.JS
-// eslint-disable-next-line mocha/no-exports
export function mockHttp() {
return setupPolly({
adapters: [ NodeHttpAdapter ],
diff --git a/engines/query-sparql-incremental/webpack.config.js b/engines/query-sparql-incremental/webpack.config.js
index 3052f3bf..3e02c92b 100644
--- a/engines/query-sparql-incremental/webpack.config.js
+++ b/engines/query-sparql-incremental/webpack.config.js
@@ -1,5 +1,6 @@
-const path = require('path');
+const path = require('node:path');
const superConfig = require('@comunica/actor-init-query/webpack.config');
+
superConfig.entry = [ '@babel/polyfill', path.resolve(__dirname, 'lib/index-browser.js') ];
superConfig.output.path = __dirname;
module.exports = superConfig;
diff --git a/eslint.config.js b/eslint.config.js
new file mode 100644
index 00000000..d881d21d
--- /dev/null
+++ b/eslint.config.js
@@ -0,0 +1,96 @@
+const config = require('@rubensworks/eslint-config');
+
+module.exports = config([
+ {
+ files: [ '**/*.ts' ],
+ languageOptions: {
+ parserOptions: {
+ tsconfigRootDir: __dirname,
+ project: [ './tsconfig.eslint.json' ],
+ },
+ },
+ },
+ {
+ rules: {
+ // Default
+ 'unicorn/consistent-destructuring': 'off',
+ 'unicorn/no-array-callback-reference': 'off',
+
+ // TODO: check if these can be enabled
+ 'ts/naming-convention': 'off',
+ 'ts/no-unsafe-return': 'off',
+ 'ts/no-unsafe-argument': 'off',
+ 'ts/no-unsafe-assignment': 'off',
+ 'import/no-nodejs-modules': 'off',
+
+ 'ts/no-require-imports': [ 'error', { allow: [
+ 'process/',
+ 'web-streams-ponyfill',
+ 'is-stream',
+ 'readable-stream-node-to-web',
+ 'stream-to-string',
+ ]}],
+ 'ts/no-var-requires': [ 'error', { allow: [
+ 'process/',
+ 'web-streams-ponyfill',
+ 'is-stream',
+ 'readable-stream-node-to-web',
+ 'stream-to-string',
+ ]}],
+ },
+ },
+ {
+ // Specific rules for NodeJS-specific files
+ files: [
+ '**/test/**/*.ts',
+ '**/test-browser/*-test.ts',
+ ],
+ rules: {
+ 'import/no-nodejs-modules': 'off',
+ 'unused-imports/no-unused-vars': 'off',
+ 'ts/no-require-imports': 'off',
+ 'ts/no-var-requires': 'off',
+ 'unicorn/filename-case': 'off',
+ },
+ },
+ {
+ // The config packages use an empty index.ts
+ files: [
+ 'engines/config-*/lib/index.ts',
+ ],
+ rules: {
+ 'import/unambiguous': 'off',
+ },
+ },
+ {
+ // Some test files import 'jest-rdf' which triggers this
+ // The http actors import 'cross-fetch/polyfill' which also triggers this
+ // Some jest tests import '../../lib' which triggers this
+ files: [
+ '**/test/*-test.ts',
+ '**/test-browser/*-test.ts',
+ ],
+ rules: {
+ 'import/no-unassigned-import': 'off',
+ },
+ },
+ {
+ // Files that do not require linting
+ ignores: [
+ 'setup-jest.js',
+ '**/engine-default.js',
+ '.github/**',
+ 'lerna.json',
+ ],
+ },
+ {
+ files: [ '**/*.js' ],
+ rules: {
+ 'ts/no-require-imports': 'off',
+ 'ts/no-var-requires': 'off',
+ 'import/no-nodejs-modules': 'off',
+ 'import/no-extraneous-dependencies': 'off',
+ 'import/extensions': 'off',
+ },
+ },
+]);
diff --git a/jest.config.js b/jest.config.js
index 7ba0809e..3726f7b9 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -1,26 +1,26 @@
module.exports = {
transform: {
- '^.+\\.ts$': 'ts-jest'
+ '^.+\\.ts$': [ 'ts-jest', {
+ isolatedModules: true,
+ }],
},
testRegex: '/test/.*-test.ts$',
moduleFileExtensions: [
'ts',
- 'js'
+ 'js',
],
globals: {
- 'ts-jest': {
- // Enabling this can fix issues when using prereleases of typings packages
- //isolatedModules: true
+ window: {
+ location: new URL('http://localhost'),
},
},
setupFilesAfterEnv: [ './setup-jest.js' ],
collectCoverage: true,
coveragePathIgnorePatterns: [
'/actor-query-operation-incremental-filter/',
- 'util.ts',
'/node_modules/',
'/mocks/',
- 'index.js'
+ 'index.js',
],
testEnvironment: 'node',
coverageThreshold: {
@@ -28,7 +28,7 @@ module.exports = {
branches: 100,
functions: 100,
lines: 100,
- statements: 100
- }
- }
+ statements: 100,
+ },
+ },
};
diff --git a/karma-setup.js b/karma-setup.js
index 1e1318ee..8c7dd191 100644
--- a/karma-setup.js
+++ b/karma-setup.js
@@ -1,15 +1,13 @@
-import jest from "jest-mock";
-import expect from "expect";
-
-//change timeout to 20 seconds
-jasmine.DEFAULT_TIMEOUT_INTERVAL = 20000;
+import expect from 'expect';
+import jest from 'jest-mock';
// Add missing Jest functions
window.test = window.it;
-window.test.each = (inputs) => (testName, test) =>
- inputs.forEach((args) => window.it(testName, () => test(...args)));
-window.test.todo = function () {
- return undefined;
+window.test.each = inputs => (testName, test) => {
+ for (const args of inputs) {
+ window.it(testName, () => test(...args));
+ }
};
+window.test.todo = function() {};
window.jest = jest;
window.expect = expect;
diff --git a/karma.config.js b/karma.config.js
index fd79af18..24b27723 100644
--- a/karma.config.js
+++ b/karma.config.js
@@ -1,94 +1,96 @@
+const Path = require('node:path');
const NodePolyfillPlugin = require('node-polyfill-webpack-plugin');
const webpack = require('webpack');
-const Path = require('path');
const testFiles = [
- 'engines/query-sparql-incremental/test-browser/QuerySparql-test.ts',
+ 'engines/query-sparql-incremental/test-browser/QuerySparql-test.ts',
];
// Based on https://github.com/tom-sherman/blog/blob/main/posts/02-running-jest-tests-in-a-browser.md
-module.exports = function (config) {
- config.set({
- basePath: '',
- plugins: [
- 'karma-webpack',
- 'karma-jasmine',
- 'karma-chrome-launcher',
- 'karma-firefox-launcher',
- 'karma-sourcemap-loader',
- 'karma-jasmine-html-reporter',
- ],
- frameworks: ['jasmine', 'webpack'],
+module.exports = function(config) {
+ config.set({
+ basePath: '',
+ plugins: [
+ 'karma-webpack',
+ 'karma-jasmine',
+ 'karma-chrome-launcher',
+ 'karma-firefox-launcher',
+ 'karma-sourcemap-loader',
+ 'karma-jasmine-html-reporter',
+ ],
+ frameworks: [ 'jasmine', 'webpack' ],
+
+ files: [ './karma-setup.js', ...testFiles ],
+ client: {
+ args: [ '--grep', '/^(?!.*no browser).*$/' ],
+ },
+ preprocessors: {
+ './karma-setup.js': [ 'webpack' ],
+ ...Object.fromEntries(testFiles.map(key => [ key, [ 'webpack', 'sourcemap' ]])),
+ },
- files: ['./karma-setup.js', ...testFiles],
- client: {
- args: ['--grep', '/^(?!.*no browser).*$/'],
+ webpack: {
+ mode: 'production',
+ devtool: 'inline-source-map',
+ resolve: {
+ alias: {
+ fs: false,
+ module: false,
+ [Path.resolve(__dirname, 'engines/query-sparql-incremental/test/util.js')]: Path.resolve(__dirname, 'engines/query-sparql-incremental/test-browser/util-browser.js'),
+ 'jest.unmock': false,
},
- preprocessors: {
- './karma-setup.js': ['webpack'],
- ...Object.fromEntries(testFiles.map(key => [ key, ['webpack', 'sourcemap'] ]))
+ extensions: [ '.js', '.jsx', '.ts', '.tsx' ],
+ },
+ module: {
+ rules: [
+ {
+ test: /\.tsx?$/u,
+ loader: 'ts-loader',
+ exclude: /node_modules/u,
+ options: { transpileOnly: true },
+ },
+ ],
+ },
+ plugins: [
+ new NodePolyfillPlugin({
+ additionalAliases: [ 'process' ],
+ }),
+ new webpack.DefinePlugin({
+ 'process.stdout.isTTY': false,
+ }),
+ ],
+ ignoreWarnings: [
+ {
+ module: /jest/u,
},
-
- webpack: {
- mode: 'production',
- devtool: 'inline-source-map',
- resolve: {
- alias: {
- fs: false,
- module: false,
- [Path.resolve(__dirname, 'engines/query-sparql-incremental/test/util.js')]: Path.resolve(__dirname, 'engines/query-sparql-incremental/test-browser/util-browser.js'),
- 'jest.unmock': false,
- },
- extensions: ['.js', '.jsx', '.ts', '.tsx'],
- },
- module: {
- rules: [
- {
- test: /\.tsx?$/,
- loader: 'ts-loader',
- exclude: /node_modules/,
- options: { transpileOnly: true },
- },
- ],
- },
- plugins: [
- new NodePolyfillPlugin(),
- new webpack.DefinePlugin({
- 'process.stdout.isTTY': false,
- }),
- ],
- ignoreWarnings: [
- {
- module: /jest/,
- },
- {
- module: /karma-setup/,
- },
- ],
- stats: {
- colors: true,
- hash: false,
- version: false,
- timings: false,
- assets: false,
- chunks: false,
- modules: false,
- reasons: false,
- children: false,
- source: false,
- errors: false,
- errorDetails: false,
- warnings: false,
- publicPath: false,
- },
- performance: {
- hints: false,
- }
+ {
+ module: /karma-setup/u,
},
+ ],
+ stats: {
+ colors: true,
+ hash: false,
+ version: false,
+ timings: false,
+ assets: false,
+ chunks: false,
+ modules: false,
+ reasons: false,
+ children: false,
+ source: false,
+ errors: false,
+ errorDetails: false,
+ warnings: false,
+ publicPath: false,
+ },
+ performance: {
+ hints: false,
+ },
+ },
- browsers: [
- 'ChromeHeadless',
- 'FirefoxHeadless',
- ],
- });
+ browsers: [
+ 'ChromeHeadless',
+ 'FirefoxHeadless',
+ ],
+ });
};
diff --git a/lerna.js b/lerna.js
deleted file mode 100644
index e4b13734..00000000
--- a/lerna.js
+++ /dev/null
@@ -1,139 +0,0 @@
-const {loadPackages, exec, iter} = require('lerna-script')
-const checkDeps = require('depcheck')
-const path = require('path');
-const { readFileSync, writeFileSync, readdirSync, readdir } = require('fs');
-
-async function depInfo({ location, name }, log) {
- const folders = readdirSync(location, { withFileTypes: true });
-
- const { files } = JSON.parse(readFileSync(path.join(location, 'package.json'), 'utf8'));
- let ignore = files ? folders.filter(elem => files.every(file => !file.startsWith(elem.name))) : folders;
- ignore = ignore.map(x => x.isDirectory() ? `${x.name}/**` : x.name)
-
- const {dependencies, devDependencies, missing, using} = await checkDeps(location, { ignorePatterns: ignore }, val => val);
-
- return {
- unusedDeps: [...dependencies, ...devDependencies].filter(elem => !Object.keys(using).includes(elem)),
- missingDeps: Object.keys(missing),
- allDeps: Object.keys(using),
- }
-}
-
-async function depfixTask(log) {
- const packages = (await (log.packages || loadPackages())).filter(package => package.location.startsWith(path.join(__dirname, '/packages')));
- const resolutions = Object.keys(JSON.parse(readFileSync(path.join(__dirname, 'package.json'), 'utf8')).resolutions ?? {});
-
- await iter.forEach(packages, { log })(async package => {
- log.info(package.name)
-
- const { missingDeps, unusedDeps, allDeps } = await depInfo(package);
-
- if (allDeps.includes(package.name))
- log.error(' package is a dependency of itself')
-
- if (missingDeps.length > 0) {
- try {
- log.info(' add:', missingDeps.join(', '))
- await exec.command(package)(`yarn add ${missingDeps.join(' ')}`);
- } catch (e) {
- for (const dep of missingDeps) {
- try {
- await exec.command(package)(`yarn add ${dep}`);
- } catch (e) {
- log.error(' CANNOT ADD:', dep);
- }
- }
- }
- }
-
- if (unusedDeps.length > 0) {
- try {
- log.info(' remove:', unusedDeps.join(', '))
- await exec.command(package)(`yarn remove ${unusedDeps.join(' ')}`);
- } catch (e) {
- for (const dep of unusedDeps) {
- try {
- await exec.command(package)(`yarn remove ${dep}`);
- } catch (e) {
- log.error(' CANNOT REMOVE:', dep);
- }
- }
- }
- }
-
- // Now fix up any resolutions to use a star ("*") import
- const packageJson = JSON.parse(readFileSync(path.join(package.location, 'package.json'), 'utf8'));
- for (const dep of Object.keys(packageJson.dependencies ?? {})) {
- if (resolutions.includes(dep) && packageJson.dependencies[dep] !== '*') {
- log.info(' converting to \'*\' import for', dep)
- packageJson.dependencies[dep] = '*';
- }
- }
- writeFileSync(path.join(package.location, 'package.json'), JSON.stringify(packageJson, null, 2) + '\n');
- })
-}
-
-async function depcheckTask(log) {
- const packages = (await (log.packages || loadPackages())).filter(package => package.location.startsWith(path.join(__dirname, '/packages')));
- const resolutions = Object.keys(JSON.parse(readFileSync(path.join(__dirname, 'package.json'), 'utf8')).resolutions ?? {});
-
- return iter.forEach(packages, { log })(async package => {
- const { missingDeps, unusedDeps, allDeps } = await depInfo(package)
-
- if (missingDeps.length > 0) {
- throw new Error(`Missing dependencies: ${missingDeps.join(', ')} from ${package.name}`);
- }
-
- if (unusedDeps.length > 0) {
- throw new Error(`Extra dependencies: ${unusedDeps.join(', ')} in ${package.name}`);
- }
-
- if (allDeps.includes(package.name))
- throw new Error(`${package.name} is a dependency of itself`);
-
-
- // Now check all resolutions use a star ("*") import
- const packageJson = JSON.parse(readFileSync(path.join(package.location, 'package.json'), 'utf8'));
- for (const dep of Object.keys(packageJson.dependencies ?? {})) {
- if (resolutions.includes(dep) && packageJson.dependencies[dep] !== '*') {
- throw new Error(`Resolution not using \'*\' import for ${dep} in ${package.name}`);
- }
- }
- })
-}
-
-module.exports.depfixTask = depfixTask
-module.exports.depcheckTask = depcheckTask
-
-const ncu = require('npm-check-updates');
-async function updateTask(log) {
- const packages = (await (log.packages || loadPackages())).filter(
- package => package.location.startsWith(path.join(__dirname, '/packages')) ||
- package.location.startsWith(path.join(__dirname, '/engines'))
- );
-
- await iter.forEach(packages, { log })(async package => {
- const upgraded = await ncu.run({
- // Pass any cli option
- packageFile: path.join(package.location, 'package.json'),
- upgrade: true,
- target: 'minor'
- });
- log.info(package.name, upgraded);
- })
-}
-
-async function updateTaskMajor(log) {
- const packages = (await (log.packages || loadPackages())).filter(package => package.location.startsWith(path.join(__dirname, '/packages')));
-
- await iter.forEach(packages, { log })(async package => {
- const upgraded = await ncu.run({
- // Pass any cli option
- packageFile: path.join(package.location, 'package.json'),
- });
- log.info(package.name, upgraded);
- })
-}
-
-module.exports.updateTask = updateTask
-module.exports.updateTaskMajor = updateTaskMajor
diff --git a/lerna.json b/lerna.json
index 55493d57..26048f5b 100644
--- a/lerna.json
+++ b/lerna.json
@@ -22,7 +22,5 @@
"loglevel": "success",
"registry": "https://registry.npmjs.org/",
"npmClient": "yarn",
- "npmClientArgs": [
- "--pure-lockfile"
- ]
+ "lerna-script-tasks": "./node_modules/@comunica/utils-monorepo/lib/depCheck.js"
}
diff --git a/package.json b/package.json
index 0a2898b8..999d9458 100644
--- a/package.json
+++ b/package.json
@@ -5,14 +5,42 @@
"engines/*",
"packages/*"
],
+ "scripts": {
+ "pre-commit": "yarn run build && yarn run lint-no-cache && yarn run depcheck && yarn run test-browser && yarn run test",
+ "test-changed": "lerna run test --since HEAD",
+ "build-changed": "lerna run build --since HEAD",
+ "test": "jest",
+ "test-ci": "jest --ci --maxWorkers=4 --coverage",
+ "test-browser": "karma start karma.config.js --single-run",
+ "lint": "eslint . --cache",
+ "lint-no-cache": "eslint .",
+ "lint-fix": "eslint . --fix",
+ "build": "npm run build-ts && npm run build-components",
+ "build-ts": "tsc",
+ "build-components": "componentsjs-generator engines/* packages/*",
+ "build-watch": "nodemon -e ts --ignore '*.d.ts' --exec yarn run build",
+ "build-watch:ts": "tsc --watch",
+    "build-watch:components": "nodemon -e d.ts --exec yarn run build-components",
+ "publish": "yarn install && yarn pre-commit && yarn publish-release",
+ "publish-release": "lerna publish",
+ "publish-bare": "lerna exec -- npm publish --silent",
+ "publish-canary": "yarn run build && lerna version prerelease --preid alpha.$(.github/get-next-alpha-version.sh) --exact --ignore-scripts --force-publish --no-push --no-git-tag-version --yes && git update-index --assume-unchanged $(git ls-files | tr '\\n' ' ') && lerna publish from-package --no-git-reset --pre-dist-tag next --force-publish --no-push --no-git-tag-version --yes && git update-index --no-assume-unchanged $(git ls-files | tr '\\n' ' ') && git checkout .",
+ "doc": "typedoc",
+ "postinstall": "yarn run build && lerna run prepare",
+ "version": "manual-git-changelog onversion",
+ "depcheck": "lerna-script depcheckTask",
+ "depcheck-fix": "lerna-script depfixTask"
+ },
"devDependencies": {
- "rdf-terms": "^1.9.1",
- "sparqlalgebrajs": "^4.0.5",
"@babel/core": "^7.20.2",
"@babel/preset-env": "^7.20.2",
+ "@comunica/jest": "3.2.1",
+ "@comunica/utils-monorepo": "^4.0.1",
+ "@playwright/test": "^1.47.2",
"@pollyjs/adapter-node-http": "^6.0.5",
"@pollyjs/core": "^6.0.5",
"@pollyjs/persister-fs": "^6.0.5",
+ "@rubensworks/eslint-config": "^3.0.0",
"@strictsoftware/typedoc-plugin-monorepo": "^0.4.2",
"@types/jest": "^29.0.0",
"@types/node": "^20.0.0",
@@ -20,8 +48,6 @@
"@types/readable-stream": "^4.0.0",
"@types/setup-polly-jest": "^0.5.2",
"@types/ws": "^8.5.10",
- "@typescript-eslint/eslint-plugin": "5.62.0",
- "@typescript-eslint/parser": "^5.43.0",
"abort-controller": "^3.0.0",
"arrayify-stream": "^2.0.1",
"asynciterator": "^3.9.0",
@@ -43,15 +69,17 @@
"lerna-script": "^1.4.0",
"manual-git-changelog": "^1.0.2",
"memory-streams": "^0.1.3",
- "node-polyfill-webpack-plugin": "^3.0.0",
+ "node-polyfill-webpack-plugin": "^4.0.0",
"nodemon": "^3.0.0",
"npm-check-updates": "^16.4.1",
"pre-commit": "^1.2.2",
"rdf-data-factory": "^1.1.1",
"rdf-quad": "^1.5.0",
+ "rdf-terms": "^1.9.1",
"rdf-test-suite": "^1.19.3",
"rdf-test-suite-ldf": "^1.4.2",
"setup-polly-jest": "^0.11.0",
+ "sparqlalgebrajs": "^4.0.5",
"stream-to-string": "^1.2.0",
"streamify-array": "^1.0.1",
"streamify-string": "^1.0.1",
@@ -59,11 +87,9 @@
"ts-loader": "^9.4.1",
"typedoc": "^0.25.0",
"typescript": "^5.0.0",
- "webpack": "^5.75.0",
- "webpack-cli": "^5.0.0",
- "ws": "^8.14.2",
- "@comunica/jest": "3.2.1",
- "@rubensworks/eslint-config": "^3.0.0"
+ "webpack": "^5.88.2",
+ "webpack-cli": "^5.1.4",
+ "ws": "^8.14.2"
},
"pre-commit": [
"build",
@@ -72,34 +98,7 @@
"test",
"test-browser"
],
- "scripts": {
- "pre-commit": "yarn run build && yarn run lint-no-cache && yarn run depcheck && yarn run test-browser && yarn run test",
- "test-changed": "lerna run test --since HEAD",
- "build-changed": "lerna run build --since HEAD",
- "test": "jest",
- "test-ci": "jest --ci --maxWorkers=4 --coverage",
- "test-browser": "karma start karma.config.js --single-run",
- "lint": "eslint . --ext .ts --cache",
- "lint-no-cache": "eslint . --ext .ts",
- "lint-fix": "eslint . --ext .ts --fix",
- "build": "npm run build-ts && npm run build-components",
- "build-ts": "tsc",
- "build-components": "componentsjs-generator engines/* packages/*",
- "build-watch": "nodemon -e ts --ignore '*.d.ts' --exec yarn run build",
- "build-watch:ts": "tsc --watch",
- "build-watch:components": "nodemon -e d.ts --exec yarn run build:components",
- "publish": "yarn install && yarn pre-commit && yarn publish-release",
- "publish-release": "lerna publish",
- "publish-bare": "lerna exec -- npm publish --silent",
- "publish-canary": "yarn run build && lerna version prerelease --preid alpha.$(.github/get-next-alpha-version.sh) --exact --ignore-scripts --force-publish --no-push --no-git-tag-version --yes && git update-index --assume-unchanged $(git ls-files | tr '\\n' ' ') && lerna publish from-package --no-git-reset --pre-dist-tag next --force-publish --no-push --no-git-tag-version --yes && git update-index --no-assume-unchanged $(git ls-files | tr '\\n' ' ') && git checkout .",
- "doc": "typedoc",
- "postinstall": "yarn run build && lerna run prepare",
- "version": "manual-git-changelog onversion",
- "depcheck": "lerna-script depcheckTask",
- "depcheck-fix": "lerna-script depfixTask"
- },
"resolutions": {
"@rdfjs/types": "1.1.0"
- },
- "dependencies": {}
+ }
}
diff --git a/packages/actor-guard-naive/README.md b/packages/actor-guard-naive/README.md
index 63eb2e0f..42768372 100644
--- a/packages/actor-guard-naive/README.md
+++ b/packages/actor-guard-naive/README.md
@@ -17,7 +17,7 @@ After installing, this package can be added to your engine's configuration as fo
{
"@context": [
...
- "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-guard-naive/^1.0.0/components/context.jsonld"
+ "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-guard-naive/^1.0.0/components/context.jsonld"
],
"actors": [
...
diff --git a/packages/actor-guard-naive/lib/ActorGuardNaive.ts b/packages/actor-guard-naive/lib/ActorGuardNaive.ts
index 98533789..a6471838 100644
--- a/packages/actor-guard-naive/lib/ActorGuardNaive.ts
+++ b/packages/actor-guard-naive/lib/ActorGuardNaive.ts
@@ -1,4 +1,4 @@
-import { EventEmitter } from 'events';
+import { EventEmitter } from 'node:events';
import type { MediatorDereferenceRdf } from '@comunica/bus-dereference-rdf';
import type { IActorTest } from '@comunica/core';
import type { IActionGuard, IActorGuardOutput, IActorGuardArgs } from '@incremunica/bus-guard';
@@ -17,7 +17,7 @@ export class ActorGuardNaive extends ActorGuard {
super(args);
}
- public async test(action: IActionGuard): Promise {
+ public async test(_action: IActionGuard): Promise {
return true;
}
@@ -40,31 +40,34 @@ export class ActorGuardNaive extends ActorGuard {
resourceWatch.stopFunction();
});
- resourceWatch.events.on('update', async() => {
+ resourceWatch.events.on('update', () => {
guardEvents.emit('modified');
const deletionStore = action.streamingSource.store.copyOfStore();
const additionArray: Quad[] = [];
- const responseGet = await this.mediatorDereferenceRdf.mediate({
+ this.mediatorDereferenceRdf.mediate({
context: action.context,
url: action.url,
- });
-
- responseGet.data.on('data', quad => {
- if (deletionStore.has(quad)) {
- deletionStore.delete(quad);
- return;
- }
- additionArray.push(quad);
- });
+ }).then((responseGet) => {
+ responseGet.data.on('data', (quad) => {
+ if (deletionStore.has(quad)) {
+ deletionStore.delete(quad);
+ return;
+ }
+ additionArray.push(quad);
+ });
- responseGet.data.on('end', () => {
- for (const quad of deletionStore) {
- action.streamingSource.store.removeQuad(quad);
- }
- for (const quad of additionArray) {
- action.streamingSource.store.addQuad(quad);
- }
- guardEvents.emit('up-to-date');
+ responseGet.data.on('end', () => {
+ for (const quad of deletionStore) {
+ action.streamingSource.store.removeQuad(quad);
+ }
+ for (const quad of additionArray) {
+ action.streamingSource.store.addQuad(quad);
+ }
+ guardEvents.emit('up-to-date');
+ });
+ }).catch((error) => {
+ // eslint-disable-next-line no-console
+ console.warn(error);
});
});
diff --git a/packages/actor-guard-naive/package.json b/packages/actor-guard-naive/package.json
index 03da74b4..9e665b87 100644
--- a/packages/actor-guard-naive/package.json
+++ b/packages/actor-guard-naive/package.json
@@ -3,43 +3,43 @@
"version": "1.3.0",
"description": "A naive guard actor",
"lsd:module": true,
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica.git",
"directory": "packages/actor-guard-naive"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"comunica",
"actor",
"guard",
"naive"
],
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
},
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
"components",
"lib/**/*.d.ts",
"lib/**/*.js",
"lib/**/*.js.map"
],
+ "scripts": {
+ "build": "npm run build:ts && npm run build:components",
+ "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
+ "build:components": "componentsjs-generator"
+ },
"dependencies": {
"@comunica/bus-dereference-rdf": "^3.2.1",
"@comunica/core": "^3.2.1",
"@incremunica/bus-guard": "^1.3.0",
"@incremunica/bus-resource-watch": "^1.3.0"
- },
- "scripts": {
- "build": "npm run build:ts && npm run build:components",
- "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
- "build:components": "componentsjs-generator"
}
}
diff --git a/packages/actor-guard-naive/test/ActorGuardNaive-test.ts b/packages/actor-guard-naive/test/ActorGuardNaive-test.ts
index 95ec8526..fff17836 100644
--- a/packages/actor-guard-naive/test/ActorGuardNaive-test.ts
+++ b/packages/actor-guard-naive/test/ActorGuardNaive-test.ts
@@ -1,12 +1,16 @@
-import {Bus} from '@comunica/core';
-import {IActionDereferenceRdf, MediatorDereferenceRdf} from "@comunica/bus-dereference-rdf";
-import {IActionGuard} from "@incremunica/bus-guard";
-import {Transform} from "readable-stream";
+import EventEmitter = require('events');
+import type { IActionDereferenceRdf, MediatorDereferenceRdf } from '@comunica/bus-dereference-rdf';
+import { Bus } from '@comunica/core';
+import type { IActionGuard } from '@incremunica/bus-guard';
+import type {
+ IActionResourceWatch,
+ IActorResourceWatchOutput,
+ MediatorResourceWatch,
+} from '@incremunica/bus-resource-watch';
+import { Store, DataFactory } from 'n3';
+import type { Transform } from 'readable-stream';
import 'jest-rdf';
-import { Store, DataFactory} from "n3";
-import EventEmitter = require("events");
-import {ActorGuardNaive} from "../lib";
-import {IActionResourceWatch, IActorResourceWatchOutput, MediatorResourceWatch} from "@incremunica/bus-resource-watch";
+import { ActorGuardNaive } from '../lib';
const quad = require('rdf-quad');
const streamifyArray = require('streamify-array');
@@ -16,7 +20,9 @@ function captureEvents(item: EventEmitter, ...events: string[]) {
const counts = (item)._eventCounts = Object.create(null);
for (const event of events) {
counts[event] = 0;
- item.on(event, () => { counts[event]++; });
+ item.on(event, () => {
+ counts[event]++;
+ });
}
return item;
}
@@ -41,7 +47,7 @@ describe('ActorGuardNaive', () => {
let addQuadFn = jest.fn();
let stopFn = jest.fn();
let onFn: () => void;
- let hasEnded: {value: boolean};
+ let hasEnded: { value: boolean };
beforeEach(() => {
quadArray = [];
@@ -51,35 +57,35 @@ describe('ActorGuardNaive', () => {
removeQuadFn = jest.fn();
addQuadFn = jest.fn();
stopFn = jest.fn();
- hasEnded = {value: false};
+ hasEnded = { value: false };
mediatorDereferenceRdf = {
- mediate: async (action: IActionDereferenceRdf) => {
+ mediate: async(action: IActionDereferenceRdf) => {
return {
- data: streamifyArray(quadArray)
+ data: streamifyArray(quadArray),
};
- }
- }
+ },
+ };
mediatorResourceWatch = {
- mediate: async (action: IActionResourceWatch): Promise => {
+ mediate: async(action: IActionResourceWatch): Promise => {
return {
events: changeNotificationEventEmitter,
- stopFunction: stopFn
+ stopFunction: stopFn,
};
- }
- }
+ },
+ };
actor = new ActorGuardNaive({
name: 'actor',
bus,
mediatorResourceWatch,
- mediatorDereferenceRdf
+ mediatorDereferenceRdf,
});
action = {
context: {},
- url: "www.test.com",
+ url: 'www.test.com',
metadata: {},
streamingSource: {
store: {
@@ -90,7 +96,7 @@ describe('ActorGuardNaive', () => {
return hasEnded.value;
},
import: (stream: Transform) => {
- streamingStoreEventEmitter.emit("data", stream);
+ streamingStoreEventEmitter.emit('data', stream);
return stream;
},
copyOfStore: () => {
@@ -101,139 +107,130 @@ describe('ActorGuardNaive', () => {
},
removeQuad: (quad: any) => removeQuadFn(quad),
addQuad: (quad: any) => addQuadFn(quad),
- }
- }
- }
+ },
+ },
+ };
});
- it('should test', () => {
- return expect(actor.test(action)).resolves.toBeTruthy();
+ it('should test', async() => {
+ await expect(actor.test(action)).resolves.toBeTruthy();
});
- it('should stop resource watcher if store stops', async () => {
+ it('should stop resource watcher if store stops', async() => {
await actor.run(action);
onFn();
- expect(stopFn).toHaveBeenCalled();
+ expect(stopFn).toHaveBeenCalledWith();
});
-
- it('should stop resource when the store has stopped really early', async () => {
+ it('should stop resource when the store has stopped really early', async() => {
hasEnded.value = true;
await actor.run(action);
- expect(stopFn).toHaveBeenCalled();
+ expect(stopFn).toHaveBeenCalledWith();
});
- it('should attach a positive changes stream', async () => {
+ it('should attach a positive changes stream', async() => {
quadArrayStore = [
quad('s1', 'p1', 'o1'),
- quad('s2', 'p2', 'o2')
+ quad('s2', 'p2', 'o2'),
];
- let {guardEvents} = await actor.run(action);
+ const { guardEvents } = await actor.run(action);
captureEvents(guardEvents, 'modified', 'up-to-date');
- expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'update')).toEqual(1);
- expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'delete')).toEqual(1);
+ expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'update')).toBe(1);
+ expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'delete')).toBe(1);
quadArray = [
quad('s1', 'p1', 'o1'),
quad('s2', 'p2', 'o2'),
- quad('s3', 'p3', 'o3')
+ quad('s3', 'p3', 'o3'),
];
- changeNotificationEventEmitter.emit("update");
+ changeNotificationEventEmitter.emit('update');
await new Promise(resolve => guardEvents.once('up-to-date', resolve));
- expect((guardEvents)._eventCounts.modified).toEqual(1);
- expect((guardEvents)._eventCounts['up-to-date']).toEqual(1);
+ expect((guardEvents)._eventCounts.modified).toBe(1);
+ expect((guardEvents)._eventCounts['up-to-date']).toBe(1);
expect(addQuadFn).toHaveBeenCalledTimes(1);
expect(addQuadFn).toHaveBeenCalledWith(quad('s3', 'p3', 'o3'));
});
- it('should attach a negative changes stream', async () => {
+ it('should attach a negative changes stream', async() => {
quadArrayStore = [
quad('s1', 'p1', 'o1'),
quad('s2', 'p2', 'o2'),
- quad('s3', 'p3', 'o3')
+ quad('s3', 'p3', 'o3'),
];
- let {guardEvents} = await actor.run(action);
+ const { guardEvents } = await actor.run(action);
captureEvents(guardEvents, 'modified', 'up-to-date');
- expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'update')).toEqual(1);
- expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'delete')).toEqual(1);
+ expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'update')).toBe(1);
+ expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'delete')).toBe(1);
quadArray = [
quad('s1', 'p1', 'o1'),
- quad('s2', 'p2', 'o2')
+ quad('s2', 'p2', 'o2'),
];
- changeNotificationEventEmitter.emit("update");
+ changeNotificationEventEmitter.emit('update');
await new Promise(resolve => guardEvents.once('up-to-date', resolve));
- expect((guardEvents)._eventCounts.modified).toEqual(1);
- expect((guardEvents)._eventCounts['up-to-date']).toEqual(1);
+ expect((guardEvents)._eventCounts.modified).toBe(1);
+ expect((guardEvents)._eventCounts['up-to-date']).toBe(1);
expect(removeQuadFn).toHaveBeenCalledTimes(1);
expect(removeQuadFn).toHaveBeenCalledWith(
DataFactory.quad(
DataFactory.namedNode('s3'),
DataFactory.namedNode('p3'),
- DataFactory.namedNode('o3')
- )
+ DataFactory.namedNode('o3'),
+ ),
);
});
- it('should handle delete events', async () => {
+ it('should handle delete events', async() => {
quadArrayStore = [
quad('s1', 'p1', 'o1'),
quad('s2', 'p2', 'o2'),
- quad('s3', 'p3', 'o3')
+ quad('s3', 'p3', 'o3'),
];
- let {guardEvents} = await actor.run(action);
+ const { guardEvents } = await actor.run(action);
captureEvents(guardEvents, 'modified', 'up-to-date');
- expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'update')).toEqual(1);
- expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'delete')).toEqual(1);
+ expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'update')).toBe(1);
+ expect(EventEmitter.listenerCount(changeNotificationEventEmitter, 'delete')).toBe(1);
- let updatePromise = new Promise(resolve => guardEvents.once('up-to-date', resolve));
+ const updatePromise = new Promise(resolve => guardEvents.once('up-to-date', resolve));
- changeNotificationEventEmitter.emit("delete");
+ changeNotificationEventEmitter.emit('delete');
await updatePromise;
- expect((guardEvents)._eventCounts.modified).toEqual(1);
- expect((guardEvents)._eventCounts['up-to-date']).toEqual(1);
+ expect((guardEvents)._eventCounts.modified).toBe(1);
+ expect((guardEvents)._eventCounts['up-to-date']).toBe(1);
expect(removeQuadFn).toHaveBeenCalledTimes(3);
- expect(removeQuadFn).toHaveBeenNthCalledWith(1,
- DataFactory.quad(
- DataFactory.namedNode('s1'),
- DataFactory.namedNode('p1'),
- DataFactory.namedNode('o1')
- )
- );
- expect(removeQuadFn).toHaveBeenNthCalledWith(2,
- DataFactory.quad(
- DataFactory.namedNode('s2'),
- DataFactory.namedNode('p2'),
- DataFactory.namedNode('o2')
- )
- );
- expect(removeQuadFn).toHaveBeenNthCalledWith(3,
- DataFactory.quad(
- DataFactory.namedNode('s3'),
- DataFactory.namedNode('p3'),
- DataFactory.namedNode('o3')
- )
- );
+ expect(removeQuadFn).toHaveBeenNthCalledWith(1, DataFactory.quad(
+ DataFactory.namedNode('s1'),
+ DataFactory.namedNode('p1'),
+ DataFactory.namedNode('o1'),
+ ));
+ expect(removeQuadFn).toHaveBeenNthCalledWith(2, DataFactory.quad(
+ DataFactory.namedNode('s2'),
+ DataFactory.namedNode('p2'),
+ DataFactory.namedNode('o2'),
+ ));
+ expect(removeQuadFn).toHaveBeenNthCalledWith(3, DataFactory.quad(
+ DataFactory.namedNode('s3'),
+ DataFactory.namedNode('p3'),
+ DataFactory.namedNode('o3'),
+ ));
});
});
});
-
-
diff --git a/packages/actor-merge-bindings-context-is-addition/README.md b/packages/actor-merge-bindings-context-is-addition/README.md
index 067d0a78..0a2c5dcf 100644
--- a/packages/actor-merge-bindings-context-is-addition/README.md
+++ b/packages/actor-merge-bindings-context-is-addition/README.md
@@ -17,7 +17,7 @@ After installing, this package can be added to your engine's configuration as fo
{
"@context": [
...
- "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-merge-bindings-context-is-addition/^1.0.0/components/context.jsonld"
+ "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-merge-bindings-context-is-addition/^1.0.0/components/context.jsonld"
],
"actors": [
...
diff --git a/packages/actor-merge-bindings-context-is-addition/lib/ActorMergeBindingsContextIsAddition.ts b/packages/actor-merge-bindings-context-is-addition/lib/ActorMergeBindingsContextIsAddition.ts
index 1ba6dd60..cc908bd1 100644
--- a/packages/actor-merge-bindings-context-is-addition/lib/ActorMergeBindingsContextIsAddition.ts
+++ b/packages/actor-merge-bindings-context-is-addition/lib/ActorMergeBindingsContextIsAddition.ts
@@ -1,16 +1,18 @@
-import type { IActorTest } from '@comunica/core';
+import type {
+ IActorMergeBindingsContextOutput,
+ IActorMergeBindingsContextArgs,
+ IActionMergeBindingsContext,
+} from '@comunica/bus-merge-bindings-context';
import {
ActorMergeBindingsContext,
- IActorMergeBindingsContextOutput,
- IActorMergeBindingsContextArgs, IActionMergeBindingsContext
} from '@comunica/bus-merge-bindings-context';
+import type { IActorTest } from '@comunica/core';
import type { IActionContextKey } from '@comunica/types';
/**
* A incremunica actor for the creation of merge handlers for binding context keys.
*/
export class ActorMergeBindingsContextIsAddition extends ActorMergeBindingsContext {
-
public constructor(args: IActorMergeBindingsContextArgs) {
super(args);
}
@@ -20,18 +22,18 @@ export class ActorMergeBindingsContextIsAddition extends ActorMergeBindingsConte
}
public async run(_action: IActionMergeBindingsContext): Promise {
- //TODO change to boolean[] => boolean when comuncia V4
+    // TODO change to boolean[] => boolean when comunica V4
const handlerFunc: (...args: any[]) => any = (...args: boolean[]): boolean => args.reduce((acc, cur) => acc && cur);
return {
mergeHandlers: {
- "isAddition": {
- run: handlerFunc
- }
- }
+ isAddition: {
+ run: handlerFunc,
+ },
+ },
};
}
}
export class ActionContextKeyIsAddition implements IActionContextKey {
- readonly name = 'isAddition';
+ public readonly name = 'isAddition';
}
diff --git a/packages/actor-merge-bindings-context-is-addition/package.json b/packages/actor-merge-bindings-context-is-addition/package.json
index 8a24b87f..2a1e9f3f 100644
--- a/packages/actor-merge-bindings-context-is-addition/package.json
+++ b/packages/actor-merge-bindings-context-is-addition/package.json
@@ -3,40 +3,42 @@
"version": "1.3.0",
"description": "An incremunica Merge Bindings Context actor that merges the isAddition attribute.",
"lsd:module": true,
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica.git",
"directory": "packages/actor-merge-bindings-context-is-addition"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"comunica",
"actor",
"merge-bindings-context",
"isAddition"
],
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
},
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
"components",
"lib/**/*.d.ts",
"lib/**/*.js",
"lib/**/*.js.map"
],
- "dependencies": {
- "@comunica/bus-merge-bindings-context": "^3.2.0"
- },
"scripts": {
"build": "npm run build:ts && npm run build:components",
"build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
"build:components": "componentsjs-generator"
+ },
+ "dependencies": {
+ "@comunica/bus-merge-bindings-context": "^3.2.0",
+ "@comunica/core": "^3.2.0",
+ "@comunica/types": "^3.2.0"
}
}
diff --git a/packages/actor-merge-bindings-context-is-addition/test/ActorMergeBindingsContextIsAddition-test.ts b/packages/actor-merge-bindings-context-is-addition/test/ActorMergeBindingsContextIsAddition-test.ts
index f343963a..c9669875 100644
--- a/packages/actor-merge-bindings-context-is-addition/test/ActorMergeBindingsContextIsAddition-test.ts
+++ b/packages/actor-merge-bindings-context-is-addition/test/ActorMergeBindingsContextIsAddition-test.ts
@@ -1,21 +1,21 @@
+import type { BindingsFactory } from '@comunica/bindings-factory';
import { ActionContext, Bus } from '@comunica/core';
import type { IActionContext } from '@comunica/types';
-import {ActionContextKeyIsAddition, ActorMergeBindingsContextIsAddition} from "../lib";
-import {DataFactory} from "rdf-data-factory";
-import {BindingsFactory} from "@comunica/bindings-factory";
+import { DevTools } from '@incremunica/dev-tools';
+import { DataFactory } from 'rdf-data-factory';
+import { ActionContextKeyIsAddition, ActorMergeBindingsContextIsAddition } from '../lib';
import '@incremunica/incremental-jest';
-import {DevTools} from "@incremunica/dev-tools";
-let DF = new DataFactory();
+const DF = new DataFactory();
describe('ActorMergeBindingsContextIsAddition', () => {
let bus: any;
let actor: ActorMergeBindingsContextIsAddition;
let context: IActionContext;
- beforeEach(async () => {
- bus = new Bus({name: 'bus'});
- actor = new ActorMergeBindingsContextIsAddition({name: 'actor', bus});
+ beforeEach(async() => {
+ bus = new Bus({ name: 'bus' });
+ actor = new ActorMergeBindingsContextIsAddition({ name: 'actor', bus });
context = new ActionContext();
});
@@ -25,70 +25,70 @@ describe('ActorMergeBindingsContextIsAddition', () => {
it('should run', async() => {
await expect(actor.run({ context })).resolves.toMatchObject(
- { mergeHandlers: { "isAddition": { run: expect.any(Function) } } },
+ { mergeHandlers: { isAddition: { run: expect.any(Function) }}},
);
});
describe('merge handler', () => {
- //TODO when comuncia V4 change to boolean[] => boolean
+ // TODO when comunica V4 change to boolean[] => boolean
let mergeHandler: (...args: any) => any;
- beforeEach(async () => {
- mergeHandler = (await actor.run({context})).mergeHandlers["isAddition"].run;
+ beforeEach(async() => {
+ mergeHandler = (await actor.run({ context })).mergeHandlers.isAddition.run;
});
- it('should return false if the first is false', async () => {
- const inputSets = [false, true];
- expect(mergeHandler(...inputSets)).toStrictEqual(false);
+ it('should return false if the first is false', async() => {
+ const inputSets = [ false, true ];
+ expect(mergeHandler(...inputSets)).toBe(false);
});
- it('should return false if the second is false', async () => {
- const inputSets = [true, false];
- expect(mergeHandler(...inputSets)).toStrictEqual(false);
+ it('should return false if the second is false', async() => {
+ const inputSets = [ true, false ];
+ expect(mergeHandler(...inputSets)).toBe(false);
});
- it('should return false if both are false', async () => {
- const inputSets = [false, false];
- expect(mergeHandler(...inputSets)).toStrictEqual(false);
+ it('should return false if both are false', async() => {
+ const inputSets = [ false, false ];
+ expect(mergeHandler(...inputSets)).toBe(false);
});
- it('should return true if both are true', async () => {
- const inputSets = [true, true];
- expect(mergeHandler(...inputSets)).toStrictEqual(true);
+ it('should return true if both are true', async() => {
+ const inputSets = [ true, true ];
+ expect(mergeHandler(...inputSets)).toBe(true);
});
- it('should work with multiple values', async () => {
- const inputSets = [true, true, false, true];
- expect(mergeHandler(...inputSets)).toStrictEqual(false);
+ it('should work with multiple values', async() => {
+ const inputSets = [ true, true, false, true ];
+ expect(mergeHandler(...inputSets)).toBe(false);
});
});
describe('actual bindings', () => {
let BF: BindingsFactory;
- beforeEach(async () => {
+ beforeEach(async() => {
BF = await DevTools.createBindingsFactory(DF);
});
- it('should work with addition bindings', async () => {
+ it('should work with addition bindings', async() => {
const bindings1 = BF.bindings([
- [DF.variable('a'), DF.literal("1")],
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true);
const bindings2 = BF.bindings([
- [DF.variable('a'), DF.literal("1")],
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true);
expect(bindings1.merge(bindings2)).toEqualBindings(BF.bindings([
- [DF.variable('a'), DF.literal("1")],
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true));
});
- it('should work with deletion bindings', async () => {
+ it('should work with deletion bindings', async() => {
const bindings1 = BF.bindings([
- [DF.variable('a'), DF.literal("1")],
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false);
const bindings2 = BF.bindings([
- [DF.variable('a'), DF.literal("1")],
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true);
expect(bindings1.merge(bindings2)).toEqualBindings(BF.bindings([
- [DF.variable('a'), DF.literal("1")],
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false));
});
});
diff --git a/packages/actor-query-operation-incremental-distinct-hash/README.md b/packages/actor-query-operation-incremental-distinct-hash/README.md
index a571f9d9..fa2cfbd3 100644
--- a/packages/actor-query-operation-incremental-distinct-hash/README.md
+++ b/packages/actor-query-operation-incremental-distinct-hash/README.md
@@ -23,7 +23,7 @@ After installing, this package can be added to your engine's configuration as fo
{
"@context": [
...
- "https://linkedsoftwaredependencies.org/bundles/npm/@Incremunica/actor-query-operation-incremental-distinct-hash/^2.0.0/components/context.jsonld"
+ "https://linkedsoftwaredependencies.org/bundles/npm/@Incremunica/actor-query-operation-incremental-distinct-hash/^2.0.0/components/context.jsonld"
],
"actors": [
...
diff --git a/packages/actor-query-operation-incremental-distinct-hash/lib/ActorQueryOperationIncrementalDistinctHash.ts b/packages/actor-query-operation-incremental-distinct-hash/lib/ActorQueryOperationIncrementalDistinctHash.ts
index c53f630b..9ed54b28 100644
--- a/packages/actor-query-operation-incremental-distinct-hash/lib/ActorQueryOperationIncrementalDistinctHash.ts
+++ b/packages/actor-query-operation-incremental-distinct-hash/lib/ActorQueryOperationIncrementalDistinctHash.ts
@@ -1,16 +1,20 @@
+import type { Bindings } from '@comunica/bindings-factory';
import type { IActorQueryOperationTypedMediatedArgs } from '@comunica/bus-query-operation';
import {
ActorQueryOperation,
ActorQueryOperationTypedMediated,
} from '@comunica/bus-query-operation';
import type { IActorTest } from '@comunica/core';
-import type { IActionContext, IQueryOperationResult, IQueryOperationResultBindings } from '@comunica/types';
+import type {
+ IActionContext,
+ IQueryOperationResult,
+ IQueryOperationResultBindings,
+ BindingsStream,
+} from '@comunica/types';
+import { ActionContextKeyIsAddition } from '@incremunica/actor-merge-bindings-context-is-addition';
import { HashBindings } from '@incremunica/hash-bindings';
-import type { Bindings } from '@comunica/bindings-factory';
-import type { BindingsStream } from '@comunica/types';
+import type { AsyncIterator } from 'asynciterator';
import type { Algebra } from 'sparqlalgebrajs';
-import {ActionContextKeyIsAddition} from "@incremunica/actor-merge-bindings-context-is-addition";
-import type {AsyncIterator} from "asynciterator";
/**
* An Incremunica Distinct Hash Query Operation Actor.
@@ -20,7 +24,7 @@ export class ActorQueryOperationIncrementalDistinctHash extends ActorQueryOperat
super(args, 'distinct');
}
- public async testOperation(operation: Algebra.Distinct, context: IActionContext): Promise {
+ public async testOperation(_operation: Algebra.Distinct, _context: IActionContext): Promise {
return true;
}
diff --git a/packages/actor-query-operation-incremental-distinct-hash/package.json b/packages/actor-query-operation-incremental-distinct-hash/package.json
index f0e4aa52..bffff147 100644
--- a/packages/actor-query-operation-incremental-distinct-hash/package.json
+++ b/packages/actor-query-operation-incremental-distinct-hash/package.json
@@ -3,45 +3,46 @@
"version": "1.3.0",
"description": "An incremental distinct-hash query-operation actor",
"lsd:module": true,
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica.git",
"directory": "packages/actor-query-operation-incremental-distinct-hash"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"incremunica",
"actor",
"query-operation",
"distinct-hash"
],
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
},
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
"components",
"lib/**/*.d.ts",
"lib/**/*.js",
"lib/**/*.js.map"
],
+ "scripts": {
+ "build": "npm run build:ts && npm run build:components",
+ "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
+ "build:components": "componentsjs-generator"
+ },
"dependencies": {
- "@incremunica/hash-bindings": "^1.3.0",
- "@incremunica/incremental-types": "^1.3.0",
+ "@comunica/bindings-factory": "^3.3.0",
"@comunica/bus-query-operation": "^3.2.2",
"@comunica/core": "^3.2.1",
"@comunica/types": "^3.2.1",
+ "@incremunica/actor-merge-bindings-context-is-addition": "^1.3.0",
+ "@incremunica/hash-bindings": "^1.3.0",
"sparqlalgebrajs": "^4.2.0"
- },
- "scripts": {
- "build": "npm run build:ts && npm run build:components",
- "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
- "build:components": "componentsjs-generator"
}
}
diff --git a/packages/actor-query-operation-incremental-distinct-hash/test/ActorQueryOperationIncrementalDistinctHash-test.ts b/packages/actor-query-operation-incremental-distinct-hash/test/ActorQueryOperationIncrementalDistinctHash-test.ts
index 29186c82..0e323c03 100644
--- a/packages/actor-query-operation-incremental-distinct-hash/test/ActorQueryOperationIncrementalDistinctHash-test.ts
+++ b/packages/actor-query-operation-incremental-distinct-hash/test/ActorQueryOperationIncrementalDistinctHash-test.ts
@@ -1,12 +1,11 @@
-import { BindingsFactory } from '@comunica/bindings-factory';
+import type { BindingsFactory } from '@comunica/bindings-factory';
import { ActionContext, Bus } from '@comunica/core';
-import type { IQueryOperationResultBindings } from '@comunica/types';
+import { ActionContextKeyIsAddition } from '@incremunica/actor-merge-bindings-context-is-addition';
+import { DevTools } from '@incremunica/dev-tools';
import { ArrayIterator } from 'asynciterator';
import { DataFactory } from 'rdf-data-factory';
import { ActorQueryOperationIncrementalDistinctHash } from '../lib';
import '@comunica/jest';
-import {ActionContextKeyIsAddition} from "@incremunica/actor-merge-bindings-context-is-addition";
-import {DevTools} from "@incremunica/dev-tools";
const DF = new DataFactory();
@@ -15,19 +14,19 @@ describe('ActorQueryOperationIncrementalDistinctHash', () => {
let mediatorQueryOperation: any;
let BF: BindingsFactory;
- beforeEach(async () => {
+ beforeEach(async() => {
BF = await DevTools.createBindingsFactory(DF);
- bus = new Bus({name: 'bus'});
+ bus = new Bus({ name: 'bus' });
mediatorQueryOperation = {
mediate: (arg: any) => Promise.resolve({
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('3')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('1') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('1') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('3') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
]),
- metadata: () => Promise.resolve({cardinality: 5, variables: [DF.variable('a')]}),
+ metadata: () => Promise.resolve({ cardinality: 5, variables: [ DF.variable('a') ]}),
operated: arg,
type: 'bindings',
}),
@@ -39,85 +38,179 @@ describe('ActorQueryOperationIncrementalDistinctHash', () => {
beforeEach(() => {
actor = new ActorQueryOperationIncrementalDistinctHash(
- {name: 'actor', bus, mediatorQueryOperation},
+ { name: 'actor', bus, mediatorQueryOperation },
);
});
- it('should create a filter', async () => {
+ it('should create a filter', async() => {
expect(actor.newHashFilter()).toBeInstanceOf(Function);
});
- it('should create a filter that is a predicate', async () => {
+ it('should create a filter that is a predicate', async() => {
const filter = actor.newHashFilter();
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
});
- it('should create a filter that only returns true once for equal objects', async () => {
+ it('should create a filter that only returns true once for equal objects', async() => {
const filter = actor.newHashFilter();
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
-
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
});
- it('should create a filters that are independent', async () => {
+ it('should create a filters that are independent', async() => {
const filter1 = actor.newHashFilter();
const filter2 = actor.newHashFilter();
const filter3 = actor.newHashFilter();
- expect(filter1(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter1(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
-
- expect(filter2(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter2(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
-
- expect(filter3(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter3(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter1(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter1(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+
+ expect(filter2(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter2(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+
+ expect(filter3(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter3(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
});
- it('should create a filter that returns true if everything is deleted', async () => {
+ it('should create a filter that returns true if everything is deleted', async() => {
const filter = actor.newHashFilter();
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
-
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
});
- it('should create a filter that returns false if too much is deleted', async () => {
+ it('should create a filter that returns false if too much is deleted', async() => {
const filter = actor.newHashFilter();
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
-
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(true);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('a')]]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
- expect(filter(BF.bindings([[DF.variable('a'), DF.literal('b')]]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(true);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('a') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false))).toBe(false);
+ expect(filter(BF.bindings([
+ [ DF.variable('a'), DF.literal('b') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true))).toBe(true);
});
});
@@ -125,31 +218,30 @@ describe('ActorQueryOperationIncrementalDistinctHash', () => {
let actor: ActorQueryOperationIncrementalDistinctHash;
beforeEach(() => {
actor = new ActorQueryOperationIncrementalDistinctHash(
- {name: 'actor', bus, mediatorQueryOperation},
+ { name: 'actor', bus, mediatorQueryOperation },
);
});
- it('should test on distinct', () => {
- const op: any = {operation: {type: 'distinct'}, context: new ActionContext()};
- return expect(actor.test(op)).resolves.toBeTruthy();
+ it('should test on distinct', async() => {
+ const op: any = { operation: { type: 'distinct' }, context: new ActionContext() };
+ await expect(actor.test(op)).resolves.toBeTruthy();
});
- it('should not test on non-distinct', () => {
- const op: any = {operation: {type: 'some-other-type'}, context: new ActionContext()};
- return expect(actor.test(op)).rejects.toBeTruthy();
+ it('should not test on non-distinct', async() => {
+ const op: any = { operation: { type: 'some-other-type' }, context: new ActionContext() };
+ await expect(actor.test(op)).rejects.toBeTruthy();
});
- it('should run', () => {
- const op: any = {operation: {type: 'distinct'}, context: new ActionContext()};
- return actor.run(op).then(async (output: IQueryOperationResultBindings) => {
- expect(await output.metadata()).toEqual({cardinality: 5, variables: [DF.variable('a')]});
- expect(output.type).toEqual('bindings');
- await expect(output.bindingsStream).toEqualBindingsStream([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('3')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ]);
- });
+ it('should run', async() => {
+ const op: any = { operation: { type: 'distinct' }, context: new ActionContext() };
+ const output = await actor.run(op);
+ await expect(output.metadata()).resolves.toEqual({ cardinality: 5, variables: [ DF.variable('a') ]});
+ expect(output.type).toBe('bindings');
+ await expect(output.bindingsStream).toEqualBindingsStream([
+ BF.bindings([[ DF.variable('a'), DF.literal('1') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('3') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ]);
});
});
});
diff --git a/packages/actor-query-operation-incremental-filter/README.md b/packages/actor-query-operation-incremental-filter/README.md
index 473ce20f..e7ae1e1e 100644
--- a/packages/actor-query-operation-incremental-filter/README.md
+++ b/packages/actor-query-operation-incremental-filter/README.md
@@ -17,7 +17,7 @@ After installing, this package can be added to your engine's configuration as fo
{
"@context": [
...
- "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-query-operation-incremental-filter/^1.0.0/components/context.jsonld"
+ "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-query-operation-incremental-filter/^1.0.0/components/context.jsonld"
],
"actors": [
...
diff --git a/packages/actor-query-operation-incremental-filter/lib/ActorQueryOperationIncrementalFilter.ts b/packages/actor-query-operation-incremental-filter/lib/ActorQueryOperationIncrementalFilter.ts
index fde33b48..5eb8617d 100644
--- a/packages/actor-query-operation-incremental-filter/lib/ActorQueryOperationIncrementalFilter.ts
+++ b/packages/actor-query-operation-incremental-filter/lib/ActorQueryOperationIncrementalFilter.ts
@@ -1,19 +1,20 @@
+import type { Bindings } from '@comunica/bindings-factory';
+import { BindingsFactory, bindingsToString } from '@comunica/bindings-factory';
+import type { MediatorMergeBindingsContext } from '@comunica/bus-merge-bindings-context';
import type { IActorQueryOperationTypedMediatedArgs } from '@comunica/bus-query-operation';
-import { ActorQueryOperation,
+import {
+ ActorQueryOperation,
ActorQueryOperationTypedMediated,
- materializeOperation } from '@comunica/bus-query-operation';
+ materializeOperation,
+} from '@comunica/bus-query-operation';
import type { IActorTest } from '@comunica/core';
import { AsyncEvaluator, isExpressionError } from '@comunica/expression-evaluator';
-import type {IActionContext, IQueryOperationResult} from '@comunica/types';
-import {Bindings, BindingsFactory} from '@comunica/bindings-factory';
-import { bindingsToString } from '@comunica/bindings-factory';
+import type { IActionContext, IQueryOperationResult, BindingsStream } from '@comunica/types';
+import { ActionContextKeyIsAddition } from '@incremunica/actor-merge-bindings-context-is-addition';
import { HashBindings } from '@incremunica/hash-bindings';
-import type { BindingsStream } from '@comunica/types';
+import type { AsyncIterator } from 'asynciterator';
import { EmptyIterator, SingletonIterator, UnionIterator } from 'asynciterator';
import type { Algebra } from 'sparqlalgebrajs';
-import {ActionContextKeyIsAddition} from "@incremunica/actor-merge-bindings-context-is-addition";
-import {MediatorMergeBindingsContext} from "@comunica/bus-merge-bindings-context";
-import {AsyncIterator} from "asynciterator";
/**
* A comunica Filter Sparqlee Query Operation Actor.
@@ -32,7 +33,11 @@ export class ActorQueryOperationIncrementalFilter extends ActorQueryOperationTyp
return true;
}
if (operation.expression.expressionType === 'operator') {
- const config = { ...ActorQueryOperation.getAsyncExpressionContext(context, this.mediatorQueryOperation, new BindingsFactory()) };
+ const config = { ...ActorQueryOperation.getAsyncExpressionContext(
+ context,
+ this.mediatorQueryOperation,
+ await BindingsFactory.create(this.mediatorMergeBindingsContext, context),
+ ) };
const _ = new AsyncEvaluator(operation.expression, config);
return true;
}
@@ -90,7 +95,7 @@ export class ActorQueryOperationIncrementalFilter extends ActorQueryOperationTyp
bindings: bindingsToString(item),
}));
} else {
- //TODO is this the correct way of making the bindingsStream emit an error?
+ // TODO is this the correct way of making the bindingsStream emit an error?
bindingsStream.emit('error', error);
}
}
@@ -109,7 +114,11 @@ export class ActorQueryOperationIncrementalFilter extends ActorQueryOperationTyp
const hashBindings = new HashBindings();
- const binder = async(bindings: Bindings, done: () => void, push: (i: AsyncIterator) => void): Promise => {
+ const binder = async(
+ bindings: Bindings,
+ done: () => void,
+ push: (i: AsyncIterator<Bindings>) => void,
+ ): Promise<void> => {
const hash = hashBindings.hash(bindings);
let hashData = transformMap.get(hash);
if (bindings.getContextEntry(new ActionContextKeyIsAddition())) {
@@ -209,10 +218,13 @@ export class ActorQueryOperationIncrementalFilter extends ActorQueryOperationTyp
done();
};
- const bindingsStream = new UnionIterator((>output.bindingsStream)
- .transform({
- transform: binder,
- }), { autoStart: false });
+ const bindingsStream = new UnionIterator(
+ (<AsyncIterator<Bindings>>output.bindingsStream)
+ .transform({
+ transform: binder,
+ }),
+ { autoStart: false },
+ );
return { type: 'bindings', bindingsStream, metadata: output.metadata };
}
}
diff --git a/packages/actor-query-operation-incremental-filter/package.json b/packages/actor-query-operation-incremental-filter/package.json
index fa0de0d0..e128e086 100644
--- a/packages/actor-query-operation-incremental-filter/package.json
+++ b/packages/actor-query-operation-incremental-filter/package.json
@@ -3,46 +3,49 @@
"version": "1.3.0",
"description": "An incremental-filter query-operation actor",
"lsd:module": true,
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica.git",
"directory": "packages/actor-query-operation-incremental-filter"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"comunica",
"actor",
"query-operation",
"incremental-filter"
],
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
},
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
"components",
"lib/**/*.d.ts",
"lib/**/*.js",
"lib/**/*.js.map"
],
+ "scripts": {
+ "build": "npm run build:ts && npm run build:components",
+ "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
+ "build:components": "componentsjs-generator"
+ },
"dependencies": {
+ "@comunica/bindings-factory": "^3.3.0",
+ "@comunica/bus-merge-bindings-context": "^3.3.0",
"@comunica/bus-query-operation": "^3.2.2",
"@comunica/core": "^3.2.1",
"@comunica/expression-evaluator": "^3.2.1",
"@comunica/types": "^3.2.1",
+ "@incremunica/actor-merge-bindings-context-is-addition": "^1.3.0",
"@incremunica/hash-bindings": "^1.3.0",
"asynciterator": "^3.9.0",
"sparqlalgebrajs": "^4.2.0"
- },
- "scripts": {
- "build": "npm run build:ts && npm run build:components",
- "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
- "build:components": "componentsjs-generator"
}
}
diff --git a/packages/actor-query-operation-incremental-filter/test/ActorQueryOperationIncrementalFilter-test.ts b/packages/actor-query-operation-incremental-filter/test/ActorQueryOperationIncrementalFilter-test.ts
index 5ba01e58..90d64c58 100644
--- a/packages/actor-query-operation-incremental-filter/test/ActorQueryOperationIncrementalFilter-test.ts
+++ b/packages/actor-query-operation-incremental-filter/test/ActorQueryOperationIncrementalFilter-test.ts
@@ -1,10 +1,15 @@
-import { BindingsFactory } from '@comunica/bindings-factory';
+import type { BindingsFactory } from '@comunica/bindings-factory';
import { ActorQueryOperation } from '@comunica/bus-query-operation';
import { KeysInitQuery } from '@comunica/context-entries';
import { ActionContext, Bus } from '@comunica/core';
import * as sparqlee from '@comunica/expression-evaluator';
import { isExpressionError } from '@comunica/expression-evaluator';
import type { IQueryOperationResultBindings, Bindings } from '@comunica/types';
+import {
+ ActionContextKeyIsAddition,
+ ActorMergeBindingsContextIsAddition,
+} from '@incremunica/actor-merge-bindings-context-is-addition';
+import { DevTools } from '@incremunica/dev-tools';
import { ArrayIterator } from 'asynciterator';
import { DataFactory } from 'rdf-data-factory';
import type { Algebra } from 'sparqlalgebrajs';
@@ -13,12 +18,7 @@ import { ActorQueryOperationIncrementalFilter } from '../lib';
import '@comunica/jest';
import '@incremunica/incremental-jest';
import {EventEmitter} from "events";
-import {DevTools} from "@incremunica/dev-tools";
-import {
- ActionContextKeyIsAddition,
- ActorMergeBindingsContextIsAddition
-} from "@incremunica/actor-merge-bindings-context-is-addition";
-import {MediatorMergeBindingsContext} from "@comunica/bus-merge-bindings-context";
+import type { MediatorMergeBindingsContext } from '@comunica/bus-merge-bindings-context';
const DF = new DataFactory();
@@ -40,9 +40,9 @@ function parse(query: string): Algebra.Expression {
}
async function partialArrayifyStream(stream: EventEmitter, num: number): Promise {
- let array: V[] = [];
+ const array: V[] = [];
for (let i = 0; i < num; i++) {
- await new Promise((resolve) => stream.once("data", (bindings: V) => {
+ await new Promise(resolve => stream.once('data', (bindings: V) => {
array.push(bindings);
resolve();
}));
@@ -69,17 +69,17 @@ describe('ActorQueryOperationFilterSparqlee', () => {
};
let BF: BindingsFactory;
- beforeEach(async () => {
+ beforeEach(async() => {
BF = await DevTools.createBindingsFactory(DF);
- bus = new Bus({name: 'bus'});
+ bus = new Bus({ name: 'bus' });
mediatorQueryOperation = {
mediate: (arg: any) => Promise.resolve({
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('3')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ], {autoStart: false}),
- metadata: () => Promise.resolve({cardinality: 3, canContainUndefs: false, variables: [DF.variable('a')]}),
+ BF.bindings([[ DF.variable('a'), DF.literal('1') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([[ DF.variable('a'), DF.literal('3') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ], { autoStart: false }),
+ metadata: () => Promise.resolve({ cardinality: 3, canContainUndefs: false, variables: [ DF.variable('a') ]}),
operated: arg,
type: 'bindings',
}),
@@ -99,7 +99,9 @@ describe('ActorQueryOperationFilterSparqlee', () => {
});
it('should not be able to create new ActorQueryOperationFilterSparqlee objects without \'new\'', () => {
- expect(() => { ( ActorQueryOperationIncrementalFilter)(); }).toThrow();
+ expect(() => {
+ (<any> ActorQueryOperationIncrementalFilter)();
+ }).toThrow();
});
});
@@ -108,12 +110,12 @@ describe('ActorQueryOperationFilterSparqlee', () => {
let factory: Factory;
beforeEach(() => {
- let mediatorMergeBindingsContext: MediatorMergeBindingsContext = {
- mediate: async () => Promise.resolve((await new ActorMergeBindingsContextIsAddition({
+ const mediatorMergeBindingsContext: MediatorMergeBindingsContext = {
+ mediate: async() => (await new ActorMergeBindingsContextIsAddition({
bus: new Bus({name: 'bus'}),
name: 'actor'
- }).run({})).mergeHandlers),
- }
+ }).run({})).mergeHandlers,
+ };
actor = new ActorQueryOperationIncrementalFilter({ name: 'actor', bus, mediatorQueryOperation, mediatorMergeBindingsContext });
factory = new Factory();
});
@@ -124,7 +126,7 @@ describe('ActorQueryOperationFilterSparqlee', () => {
});
it('should test on filter existence', () => {
- const op: any = { operation: { type: 'filter', expression: {expressionType: 'existence'} }, context: new ActionContext() };
+ const op: any = { operation: { type: 'filter', expression: { expressionType: 'existence' }}, context: new ActionContext() };
return expect(actor.test(op)).resolves.toBeTruthy();
});
@@ -144,68 +146,62 @@ describe('ActorQueryOperationFilterSparqlee', () => {
});
it('should return the full stream for a truthy filter', async() => {
- const op: any = { operation: { type: 'filter', input: {}, expression: truthyExpression },
- context: new ActionContext() };
+ const op: any = { operation: { type: 'filter', input: {}, expression: truthyExpression }, context: new ActionContext() };
const output: IQueryOperationResultBindings = await actor.run(op);
- expect(await partialArrayifyStream(output.bindingsStream, 3)).toEqualBindingsArray([
+ await expect(partialArrayifyStream(output.bindingsStream, 3)).resolves.toEqualBindingsArray([
BF.bindings([[ DF.variable('a'), DF.literal('1') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([[ DF.variable('a'), DF.literal('3') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- expect(output.type).toEqual('bindings');
- expect(await output.metadata())
+ expect(output.type).toBe('bindings');
+ await expect(output.metadata()).resolves
.toMatchObject({ cardinality: 3, canContainUndefs: false, variables: [ DF.variable('a') ]});
});
it('should return an empty stream for a falsy filter', async() => {
- const op: any = { operation: { type: 'filter', input: {}, expression: falsyExpression },
- context: new ActionContext() };
+ const op: any = { operation: { type: 'filter', input: {}, expression: falsyExpression }, context: new ActionContext() };
const output: IQueryOperationResultBindings = await actor.run(op);
await expect(output.bindingsStream).toEqualBindingsStream([]);
- expect(await output.metadata())
+ await expect(output.metadata()).resolves
.toMatchObject({ cardinality: 3, canContainUndefs: false, variables: [ DF.variable('a') ]});
- expect(output.type).toEqual('bindings');
+ expect(output.type).toBe('bindings');
});
it('should return an empty stream when the expressions error', async() => {
- const op: any = { operation: { type: 'filter', input: {}, expression: erroringExpression },
- context: new ActionContext() };
+ const op: any = { operation: { type: 'filter', input: {}, expression: erroringExpression }, context: new ActionContext() };
const output: IQueryOperationResultBindings = await actor.run(op);
await expect(output.bindingsStream).toEqualBindingsStream([]);
- expect(await output.metadata())
+ await expect(output.metadata()).resolves
.toMatchObject({ cardinality: 3, canContainUndefs: false, variables: [ DF.variable('a') ]});
- expect(output.type).toEqual('bindings');
+ expect(output.type).toBe('bindings');
});
it('Should log warning for an expressionError', async() => {
// The order is very important. This item requires isExpressionError to still have it's right definition.
const logWarnSpy = jest.spyOn( actor, 'logWarn');
- const op: any = { operation: { type: 'filter', input: {}, expression: erroringExpression },
- context: new ActionContext() };
+ const op: any = { operation: { type: 'filter', input: {}, expression: erroringExpression }, context: new ActionContext() };
const output: IQueryOperationResultBindings = await actor.run(op);
output.bindingsStream.on('data', () => {
// This is here to force the stream to start.
});
await new Promise(resolve => output.bindingsStream.on('end', resolve));
expect(logWarnSpy).toHaveBeenCalledTimes(3);
- logWarnSpy.mock.calls.forEach((call, index) => {
+ for (const [ index, call ] of logWarnSpy.mock.calls.entries()) {
if (index === 0) {
const dataCB = <() => { error: any; bindings: Bindings }>call[2];
const { error, bindings } = dataCB();
expect(isExpressionError(error)).toBeTruthy();
- expect(bindings).toEqual(`{
+ expect(bindings).toBe(`{
"a": "\\"1\\""
}`);
}
- });
+ }
});
it('should emit an error for a hard erroring filter', async() => {
- // eslint-disable-next-line no-import-assign
Object.defineProperty(sparqlee, 'isExpressionError', { writable: true });
- ( sparqlee).isExpressionError = jest.fn(() => false);
- const op: any = { operation: { type: 'filter', input: {}, expression: erroringExpression },
- context: new ActionContext() };
+ jest.spyOn(<any> sparqlee, 'isExpressionError').mockImplementation(() => false);
+ const op: any = { operation: { type: 'filter', input: {}, expression: erroringExpression }, context: new ActionContext() };
const output: IQueryOperationResultBindings = await actor.run(op);
output.bindingsStream.on('data', () => {
// This is here to force the stream to start.
@@ -225,33 +221,31 @@ describe('ActorQueryOperationFilterSparqlee', () => {
BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([[ DF.variable('a'), DF.literal('3') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- expect(output.type).toEqual('bindings');
- expect(await output.metadata())
+ expect(output.type).toBe('bindings');
+ await expect(output.metadata()).resolves
.toMatchObject({ cardinality: 3, canContainUndefs: false, variables: [ DF.variable('a') ]});
});
describe('should be able to handle EXIST filters', () => {
it('like a simple EXIST that is true', async() => {
// The actual bgp isn't used
- const op: any = { operation: { type: 'filter', input: {}, expression: parse("EXISTS {?a a ?a}") },
- context: new ActionContext() };
+ const op: any = { operation: { type: 'filter', input: {}, expression: parse('EXISTS {?a a ?a}') }, context: new ActionContext() };
const output: IQueryOperationResultBindings = await actor.run(op);
- expect(await partialArrayifyStream(output.bindingsStream, 3)).toBeIsomorphicBindingsArray([
+ await expect(partialArrayifyStream(output.bindingsStream, 3)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([[ DF.variable('a'), DF.literal('1') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([[ DF.variable('a'), DF.literal('3') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- expect(await output.metadata())
+ await expect(output.metadata()).resolves
.toMatchObject({ cardinality: 3, canContainUndefs: false, variables: [ DF.variable('a') ]});
- expect(output.type).toEqual('bindings');
+ expect(output.type).toBe('bindings');
});
it('like a simple NOT EXIST that is true', async() => {
// The actual bgp isn't used
- const op: any = { operation: { type: 'filter', input: {}, expression: parse("NOT EXISTS {?a a ?a}") },
- context: new ActionContext() };
+ const op: any = { operation: { type: 'filter', input: {}, expression: parse('NOT EXISTS {?a a ?a}') }, context: new ActionContext() };
const output: IQueryOperationResultBindings = await actor.run(op);
- expect(await partialArrayifyStream(output.bindingsStream, 6)).toBeIsomorphicBindingsArray([
+ await expect(partialArrayifyStream(output.bindingsStream, 6)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([[ DF.variable('a'), DF.literal('1') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([[ DF.variable('a'), DF.literal('3') ]]).setContextEntry(new ActionContextKeyIsAddition(), true),
@@ -259,9 +253,9 @@ describe('ActorQueryOperationFilterSparqlee', () => {
BF.bindings([[ DF.variable('a'), DF.literal('2') ]]).setContextEntry(new ActionContextKeyIsAddition(), false),
BF.bindings([[ DF.variable('a'), DF.literal('3') ]]).setContextEntry(new ActionContextKeyIsAddition(), false),
]);
- expect(await output.metadata())
+ await expect(output.metadata()).resolves
.toMatchObject({ cardinality: 3, canContainUndefs: false, variables: [ DF.variable('a') ]});
- expect(output.type).toEqual('bindings');
+ expect(output.type).toBe('bindings');
});
});
});
diff --git a/packages/actor-query-source-identify-hypermedia-stream-none/README.md b/packages/actor-query-source-identify-hypermedia-stream-none/README.md
index 3ec9deb6..518e2bb6 100644
--- a/packages/actor-query-source-identify-hypermedia-stream-none/README.md
+++ b/packages/actor-query-source-identify-hypermedia-stream-none/README.md
@@ -17,7 +17,7 @@ After installing, this package can be added to your engine's configuration as fo
{
"@context": [
...
- "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-query-source-identify-hypermedia-stream-none/^1.0.0/components/context.jsonld"
+ "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-query-source-identify-hypermedia-stream-none/^1.0.0/components/context.jsonld"
],
"actors": [
...
diff --git a/packages/actor-query-source-identify-hypermedia-stream-none/lib/ActorQuerySourceIdentifyHypermediaStreamNone.ts b/packages/actor-query-source-identify-hypermedia-stream-none/lib/ActorQuerySourceIdentifyHypermediaStreamNone.ts
index e5ed69b3..7263b44c 100644
--- a/packages/actor-query-source-identify-hypermedia-stream-none/lib/ActorQuerySourceIdentifyHypermediaStreamNone.ts
+++ b/packages/actor-query-source-identify-hypermedia-stream-none/lib/ActorQuerySourceIdentifyHypermediaStreamNone.ts
@@ -1,9 +1,3 @@
-import { ActionContext } from '@comunica/core';
-import type { MediatorGuard } from '@incremunica/bus-guard';
-import { KeysGuard } from '@incremunica/context-entries';
-import { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
-import type { Quad } from '@incremunica/incremental-types';
-import type * as RDF from '@rdfjs/types';
import { BindingsFactory } from '@comunica/bindings-factory';
import type { MediatorMergeBindingsContext } from '@comunica/bus-merge-bindings-context';
import type {
@@ -13,7 +7,13 @@ import type {
IActorQuerySourceIdentifyHypermediaTest,
} from '@comunica/bus-query-source-identify-hypermedia';
import { ActorQuerySourceIdentifyHypermedia } from '@comunica/bus-query-source-identify-hypermedia';
-import {StreamingQuerySourceRdfJs} from "@incremunica/actor-query-source-identify-streaming-rdfjs";
+import { ActionContext } from '@comunica/core';
+import { StreamingQuerySourceRdfJs } from '@incremunica/actor-query-source-identify-streaming-rdfjs';
+import type { MediatorGuard } from '@incremunica/bus-guard';
+import { KeysGuard } from '@incremunica/context-entries';
+import { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
+import type { Quad } from '@incremunica/incremental-types';
+import type * as RDF from '@rdfjs/types';
/**
* An incremunica Stream None Query Source Identify Hypermedia Actor.
diff --git a/packages/actor-query-source-identify-hypermedia-stream-none/package.json b/packages/actor-query-source-identify-hypermedia-stream-none/package.json
index 7a052bd0..602f8199 100644
--- a/packages/actor-query-source-identify-hypermedia-stream-none/package.json
+++ b/packages/actor-query-source-identify-hypermedia-stream-none/package.json
@@ -3,44 +3,47 @@
"version": "1.3.0",
"description": "A stream none query-source-identify-hypermedia actor",
"lsd:module": true,
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica.git",
"directory": "packages/actor-query-source-identify-hypermedia-stream-none"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"incremunica",
"actor",
"query-source-identify-hypermedia",
"stream-none"
],
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
},
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
"components",
"lib/**/*.d.ts",
"lib/**/*.js",
"lib/**/*.js.map"
],
+ "scripts": {
+ "build": "npm run build:ts && npm run build:components",
+ "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
+ "build:components": "componentsjs-generator"
+ },
"dependencies": {
- "asynciterator": "^3.9.0",
+ "@comunica/bindings-factory": "^3.3.0",
+ "@comunica/bus-merge-bindings-context": "^3.3.0",
+ "@comunica/bus-query-source-identify-hypermedia": "^3.3.0",
"@comunica/core": "^3.2.1",
+ "@incremunica/actor-query-source-identify-streaming-rdfjs": "^1.3.0",
"@incremunica/bus-guard": "^1.3.0",
"@incremunica/context-entries": "^1.3.0",
"@incremunica/incremental-rdf-streaming-store": "^1.3.0"
- },
- "scripts": {
- "build": "npm run build:ts && npm run build:components",
- "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
- "build:components": "componentsjs-generator"
}
}
diff --git a/packages/actor-query-source-identify-hypermedia-stream-none/test/ActorQuerySourceIdentifyHypermediaStreamNone-test.ts b/packages/actor-query-source-identify-hypermedia-stream-none/test/ActorQuerySourceIdentifyHypermediaStreamNone-test.ts
index a0f0cc4e..6f09fae6 100644
--- a/packages/actor-query-source-identify-hypermedia-stream-none/test/ActorQuerySourceIdentifyHypermediaStreamNone-test.ts
+++ b/packages/actor-query-source-identify-hypermedia-stream-none/test/ActorQuerySourceIdentifyHypermediaStreamNone-test.ts
@@ -1,20 +1,20 @@
-import {ActionContext, Bus} from '@comunica/core';
-import { ActorQuerySourceIdentifyHypermediaStreamNone } from '../lib';
-import {IActionGuard, MediatorGuard} from "@incremunica/bus-guard";
-import {DataFactory} from "rdf-data-factory";
-import arrayifyStream from "arrayify-stream";
-import 'jest-rdf'
-import '@incremunica/incremental-jest'
-import {EventEmitter} from "events";
-import {KeysGuard} from "@incremunica/context-entries";
-import { Factory } from 'sparqlalgebrajs';
-import {IGuardEvents} from "@incremunica/incremental-types";
+import 'jest-rdf';
+import '@incremunica/incremental-jest';
+import { EventEmitter } from 'node:events';
+import type { BindingsFactory } from '@comunica/bindings-factory';
+import { ActionContext, Bus } from '@comunica/core';
import {
ActionContextKeyIsAddition,
- ActorMergeBindingsContextIsAddition
-} from "@incremunica/actor-merge-bindings-context-is-addition";
-import {BindingsFactory} from "@comunica/bindings-factory";
-import {DevTools} from "@incremunica/dev-tools";
+ ActorMergeBindingsContextIsAddition,
+} from '@incremunica/actor-merge-bindings-context-is-addition';
+import type { IActionGuard, MediatorGuard } from '@incremunica/bus-guard';
+import { KeysGuard } from '@incremunica/context-entries';
+import { DevTools } from '@incremunica/dev-tools';
+import type { IGuardEvents } from '@incremunica/incremental-types';
+import arrayifyStream from 'arrayify-stream';
+import { DataFactory } from 'rdf-data-factory';
+import { Factory } from 'sparqlalgebrajs';
+import { ActorQuerySourceIdentifyHypermediaStreamNone } from '../lib';
const DF = new DataFactory();
const AF = new Factory();
@@ -25,7 +25,9 @@ function captureEvents(item: EventEmitter, ...events: string[]) {
const counts = (item)._eventCounts = Object.create(null);
for (const event of events) {
counts[event] = 0;
- item.on(event, () => { counts[event]++; });
+ item.on(event, () => {
+ counts[event]++;
+ });
}
return item;
}
@@ -34,8 +36,8 @@ describe('ActorRdfResolveHypermediaStreamNone', () => {
let bus: any;
let BF: BindingsFactory;
- beforeEach(async () => {
- bus = new Bus({name: 'bus'});
+ beforeEach(async() => {
+ bus = new Bus({ name: 'bus' });
BF = await DevTools.createBindingsFactory(DF);
});
@@ -53,117 +55,122 @@ describe('ActorRdfResolveHypermediaStreamNone', () => {
mediatorGuard = {
mediate: (action: IActionGuard) => {
mediatorFn(action);
- return { guardEvents }
- }
+ return { guardEvents };
+ },
};
mediatorMergeBindingsContext = {
- mediate: async (action: any) => {
+ mediate: async(action: any) => {
return (await new ActorMergeBindingsContextIsAddition({
- bus: new Bus({name: 'bus'}),
- name: 'actor'
+ bus: new Bus({ name: 'bus' }),
+ name: 'actor',
}).run({})).mergeHandlers;
- }
- }
- actor = new ActorQuerySourceIdentifyHypermediaStreamNone({ name: 'actor', bus, mediatorGuard, mediatorMergeBindingsContext});
+ },
+ };
+ actor = new ActorQuerySourceIdentifyHypermediaStreamNone({
+ name: 'actor',
+ bus,
+ mediatorGuard,
+ mediatorMergeBindingsContext,
+ });
});
- it('should test', async () => {
- let action = {};
- expect(await actor.test(action)).toMatchObject({filterFactor: 0});
+ it('should test', async() => {
+ const action = {};
+ await expect(actor.test(action)).resolves.toMatchObject({ filterFactor: 0 });
});
- it('should run and make a streaming store', async () => {
- let deletedQuad = quad("s1","p1","o1");
- deletedQuad.diff = false
- let action = {
+ it('should run and make a streaming store', async() => {
+ const deletedQuad = quad('s1', 'p1', 'o1');
+ deletedQuad.diff = false;
+ const action = {
context: {
get: () => {
- return ""
- }
+ return '';
+ },
},
- url: "http://test.com",
+ url: 'http://test.com',
quads: streamifyArray([
- quad("s1","p1","o1"),
- quad("s2","p2","o2"),
- deletedQuad
- ])
+ quad('s1', 'p1', 'o1'),
+ quad('s2', 'p2', 'o2'),
+ deletedQuad,
+ ]),
};
- let result = (await actor.run(action))
- let stream = result.source.queryBindings(
+ const result = (await actor.run(action));
+ const stream = result.source.queryBindings(
AF.createPattern(DF.variable('s'), DF.variable('p'), DF.variable('o')),
- new ActionContext()
+ new ActionContext(),
);
- let number = 2
- stream.on("data", () => {
+ let number = 2;
+ stream.on('data', () => {
number--;
- if (number == 0) {
+ if (number === 0) {
stream.close();
}
- })
- expect(await arrayifyStream(stream)).toBeIsomorphicBindingsArray([
+ });
+ await expect(arrayifyStream(stream)).resolves.toBeIsomorphicBindingsArray([
BF.bindings([
- [DF.variable('s'), DF.namedNode('s1')],
- [DF.variable('p'), DF.namedNode('p1')],
- [DF.variable('o'), DF.namedNode('o1')],
+ [ DF.variable('s'), DF.namedNode('s1') ],
+ [ DF.variable('p'), DF.namedNode('p1') ],
+ [ DF.variable('o'), DF.namedNode('o1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('s'), DF.namedNode('s2')],
- [DF.variable('p'), DF.namedNode('p2')],
- [DF.variable('o'), DF.namedNode('o2')],
+ [ DF.variable('s'), DF.namedNode('s2') ],
+ [ DF.variable('p'), DF.namedNode('p2') ],
+ [ DF.variable('o'), DF.namedNode('o2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('s'), DF.namedNode('s1')],
- [DF.variable('p'), DF.namedNode('p1')],
- [DF.variable('o'), DF.namedNode('o1')],
+ [ DF.variable('s'), DF.namedNode('s1') ],
+ [ DF.variable('p'), DF.namedNode('p1') ],
+ [ DF.variable('o'), DF.namedNode('o1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false),
]);
});
- it('should run and add a guard', async () => {
- let action = {
+ it('should run and add a guard', async() => {
+ const action = {
context: {
get: () => {
- return ""
- }
+ return '';
+ },
},
- url: "http://test.com",
- quads: streamifyArray([])
+ url: 'http://test.com',
+ quads: streamifyArray([]),
};
await actor.run(action);
expect(mediatorFn).toHaveBeenCalledTimes(1);
});
- it('should add the guard events to the source', async () => {
- let action = {
+ it('should add the guard events to the source', async() => {
+ const action = {
context: {
get: () => {
- return ""
- }
+ return '';
+ },
},
- url: "http://test.com",
- quads: streamifyArray([])
+ url: 'http://test.com',
+ quads: streamifyArray([]),
};
- let result = await actor.run(action);
- let events = ( result.source).context.get(KeysGuard.events);
+ const result = await actor.run(action);
+ const events = (<any> result.source).context.get(KeysGuard.events);
expect(events).toEqual(guardEvents);
- guardEvents.emit("modified");
- expect((guardEvents)._eventCounts.modified).toEqual(1);
+ guardEvents.emit('modified');
+ expect((<any> guardEvents)._eventCounts.modified).toBe(1);
});
- it('should add the guard events to the source even if the source has no context', async () => {
+ it('should add the guard events to the source even if the source has no context', async() => {
mediatorFn = jest.fn((action: IActionGuard) => {
action.streamingSource.context = undefined;
});
- let action = {
+ const action = {
context: {
get: () => {
- return ""
- }
+ return '';
+ },
},
- url: "http://test.com",
- quads: streamifyArray([])
+ url: 'http://test.com',
+ quads: streamifyArray([]),
};
- let result = await actor.run(action);
+ const result = await actor.run(action);
expect(mediatorFn).toHaveBeenCalledTimes(1);
+ expect((<any> result.source).context.get(KeysGuard.events)).toEqual(guardEvents);
});
diff --git a/packages/actor-query-source-identify-streaming-rdfjs/README.md b/packages/actor-query-source-identify-streaming-rdfjs/README.md
index 5e888aab..02e725cf 100644
--- a/packages/actor-query-source-identify-streaming-rdfjs/README.md
+++ b/packages/actor-query-source-identify-streaming-rdfjs/README.md
@@ -17,7 +17,7 @@ After installing, this package can be added to your engine's configuration as fo
{
"@context": [
...
- "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-query-source-identify-streaming-rdfjs/^1.0.0/components/context.jsonld"
+ "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-query-source-identify-streaming-rdfjs/^1.0.0/components/context.jsonld"
],
"actors": [
...
diff --git a/packages/actor-query-source-identify-streaming-rdfjs/lib/ActorQuerySourceIdentifyStreamingRdfJs.ts b/packages/actor-query-source-identify-streaming-rdfjs/lib/ActorQuerySourceIdentifyStreamingRdfJs.ts
index 04ae39d8..d5979e2f 100644
--- a/packages/actor-query-source-identify-streaming-rdfjs/lib/ActorQuerySourceIdentifyStreamingRdfJs.ts
+++ b/packages/actor-query-source-identify-streaming-rdfjs/lib/ActorQuerySourceIdentifyStreamingRdfJs.ts
@@ -1,15 +1,16 @@
-import {ActionContext, IActorTest} from '@comunica/core'
-import { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
+import { BindingsFactory } from '@comunica/bindings-factory';
+import type { MediatorMergeBindingsContext } from '@comunica/bus-merge-bindings-context';
import type {
IActionQuerySourceIdentify,
IActorQuerySourceIdentifyOutput,
IActorQuerySourceIdentifyArgs,
} from '@comunica/bus-query-source-identify';
import { ActorQuerySourceIdentify } from '@comunica/bus-query-source-identify';
-import {MediatorMergeBindingsContext} from "@comunica/bus-merge-bindings-context";
-import {StreamingQuerySourceRdfJs} from "./StreamingQuerySourceRdfJs";
-import {BindingsFactory} from "@comunica/bindings-factory";
-import {Quad} from "@incremunica/incremental-types";
+import type { IActorTest } from '@comunica/core';
+import { ActionContext } from '@comunica/core';
+import type { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
+import type { Quad } from '@incremunica/incremental-types';
+import { StreamingQuerySourceRdfJs } from './StreamingQuerySourceRdfJs';
/**
* An incremunica Streaming RDFJS Query Source Identify Actor.
@@ -29,10 +30,11 @@ export class ActorQuerySourceIdentifyStreamingRdfJs extends ActorQuerySourceIden
if (typeof source.value === 'string' || !('match' in source.value)) {
throw new Error(`${this.name} actor received an invalid streaming rdfjs query source.`);
}
- //TODO add check to make sure the store is a streaming store
- //if (!(source.value instanceof StreamingStore) && !(!('match' in source) && (source.value instanceof StreamingStore))) {
+ // TODO add check to make sure the store is a streaming store
+ // if (!(source.value instanceof StreamingStore)
+ // && !(!('match' in source) && (source.value instanceof StreamingStore))) {
// throw new Error(`${this.name} didn't receive a StreamingStore.`);
- //}
+ // }
return true;
}
diff --git a/packages/actor-query-source-identify-streaming-rdfjs/lib/IRdfJsSourceExtended.ts b/packages/actor-query-source-identify-streaming-rdfjs/lib/IRdfJsSourceExtended.ts
index 671753eb..619dc9b2 100644
--- a/packages/actor-query-source-identify-streaming-rdfjs/lib/IRdfJsSourceExtended.ts
+++ b/packages/actor-query-source-identify-streaming-rdfjs/lib/IRdfJsSourceExtended.ts
@@ -1,6 +1,6 @@
+import type { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
+import type { Quad } from '@incremunica/incremental-types';
import type * as RDF from '@rdfjs/types';
-import {StreamingStore} from "@incremunica/incremental-rdf-streaming-store";
-import {Quad} from "@incremunica/incremental-types";
export interface IIncementalRdfJsSourceExtended extends RDF.Source {
/**
@@ -17,7 +17,7 @@ export interface IIncementalRdfJsSourceExtended extends RDF.Source {
/**
*
*/
- streamingStore: StreamingStore
+ streamingStore: StreamingStore;
/**
* Return an estimated count of the number of quads matching the given pattern.
*
diff --git a/packages/actor-query-source-identify-streaming-rdfjs/lib/StreamingQuerySourceRdfJs.ts b/packages/actor-query-source-identify-streaming-rdfjs/lib/StreamingQuerySourceRdfJs.ts
index f8f143a5..d3ca0238 100644
--- a/packages/actor-query-source-identify-streaming-rdfjs/lib/StreamingQuerySourceRdfJs.ts
+++ b/packages/actor-query-source-identify-streaming-rdfjs/lib/StreamingQuerySourceRdfJs.ts
@@ -1,27 +1,34 @@
+import type { BindingsFactory } from '@comunica/bindings-factory';
+import { ClosableIterator } from '@comunica/bus-query-operation';
+import {
+ filterMatchingQuotedQuads,
+ getVariables,
+ getDuplicateElementLinks,
+ setMetadata,
+} from '@comunica/bus-query-source-identify';
+import { KeysQueryOperation } from '@comunica/context-entries';
import { MetadataValidationState } from '@comunica/metadata';
-import type {IQuerySource, BindingsStream, IActionContext, FragmentSelectorShape, Bindings} from '@comunica/types';
+import type { IQuerySource, BindingsStream, IActionContext, FragmentSelectorShape, Bindings } from '@comunica/types';
+import { ActionContextKeyIsAddition } from '@incremunica/actor-merge-bindings-context-is-addition';
import { KeysGuard, KeysStreamingSource } from '@incremunica/context-entries';
-import {IGuardEvents, Quad} from '@incremunica/incremental-types';
+import type { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
+import type { IGuardEvents, Quad } from '@incremunica/incremental-types';
import type * as RDF from '@rdfjs/types';
+import type { AsyncIterator } from 'asynciterator';
import { wrap as wrapAsyncIterator } from 'asynciterator';
-import { AsyncIterator } from 'asynciterator';
-import {Algebra, Factory} from 'sparqlalgebrajs';
import { DataFactory } from 'rdf-data-factory';
-import {BindingsFactory} from "@comunica/bindings-factory";
+import type {
+ QuadTermName,
+} from 'rdf-terms';
import {
filterTermsNested,
getValueNestedPath,
- QuadTermName,
reduceTermsNested,
someTermsNested,
- uniqTerms
-} from "rdf-terms";
-import { filterMatchingQuotedQuads, getVariables, getDuplicateElementLinks, setMetadata } from '@comunica/bus-query-source-identify';
-import {KeysQueryOperation} from "@comunica/context-entries";
-import {StreamingStore} from "@incremunica/incremental-rdf-streaming-store";
-import { ClosableIterator } from '@comunica/bus-query-operation';
-import {ActionContextKeyIsAddition} from "@incremunica/actor-merge-bindings-context-is-addition";
-import {Duplex, Transform} from "readable-stream";
+ uniqTerms,
+} from 'rdf-terms';
+import { Factory } from 'sparqlalgebrajs';
+import type { Algebra } from 'sparqlalgebrajs';
const AF = new Factory();
const DF = new DataFactory();
@@ -85,7 +92,7 @@ export class StreamingQuerySourceRdfJs implements IQuerySource {
StreamingQuerySourceRdfJs.nullifyVariables(operation.object, false),
StreamingQuerySourceRdfJs.nullifyVariables(operation.graph, false),
matchOptions,
- )
+ );
if (context) {
const matchOptionsArray: ({ stopMatch: () => void })[] | undefined = context.get(
@@ -96,7 +103,7 @@ export class StreamingQuerySourceRdfJs implements IQuerySource {
}
}
- let quads = filterMatchingQuotedQuads(operation, wrapAsyncIterator(rawStream, { autoStart: false }));
+ const quads = filterMatchingQuotedQuads(operation, wrapAsyncIterator(rawStream, { autoStart: false }));
// Set up-to-date property
quads.setProperty('up-to-date', true);
@@ -129,7 +136,7 @@ export class StreamingQuerySourceRdfJs implements IQuerySource {
// TODO implement setMetadata make a proper estimation for the cardinality
protected async setMetadata(
it: AsyncIterator,
- operation: Algebra.Pattern,
+ _operation: Algebra.Pattern,
): Promise {
const cardinality = 1;
@@ -165,7 +172,7 @@ export class StreamingQuerySourceRdfJs implements IQuerySource {
return `StreamingQuerySourceRdfJs(${this.store.constructor.name})`;
}
- static quadsToBindings(
+ private static quadsToBindings(
quads: AsyncIterator,
pattern: Algebra.Pattern,
bindingsFactory: BindingsFactory,
@@ -174,8 +181,8 @@ export class StreamingQuerySourceRdfJs implements IQuerySource {
const variables = getVariables(pattern);
// If non-default-graph triples need to be filtered out
- const filterNonDefaultQuads = pattern.graph.termType === 'Variable'
- && !unionDefaultGraph;
+ const filterNonDefaultQuads = pattern.graph.termType === 'Variable' &&
+ !unionDefaultGraph;
// Detect duplicate variables in the pattern
const duplicateElementLinks: Record | undefined = getDuplicateElementLinks(pattern);
@@ -220,21 +227,28 @@ export class StreamingQuerySourceRdfJs implements IQuerySource {
}
// Wrap it in a ClosableIterator, so we can propagate destroy calls
- const bindingsStream = new ClosableIterator(filteredOutput.map(quad => {
- return bindingsFactory
- .bindings(Object.keys(elementVariables).map((key) => {
- const keys: QuadTermName[] = key.split('_');
- const variable = elementVariables[key];
- const term = getValueNestedPath(quad, keys);
- return [ DF.variable(variable), term ];
- //TODO write a test for this
- })).setContextEntry(new ActionContextKeyIsAddition(), ((quad).diff == undefined)? true : (quad).diff);
- }), {
+ const bindingsStream = new ClosableIterator(filteredOutput.map(quad => bindingsFactory
+ .bindings(Object.keys(elementVariables).map((key) => {
+ const keys: QuadTermName[] = key.split('_');
+ const variable = elementVariables[key];
+ const term = getValueNestedPath(quad, keys);
+ return [ DF.variable(variable), term ];
+ // TODO write a test for this
+ })).setContextEntry(
+ new ActionContextKeyIsAddition(),
+ ((quad).diff === undefined) ? true : (quad).diff,
+ )), {
onClose: () => quads.destroy(),
});
// Set the metadata property
- setMetadata(bindingsStream, quads, elementVariables, variables, filterNonDefaultQuads || Boolean(duplicateElementLinks));
+ setMetadata(
+ bindingsStream,
+ quads,
+ elementVariables,
+ variables,
+ filterNonDefaultQuads || Boolean(duplicateElementLinks),
+ );
return bindingsStream;
}
diff --git a/packages/actor-query-source-identify-streaming-rdfjs/package.json b/packages/actor-query-source-identify-streaming-rdfjs/package.json
index 83f0b39d..1214d6dc 100644
--- a/packages/actor-query-source-identify-streaming-rdfjs/package.json
+++ b/packages/actor-query-source-identify-streaming-rdfjs/package.json
@@ -3,47 +3,56 @@
"version": "1.3.0",
"description": "A Streaming rdfjs query-source-identify actor",
"lsd:module": true,
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica.git",
"directory": "packages/actor-query-source-identify-streaming-rdfjs"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"comunica",
"actor",
"query-source-identify",
"streaming-rdfjs"
],
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
},
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
"components",
"lib/**/*.d.ts",
"lib/**/*.js",
"lib/**/*.js.map"
],
+ "scripts": {
+ "build": "npm run build:ts && npm run build:components",
+ "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
+ "build:components": "componentsjs-generator"
+ },
"dependencies": {
+ "@comunica/bindings-factory": "^3.3.0",
+ "@comunica/bus-merge-bindings-context": "^3.3.0",
+ "@comunica/bus-query-operation": "^3.3.0",
+ "@comunica/bus-query-source-identify": "^3.3.0",
+ "@comunica/context-entries": "^3.3.0",
"@comunica/core": "^3.2.1",
"@comunica/metadata": "^3.2.1",
"@comunica/types": "^3.2.1",
+ "@incremunica/actor-merge-bindings-context-is-addition": "^1.3.0",
"@incremunica/context-entries": "^1.3.0",
"@incremunica/incremental-rdf-streaming-store": "^1.3.0",
"@incremunica/incremental-types": "^1.3.0",
"@rdfjs/types": "*",
- "asynciterator": "^3.9.0"
- },
- "scripts": {
- "build": "npm run build:ts && npm run build:components",
- "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
- "build:components": "componentsjs-generator"
+ "asynciterator": "^3.9.0",
+ "rdf-data-factory": "^1.1.2",
+ "rdf-terms": "^1.11.0",
+ "sparqlalgebrajs": "^4.3.8"
}
}
diff --git a/packages/actor-query-source-identify-streaming-rdfjs/test/StreamingQuerySourceRdfJs-test.ts b/packages/actor-query-source-identify-streaming-rdfjs/test/StreamingQuerySourceRdfJs-test.ts
index 8db13b4b..b23126df 100644
--- a/packages/actor-query-source-identify-streaming-rdfjs/test/StreamingQuerySourceRdfJs-test.ts
+++ b/packages/actor-query-source-identify-streaming-rdfjs/test/StreamingQuerySourceRdfJs-test.ts
@@ -1,20 +1,20 @@
import { Readable } from 'node:stream';
-import { BindingsFactory } from '@comunica/bindings-factory';
+import type { BindingsFactory } from '@comunica/bindings-factory';
import { KeysQueryOperation } from '@comunica/context-entries';
import { ActionContext } from '@comunica/core';
import { MetadataValidationState } from '@comunica/metadata';
import type { IActionContext } from '@comunica/types';
+import { ActionContextKeyIsAddition } from '@incremunica/actor-merge-bindings-context-is-addition';
+import { DevTools } from '@incremunica/dev-tools';
+import { StreamingStore } from '@incremunica/incremental-rdf-streaming-store';
+import type { Quad } from '@incremunica/incremental-types';
import arrayifyStream from 'arrayify-stream';
+import { ArrayIterator } from 'asynciterator';
import { DataFactory } from 'rdf-data-factory';
import { Factory } from 'sparqlalgebrajs';
import { StreamingQuerySourceRdfJs } from '../lib';
import '@incremunica/incremental-jest';
-import {StreamingStore} from "@incremunica/incremental-rdf-streaming-store";
-import {Quad} from "@incremunica/incremental-types";
import 'jest-rdf';
-import {DevTools} from "@incremunica/dev-tools";
-import {ArrayIterator} from "asynciterator";
-import {ActionContextKeyIsAddition} from "@incremunica/actor-merge-bindings-context-is-addition";
const quad = require('rdf-quad');
@@ -26,8 +26,8 @@ describe('StreamingQuerySourceRdfJs', () => {
let store: StreamingStore;
let source: StreamingQuerySourceRdfJs;
- let BF: BindingsFactory
- beforeEach(async () => {
+ let BF: BindingsFactory;
+ beforeEach(async() => {
ctx = new ActionContext({});
store = new StreamingStore();
BF = await DevTools.createBindingsFactory(DF);
@@ -85,15 +85,15 @@ describe('StreamingQuerySourceRdfJs', () => {
o: DF.namedNode('o1'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'exact', value: 2 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('o') ],
- });
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'exact', value: 2 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('o') ],
+ // });
+ //
});
it('should return triples in a named graph', async() => {
@@ -138,15 +138,15 @@ describe('StreamingQuerySourceRdfJs', () => {
g: DF.namedNode('g1'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'estimate', value: 2 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('o'), DF.variable('g') ],
- });
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'estimate', value: 2 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('o'), DF.variable('g') ],
+ // });
+ //
});
it('should return quads in named graphs and the default graph with union default graph', async() => {
@@ -173,15 +173,15 @@ describe('StreamingQuerySourceRdfJs', () => {
g: DF.namedNode('g1'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'exact', value: 2 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('o'), DF.variable('g') ],
- });
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'exact', value: 2 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('o'), DF.variable('g') ],
+ // });
+ //
});
it('should use countQuads if available', async() => {
@@ -206,15 +206,15 @@ describe('StreamingQuerySourceRdfJs', () => {
o: DF.namedNode('o1'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'exact', value: 123 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('o') ],
- });
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'exact', value: 123 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('o') ],
+ // });
+ //
});
it('should fallback to match if countQuads is not available', async() => {
@@ -241,15 +241,15 @@ describe('StreamingQuerySourceRdfJs', () => {
o: DF.namedNode('o1'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'exact', value: 2 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('o') ],
- });
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'exact', value: 2 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('o') ],
+ // });
+ //
});
it('should delegate errors', async() => {
@@ -312,15 +312,15 @@ describe('StreamingQuerySourceRdfJs', () => {
o: DF.namedNode('o1'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'exact', value: 3 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('o') ],
- });
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'exact', value: 3 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('o') ],
+ // });
+ //
});
it('should run when containing quoted triples with a quoted pattern (1)', async() => {
@@ -513,15 +513,15 @@ describe('StreamingQuerySourceRdfJs', () => {
o: DF.namedNode('o1'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'exact', value: 3 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('o') ],
- });
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'exact', value: 3 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('o') ],
+ // });
+ //
});
it('should run when containing quoted triples with a quoted pattern (1)', async() => {
@@ -556,16 +556,16 @@ describe('StreamingQuerySourceRdfJs', () => {
o1: DF.namedNode('oa3'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'estimate', value: 3 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('s1'), DF.variable('p1'), DF.variable('o1') ],
- });
-
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'estimate', value: 3 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('s1'), DF.variable('p1'), DF.variable('o1') ],
+ // });
+ //
+ //
});
it('should run when containing quoted triples with a quoted pattern (2)', async() => {
@@ -600,15 +600,15 @@ describe('StreamingQuerySourceRdfJs', () => {
o1: DF.namedNode('ob3'),
}).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
- /*
- await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
- .toEqual({
- cardinality: { type: 'estimate', value: 5 },
- canContainUndefs: false,
- state: expect.any(MetadataValidationState),
- variables: [ DF.variable('s'), DF.variable('p'), DF.variable('s1'), DF.variable('o1') ],
- });
- */
+ //
+ // await expect(new Promise(resolve => data.getProperty('metadata', resolve))).resolves
+ // .toEqual({
+ // cardinality: { type: 'estimate', value: 5 },
+ // canContainUndefs: false,
+ // state: expect.any(MetadataValidationState),
+ // variables: [ DF.variable('s'), DF.variable('p'), DF.variable('s1'), DF.variable('o1') ],
+ // });
+ //
});
});
});
diff --git a/packages/actor-rdf-join-incremental-minus-hash/README.md b/packages/actor-rdf-join-incremental-minus-hash/README.md
index 04a248c5..144b5cdf 100644
--- a/packages/actor-rdf-join-incremental-minus-hash/README.md
+++ b/packages/actor-rdf-join-incremental-minus-hash/README.md
@@ -17,7 +17,7 @@ After installing, this package can be added to your engine's configuration as fo
{
"@context": [
...
- "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-rdf-join-incremental-minus-hash/^1.0.0/components/context.jsonld"
+ "https://linkedsoftwaredependencies.org/bundles/npm/@incremunica/actor-rdf-join-incremental-minus-hash/^1.0.0/components/context.jsonld"
],
"actors": [
...
diff --git a/packages/actor-rdf-join-incremental-minus-hash/lib/ActorRdfJoinIncrementalMinusHash.ts b/packages/actor-rdf-join-incremental-minus-hash/lib/ActorRdfJoinIncrementalMinusHash.ts
index 2013cd95..80421e4a 100644
--- a/packages/actor-rdf-join-incremental-minus-hash/lib/ActorRdfJoinIncrementalMinusHash.ts
+++ b/packages/actor-rdf-join-incremental-minus-hash/lib/ActorRdfJoinIncrementalMinusHash.ts
@@ -1,11 +1,11 @@
+import type { Bindings } from '@comunica/bindings-factory';
import type { IActionRdfJoin, IActorRdfJoinArgs, IActorRdfJoinOutputInner } from '@comunica/bus-rdf-join';
import { ActorRdfJoin } from '@comunica/bus-rdf-join';
import type { IMediatorTypeJoinCoefficients } from '@comunica/mediatortype-join-coefficients';
-import type {BindingsStream, MetadataBindings} from '@comunica/types';
+import type { BindingsStream, MetadataBindings } from '@comunica/types';
import type * as RDF from '@rdfjs/types';
-import { IncrementalMinusHash } from './IncrementalMinusHash';
-import type { Bindings } from '@comunica/bindings-factory';
import type { AsyncIterator } from 'asynciterator';
+import { IncrementalMinusHash } from './IncrementalMinusHash';
/**
* An Incremunica Minus Hash RDF Join Actor.
@@ -27,7 +27,7 @@ export class ActorRdfJoinIncrementalMinusHash extends ActorRdfJoin {
const metadatas = await ActorRdfJoin.getMetadatas(action.entries);
const commonVariables: RDF.Variable[] = ActorRdfJoin.overlappingVariables(metadatas);
if (commonVariables.length > 0) {
- const bindingsStream = new IncrementalMinusHash(
+ const bindingsStream = new IncrementalMinusHash(
>output.bindingsStream,
>buffer.bindingsStream,
commonVariables,
@@ -46,8 +46,8 @@ export class ActorRdfJoinIncrementalMinusHash extends ActorRdfJoin {
}
protected async getJoinCoefficients(
- action: IActionRdfJoin,
- metadatas: MetadataBindings[],
+ _action: IActionRdfJoin,
+ _metadatas: MetadataBindings[],
): Promise {
return {
iterations: 0,
diff --git a/packages/actor-rdf-join-incremental-minus-hash/lib/IncrementalMinusHash.ts b/packages/actor-rdf-join-incremental-minus-hash/lib/IncrementalMinusHash.ts
index df088054..8ced5c2f 100644
--- a/packages/actor-rdf-join-incremental-minus-hash/lib/IncrementalMinusHash.ts
+++ b/packages/actor-rdf-join-incremental-minus-hash/lib/IncrementalMinusHash.ts
@@ -1,8 +1,8 @@
-import { HashBindings } from '@incremunica/hash-bindings';
import type { Bindings } from '@comunica/bindings-factory';
+import { ActionContextKeyIsAddition } from '@incremunica/actor-merge-bindings-context-is-addition';
+import { HashBindings } from '@incremunica/hash-bindings';
import type * as RDF from '@rdfjs/types';
import { AsyncIterator } from 'asynciterator';
-import {ActionContextKeyIsAddition} from "@incremunica/actor-merge-bindings-context-is-addition";
export class IncrementalMinusHash extends AsyncIterator {
private readonly leftIterator: AsyncIterator;
@@ -94,7 +94,7 @@ export class IncrementalMinusHash extends AsyncIterator {
const matchingBindings = this.leftMemory.get(hash);
if (matchingBindings !== undefined) {
for (let matchingBinding of matchingBindings) {
- //TODO check if the 2 bindings are equal for common variables
+ // TODO check if the 2 bindings are equal for common variables
matchingBinding = matchingBinding.setContextEntry(new ActionContextKeyIsAddition(), false);
this.bindingBuffer.push(matchingBinding);
}
@@ -145,7 +145,7 @@ export class IncrementalMinusHash extends AsyncIterator {
} else {
for (let i = 0; i < currentArray.length; i++) {
if (currentArray[i].equals(element)) {
- currentArray[i] = currentArray[currentArray.length - 1];
+ currentArray[i] = currentArray.at(-1)!;
currentArray.pop();
break;
}
diff --git a/packages/actor-rdf-join-incremental-minus-hash/package.json b/packages/actor-rdf-join-incremental-minus-hash/package.json
index db3c9dc7..9b67954f 100644
--- a/packages/actor-rdf-join-incremental-minus-hash/package.json
+++ b/packages/actor-rdf-join-incremental-minus-hash/package.json
@@ -3,47 +3,47 @@
"version": "1.3.0",
"description": "An incremental-minus-hash rdf-join actor",
"lsd:module": true,
- "main": "lib/index.js",
- "typings": "lib/index",
+ "license": "MIT",
+ "homepage": "https://maartyman.github.io/incremunica/",
"repository": {
"type": "git",
"url": "https://github.com/maartyman/incremunica.git",
"directory": "packages/actor-rdf-join-incremental-minus-hash"
},
- "publishConfig": {
- "access": "public"
+ "bugs": {
+ "url": "https://github.com/maartyman/incremunica/issues"
},
- "sideEffects": false,
"keywords": [
"comunica",
"actor",
"rdf-join",
"incremental-minus-hash"
],
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/maartyman/incremunica/issues"
+ "sideEffects": false,
+ "main": "lib/index.js",
+ "typings": "lib/index",
+ "publishConfig": {
+ "access": "public"
},
- "homepage": "https://maartyman.github.io/incremunica/",
"files": [
"components",
"lib/**/*.d.ts",
"lib/**/*.js",
"lib/**/*.js.map"
],
+ "scripts": {
+ "build": "npm run build:ts && npm run build:components",
+ "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
+ "build:components": "componentsjs-generator"
+ },
"dependencies": {
"@comunica/bindings-factory": "^3.2.1",
"@comunica/bus-rdf-join": "^3.2.1",
"@comunica/mediatortype-join-coefficients": "^3.2.1",
"@comunica/types": "^3.2.1",
+ "@incremunica/actor-merge-bindings-context-is-addition": "^1.3.0",
"@incremunica/hash-bindings": "^1.3.0",
- "@incremunica/incremental-types": "^1.3.0",
"@rdfjs/types": "*",
"asynciterator": "^3.9.0"
- },
- "scripts": {
- "build": "npm run build:ts && npm run build:components",
- "build:ts": "node \"../../node_modules/typescript/bin/tsc\"",
- "build:components": "componentsjs-generator"
}
}
diff --git a/packages/actor-rdf-join-incremental-minus-hash/test/ActorRdfJoinIncrementalMinusHash-test.ts b/packages/actor-rdf-join-incremental-minus-hash/test/ActorRdfJoinIncrementalMinusHash-test.ts
index 71ee9120..787843e4 100644
--- a/packages/actor-rdf-join-incremental-minus-hash/test/ActorRdfJoinIncrementalMinusHash-test.ts
+++ b/packages/actor-rdf-join-incremental-minus-hash/test/ActorRdfJoinIncrementalMinusHash-test.ts
@@ -1,17 +1,17 @@
-import { BindingsFactory } from '@comunica/bindings-factory';
+import type { BindingsFactory } from '@comunica/bindings-factory';
import type { IActionRdfJoin } from '@comunica/bus-rdf-join';
import type { IActionRdfJoinSelectivity, IActorRdfJoinSelectivityOutput } from '@comunica/bus-rdf-join-selectivity';
import type { Actor, IActorTest, Mediator } from '@comunica/core';
import { ActionContext, Bus } from '@comunica/core';
import type { IActionContext } from '@comunica/types';
-import {ArrayIterator} from 'asynciterator';
+import {
+ ActionContextKeyIsAddition,
+} from '@incremunica/actor-merge-bindings-context-is-addition';
+import { DevTools } from '@incremunica/dev-tools';
+import { ArrayIterator } from 'asynciterator';
import { DataFactory } from 'rdf-data-factory';
import { ActorRdfJoinIncrementalMinusHash } from '../lib/ActorRdfJoinIncrementalMinusHash';
import '@comunica/jest';
-import {
- ActionContextKeyIsAddition
-} from "@incremunica/actor-merge-bindings-context-is-addition";
-import {DevTools} from "@incremunica/dev-tools";
const DF = new DataFactory();
@@ -20,8 +20,8 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
let context: IActionContext;
let BF: BindingsFactory;
- beforeEach(async () => {
- bus = new Bus({name: 'bus'});
+ beforeEach(async() => {
+ bus = new Bus({ name: 'bus' });
context = new ActionContext();
BF = await DevTools.createBindingsFactory(DF);
});
@@ -29,18 +29,21 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
describe('An ActorRdfJoinIncrementalMinusHash instance', () => {
let mediatorJoinSelectivity: Mediator<
Actor,
- IActionRdfJoinSelectivity, IActorTest, IActorRdfJoinSelectivityOutput>;
+ IActionRdfJoinSelectivity,
+IActorTest,
+IActorRdfJoinSelectivityOutput
+>;
let actor: ActorRdfJoinIncrementalMinusHash;
beforeEach(() => {
mediatorJoinSelectivity = {
- mediate: async () => ({selectivity: 1}),
+ mediate: async() => ({ selectivity: 1 }),
};
- actor = new ActorRdfJoinIncrementalMinusHash({name: 'actor', bus, mediatorJoinSelectivity});
+ actor = new ActorRdfJoinIncrementalMinusHash({ name: 'actor', bus, mediatorJoinSelectivity });
});
describe('test', () => {
- it('should not test on zero entries', async () => {
+ it('should not test on zero entries', async() => {
await expect(actor.test({
type: 'minus',
entries: [],
@@ -48,7 +51,7 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
})).rejects.toThrow('actor requires at least two join entries.');
});
- it('should not test on one entry', async () => {
+ it('should not test on one entry', async() => {
await expect(actor.test({
type: 'minus',
entries: [{}],
@@ -56,7 +59,7 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
})).rejects.toThrow('actor requires at least two join entries.');
});
- it('should not test on three entries', async () => {
+ it('should not test on three entries', async() => {
await expect(actor.test({
type: 'minus',
entries: [{}, {}, {}],
@@ -64,7 +67,7 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
})).rejects.toThrow('actor requires 2 join entries at most. The input contained 3.');
});
- it('should not test on a non-minus operation', async () => {
+ it('should not test on a non-minus operation', async() => {
await expect(actor.test({
type: 'inner',
entries: [{}, {}],
@@ -72,15 +75,15 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
})).rejects.toThrow(`actor can only handle logical joins of type 'minus', while 'inner' was given.`);
});
- it('should test on two entries with undefs', async () => {
- expect(await actor.test({
+ it('should test on two entries with undefs', async() => {
+ await expect(actor.test({
type: 'minus',
entries: [
{
output: {
type: 'bindings',
metadata: () => Promise.resolve(
- {cardinality: 4, pageSize: 100, requestTime: 10, canContainUndefs: true},
+ { cardinality: 4, pageSize: 100, requestTime: 10, canContainUndefs: true },
),
},
},
@@ -88,13 +91,13 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
output: {
type: 'bindings',
metadata: () => Promise.resolve(
- {cardinality: 4, pageSize: 100, requestTime: 10, canContainUndefs: true},
+ { cardinality: 4, pageSize: 100, requestTime: 10, canContainUndefs: true },
),
},
},
],
context,
- })).toEqual({
+ })).resolves.toEqual({
iterations: 0,
blockingItems: 0,
persistedItems: 0,
@@ -102,15 +105,15 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
});
});
- it('should test on two entries', async () => {
- expect(await actor.test({
+ it('should test on two entries', async() => {
+ await expect(actor.test({
type: 'minus',
entries: [
{
output: {
type: 'bindings',
metadata: () => Promise.resolve({
- cardinality: {type: 'estimate', value: 4},
+ cardinality: { type: 'estimate', value: 4 },
pageSize: 100,
requestTime: 10,
canContainUndefs: false,
@@ -121,7 +124,7 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
output: {
type: 'bindings',
metadata: () => Promise.resolve({
- cardinality: {type: 'estimate', value: 4},
+ cardinality: { type: 'estimate', value: 4 },
pageSize: 100,
requestTime: 10,
canContainUndefs: false,
@@ -130,7 +133,7 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
},
],
context,
- })).toEqual({
+ })).resolves.toEqual({
iterations: 0,
blockingItems: 0,
persistedItems: 0,
@@ -140,21 +143,27 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
});
describe('getOutput', () => {
- it('should handle entries with common variables', async () => {
+ it('should handle entries with common variables', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('3')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -163,13 +172,17 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -178,32 +191,40 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
],
context,
};
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
// Validate output
- expect(result.type).toEqual('bindings');
- expect(await result.metadata())
- .toEqual({cardinality: 3, canContainUndefs: false, variables: [DF.variable('a')]});
+ expect(result.type).toBe('bindings');
+ await expect(result.metadata()).resolves
+ .toEqual({ cardinality: 3, canContainUndefs: false, variables: [ DF.variable('a') ]});
await expect(result.bindingsStream).toEqualBindingsStream([
- BF.bindings([[DF.variable('a'), DF.literal('3')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
});
- it('should handle entries with common variables and deletions', async () => {
+ it('should handle entries with common variables and deletions', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('3')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -212,19 +233,29 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
null,
null,
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
null,
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
- BF.bindings([[DF.variable('a'), DF.literal('0')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('0') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -233,48 +264,62 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
],
context,
};
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
// Validate output
- expect(result.type).toEqual('bindings');
+ expect(result.type).toBe('bindings');
await expect(result.bindingsStream).toEqualBindingsStream([
BF.bindings([
- [DF.variable('a'), DF.literal('2')]
+ [ DF.variable('a'), DF.literal('2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('a'), DF.literal('2')]
+ [ DF.variable('a'), DF.literal('2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false),
BF.bindings([
- [DF.variable('a'), DF.literal('1')]
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('a'), DF.literal('3')]
+ [ DF.variable('a'), DF.literal('3') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('a'), DF.literal('2')]
+ [ DF.variable('a'), DF.literal('2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
]);
});
- it('should handle entries with common variables and deletions II', async () => {
+ it('should handle entries with common variables and deletions II', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('3')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('0')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
- BF.bindings([[DF.variable('a'), DF.literal('3')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('0') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('3') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -283,14 +328,18 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
null,
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -299,21 +348,21 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
],
context,
};
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
// Validate output
- expect(result.type).toEqual('bindings');
+ expect(result.type).toBe('bindings');
await expect(result.bindingsStream).toEqualBindingsStream([
BF.bindings([
- [DF.variable('a'), DF.literal('3')]
+ [ DF.variable('a'), DF.literal('3') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('a'), DF.literal('3')]
+ [ DF.variable('a'), DF.literal('3') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false),
]);
});
- it('should handle entries with common variables and deletions III', async () => {
+ it('should handle entries with common variables and deletions III', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
@@ -321,22 +370,22 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
output: {
bindingsStream: new ArrayIterator([
BF.bindings([
- [DF.variable('a'), DF.literal('1')],
- [DF.variable('b'), DF.literal('1')]
+ [ DF.variable('a'), DF.literal('1') ],
+ [ DF.variable('b'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('a'), DF.literal('1')],
- [DF.variable('b'), DF.literal('2')]
+ [ DF.variable('a'), DF.literal('1') ],
+ [ DF.variable('b'), DF.literal('2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('a'), DF.literal('1')],
- [DF.variable('b'), DF.literal('1')]
+ [ DF.variable('a'), DF.literal('1') ],
+ [ DF.variable('b'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false),
- ], {autoStart: false}),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a'), DF.variable('b')],
+ variables: [ DF.variable('a'), DF.variable('b') ],
}),
type: 'bindings',
},
@@ -348,15 +397,23 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
null,
null,
null,
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('2')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('2') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -365,43 +422,45 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
],
context,
};
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
// Validate output
- expect(result.type).toEqual('bindings');
+ expect(result.type).toBe('bindings');
await expect(result.bindingsStream).toEqualBindingsStream([
BF.bindings([
- [DF.variable('a'), DF.literal('1')],
- [DF.variable('b'), DF.literal('1')]
+ [ DF.variable('a'), DF.literal('1') ],
+ [ DF.variable('b'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('a'), DF.literal('1')],
- [DF.variable('b'), DF.literal('2')]
+ [ DF.variable('a'), DF.literal('1') ],
+ [ DF.variable('b'), DF.literal('2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true),
BF.bindings([
- [DF.variable('a'), DF.literal('1')],
- [DF.variable('b'), DF.literal('1')]
+ [ DF.variable('a'), DF.literal('1') ],
+ [ DF.variable('b'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false),
BF.bindings([
- [DF.variable('a'), DF.literal('1')],
- [DF.variable('b'), DF.literal('2')]
+ [ DF.variable('a'), DF.literal('1') ],
+ [ DF.variable('b'), DF.literal('2') ],
]).setContextEntry(new ActionContextKeyIsAddition(), false),
]);
});
- it('should return null on ended', async () => {
+ it('should return null on ended', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -410,12 +469,14 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -424,28 +485,32 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
],
context,
};
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
// Validate output
- expect(result.type).toEqual('bindings');
+ expect(result.type).toBe('bindings');
await expect(result.bindingsStream).toEqualBindingsStream([]);
- await expect(result.bindingsStream.read()).toEqual(null);
+ expect(result.bindingsStream.read()).toBeNull();
});
- it('should be able to end when buffer is full', async () => {
+ it('should be able to end when buffer is full', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -454,15 +519,19 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
{
output: {
bindingsStream: new ArrayIterator([
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), true),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), true),
null,
null,
- BF.bindings([[DF.variable('a'), DF.literal('1')]]).setContextEntry(new ActionContextKeyIsAddition(), false),
- ], {autoStart: false}),
+ BF.bindings([
+ [ DF.variable('a'), DF.literal('1') ],
+ ]).setContextEntry(new ActionContextKeyIsAddition(), false),
+ ], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -471,18 +540,18 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
],
context,
};
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
// Validate output
- expect(action.entries[0].output.bindingsStream.readable = false);
- expect(action.entries[1].output.bindingsStream.readable = false);
+ expect(action.entries[0].output.bindingsStream.readable).toBeFalsy();
+ expect(action.entries[1].output.bindingsStream.readable).toBeFalsy();
await new Promise(resolve => setTimeout(resolve, 0));
- expect(action.entries[0].output.bindingsStream.readable = true);
- expect(action.entries[1].output.bindingsStream.readable = true);
+ expect(action.entries[0].output.bindingsStream.readable).toBeTruthy();
+ expect(action.entries[1].output.bindingsStream.readable).toBeTruthy();
await new Promise(resolve => setTimeout(resolve, 0));
expect(result.bindingsStream.read())
.toEqualBindings(BF.bindings([
- [DF.variable('a'), DF.literal('1')]
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true));
action.entries[0].output.bindingsStream.close();
action.entries[1].output.bindingsStream.close();
@@ -492,25 +561,25 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
expect(result.bindingsStream.ended).toBeFalsy();
expect(result.bindingsStream.read())
.toEqualBindings(BF.bindings([
- [DF.variable('a'), DF.literal('1')]
+ [ DF.variable('a'), DF.literal('1') ],
]).setContextEntry(new ActionContextKeyIsAddition(), true));
- expect(result.bindingsStream.read()).toEqual(null);
+ expect(result.bindingsStream.read()).toBeNull();
expect(action.entries[0].output.bindingsStream.ended).toBeTruthy();
expect(action.entries[1].output.bindingsStream.ended).toBeTruthy();
expect(result.bindingsStream.ended).toBeTruthy();
});
- it('should be not readable if both inputs are not readable', async () => {
+ it('should be not readable if both inputs are not readable', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: {
- bindingsStream: new ArrayIterator([], {autoStart: false}),
+ bindingsStream: new ArrayIterator([], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -518,11 +587,11 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
},
{
output: {
- bindingsStream: new ArrayIterator([], {autoStart: false}),
+ bindingsStream: new ArrayIterator([], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -533,22 +602,22 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
};
action.entries[0].output.bindingsStream.readable = false;
action.entries[1].output.bindingsStream.readable = false;
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
await new Promise(resolve => setTimeout(resolve, 0));
expect(result.bindingsStream.readable).toBeFalsy();
});
- it('should be readable if input 1 is readable', async () => {
+ it('should be readable if input 1 is readable', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: {
- bindingsStream: new ArrayIterator([], {autoStart: false}),
+ bindingsStream: new ArrayIterator([], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -556,11 +625,11 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
},
{
output: {
- bindingsStream: new ArrayIterator([], {autoStart: false}),
+ bindingsStream: new ArrayIterator([], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -571,22 +640,22 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
};
action.entries[0].output.bindingsStream.readable = true;
action.entries[1].output.bindingsStream.readable = false;
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
await new Promise(resolve => setTimeout(resolve, 0));
expect(result.bindingsStream.readable).toBeTruthy();
});
- it('should be readable if input 2 is readable', async () => {
+ it('should be readable if input 2 is readable', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: {
- bindingsStream: new ArrayIterator([], {autoStart: false}),
+ bindingsStream: new ArrayIterator([], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 3,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -594,11 +663,11 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
},
{
output: {
- bindingsStream: new ArrayIterator([], {autoStart: false}),
+ bindingsStream: new ArrayIterator([], { autoStart: false }),
metadata: () => Promise.resolve({
cardinality: 2,
canContainUndefs: false,
- variables: [DF.variable('a')],
+ variables: [ DF.variable('a') ],
}),
type: 'bindings',
},
@@ -609,22 +678,22 @@ describe('ActorRdfJoinIncrementalMinusHash', () => {
};
action.entries[0].output.bindingsStream.readable = false;
action.entries[1].output.bindingsStream.readable = true;
- const {result} = await actor.getOutput(action);
+ const { result } = await actor.getOutput(action);
await new Promise(resolve => setTimeout(resolve, 0));
expect(result.bindingsStream.readable).toBeTruthy();
});
- it('should handle errors from right iterator', async () => {
+ it('should handle errors from right iterator', async() => {
const action: IActionRdfJoin = {
type: 'minus',
entries: [
{
output: