From 8fc299b2d9df36b49a7ccb97f08b06e9ff47d42c Mon Sep 17 00:00:00 2001
From: David Mihalcik
Date: Wed, 8 May 2024 22:21:58 -0400
Subject: [PATCH 1/8] feat!: lets nanoTDF client take options instead

- Adds `tsc` build of web-app to makefile
- fixes existing typescript issues in web app, and some cleanup
---
 Makefile                             |   2 +-
 lib/README.md                        |   2 +-
 lib/src/index.ts                     |  48 ++---
 lib/src/nanotdf/Client.ts            | 154 ++++++++-------
 lib/tdf3/src/client/builders.ts      |   2 +
 lib/tdf3/src/client/index.ts         |  22 ++-
 lib/tdf3/src/crypto/crypto-utils.ts  |  18 ++
 lib/tdf3/src/models/attribute-set.ts |   6 +-
 lib/tests/web/nano-roundtrip.test.ts |   4 +-
 lib/tests/web/nanotdf/Client.test.ts |   4 +-
 remote-store/package-lock.json       |   3 +-
 web-app/package-lock.json            |  29 +--
 web-app/package.json                 |   4 +-
 web-app/src/App.tsx                  | 269 ++++++++++-----------------
 web-app/src/config.ts                |  27 +++
 web-app/src/session.ts               | 124 ++++++++++--
 web-app/tsconfig.node.json           |   4 +-
 17 files changed, 417 insertions(+), 305 deletions(-)
 create mode 100644 web-app/src/config.ts

diff --git a/Makefile b/Makefile
index 464ecf64..9c2b7bd0 100644
--- a/Makefile
+++ b/Makefile
@@ -27,7 +27,7 @@ remote-store/opentdf-remote-store-$(version).tgz: lib/opentdf-client-$(version).
 	(cd remote-store && npm ci ../lib/opentdf-client-$(version).tgz && npm pack)
 
 web-app/opentdf-web-app-$(version).tgz: lib/opentdf-client-$(version).tgz $(shell find web-app -not -path '*/dist*' -and -not -path '*/coverage*' -and -not -path '*/node_modules*')
-	(cd web-app && npm ci ../lib/opentdf-client-$(version).tgz && npm pack)
+	(cd web-app && npm ci ../lib/opentdf-client-$(version).tgz && npm pack && npm run build)
 
 lib/opentdf-client-$(version).tgz: $(shell find lib -not -path '*/dist*' -and -not -path '*/coverage*' -and -not -path '*/node_modules*')
 	(cd lib && npm ci --including=dev && npm pack)
diff --git a/lib/README.md b/lib/README.md
index 571e3919..18566d4a 100644
--- a/lib/README.md
+++ b/lib/README.md
@@ -15,7 +15,7 @@ TDF3 with JSON envelopes.
   oidcOrigin: keycloakUrl,
 }
 const authProvider = await AuthProviders.refreshAuthProvider(oidcCredentials);
-const client = new NanoTDFClient(authProvider, access);
+const client = new NanoTDFClient({authProvider, kasEndpoint});
 const cipherText = await client.encrypt(plainText);
 const clearText = await client.decrypt(cipherText);
 ```
diff --git a/lib/src/index.ts b/lib/src/index.ts
index 25a39f73..20173bb3 100644
--- a/lib/src/index.ts
+++ b/lib/src/index.ts
@@ -10,7 +10,7 @@ import {
 } from './nanotdf/index.js';
 import { keyAgreement, extractPublicFromCertToCrypto } from './nanotdf-crypto/index.js';
 import { TypedArray, createAttribute, Policy } from './tdf/index.js';
-import { type AuthProvider } from './auth/auth.js';
+import { ClientConfig } from './nanotdf/Client.js';
 
 async function fetchKasPubKey(kasUrl: string): Promise<string> {
   const kasPubKeyResponse = await fetch(`${kasUrl}/kas_public_key?algorithm=ec:secp256r1`);
@@ -33,13 +33,14 @@ async function fetchKasPubKey(kasUrl: string): Promise<string> {
  * const KAS_URL = 'http://localhost:65432/api/kas/';
  *
  * const ciphertext = '...';
- * const client = new NanoTDFClient(
- *   await clientSecretAuthProvider({
+ * const client = new NanoTDFClient({
+ *   authProvider: await clientSecretAuthProvider({
  *     clientId: 'tdf-client',
  *     clientSecret: '123-456',
  *     oidcOrigin: OIDC_ENDPOINT,
  *   }),
- *   KAS_URL
+ *   kasEndpoint: KAS_URL
+ * }
  * );
  * client.decrypt(ciphertext)
  *   .then(plaintext => {
@@ -120,9 +121,9 @@ export class NanoTDFClient extends Client {
   */
  async encrypt(data: string | TypedArray | ArrayBuffer): Promise {
    // For encrypt always generate the client ephemeralKeyPair
-    const ephemeralKeyPair = await this.generateEphemeralKeyPair();
-
+    const ephemeralKeyPair = await this.ephemeralKeyPair;
     const initializationVector = this.iv;
+
    if (typeof initializationVector !== 'number') {
      throw new Error('NanoTDF clients are single use. Please generate a new client and keypair.');
    }
@@ -174,6 +175,10 @@ export class NanoTDFClient extends Client {
   }
 }
 
+export type DatasetConfig = ClientConfig & {
+  maxKeyIterations?: number;
+};
+
 /**
  * NanoTDF Dataset SDK Client
  *
  * @example
  * ```typescript
  * import { clientSecretAuthProvider, NanoTDFDatasetClient } from '@opentdf/client';
  *
  * const OIDC_ENDPOINT = 'http://localhost:65432/auth/realms/opentdf';
  * const KAS_URL = 'http://localhost:65432/api/kas/';
  *
  * const ciphertext = '...';
- * const client = new NanoTDFDatasetClient.default(
- *   await clientSecretAuthProvider({
+ * const client = new NanoTDFDatasetClient({
+ *   authProvider: await clientSecretAuthProvider({
  *     clientId: 'tdf-client',
  *     clientSecret: '123-456',
  *     exchange: 'client',
  *     oidcOrigin: OIDC_ENDPOINT,
  *   }),
- *   KAS_URL
- * );
+ *   kasEndpoint: KAS_URL,
+ * });
  * const plaintext = client.decrypt(ciphertext);
  * console.log('Plaintext', plaintext);
  * ```
  */
@@ -223,19 +228,18 @@ export class NanoTDFDatasetClient extends Client {
    * @param ephemeralKeyPair (optional) ephemeral key pair to use
    * @param maxKeyIterations Max iterations to perform without a key rotation
    */
-  constructor(
-    authProvider: AuthProvider,
-    kasUrl: string,
-    maxKeyIterations: number = NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS,
-    ephemeralKeyPair?: Required<Readonly<CryptoKeyPair>>
-  ) {
-    if (maxKeyIterations > NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS) {
-      throw new Error('Key iteration exceeds max iterations(8388606)');
+  constructor(opts: DatasetConfig) {
+    if (
+      opts.maxKeyIterations &&
+      opts.maxKeyIterations > NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS
+    ) {
+      throw new Error(
+        `Key iteration exceeds max iterations(${NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS})`
+      );
     }
+    super(opts);
 
-    super(authProvider, kasUrl, ephemeralKeyPair);
-
-    this.maxKeyIteration = maxKeyIterations;
+    this.maxKeyIteration = opts.maxKeyIterations || NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS;
     this.keyIterationCount = 0;
   }
 
@@ -250,7 +254,7 @@ export class NanoTDFDatasetClient extends Client {
     // Initial encrypt
     if (this.keyIterationCount == 0) {
       // For encrypt always generate the client ephemeralKeyPair
-      const ephemeralKeyPair = await this.generateEphemeralKeyPair();
+      const ephemeralKeyPair = await this.ephemeralKeyPair;
 
       if (!this.kasPubKey) {
         this.kasPubKey = await fetchKasPubKey(this.kasUrl);
diff --git a/lib/src/nanotdf/Client.ts b/lib/src/nanotdf/Client.ts
index 794fd8db..1c9802b1 100644
--- a/lib/src/nanotdf/Client.ts
+++ b/lib/src/nanotdf/Client.ts
@@ -16,6 +16,59 @@ import { cryptoPublicToPem, safeUrlCheck, validateSecureUrl } from '../utils.js'
 
 const { KeyUsageType, AlgorithmName, NamedCurve } = cryptoEnums;
 
+export interface ClientConfig {
+  authProvider: AuthProvider;
+  dpopEnabled?: boolean;
+  dpopKeys?: Promise<CryptoKeyPair>;
+  ephemeralKeyPair?: Promise<CryptoKeyPair>;
+  kasEndpoint: string;
+}
+
+function toJWSAlg(c: CryptoKey): string {
+  const { algorithm } = c;
+  switch (algorithm.name) {
+    case 'RSASSA-PKCS1-v1_5':
+    case 'RSA-PSS':
+    case 'RSA-OAEP': {
+      const r = algorithm as RsaHashedKeyGenParams;
+      switch (r.modulusLength) {
+        case 2048:
+          return 'RS256';
+        case 3072:
+          return 'RS384';
+        case 4096:
+          return 'RS512';
+      }
+    }
+    case 'ECDSA':
+    case 'ECDH': {
+      return 'ES256';
+    }
+  }
+  throw new Error(`Unsupported key algorithm ${JSON.stringify(algorithm)}`);
+}
+
+async function generateEphemeralKeyPair(): Promise<CryptoKeyPair> {
+  const { publicKey, privateKey } = await generateKeyPair();
+  if (!privateKey || !publicKey) {
+    throw Error('Key pair generation failed');
+  }
+  return { publicKey, privateKey };
+}
+
+async function generateSignerKeyPair(): Promise<CryptoKeyPair> {
+  const { publicKey, privateKey } = await
generateKeyPair({ + type: AlgorithmName.ECDSA, + curve: NamedCurve.P256, + keyUsages: [KeyUsageType.Sign, KeyUsageType.Verify], + isExtractable: true, + }); + if (!privateKey || !publicKey) { + throw Error('Signer key pair generation failed'); + } + return { publicKey, privateKey }; +} + /** * A Client encapsulates sessions interacting with TDF3 and nanoTDF backends, KAS and any * plugin-based sessions like identity and further attribute control. Most importantly, it is responsible @@ -63,8 +116,8 @@ export default class Client { readonly dpopEnabled: boolean; dissems: string[] = []; dataAttributes: string[] = []; - protected ephemeralKeyPair?: Required>; - protected requestSignerKeyPair?: Required>; + protected ephemeralKeyPair: Promise; + protected requestSignerKeyPair: Promise; protected iv?: number; /** @@ -74,59 +127,32 @@ export default class Client { * cannot be changed. If a new ephemeral key is desired it a new client should be initialized. * There is no performance impact for creating a new client IFF the ephemeral key pair is provided. */ - constructor( - authProvider: AuthProvider, - kasUrl: string, - ephemeralKeyPair?: Required>, - dpopEnabled = false - ) { + constructor({ + authProvider, + ephemeralKeyPair, + kasEndpoint, + dpopEnabled, + dpopKeys, + }: ClientConfig) { this.authProvider = authProvider; // TODO Disallow http KAS. For now just log as error - validateSecureUrl(kasUrl); - this.kasUrl = kasUrl; - this.allowedKases = [kasUrl]; + validateSecureUrl(kasEndpoint); + this.kasUrl = kasEndpoint; + this.allowedKases = [kasEndpoint]; this.kasPubKey = ''; - this.dpopEnabled = dpopEnabled; + this.dpopEnabled = !!dpopEnabled; + if (dpopKeys) { + this.requestSignerKeyPair = dpopKeys; + } else { + this.requestSignerKeyPair = generateSignerKeyPair(); + } if (ephemeralKeyPair) { this.ephemeralKeyPair = ephemeralKeyPair; - this.iv = 1; + } else { + this.ephemeralKeyPair = generateEphemeralKeyPair(); } - } - - /** - * Get ephemeral key pair - * - * Returns the ephemeral key pair to be used in other clients or undefined if not set or generated - * - * @security allow returning ephemeral key pair has unknown security risks. - */ - getEphemeralKeyPair(): CryptoKeyPair | undefined { - return this.ephemeralKeyPair; - } - - async generateEphemeralKeyPair(): Promise>> { - const { publicKey, privateKey } = await generateKeyPair(); - if (!privateKey || !publicKey) { - throw Error('Key pair generation failed'); - } - this.ephemeralKeyPair = { publicKey, privateKey }; this.iv = 1; - return { publicKey, privateKey }; - } - - async generateSignerKeyPair(): Promise>> { - const { publicKey, privateKey } = await generateKeyPair({ - type: AlgorithmName.ECDSA, - curve: NamedCurve.P256, - keyUsages: [KeyUsageType.Sign, KeyUsageType.Verify], - isExtractable: true, - }); - if (!privateKey || !publicKey) { - throw Error('Signer key pair generation failed'); - } - this.requestSignerKeyPair = { publicKey, privateKey }; - return { publicKey, privateKey }; } /** @@ -150,18 +176,7 @@ export default class Client { * either be set on the first call or passed in the constructor. 
*/ async fetchOIDCToken(): Promise { - // Generate the ephemeral key pair if not set - const promises: Promise>>[] = []; - if (!this.ephemeralKeyPair) { - promises.push(this.generateEphemeralKeyPair()); - } - - if (!this.requestSignerKeyPair) { - promises.push(this.generateSignerKeyPair()); - } - await Promise.all(promises); - - const signer = this.requestSignerKeyPair; + const signer = await this.requestSignerKeyPair; if (!signer) { throw new Error('Unexpected state'); } @@ -190,13 +205,15 @@ export default class Client { // Ensure the ephemeral key pair has been set or generated (see createOidcServiceProvider) await this.fetchOIDCToken(); + const ephemeralKeyPair = await this.ephemeralKeyPair; + const requestSignerKeyPair = await this.requestSignerKeyPair; // Ensure the ephemeral key pair has been set or generated (see fetchEntityObject) - if (!this.ephemeralKeyPair?.privateKey) { + if (!ephemeralKeyPair?.privateKey) { throw new Error('Ephemeral key has not been set or generated'); } - if (!this.requestSignerKeyPair?.privateKey) { + if (!requestSignerKeyPair?.privateKey) { throw new Error('Signer key has not been set or generated'); } @@ -210,13 +227,13 @@ export default class Client { protocol: Client.KAS_PROTOCOL, header: base64.encodeArrayBuffer(nanoTdfHeader), }, - clientPublicKey: await cryptoPublicToPem(this.ephemeralKeyPair.publicKey), + clientPublicKey: await cryptoPublicToPem(ephemeralKeyPair.publicKey), }); const jwtPayload = { requestBody: requestBodyStr }; const requestBody = { - signedRequestToken: await reqSignature(jwtPayload, this.requestSignerKeyPair.privateKey, { - alg: AlgorithmName.ES256, + signedRequestToken: await reqSignature(jwtPayload, requestSignerKeyPair.privateKey, { + alg: toJWSAlg(requestSignerKeyPair.publicKey), }), }; @@ -239,10 +256,10 @@ export default class Client { const iv = entityWrappedKey.subarray(0, ivLength); const encryptedSharedKey = entityWrappedKey.subarray(ivLength); - let publicKey; + let kasPublicKey; try { // Get session public key as crypto key - publicKey = await pemPublicToCrypto(wrappedKey.sessionPublicKey); + kasPublicKey = await pemPublicToCrypto(wrappedKey.sessionPublicKey); } catch (cause) { throw new Error( `PEM Public Key to crypto public key failed. 
Is PEM formatted correctly?\n Caused by: ${cause.message}`, @@ -257,12 +274,13 @@ export default class Client { } catch (e) { throw new Error(`Salting hkdf failed\n Caused by: ${e.message}`); } + const { privateKey } = await this.ephemeralKeyPair; // Get the unwrapping key const unwrappingKey = await keyAgreement( // Ephemeral private key - this.ephemeralKeyPair.privateKey, - publicKey, + privateKey, + kasPublicKey, hkdfSalt ); diff --git a/lib/tdf3/src/client/builders.ts b/lib/tdf3/src/client/builders.ts index 99ee102a..f88c008c 100644 --- a/lib/tdf3/src/client/builders.ts +++ b/lib/tdf3/src/client/builders.ts @@ -7,6 +7,7 @@ import { IllegalArgumentError } from '../../../src/errors.js'; import { PemKeyPair } from '../crypto/declarations.js'; import { EntityObject } from '../../../src/tdf/EntityObject.js'; import { DecoratedReadableStream } from './DecoratedReadableStream.js'; +import { type Chunker } from '../utils/chunkers.js'; export const DEFAULT_SEGMENT_SIZE: number = 1024 * 1024; export type Scope = { @@ -470,6 +471,7 @@ export type DecryptStreamMiddleware = ( export type DecryptSource = | { type: 'buffer'; location: Uint8Array } + | { type: 'chunker'; location: Chunker } | { type: 'remote'; location: string } | { type: 'stream'; location: ReadableStream } | { type: 'file-browser'; location: Blob }; diff --git a/lib/tdf3/src/client/index.ts b/lib/tdf3/src/client/index.ts index 367b3114..fa56a955 100644 --- a/lib/tdf3/src/client/index.ts +++ b/lib/tdf3/src/client/index.ts @@ -95,14 +95,20 @@ const makeChunkable = async (source: DecryptSource) => { // we don't support streams anyways (see zipreader.js) let initialChunker: Chunker; let buf = null; - if (source.type === 'stream') { - buf = await streamToBuffer(source.location); - initialChunker = fromBuffer(buf); - } else if (source.type === 'buffer') { - buf = source.location; - initialChunker = fromBuffer(buf); - } else { - initialChunker = await fromDataSource(source); + switch (source.type) { + case 'stream': + buf = await streamToBuffer(source.location); + initialChunker = fromBuffer(buf); + break; + case 'buffer': + buf = source.location; + initialChunker = fromBuffer(buf); + break; + case 'chunker': + initialChunker = source.location; + break; + default: + initialChunker = await fromDataSource(source); } const magic: string = await getFirstTwoBytes(initialChunker); diff --git a/lib/tdf3/src/crypto/crypto-utils.ts b/lib/tdf3/src/crypto/crypto-utils.ts index 42468f59..4fab0248 100644 --- a/lib/tdf3/src/crypto/crypto-utils.ts +++ b/lib/tdf3/src/crypto/crypto-utils.ts @@ -1,4 +1,5 @@ import { base64 } from '../../../src/encodings/index.js'; +import { IllegalArgumentError } from '../../../src/errors.js'; import { type AnyKeyPair, type PemKeyPair } from './declarations.js'; import { rsaPkcs1Sha256 } from './index.js'; @@ -116,3 +117,20 @@ export const toCryptoKeyPair = async (input: AnyKeyPair): Promise ]); return { privateKey, publicKey }; }; + +export async function cryptoToPem(k: CryptoKey): Promise { + switch (k.type) { + case 'private': { + const exPrivate = await crypto.subtle.exportKey('pkcs8', k); + const privateBase64String = base64.encodeArrayBuffer(exPrivate); + return formatAsPem(privateBase64String, 'PRIVATE KEY'); + } + case 'public': { + const exPublic = await crypto.subtle.exportKey('spki', k); + const publicBase64String = base64.encodeArrayBuffer(exPublic); + return formatAsPem(publicBase64String, 'PUBLIC KEY'); + } + default: + throw new IllegalArgumentError(`unsupported key type [${k.type}]`); + } +} diff --git 
a/lib/tdf3/src/models/attribute-set.ts b/lib/tdf3/src/models/attribute-set.ts index e3ca19b8..806b6bdf 100644 --- a/lib/tdf3/src/models/attribute-set.ts +++ b/lib/tdf3/src/models/attribute-set.ts @@ -1,8 +1,6 @@ import Ajv, { JSONSchemaType } from 'ajv'; import { decodeJwt } from 'jose'; -const verbose = false; - export type AttributeObject = { attribute: string; kasUrl: string; @@ -42,6 +40,8 @@ const validator = (() => { export class AttributeSet { attributes: AttributeObject[]; + verbose: boolean = false; + defaultAttribute?: AttributeObject; constructor() { @@ -103,7 +103,7 @@ export class AttributeSet { if (!result) { // TODO: Determine if an error should be thrown // console.log("WARNING - AttributeSet.addAttribute: AttributeObject is malformed. AddAttribute failed:"); - if (verbose) console.log(attrObj); + if (this.verbose) console.log(attrObj); return null; } // Check for duplicate entries to assure idempotency. diff --git a/lib/tests/web/nano-roundtrip.test.ts b/lib/tests/web/nano-roundtrip.test.ts index 5cce2f17..c7e1cd18 100644 --- a/lib/tests/web/nano-roundtrip.test.ts +++ b/lib/tests/web/nano-roundtrip.test.ts @@ -46,14 +46,14 @@ function initSandbox() { return sandbox; } -const kasUrl = 'http://localhost:65432/api/kas'; +const kasEndpoint = 'http://localhost:65432/api/kas'; describe('Local roundtrip Tests', () => { it('roundtrip string', async () => { // const sandbox = initSandbox(); const sandbox = initSandbox(); try { - const client = new NanoTDFClient(authProvider, kasUrl); + const client = new NanoTDFClient({ authProvider, kasEndpoint }); const keyAgreementSpy = sandbox.spy(globalThis.crypto.subtle, 'deriveKey'); sandbox.stub(client, 'rewrapKey').callsFake(async () => keyAgreementSpy.lastCall.returnValue); const cipherText = await client.encrypt('hello world'); diff --git a/lib/tests/web/nanotdf/Client.test.ts b/lib/tests/web/nanotdf/Client.test.ts index 4238be3a..75a883b5 100644 --- a/lib/tests/web/nanotdf/Client.test.ts +++ b/lib/tests/web/nanotdf/Client.test.ts @@ -4,14 +4,14 @@ import Client from '../../../src/nanotdf/Client.js'; describe('nanotdf client', () => { it('Can create a client with a mock EAS', async () => { - const kasUrl = 'https://etheria.local/kas'; + const kasEndpoint = 'https://etheria.local/kas'; const authProvider = await clientAuthProvider({ clientId: 'string', oidcOrigin: 'string', exchange: 'client', clientSecret: 'password', }); - const client = new Client(authProvider, kasUrl); + const client = new Client({ authProvider, kasEndpoint }); expect(client.authProvider).to.be.ok; }); }); diff --git a/remote-store/package-lock.json b/remote-store/package-lock.json index f9843628..8663192a 100644 --- a/remote-store/package-lock.json +++ b/remote-store/package-lock.json @@ -1649,7 +1649,8 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-10yZrGA4LQBNjUX52+qLld2fTjq2OLxfEmR6kkrlLo6dpuN4p+qUI+i1ducMEcr/4fruKxfj2vMr+0Tg97oolg==", + "integrity": "sha512-1m+aZ7BjED8QSsRvCTx5cts5M761oS17CncTObWU1uMI5u7P+LGSTNu1PjKyLTZFqoY4VCSpDyOIoUlMSLBFOA==", + "license": "BSD-3-Clause-Clear", "dependencies": { "ajv": "^8.12.0", "axios": "^1.6.1", diff --git a/web-app/package-lock.json b/web-app/package-lock.json index be4c9267..55bb632a 100644 --- a/web-app/package-lock.json +++ b/web-app/package-lock.json @@ -11,7 +11,7 @@ "dependencies": { "@opentdf/client": "file:../lib/opentdf-client-2.0.0.tgz", "clsx": "^2.0.0", - "native-file-system-adapter": "^3.0.0", + "native-file-system-adapter": 
"^3.0.1", "react": "^18.2.0", "react-dom": "^18.2.0" }, @@ -20,7 +20,7 @@ "@rollup/plugin-inject": "^5.0.3", "@types/react": "^18.2.17", "@types/react-dom": "^18.2.7", - "@types/wicg-file-system-access": "^2020.9.6", + "@types/wicg-file-system-access": "^2023.10.5", "@typescript-eslint/eslint-plugin": "^6.2.1", "@typescript-eslint/parser": "^6.2.1", "@vitejs/plugin-react": "^4.0.4", @@ -602,7 +602,8 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-10yZrGA4LQBNjUX52+qLld2fTjq2OLxfEmR6kkrlLo6dpuN4p+qUI+i1ducMEcr/4fruKxfj2vMr+0Tg97oolg==", + "integrity": "sha512-1m+aZ7BjED8QSsRvCTx5cts5M761oS17CncTObWU1uMI5u7P+LGSTNu1PjKyLTZFqoY4VCSpDyOIoUlMSLBFOA==", + "license": "BSD-3-Clause-Clear", "dependencies": { "ajv": "^8.12.0", "axios": "^1.6.1", @@ -770,9 +771,10 @@ "license": "MIT" }, "node_modules/@types/wicg-file-system-access": { - "version": "2020.9.6", - "dev": true, - "license": "MIT" + "version": "2023.10.5", + "resolved": "https://registry.npmjs.org/@types/wicg-file-system-access/-/wicg-file-system-access-2023.10.5.tgz", + "integrity": "sha512-e9kZO9kCdLqT2h9Tw38oGv9UNzBBWaR1MzuAavxPcsV/7FJ3tWbU6RI3uB+yKIDPGLkGVbplS52ub0AcRLvrhA==", + "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "6.2.1", @@ -2563,7 +2565,9 @@ } }, "node_modules/native-file-system-adapter": { - "version": "3.0.0", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/native-file-system-adapter/-/native-file-system-adapter-3.0.1.tgz", + "integrity": "sha512-ocuhsYk2SY0906LPc3QIMW+rCV3MdhqGiy7wV5Bf0e8/5TsMjDdyIwhNiVPiKxzTJLDrLT6h8BoV9ERfJscKhw==", "funding": [ { "type": "github", @@ -2574,7 +2578,6 @@ "url": "https://paypal.me/jimmywarting" } ], - "license": "MIT", "engines": { "node": ">=14.8.0" }, @@ -4097,7 +4100,7 @@ }, "@opentdf/client": { "version": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-10yZrGA4LQBNjUX52+qLld2fTjq2OLxfEmR6kkrlLo6dpuN4p+qUI+i1ducMEcr/4fruKxfj2vMr+0Tg97oolg==", + "integrity": "sha512-1m+aZ7BjED8QSsRvCTx5cts5M761oS17CncTObWU1uMI5u7P+LGSTNu1PjKyLTZFqoY4VCSpDyOIoUlMSLBFOA==", "requires": { "ajv": "^8.12.0", "axios": "^1.6.1", @@ -4218,7 +4221,9 @@ "dev": true }, "@types/wicg-file-system-access": { - "version": "2020.9.6", + "version": "2023.10.5", + "resolved": "https://registry.npmjs.org/@types/wicg-file-system-access/-/wicg-file-system-access-2023.10.5.tgz", + "integrity": "sha512-e9kZO9kCdLqT2h9Tw38oGv9UNzBBWaR1MzuAavxPcsV/7FJ3tWbU6RI3uB+yKIDPGLkGVbplS52ub0AcRLvrhA==", "dev": true }, "@typescript-eslint/eslint-plugin": { @@ -5286,7 +5291,9 @@ "dev": true }, "native-file-system-adapter": { - "version": "3.0.0", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/native-file-system-adapter/-/native-file-system-adapter-3.0.1.tgz", + "integrity": "sha512-ocuhsYk2SY0906LPc3QIMW+rCV3MdhqGiy7wV5Bf0e8/5TsMjDdyIwhNiVPiKxzTJLDrLT6h8BoV9ERfJscKhw==", "requires": { "fetch-blob": "^3.2.0" } diff --git a/web-app/package.json b/web-app/package.json index abb22566..84f1ac94 100644 --- a/web-app/package.json +++ b/web-app/package.json @@ -17,7 +17,7 @@ "dependencies": { "@opentdf/client": "file:../lib/opentdf-client-2.0.0.tgz", "clsx": "^2.0.0", - "native-file-system-adapter": "^3.0.0", + "native-file-system-adapter": "^3.0.1", "react": "^18.2.0", "react-dom": "^18.2.0" }, @@ -26,7 +26,7 @@ "@rollup/plugin-inject": "^5.0.3", "@types/react": "^18.2.17", "@types/react-dom": "^18.2.7", - "@types/wicg-file-system-access": "^2020.9.6", + 
"@types/wicg-file-system-access": "^2023.10.5", "@typescript-eslint/eslint-plugin": "^6.2.1", "@typescript-eslint/parser": "^6.2.1", "@vitejs/plugin-react": "^4.0.4", diff --git a/web-app/src/App.tsx b/web-app/src/App.tsx index 2b59410b..94202284 100644 --- a/web-app/src/App.tsx +++ b/web-app/src/App.tsx @@ -2,8 +2,9 @@ import { clsx } from 'clsx'; import { useState, useEffect, type ChangeEvent } from 'react'; import { showSaveFilePicker } from 'native-file-system-adapter'; import './App.css'; -import { TDF3Client, type DecryptSource, NanoTDFClient, AuthProviders } from '@opentdf/client'; +import { type Chunker, type DecryptSource, NanoTDFClient, TDF3Client } from '@opentdf/client'; import { type SessionInformation, OidcClient } from './session.js'; +import { c } from './config.js'; function decryptedFileName(encryptedFileName: string): string { // Groups: 1 file 'name' bit @@ -29,11 +30,7 @@ function decryptedFileExtension(encryptedFileName: string): string { return m[2]; } -const oidcClient = new OidcClient( - 'http://localhost:65432/auth/realms/tdf', - 'browsertest', - 'otdf-sample-web-app' -); +const oidcClient = new OidcClient(c.oidc.host, c.oidc.clientId, 'otdf-sample-web-app'); function saver(blob: Blob, name: string) { const a = document.createElement('a'); @@ -61,23 +58,27 @@ async function getNewFileHandle( ], suggestedName, }; + //@ts-expect-error //TS2739: not a complete file picker interface return showSaveFilePicker(options); } type Containers = 'html' | 'tdf' | 'nano'; type CurrentDataController = AbortController | undefined; -type FileInputSource = { file: File }; +type FileInputSource = { + type: 'file'; + file: File; +}; type UrlInputSource = { + type: 'url'; url: URL; }; -type RandomType = 'bytes'; type RandomInputSource = { - type: RandomType; + type: 'bytes'; length: number; }; -type InputSource = FileInputSource | UrlInputSource | RandomInputSource | undefined; +type InputSource = FileInputSource | UrlInputSource | RandomInputSource; type SinkType = 'file' | 'fsapi' | 'none'; function fileNameFor(inputSource: InputSource) { @@ -206,7 +207,7 @@ function App() { const [decryptContainerType, setDecryptContainerType] = useState('tdf'); const [downloadState, setDownloadState] = useState(); const [encryptContainerType, setEncryptContainerType] = useState('tdf'); - const [inputSource, setInputSource] = useState(); + const [inputSource, setInputSource] = useState(); const [sinkType, setSinkType] = useState('file'); const [streamController, setStreamController] = useState(); @@ -232,7 +233,7 @@ function App() { const target = event.target as HTMLInputElement; if (target.files?.length) { const [file] = target.files; - setInputSource({ file }); + setInputSource({ type: 'file', file }); } else { setInputSource(undefined); } @@ -248,7 +249,7 @@ function App() { const setUrlHandler = (event: ChangeEvent) => { const target = event.target as HTMLInputElement; if (target.value && target.validity.valid) { - setInputSource({ url: new URL(target.value) }); + setInputSource({ type: 'url', url: new URL(target.value) }); } else { setInputSource(undefined); } @@ -329,12 +330,6 @@ function App() { } const inputFileName = fileNameFor(inputSource); console.log(`Encrypting [${inputFileName}] as ${encryptContainerType} to ${sinkType}`); - const authProvider = await AuthProviders.refreshAuthProvider({ - exchange: 'refresh', - clientId: oidcClient.clientId, - oidcOrigin: oidcClient.host, - refreshToken, - }); switch (encryptContainerType) { case 'nano': { if ('url' in inputSource) { @@ -344,7 
+339,11 @@ function App() { 'file' in inputSource ? await inputSource.file.arrayBuffer() : randomArrayBuffer(inputSource); - const nanoClient = new NanoTDFClient(authProvider, 'http://localhost:65432/api/kas'); + const nanoClient = new NanoTDFClient({ + authProvider: oidcClient, + kasEndpoint: c.kas, + dpopKeys: oidcClient.getSigningKey(), + }); setDownloadState('Encrypting...'); switch (sinkType) { case 'file': @@ -375,36 +374,42 @@ function App() { } case 'html': { const client = new TDF3Client({ - authProvider, - kasEndpoint: 'http://localhost:65432/api/kas', - readerUrl: 'https://secure.virtru.com/start?htmlProtocol=1', + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + readerUrl: c.reader, }); let source: ReadableStream, size: number; const sc = new AbortController(); setStreamController(sc); - if ('file' in inputSource) { - size = inputSource.file.size; - source = inputSource.file.stream() as unknown as ReadableStream; - } else if ('type' in inputSource) { - size = inputSource.length; - source = randomStream(inputSource); - } else { - // NOTE: Attaching the signal to the pipeline (in pipeTo, below) - // is insufficient (at least in Chrome) to abort the fetch itself. - // So aborting a sink in a pipeline does *NOT* cancel its sources - const fr = await fetch(inputSource.url, { signal: sc.signal }); - if (!fr.ok) { - throw Error( - `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - if (!fr.body) { - throw Error( - `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - size = parseInt(fr.headers.get('Content-Length') || '-1'); - source = fr.body; + switch (inputSource.type) { + case 'file': + size = inputSource.file.size; + source = inputSource.file.stream() as unknown as ReadableStream; + break; + case 'bytes': + + size = inputSource.length; + source = randomStream(inputSource); + break; + case 'url': + // NOTE: Attaching the signal to the pipeline (in pipeTo, below) + // is insufficient (at least in Chrome) to abort the fetch itself. 
+ // So aborting a sink in a pipeline does *NOT* cancel its sources + const fr = await fetch(inputSource.url, { signal: sc.signal }); + if (!fr.ok) { + throw Error( + `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); + } + if (!fr.body) { + throw Error( + `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); + } + size = parseInt(fr.headers.get('Content-Length') || '-1'); + source = fr.body; + break; } try { const downloadName = `${inputFileName}.tdf.html`; @@ -443,32 +448,37 @@ function App() { } case 'tdf': { const client = new TDF3Client({ - authProvider, - kasEndpoint: 'http://localhost:65432/api/kas', + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, }); const sc = new AbortController(); setStreamController(sc); let source: ReadableStream, size: number; - if ('file' in inputSource) { - size = inputSource.file.size; - source = inputSource.file.stream() as unknown as ReadableStream; - } else if ('type' in inputSource) { - size = inputSource.length; - source = randomStream(inputSource); - } else { - const fr = await fetch(inputSource.url, { signal: sc.signal }); - if (!fr.ok) { - throw Error( - `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - if (!fr.body) { - throw Error( - `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - size = parseInt(fr.headers.get('Content-Length') || '-1'); - source = fr.body; + switch (inputSource.type) { + case 'file': + size = inputSource.file.size; + source = inputSource.file.stream() as unknown as ReadableStream; + break; + case 'bytes': + size = inputSource.length; + source = randomStream(inputSource); + break; + case 'url': + const fr = await fetch(inputSource.url, { signal: sc.signal }); + if (!fr.ok) { + throw Error( + `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); + } + if (!fr.body) { + throw Error( + `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); + } + size = parseInt(fr.headers.get('Content-Length') || '-1'); + source = fr.body; + break; } try { let f; @@ -521,12 +531,6 @@ function App() { console.log( `Decrypting ${decryptContainerType} ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}` ); - const authProvider = await AuthProviders.refreshAuthProvider({ - exchange: 'refresh', - clientId: oidcClient.clientId, - oidcOrigin: oidcClient.host, - refreshToken: authState.user.refreshToken, - }); let f; if (sinkType === 'fsapi') { f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); @@ -534,24 +538,29 @@ function App() { switch (decryptContainerType) { case 'tdf': { const client = new TDF3Client({ - authProvider, - kasEndpoint: 'http://localhost:65432/api/kas', + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, }); try { const sc = new AbortController(); setStreamController(sc); let source: DecryptSource; let size: number; - if ('file' in inputSource) { - size = inputSource.file.size; - source = { type: 'file-browser', location: inputSource.file }; - } else if ('type' in inputSource) { - size = inputSource.length; - source = { type: 'chunker', location: randomChunker(inputSource) }; - } else { - const hr = await fetch(inputSource.url, { method: 'HEAD' }); - size = parseInt(hr.headers.get('Content-Length') || '-1'); - source = { type: 'remote', location: 
inputSource.url.toString() }; + switch (inputSource.type) { + case 'file': + size = inputSource.file.size; + source = { type: 'file-browser', location: inputSource.file }; + break; + case 'bytes': + size = inputSource.length; + source = { type: 'chunker', location: randomChunker(inputSource) }; + break; + case 'url': + const hr = await fetch(inputSource.url, { method: 'HEAD' }); + size = parseInt(hr.headers.get('Content-Length') || '-1'); + source = { type: 'remote', location: inputSource.url.toString() }; + break; } const progressTransformers = makeProgressPair(size, 'Decrypt'); // XXX chunker doesn't have an equivalent 'stream' interaface @@ -587,7 +596,11 @@ function App() { if ('url' in inputSource) { throw new Error('Unsupported : fetch the url I guess?'); } - const nanoClient = new NanoTDFClient(authProvider, 'http://localhost:65432/api/kas'); + const nanoClient = new NanoTDFClient({ + authProvider: oidcClient, + kasEndpoint: c.kas, + dpopKeys: oidcClient.getSigningKey(), + }); try { const cipherText = 'file' in inputSource @@ -623,81 +636,6 @@ function App() { return false; }; - const handleScan = async () => { - const searchTerm = 'service workers'; - // Chars to show either side of the result in the match - const contextBefore = 30; - const contextAfter = 30; - const caseInsensitive = true; - - if (!inputSource) { - console.warn('PLEASE SELECT FILE ∨ URL'); - return false; - } - let source; - if ('file' in inputSource) { - source = inputSource.file.stream() as unknown as ReadableStream; - } else { - const sc = new AbortController(); - setStreamController(sc); - const fr = await fetch(inputSource.url, { cache: 'no-store', signal: sc.signal }); - console.log(`Received headers ${fr.headers}`); - if (!fr.ok) { - throw Error( - `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - if (!fr.body) { - throw Error( - `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - source = fr.body; - } - const reader = source.getReader(); - - const decoder = new TextDecoder(); - const toMatch = caseInsensitive ? searchTerm.toLowerCase() : searchTerm; - const bufferSize = Math.max(toMatch.length - 1, contextBefore); - - let bytesReceived = 0; - let buffer = ''; - let matchFoundAt = -1; - - while (true) { - const { value: chunk, done } = await reader.read(); - if (done) { - console.log('Failed to find match'); - return; - } - bytesReceived += chunk.length; - console.log(`Received ${bytesReceived.toLocaleString()} bytes of data so far`); - buffer += decoder.decode(chunk, { stream: true }); - - // already found match & just context-gathering? - if (matchFoundAt === -1) { - matchFoundAt = (caseInsensitive ? buffer.toLowerCase() : buffer).indexOf(toMatch); - } - - if (matchFoundAt === -1) { - buffer = buffer.slice(-bufferSize); - } else if (buffer.slice(matchFoundAt + toMatch.length).length >= contextAfter) { - console.log("Here's the match:"); - console.log( - buffer.slice( - Math.max(0, matchFoundAt - contextBefore), - matchFoundAt + toMatch.length + contextAfter - ) - ); - console.log('Cancelling fetch'); - reader.cancel(); - return; - } else { - console.log('Found match, but need more context…'); - } - } - }; - const SessionInfo = authState.sessionState == 'start' ? ( -
diff --git a/web-app/src/config.ts b/web-app/src/config.ts new file mode 100644 index 00000000..7dc2f709 --- /dev/null +++ b/web-app/src/config.ts @@ -0,0 +1,27 @@ +export type TDFConfig = { + oidc: { + // eg 'http://localhost:65432/auth/realms/opentdf' + host: string; + // eg browsertest + clientId: string; + }; + kas: string; + reader: string; +}; + +function cfg(): TDFConfig { + const { VITE_TDF_CFG } = import.meta.env; + if (!VITE_TDF_CFG) { + return { + oidc: { + host: 'http://localhost:65432/auth/realms/tdf', + clientId: 'browsertest', + }, + kas: 'http://localhost:65432/api/kas', + reader: 'https://secure.virtru.com/start?htmlProtocol=1', + }; + } + return JSON.parse(VITE_TDF_CFG); +} + +export const c = cfg(); diff --git a/web-app/src/session.ts b/web-app/src/session.ts index b177c75b..0efe46e8 100644 --- a/web-app/src/session.ts +++ b/web-app/src/session.ts @@ -1,5 +1,7 @@ import { decodeJwt } from 'jose'; +import { default as dpopFn } from 'dpop'; import { base64 } from '@opentdf/client/encodings'; +import { AuthProvider, HttpRequest, withHeaders } from '@opentdf/client'; export type OpenidConfiguration = { issuer: string; @@ -89,12 +91,25 @@ export type Sessions = { requests: Record; /** state for most recent request */ lastRequest?: string; + /** DPoP key */ + k?: string[]; }; function getTimestampInSeconds() { return Math.floor(Date.now() / 1000); } +function rsaPkcs1Sha256(): RsaHashedKeyGenParams { + return { + name: 'RSASSA-PKCS1-v1_5', + hash: { + name: 'SHA-256', + }, + modulusLength: 2048, + publicExponent: new Uint8Array([0x01, 0x00, 0x01]), // 24 bit representation of 65537 + }; +} + const extractAuthorizationResponse = (url: string): AuthorizationResponse | null => { const queryParams = new URLSearchParams(url); console.log(`response: ${JSON.stringify(queryParams.toString())}`); @@ -152,12 +167,13 @@ async function fetchConfig(server: string): Promise { return response.json(); } -export class OidcClient { +export class OidcClient implements AuthProvider { clientId: string; host: string; scope: string; sessionIdentifier: string; _sessions?: Sessions; + signingKey?: CryptoKeyPair; constructor(host: string, clientId: string, sessionIdentifier: string) { this.clientId = clientId; @@ -189,7 +205,7 @@ export class OidcClient { return this._sessions; } - async storeSessions() { + storeSessions() { sessionStorage.setItem(this.ssk('sessions'), JSON.stringify(this._sessions)); } @@ -234,18 +250,25 @@ export class OidcClient { window.location.href = whereto; } + _cs?: Promise; + async currentSession(): Promise { - const s = await this.handleRedirect(); - if (s) { - console.log('redirected'); - return s; + if (!this._cs) { + this._cs = (async (): Promise => { + const s = await this.handleRedirect(); + if (s) { + console.log('redirected'); + return s; + } + const sessions = await this.loadSessions(); + if (!sessions?.lastRequest) { + return { sessionState: 'start' }; + } + const thisSession = sessions.requests[sessions.lastRequest]; + return thisSession; + })(); } - const sessions = await this.loadSessions(); - if (!sessions?.lastRequest) { - return { sessionState: 'start' }; - } - const thisSession = sessions.requests[sessions.lastRequest]; - return thisSession; + return this._cs; } async currentUser(): Promise { @@ -271,6 +294,8 @@ export class OidcClient { console.log('Ignoring repeated redirect code'); return; } + currentSession.usedCodes.push(response.code); + this.storeSessions(); try { currentSession.user = await this._makeAccessTokenRequest({ grantType: 
'authorization_code', @@ -288,6 +313,24 @@ export class OidcClient { } } + async getSigningKey(): Promise { + if (this.signingKey) { + return this.signingKey; + } + if (this._sessions?.k) { + const k = this._sessions?.k.map((e) => base64.decodeArrayBuffer(e)); + const algorithm = rsaPkcs1Sha256(); + const [publicKey, privateKey] = await Promise.all([ + crypto.subtle.importKey('spki', k[0], algorithm, true, ['verify']), + crypto.subtle.importKey('pkcs8', k[1], algorithm, false, ['sign']), + ]); + this.signingKey = { privateKey, publicKey }; + } else { + this.signingKey = await crypto.subtle.generateKey(rsaPkcs1Sha256(), true, ['sign']); + } + return this.signingKey; + } + private async _makeAccessTokenRequest(options: { grantType: 'authorization_code' | 'refresh_token'; codeOrRefreshToken: string; @@ -312,11 +355,30 @@ export class OidcClient { if (!config) { throw new Error('Unable to autoconfigure OIDC'); } + const headers: Record = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + const signingKey = await this.getSigningKey(); + if (this._sessions && this.signingKey) { + const k = await Promise.all([ + crypto.subtle.exportKey('spki', this.signingKey.publicKey), + crypto.subtle.exportKey('pkcs8', this.signingKey.privateKey), + ]); + this._sessions.k = k.map((e) => base64.encodeArrayBuffer(e)); + } + console.info( + `signing token request with DPoP key ${JSON.stringify( + await crypto.subtle.exportKey('jwk', signingKey.publicKey) + )}` + ); + headers.DPoP = await dpopFn( + signingKey, + config.token_endpoint, + 'POST' + ); const response = await fetch(config.token_endpoint, { method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - }, + headers, body: params, credentials: 'include', }); @@ -335,4 +397,36 @@ export class OidcClient { refreshToken: refresh_token, }; } + + async updateClientPublicKey(signingKey: CryptoKeyPair): Promise { + this.signingKey = signingKey; + } + + async withCreds(httpReq: HttpRequest): Promise { + const user = await this.currentUser(); + if (!user) { + console.error('Not logged in'); + return httpReq; + } + const { accessToken } = user; + const { signingKey } = this; + if (!signingKey || !signingKey.publicKey) { + console.error('missing DPoP key'); + return httpReq; + } + console.info( + `signing request for ${httpReq.url} with DPoP key ${JSON.stringify( + await crypto.subtle.exportKey('jwk', signingKey.publicKey) + )}` + ); + const dpopToken = await dpopFn( + signingKey, + httpReq.url, + httpReq.method, + /* nonce */ undefined, + accessToken + ); + // TODO: Consider: only set DPoP if cnf.jkt is present in access token? 
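+    // Note: per RFC 9449, `cnf.jkt` in the access token is the base64url JWK SHA-256 thumbprint of the
+    // DPoP public key the issuer bound the token to; if it is absent, the token was never DPoP-bound and
+    // the DPoP header below is redundant.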
+ return withHeaders(httpReq, { Authorization: `Bearer ${accessToken}`, DPoP: dpopToken }); + } } diff --git a/web-app/tsconfig.node.json b/web-app/tsconfig.node.json index 7a836f70..a5843902 100644 --- a/web-app/tsconfig.node.json +++ b/web-app/tsconfig.node.json @@ -5,5 +5,7 @@ "moduleResolution": "node16", "allowSyntheticDefaultImports": true }, - "include": ["vite.config.ts"] + "include": [ + "*.ts", + ] } From 71cca45e51b7ce5adfc5c3b9f64a99c5a9caa7bd Mon Sep 17 00:00:00 2001 From: David Mihalcik Date: Wed, 8 May 2024 23:20:33 -0400 Subject: [PATCH 2/8] WIP multifile encrypt/decrypt --- remote-store/package-lock.json | 3 +- web-app/package-lock.json | 5 +- web-app/src/App.tsx | 601 +++++++++++++++++---------------- web-app/src/session.ts | 6 +- 4 files changed, 314 insertions(+), 301 deletions(-) diff --git a/remote-store/package-lock.json b/remote-store/package-lock.json index 8663192a..a248882b 100644 --- a/remote-store/package-lock.json +++ b/remote-store/package-lock.json @@ -1649,8 +1649,7 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-1m+aZ7BjED8QSsRvCTx5cts5M761oS17CncTObWU1uMI5u7P+LGSTNu1PjKyLTZFqoY4VCSpDyOIoUlMSLBFOA==", - "license": "BSD-3-Clause-Clear", + "integrity": "sha512-nnnGQrjKyAfqAC36C8ErGTBiTJ2xPCQgG0pN93rzA68k8+M2pX8HS41H5jBg/nPNpP/kAfEdR6liOWz9LPkP6g==", "dependencies": { "ajv": "^8.12.0", "axios": "^1.6.1", diff --git a/web-app/package-lock.json b/web-app/package-lock.json index 55bb632a..a9144ef8 100644 --- a/web-app/package-lock.json +++ b/web-app/package-lock.json @@ -602,8 +602,7 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-1m+aZ7BjED8QSsRvCTx5cts5M761oS17CncTObWU1uMI5u7P+LGSTNu1PjKyLTZFqoY4VCSpDyOIoUlMSLBFOA==", - "license": "BSD-3-Clause-Clear", + "integrity": "sha512-nnnGQrjKyAfqAC36C8ErGTBiTJ2xPCQgG0pN93rzA68k8+M2pX8HS41H5jBg/nPNpP/kAfEdR6liOWz9LPkP6g==", "dependencies": { "ajv": "^8.12.0", "axios": "^1.6.1", @@ -4100,7 +4099,7 @@ }, "@opentdf/client": { "version": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-1m+aZ7BjED8QSsRvCTx5cts5M761oS17CncTObWU1uMI5u7P+LGSTNu1PjKyLTZFqoY4VCSpDyOIoUlMSLBFOA==", + "integrity": "sha512-nnnGQrjKyAfqAC36C8ErGTBiTJ2xPCQgG0pN93rzA68k8+M2pX8HS41H5jBg/nPNpP/kAfEdR6liOWz9LPkP6g==", "requires": { "ajv": "^8.12.0", "axios": "^1.6.1", diff --git a/web-app/src/App.tsx b/web-app/src/App.tsx index 94202284..71a1cb57 100644 --- a/web-app/src/App.tsx +++ b/web-app/src/App.tsx @@ -79,7 +79,7 @@ type RandomInputSource = { }; type InputSource = FileInputSource | UrlInputSource | RandomInputSource; -type SinkType = 'file' | 'fsapi' | 'none'; +type SinkType = 'file' | 'fsapi' | 'memory' | 'none'; function fileNameFor(inputSource: InputSource) { if (!inputSource) { @@ -207,7 +207,7 @@ function App() { const [decryptContainerType, setDecryptContainerType] = useState('tdf'); const [downloadState, setDownloadState] = useState(); const [encryptContainerType, setEncryptContainerType] = useState('tdf'); - const [inputSource, setInputSource] = useState(); + const [inputSources, setInputSources] = useState([]); const [sinkType, setSinkType] = useState('file'); const [streamController, setStreamController] = useState(); @@ -232,26 +232,27 @@ function App() { const setFileHandler = (event: ChangeEvent) => { const target = event.target as HTMLInputElement; if (target.files?.length) { - const [file] = target.files; - setInputSource({ type: 'file', file }); + const 
fileArray = Array.from(target.files); + const srcs = fileArray.map((file): FileInputSource => ({ type: 'file', file })); + setInputSources(srcs); } else { - setInputSource(undefined); + setInputSources([]); } }; const setRandomHandler = (event: ChangeEvent) => { const target = event.target as HTMLInputElement; if (target.value && target.validity.valid) { - setInputSource({ type: 'bytes', length: parseInt(target.value) }); + setInputSources([{ type: 'bytes', length: parseInt(target.value) }]); } else { - setInputSource(undefined); + setInputSources([]); } }; const setUrlHandler = (event: ChangeEvent) => { const target = event.target as HTMLInputElement; if (target.value && target.validity.valid) { - setInputSource({ type: 'url', url: new URL(target.value) }); + setInputSources([{ type: 'url', url: new URL(target.value) }]); } else { - setInputSource(undefined); + setInputSources([]); } }; @@ -319,7 +320,7 @@ function App() { }; const handleEncrypt = async () => { - if (!inputSource) { + if (!inputSources.length) { console.warn('No input source selected'); return false; } @@ -328,198 +329,201 @@ function App() { console.warn('PLEASE LOG IN'); return false; } - const inputFileName = fileNameFor(inputSource); - console.log(`Encrypting [${inputFileName}] as ${encryptContainerType} to ${sinkType}`); - switch (encryptContainerType) { - case 'nano': { - if ('url' in inputSource) { - throw new Error('Unsupported : fetch the url I guess?'); - } - const plainText = - 'file' in inputSource - ? await inputSource.file.arrayBuffer() - : randomArrayBuffer(inputSource); - const nanoClient = new NanoTDFClient({ - authProvider: oidcClient, - kasEndpoint: c.kas, - dpopKeys: oidcClient.getSigningKey(), - }); - setDownloadState('Encrypting...'); - switch (sinkType) { - case 'file': - { - const cipherText = await nanoClient.encrypt(plainText); - saver(new Blob([cipherText]), `${inputFileName}.ntdf`); - } - break; - case 'fsapi': - { - const file = await getNewFileHandle('ntdf', `${inputFileName}.ntdf`); - const cipherText = await nanoClient.encrypt(plainText); - const writable = await file.createWritable(); - try { - await writable.write(cipherText); - setDownloadState('Encrypt Complete'); - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - } finally { - await writable.close(); - } - } - break; - case 'none': - break; - } - break; - } - case 'html': { - const client = new TDF3Client({ - authProvider: oidcClient, - dpopKeys: oidcClient.getSigningKey(), - kasEndpoint: c.kas, - readerUrl: c.reader, - }); - let source: ReadableStream, size: number; - const sc = new AbortController(); - setStreamController(sc); - switch (inputSource.type) { - case 'file': - size = inputSource.file.size; - source = inputSource.file.stream() as unknown as ReadableStream; - break; - case 'bytes': - - size = inputSource.length; - source = randomStream(inputSource); - break; - case 'url': - // NOTE: Attaching the signal to the pipeline (in pipeTo, below) - // is insufficient (at least in Chrome) to abort the fetch itself. 
- // So aborting a sink in a pipeline does *NOT* cancel its sources - const fr = await fetch(inputSource.url, { signal: sc.signal }); - if (!fr.ok) { - throw Error( - `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - if (!fr.body) { - throw Error( - `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - size = parseInt(fr.headers.get('Content-Length') || '-1'); - source = fr.body; - break; - } - try { - const downloadName = `${inputFileName}.tdf.html`; - let f; - if (sinkType == 'fsapi') { - f = await getNewFileHandle('html', downloadName); + for (const inputSource of inputSources) { + const inputFileName = fileNameFor(inputSource); + console.log(`Encrypting [${inputFileName}] as ${encryptContainerType} to ${sinkType}`); + switch (encryptContainerType) { + case 'nano': { + if ('url' in inputSource) { + throw new Error('Unsupported : fetch the url I guess?'); } - const progressTransformers = makeProgressPair(size, 'Encrypt'); - const cipherText = await client.encrypt({ - source: source.pipeThrough(progressTransformers.reader), - offline: true, - asHtml: true, + const plainText = + 'file' in inputSource + ? await inputSource.file.arrayBuffer() + : randomArrayBuffer(inputSource); + const nanoClient = new NanoTDFClient({ + authProvider: oidcClient, + kasEndpoint: c.kas, + dpopKeys: oidcClient.getSigningKey(), }); - cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); + setDownloadState('Encrypting...'); switch (sinkType) { case 'file': - await cipherText.toFile(downloadName, { signal: sc.signal }); + { + const cipherText = await nanoClient.encrypt(plainText); + saver(new Blob([cipherText]), `${inputFileName}.ntdf`); + } break; case 'fsapi': - if (!f) { - throw new Error(); + { + const file = await getNewFileHandle('ntdf', `${inputFileName}.ntdf`); + const cipherText = await nanoClient.encrypt(plainText); + const writable = await file.createWritable(); + try { + await writable.write(cipherText); + setDownloadState('Encrypt Complete'); + } catch (e) { + setDownloadState(`Encrypt Failed: ${e}`); + } finally { + await writable.close(); + } } - const writable = await f.createWritable(); - await cipherText.stream.pipeTo(writable, { signal: sc.signal }); break; case 'none': - await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); break; } - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - console.error('Encrypt Failed', e); + break; } - setStreamController(undefined); - break; - } - case 'tdf': { - const client = new TDF3Client({ - authProvider: oidcClient, - dpopKeys: oidcClient.getSigningKey(), - kasEndpoint: c.kas, - }); - const sc = new AbortController(); - setStreamController(sc); - let source: ReadableStream, size: number; - switch (inputSource.type) { - case 'file': - size = inputSource.file.size; - source = inputSource.file.stream() as unknown as ReadableStream; - break; - case 'bytes': - size = inputSource.length; - source = randomStream(inputSource); - break; - case 'url': - const fr = await fetch(inputSource.url, { signal: sc.signal }); - if (!fr.ok) { - throw Error( - `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); + case 'html': { + const client = new TDF3Client({ + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + readerUrl: c.reader, + }); + let source: ReadableStream, size: number; + const sc = new AbortController(); + setStreamController(sc); + switch 
(inputSource.type) { + case 'file': + size = inputSource.file.size; + source = inputSource.file.stream() as unknown as ReadableStream; + break; + + case 'bytes': + size = inputSource.length; + source = randomStream(inputSource); + break; + + case 'url': + // NOTE: Attaching the signal to the pipeline (in pipeTo, below) + // is insufficient (at least in Chrome) to abort the fetch itself. + // So aborting a sink in a pipeline does *NOT* cancel its sources + const fr = await fetch(inputSource.url, { signal: sc.signal }); + if (!fr.ok) { + throw Error( + `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); + } + if (!fr.body) { + throw Error( + `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); + } + size = parseInt(fr.headers.get('Content-Length') || '-1'); + source = fr.body; + break; + } + try { + const downloadName = `${inputFileName}.tdf.html`; + let f; + if (sinkType == 'fsapi') { + f = await getNewFileHandle('html', downloadName); } - if (!fr.body) { - throw Error( - `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); + const progressTransformers = makeProgressPair(size, 'Encrypt'); + const cipherText = await client.encrypt({ + source: source.pipeThrough(progressTransformers.reader), + offline: true, + asHtml: true, + }); + cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); + switch (sinkType) { + case 'file': + await cipherText.toFile(downloadName, { signal: sc.signal }); + break; + case 'fsapi': + if (!f) { + throw new Error(); + } + const writable = await f.createWritable(); + await cipherText.stream.pipeTo(writable, { signal: sc.signal }); + break; + case 'none': + await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); + break; } - size = parseInt(fr.headers.get('Content-Length') || '-1'); - source = fr.body; - break; - } - try { - let f; - const downloadName = `${inputFileName}.tdf`; - if (sinkType === 'fsapi') { - f = await getNewFileHandle('tdf', downloadName); + } catch (e) { + setDownloadState(`Encrypt Failed: ${e}`); + console.error('Encrypt Failed', e); } - const progressTransformers = makeProgressPair(size, 'Encrypt'); - const cipherText = await client.encrypt({ - source: source.pipeThrough(progressTransformers.reader), - offline: true, + setStreamController(undefined); + break; + } + case 'tdf': { + const client = new TDF3Client({ + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, }); - cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); - switch (sinkType) { + const sc = new AbortController(); + setStreamController(sc); + let source: ReadableStream, size: number; + switch (inputSource.type) { case 'file': - await cipherText.toFile(downloadName, { signal: sc.signal }); + size = inputSource.file.size; + source = inputSource.file.stream() as unknown as ReadableStream; break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - await cipherText.stream.pipeTo(writable, { signal: sc.signal }); + case 'bytes': + size = inputSource.length; + source = randomStream(inputSource); break; - case 'none': - await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); + case 'url': + const fr = await fetch(inputSource.url, { signal: sc.signal }); + if (!fr.ok) { + throw Error( + `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); + } + if (!fr.body) { + throw Error( + 
`Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); + } + size = parseInt(fr.headers.get('Content-Length') || '-1'); + source = fr.body; break; } - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - console.error('Encrypt Failed', e); + try { + let f; + const downloadName = `${inputFileName}.tdf`; + if (sinkType === 'fsapi') { + f = await getNewFileHandle('tdf', downloadName); + } + const progressTransformers = makeProgressPair(size, 'Encrypt'); + const cipherText = await client.encrypt({ + source: source.pipeThrough(progressTransformers.reader), + offline: true, + }); + cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); + switch (sinkType) { + case 'file': + await cipherText.toFile(downloadName, { signal: sc.signal }); + break; + case 'fsapi': + if (!f) { + throw new Error(); + } + const writable = await f.createWritable(); + await cipherText.stream.pipeTo(writable, { signal: sc.signal }); + break; + case 'none': + await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); + break; + } + } catch (e) { + setDownloadState(`Encrypt Failed: ${e}`); + console.error('Encrypt Failed', e); + } + setStreamController(undefined); + break; } - setStreamController(undefined); - break; } } return true; }; const handleDecrypt = async () => { - if (!inputSource) { + if (!inputSources.length) { console.log('PLEASE SELECT FILE'); return false; } @@ -527,110 +531,112 @@ function App() { console.error('decrypt while logged out doesnt work'); return false; } - const dfn = decryptedFileName(fileNameFor(inputSource)); - console.log( - `Decrypting ${decryptContainerType} ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}` - ); - let f; - if (sinkType === 'fsapi') { - f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); - } - switch (decryptContainerType) { - case 'tdf': { - const client = new TDF3Client({ - authProvider: oidcClient, - dpopKeys: oidcClient.getSigningKey(), - kasEndpoint: c.kas, - }); - try { - const sc = new AbortController(); - setStreamController(sc); - let source: DecryptSource; - let size: number; - switch (inputSource.type) { - case 'file': - size = inputSource.file.size; - source = { type: 'file-browser', location: inputSource.file }; - break; - case 'bytes': - size = inputSource.length; - source = { type: 'chunker', location: randomChunker(inputSource) }; - break; - case 'url': - const hr = await fetch(inputSource.url, { method: 'HEAD' }); - size = parseInt(hr.headers.get('Content-Length') || '-1'); - source = { type: 'remote', location: inputSource.url.toString() }; - break; - } - const progressTransformers = makeProgressPair(size, 'Decrypt'); - // XXX chunker doesn't have an equivalent 'stream' interaface - // so we kinda fake it with percentages by tracking output, which should - // strictly be smaller than the input file. 
- const plainText = await client.decrypt({ source }); - plainText.stream = plainText.stream - .pipeThrough(progressTransformers.reader) - .pipeThrough(progressTransformers.writer); - switch (sinkType) { - case 'file': - await plainText.toFile(dfn, { signal: sc.signal }); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - await plainText.stream.pipeTo(writable, { signal: sc.signal }); - break; - case 'none': - await plainText.stream.pipeTo(drain(), { signal: sc.signal }); - break; - } - } catch (e) { - console.error('Decrypt Failed', e); - setDownloadState(`Decrypt Failed: ${e}`); - } - setStreamController(undefined); - break; + for (const inputSource of inputSources) { + const dfn = decryptedFileName(fileNameFor(inputSource)); + console.log( + `Decrypting ${decryptContainerType} ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}` + ); + let f; + if (sinkType === 'fsapi') { + f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); } - case 'nano': { - if ('url' in inputSource) { - throw new Error('Unsupported : fetch the url I guess?'); + switch (decryptContainerType) { + case 'tdf': { + const client = new TDF3Client({ + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + }); + try { + const sc = new AbortController(); + setStreamController(sc); + let source: DecryptSource; + let size: number; + switch (inputSource.type) { + case 'file': + size = inputSource.file.size; + source = { type: 'file-browser', location: inputSource.file }; + break; + case 'bytes': + size = inputSource.length; + source = { type: 'chunker', location: randomChunker(inputSource) }; + break; + case 'url': + const hr = await fetch(inputSource.url, { method: 'HEAD' }); + size = parseInt(hr.headers.get('Content-Length') || '-1'); + source = { type: 'remote', location: inputSource.url.toString() }; + break; + } + const progressTransformers = makeProgressPair(size, 'Decrypt'); + // XXX chunker doesn't have an equivalent 'stream' interaface + // so we kinda fake it with percentages by tracking output, which should + // strictly be smaller than the input file. + const plainText = await client.decrypt({ source }); + plainText.stream = plainText.stream + .pipeThrough(progressTransformers.reader) + .pipeThrough(progressTransformers.writer); + switch (sinkType) { + case 'file': + await plainText.toFile(dfn, { signal: sc.signal }); + break; + case 'fsapi': + if (!f) { + throw new Error(); + } + const writable = await f.createWritable(); + await plainText.stream.pipeTo(writable, { signal: sc.signal }); + break; + case 'none': + await plainText.stream.pipeTo(drain(), { signal: sc.signal }); + break; + } + } catch (e) { + console.error('Decrypt Failed', e); + setDownloadState(`Decrypt Failed: ${e}`); + } + setStreamController(undefined); + break; } - const nanoClient = new NanoTDFClient({ - authProvider: oidcClient, - kasEndpoint: c.kas, - dpopKeys: oidcClient.getSigningKey(), - }); - try { - const cipherText = - 'file' in inputSource - ? 
await inputSource.file.arrayBuffer() - : randomArrayBuffer(inputSource); - const plainText = await nanoClient.decrypt(cipherText); - switch (sinkType) { - case 'file': - saver(new Blob([plainText]), dfn); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - try { - await writable.write(plainText); - setDownloadState('Decrypt Complete'); - } finally { - await writable.close(); - } - break; - case 'none': - break; + case 'nano': { + if ('url' in inputSource) { + throw new Error('Unsupported : fetch the url I guess?'); } - } catch (e) { - console.error('Decrypt Failed', e); - setDownloadState(`Decrypt Failed: ${e}`); + const nanoClient = new NanoTDFClient({ + authProvider: oidcClient, + kasEndpoint: c.kas, + dpopKeys: oidcClient.getSigningKey(), + }); + try { + const cipherText = + 'file' in inputSource + ? await inputSource.file.arrayBuffer() + : randomArrayBuffer(inputSource); + const plainText = await nanoClient.decrypt(cipherText); + switch (sinkType) { + case 'file': + saver(new Blob([plainText]), dfn); + break; + case 'fsapi': + if (!f) { + throw new Error(); + } + const writable = await f.createWritable(); + try { + await writable.write(plainText); + setDownloadState('Decrypt Complete'); + } finally { + await writable.close(); + } + break; + case 'none': + break; + } + } catch (e) { + console.error('Decrypt Failed', e); + setDownloadState(`Decrypt Failed: ${e}`); + } + break; } - break; } } return false; @@ -654,7 +660,19 @@ function App() {
{JSON.stringify(authState?.user, null, ' ')}
); - const hasFileInput = inputSource && 'file' in inputSource; + const detailsList = inputSources.map((inputSource) => ( + <> +

{fileNameFor(inputSource)}

+ {inputSource.type == 'file' && ( + <> +
Content Type: {inputSource.file.type}
+
Last Modified: {new Date(inputSource.file.lastModified).toLocaleString()}
+
Size: {new Intl.NumberFormat().format(inputSource.file.size)} bytes
+ + )} + + )); + return (
@@ -668,22 +686,13 @@ function App() {
Source - {hasFileInput ? ( + {inputSources.length ? (
-

{'file' in inputSource ? inputSource.file.name : '[rand]'}

- {'file' in inputSource && ( - <> -
Content Type: {inputSource.file.type}
-
- Last Modified: {new Date(inputSource.file.lastModified).toLocaleString()} -
-
Size: {new Intl.NumberFormat().format(inputSource.file.size)} bytes
- - )} + {detailsList}
) : ( @@ -755,6 +831,16 @@ function App() { />{' '}
+ setSinkType(e.target.value as SinkType)} + checked={sinkType === 'memory'} + />{' '} + +
Date: Thu, 9 May 2024 15:39:30 -0400 Subject: [PATCH 5/8] Update App.tsx --- web-app/src/App.tsx | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/web-app/src/App.tsx b/web-app/src/App.tsx index 323104a7..a73b0f8f 100644 --- a/web-app/src/App.tsx +++ b/web-app/src/App.tsx @@ -361,8 +361,8 @@ function App() { 'file' == inputSource.type ? await inputSource.file.arrayBuffer() : 'memory' == inputSource.type - ? inputSource.src - : randomArrayBuffer(inputSource); + ? inputSource.src + : randomArrayBuffer(inputSource); setDownloadState('Encrypting...'); const cipherText = await nanoClient.encrypt(plainText); switch (sinkType) { @@ -587,8 +587,8 @@ function App() { 'file' == inputSource.type ? await inputSource.file.arrayBuffer() : 'memory' == inputSource.type - ? inputSource.src - : randomArrayBuffer(inputSource); + ? inputSource.src + : randomArrayBuffer(inputSource); const plainText = await nanoClient.decrypt(cipherText); switch (sinkType) { case 'file': @@ -675,6 +675,10 @@ function App() { size = inputSource.length; source = { type: 'chunker', location: randomChunker(inputSource) }; break; + case 'memory': + size = inputSource.src.byteLength; + source = { type: 'buffer', location: new Uint8Array(inputSource.src) }; + break; case 'url': const hr = await fetch(inputSource.url, { method: 'HEAD' }); size = parseInt(hr.headers.get('Content-Length') || '-1'); From c6027f5cdbd507dc96100e68de62a5451e6959e4 Mon Sep 17 00:00:00 2001 From: David Mihalcik Date: Thu, 9 May 2024 16:07:15 -0400 Subject: [PATCH 6/8] use p-limit to pool requests --- web-app/package-lock.json | 90 ++++++++++++++++++--------- web-app/package.json | 1 + web-app/src/App.tsx | 124 ++++++++++++++++++++++---------------- 3 files changed, 133 insertions(+), 82 deletions(-) diff --git a/web-app/package-lock.json b/web-app/package-lock.json index a9144ef8..112d1fb3 100644 --- a/web-app/package-lock.json +++ b/web-app/package-lock.json @@ -12,6 +12,7 @@ "@opentdf/client": "file:../lib/opentdf-client-2.0.0.tgz", "clsx": "^2.0.0", "native-file-system-adapter": "^3.0.1", + "p-limit": "^5.0.0", "react": "^18.2.0", "react-dom": "^18.2.0" }, @@ -1014,17 +1015,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@vitest/runner/node_modules/yocto-queue": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/@vitest/snapshot": { "version": "0.33.0", "dev": true, @@ -2703,14 +2693,14 @@ } }, "node_modules/p-limit": { - "version": "3.1.0", - "dev": true, - "license": "MIT", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", "dependencies": { - "yocto-queue": "^0.1.0" + "yocto-queue": "^1.0.0" }, "engines": { - "node": ">=10" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -2730,6 +2720,33 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + 
}, + "node_modules/p-locate/node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parent-module": { "version": "1.0.1", "dev": true, @@ -3734,11 +3751,11 @@ "license": "ISC" }, "node_modules/yocto-queue": { - "version": "0.1.0", - "dev": true, - "license": "MIT", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", "engines": { - "node": ">=10" + "node": ">=12.20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -4344,10 +4361,6 @@ "requires": { "yocto-queue": "^1.0.0" } - }, - "yocto-queue": { - "version": "1.0.0", - "dev": true } } }, @@ -5377,10 +5390,11 @@ } }, "p-limit": { - "version": "3.1.0", - "dev": true, + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", "requires": { - "yocto-queue": "^0.1.0" + "yocto-queue": "^1.0.0" } }, "p-locate": { @@ -5388,6 +5402,23 @@ "dev": true, "requires": { "p-limit": "^3.0.2" + }, + "dependencies": { + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } } }, "parent-module": { @@ -5933,8 +5964,9 @@ "dev": true }, "yocto-queue": { - "version": "0.1.0", - "dev": true + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==" } } } diff --git a/web-app/package.json b/web-app/package.json index 84f1ac94..a0e57958 100644 --- a/web-app/package.json +++ b/web-app/package.json @@ -18,6 +18,7 @@ "@opentdf/client": "file:../lib/opentdf-client-2.0.0.tgz", "clsx": "^2.0.0", "native-file-system-adapter": "^3.0.1", + "p-limit": "^5.0.0", "react": "^18.2.0", "react-dom": "^18.2.0" }, diff --git a/web-app/src/App.tsx b/web-app/src/App.tsx index a73b0f8f..d9b4f2c6 100644 --- a/web-app/src/App.tsx +++ b/web-app/src/App.tsx @@ -5,6 +5,9 @@ import './App.css'; import { type Chunker, type DecryptSource, NanoTDFClient, TDF3Client } from '@opentdf/client'; import { type SessionInformation, OidcClient } from './session.js'; import { c } from './config.js'; +import pLimit from 'p-limit'; + +const limit = pLimit(16); function decryptedFileName(encryptedFileName: string): string { // Groups: 1 file 'name' bit @@ -390,40 +393,48 @@ function App() { } setDownloadState('Encrypt Complete'); } - for (const inputSource of inputSources) { - const inputFileName = fileNameFor(inputSource); - console.log(`Encrypting [${inputFileName}] as ${encryptContainerType} to ${sinkType}`); - switch 
(encryptContainerType) { - case 'nano': { + let promises; + switch (encryptContainerType) { + case 'nano': { + promises = inputSources.map((inputSource): () => Promise => async () => { const nanoClient = new NanoTDFClient({ authProvider: oidcClient, kasEndpoint: c.kas, dpopKeys: oidcClient.getSigningKey(), }); + const inputFileName = fileNameFor(inputSource); + console.log(`Encrypting [${inputFileName}] as ${encryptContainerType} to ${sinkType}`); await encryptNano(nanoClient, inputSource, inputFileName); - break; - } - case 'html': { - const client = new TDF3Client({ - authProvider: oidcClient, - dpopKeys: oidcClient.getSigningKey(), - kasEndpoint: c.kas, - readerUrl: c.reader, - }); + }); + break; + } + case 'html': { + const client = new TDF3Client({ + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + readerUrl: c.reader, + }); + promises = inputSources.map((inputSource): () => Promise => async () => { + const inputFileName = fileNameFor(inputSource); await encryptTdfHtml(inputSource, inputFileName, client); - break; - } - case 'tdf': { - const client = new TDF3Client({ - authProvider: oidcClient, - dpopKeys: oidcClient.getSigningKey(), - kasEndpoint: c.kas, - }); + }); + break; + } + case 'tdf': { + const client = new TDF3Client({ + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + }); + promises = inputSources.map((inputSource): () => Promise => async () => { + const inputFileName = fileNameFor(inputSource); await encryptTdf(inputSource, inputFileName, client); - break; - } + }); + break; } } + await Promise.all(promises.map(limit)); if (memory.length) { setInputSources(memory); @@ -604,11 +615,16 @@ function App() { await writable.close(); } break; + case 'memory': + memory.push({ type: 'memory', name: dfn, src: cipherText }); + break; case 'none': break; } } + let promises: (() => Promise)[]; + const memory = []; const handleDecrypt = async () => { if (!inputSources.length) { console.log('PLEASE SELECT FILE'); @@ -619,41 +635,43 @@ function App() { return false; } - for (const inputSource of inputSources) { - const dfn = decryptedFileName(fileNameFor(inputSource)); - console.log( - `Decrypting ${decryptContainerType} ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}` - ); - switch (decryptContainerType) { - case 'tdf': { - const client = new TDF3Client({ - authProvider: oidcClient, - dpopKeys: oidcClient.getSigningKey(), - kasEndpoint: c.kas, - }); - + switch (decryptContainerType) { + case 'tdf': { + const client = new TDF3Client({ + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + }); + promises = inputSources.map((inputSource): () => Promise => async () => { + const dfn = decryptedFileName(fileNameFor(inputSource)); + console.log( + `Decrypting ${decryptContainerType} ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}` + ); await decryptTdf(client, inputSource, dfn); - break; - } - case 'nano': { + }); + break; + } + case 'nano': { + const nanoClient = new NanoTDFClient({ + authProvider: oidcClient, + kasEndpoint: c.kas, + dpopKeys: oidcClient.getSigningKey(), + }); + promises = inputSources.map((inputSource): () => Promise => async () => { if ('url' in inputSource) { throw new Error('Unsupported : fetch the url I guess?'); } - const nanoClient = new NanoTDFClient({ - authProvider: oidcClient, - kasEndpoint: c.kas, - dpopKeys: oidcClient.getSigningKey(), - }); - try { - await decryptNano(nanoClient, inputSource, dfn); - } catch (e) { - 
console.error('Decrypt Failed', e); - setDownloadState(`Decrypt Failed: ${e}`); - } - break; - } + const dfn = decryptedFileName(fileNameFor(inputSource)); + await decryptNano(nanoClient, inputSource, dfn); + }); + break; } } + await Promise.all(promises.map(limit)); + + if (memory.length) { + setInputSources(memory); + } return false; async function decryptTdf(client: TDF3Client, inputSource: InputSource, dfn: string) { From 8b947da26ec845c1f2b0d8bc1d2755bfee7af926 Mon Sep 17 00:00:00 2001 From: David Mihalcik Date: Fri, 10 May 2024 10:36:39 -0400 Subject: [PATCH 7/8] Update App.tsx --- web-app/src/App.tsx | 39 +++++++++++++++++---------------------- 1 file changed, 17 insertions(+), 22 deletions(-) diff --git a/web-app/src/App.tsx b/web-app/src/App.tsx index d9b4f2c6..a2575c74 100644 --- a/web-app/src/App.tsx +++ b/web-app/src/App.tsx @@ -1,5 +1,5 @@ import { clsx } from 'clsx'; -import { useState, useEffect, type ChangeEvent } from 'react'; +import { useState, useEffect, type ChangeEvent, useRef } from 'react'; import { showSaveFilePicker } from 'native-file-system-adapter'; import './App.css'; import { type Chunker, type DecryptSource, NanoTDFClient, TDF3Client } from '@opentdf/client'; @@ -75,7 +75,7 @@ async function getNewFileHandle( } type Containers = 'html' | 'tdf' | 'nano'; -type CurrentDataController = AbortController | undefined; +type CurrentDataControllers = Record; type FileInputSource = { type: 'file'; file: File; @@ -230,7 +230,7 @@ function App() { const [encryptContainerType, setEncryptContainerType] = useState('tdf'); const [inputSources, setInputSources] = useState([]); const [sinkType, setSinkType] = useState('file'); - const [streamController, setStreamController] = useState(); + const streamControllers = useRef({}); const handleContainerFormatRadioChange = (handler: typeof setDecryptContainerType) => (e: ChangeEvent) => { @@ -449,7 +449,7 @@ function App() { ) { let source: ReadableStream, size: number; const sc = new AbortController(); - setStreamController(sc); + streamControllers.current[inputFileName] = sc; switch (inputSource.type) { case 'file': size = inputSource.file.size; @@ -514,16 +514,14 @@ function App() { await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); break; } - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - console.error('Encrypt Failed', e); + } finally { + delete streamControllers.current[inputFileName]; } - setStreamController(undefined); } async function encryptTdf(inputSource: InputSource, inputFileName: string, client: TDF3Client) { const sc = new AbortController(); - setStreamController(sc); + streamControllers.current[inputFileName] = sc; let source: ReadableStream, size: number; switch (inputSource.type) { case 'file': @@ -582,11 +580,9 @@ function App() { await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); break; } - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - console.error('Encrypt Failed', e); + } finally { + delete streamControllers.current[inputFileName]; } - setStreamController(undefined); } }; async function decryptNano( @@ -610,7 +606,6 @@ function App() { const writable = await f.createWritable(); try { await writable.write(plainText); - setDownloadState('Decrypt Complete'); } finally { await writable.close(); } @@ -624,7 +619,7 @@ function App() { } let promises: (() => Promise)[]; - const memory = []; + const memory: MemoryInputSource[] = []; const handleDecrypt = async () => { if (!inputSources.length) { console.log('PLEASE SELECT FILE'); @@ -667,7 +662,13 @@ function 
App() { break; } } - await Promise.all(promises.map(limit)); + try { + await Promise.all(promises.map(limit)); + setDownloadState('Decrypt Complete'); + } catch (e) { + console.error('Decrypt Failed', e); + setDownloadState(`Decrypt Failed: ${e}`); + } if (memory.length) { setInputSources(memory); @@ -679,7 +680,6 @@ function App() { if (sinkType === 'fsapi') { f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); } - try { const sc = new AbortController(); setStreamController(sc); let source: DecryptSource; @@ -726,11 +726,6 @@ function App() { await plainText.stream.pipeTo(drain(), { signal: sc.signal }); break; } - } catch (e) { - console.error('Decrypt Failed', e); - setDownloadState(`Decrypt Failed: ${e}`); - } - setStreamController(undefined); } }; From 15bbce3b6fb40a22517f93f5e864998d0a3d7e20 Mon Sep 17 00:00:00 2001 From: David Mihalcik Date: Fri, 10 May 2024 10:37:38 -0400 Subject: [PATCH 8/8] Update App.tsx --- web-app/src/App.tsx | 114 ++++++++++++++++++++++---------------------- 1 file changed, 58 insertions(+), 56 deletions(-) diff --git a/web-app/src/App.tsx b/web-app/src/App.tsx index a2575c74..a04c13dc 100644 --- a/web-app/src/App.tsx +++ b/web-app/src/App.tsx @@ -364,8 +364,8 @@ function App() { 'file' == inputSource.type ? await inputSource.file.arrayBuffer() : 'memory' == inputSource.type - ? inputSource.src - : randomArrayBuffer(inputSource); + ? inputSource.src + : randomArrayBuffer(inputSource); setDownloadState('Encrypting...'); const cipherText = await nanoClient.encrypt(plainText); switch (sinkType) { @@ -396,7 +396,7 @@ function App() { let promises; switch (encryptContainerType) { case 'nano': { - promises = inputSources.map((inputSource): () => Promise => async () => { + promises = inputSources.map((inputSource): (() => Promise) => async () => { const nanoClient = new NanoTDFClient({ authProvider: oidcClient, kasEndpoint: c.kas, @@ -415,7 +415,7 @@ function App() { kasEndpoint: c.kas, readerUrl: c.reader, }); - promises = inputSources.map((inputSource): () => Promise => async () => { + promises = inputSources.map((inputSource): (() => Promise) => async () => { const inputFileName = fileNameFor(inputSource); await encryptTdfHtml(inputSource, inputFileName, client); }); @@ -427,7 +427,7 @@ function App() { dpopKeys: oidcClient.getSigningKey(), kasEndpoint: c.kas, }); - promises = inputSources.map((inputSource): () => Promise => async () => { + promises = inputSources.map((inputSource): (() => Promise) => async () => { const inputFileName = fileNameFor(inputSource); await encryptTdf(inputSource, inputFileName, client); }); @@ -594,8 +594,8 @@ function App() { 'file' == inputSource.type ? await inputSource.file.arrayBuffer() : 'memory' == inputSource.type - ? inputSource.src - : randomArrayBuffer(inputSource); + ? 
inputSource.src + : randomArrayBuffer(inputSource); const plainText = await nanoClient.decrypt(cipherText); switch (sinkType) { case 'file': @@ -637,10 +637,12 @@ function App() { dpopKeys: oidcClient.getSigningKey(), kasEndpoint: c.kas, }); - promises = inputSources.map((inputSource): () => Promise => async () => { + promises = inputSources.map((inputSource): (() => Promise) => async () => { const dfn = decryptedFileName(fileNameFor(inputSource)); console.log( - `Decrypting ${decryptContainerType} ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}` + `Decrypting ${decryptContainerType} ${JSON.stringify( + inputSource + )} to ${sinkType} ${dfn}` ); await decryptTdf(client, inputSource, dfn); }); @@ -652,7 +654,7 @@ function App() { kasEndpoint: c.kas, dpopKeys: oidcClient.getSigningKey(), }); - promises = inputSources.map((inputSource): () => Promise => async () => { + promises = inputSources.map((inputSource): (() => Promise) => async () => { if ('url' in inputSource) { throw new Error('Unsupported : fetch the url I guess?'); } @@ -680,52 +682,52 @@ function App() { if (sinkType === 'fsapi') { f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); } - const sc = new AbortController(); - setStreamController(sc); - let source: DecryptSource; - let size: number; - switch (inputSource.type) { - case 'file': - size = inputSource.file.size; - source = { type: 'file-browser', location: inputSource.file }; - break; - case 'bytes': - size = inputSource.length; - source = { type: 'chunker', location: randomChunker(inputSource) }; - break; - case 'memory': - size = inputSource.src.byteLength; - source = { type: 'buffer', location: new Uint8Array(inputSource.src) }; - break; - case 'url': - const hr = await fetch(inputSource.url, { method: 'HEAD' }); - size = parseInt(hr.headers.get('Content-Length') || '-1'); - source = { type: 'remote', location: inputSource.url.toString() }; - break; - } - const progressTransformers = makeProgressPair(size, 'Decrypt'); - // XXX chunker doesn't have an equivalent 'stream' interaface - // so we kinda fake it with percentages by tracking output, which should - // strictly be smaller than the input file. 
- const plainText = await client.decrypt({ source }); - plainText.stream = plainText.stream - .pipeThrough(progressTransformers.reader) - .pipeThrough(progressTransformers.writer); - switch (sinkType) { - case 'file': - await plainText.toFile(dfn, { signal: sc.signal }); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - await plainText.stream.pipeTo(writable, { signal: sc.signal }); - break; - case 'none': - await plainText.stream.pipeTo(drain(), { signal: sc.signal }); - break; - } + const sc = new AbortController(); + setStreamController(sc); + let source: DecryptSource; + let size: number; + switch (inputSource.type) { + case 'file': + size = inputSource.file.size; + source = { type: 'file-browser', location: inputSource.file }; + break; + case 'bytes': + size = inputSource.length; + source = { type: 'chunker', location: randomChunker(inputSource) }; + break; + case 'memory': + size = inputSource.src.byteLength; + source = { type: 'buffer', location: new Uint8Array(inputSource.src) }; + break; + case 'url': + const hr = await fetch(inputSource.url, { method: 'HEAD' }); + size = parseInt(hr.headers.get('Content-Length') || '-1'); + source = { type: 'remote', location: inputSource.url.toString() }; + break; + } + const progressTransformers = makeProgressPair(size, 'Decrypt'); + // XXX chunker doesn't have an equivalent 'stream' interaface + // so we kinda fake it with percentages by tracking output, which should + // strictly be smaller than the input file. + const plainText = await client.decrypt({ source }); + plainText.stream = plainText.stream + .pipeThrough(progressTransformers.reader) + .pipeThrough(progressTransformers.writer); + switch (sinkType) { + case 'file': + await plainText.toFile(dfn, { signal: sc.signal }); + break; + case 'fsapi': + if (!f) { + throw new Error(); + } + const writable = await f.createWritable(); + await plainText.stream.pipeTo(writable, { signal: sc.signal }); + break; + case 'none': + await plainText.stream.pipeTo(drain(), { signal: sc.signal }); + break; + } } };
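The NOTE in the encrypt helpers earlier in this series observes that handing an AbortSignal to `pipeTo` alone does not cancel an upstream `fetch`; the request itself needs the same signal. A minimal sketch of that pattern using only standard web APIs — the function name and `sink` parameter are illustrative, not helpers from App.tsx:

```ts
// Minimal sketch: share one AbortController between the fetch and the pipeline.
// Aborting only the pipeTo sink would leave the network request running
// (the behavior the NOTE comment warns about, at least in Chrome).
async function pipeUrlTo(
  url: string,
  sink: WritableStream<Uint8Array>,
  sc: AbortController
): Promise<void> {
  const fr = await fetch(url, { signal: sc.signal }); // signal cancels the request itself
  if (!fr.ok || !fr.body) {
    throw new Error(`Error on fetch [${url}]: ${fr.status} code received; [${fr.statusText}]`);
  }
  await fr.body.pipeTo(sink, { signal: sc.signal }); // and tears down the stream pipeline
}
```

Calling `sc.abort()` then stops both the transfer and the underlying request, which is why the encrypt and decrypt helpers in the diff pass `sc.signal` to `fetch` as well as to `pipeTo`.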
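PATCH 6/8 ("use p-limit to pool requests") replaces the sequential `for...of` loops with arrays of promise-returning thunks run through a shared limiter. A rough sketch of the same pattern in isolation — `runPooled`, `names`, and `job` are hypothetical placeholders, not the actual App.tsx helpers:

```ts
import pLimit from 'p-limit';

// Allow at most 16 concurrent encrypt/decrypt jobs; the rest wait in a queue.
const limit = pLimit(16);

async function runPooled(names: string[], job: (name: string) => Promise<void>): Promise<void> {
  // Build the thunks first, then hand each one to the limiter, mirroring the
  // `await Promise.all(promises.map(limit))` call in the patch.
  const thunks = names.map((name) => () => job(name));
  await Promise.all(thunks.map((thunk) => limit(thunk)));
}
```

Because p-limit only invokes a thunk when a slot is free, no more than 16 sources are being read or written at any one time, while `Promise.all` still surfaces the first failure to the caller.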