diff --git a/package-lock.json b/package-lock.json index f3177fbbae87..400124408f83 100644 --- a/package-lock.json +++ b/package-lock.json @@ -29,7 +29,7 @@ "better-sqlite3": "git+https://github.com/tutao/better-sqlite3-sqlcipher#53d4abb647a52eb0d3dc0d46acb192bc5e2c0f40", "cborg": "4.2.2", "dompurify": "3.2.3", - "electron": "33.2.0", + "electron": "33.3.0", "electron-updater": "6.3.4", "jszip": "3.10.1", "linkify-html": "4.1.3", @@ -68,7 +68,7 @@ "eslint": "8.57.0", "eslint-config-prettier": "9.1.0", "eslint-plugin-unicorn": "55.0.0", - "express": "4.21.0", + "express": "4.21.2", "fs-extra": "11.2.0", "full-icu": "1.5.0", "js-yaml": "4.1.0", @@ -4062,9 +4062,9 @@ } }, "node_modules/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "dev": true, "license": "MIT", "engines": { @@ -4578,9 +4578,9 @@ } }, "node_modules/electron": { - "version": "33.2.0", - "resolved": "https://registry.npmjs.org/electron/-/electron-33.2.0.tgz", - "integrity": "sha512-PVw1ICAQDPsnnsmpNFX/b1i/49h67pbSPxuIENd9K9WpGO1tsRaQt+K2bmXqTuoMJsbzIc75Ce8zqtuwBPqawA==", + "version": "33.3.0", + "resolved": "https://registry.npmjs.org/electron/-/electron-33.3.0.tgz", + "integrity": "sha512-316ZlFUHJmzGrhRj87tVStxyYvknDqVR9eYSsGKAHY7auhVWFLIcPPGxcnbD/H1mez8CpDjXvEjcz76zpWxsXw==", "hasInstallScript": true, "license": "MIT", "dependencies": { @@ -5378,9 +5378,9 @@ "license": "Apache-2.0" }, "node_modules/express": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", - "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", "dev": true, "license": "MIT", "dependencies": { @@ -5389,7 +5389,7 @@ "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.6.0", + "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -5403,7 +5403,7 @@ "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.10", + "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", @@ -5418,6 +5418,10 @@ }, "engines": { "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/express/node_modules/debug": { @@ -8300,9 +8304,9 @@ "license": "ISC" }, "node_modules/path-to-regexp": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz", - "integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==", + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", "dev": true, "license": "MIT" }, diff --git a/package.json b/package.json index 3d47917acfac..89be8359e96f 100644 --- a/package.json +++ b/package.json @@ 
-48,7 +48,7 @@ "better-sqlite3": "git+https://github.com/tutao/better-sqlite3-sqlcipher#53d4abb647a52eb0d3dc0d46acb192bc5e2c0f40", "cborg": "4.2.2", "dompurify": "3.2.3", - "electron": "33.2.0", + "electron": "33.3.0", "electron-updater": "6.3.4", "jszip": "3.10.1", "linkify-html": "4.1.3", @@ -90,7 +90,7 @@ "eslint": "8.57.0", "eslint-config-prettier": "9.1.0", "eslint-plugin-unicorn": "55.0.0", - "express": "4.21.0", + "express": "4.21.2", "fs-extra": "11.2.0", "full-icu": "1.5.0", "js-yaml": "4.1.0", diff --git a/packages/otest/lib/otest.ts b/packages/otest/lib/otest.ts index ebf5304d9958..0a6bd7e8b39f 100644 --- a/packages/otest/lib/otest.ts +++ b/packages/otest/lib/otest.ts @@ -146,7 +146,12 @@ class OTest { } for (const before of spec.before) { - await before() + try { + await before() + } catch (e) { + console.error("Spec before() failed!", newPathSerialized, e) + throw e + } } const specMatches = filter === "" || spec.name.includes(filter) @@ -179,7 +184,12 @@ class OTest { } for (const after of spec.after) { - await after() + try { + await after() + } catch (e) { + console.error("Spec after() failed!", newPathSerialized, e) + throw e + } } return result diff --git a/src/RootView.ts b/src/RootView.ts index a0317cdd4eaa..dfb73e1003d5 100644 --- a/src/RootView.ts +++ b/src/RootView.ts @@ -76,7 +76,7 @@ export class RootView implements ClassComponent { height: "100%", }, }, - [m(overlay), m(modal), vnode.children], + [m(overlay), m(modal), m(".main-view", { inert: modal.visible }, vnode.children)], ) } diff --git a/src/common/api/common/threading/Transport.ts b/src/common/api/common/threading/Transport.ts index 410c8d0a761c..9805d77da088 100644 --- a/src/common/api/common/threading/Transport.ts +++ b/src/common/api/common/threading/Transport.ts @@ -10,7 +10,7 @@ export interface Transport { /** * Set the handler for messages coming from the other end of the transport */ - setMessageHandler(handler: (message: Message) => unknown): unknown + setMessageHandler(handler: (message: Message) => unknown): void } /** diff --git a/src/common/api/main/EntropyCollector.ts b/src/common/api/main/EntropyCollector.ts index a7cc5157d5a2..18cc90bbcf44 100644 --- a/src/common/api/main/EntropyCollector.ts +++ b/src/common/api/main/EntropyCollector.ts @@ -92,7 +92,7 @@ export class EntropyCollector { } private addPerformanceTimingValues() { - if (!this.window.performance) return + if (!this.window.performance?.getEntries) return const entries = this.window.performance.getEntries() let added: number[] = [] for (const entry of entries.map((e) => e.toJSON())) { diff --git a/src/common/api/main/WorkerClient.ts b/src/common/api/main/WorkerClient.ts index c686ba59d6ee..ae2fa7ecf15c 100644 --- a/src/common/api/main/WorkerClient.ts +++ b/src/common/api/main/WorkerClient.ts @@ -62,7 +62,7 @@ export class WorkerClient { const WorkerImpl = globalThis.testWorker const workerImpl = new WorkerImpl(this, true) await workerImpl.init(client.browserData()) - workerImpl._queue._transport = { + workerImpl._dispatcher.transport = { postMessage: (msg: any) => this._dispatcher.handleMessage(msg), } this._dispatcher = new MessageDispatcher( @@ -70,6 +70,9 @@ export class WorkerClient { postMessage: function (msg: any) { workerImpl._queue.handleMessage(msg) }, + setMessageHandler(handler: (message: any) => unknown): void { + return + }, } as Transport, this.queueCommands(locator), "main-worker", diff --git a/src/common/api/worker/offline/OfflineStorage.ts b/src/common/api/worker/offline/OfflineStorage.ts index 
561aeb4ea4a4..f6074b5c4036 100644 --- a/src/common/api/worker/offline/OfflineStorage.ts +++ b/src/common/api/worker/offline/OfflineStorage.ts @@ -20,9 +20,9 @@ import { isDesktop, isOfflineStorageAvailable, isTest } from "../../common/Env.j import { modelInfos, resolveTypeReference } from "../../common/EntityFunctions.js" import { DateProvider } from "../../common/DateProvider.js" import { TokenOrNestedTokens } from "cborg/interface" -import { CalendarEventTypeRef } from "../../entities/tutanota/TypeRefs.js" +import { CalendarEventTypeRef, MailTypeRef } from "../../entities/tutanota/TypeRefs.js" import { OfflineStorageMigrator } from "./OfflineStorageMigrator.js" -import { CustomCacheHandlerMap, CustomCalendarEventCacheHandler } from "../rest/CustomCacheHandler.js" +import { CustomCacheHandlerMap, CustomCalendarEventCacheHandler, CustomMailEventCacheHandler } from "../rest/CustomCacheHandler.js" import { EntityRestClient } from "../rest/EntityRestClient.js" import { InterWindowEventFacadeSendDispatcher } from "../../../native/common/generatedipc/InterWindowEventFacadeSendDispatcher.js" import { SqlCipherFacade } from "../../../native/common/generatedipc/SqlCipherFacade.js" @@ -170,13 +170,24 @@ export class OfflineStorage implements CacheStorage, ExposedCacheStorage { let formattedQuery switch (typeModel.type) { case TypeId.Element: - formattedQuery = sql`DELETE FROM element_entities WHERE type = ${type} AND elementId = ${elementId}` + formattedQuery = sql`DELETE + FROM element_entities + WHERE type = ${type} + AND elementId = ${elementId}` break case TypeId.ListElement: - formattedQuery = sql`DELETE FROM list_entities WHERE type = ${type} AND listId = ${listId} AND elementId = ${elementId}` + formattedQuery = sql`DELETE + FROM list_entities + WHERE type = ${type} + AND listId = ${listId} + AND elementId = ${elementId}` break case TypeId.BlobElement: - formattedQuery = sql`DELETE FROM blob_element_entities WHERE type = ${type} AND listId = ${listId} AND elementId = ${elementId}` + formattedQuery = sql`DELETE + FROM blob_element_entities + WHERE type = ${type} + AND listId = ${listId} + AND elementId = ${elementId}` break default: throw new Error("must be a persistent type") @@ -191,15 +202,21 @@ export class OfflineStorage implements CacheStorage, ExposedCacheStorage { let formattedQuery switch (typeModel.type) { case TypeId.Element: - formattedQuery = sql`DELETE FROM element_entities WHERE type = ${type}` + formattedQuery = sql`DELETE + FROM element_entities + WHERE type = ${type}` break case TypeId.ListElement: - formattedQuery = sql`DELETE FROM list_entities WHERE type = ${type}` + formattedQuery = sql`DELETE + FROM list_entities + WHERE type = ${type}` await this.sqlCipherFacade.run(formattedQuery.query, formattedQuery.params) await this.deleteAllRangesForType(type) return case TypeId.BlobElement: - formattedQuery = sql`DELETE FROM blob_element_entities WHERE type = ${type}` + formattedQuery = sql`DELETE + FROM blob_element_entities + WHERE type = ${type}` break default: throw new Error("must be a persistent type") @@ -208,7 +225,9 @@ export class OfflineStorage implements CacheStorage, ExposedCacheStorage { } private async deleteAllRangesForType(type: string): Promise { - const { query, params } = sql`DELETE FROM ranges WHERE type = ${type}` + const { query, params } = sql`DELETE + FROM ranges + WHERE type = ${type}` await this.sqlCipherFacade.run(query, params) } @@ -219,13 +238,24 @@ export class OfflineStorage implements CacheStorage, ExposedCacheStorage { let formattedQuery switch 
(typeModel.type) { case TypeId.Element: - formattedQuery = sql`SELECT entity from element_entities WHERE type = ${type} AND elementId = ${elementId}` + formattedQuery = sql`SELECT entity + from element_entities + WHERE type = ${type} + AND elementId = ${elementId}` break case TypeId.ListElement: - formattedQuery = sql`SELECT entity from list_entities WHERE type = ${type} AND listId = ${listId} AND elementId = ${elementId}` + formattedQuery = sql`SELECT entity + from list_entities + WHERE type = ${type} + AND listId = ${listId} + AND elementId = ${elementId}` break case TypeId.BlobElement: - formattedQuery = sql`SELECT entity from blob_element_entities WHERE type = ${type} AND listId = ${listId} AND elementId = ${elementId}` + formattedQuery = sql`SELECT entity + from blob_element_entities + WHERE type = ${type} + AND listId = ${listId} + AND elementId = ${elementId}` break default: throw new Error("must be a persistent type") @@ -243,7 +273,11 @@ export class OfflineStorage implements CacheStorage, ExposedCacheStorage { const serializedList: ReadonlyArray> = await this.allChunked( MAX_SAFE_SQL_VARS - 2, elementIds, - (c) => sql`SELECT entity FROM list_entities WHERE type = ${type} AND listId = ${listId} AND elementId IN ${paramList(c)}`, + (c) => sql`SELECT entity + FROM list_entities + WHERE type = ${type} + AND listId = ${listId} + AND elementId IN ${paramList(c)}`, ) return await this.deserializeList( typeRef, @@ -258,12 +292,13 @@ export class OfflineStorage implements CacheStorage, ExposedCacheStorage { if (range == null) { throw new Error(`no range exists for ${type} and list ${listId}`) } - const { query, params } = sql`SELECT elementId FROM list_entities -WHERE type = ${type} -AND listId = ${listId} -AND (elementId = ${range.lower} -OR ${firstIdBigger("elementId", range.lower)}) -AND NOT(${firstIdBigger("elementId", range.upper)})` + const { query, params } = sql`SELECT elementId + FROM list_entities + WHERE type = ${type} + AND listId = ${listId} + AND (elementId = ${range.lower} + OR ${firstIdBigger("elementId", range.lower)}) + AND NOT (${firstIdBigger("elementId", range.upper)})` const rows = await this.sqlCipherFacade.all(query, params) return rows.map((row) => customIdToBase64Url(typeModel, row.elementId.value as string)) } @@ -295,15 +330,19 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` const type = getTypeId(typeRef) let formattedQuery if (reverse) { - formattedQuery = sql`SELECT entity FROM list_entities WHERE type = ${type} AND listId = ${listId} AND ${firstIdBigger( - start, - "elementId", - )} ORDER BY LENGTH(elementId) DESC, elementId DESC LIMIT ${count}` + formattedQuery = sql`SELECT entity + FROM list_entities + WHERE type = ${type} + AND listId = ${listId} + AND ${firstIdBigger(start, "elementId")} + ORDER BY LENGTH(elementId) DESC, elementId DESC LIMIT ${count}` } else { - formattedQuery = sql`SELECT entity FROM list_entities WHERE type = ${type} AND listId = ${listId} AND ${firstIdBigger( - "elementId", - start, - )} ORDER BY LENGTH(elementId) ASC, elementId ASC LIMIT ${count}` + formattedQuery = sql`SELECT entity + FROM list_entities + WHERE type = ${type} + AND listId = ${listId} + AND ${firstIdBigger("elementId", start)} + ORDER BY LENGTH(elementId) ASC, elementId ASC LIMIT ${count}` } const { query, params } = formattedQuery const serializedList: ReadonlyArray> = await this.sqlCipherFacade.all(query, params) @@ -323,13 +362,33 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` let formattedQuery: FormattedQuery switch (typeModel.type) { case 
TypeId.Element: - formattedQuery = sql`INSERT OR REPLACE INTO element_entities (type, elementId, ownerGroup, entity) VALUES (${type}, ${elementId}, ${ownerGroup}, ${serializedEntity})` + formattedQuery = sql`INSERT + OR REPLACE INTO element_entities (type, elementId, ownerGroup, entity) VALUES ( + ${type}, + ${elementId}, + ${ownerGroup}, + ${serializedEntity} + )` break case TypeId.ListElement: - formattedQuery = sql`INSERT OR REPLACE INTO list_entities (type, listId, elementId, ownerGroup, entity) VALUES (${type}, ${listId}, ${elementId}, ${ownerGroup}, ${serializedEntity})` + formattedQuery = sql`INSERT + OR REPLACE INTO list_entities (type, listId, elementId, ownerGroup, entity) VALUES ( + ${type}, + ${listId}, + ${elementId}, + ${ownerGroup}, + ${serializedEntity} + )` break case TypeId.BlobElement: - formattedQuery = sql`INSERT OR REPLACE INTO blob_element_entities (type, listId, elementId, ownerGroup, entity) VALUES (${type}, ${listId}, ${elementId}, ${ownerGroup}, ${serializedEntity})` + formattedQuery = sql`INSERT + OR REPLACE INTO blob_element_entities (type, listId, elementId, ownerGroup, entity) VALUES ( + ${type}, + ${listId}, + ${elementId}, + ${ownerGroup}, + ${serializedEntity} + )` break default: throw new Error("must be a persistent type") @@ -340,14 +399,20 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` async setLowerRangeForList(typeRef: TypeRef, listId: Id, lowerId: Id): Promise { lowerId = ensureBase64Ext(await resolveTypeReference(typeRef), lowerId) const type = getTypeId(typeRef) - const { query, params } = sql`UPDATE ranges SET lower = ${lowerId} WHERE type = ${type} AND listId = ${listId}` + const { query, params } = sql`UPDATE ranges + SET lower = ${lowerId} + WHERE type = ${type} + AND listId = ${listId}` await this.sqlCipherFacade.run(query, params) } async setUpperRangeForList(typeRef: TypeRef, listId: Id, upperId: Id): Promise { upperId = ensureBase64Ext(await resolveTypeReference(typeRef), upperId) const type = getTypeId(typeRef) - const { query, params } = sql`UPDATE ranges SET upper = ${upperId} WHERE type = ${type} AND listId = ${listId}` + const { query, params } = sql`UPDATE ranges + SET upper = ${upperId} + WHERE type = ${type} + AND listId = ${listId}` await this.sqlCipherFacade.run(query, params) } @@ -357,18 +422,30 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` upper = ensureBase64Ext(typeModel, upper) const type = getTypeId(typeRef) - const { query, params } = sql`INSERT OR REPLACE INTO ranges VALUES (${type}, ${listId}, ${lower}, ${upper})` + const { query, params } = sql`INSERT + OR REPLACE INTO ranges VALUES ( + ${type}, + ${listId}, + ${lower}, + ${upper} + )` return this.sqlCipherFacade.run(query, params) } async getLastBatchIdForGroup(groupId: Id): Promise { - const { query, params } = sql`SELECT batchId from lastUpdateBatchIdPerGroupId WHERE groupId = ${groupId}` + const { query, params } = sql`SELECT batchId + from lastUpdateBatchIdPerGroupId + WHERE groupId = ${groupId}` const row = (await this.sqlCipherFacade.get(query, params)) as { batchId: TaggedSqlValue } | null return (row?.batchId?.value ?? 
null) as Id | null } async putLastBatchIdForGroup(groupId: Id, batchId: Id): Promise { - const { query, params } = sql`INSERT OR REPLACE INTO lastUpdateBatchIdPerGroupId VALUES (${groupId}, ${batchId})` + const { query, params } = sql`INSERT + OR REPLACE INTO lastUpdateBatchIdPerGroupId VALUES ( + ${groupId}, + ${batchId} + )` await this.sqlCipherFacade.run(query, params) } @@ -383,29 +460,42 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` async purgeStorage(): Promise { for (let name of Object.keys(TableDefinitions)) { - await this.sqlCipherFacade.run(`DELETE FROM ${name}`, []) + await this.sqlCipherFacade.run( + `DELETE + FROM ${name}`, + [], + ) } } async deleteRange(typeRef: TypeRef, listId: string): Promise { - const { query, params } = sql`DELETE FROM ranges WHERE type = ${getTypeId(typeRef)} AND listId = ${listId}` + const { query, params } = sql`DELETE + FROM ranges + WHERE type = ${getTypeId(typeRef)} + AND listId = ${listId}` await this.sqlCipherFacade.run(query, params) } async getRawListElementsOfType(typeRef: TypeRef): Promise> { - const { query, params } = sql`SELECT entity from list_entities WHERE type = ${getTypeId(typeRef)}` + const { query, params } = sql`SELECT entity + from list_entities + WHERE type = ${getTypeId(typeRef)}` const items = (await this.sqlCipherFacade.all(query, params)) ?? [] return items.map((item) => this.decodeCborEntity(item.entity.value as Uint8Array) as Record & ListElementEntity) } async getRawElementsOfType(typeRef: TypeRef): Promise> { - const { query, params } = sql`SELECT entity from element_entities WHERE type = ${getTypeId(typeRef)}` + const { query, params } = sql`SELECT entity + from element_entities + WHERE type = ${getTypeId(typeRef)}` const items = (await this.sqlCipherFacade.all(query, params)) ?? [] return items.map((item) => this.decodeCborEntity(item.entity.value as Uint8Array) as Record & ElementEntity) } async getElementsOfType(typeRef: TypeRef): Promise> { - const { query, params } = sql`SELECT entity from element_entities WHERE type = ${getTypeId(typeRef)}` + const { query, params } = sql`SELECT entity + from element_entities + WHERE type = ${getTypeId(typeRef)}` const items = (await this.sqlCipherFacade.all(query, params)) ?? [] return await this.deserializeList( typeRef, @@ -414,7 +504,10 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` } async getWholeList(typeRef: TypeRef, listId: Id): Promise> { - const { query, params } = sql`SELECT entity FROM list_entities WHERE type = ${getTypeId(typeRef)} AND listId = ${listId}` + const { query, params } = sql`SELECT entity + FROM list_entities + WHERE type = ${getTypeId(typeRef)} + AND listId = ${listId}` const items = (await this.sqlCipherFacade.all(query, params)) ?? 
[] return await this.deserializeList( typeRef, @@ -434,7 +527,13 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` getCustomCacheHandlerMap(entityRestClient: EntityRestClient): CustomCacheHandlerMap { if (this.customCacheHandler == null) { - this.customCacheHandler = new CustomCacheHandlerMap({ ref: CalendarEventTypeRef, handler: new CustomCalendarEventCacheHandler(entityRestClient) }) + this.customCacheHandler = new CustomCacheHandlerMap( + { + ref: CalendarEventTypeRef, + handler: new CustomCalendarEventCacheHandler(entityRestClient), + }, + { ref: MailTypeRef, handler: new CustomMailEventCacheHandler() }, + ) } return this.customCacheHandler } @@ -445,12 +544,16 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` async deleteAllOwnedBy(owner: Id): Promise { { - const { query, params } = sql`DELETE FROM element_entities WHERE ownerGroup = ${owner}` + const { query, params } = sql`DELETE + FROM element_entities + WHERE ownerGroup = ${owner}` await this.sqlCipherFacade.run(query, params) } { // first, check which list Ids contain entities owned by the lost group - const { query, params } = sql`SELECT listId, type FROM list_entities WHERE ownerGroup = ${owner}` + const { query, params } = sql`SELECT listId, type + FROM list_entities + WHERE ownerGroup = ${owner}` const rangeRows = await this.sqlCipherFacade.all(query, params) const rows = rangeRows.map((row) => untagSqlObject(row) as { listId: string; type: string }) const listIdsByType: Map> = groupByAndMapUniquely( @@ -463,16 +566,34 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` // this particular query uses one other SQL var for the type. const safeChunkSize = MAX_SAFE_SQL_VARS - 1 const listIdArr = Array.from(listIds) - await this.runChunked(safeChunkSize, listIdArr, (c) => sql`DELETE FROM ranges WHERE type = ${type} AND listId IN ${paramList(c)}`) - await this.runChunked(safeChunkSize, listIdArr, (c) => sql`DELETE FROM list_entities WHERE type = ${type} AND listId IN ${paramList(c)}`) + await this.runChunked( + safeChunkSize, + listIdArr, + (c) => sql`DELETE + FROM ranges + WHERE type = ${type} + AND listId IN ${paramList(c)}`, + ) + await this.runChunked( + safeChunkSize, + listIdArr, + (c) => sql`DELETE + FROM list_entities + WHERE type = ${type} + AND listId IN ${paramList(c)}`, + ) } } { - const { query, params } = sql`DELETE FROM blob_element_entities WHERE ownerGroup = ${owner}` + const { query, params } = sql`DELETE + FROM blob_element_entities + WHERE ownerGroup = ${owner}` await this.sqlCipherFacade.run(query, params) } { - const { query, params } = sql`DELETE FROM lastUpdateBatchIdPerGroupId WHERE groupId = ${owner}` + const { query, params } = sql`DELETE + FROM lastUpdateBatchIdPerGroupId + WHERE groupId = ${owner}` await this.sqlCipherFacade.run(query, params) } } @@ -480,7 +601,9 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` async deleteWholeList(typeRef: TypeRef, listId: Id): Promise { await this.lockRangesDbAccess(listId) await this.deleteRange(typeRef, listId) - const { query, params } = sql`DELETE FROM list_entities WHERE listId = ${listId}` + const { query, params } = sql`DELETE + FROM list_entities + WHERE listId = ${listId}` await this.sqlCipherFacade.run(query, params) await this.unlockRangesDbAccess(listId) } @@ -493,12 +616,18 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` console.log("[OfflineStorage] failed to encode metadata for key", key, "with value", value) throw e } - const { query, params } = sql`INSERT OR REPLACE INTO metadata VALUES (${key}, ${encodedValue})` + 
const { query, params } = sql`INSERT + OR REPLACE INTO metadata VALUES ( + ${key}, + ${encodedValue} + )` await this.sqlCipherFacade.run(query, params) } private async getMetadata(key: K): Promise { - const { query, params } = sql`SELECT value from metadata WHERE key = ${key}` + const { query, params } = sql`SELECT value + from metadata + WHERE key = ${key}` const encoded = await this.sqlCipherFacade.get(query, params) return encoded && cborg.decode(encoded.value.value as Uint8Array) } @@ -515,13 +644,22 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` private async createTables() { for (let [name, definition] of Object.entries(TableDefinitions)) { - await this.sqlCipherFacade.run(`CREATE TABLE IF NOT EXISTS ${name} (${definition})`, []) + await this.sqlCipherFacade.run( + `CREATE TABLE IF NOT EXISTS ${name} + ( + ${definition} + )`, + [], + ) } } async getRange(typeRef: TypeRef, listId: Id): Promise { const type = getTypeId(typeRef) - const { query, params } = sql`SELECT upper, lower FROM ranges WHERE type = ${type} AND listId = ${listId}` + const { query, params } = sql`SELECT upper, lower + FROM ranges + WHERE type = ${type} + AND listId = ${listId}` const row = (await this.sqlCipherFacade.get(query, params)) ?? null return mapNullable(row, untagSqlObject) as Range | null @@ -535,19 +673,30 @@ AND NOT(${firstIdBigger("elementId", range.upper)})` return await this.runChunked( MAX_SAFE_SQL_VARS - 1, elementIds, - (c) => sql`DELETE FROM element_entities WHERE type = ${getTypeId(typeRef)} AND elementId IN ${paramList(c)}`, + (c) => sql`DELETE + FROM element_entities + WHERE type = ${getTypeId(typeRef)} + AND elementId IN ${paramList(c)}`, ) case TypeId.ListElement: return await this.runChunked( MAX_SAFE_SQL_VARS - 2, elementIds, - (c) => sql`DELETE FROM list_entities WHERE type = ${getTypeId(typeRef)} AND listId = ${listId} AND elementId IN ${paramList(c)}`, + (c) => sql`DELETE + FROM list_entities + WHERE type = ${getTypeId(typeRef)} + AND listId = ${listId} + AND elementId IN ${paramList(c)}`, ) case TypeId.BlobElement: return await this.runChunked( MAX_SAFE_SQL_VARS - 2, elementIds, - (c) => sql`DELETE FROM blob_element_entities WHERE type = ${getTypeId(typeRef)} AND listId = ${listId} AND elementId IN ${paramList(c)}`, + (c) => sql`DELETE + FROM blob_element_entities + WHERE type = ${getTypeId(typeRef)} + AND listId = ${listId} + AND elementId IN ${paramList(c)}`, ) default: throw new Error("must be a persistent type") diff --git a/src/common/api/worker/rest/CustomCacheHandler.ts b/src/common/api/worker/rest/CustomCacheHandler.ts index 5b567ca5e058..039252c230a3 100644 --- a/src/common/api/worker/rest/CustomCacheHandler.ts +++ b/src/common/api/worker/rest/CustomCacheHandler.ts @@ -1,18 +1,19 @@ import { ListElementEntity } from "../../common/EntityTypes.js" -import { CalendarEvent, CalendarEventTypeRef } from "../../entities/tutanota/TypeRefs.js" +import { CalendarEvent, CalendarEventTypeRef, Mail } from "../../entities/tutanota/TypeRefs.js" import { freezeMap, getTypeId, TypeRef } from "@tutao/tutanota-utils" import { CUSTOM_MAX_ID, CUSTOM_MIN_ID, firstBiggerThanSecond, getElementId, LOAD_MULTIPLE_LIMIT } from "../../common/utils/EntityUtils.js" import { resolveTypeReference } from "../../common/EntityFunctions.js" import { CacheStorage, ExposedCacheStorage, Range } from "./DefaultEntityRestCache.js" import { EntityRestClient } from "./EntityRestClient.js" import { ProgrammingError } from "../../common/error/ProgrammingError.js" +import { EntityUpdate } from 
"../../entities/sys/TypeRefs" /** * update when implementing custom cache handlers. * add new types to the union when implementing new * custom cache handlers. */ -type CustomCacheHandledType = never | CalendarEvent +type CustomCacheHandledType = never | CalendarEvent | Mail /** * makes sure that any {ref, handler} pair passed to @@ -34,7 +35,7 @@ type CustomCacheHandlerMapping = CustomCacheHandledType extends infer A export class CustomCacheHandlerMap { private readonly handlers: ReadonlyMap> - constructor(...args: Array) { + constructor(...args: ReadonlyArray) { const handlers: Map> = new Map() for (const { ref, handler } of args) { const key = getTypeId(ref) @@ -48,11 +49,6 @@ export class CustomCacheHandlerMap { // map is frozen after the constructor. constructor arg types are set up to uphold this invariant. return this.handlers.get(typeId) as CustomCacheHandler | undefined } - - has(typeRef: TypeRef): boolean { - const typeId = getTypeId(typeRef) - return this.handlers.has(typeId) - } } /** @@ -60,9 +56,11 @@ export class CustomCacheHandlerMap { * make sure to update CustomHandledType when implementing this for a new type. */ export interface CustomCacheHandler { - loadRange(storage: ExposedCacheStorage, listId: Id, start: Id, count: number, reverse: boolean): Promise + loadRange?: (storage: ExposedCacheStorage, listId: Id, start: Id, count: number, reverse: boolean) => Promise + + getElementIdsInCacheRange?: (storage: ExposedCacheStorage, listId: Id, ids: Array) => Promise> - getElementIdsInCacheRange(storage: ExposedCacheStorage, listId: Id, ids: Array): Promise> + shouldLoadOnCreateEvent?: (event: EntityUpdate) => Promise } /** @@ -125,3 +123,13 @@ export class CustomCalendarEventCacheHandler implements CustomCacheHandler { + async shouldLoadOnCreateEvent(): Promise { + // New emails should be pre-cached. 
+ // - we need them to display the folder contents + // - will very likely be loaded by indexer later + // - we might have the instance in offline cache already because of notification process + return true + } +} diff --git a/src/common/api/worker/rest/DefaultEntityRestCache.ts b/src/common/api/worker/rest/DefaultEntityRestCache.ts index 2d6b6324b2f8..1d9f4aa380c7 100644 --- a/src/common/api/worker/rest/DefaultEntityRestCache.ts +++ b/src/common/api/worker/rest/DefaultEntityRestCache.ts @@ -40,8 +40,7 @@ import { import { CUSTOM_MAX_ID, CUSTOM_MIN_ID, firstBiggerThanSecond, GENERATED_MAX_ID, GENERATED_MIN_ID, getElementId, isSameId } from "../../common/utils/EntityUtils" import { ProgrammingError } from "../../common/error/ProgrammingError" import { assertWorkerOrNode } from "../../common/Env" -import type { ListElementEntity, SomeEntity, TypeModel } from "../../common/EntityTypes" -import { ElementEntity } from "../../common/EntityTypes" +import type { ElementEntity, ListElementEntity, SomeEntity, TypeModel } from "../../common/EntityTypes" import { QueuedBatch } from "../EventQueue.js" import { ENTITY_EVENT_BATCH_EXPIRE_MS } from "../EventBusClient" import { CustomCacheHandlerMap } from "./CustomCacheHandler.js" @@ -396,8 +395,9 @@ export class DefaultEntityRestCache implements EntityRestCache { } async loadRange(typeRef: TypeRef, listId: Id, start: Id, count: number, reverse: boolean): Promise { - if (this.storage.getCustomCacheHandlerMap(this.entityRestClient).has(typeRef)) { - return await this.storage.getCustomCacheHandlerMap(this.entityRestClient).get(typeRef)!.loadRange(this.storage, listId, start, count, reverse) + const customHandler = this.storage.getCustomCacheHandlerMap(this.entityRestClient).get(typeRef) + if (customHandler && customHandler.loadRange) { + return await customHandler.loadRange(this.storage, listId, start, count, reverse) } const typeModel = await resolveTypeReference(typeRef) @@ -673,10 +673,11 @@ export class DefaultEntityRestCache implements EntityRestCache { const ids = updates.map((update) => update.instanceId) // We only want to load the instances that are in cache range - const customHandlers = this.storage.getCustomCacheHandlerMap(this.entityRestClient) - const idsInCacheRange = customHandlers.has(typeRef) - ? await customHandlers.get(typeRef)!.getElementIdsInCacheRange(this.storage, instanceListId, ids) - : await this.getElementIdsInCacheRange(typeRef, instanceListId, ids) + const customHandler = this.storage.getCustomCacheHandlerMap(this.entityRestClient).get(typeRef) + const idsInCacheRange = + customHandler && customHandler.getElementIdsInCacheRange + ? await customHandler.getElementIdsInCacheRange(this.storage, instanceListId, ids) + : await this.getElementIdsInCacheRange(typeRef, instanceListId, ids) if (idsInCacheRange.length === 0) { postMultipleEventUpdates.push(updates) @@ -772,23 +773,30 @@ export class DefaultEntityRestCache implements EntityRestCache { await this.storage.deleteIfExists(typeRef, deleteEvent.instanceListId, instanceId) await this.updateListIdOfMailAndUpdateCache(mail, instanceListId, instanceId) return update - } else if (await this.storage.isElementIdInCacheRange(typeRef, instanceListId, instanceId)) { - // No need to try to download something that's not there anymore - // We do not consult custom handlers here because they are only needed for list elements. 
- console.log("downloading create event for", getTypeId(typeRef), instanceListId, instanceId) - return this.entityRestClient - .load(typeRef, [instanceListId, instanceId]) - .then((entity) => this.storage.put(entity)) - .then(() => update) - .catch((e) => { - if (isExpectedErrorForSynchronization(e)) { - return null - } else { - throw e - } - }) } else { - return update + // If there is a custom handler we follow its decision. + // Otherwise, we do a range check to see if we need to keep the range up-to-date. + const shouldLoad = + (await this.storage.getCustomCacheHandlerMap(this.entityRestClient).get(typeRef)?.shouldLoadOnCreateEvent?.(update)) ?? + (await this.storage.isElementIdInCacheRange(typeRef, instanceListId, instanceId)) + if (shouldLoad) { + // No need to try to download something that's not there anymore + // We do not consult custom handlers here because they are only needed for list elements. + console.log("downloading create event for", getTypeId(typeRef), instanceListId, instanceId) + return this.entityRestClient + .load(typeRef, [instanceListId, instanceId]) + .then((entity) => this.storage.put(entity)) + .then(() => update) + .catch((e) => { + if (isExpectedErrorForSynchronization(e)) { + return null + } else { + throw e + } + }) + } else { + return update + } } } else { return update diff --git a/src/common/gui/SidebarSection.ts b/src/common/gui/SidebarSection.ts index b79d52380e38..70850ca4b71f 100644 --- a/src/common/gui/SidebarSection.ts +++ b/src/common/gui/SidebarSection.ts @@ -20,7 +20,7 @@ export class SidebarSection implements Component { const content = vnode.children if (hideIfEmpty && content == false) return null // Using loose equality to check if children has any contents return m( - ".sidebar-section.mb", + ".sidebar-section", { style: { color: theme.navigation_button, diff --git a/src/common/gui/base/Modal.ts b/src/common/gui/base/Modal.ts index a5780018828c..793fbdd7d5ea 100644 --- a/src/common/gui/base/Modal.ts +++ b/src/common/gui/base/Modal.ts @@ -4,7 +4,7 @@ import { theme } from "../theme" import type { Shortcut } from "../../misc/KeyManager" import { keyManager } from "../../misc/KeyManager" import { windowFacade } from "../../misc/WindowFacade" -import { insideRect, remove } from "@tutao/tutanota-utils" +import { insideRect, lastIndex, remove } from "@tutao/tutanota-utils" import { LayerType } from "../../../RootView" import { assertMainOrNodeBoot } from "../../api/common/Env" @@ -36,12 +36,8 @@ class Modal implements Component { return m( "#modal.fill-absolute", { - oncreate: (_) => { - // const lastComponent = last(this.components) - // if (lastComponent) { - // lastComponent.component.backgroundClick(e) - // } - }, + "aria-modal": true, + inert: !this.visible, style: { "z-index": LayerType.Modal, display: this.visible ? "" : "none", @@ -52,6 +48,7 @@ class Modal implements Component { ".fill-absolute", { key: wrapper.key, + inert: i !== lastIndex(array), oncreate: (vnode) => { // do not set visible=true already in display() because it leads to modal staying open in a second window in Chrome // because onbeforeremove is not called in that case to set visible=false. 
this is probably an optimization in Chrome to reduce diff --git a/src/common/gui/main-styles.ts b/src/common/gui/main-styles.ts index 4f5140c3af24..d51ce664e745 100644 --- a/src/common/gui/main-styles.ts +++ b/src/common/gui/main-styles.ts @@ -606,7 +606,7 @@ styles.registerStyle("main", () => { "word-break": "break-all", }, ".break-word-links a": { - "word-wrap": "break-word", + "overflow-wrap": "anywhere", }, ".text-prewrap": { "white-space": "pre-wrap", diff --git a/src/mail-app/mailLocator.ts b/src/mail-app/mailLocator.ts index a0b0a61d6149..3995d0df48f0 100644 --- a/src/mail-app/mailLocator.ts +++ b/src/mail-app/mailLocator.ts @@ -838,8 +838,9 @@ class MailLocator { } } if (this.webAuthn == null) { + const credentials: CredentialsContainer = isTest() ? ({} as CredentialsContainer) : navigator.credentials this.webAuthn = new WebauthnClient( - new BrowserWebauthn(navigator.credentials, this.domainConfigProvider().getCurrentDomainConfig()), + new BrowserWebauthn(credentials, this.domainConfigProvider().getCurrentDomainConfig()), this.domainConfigProvider(), isApp(), ) diff --git a/src/mail-app/workerUtils/worker/WorkerLocator.ts b/src/mail-app/workerUtils/worker/WorkerLocator.ts index e6e8ae9f6591..c56144a0a5ff 100644 --- a/src/mail-app/workerUtils/worker/WorkerLocator.ts +++ b/src/mail-app/workerUtils/worker/WorkerLocator.ts @@ -164,7 +164,9 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData) const mainInterface = worker.getMainInterface() - const suspensionHandler = new SuspensionHandler(mainInterface.infoMessageHandler, self) + const contextObject = isTest() ? globalThis : self + + const suspensionHandler = new SuspensionHandler(mainInterface.infoMessageHandler, contextObject) locator.instanceMapper = new InstanceMapper() locator.rsa = await createRsaImplementation(worker) @@ -446,7 +448,7 @@ export async function initLocator(worker: WorkerImpl, browserData: BrowserData) nonCachingEntityClient, // without cache ) }) - const scheduler = new SchedulerImpl(dateProvider, self, self) + const scheduler = new SchedulerImpl(dateProvider, contextObject, contextObject) locator.configFacade = lazyMemoized(async () => { const { ConfigurationDatabase } = await import("../../../common/api/worker/facades/lazy/ConfigurationDatabase.js") diff --git a/test/tests/IntegrationTest.ts b/test/tests/IntegrationTest.ts index e2bcd974f2b0..f7fb480b9129 100644 --- a/test/tests/IntegrationTest.ts +++ b/test/tests/IntegrationTest.ts @@ -15,6 +15,7 @@ import { neverNull } from "@tutao/tutanota-utils" import { initLocator, locator } from "../../src/mail-app/workerUtils/worker/WorkerLocator.js" import { browserDataStub, createTestEntity } from "./TestUtils.js" import { SessionType } from "../../src/common/api/common/SessionType.js" +import { object } from "testdouble" function loadFolders(folderListId: Id): Promise { return locator.cachingEntityClient.loadAll(MailFolderTypeRef, folderListId) @@ -35,7 +36,7 @@ o.spec("integration test", function () { o("login, read mails, update contact", async function () { env.staticUrl = "http://localhost:9000" env.versionNumber - initLocator(null as any, browserDataStub) + initLocator(object(), browserDataStub) o.timeout(20000) await locator.login.createSession("map-free@tutanota.de", "map", "Linux node", SessionType.Temporary, null) const folders = await loadMailboxSystemFolders() diff --git a/test/tests/api/worker/rest/EntityRestCacheTest.ts b/test/tests/api/worker/rest/EntityRestCacheTest.ts index d1b1acb5319c..401d7d3272e5 100644 --- 
a/test/tests/api/worker/rest/EntityRestCacheTest.ts +++ b/test/tests/api/worker/rest/EntityRestCacheTest.ts @@ -712,8 +712,8 @@ export function testEntityRestCache(name: string, getStorage: (userId: Id) => Pr // Move mail event: we don't try to load the mail again, we just update our cached mail await cache.entityEventsReceived( makeBatch([ - createUpdate(MailTypeRef, "listId1", "id3", OperationType.DELETE), - createUpdate(MailTypeRef, "listId2", "id3", OperationType.CREATE), + createUpdate(MailTypeRef, "listId1", getElementId(mails[2]), OperationType.DELETE), + createUpdate(MailTypeRef, "listId2", getElementId(mails[2]), OperationType.CREATE), ]), ) @@ -722,7 +722,7 @@ export function testEntityRestCache(name: string, getStorage: (userId: Id) => Pr throw new Error("This is not the mail you're looking for") }) const loadMock = mockAttribute(entityRestClient, entityRestClient.load, load) - const thrown = await assertThrows(Error, () => cache.load(MailTypeRef, ["listId1", "id3"])) + const thrown = await assertThrows(Error, () => cache.load(MailTypeRef, ["listId1", getElementId(mails[2])])) o(thrown.message).equals("This is not the mail you're looking for") //load was called when we tried to load the moved mail o(load.callCount).equals(1) @@ -744,9 +744,24 @@ export function testEntityRestCache(name: string, getStorage: (userId: Id) => Pr }) // list element notifications - o("list element create notifications are not put into cache", async function () { - await cache.entityEventsReceived(makeBatch([createUpdate(MailTypeRef, "listId1", createId("id1"), OperationType.CREATE)])) - }) + + if (name === "offline") { + o("when the list is not cached, list element create notifications are still put into cache", async function () { + const mail = createMailInstance("listId1", "id1", "i am a mail") + const load = func() + when(load(MailTypeRef, mail._id)).thenResolve(mail) + mockAttribute(entityRestClient, entityRestClient.load, load) + + await cache.entityEventsReceived(makeBatch([createUpdate(MailTypeRef, getListId(mail), getElementId(mail), OperationType.CREATE)])) + + o(await storage.get(MailTypeRef, getListId(mail), getElementId(mail))).deepEquals(mail) + }) + } else { + // With ephemeral cache we do not automatically download all mails because we don't need to. + o("when the list is not cached, mail create notifications are not put into cache", async function () { + await cache.entityEventsReceived(makeBatch([createUpdate(MailTypeRef, "listId1", createId("id1"), OperationType.CREATE)])) + }) + } o("list element update notifications are not put into cache", async function () { await cache.entityEventsReceived(makeBatch([createUpdate(MailTypeRef, "listId1", createId("id1"), OperationType.UPDATE)])) @@ -1152,6 +1167,7 @@ export function testEntityRestCache(name: string, getStorage: (userId: Id) => Pr o(loadRange.callCount).equals(1) // entities are provided from server unmockAttribute(loadRangeMock) }) + o("load list elements partly from server - range max to id2 loaded - loadMore", async function () { let mail0 = createMailInstance("listId1", "id0", "subject0") const cachedMails = await setupMailList(false, true)