diff --git a/core/app.js b/core/app.js index a8a37b3c1..76ddc8e02 100644 --- a/core/app.js +++ b/core/app.js @@ -3,35 +3,37 @@ * @module core/app * @flow */ -const autoBind = require('auto-bind') -const fse = require('fs-extra') -const _ = require('lodash') +const https = require('https') const os = require('os') const path = require('path') const url = require('url') -const uuid = require('uuid').v4 -const https = require('https') const { createGzip } = require('zlib') + +const autoBind = require('auto-bind') +const fse = require('fs-extra') +const _ = require('lodash') const semver = require('semver') +const uuid = require('uuid').v4 + const { rootCozyUrl } = require('cozy-client') const pkg = require('../package.json') const config = require('./config') -const { Pouch } = require('./pouch') -const { migrations, runMigrations } = require('./migrations') const Ignore = require('./ignore') +const { Local } = require('./local') const { Merge } = require('./merge') +const { migrations, runMigrations } = require('./migrations') +const { Pouch } = require('./pouch') const Prep = require('./prep') -const { Local } = require('./local') const { Remote } = require('./remote') +const Registration = require('./remote/registration') const { Sync } = require('./sync') const SyncState = require('./syncstate') -const Registration = require('./remote/registration') -const { baseLogger, logger, LOG_BASENAME } = require('./utils/logger') +const flags = require('./utils/flags') const { sendToTrash } = require('./utils/fs') +const { baseLogger, logger, LOG_BASENAME } = require('./utils/logger') const notes = require('./utils/notes') const web = require('./utils/web') -const flags = require('./utils/flags') /*:: import type EventEmitter from 'events' diff --git a/core/config.js b/core/config.js index 01eea89b8..80482feb1 100644 --- a/core/config.js +++ b/core/config.js @@ -5,9 +5,10 @@ */ const fs = require('fs') +const path = require('path') + const fse = require('fs-extra') const _ = require('lodash') -const path = require('path') const { hideOnWindows } = require('./utils/fs') const { logger } = require('./utils/logger') diff --git a/core/globals.js b/core/globals.js index b26dbc872..809a28758 100644 --- a/core/globals.js +++ b/core/globals.js @@ -7,15 +7,15 @@ require('../core/utils/modules_stubs').initialize() require('isomorphic-fetch') - +const Promise = require('bluebird') const WebSocket = require('ws') + global.WebSocket = WebSocket // We are using bluebird instead of native promises: // - they are easier to debug with long stack traces // - they have some nice helpers like Promise.delay, map, race, etc. 
// - to help transition from callbacks with asCallback and promisifyAll -const Promise = require('bluebird') global.Promise = Promise Promise.longStackTraces() diff --git a/core/ignore.js b/core/ignore.js index 42d4cf524..cb9fd51a0 100644 --- a/core/ignore.js +++ b/core/ignore.js @@ -47,9 +47,10 @@ * @flow */ +const fs = require('fs') const { basename, dirname, resolve } = require('path') + const { matcher } = require('micromatch') -const fs = require('fs') const { logger } = require('./utils/logger') @@ -128,7 +129,9 @@ function buildPattern(line /*: string */) /*: IgnorePattern */ { /** Parse many lines and build the corresponding pattern array */ function buildPatternArray(lines /*: string[] */) /*: IgnorePattern[] */ { - return Array.from(lines).filter(isNotBlankOrComment).map(buildPattern) + return Array.from(lines) + .filter(isNotBlankOrComment) + .map(buildPattern) } function isNotBlankOrComment(line /*: string */) /*: boolean */ { diff --git a/core/incompatibilities/platform.js b/core/incompatibilities/platform.js index fca2b8481..e6b12e879 100644 --- a/core/incompatibilities/platform.js +++ b/core/incompatibilities/platform.js @@ -299,26 +299,23 @@ const detectPathIncompatibilities = ( platform ).map(nameIncompatibility => _.merge({ path }, nameIncompatibility)) - const recursivePathIncompatibilities = ancestorNames.reduceRight( - ( - previousIncompatibilities /*: Array */, - name /*: string */, - index /*: number */, - pathComponents /*: string[] */ - ) => { - const path = pathComponents.slice(0, index + 1).join(sep) - const nameIncompatibilities = detectNameIncompatibilities( - name, - 'folder', - platform - ) - - return previousIncompatibilities.concat( - nameIncompatibilities.map(issue => _.merge({ path }, issue)) - ) - }, - pathIncompatibilities - ) + const recursivePathIncompatibilities = ancestorNames.reduceRight(( + previousIncompatibilities /*: Array */, + name /*: string */, + index /*: number */, + pathComponents /*: string[] */ + ) => { + const path = pathComponents.slice(0, index + 1).join(sep) + const nameIncompatibilities = detectNameIncompatibilities( + name, + 'folder', + platform + ) + + return previousIncompatibilities.concat( + nameIncompatibilities.map(issue => _.merge({ path }, issue)) + ) + }, pathIncompatibilities) return recursivePathIncompatibilities.filter(issue => issue != null) } diff --git a/core/local/channel_watcher/add_checksum.js b/core/local/channel_watcher/add_checksum.js index a6601635a..e5d6a44f5 100644 --- a/core/local/channel_watcher/add_checksum.js +++ b/core/local/channel_watcher/add_checksum.js @@ -14,9 +14,10 @@ * @flow */ -const _ = require('lodash') const path = require('path') +const _ = require('lodash') + const { logger } = require('../../utils/logger') const { measureTime } = require('../../utils/perfs') diff --git a/core/local/channel_watcher/add_infos.js b/core/local/channel_watcher/add_infos.js index 486d4582a..8e08d6871 100644 --- a/core/local/channel_watcher/add_infos.js +++ b/core/local/channel_watcher/add_infos.js @@ -8,9 +8,10 @@ * @flow */ -const _ = require('lodash') const path = require('path') +const _ = require('lodash') + const { kind } = require('../../metadata') const { logger } = require('../../utils/logger') const { measureTime } = require('../../utils/perfs') diff --git a/core/local/channel_watcher/dispatch.js b/core/local/channel_watcher/dispatch.js index daa8a7f18..87f9df1f0 100644 --- a/core/local/channel_watcher/dispatch.js +++ b/core/local/channel_watcher/dispatch.js @@ -9,8 +9,8 @@ const _ = 
require('lodash') -const { buildDir, buildFile } = require('../../metadata') const { WINDOWS_DATE_MIGRATION_FLAG } = require('../../config') +const { buildDir, buildFile } = require('../../metadata') const { logger } = require('../../utils/logger') const { measureTime } = require('../../utils/perfs') diff --git a/core/local/channel_watcher/incomplete_fixer.js b/core/local/channel_watcher/incomplete_fixer.js index 940beb747..7075c52b7 100644 --- a/core/local/channel_watcher/incomplete_fixer.js +++ b/core/local/channel_watcher/incomplete_fixer.js @@ -15,9 +15,9 @@ const path = require('path') -const stater = require('../stater') const { logger } = require('../../utils/logger') const { measureTime } = require('../../utils/perfs') +const stater = require('../stater') const STEP_NAME = 'incompleteFixer' @@ -273,8 +273,9 @@ function step( // (e.g. a temporary document now renamed), we'll want to make sure the old // document is removed to avoid having 2 documents with the same inode. // We can do this by keeping the completing renamed event. - const incompleteForExistingDoc /*: ?Metadata */ = - await opts.pouch.byLocalPath(item.event.path) + const incompleteForExistingDoc /*: ?Metadata */ = await opts.pouch.byLocalPath( + item.event.path + ) if ( incompleteForExistingDoc && !incompleteForExistingDoc.trashed && diff --git a/core/local/channel_watcher/index.js b/core/local/channel_watcher/index.js index 836805961..62c8a9fcd 100644 --- a/core/local/channel_watcher/index.js +++ b/core/local/channel_watcher/index.js @@ -23,20 +23,19 @@ const Promise = require('bluebird') const _ = require('lodash') const checksumer = require('./../checksumer') -const Producer = require('./parcel_producer') +const addChecksum = require('./add_checksum') const addInfos = require('./add_infos') +const awaitWriteFinish = require('./await_write_finish') +const dispatch = require('./dispatch') const filterIgnored = require('./filter_ignored') const fireLocatStartEvent = require('./fire_local_start_event') -const winIdenticalRenaming = require('./win_identical_renaming') -const scanFolder = require('./scan_folder') -const awaitWriteFinish = require('./await_write_finish') -const initialDiff = require('./initial_diff') -const addChecksum = require('./add_checksum') const incompleteFixer = require('./incomplete_fixer') +const initialDiff = require('./initial_diff') const overwrite = require('./overwrite') -const dispatch = require('./dispatch') +const Producer = require('./parcel_producer') +const scanFolder = require('./scan_folder') +const winIdenticalRenaming = require('./win_identical_renaming') const { logger } = require('../../utils/logger') - const { LOCAL_WATCHER_FATAL_EVENT } = require('../constants') /*:: diff --git a/core/local/channel_watcher/initial_diff.js b/core/local/channel_watcher/initial_diff.js index 094fd3251..846b2c228 100644 --- a/core/local/channel_watcher/initial_diff.js +++ b/core/local/channel_watcher/initial_diff.js @@ -9,14 +9,15 @@ * @flow */ -const _ = require('lodash') const path = require('path') +const _ = require('lodash') + +const Channel = require('./channel') const { WINDOWS_DATE_MIGRATION_FLAG } = require('../../config') const { kind } = require('../../metadata') const { logger } = require('../../utils/logger') const { measureTime } = require('../../utils/perfs') -const Channel = require('./channel') /*:: import type { Config } from '../../config' diff --git a/core/local/channel_watcher/parcel_producer.js b/core/local/channel_watcher/parcel_producer.js index 6efbf90f7..e2d9916dc 100644 
--- a/core/local/channel_watcher/parcel_producer.js +++ b/core/local/channel_watcher/parcel_producer.js @@ -3,10 +3,11 @@ * @flow */ -const autoBind = require('auto-bind') const path = require('path') -const Promise = require('bluebird') + const parcel = require('@parcel/watcher') +const autoBind = require('auto-bind') +const Promise = require('bluebird') const Channel = require('./channel') const { INITIAL_SCAN_DONE } = require('./event') diff --git a/core/local/channel_watcher/win_identical_renaming.js b/core/local/channel_watcher/win_identical_renaming.js index ac0cb6811..caa5f7435 100644 --- a/core/local/channel_watcher/win_identical_renaming.js +++ b/core/local/channel_watcher/win_identical_renaming.js @@ -13,8 +13,8 @@ const _ = require('lodash') const Channel = require('./channel') -const { logger } = require('../../utils/logger') const metadata = require('../../metadata') +const { logger } = require('../../utils/logger') const { measureTime } = require('../../utils/perfs') /*:: diff --git a/core/local/checksumer.js b/core/local/checksumer.js index 89b7d9453..b15ab3883 100644 --- a/core/local/checksumer.js +++ b/core/local/checksumer.js @@ -4,11 +4,12 @@ * @flow */ -const Promise = require('bluebird') -const async = require('async') const crypto = require('crypto') const fs = require('fs') +const async = require('async') +const Promise = require('bluebird') + const { measureTime } = require('../utils/perfs') /*:: @@ -27,12 +28,12 @@ function computeChecksum(filePath /*: string */, callback /*: Callback */) { const stream = fs.createReadStream(filePath) const checksum = crypto.createHash('md5') checksum.setEncoding('base64') - stream.on('end', function () { + stream.on('end', function() { stopMeasure() checksum.end() callback(null, checksum.read()) }) - stream.on('error', function (err) { + stream.on('error', function(err) { stopMeasure() checksum.end() callback(err) diff --git a/core/local/chokidar/analysis.js b/core/local/chokidar/analysis.js index 5b50f7280..9cc7a9386 100644 --- a/core/local/chokidar/analysis.js +++ b/core/local/chokidar/analysis.js @@ -29,13 +29,14 @@ */ const path = require('path') + const _ = require('lodash') -const { getInode } = require('./local_event') const localChange = require('./local_change') +const { getInode } = require('./local_event') +const metadata = require('../../metadata') const { logger } = require('../../utils/logger') const { measureTime } = require('../../utils/perfs') -const metadata = require('../../metadata') /*:: import type { LocalEvent } from './local_event' @@ -234,8 +235,9 @@ function analyseEvent( localChange.fileUpdate(e) ) case 'unlink': { - const moveChange /*: ?LocalFileMove */ = - localChange.maybeMoveFile(sameInodeChange) + const moveChange /*: ?LocalFileMove */ = localChange.maybeMoveFile( + sameInodeChange + ) if (moveChange && !moveChange.wip) delete e.old return ( localChange.fileMoveFromAddUnlink(sameInodeChange, e) || @@ -250,8 +252,9 @@ function analyseEvent( ) } case 'unlinkDir': { - const moveChange /*: ?LocalDirMove */ = - localChange.maybeMoveFolder(sameInodeChange) + const moveChange /*: ?LocalDirMove */ = localChange.maybeMoveFolder( + sameInodeChange + ) if (moveChange && !moveChange.wip) delete e.old return ( localChange.dirMoveFromAddUnlink(sameInodeChange, e) || diff --git a/core/local/chokidar/initial_scan.js b/core/local/chokidar/initial_scan.js index 2f8e99efe..edd1dc165 100644 --- a/core/local/chokidar/initial_scan.js +++ b/core/local/chokidar/initial_scan.js @@ -5,8 +5,8 @@ */ const chokidarEvent = 
require('./event') -const { logger } = require('../../utils/logger') const metadata = require('../../metadata') +const { logger } = require('../../utils/logger') const { SYNC_DIR_EMPTY_MESSAGE } = require('../errors') const log = logger({ @@ -79,8 +79,11 @@ const step = async ( .filter(e => e.type.startsWith('add')) .forEach(e => initialScanParams.paths.push(metadata.id(e.path))) - const { offlineEvents, unappliedMoves, emptySyncDir } = - await detectOfflineUnlinkEvents(initialScanParams, pouch) + const { + offlineEvents, + unappliedMoves, + emptySyncDir + } = await detectOfflineUnlinkEvents(initialScanParams, pouch) events = offlineEvents.concat(events) events = events.filter(e => { diff --git a/core/local/chokidar/local_change.js b/core/local/chokidar/local_change.js index 3b9e3d0d3..f88946268 100644 --- a/core/local/chokidar/local_change.js +++ b/core/local/chokidar/local_change.js @@ -6,11 +6,12 @@ * @flow */ -const _ = require('lodash') const path = require('path') -const metadata = require('../../metadata') +const _ = require('lodash') + const { getInode } = require('./local_event') +const metadata = require('../../metadata') const { logger } = require('../../utils/logger') /*:: @@ -389,8 +390,9 @@ function fileMoveFromUnlinkAdd( sameInodeChange /*: ?LocalChange */, e /*: LocalFileAdded */ ) /*: * */ { - const unlinkChange /*: ?LocalFileDeletion */ = - maybeDeleteFile(sameInodeChange) + const unlinkChange /*: ?LocalFileDeletion */ = maybeDeleteFile( + sameInodeChange + ) if (!unlinkChange) return if ( unlinkChange.old && @@ -424,8 +426,9 @@ function dirMoveFromUnlinkAdd( sameInodeChange /*: ?LocalChange */, e /*: LocalDirAdded */ ) /*: * */ { - const unlinkChange /*: ?LocalDirDeletion */ = - maybeDeleteFolder(sameInodeChange) + const unlinkChange /*: ?LocalDirDeletion */ = maybeDeleteFolder( + sameInodeChange + ) if (!unlinkChange) return if ( unlinkChange.old && @@ -477,8 +480,9 @@ function fileMoveFromFileDeletionChange( sameInodeChange /*: ?LocalChange */, e /*: LocalFileUpdated */ ) { - const fileDeletion /*: ?LocalFileDeletion */ = - maybeDeleteFile(sameInodeChange) + const fileDeletion /*: ?LocalFileDeletion */ = maybeDeleteFile( + sameInodeChange + ) if (!fileDeletion) return // There was an unlink on the same file, this is most probably a move and replace const src = fileDeletion.old @@ -864,8 +868,9 @@ function convertDirMoveToDeletion(samePathChange /*: ?LocalChange */) { } function ignoreDirAdditionThenDeletion(samePathChange /*: ?LocalChange */) { - const addChangeSamePath /*: ?LocalDirAddition */ = - maybePutFolder(samePathChange) + const addChangeSamePath /*: ?LocalDirAddition */ = maybePutFolder( + samePathChange + ) if (addChangeSamePath && addChangeSamePath.wip) { // $FlowFixMe addChangeSamePath.type = 'Ignored' @@ -880,8 +885,9 @@ function ignoreDirAdditionThenDeletion(samePathChange /*: ?LocalChange */) { } function ignoreFileAdditionThenDeletion(samePathChange /*: ?LocalChange */) { - const addChangeSamePath /*: ?LocalFileAddition */ = - maybeAddFile(samePathChange) + const addChangeSamePath /*: ?LocalFileAddition */ = maybeAddFile( + samePathChange + ) if (addChangeSamePath && addChangeSamePath.wip) { // $FlowFixMe addChangeSamePath.type = 'Ignored' @@ -898,8 +904,9 @@ function ignoreFileAdditionThenDeletion(samePathChange /*: ?LocalChange */) { } function ignoreUnmergedDirMoveThenDeletion(samePathChange /*: ?LocalChange */) { - const moveChangeSamePath /*: ?LocalDirMove */ = - maybeMoveFolder(samePathChange) + const moveChangeSamePath /*: ?LocalDirMove */ = 
maybeMoveFolder( + samePathChange + ) if (moveChangeSamePath && !moveChangeSamePath.old) { // $FlowFixMe moveChangeSamePath.type = 'Ignored' diff --git a/core/local/chokidar/normalize_paths.js b/core/local/chokidar/normalize_paths.js index 0afd1669d..d7309fabe 100644 --- a/core/local/chokidar/normalize_paths.js +++ b/core/local/chokidar/normalize_paths.js @@ -4,9 +4,10 @@ * @flow */ -const Promise = require('bluebird') const path = require('path') +const Promise = require('bluebird') + const { logger } = require('../../utils/logger') const log = logger({ @@ -29,33 +30,32 @@ const step = async ( ) /*: Promise */ => { const normalizedPaths = [] - return new Promise.mapSeries( - changes, - async (c /*: LocalChange */) /*: Promise */ => { - if (c.type !== 'Ignored') { - const parentPath = path.dirname(c.path) - const parent = - parentPath !== '.' ? await pouch.bySyncedPath(parentPath) : null - const normalized = normalizedPath( - c.path, - c.old ? c.old.path : undefined, - parent, - normalizedPaths + return new Promise.mapSeries(changes, async ( + c /*: LocalChange */ + ) /*: Promise */ => { + if (c.type !== 'Ignored') { + const parentPath = path.dirname(c.path) + const parent = + parentPath !== '.' ? await pouch.bySyncedPath(parentPath) : null + const normalized = normalizedPath( + c.path, + c.old ? c.old.path : undefined, + parent, + normalizedPaths + ) + normalizedPaths.push(normalized) + + if (c.path !== normalized) { + log.debug( + 'normalizing local path to match existing doc and parent norms', + { path: c.path, normalized } ) - normalizedPaths.push(normalized) - - if (c.path !== normalized) { - log.debug( - 'normalizing local path to match existing doc and parent norms', - { path: c.path, normalized } - ) - c.path = normalized - } + c.path = normalized } - - return c } - ) + + return c + }) } const previouslyNormalizedPath = ( diff --git a/core/local/chokidar/prepare_events.js b/core/local/chokidar/prepare_events.js index b1e715853..2da77e198 100644 --- a/core/local/chokidar/prepare_events.js +++ b/core/local/chokidar/prepare_events.js @@ -24,9 +24,10 @@ * */ +const path = require('path') + const Promise = require('bluebird') const fse = require('fs-extra') -const path = require('path') const { logger } = require('../../utils/logger') diff --git a/core/local/chokidar/watcher.js b/core/local/chokidar/watcher.js index 98a15f399..50923f2da 100644 --- a/core/local/chokidar/watcher.js +++ b/core/local/chokidar/watcher.js @@ -16,10 +16,11 @@ * @flow */ +const path = require('path') + const autoBind = require('auto-bind') const Promise = require('bluebird') const chokidar = require('chokidar') -const path = require('path') const analysis = require('./analysis') const checksumer = require('../checksumer') @@ -29,12 +30,11 @@ const initialScan = require('./initial_scan') const normalizePaths = require('./normalize_paths') const prepareEvents = require('./prepare_events') const sendToPrep = require('./send_to_prep') -const stater = require('../stater') -const syncDir = require('../sync_dir') const { logger } = require('../../utils/logger') const { measureTime } = require('../../utils/perfs') - const { LOCAL_WATCHER_FATAL_EVENT } = require('../constants') +const stater = require('../stater') +const syncDir = require('../sync_dir') /*:: import type { Pouch } from '../../pouch' @@ -168,22 +168,22 @@ class LocalWatcher { 'unlink', 'unlinkDir' ]) { - this.watcher.on( - eventType, - (path /*: ?string */, stats /*: ?fs.Stats */) => { - const isInitialScan = !this.initialScanParams.flushed - 
log.chokidar.debug(eventType, { path, stats, isInitialScan }) - const newEvent = chokidarEvent.build(eventType, path, stats) - if (newEvent.type !== eventType) { - log.debug('fixed wrong fsevents event type', { - eventType, - event: newEvent - }) - } - this.buffer.push(newEvent) - this.events.emit('buffering-start') + this.watcher.on(eventType, ( + path /*: ?string */, + stats /*: ?fs.Stats */ + ) => { + const isInitialScan = !this.initialScanParams.flushed + log.chokidar.debug(eventType, { path, stats, isInitialScan }) + const newEvent = chokidarEvent.build(eventType, path, stats) + if (newEvent.type !== eventType) { + log.debug('fixed wrong fsevents event type', { + eventType, + event: newEvent + }) } - ) + this.buffer.push(newEvent) + this.events.emit('buffering-start') + }) } this.watcher diff --git a/core/local/index.js b/core/local/index.js index a1296c7c1..fdaaeac2d 100644 --- a/core/local/index.js +++ b/core/local/index.js @@ -4,23 +4,23 @@ * @flow */ -const async = require('async') -const autoBind = require('auto-bind') const fs = require('fs').promises -const fse = require('fs-extra') const path = require('path') const stream = require('stream') +const async = require('async') +const autoBind = require('auto-bind') const bluebird = require('bluebird') +const fse = require('fs-extra') const { TMP_DIR_NAME } = require('./constants') -const { NOTE_MIME_TYPE } = require('../remote/constants') -const { isRetryableNetworkError } = require('../remote/errors') const stater = require('./stater') const metadata = require('../metadata') -const { hideOnWindows } = require('../utils/fs') -const watcher = require('./watcher') const syncDir = require('./sync_dir') +const watcher = require('./watcher') +const { NOTE_MIME_TYPE } = require('../remote/constants') +const { isRetryableNetworkError } = require('../remote/errors') +const { hideOnWindows } = require('../utils/fs') const { logger } = require('../utils/logger') const { measureTime } = require('../utils/perfs') const sentry = require('../utils/sentry') @@ -359,7 +359,7 @@ class Local /*:: implements Reader, Writer */ { metadata.updateLocal(doc) } ], - function (err) { + function(err) { stopMeasure() if (err) { log.warn('addFile failed', { path: doc.path, err, doc }) diff --git a/core/local/stater.js b/core/local/stater.js index 304fe2c0d..4df5932e0 100644 --- a/core/local/stater.js +++ b/core/local/stater.js @@ -5,6 +5,7 @@ */ const fs = require('fs') + const fse = require('fs-extra') let winfs diff --git a/core/merge.js b/core/merge.js index 568cb4a83..67fed066f 100644 --- a/core/merge.js +++ b/core/merge.js @@ -3,16 +3,17 @@ * @flow */ +const path = require('path') + const autoBind = require('auto-bind') const _ = require('lodash') -const path = require('path') const IdConflict = require('./IdConflict') const metadata = require('./metadata') const move = require('./move') +const { FILE_TYPE: REMOTE_FILE_TYPE } = require('./remote/constants') const { otherSide } = require('./side') const { logger } = require('./utils/logger') -const { FILE_TYPE: REMOTE_FILE_TYPE } = require('./remote/constants') /*:: import type { IdConflictInfo } from './IdConflict' diff --git a/core/metadata.js b/core/metadata.js index 0312e70b1..f52b4b035 100644 --- a/core/metadata.js +++ b/core/metadata.js @@ -31,16 +31,11 @@ * @flow */ -const _ = require('lodash') -const { clone } = _ -const mime = require('./utils/mime') -const deepDiff = require('deep-diff').diff const path = require('path') -const { logger } = require('./utils/logger') -const timestamp = 
require('./utils/timestamp') -const pathUtils = require('./utils/path') -const conflicts = require('./utils/conflicts') +const deepDiff = require('deep-diff').diff +const _ = require('lodash') +const { clone } = _ const { detectPathIncompatibilities, @@ -51,6 +46,11 @@ const { FILE_TYPE: REMOTE_FILE_TYPE } = require('./remote/constants') const { SIDE_NAMES, otherSide } = require('./side') +const conflicts = require('./utils/conflicts') +const { logger } = require('./utils/logger') +const mime = require('./utils/mime') +const pathUtils = require('./utils/path') +const timestamp = require('./utils/timestamp') /*:: import type { PlatformIncompatibility } from './incompatibilities/platform' @@ -413,8 +413,10 @@ function detectIncompatibilities( path.join(syncPath, metadata.path), platform ) - const incompatibilities /*: PlatformIncompatibility[] */ = - detectPathIncompatibilities(metadata.path, metadata.docType) + const incompatibilities /*: PlatformIncompatibility[] */ = detectPathIncompatibilities( + metadata.path, + metadata.docType + ) if (pathLenghIncompatibility) { incompatibilities.unshift(pathLenghIncompatibility) } diff --git a/core/migrations/index.js b/core/migrations/index.js index 5d3cce4a0..9dd626977 100644 --- a/core/migrations/index.js +++ b/core/migrations/index.js @@ -6,9 +6,6 @@ const PouchDB = require('pouchdb') const uuid = require('uuid').v4 -const { logger } = require('../utils/logger') -const { PouchError } = require('../pouch/error') -const migrations = require('./migrations') const { INITIAL_SCHEMA, MIGRATION_RESULT_COMPLETE, @@ -17,6 +14,9 @@ const { SCHEMA_DOC_ID, SCHEMA_INITIAL_VERSION } = require('./constants') +const migrations = require('./migrations') +const { PouchError } = require('../pouch/error') +const { logger } = require('../utils/logger') /*:: import type { SavedMetadata } from '../metadata' @@ -175,9 +175,8 @@ async function migrationDBPath( originalDBInfo /*: PouchDBInfo */ ) /*: Promise */ { const date = new Date() - const dateString = `${date.getFullYear()}-${ - date.getMonth() + 1 - }-${date.getDate()}` + const dateString = `${date.getFullYear()}-${date.getMonth() + + 1}-${date.getDate()}` const safeUUID = uuid().replace(/-/g, '') return `${originalDBInfo.db_name}-migration-${dateString}-${safeUUID}` } diff --git a/core/migrations/migrations.js b/core/migrations/migrations.js index df88d7370..e0eed0b68 100644 --- a/core/migrations/migrations.js +++ b/core/migrations/migrations.js @@ -4,6 +4,7 @@ */ const path = require('path') + const { Promise } = require('bluebird') const metadata = require('../metadata') diff --git a/core/pouch/index.js b/core/pouch/index.js index d1d8fbdc4..a695b4678 100644 --- a/core/pouch/index.js +++ b/core/pouch/index.js @@ -3,19 +3,20 @@ * @flow */ +const path = require('path') + +const async = require('async') const autoBind = require('auto-bind') const Promise = require('bluebird') -const PouchDB = require('pouchdb') -const async = require('async') const fse = require('fs-extra') const _ = require('lodash') const { isEqual } = _ -const path = require('path') +const PouchDB = require('pouchdb') const metadata = require('../metadata') -const { logger } = require('../utils/logger') const { PouchError } = require('./error') const remoteConstants = require('../remote/constants') +const { logger } = require('../utils/logger') /*:: import type { Config } from '../config' @@ -511,7 +512,7 @@ class Pouch { async addByChecksumView() { /* !pragma no-coverage-next */ /* istanbul ignore next */ - const query = function (doc) { + 
const query = function(doc) { if ('md5sum' in doc) { // $FlowFixMe return emit(doc.md5sum) // eslint-disable-line no-undef @@ -524,7 +525,7 @@ class Pouch { async addByRemoteIdView() { /* !pragma no-coverage-next */ /* istanbul ignore next */ - const query = function (doc) { + const query = function(doc) { if ('remote' in doc) { // $FlowFixMe return emit(doc.remote._id) // eslint-disable-line no-undef @@ -534,7 +535,7 @@ class Pouch { } async addNeedsContentFetchingView() { - const query = function (doc) { + const query = function(doc) { if (doc.needsContentFetching && !doc.trashed) { // $FlowFixMe return emit(doc._id) // eslint-disable-line no-undef diff --git a/core/remote/cozy.js b/core/remote/cozy.js index c662fddca..eb6fb635f 100644 --- a/core/remote/cozy.js +++ b/core/remote/cozy.js @@ -3,14 +3,16 @@ * @flow */ +const path = require('path') + const autoBind = require('auto-bind') -const OldCozyClient = require('cozy-client-js').Client +const addSecretEventListener = require('secret-event-listener') + const CozyClient = require('cozy-client').default -const { FetchError } = require('cozy-stack-client') const { Q } = require('cozy-client') +const OldCozyClient = require('cozy-client-js').Client const cozyFlags = require('cozy-flags').default -const path = require('path') -const addSecretEventListener = require('secret-event-listener') +const { FetchError } = require('cozy-stack-client') const { FILES_DOCTYPE, @@ -20,7 +22,6 @@ const { MAX_FILE_SIZE, OAUTH_CLIENTS_DOCTYPE } = require('./constants') -const { DirectoryNotFound } = require('./errors') const { dropSpecialDocs, withDefaultValues, @@ -28,8 +29,9 @@ const { jsonApiToRemoteJsonDoc, jsonFileVersionToRemoteFileVersion } = require('./document') -const { logger } = require('../utils/logger') +const { DirectoryNotFound } = require('./errors') const { sortBy } = require('../utils/array') +const { logger } = require('../utils/logger') /*:: import type { CozyRealtime } from 'cozy-realtime' @@ -641,11 +643,7 @@ async function fetchChangesFromFeed( batchSize /*: number */, remoteDocs /*: $ReadOnlyArray */ = [] ) /*: Promise<{ last_seq: string, remoteDocs: $ReadOnlyArray }> */ { - const { - newLastSeq: last_seq, - pending, - results - } = await client + const { newLastSeq: last_seq, pending, results } = await client .collection(FILES_DOCTYPE) .fetchChanges( { since, includeDocs: true, limit: batchSize }, @@ -668,11 +666,7 @@ async function fetchInitialChanges( batchSize /*: number */, remoteDocs /*: CouchDBDoc[] */ = [] ) /*: Promise<{ last_seq: string, remoteDocs: CouchDBDoc[] }> */ { - const { - newLastSeq: last_seq, - pending, - results - } = await client + const { newLastSeq: last_seq, pending, results } = await client .collection(FILES_DOCTYPE) .fetchChanges( { since, includeDocs: true, limit: batchSize }, diff --git a/core/remote/index.js b/core/remote/index.js index 05f124f61..be097b7df 100644 --- a/core/remote/index.js +++ b/core/remote/index.js @@ -4,14 +4,12 @@ * @flow */ -const autoBind = require('auto-bind') -const Promise = require('bluebird') const path = require('path') + const async = require('async') +const autoBind = require('auto-bind') +const Promise = require('bluebird') -const { logger } = require('../utils/logger') -const { measureTime } = require('../utils/perfs') -const pathUtils = require('../utils/path') const metadata = require('../metadata') const { ROOT_DIR_ID, DIR_TYPE } = require('./constants') const { RemoteCozy } = require('./cozy') @@ -22,8 +20,11 @@ const { } = require('./errors') const { 
RemoteWarningPoller } = require('./warning_poller') const { RemoteWatcher } = require('./watcher') -const timestamp = require('../utils/timestamp') +const { logger } = require('../utils/logger') +const pathUtils = require('../utils/path') +const { measureTime } = require('../utils/perfs') const streamUtils = require('../utils/stream') +const timestamp = require('../utils/timestamp') /*:: import type EventEmitter from 'events' @@ -500,7 +501,10 @@ class Remote /*:: implements Reader, Writer */ { /** Extract the remote parent path and leaf name from a local path */ function dirAndName(localPath /*: string */) /*: [string, string] */ { - const dir = path.dirname(localPath).split(path.sep).join('/') + const dir = path + .dirname(localPath) + .split(path.sep) + .join('/') const name = path.basename(localPath) return [dir, name] } diff --git a/core/remote/registration.js b/core/remote/registration.js index 9e0ef0d9c..f577752de 100644 --- a/core/remote/registration.js +++ b/core/remote/registration.js @@ -3,9 +3,10 @@ * @module core/remote/registration */ -const autoBind = require('auto-bind') -const os = require('os') const http = require('http') +const os = require('os') + +const autoBind = require('auto-bind') const open = require('open') const CozyClient = require('cozy-client-js').Client diff --git a/core/remote/watcher/index.js b/core/remote/watcher/index.js index bef40e3bf..3fcea43f0 100644 --- a/core/remote/watcher/index.js +++ b/core/remote/watcher/index.js @@ -3,12 +3,13 @@ * @flow */ +const async = require('async') const autoBind = require('auto-bind') const Promise = require('bluebird') const _ = require('lodash') -const async = require('async') const metadata = require('../../metadata') +const { logger } = require('../../utils/logger') const remoteChange = require('../change') const { FILE_TYPE, @@ -17,12 +18,11 @@ const { REMOTE_WATCHER_ERROR_EVENT, REMOTE_WATCHER_FATAL_EVENT } = require('../constants') -const remoteErrors = require('../errors') const { inRemoteTrash } = require('../document') -const squashMoves = require('./squashMoves') +const remoteErrors = require('../errors') const normalizePaths = require('./normalizePaths') -const { logger } = require('../../utils/logger') const { RealtimeManager } = require('./realtime_manager') +const squashMoves = require('./squashMoves') /*:: import type { Config } from '../../config' diff --git a/core/remote/watcher/normalizePaths.js b/core/remote/watcher/normalizePaths.js index 4deded8bc..a4ba3b49d 100644 --- a/core/remote/watcher/normalizePaths.js +++ b/core/remote/watcher/normalizePaths.js @@ -25,45 +25,44 @@ const normalizePaths = async ( ) /*: Promise */ => { const normalizedPaths = [] - return new Promise.mapSeries( - changes, - async (c /*: RemoteChange */) /*: Promise */ => { - if ( - c.type === 'FileAddition' || - c.type === 'DirAddition' || - c.type === 'FileUpdate' || - c.type === 'DirUpdate' || - c.type === 'FileMove' || - c.type === 'DirMove' || - c.type === 'DescendantChange' - ) { - const old /*: ?SavedMetadata */ = - c.type === 'FileMove' || c.type === 'DirMove' - ? c.was - : await pouch.byRemoteIdMaybe(c.doc.remote._id) - const parent /*: ?SavedMetadata */ = c.doc.remote.dir_id - ? await pouch.byRemoteIdMaybe(c.doc.remote.dir_id) - : undefined - const normalized = normalizedPath( - c.doc.path, - old ? 
old.path : undefined, - parent, - normalizedPaths - ) - normalizedPaths.push(normalized) + return new Promise.mapSeries(changes, async ( + c /*: RemoteChange */ + ) /*: Promise */ => { + if ( + c.type === 'FileAddition' || + c.type === 'DirAddition' || + c.type === 'FileUpdate' || + c.type === 'DirUpdate' || + c.type === 'FileMove' || + c.type === 'DirMove' || + c.type === 'DescendantChange' + ) { + const old /*: ?SavedMetadata */ = + c.type === 'FileMove' || c.type === 'DirMove' + ? c.was + : await pouch.byRemoteIdMaybe(c.doc.remote._id) + const parent /*: ?SavedMetadata */ = c.doc.remote.dir_id + ? await pouch.byRemoteIdMaybe(c.doc.remote.dir_id) + : undefined + const normalized = normalizedPath( + c.doc.path, + old ? old.path : undefined, + parent, + normalizedPaths + ) + normalizedPaths.push(normalized) - if (c.doc.path !== normalized) { - log.trace('normalizing path to match existing doc and parent norms', { - path: normalized, - oldpath: c.doc.path - }) - c.doc.path = normalized - } + if (c.doc.path !== normalized) { + log.trace('normalizing path to match existing doc and parent norms', { + path: normalized, + oldpath: c.doc.path + }) + c.doc.path = normalized } - - return c } - ) + + return c + }) } module.exports = normalizePaths diff --git a/core/remote/watcher/realtime_manager.js b/core/remote/watcher/realtime_manager.js index cb4b567a9..7eb7ea329 100644 --- a/core/remote/watcher/realtime_manager.js +++ b/core/remote/watcher/realtime_manager.js @@ -5,8 +5,10 @@ const http = require('http') const https = require('https') -const _ = require('lodash') + const autoBind = require('auto-bind') +const _ = require('lodash') + const { RealtimePlugin } = require('cozy-realtime') const { logger } = require('../../utils/logger') diff --git a/core/remote/watcher/squashMoves.js b/core/remote/watcher/squashMoves.js index e5f754833..b24da0920 100644 --- a/core/remote/watcher/squashMoves.js +++ b/core/remote/watcher/squashMoves.js @@ -3,9 +3,10 @@ * @flow */ -const _ = require('lodash') const path = require('path') +const _ = require('lodash') + const metadata = require('../../metadata') const remoteChange = require('../change') @@ -41,8 +42,9 @@ const findParentMoves = ( previousChanges /*: RemoteChange[] */, encounteredMoves /*: Array */ ) => { - const parentMove /*: ?RemoteDirMove|RemoteDescendantChange */ = - encounteredMoves.find(move => remoteChange.isChildMove(move, change)) + const parentMove /*: ?RemoteDirMove|RemoteDescendantChange */ = encounteredMoves.find( + move => remoteChange.isChildMove(move, change) + ) let squashedParentMove /*: ?RemoteDirMove|RemoteDescendantChange */ if (parentMove) { for (const previousChange of previousChanges) { diff --git a/core/sync/errors.js b/core/sync/errors.js index df0d493fd..472c1ecfb 100644 --- a/core/sync/errors.js +++ b/core/sync/errors.js @@ -1,7 +1,7 @@ /* @flow */ -const metadata = require('../metadata') const { IncompatibleDocError } = require('../incompatibilities/platform') +const metadata = require('../metadata') const { HEARTBEAT: REMOTE_HEARTBEAT } = require('../remote/constants') const remoteErrors = require('../remote/errors') const { logger } = require('../utils/logger') diff --git a/core/sync/index.js b/core/sync/index.js index 7a2068028..9bc0a8aa7 100644 --- a/core/sync/index.js +++ b/core/sync/index.js @@ -3,23 +3,23 @@ * @flow */ +const { dirname, sep } = require('path') + const autoBind = require('auto-bind') const Promise = require('bluebird') - -const { dirname, sep } = require('path') const _ = require('lodash') const { 
IncompatibleDocError } = require('../incompatibilities/platform') const metadata = require('../metadata') +const { DependencyGraph } = require('./dependency_graph') +const syncErrors = require('./errors') +const remoteConstants = require('../remote/constants') const remoteDocument = require('../remote/document') const remoteErrors = require('../remote/errors') -const remoteConstants = require('../remote/constants') const { otherSide } = require('../side') +const { LifeCycle } = require('../utils/lifecycle') const { logger } = require('../utils/logger') const { measureTime } = require('../utils/perfs') -const { LifeCycle } = require('../utils/lifecycle') -const syncErrors = require('./errors') -const { DependencyGraph } = require('./dependency_graph') /*:: import type EventEmitter from 'events' @@ -430,8 +430,9 @@ class Sync { } ) if (reschedulingStart > 0) { - const changesToReschedule = - this.currentChangesToApply.splice(reschedulingStart) + const changesToReschedule = this.currentChangesToApply.splice( + reschedulingStart + ) await rescheduleChanges(changesToReschedule, this) } diff --git a/core/syncstate.js b/core/syncstate.js index 4ff9333fd..f1d1116f3 100644 --- a/core/syncstate.js +++ b/core/syncstate.js @@ -3,8 +3,9 @@ * @flow */ -const autoBind = require('auto-bind') const EventEmitter = require('events') + +const autoBind = require('auto-bind') const deepDiff = require('deep-diff').diff /*:: @@ -205,16 +206,16 @@ module.exports = class SyncState extends EventEmitter { const updatedUserAlerts = newState.userAlerts || state.userAlerts const userAlerts = newState.syncCurrentSeq != null - ? updatedUserAlerts.reduce( - (alerts /*: UserAlert[] */, alert /*: UserAlert */) => { - if (alert.seq && alert.seq <= newState.syncCurrentSeq) { - return alerts - } else { - return alerts.concat(alert) - } - }, - [] - ) + ? 
updatedUserAlerts.reduce(( + alerts /*: UserAlert[] */, + alert /*: UserAlert */ + ) => { + if (alert.seq && alert.seq <= newState.syncCurrentSeq) { + return alerts + } else { + return alerts.concat(alert) + } + }, []) : updatedUserAlerts newState = { diff --git a/core/utils/capabilities.js b/core/utils/capabilities.js index 5855a772a..4532c1a4f 100644 --- a/core/utils/capabilities.js +++ b/core/utils/capabilities.js @@ -3,8 +3,8 @@ * @flow */ -const { RemoteCozy } = require('../remote/cozy') const { logger } = require('./logger') +const { RemoteCozy } = require('../remote/cozy') /*:: import type { Config } from '../config' */ diff --git a/core/utils/fs.js b/core/utils/fs.js index 0d10af879..3ec1b93fa 100644 --- a/core/utils/fs.js +++ b/core/utils/fs.js @@ -2,8 +2,9 @@ * @module core/utils/fs */ -const Promise = require('bluebird') const childProcess = require('child_process') + +const Promise = require('bluebird') const { shell } = require('electron') const fse = require('fs-extra') diff --git a/core/utils/logger.js b/core/utils/logger.js index 9e4c48e0d..8c0d48943 100644 --- a/core/utils/logger.js +++ b/core/utils/logger.js @@ -4,9 +4,10 @@ * @flow weak */ -const fse = require('fs-extra') const os = require('os') const path = require('path') + +const fse = require('fs-extra') const _ = require('lodash') const winston = require('winston') const DailyRotateFile = require('winston-daily-rotate-file') diff --git a/core/utils/mime.js b/core/utils/mime.js index 143a66421..ea50141d7 100644 --- a/core/utils/mime.js +++ b/core/utils/mime.js @@ -1,6 +1,7 @@ -const mime = require('mime') const path = require('path') +const mime = require('mime') + const { NOTE_MIME_TYPE } = require('../remote/constants') function lookup(filepath) { diff --git a/core/utils/notes.js b/core/utils/notes.js index 4a2bdd67b..dc7e77811 100644 --- a/core/utils/notes.js +++ b/core/utils/notes.js @@ -1,13 +1,15 @@ /* @flow */ -const fse = require('fs-extra') const path = require('path') + +const fse = require('fs-extra') const tar = require('tar') + const { default: CozyClient, models } = require('cozy-client') +const { logger } = require('./logger') const { NOTE_MIME_TYPE } = require('../remote/constants') -const { logger } = require('./logger') const log = logger({ component: 'Notes' }) diff --git a/core/utils/sentry.js b/core/utils/sentry.js index a1c4cce69..308fda2eb 100644 --- a/core/utils/sentry.js +++ b/core/utils/sentry.js @@ -14,18 +14,17 @@ * 3. 
`node sentry-symbols.js` */ -const { session } = require('electron') +const url = require('url') + const Sentry = require('@sentry/electron') const { ExtraErrorData: ExtraErrorDataIntegration } = require('@sentry/integrations') -const url = require('url') +const { session } = require('electron') const _ = require('lodash') const winston = require('winston') const { combine, json } = winston.format -const { SESSION_PARTITION_NAME } = require('../../gui/js/network') -const { HOURS } = require('./time') const { FATAL_LVL, ERROR_LVL, @@ -36,6 +35,8 @@ const { baseLogger, logger } = require('./logger') +const { HOURS } = require('./time') +const { SESSION_PARTITION_NAME } = require('../../gui/js/network') const log = logger({ component: 'Sentry' diff --git a/core/utils/web.js b/core/utils/web.js index 71e6d3b51..d71d13627 100644 --- a/core/utils/web.js +++ b/core/utils/web.js @@ -1,11 +1,12 @@ /* @flow */ const path = require('path') + const { generateWebLink } = require('cozy-client') -const { DIR_TYPE } = require('../remote/constants') const capabilities = require('./capabilities') const { logger } = require('./logger') +const { DIR_TYPE } = require('../remote/constants') const log = new logger({ component: 'Web' diff --git a/dev/capture.js b/dev/capture.js index 8ffa7b0ce..b0afc8407 100644 --- a/dev/capture.js +++ b/dev/capture.js @@ -4,8 +4,9 @@ * @flow */ -const program = require('commander') const path = require('path') + +const program = require('commander') const { app } = require('electron') const local = require('./capture/local') diff --git a/dev/capture/local.js b/dev/capture/local.js index c529751b1..5248513e7 100644 --- a/dev/capture/local.js +++ b/dev/capture/local.js @@ -4,18 +4,18 @@ * @flow */ +const EventEmitter = require('events') +const path = require('path') + const Promise = require('bluebird') const chokidar = require('chokidar') -const EventEmitter = require('events') const fse = require('fs-extra') -const path = require('path') const sinon = require('sinon') const { Config, watcherType } = require('../../core/config') const { Ignore } = require('../../core/ignore') const { INITIAL_SCAN_DONE } = require('../../core/local/channel_watcher/event') const ParcelProducer = require('../../core/local/channel_watcher/parcel_producer') - const fixturesHelpers = require('../../test/support/helpers/scenarios') /*:: @@ -134,7 +134,7 @@ const runAndRecordChokidarEvents = scenario => { return new Promise((resolve, reject) => { const watcher = chokidar.watch('.', chokidarOptions) const cleanCallback = cb => - function () { + function() { return watcher .close() .then(cb.apply(null, arguments), cb.apply(null, arguments)) diff --git a/dev/capture/remote.js b/dev/capture/remote.js index 94c99f973..e0b81fa9b 100644 --- a/dev/capture/remote.js +++ b/dev/capture/remote.js @@ -4,19 +4,19 @@ * @flow */ +const path = require('path') + const Promise = require('bluebird') const fse = require('fs-extra') -const path = require('path') const _ = require('lodash') const { Pouch } = require('../../core/pouch') -const { RemoteCozy } = require('../../core/remote/cozy') const { ROOT_DIR_ID } = require('../../core/remote/constants') +const { RemoteCozy } = require('../../core/remote/cozy') const timestamp = require('../../core/utils/timestamp') - +const Builders = require('../../test/support/builders') const configHelpers = require('../../test/support/helpers/config') const cozyHelpers = require('../../test/support/helpers/cozy') -const Builders = require('../../test/support/builders') /*:: import type { 
MetadataRemoteInfo } from '../../core/metadata' @@ -32,7 +32,7 @@ const ROOT_DIR = { path: '/' } -const createInitialTree = async function ( +const createInitialTree = async function( scenario /*: * */, cozy /*: * */, pouch /*: Pouch */ diff --git a/dev/chokidar.js b/dev/chokidar.js index c90d8476f..3163c5d48 100755 --- a/dev/chokidar.js +++ b/dev/chokidar.js @@ -1,10 +1,12 @@ #!/usr/bin/env node +const path = require('path') + const chokidar = require('chokidar') const program = require('commander') -const local = require('./capture/local') const fse = require('fs-extra') -const path = require('path') const open = require('open') + +const local = require('./capture/local') const scenarioHelpers = require('../test/support/helpers/scenarios') program diff --git a/dev/remote/automated_registration.js b/dev/remote/automated_registration.js index 102d36ce0..d2174aeff 100644 --- a/dev/remote/automated_registration.js +++ b/dev/remote/automated_registration.js @@ -4,9 +4,11 @@ * @flow */ -const cheerio = require('cheerio') const crypto = require('crypto') const url = require('url') + +const cheerio = require('cheerio') + const Registration = require('../../core/remote/registration') const { logger } = require('../../core/utils/logger') diff --git a/dev/remote/change-dir-exclusions.js b/dev/remote/change-dir-exclusions.js index ee44fa3a4..1e799ce69 100644 --- a/dev/remote/change-dir-exclusions.js +++ b/dev/remote/change-dir-exclusions.js @@ -1,11 +1,13 @@ require('../../core/globals') +const path = require('path') + const { app } = require('electron') -const yargs = require('yargs') +const _ = require('lodash') const treeify = require('treeify') -const path = require('path') -const OldCozyClient = require('cozy-client-js').Client +const yargs = require('yargs') + const { default: CozyClient, Q } = require('cozy-client') -const _ = require('lodash') +const OldCozyClient = require('cozy-client-js').Client const { Config } = require('../../core/config') const { @@ -108,7 +110,11 @@ async function getDirectoryContent(context) { } = j if (_deleted) continue - const parentPath = path.dirname(dirPath).split('/').slice(1).join('.') + const parentPath = path + .dirname(dirPath) + .split('/') + .slice(1) + .join('.') const key = parentPath === '' ? 
dir.name : `${dir.name}.${parentPath}` const parent = _.get(dirContent, key) if (!parent) continue diff --git a/dev/remote/generate-test-env.js b/dev/remote/generate-test-env.js index 18a65459b..7edfff2d7 100644 --- a/dev/remote/generate-test-env.js +++ b/dev/remote/generate-test-env.js @@ -1,11 +1,12 @@ require('../../core/globals') -const cozy = require('cozy-client-js') -const fse = require('fs-extra') const { app, session } = require('electron') +const fse = require('fs-extra') + +const cozy = require('cozy-client-js') -const pkg = require('../../package.json') const automatedRegistration = require('./automated_registration') const network = require('../../gui/js/network') +const pkg = require('../../package.json') const cozyUrl = chooseCozyUrl(process.env.BUILD_JOB) || diff --git a/gui/js/actions.js b/gui/js/actions.js index af253070c..5e56d96be 100644 --- a/gui/js/actions.js +++ b/gui/js/actions.js @@ -1,6 +1,7 @@ const { spawn } = require('child_process') -const { app } = require('electron') + const { Promise } = require('bluebird') +const { app } = require('electron') const { logger } = require('../../core/utils/logger') diff --git a/gui/js/appmenu.js b/gui/js/appmenu.js index 50cd0baea..45331ec92 100644 --- a/gui/js/appmenu.js +++ b/gui/js/appmenu.js @@ -4,6 +4,7 @@ */ const { Menu } = require('electron') + const { translate } = require('./i18n') // FIXME: killme diff --git a/gui/js/details.window.js b/gui/js/details.window.js index 5b0208527..74a5dca94 100644 --- a/gui/js/details.window.js +++ b/gui/js/details.window.js @@ -1,18 +1,18 @@ /* @flow */ const path = require('path') + const { enable: enableRemoteModule } = require('@electron/remote/main') const { buildTranslations } = require('./i18n') const WindowManager = require('./window_manager') - -const SCREEN_WIDTH = 750 -const SCREEN_HEIGHT = 800 - const log = require('../../core/app').logger({ component: 'GUI' }) +const SCREEN_WIDTH = 750 +const SCREEN_HEIGHT = 800 + /*:: import type { UserAlert } from '../../core/syncstate' */ diff --git a/gui/js/help.window.js b/gui/js/help.window.js index 712dffb75..fd3dd01ae 100644 --- a/gui/js/help.window.js +++ b/gui/js/help.window.js @@ -1,8 +1,12 @@ /* @flow */ -const WindowManager = require('./window_manager') const { enable: enableRemoteModule } = require('@electron/remote/main') +const WindowManager = require('./window_manager') +const log = require('../../core/app').logger({ + component: 'GUI' +}) + /*:: import type { Event as ElectronEvent } from 'electron' */ @@ -10,10 +14,6 @@ import type { Event as ElectronEvent } from 'electron' const HELP_SCREEN_WIDTH = 768 const HELP_SCREEN_HEIGHT = 570 -const log = require('../../core/app').logger({ - component: 'GUI' -}) - module.exports = class HelpWM extends WindowManager { windowOptions() { return { diff --git a/gui/js/i18n.js b/gui/js/i18n.js index 94e5f2001..bb431663d 100644 --- a/gui/js/i18n.js +++ b/gui/js/i18n.js @@ -23,13 +23,13 @@ const init = appRef => { } const buildTranslations = keys => - keys.reduce( - (translations /*: { [string]: string } */, string /*: string */) => { - translations[string] = translate(string) - return translations - }, - {} - ) + keys.reduce(( + translations /*: { [string]: string } */, + string /*: string */ + ) => { + translations[string] = translate(string) + return translations + }, {}) const translate = key => app.translations[key] || key.substr(key.indexOf(' ') + 1) // Key without prefix diff --git a/gui/js/lastfiles.js b/gui/js/lastfiles.js index be51c09e9..6cd2b1774 100644 --- 
a/gui/js/lastfiles.js +++ b/gui/js/lastfiles.js @@ -5,6 +5,7 @@ const fs = require('fs') const path = require('path') + const async = require('async') let lastFilesPath = '' diff --git a/gui/js/markdown-viewer.window.js b/gui/js/markdown-viewer.window.js index 0a3eb0563..8fef7edfe 100644 --- a/gui/js/markdown-viewer.window.js +++ b/gui/js/markdown-viewer.window.js @@ -1,10 +1,14 @@ /* @flow */ const path = require('path') + const { enable: enableRemoteModule } = require('@electron/remote/main') const { buildTranslations } = require('./i18n') const WindowManager = require('./window_manager') +const log = require('../../core/app').logger({ + component: 'GUI/MarkdownViewer' +}) /*:: import type { WindowBanner } from './window_manager' @@ -13,10 +17,6 @@ import type { WindowBanner } from './window_manager' const VIEWER_SCREEN_WIDTH = 768 const VIEWER_SCREEN_HEIGHT = 570 -const log = require('../../core/app').logger({ - component: 'GUI/MarkdownViewer' -}) - module.exports = class MarkdownViewerWindow extends WindowManager { windowOptions() { return { diff --git a/gui/js/network/agent.js b/gui/js/network/agent.js index 62551d3d8..aee9b56bb 100644 --- a/gui/js/network/agent.js +++ b/gui/js/network/agent.js @@ -9,13 +9,14 @@ const http = require('http') const https = require('https') -const { LRUCache } = require('lru-cache') + const { Agent } = require('agent-base') -const { PacProxyAgent } = require('pac-proxy-agent') const { HttpProxyAgent } = require('http-proxy-agent') const { HttpsProxyAgent } = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') const _ = require('lodash') +const { LRUCache } = require('lru-cache') +const { PacProxyAgent } = require('pac-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') const { logger } = require('../../../core/utils/logger') const log = logger({ @@ -212,34 +213,35 @@ class ProxyAgent extends Agent { // getProxyForUrl uses the given Electron Session to resolve the proxy to use // for the given requested URL and returns its URL. // It is meant to be used with `ProxyAgent`. 
-const getProxyForUrl = - (session /*: Session */) => async (reqUrl /*: string */) => { - log.debug('getProxyForUrl', { reqUrl }) - const proxy = await session.resolveProxy(reqUrl) - if (!proxy) { - return '' - } +const getProxyForUrl = (session /*: Session */) => async ( + reqUrl /*: string */ +) => { + log.debug('getProxyForUrl', { reqUrl }) + const proxy = await session.resolveProxy(reqUrl) + if (!proxy) { + return '' + } - const proxies = String(proxy) - .trim() - .split(/\s*;\s*/g) - .filter(Boolean) - - // XXX: right now, only the first proxy specified will be used - const first = proxies[0] - const [type, addr] = first.split(/\s+/) - - if ('DIRECT' == type) { - return '' - } else if ('PROXY' == type) { - return `http://${addr}` - } else if (['SOCKS', 'SOCKS5', 'HTTPS'].includes(type)) { - return `${type.toLowerCase()}://${addr}` - } else { - log.error('Unknown proxy type', { type, reqUrl }) - return '' - } + const proxies = String(proxy) + .trim() + .split(/\s*;\s*/g) + .filter(Boolean) + + // XXX: right now, only the first proxy specified will be used + const first = proxies[0] + const [type, addr] = first.split(/\s+/) + + if ('DIRECT' == type) { + return '' + } else if ('PROXY' == type) { + return `http://${addr}` + } else if (['SOCKS', 'SOCKS5', 'HTTPS'].includes(type)) { + return `${type.toLowerCase()}://${addr}` + } else { + log.error('Unknown proxy type', { type, reqUrl }) + return '' } +} module.exports = { ProxyAgent, diff --git a/gui/js/network/index.js b/gui/js/network/index.js index ad0c1e8d8..b70aebf69 100644 --- a/gui/js/network/index.js +++ b/gui/js/network/index.js @@ -7,12 +7,13 @@ const dns = require('dns') const http = require('http') const https = require('https') -const yargs = require('yargs') -const electronFetch = require('electron-fetch').default + const { app } = require('electron') +const electronFetch = require('electron-fetch').default +const yargs = require('yargs') -const { logger } = require('../../../core/utils/logger') const { ProxyAgent, getProxyForUrl } = require('./agent') +const { logger } = require('../../../core/utils/logger') /*:: import { App, Session } from 'electron' diff --git a/gui/js/onboarding.window.js b/gui/js/onboarding.window.js index 5e76b1ee7..bb7561d07 100644 --- a/gui/js/onboarding.window.js +++ b/gui/js/onboarding.window.js @@ -1,28 +1,27 @@ /* @flow */ -const { addFileManagerShortcut } = require('./shortcut') +const { enable: enableRemoteModule } = require('@electron/remote/main') const { dialog, session, BrowserView, shell } = require('electron') + const autoLaunch = require('./autolaunch') const defaults = require('./defaults') const { translate } = require('./i18n') const { SESSION_PARTITION_NAME } = require('./network') -const { enable: enableRemoteModule } = require('@electron/remote/main') +const { addFileManagerShortcut } = require('./shortcut') +const WindowManager = require('./window_manager') +const log = require('../../core/app').logger({ + component: 'GUI' +}) /*:: import type { Event as ElectronEvent } from 'electron' */ -const log = require('../../core/app').logger({ - component: 'GUI' -}) - const ONBOARDING_SCREEN_WIDTH = 768 const ONBOARDING_SCREEN_HEIGHT = 570 const LOGIN_SCREEN_WIDTH = ONBOARDING_SCREEN_WIDTH const LOGIN_SCREEN_HEIGHT = 740 -const WindowManager = require('./window_manager') - module.exports = class OnboardingWM extends WindowManager { windowOptions() { return { diff --git a/gui/js/shortcut.js b/gui/js/shortcut.js index 00274c965..d9094a0e1 100644 --- a/gui/js/shortcut.js +++ 
b/gui/js/shortcut.js @@ -3,10 +3,11 @@ * @module gui/js/shortcut */ -const lnk = require('lnk') +const childProcess = require('child_process') const os = require('os') const path = require('path') -const childProcess = require('child_process') + +const lnk = require('lnk') const log = require('../../core/app').logger({ component: 'GUI' diff --git a/gui/js/tray.js b/gui/js/tray.js index fb5c81150..22c6739a0 100644 --- a/gui/js/tray.js +++ b/gui/js/tray.js @@ -3,11 +3,13 @@ * @module gui/js/tray */ -const { Tray, Menu, MenuItem, nativeImage } = require('electron') -const { translate } = require('./i18n') const path = require('path') + +const { Tray, Menu, MenuItem, nativeImage } = require('electron') const _ = require('lodash') +const { translate } = require('./i18n') + let tray = null let lastStatus = '' diff --git a/gui/js/tray.window.js b/gui/js/tray.window.js index cbc32e205..a8ac9f926 100644 --- a/gui/js/tray.window.js +++ b/gui/js/tray.window.js @@ -1,18 +1,23 @@ /* @flow */ -const electron = require('electron') -const { dialog, shell } = electron const path = require('path') + const { enable: enableRemoteModule } = require('@electron/remote/main') +const electron = require('electron') +const { dialog, shell } = electron -const { openNote } = require('../utils/notes') -const { openUrl } = require('../utils/urls') -const { openInWeb } = require('../utils/web') +const { restart } = require('./actions') const autoLaunch = require('./autolaunch') -const DetailsWM = require('./details.window') const CozyWebWM = require('./cozy-web.window') +const DetailsWM = require('./details.window') const { translate } = require('./i18n') -const { restart } = require('./actions') +const WindowManager = require('./window_manager') +const log = require('../../core/app').logger({ + component: 'GUI' +}) +const { openNote } = require('../utils/notes') +const { openUrl } = require('../utils/urls') +const { openInWeb } = require('../utils/web') /*:: import type { App as ElectronApp, Event as ElectronEvent } from 'electron' @@ -27,10 +32,6 @@ type Bounds = { } */ -const log = require('../../core/app').logger({ - component: 'GUI' -}) - const DASHBOARD_SCREEN_WIDTH = 440 const DASHBOARD_SCREEN_HEIGHT = 830 @@ -96,8 +97,6 @@ const popoverBounds = ( return newBounds } -const WindowManager = require('./window_manager') - module.exports = class TrayWM extends WindowManager { constructor( app /*: ElectronApp */, diff --git a/gui/js/updater.window.js b/gui/js/updater.window.js index 1eacbb79a..5df40f948 100644 --- a/gui/js/updater.window.js +++ b/gui/js/updater.window.js @@ -1,22 +1,23 @@ /* @flow */ +const path = require('path') + +const { enable: enableRemoteModule } = require('@electron/remote/main') const Promise = require('bluebird') -const WindowManager = require('./window_manager') +const { dialog } = require('electron') const { autoUpdater } = require('electron-updater') + const { translate } = require('./i18n') -const { dialog } = require('electron') -const path = require('path') -const { enable: enableRemoteModule } = require('@electron/remote/main') +const WindowManager = require('./window_manager') +const log = require('../../core/app').logger({ + component: 'GUI:autoupdater' +}) /*:: import type { App as ElectronApp } from 'electron' import type { App as CoreApp } from '../../core/app' */ -const log = require('../../core/app').logger({ - component: 'GUI:autoupdater' -}) - /** The delay starting from the update info request after which it is skipped. 
* * Long enough so users with slow connection have chances to start downloading diff --git a/gui/js/window_manager.js b/gui/js/window_manager.js index 226b4e950..79889a70f 100644 --- a/gui/js/window_manager.js +++ b/gui/js/window_manager.js @@ -3,9 +3,14 @@ * @module gui/js/window_manager */ +const path = require('path') + const { BrowserWindow, ipcMain, shell } = require('electron') const _ = require('lodash') -const path = require('path') + +const log = require('../../core/app').logger({ + component: 'windows' +}) const capabilities = require('../../core/utils/capabilities') const flags = require('../../core/utils/flags') @@ -19,10 +24,6 @@ export type WindowBanner = { } */ -const log = require('../../core/app').logger({ - component: 'windows' -}) - module.exports = class WindowManager { constructor(app, desktop) { this.win = null diff --git a/gui/main.js b/gui/main.js index 965f989e2..641e97353 100644 --- a/gui/main.js +++ b/gui/main.js @@ -4,6 +4,10 @@ require('../core/globals') // Initialize `remote` module so that renderer processes can use it. require('@electron/remote/main').initialize() +const os = require('os') +const path = require('path') + +const async = require('async') const { app, Menu, @@ -19,40 +23,33 @@ if (process.env.INSECURE_SSL) { } const Desktop = require('../core/app.js') -const sentry = require('../core/utils/sentry') +const { exit, restart } = require('./js/actions') +const { buildAppMenu } = require('./js/appmenu') +const autoLaunch = require('./js/autolaunch') +const { fileInfo } = require('./js/fileutils') +const HelpWM = require('./js/help.window.js') +const i18n = require('./js/i18n') +const lastFiles = require('./js/lastfiles') +const OnboardingWM = require('./js/onboarding.window.js') +const tray = require('./js/tray') +const TrayWM = require('./js/tray.window.js') +const UpdaterWM = require('./js/updater.window.js') const { openNote } = require('./utils/notes') +const config = require('../core/config') +const { + SYNC_DIR_EMPTY_MESSAGE, + SYNC_DIR_UNLINKED_MESSAGE +} = require('../core/local/errors') +const sentry = require('../core/utils/sentry') const pkg = require('../package.json') - -const path = require('path') -const os = require('os') -const async = require('async') - const network = require('./js/network') +const { MigrationFailedError } = require('../core/migrations') const { COZY_CLIENT_REVOKED_CODE, COZY_CLIENT_REVOKED_MESSAGE } = require('../core/remote/errors') -const { - SYNC_DIR_EMPTY_MESSAGE, - SYNC_DIR_UNLINKED_MESSAGE -} = require('../core/local/errors') -const { MigrationFailedError } = require('../core/migrations') -const config = require('../core/config') const winRegistry = require('../core/utils/win_registry') - -const tray = require('./js/tray') -const TrayWM = require('./js/tray.window.js') -const UpdaterWM = require('./js/updater.window.js') -const HelpWM = require('./js/help.window.js') -const OnboardingWM = require('./js/onboarding.window.js') - -const autoLaunch = require('./js/autolaunch') -const lastFiles = require('./js/lastfiles') -const { fileInfo } = require('./js/fileutils') -const { buildAppMenu } = require('./js/appmenu') -const i18n = require('./js/i18n') const { translate } = i18n -const { exit, restart } = require('./js/actions') const DAILY = 3600 * 24 * 1000 diff --git a/gui/ports.js b/gui/ports.js index b7efe2488..93d41577b 100644 --- a/gui/ports.js +++ b/gui/ports.js @@ -2,9 +2,9 @@ 'use strict' +const remote = require('@electron/remote') const electron = require('electron') const { ipcRenderer } = electron 
-const remote = require('@electron/remote') /*:: import type { SyncStatus, UserAlert, SyncError } from '../core/syncstate' @@ -179,15 +179,12 @@ elmectron.ports.userActionCommand.subscribe(([cmd, action]) => { ipcRenderer.send('userActionCommand', cmd, action) }) -ipcRenderer.on( - 'sync-state', - ( - event, - newState /*: { status: SyncStatus, remaining: number, userAlerts: UserAlert[], errors: SyncError[] } */ - ) => { - elmectron.ports.syncState.send(newState) - } -) +ipcRenderer.on('sync-state', ( + event, + newState /*: { status: SyncStatus, remaining: number, userAlerts: UserAlert[], errors: SyncError[] } */ +) => { + elmectron.ports.syncState.send(newState) +}) ipcRenderer.on('transfer', (event, info) => { elmectron.ports.transfer.send(info) diff --git a/gui/utils/notes.js b/gui/utils/notes.js index 2709fb938..77b2ceb2b 100644 --- a/gui/utils/notes.js +++ b/gui/utils/notes.js @@ -6,8 +6,8 @@ const { app, dialog, shell } = require('electron') const Desktop = require('../../core/app.js') -const MarkdownViewerWindow = require('../js/markdown-viewer.window.js') const i18n = require('../js/i18n') +const MarkdownViewerWindow = require('../js/markdown-viewer.window.js') const { translate } = i18n const log = Desktop.logger({ diff --git a/test/integration/add.js b/test/integration/add.js index 8371d453a..2e15ba255 100644 --- a/test/integration/add.js +++ b/test/integration/add.js @@ -2,19 +2,20 @@ /* eslint-env mocha */ const path = require('path') -const should = require('should') + const fse = require('fs-extra') +const should = require('should') const sinon = require('sinon') const metadata = require('../../core/metadata') +const { logger } = require('../../core/utils/logger') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const cozy = cozyHelpers.cozy -const { logger } = require('../../core/utils/logger') const log = new logger({ component: 'TEST' }) @@ -27,7 +28,7 @@ describe('Add', () => { beforeEach(pouchHelpers.createDatabase) beforeEach(cozyHelpers.deleteAll) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() @@ -38,7 +39,7 @@ describe('Add', () => { helpers.spyPouch() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) diff --git a/test/integration/case_or_encoding_change.js b/test/integration/case_or_encoding_change.js index bd30dd1c6..24152d56e 100644 --- a/test/integration/case_or_encoding_change.js +++ b/test/integration/case_or_encoding_change.js @@ -3,10 +3,10 @@ const should = require('should') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') describe('Case or encoding change', () => { let cozy, helpers @@ -20,14 +20,14 @@ describe('Case or encoding change', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { cozy = cozyHelpers.cozy helpers = TestHelpers.init(this) await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + 
afterEach(async function() { await helpers.stop() }) diff --git a/test/integration/conflict_resolution.js b/test/integration/conflict_resolution.js index 73c84efdb..6fc6af3df 100644 --- a/test/integration/conflict_resolution.js +++ b/test/integration/conflict_resolution.js @@ -1,20 +1,18 @@ /* @flow */ /* eslint-env mocha */ -const should = require('should') -const sinon = require('sinon') - +const { FetchError } = require('electron-fetch') const fse = require('fs-extra') const _ = require('lodash') -const { FetchError } = require('electron-fetch') +const should = require('should') +const sinon = require('sinon') const metadata = require('../../core/metadata') - const Builders = require('../support/builders') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const cozy = cozyHelpers.cozy @@ -30,14 +28,14 @@ describe('Conflict resolution', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) builders = new Builders(this) await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) @@ -51,7 +49,10 @@ describe('Conflict resolution', () => { await helpers.local.syncDir.ensureDir('foo') await helpers.prep.putFolderAsync( 'local', - builders.metadir().path('foo').build() + builders + .metadir() + .path('foo') + .build() ) should(await helpers.local.tree()).deepEqual(['foo-conflict-.../']) @@ -411,7 +412,10 @@ describe('Conflict resolution', () => { beforeEach('set up conflict', async () => { await helpers.prep.putFolderAsync( 'local', - builders.metadir().path('foo').build() + builders + .metadir() + .path('foo') + .build() ) await cozy.files.create('whatever', { name: 'foo' }) }) diff --git a/test/integration/differential_sync.js b/test/integration/differential_sync.js index 65685bcca..b2c539cdb 100644 --- a/test/integration/differential_sync.js +++ b/test/integration/differential_sync.js @@ -1,21 +1,20 @@ /* @flow */ /* eslint-env mocha */ +const { Promise } = require('bluebird') const should = require('should') const sinon = require('sinon') -const { Promise } = require('bluebird') - -const configHelpers = require('../support/helpers/config') -const cozyHelpers = require('../support/helpers/cozy') -const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') -const Builders = require('../support/builders') const { DIR_TYPE, FILES_DOCTYPE, OAUTH_CLIENTS_DOCTYPE } = require('../../core/remote/constants') +const Builders = require('../support/builders') +const TestHelpers = require('../support/helpers') +const configHelpers = require('../support/helpers/config') +const cozyHelpers = require('../support/helpers/cozy') +const pouchHelpers = require('../support/helpers/pouch') const path = remoteDoc => remoteDoc.type === DIR_TYPE @@ -34,7 +33,7 @@ describe('Differential synchronization', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { this.cozy = cozy = await cozyHelpers.oauthCozy(this.config) helpers = TestHelpers.init(this) @@ -45,13 +44,16 @@ describe('Differential synchronization', () => { builders = new 
Builders({ cozy }) }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) let remoteDir, remoteFile - beforeEach(async function () { - remoteDir = await builders.remoteDir().name('Photos').create() + beforeEach(async function() { + remoteDir = await builders + .remoteDir() + .name('Photos') + .create() remoteFile = await builders .remoteFile() .inDir(remoteDir) @@ -64,7 +66,7 @@ describe('Differential synchronization', () => { describe('when a folder is excluded from synchronization', () => { let excludedDir, oauthClient - beforeEach(async function () { + beforeEach(async function() { excludedDir = { _id: remoteDir._id, _type: FILES_DOCTYPE } oauthClient = { _id: this.config.client.clientID, @@ -73,7 +75,7 @@ describe('Differential synchronization', () => { }) context('and the folder was never synced', () => { - it('does not propagate it or its content to the local filesystem', async function () { + it('does not propagate it or its content to the local filesystem', async function() { await files.addNotSynchronizedDirectories(oauthClient, [excludedDir]) await helpers.pullAndSyncAll() @@ -86,11 +88,11 @@ describe('Differential synchronization', () => { }) context('and the folder was previously synced', () => { - beforeEach(async function () { + beforeEach(async function() { await helpers.pullAndSyncAll() }) - it('propagates its deletion to the local filesystem', async function () { + it('propagates its deletion to the local filesystem', async function() { should(await helpers.local.treeWithoutTrash()).deepEqual([ path(remoteDir), path(remoteFile) @@ -110,7 +112,7 @@ describe('Differential synchronization', () => { describe('when a folder is re-included into synchronization', () => { let excludedDir, oauthClient - beforeEach(async function () { + beforeEach(async function() { excludedDir = { _id: remoteDir._id, _type: FILES_DOCTYPE } oauthClient = { _id: this.config.client.clientID, @@ -124,7 +126,7 @@ describe('Differential synchronization', () => { await helpers.pullAndSyncAll() }) - it('propagates its addition and that of its content to the local filesystem', async function () { + it('propagates its addition and that of its content to the local filesystem', async function() { should(await helpers.local.treeWithoutTrash()).deepEqual([]) await files.removeNotSynchronizedDirectories(oauthClient, [excludedDir]) @@ -143,7 +145,7 @@ describe('Differential synchronization', () => { describe('when a folder is created locally with the same path as an excluded folder', () => { let excludedDir, oauthClient - beforeEach(async function () { + beforeEach(async function() { excludedDir = { _id: remoteDir._id, _type: FILES_DOCTYPE } oauthClient = { _id: this.config.client.clientID, @@ -152,7 +154,7 @@ describe('Differential synchronization', () => { }) context('and the user chooses to create a conflict', () => { - beforeEach(function () { + beforeEach(function() { const originalBlockSyncFor = helpers._sync.blockSyncFor sinon.stub(helpers._sync, 'blockSyncFor') @@ -167,12 +169,12 @@ describe('Differential synchronization', () => { }) helpers._sync.blockSyncFor.callThrough() }) - afterEach(async function () { + afterEach(async function() { helpers._sync.blockSyncFor.restore() await helpers.local.side.stop() }) - it('renames the local folder with a conflict suffix before synchronizing it', async function () { + it('renames the local folder with a conflict suffix before synchronizing it', async function() { await files.addNotSynchronizedDirectories(oauthClient, 
[excludedDir]) await helpers.pullAndSyncAll() @@ -211,7 +213,7 @@ describe('Differential synchronization', () => { }) context('and the user chooses to merge both folders', () => { - beforeEach(function () { + beforeEach(function() { const originalBlockSyncFor = helpers._sync.blockSyncFor sinon.stub(helpers._sync, 'blockSyncFor') @@ -230,11 +232,11 @@ describe('Differential synchronization', () => { }) helpers._sync.blockSyncFor.callThrough() }) - afterEach(function () { + afterEach(function() { helpers._sync.blockSyncFor.restore() }) - it('does not rename the local folder and re-includes the remote one', async function () { + it('does not rename the local folder and re-includes the remote one', async function() { await files.addNotSynchronizedDirectories(oauthClient, [excludedDir]) await helpers.pullAndSyncAll() diff --git a/test/integration/executable.js b/test/integration/executable.js index 1774d9ed8..ce751f6da 100644 --- a/test/integration/executable.js +++ b/test/integration/executable.js @@ -3,6 +3,7 @@ const should = require('should') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const { @@ -10,7 +11,6 @@ const { onPlatforms } = require('../support/helpers/platform') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const { platform } = process @@ -25,7 +25,7 @@ describe('Executable handling', () => { afterEach(pouchHelpers.cleanDatabase) afterEach(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { cozy = cozyHelpers.cozy helpers = TestHelpers.init(this) syncDir = helpers.local.syncDir @@ -33,7 +33,7 @@ describe('Executable handling', () => { await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) diff --git a/test/integration/full_loop.js b/test/integration/full_loop.js index 3c9847311..7fcc7584b 100644 --- a/test/integration/full_loop.js +++ b/test/integration/full_loop.js @@ -3,10 +3,10 @@ const should = require('should') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const cozy = cozyHelpers.cozy @@ -22,7 +22,7 @@ describe('Full watch/merge/sync/repeat loop', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() @@ -32,7 +32,7 @@ describe('Full watch/merge/sync/repeat loop', () => { helpers.spyPouch() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) diff --git a/test/integration/id_conflict.js b/test/integration/id_conflict.js index 3ef26e18d..466cf18e2 100644 --- a/test/integration/id_conflict.js +++ b/test/integration/id_conflict.js @@ -4,7 +4,7 @@ const should = require('should') const config = require('../../core/config') - +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const { @@ -14,7 +14,6 @@ const { onHFS } = require('../support/helpers/platform') const pouchHelpers = 
require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') describe('Identity conflict', () => { let cozy, helpers @@ -28,14 +27,14 @@ describe('Identity conflict', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { cozy = cozyHelpers.cozy helpers = TestHelpers.init(this) await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) diff --git a/test/integration/interrupted_sync.js b/test/integration/interrupted_sync.js index c72a6106f..1e0b4e08b 100644 --- a/test/integration/interrupted_sync.js +++ b/test/integration/interrupted_sync.js @@ -4,10 +4,10 @@ const should = require('should') const sinon = require('sinon') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const cozy = cozyHelpers.cozy @@ -23,7 +23,7 @@ describe('Sync gets interrupted, initialScan occurs', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() @@ -33,7 +33,7 @@ describe('Sync gets interrupted, initialScan occurs', () => { helpers.spyPouch() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) @@ -72,7 +72,7 @@ describe('Sync gets interrupted, initialScan occurs', () => { }) describe('remote file update', () => { - it('does not override the remote file with the local version', async function () { + it('does not override the remote file with the local version', async function() { const path = 'file' await helpers.local.syncDir.outputFile(path, 'original content') @@ -97,7 +97,7 @@ describe('Sync gets interrupted, initialScan occurs', () => { }) describe('local file move outside dir then update then dir trashing', () => { - beforeEach('run actions', async function () { + beforeEach('run actions', async function() { const dirPath = 'dir/' const fileSrcPath = 'dir/file' const fileDstPath = 'file' @@ -138,7 +138,7 @@ describe('Sync gets interrupted, initialScan occurs', () => { } }) - it('moves the file and trashes the dir', async function () { + it('moves the file and trashes the dir', async function() { await should(helpers.trees('local', 'remote')).be.fulfilledWith({ local: ['file'], remote: ['file'] diff --git a/test/integration/move.js b/test/integration/move.js index 2cc60fedd..b3aba2623 100644 --- a/test/integration/move.js +++ b/test/integration/move.js @@ -1,21 +1,21 @@ /* @flow */ /* eslint-env mocha */ -const _ = require('lodash') const path = require('path') + +const _ = require('lodash') const should = require('should') +const { ROOT_DIR_ID, TRASH_DIR_ID } = require('../../core/remote/constants') +const { logger } = require('../../core/utils/logger') const Builders = require('../support/builders') const dbBuilders = require('../support/builders/db') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') -const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const { onPlatform 
} = require('../support/helpers/platform') +const pouchHelpers = require('../support/helpers/pouch') -const { ROOT_DIR_ID, TRASH_DIR_ID } = require('../../core/remote/constants') - -const { logger } = require('../../core/utils/logger') const log = new logger({ component: 'TEST' }) /*:: @@ -47,7 +47,7 @@ describe('Move', () => { beforeEach(pouchHelpers.createDatabase) beforeEach(cozyHelpers.deleteAll) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) pouch = helpers.pouch prep = helpers.prep @@ -55,7 +55,7 @@ describe('Move', () => { await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) @@ -565,7 +565,10 @@ describe('Move', () => { it('local', async () => { const oldFolder = await pouch.byRemoteIdMaybe(dir._id) - const doc = builders.metadir().path('parent/dst/dir').build() + const doc = builders + .metadir() + .path('parent/dst/dir') + .build() await prep.moveFolderAsync('local', doc, oldFolder) @@ -690,7 +693,10 @@ describe('Move', () => { helpers.resetPouchSpy() const oldFolder = await pouch.byRemoteIdMaybe(dir._id) - const doc = builders.metadir().path('parent/dst/dir').build() + const doc = builders + .metadir() + .path('parent/dst/dir') + .build() await prep.moveFolderAsync('local', doc, oldFolder) diff --git a/test/integration/mtime-update.js b/test/integration/mtime-update.js index 33c259b44..0c835c937 100644 --- a/test/integration/mtime-update.js +++ b/test/integration/mtime-update.js @@ -5,12 +5,11 @@ const should = require('should') const { ROOT_DIR_ID } = require('../../core/remote/constants') const timestamp = require('../../core/utils/timestamp') - +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') -const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const platform = require('../support/helpers/platform') +const pouchHelpers = require('../support/helpers/pouch') const cozy = cozyHelpers.cozy @@ -22,11 +21,11 @@ describe('Update only mtime', () => { beforeEach(pouchHelpers.createDatabase) beforeEach(cozyHelpers.deleteAll) - beforeEach(function () { + beforeEach(function() { helpers = TestHelpers.init(this) helpers.local.setupTrash() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) @@ -37,7 +36,7 @@ describe('Update only mtime', () => { describe('of a file', () => { context('when update is made on local filesystem', () => { let oldUpdatedAt - beforeEach('create file and update mtime', async function () { + beforeEach('create file and update mtime', async function() { await helpers.remote.ignorePreviousChanges() oldUpdatedAt = new Date() @@ -76,7 +75,7 @@ describe('Update only mtime', () => { context('when update is made on remote Cozy', () => { let file, oldUpdatedAt - beforeEach('create file and update mtime', async function () { + beforeEach('create file and update mtime', async function() { await helpers.remote.ignorePreviousChanges() oldUpdatedAt = new Date() @@ -117,7 +116,7 @@ describe('Update only mtime', () => { describe('of a folder', () => { context('when update is made on local filesystem', () => { let oldUpdatedAt - beforeEach('create folder and update mtime', async function () { + beforeEach('create folder and update mtime', async function() { await helpers.remote.ignorePreviousChanges() oldUpdatedAt 
= new Date() @@ -150,7 +149,7 @@ describe('Update only mtime', () => { context('when update is made on remote Cozy', () => { let oldUpdatedAt, dir - beforeEach('create folder and update mtime', async function () { + beforeEach('create folder and update mtime', async function() { await helpers.remote.ignorePreviousChanges() oldUpdatedAt = new Date() diff --git a/test/integration/notes.js b/test/integration/notes.js index 405f51e23..508b81b77 100644 --- a/test/integration/notes.js +++ b/test/integration/notes.js @@ -1,19 +1,19 @@ /* @flow */ /* eslint-env mocha */ -const should = require('should') const path = require('path') +const should = require('should') + +const { TRASH_DIR_ID } = require('../../core/remote/constants') +const { isNote } = require('../../core/utils/notes') +const timestamp = require('../../core/utils/timestamp') const Builders = require('../support/builders') const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const { TRASH_DIR_ID } = require('../../core/remote/constants') -const { isNote } = require('../../core/utils/notes') -const timestamp = require('../../core/utils/timestamp') - describe('Update', () => { let builders, helpers @@ -25,7 +25,7 @@ describe('Update', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { builders = new Builders({ cozy: cozyHelpers.cozy, pouch: this.pouch }) helpers = TestHelpers.init(this) @@ -33,7 +33,7 @@ describe('Update', () => { await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) @@ -89,7 +89,10 @@ describe('Update', () => { describe('Cozy Note move', () => { let note beforeEach('create note', async () => { - await builders.remoteDir().name('dst').create() + await builders + .remoteDir() + .name('dst') + .create() note = await builders .remoteNote() .name('note.cozy-note') @@ -138,7 +141,10 @@ describe('Update', () => { describe('Cozy Note move with update', () => { let dst, note beforeEach('create note', async () => { - dst = await builders.remoteDir().name('dst').create() + dst = await builders + .remoteDir() + .name('dst') + .create() note = await builders .remoteNote() .name('note.cozy-note') @@ -177,7 +183,9 @@ describe('Update', () => { it('updates the note metadata', async () => { const updatedDoc = await helpers.pouch.byRemoteIdMaybe(note._id) - should(updatedDoc).have.property('name').equal(note.name) + should(updatedDoc) + .have.property('name') + .equal(note.name) }) }) @@ -211,7 +219,9 @@ describe('Update', () => { name: note.name, dir_id: dst._id }) - should(updatedRemote).have.property('md5sum').not.equal(note.md5sum) + should(updatedRemote) + .have.property('md5sum') + .not.equal(note.md5sum) should(isNote(updatedRemote)).be.true() }) diff --git a/test/integration/permanent_deletion.js b/test/integration/permanent_deletion.js index 383444aaa..32d83111a 100644 --- a/test/integration/permanent_deletion.js +++ b/test/integration/permanent_deletion.js @@ -3,10 +3,10 @@ const should = require('should') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = 
require('../support/helpers') const cozy = cozyHelpers.cozy @@ -22,12 +22,12 @@ describe('Permanent deletion remote', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) diff --git a/test/integration/platform_incompatibilities.js b/test/integration/platform_incompatibilities.js index 179a0c606..a0f33b9da 100644 --- a/test/integration/platform_incompatibilities.js +++ b/test/integration/platform_incompatibilities.js @@ -1,20 +1,20 @@ /* @flow */ /* eslint-env mocha */ +const path = require('path') + const should = require('should') const sinon = require('sinon') -const path = require('path') const metadata = require('../../core/metadata') -const timestamp = require('../../core/utils/timestamp') -const { INCOMPATIBLE_DOC_CODE } = require('../../core/sync/errors') const { DIR_TYPE } = require('../../core/remote/constants') - +const { INCOMPATIBLE_DOC_CODE } = require('../../core/sync/errors') +const timestamp = require('../../core/utils/timestamp') const Builders = require('../support/builders') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') /*:: import type { RemoteDir } from '../../core/remote/document' @@ -38,7 +38,7 @@ describe('Platform incompatibilities', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { cozy = cozyHelpers.cozy builders = new Builders({ cozy }) helpers = TestHelpers.init(this) @@ -54,7 +54,7 @@ describe('Platform incompatibilities', () => { helpers._sync.lifecycle.unblockFor(err.code) }) }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) @@ -75,8 +75,14 @@ describe('Platform incompatibilities', () => { should(helpers._sync.blockSyncFor).not.have.been.called() it('add incompatible dir and file', async () => { - await builders.remoteDir().name('di:r').create() - await builders.remoteFile().name('fi:le').create() + await builders + .remoteDir() + .name('di:r') + .create() + await builders + .remoteFile() + .name('fi:le') + .create() await helpers.pullAndSyncAll() should(await helpers.local.tree()).be.empty() @@ -85,7 +91,10 @@ describe('Platform incompatibilities', () => { }) it('add incompatible dir with two colons', async () => { - await builders.remoteDir().name('d:i:r').create() + await builders + .remoteDir() + .name('d:i:r') + .create() await helpers.pullAndSyncAll() should(await helpers.local.tree()).be.empty() @@ -376,7 +385,10 @@ describe('Platform incompatibilities', () => { .name('dir2') .updatedAt(...timestamp.spread(new Date())) .create() - const dir2 = builders.metadir().fromRemote(newRemoteDoc).build() + const dir2 = builders + .metadir() + .fromRemote(newRemoteDoc) + .build() await helpers.prep.moveFolderAsync('remote', dir2, dir) await helpers.syncAll() diff --git a/test/integration/sync_state.js b/test/integration/sync_state.js index 432b9bf81..6a2556937 100644 --- a/test/integration/sync_state.js +++ b/test/integration/sync_state.js @@ -5,10 +5,10 @@ const should = require('should') const sinon = require('sinon') 
const Builders = require('../support/builders') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const builders = new Builders() @@ -24,14 +24,14 @@ describe('Sync state', () => { let events, helpers - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) events = helpers.events sinon.spy(events, 'emit') await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) diff --git a/test/integration/trash.js b/test/integration/trash.js index 470a4b8a1..246e52681 100644 --- a/test/integration/trash.js +++ b/test/integration/trash.js @@ -2,12 +2,13 @@ /* eslint-env mocha */ const path = require('path') + const should = require('should') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const pouchHelpers = require('../support/helpers/pouch') -const TestHelpers = require('../support/helpers') const skipRemoteChange = async ({ helpers, cozy }) => { const since = await helpers.pouch.getRemoteSeq() @@ -30,7 +31,7 @@ describe('Trash', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { cozy = cozyHelpers.cozy helpers = TestHelpers.init(this) pouch = helpers.pouch @@ -39,7 +40,7 @@ describe('Trash', () => { await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) @@ -396,7 +397,7 @@ describe('Restore', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { cozy = cozyHelpers.cozy helpers = TestHelpers.init(this) diff --git a/test/integration/update.js b/test/integration/update.js index f03136dae..c2bbd4fc8 100644 --- a/test/integration/update.js +++ b/test/integration/update.js @@ -1,18 +1,18 @@ /* @flow */ /* eslint-env mocha */ +const path = require('path') + const Promise = require('bluebird') const _ = require('lodash') const should = require('should') -const path = require('path') const sinon = require('sinon') -const { logger } = require('../../core/utils/logger') const metadata = require('../../core/metadata') const { byPathKey } = require('../../core/pouch') const { MAX_SYNC_RETRIES } = require('../../core/sync') const syncErrors = require('../../core/sync/errors') - +const { logger } = require('../../core/utils/logger') const Builders = require('../support/builders') const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') @@ -32,7 +32,7 @@ describe('Update file', () => { afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { builders = new Builders({ cozy: cozyHelpers.cozy }) cozy = cozyHelpers.cozy helpers = TestHelpers.init(this) @@ -43,7 +43,7 @@ describe('Update file', () => { await helpers.local.setupTrash() await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) @@ -307,7 +307,7 @@ describe('Update file', () => { }) describe('M1, local merge M1, 
M2, remote sync M1, local merge M2', () => { - it('fails remote sync M1 & local merge M2', async function () { + it('fails remote sync M1 & local merge M2', async function() { if (process.env.CI) this.timeout(60 * 1000) await cozy.files.create('Initial content', { name: 'file' }) diff --git a/test/performance/local/watcher.js b/test/performance/local/watcher.js index b46873dde..72313ec35 100644 --- a/test/performance/local/watcher.js +++ b/test/performance/local/watcher.js @@ -1,15 +1,15 @@ /* eslint-env mocha */ /* @flow */ +const path = require('path') + const Promise = require('bluebird') const fse = require('fs-extra') -const path = require('path') const Watcher = require('../../../core/local/chokidar/watcher') - +const Builders = require('../../support/builders') const configHelpers = require('../../support/helpers/config') const pouchHelpers = require('../../support/helpers/pouch') -const Builders = require('../../support/builders') class SpyPrep { /*:: @@ -45,7 +45,12 @@ let abspath const createDoc = async (builders, dir, relpath /*: string */, ino) => { if (dir) { - await builders.metadir().path(relpath).ino(ino).upToDate().create() + await builders + .metadir() + .path(relpath) + .ino(ino) + .upToDate() + .create() } else { await builders .metafile() @@ -69,30 +74,30 @@ describe('LocalWatcher charge', () => { let watcher, prep, builders before('instanciate config', configHelpers.createConfig) before('instanciate pouch', pouchHelpers.createDatabase) - before('prepare builders', function () { + before('prepare builders', function() { builders = new Builders({ pouch: this.pouch }) }) - before('create outside dir', async function () { + before('create outside dir', async function() { await fse.emptyDir(path.resolve(path.join(this.syncPath, '..', 'outside'))) }) - before('instanciate local watcher', async function () { + before('instanciate local watcher', async function() { prep = new SpyPrep() const events = { emit: () => {} } // $FlowFixMe watcher = new Watcher(this.syncPath, prep, this.pouch, events) }) - before('cleanup test directory', async function () { + before('cleanup test directory', async function() { await fse.emptyDir(this.syncPath) }) - before(function () { + before(function() { abspath = relpath => path.join(this.syncPath, relpath.replace(/\//g, path.sep)) }) let events - before('prepare FS', async function () { + before('prepare FS', async function() { this.timeout(10 * 60 * 1000) const now = new Date() events = new Array(N) @@ -119,9 +124,9 @@ describe('LocalWatcher charge', () => { after('destroy pouch', pouchHelpers.cleanDatabase) after('clean config', configHelpers.cleanConfig) - describe(`with ${N} events`, function () { + describe(`with ${N} events`, function() { this.timeout(5 * 60 * 1000) - it('takes less than 5min and does not crash', async function () { + it('takes less than 5min and does not crash', async function() { this.timeout(5 * 60 * 1000) await watcher.onFlush(events) // TODO: Make benchmark more realistic with real actions, e.g. big moves. 
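Aside: throughout these test hunks the hooks stay classic function expressions (only the space before the parentheses changes), because Mocha exposes its test context on `this` only for regular functions; a minimal sketch, assuming the usual Mocha globals, of the pattern the suites above rely on (e.g. `this.timeout()` and the shared `this.syncPath`).

/* eslint-env mocha */
describe('context-dependent test', function() {
  beforeEach(function() {
    // `this` is the Mocha context here; an arrow function would not receive
    // it, so `this.timeout()` or fields like `this.syncPath` (prepared by the
    // config helpers in these suites) would be unreachable.
    this.timeout(60 * 1000)
  })

  it('can read shared state from the context', function() {
    // e.g. the watcher benchmark above empties `this.syncPath` in a before hook
  })
})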
diff --git a/test/property/device.js b/test/property/device.js index c34d06e07..45625eaa5 100644 --- a/test/property/device.js +++ b/test/property/device.js @@ -1,8 +1,9 @@ /* @flow */ -const fse = require('fs-extra') -const path = require('path') const { spawn } = require('child_process') +const path = require('path') + +const fse = require('fs-extra') const { clone } = require('lodash') /*:: diff --git a/test/property/local_watcher/index.js b/test/property/local_watcher/index.js index aa698dd3d..7f38f6ce9 100644 --- a/test/property/local_watcher/index.js +++ b/test/property/local_watcher/index.js @@ -1,31 +1,29 @@ /* @flow */ /* eslint-env mocha */ -const should = require('should') - const fs = require('fs') -const fse = require('fs-extra') -const glob = require('glob') const path = require('path') + const Promise = require('bluebird') +const fse = require('fs-extra') +const glob = require('glob') +const should = require('should') const winston = require('winston') const { id } = require('../../../core/metadata') const { baseLogger } = require('../../../core/utils/logger') - -const { ContextDir } = require('../../support/helpers/context_dir') const TmpDir = require('../../support/helpers/TmpDir') - +const { ContextDir } = require('../../support/helpers/context_dir') const { run } = require('../runner') -describe('Local watcher', function () { +describe('Local watcher', function() { this.timeout(240000) this.slow(30000) const scenarios = glob.sync(path.join(__dirname, '*.json')) scenarios.forEach(scenario => { scenario = path.normalize(scenario) - it(`works fine for ${path.basename(scenario)}`, async function () { + it(`works fine for ${path.basename(scenario)}`, async function() { const ops = await fse.readJson(scenario) if (ops.length > 0 && ops[0].op === 'pending') { return this.skip(ops[0].msg || 'pending') diff --git a/test/property/runner.js b/test/property/runner.js index 530fd4f48..88d0921d5 100644 --- a/test/property/runner.js +++ b/test/property/runner.js @@ -2,18 +2,19 @@ /* eslint no-fallthrough: ["error", { "commentPattern": "break omitted" }] */ const crypto = require('crypto') -const fs = require('fs') -const fse = require('fs-extra') const EventEmitter = require('events') +const fs = require('fs') + const Promise = require('bluebird') +const fse = require('fs-extra') +const PouchDB = require('pouchdb') const { Ignore } = require('../../core/ignore') +const Watcher = require('../../core/local/watcher') const { Merge } = require('../../core/merge') const { Pouch } = require('../../core/pouch') const Prep = require('../../core/prep') -const Watcher = require('../../core/local/watcher') -const PouchDB = require('pouchdb') PouchDB.plugin(require('pouchdb-adapter-memory')) let winfs @@ -87,7 +88,7 @@ async function step(state /*: Object */, op /*: Object */) { const block = size > 65536 ? 
65536 : size const content = await crypto.randomBytes(block) size -= block - setTimeout(async function () { + setTimeout(async function() { await state.dir.outputFile(op.path, content).catch(() => {}) }, (i + 1) * 10) } diff --git a/test/property/stack.js b/test/property/stack.js index fb17234c3..5594281dd 100644 --- a/test/property/stack.js +++ b/test/property/stack.js @@ -1,9 +1,10 @@ /* @flow */ -const fs = require('fs') -const fse = require('fs-extra') const { spawn, spawnSync } = require('child_process') +const fs = require('fs') + const Promise = require('bluebird') +const fse = require('fs-extra') /*:: import type { ChildProcess } from 'child_process' diff --git a/test/property/two_clients/index.js b/test/property/two_clients/index.js index f5954f961..c110d275e 100644 --- a/test/property/two_clients/index.js +++ b/test/property/two_clients/index.js @@ -1,28 +1,27 @@ /* @flow */ /* eslint-env mocha */ -const should = require('should') +const path = require('path') +const Promise = require('bluebird') const fse = require('fs-extra') const glob = require('glob') -const path = require('path') -const Promise = require('bluebird') +const should = require('should') -const { ContextDir } = require('../../support/helpers/context_dir') const TmpDir = require('../../support/helpers/TmpDir') - +const { ContextDir } = require('../../support/helpers/context_dir') +const { setupDevice } = require('../device') const { run } = require('../runner') const { setupStack } = require('../stack') -const { setupDevice } = require('../device') -describe('Two clients', function () { +describe('Two clients', function() { this.timeout(600000) this.slow(60000) const scenarios = glob.sync(path.join(__dirname, '*.json')) scenarios.forEach(scenario => { scenario = path.normalize(scenario) - it(`works fine for ${path.basename(scenario)}`, async function () { + it(`works fine for ${path.basename(scenario)}`, async function() { const data = await fse.readJson(scenario) if (data.pending) { return this.skip(data.pending.msg || 'pending') diff --git a/test/regression/TRELLO_484_local_sort_before_squash.js b/test/regression/TRELLO_484_local_sort_before_squash.js index f339cebaa..dcb0d4f6a 100644 --- a/test/regression/TRELLO_484_local_sort_before_squash.js +++ b/test/regression/TRELLO_484_local_sort_before_squash.js @@ -6,30 +6,30 @@ const _ = require('lodash') const should = require('should') const sinon = require('sinon') -const { runActions, init } = require('../support/helpers/scenarios') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') -const TestHelpers = require('../support/helpers') const pouchHelpers = require('../support/helpers/pouch') +const { runActions, init } = require('../support/helpers/scenarios') let helpers // Spies let prepCalls -describe('TRELLO #484: Local sort before squash (https://trello.com/c/RcRmqymw)', function () { +describe('TRELLO #484: Local sort before squash (https://trello.com/c/RcRmqymw)', function() { before(configHelpers.createConfig) before(configHelpers.registerClient) beforeEach(pouchHelpers.createDatabase) beforeEach(cozyHelpers.deleteAll) - beforeEach('set up synced dir', async function () { + beforeEach('set up synced dir', async function() { await fse.emptyDir(this.syncPath) }) afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) prepCalls = [] @@ 
-63,7 +63,7 @@ describe('TRELLO #484: Local sort before squash (https://trello.com/c/RcRmqymw)' } }) - it('is fixed', async function () { + it('is fixed', async function() { await init( { init: [ diff --git a/test/regression/TRELLO_646_move_overridden_before_sync.js b/test/regression/TRELLO_646_move_overridden_before_sync.js index 5d0d5a7e5..25e5c7606 100644 --- a/test/regression/TRELLO_646_move_overridden_before_sync.js +++ b/test/regression/TRELLO_646_move_overridden_before_sync.js @@ -6,12 +6,11 @@ const _ = require('lodash') const should = require('should') const metadata = require('../../core/metadata') - -const { runActions, init } = require('../support/helpers/scenarios') +const TestHelpers = require('../support/helpers') const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') -const TestHelpers = require('../support/helpers') const pouchHelpers = require('../support/helpers/pouch') +const { runActions, init } = require('../support/helpers/scenarios') describe('TRELLO #646: Déplacement écrasé avant synchro (malgré la synchro par lot, https://trello.com/c/Co05qttn)', () => { let helpers @@ -20,19 +19,19 @@ describe('TRELLO #646: Déplacement écrasé avant synchro (malgré la synchro p before(configHelpers.registerClient) beforeEach(pouchHelpers.createDatabase) beforeEach(cozyHelpers.deleteAll) - beforeEach('set up synced dir', async function () { + beforeEach('set up synced dir', async function() { await fse.emptyDir(this.syncPath) }) afterEach(pouchHelpers.cleanDatabase) after(configHelpers.cleanConfig) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) await helpers.local.setupTrash() }) - it('is broken', async function () { + it('is broken', async function() { this.timeout(30000) const pouchTree = async () => _.chain(await this.pouch.byRecursivePath('')) diff --git a/test/scenarios/run.js b/test/scenarios/run.js index 6b822cb05..de325fdce 100644 --- a/test/scenarios/run.js +++ b/test/scenarios/run.js @@ -1,17 +1,23 @@ /* eslint-env mocha */ /* @flow */ -const Promise = require('bluebird') -const fse = require('fs-extra') -const _ = require('lodash') const path = require('path') + +const Promise = require('bluebird') const chai = require('chai') const chaiLike = require('chai-like') +const fse = require('fs-extra') +const _ = require('lodash') chai.use(chaiLike) chai.Should() const config = require('../../core/config') - +const { logger } = require('../../core/utils/logger') +const remoteCaptureHelpers = require('../../dev/capture/remote') +const TestHelpers = require('../support/helpers') +const configHelpers = require('../support/helpers/config') +const cozyHelpers = require('../support/helpers/cozy') +const pouchHelpers = require('../support/helpers/pouch') const { disabledScenarioTest, init, @@ -23,31 +29,25 @@ const { runWithStoppedClient, fsStatsFromObj } = require('../support/helpers/scenarios') -const configHelpers = require('../support/helpers/config') -const cozyHelpers = require('../support/helpers/cozy') -const TestHelpers = require('../support/helpers') -const pouchHelpers = require('../support/helpers/pouch') -const remoteCaptureHelpers = require('../../dev/capture/remote') const { env: { CI: isCI }, platform } = process -const { logger } = require('../../core/utils/logger') const log = new logger({ component: 'TEST' }) -describe('Scenario', function () { +describe('Scenario', function() { let helpers beforeEach(configHelpers.createConfig) 
beforeEach(configHelpers.registerClient) beforeEach(pouchHelpers.createDatabase) beforeEach(cozyHelpers.deleteAll) - beforeEach('set up outside dir', async function () { + beforeEach('set up outside dir', async function() { await fse.emptyDir(path.resolve(path.join(this.syncPath, '..', 'outside'))) }) - beforeEach(async function () { + beforeEach(async function() { helpers = TestHelpers.init(this) // TODO: helpers.setup() @@ -55,7 +55,7 @@ describe('Scenario', function () { await helpers.remote.ignorePreviousChanges() }) - afterEach(async function () { + afterEach(async function() { await helpers.stop() }) afterEach(pouchHelpers.cleanDatabase) @@ -97,7 +97,7 @@ describe('Scenario', function () { }) } - it('', async function () { + it('', async function() { await runLocalChannel(scenario, parcelCapture, helpers) }) }) @@ -118,7 +118,7 @@ describe('Scenario', function () { const breakpoints = injectChokidarBreakpoints(eventsFile) for (let flushAfter of breakpoints) { - it(localTestName + ' flushAfter=' + flushAfter, async function () { + it(localTestName + ' flushAfter=' + flushAfter, async function() { await runLocalChokidarWithCaptures( scenario, _.cloneDeep(eventsFile), @@ -128,7 +128,7 @@ describe('Scenario', function () { }) } } else { - it(localTestName, async function () { + it(localTestName, async function() { if (isCI) this.timeout(3 * 60 * 1000) await runLocalChokidarWithoutCaptures( scenario, @@ -144,7 +144,7 @@ describe('Scenario', function () { if (stoppedTestSkipped) { it.skip(`${stoppedTestName} (${stoppedTestSkipped})`, () => {}) } else { - it(stoppedTestName, async function () { + it(stoppedTestName, async function() { if (isCI) this.timeout(60 * 1000) await runLocalStopped(scenario, helpers) }) @@ -159,7 +159,7 @@ describe('Scenario', function () { continue } - it(remoteTestName, async function () { + it(remoteTestName, async function() { if (isCI && platform === 'darwin') this.timeout(60 * 1000) await runRemote(scenario, helpers) }) diff --git a/test/support/assertions/change.js b/test/support/assertions/change.js index 742840374..90dffca98 100644 --- a/test/support/assertions/change.js +++ b/test/support/assertions/change.js @@ -4,7 +4,7 @@ const should = require('should') function changeAssertion(strict) { const assertionName = strict ? 
'changeOnly' : 'change' - const assertion = function (actual, props) { + const assertion = function(actual, props) { if (props) { if (this.negate) { throw new Error( diff --git a/test/support/assertions/fileContents.js b/test/support/assertions/fileContents.js index 90870e4a7..5221cce8f 100644 --- a/test/support/assertions/fileContents.js +++ b/test/support/assertions/fileContents.js @@ -14,7 +14,7 @@ import type { ContextDir } from '../helpers/context_dir' // file1: 'content 1', // file2: 'content 2' // }) -should.Assertion.prototype.fileContents = async function ( +should.Assertion.prototype.fileContents = async function( expected /*: { [path: string]: string } */ ) { const dir /*: ContextDir */ = this.obj diff --git a/test/support/assertions/pending.js b/test/support/assertions/pending.js index 8a2adfd67..0be8434b1 100644 --- a/test/support/assertions/pending.js +++ b/test/support/assertions/pending.js @@ -1,6 +1,6 @@ const should = require('should') -should.Assertion.add('pending', function () { +should.Assertion.add('pending', function() { this.params = { operator: 'be pending' } this.obj.isPending().should.be.true() }) diff --git a/test/support/assertions/timestamp.js b/test/support/assertions/timestamp.js index 50036d40d..ca44f2b54 100644 --- a/test/support/assertions/timestamp.js +++ b/test/support/assertions/timestamp.js @@ -2,8 +2,8 @@ const should = require('should') const timestamp = require('../../../core/utils/timestamp') -should.use(function (should, Assertion) { - Assertion.add('sameTimestamp', function (expected, message) { +should.use(function(should, Assertion) { + Assertion.add('sameTimestamp', function(expected, message) { this.params = { operator: 'be the same timestamp as', expected, @@ -13,7 +13,7 @@ should.use(function (should, Assertion) { this.obj.getTime().should.equal(expected.getTime()) }) - Assertion.add('timestamp', function (...args) { + Assertion.add('timestamp', function(...args) { const expected = timestamp.build(...args) this.params = { diff --git a/test/support/builders/channel_event.js b/test/support/builders/channel_event.js index d9da441b1..c937c3737 100644 --- a/test/support/builders/channel_event.js +++ b/test/support/builders/channel_event.js @@ -1,13 +1,13 @@ /* @flow */ -const _ = require('lodash') const path = require('path') -const { FILE, FOLDER } = require('../../../core/metadata') -const events = require('../../../core/local/channel_watcher/event') +const _ = require('lodash') -const statsBuilder = require('./stats') const ChecksumBuilder = require('./checksum') +const statsBuilder = require('./stats') +const events = require('../../../core/local/channel_watcher/event') +const { FILE, FOLDER } = require('../../../core/metadata') /*:: import type { Stats } from 'fs' diff --git a/test/support/builders/checksum.js b/test/support/builders/checksum.js index df17ef207..5d470fce3 100644 --- a/test/support/builders/checksum.js +++ b/test/support/builders/checksum.js @@ -1,7 +1,7 @@ /* @flow */ -const { Readable } = require('stream') const crypto = require('crypto') +const { Readable } = require('stream') module.exports = class ChecksumBuilder { /*:: @@ -19,7 +19,11 @@ module.exports = class ChecksumBuilder { 'build() can only be called with String data as we will not await a Stream reading' ) } else { - return crypto.createHash('md5').update(data).digest().toString('base64') + return crypto + .createHash('md5') + .update(data) + .digest() + .toString('base64') } } @@ -30,11 +34,11 @@ module.exports = class ChecksumBuilder { 
checksum.setEncoding('base64') return new Promise((resolve, reject) => { - stream.on('end', function () { + stream.on('end', function() { checksum.end() resolve(String(checksum.read())) }) - stream.on('error', function (err) { + stream.on('error', function(err) { checksum.end() reject(err) }) diff --git a/test/support/builders/index.js b/test/support/builders/index.js index fb011946f..a29daaaa2 100644 --- a/test/support/builders/index.js +++ b/test/support/builders/index.js @@ -6,18 +6,17 @@ const path = require('path') -const { ROOT_DIR_ID } = require('../../../core/remote/constants') - +const ChannelEventBuilder = require('./channel_event') +const ChecksumBuilder = require('./checksum') const DirMetadataBuilder = require('./metadata/dir') const FileMetadataBuilder = require('./metadata/file') const RemoteDirBuilder = require('./remote/dir') +const RemoteErasedBuilder = require('./remote/erased') const RemoteFileBuilder = require('./remote/file') const RemoteNoteBuilder = require('./remote/note') -const RemoteErasedBuilder = require('./remote/erased') -const StreamBuilder = require('./stream') -const ChannelEventBuilder = require('./channel_event') const { DefaultStatsBuilder, WinStatsBuilder } = require('./stats') -const ChecksumBuilder = require('./checksum') +const StreamBuilder = require('./stream') +const { ROOT_DIR_ID } = require('../../../core/remote/constants') /*:: import type { Readable } from 'stream' diff --git a/test/support/builders/metadata/base.js b/test/support/builders/metadata/base.js index 672d4f57a..5aeca6355 100644 --- a/test/support/builders/metadata/base.js +++ b/test/support/builders/metadata/base.js @@ -1,15 +1,15 @@ /* @flow */ -const _ = require('lodash') const path = require('path') +const _ = require('lodash') + const metadata = require('../../../../core/metadata') -const timestamp = require('../../../../core/utils/timestamp') const pathUtils = require('../../../../core/utils/path') - -const RemoteFileBuilder = require('../remote/file') -const RemoteDirBuilder = require('../remote/dir') +const timestamp = require('../../../../core/utils/timestamp') const dbBuilders = require('../db') +const RemoteDirBuilder = require('../remote/dir') +const RemoteFileBuilder = require('../remote/file') const statsBuilder = require('../stats') /*:: @@ -134,7 +134,11 @@ module.exports = class BaseMetadataBuilder { if (platform === 'win32') { return this.path('in:compatible') } else { - return this.path(Array(256).fill('a').join('')) + return this.path( + Array(256) + .fill('a') + .join('') + ) } } diff --git a/test/support/builders/metadata/dir.js b/test/support/builders/metadata/dir.js index 3a3a7feb7..e40dd7515 100644 --- a/test/support/builders/metadata/dir.js +++ b/test/support/builders/metadata/dir.js @@ -1,8 +1,7 @@ // @flow -const { FOLDER } = require('../../../../core/metadata') - const BaseMetadataBuilder = require('./base') +const { FOLDER } = require('../../../../core/metadata') /*:: import type { Metadata } from '../../../../core/metadata' diff --git a/test/support/builders/metadata/file.js b/test/support/builders/metadata/file.js index 1552f019b..034a7e38c 100644 --- a/test/support/builders/metadata/file.js +++ b/test/support/builders/metadata/file.js @@ -1,8 +1,7 @@ /* @flow */ -const mime = require('../../../../core/utils/mime') - const BaseMetadataBuilder = require('./base') +const mime = require('../../../../core/utils/mime') const ChecksumBuilder = require('../checksum') /*:: diff --git a/test/support/builders/remote/base.js 
b/test/support/builders/remote/base.js index 2107de2f5..981ed4a31 100644 --- a/test/support/builders/remote/base.js +++ b/test/support/builders/remote/base.js @@ -1,8 +1,10 @@ /* @flow */ -const _ = require('lodash') const { posix } = require('path') +const _ = require('lodash') + +const metadata = require('../../../../core/metadata') const { DIR_TYPE, FILES_DOCTYPE, @@ -10,9 +12,7 @@ const { TRASH_DIR_ID, TRASH_DIR_NAME } = require('../../../../core/remote/constants') -const metadata = require('../../../../core/metadata') const timestamp = require('../../../../core/utils/timestamp') - const dbBuilders = require('../db') /*:: diff --git a/test/support/builders/remote/dir.js b/test/support/builders/remote/dir.js index 1115dbc0a..b3ea7839f 100644 --- a/test/support/builders/remote/dir.js +++ b/test/support/builders/remote/dir.js @@ -1,18 +1,19 @@ /* @flow */ const _ = require('lodash') + const CozyClient = require('cozy-client').default const RemoteBaseBuilder = require('./base') -const { - inRemoteTrash, - remoteJsonToRemoteDoc -} = require('../../../../core/remote/document') const { DIR_TYPE, FILES_DOCTYPE, OAUTH_CLIENTS_DOCTYPE } = require('../../../../core/remote/constants') +const { + inRemoteTrash, + remoteJsonToRemoteDoc +} = require('../../../../core/remote/document') /*:: import type { Cozy } from 'cozy-client-js' @@ -31,9 +32,7 @@ var dirNumber = 1 // // const dir: RemoteDir = await builders.remoteDir().inDir(...).create() // -module.exports = class RemoteDirBuilder extends ( - RemoteBaseBuilder -) /*:: */ { +module.exports = class RemoteDirBuilder extends RemoteBaseBuilder /*:: */ { constructor(cozy /*: ?Cozy */, old /*: ?RemoteDir */) { super(cozy, old) diff --git a/test/support/builders/remote/erased.js b/test/support/builders/remote/erased.js index 7d879fa3d..34b7a5828 100644 --- a/test/support/builders/remote/erased.js +++ b/test/support/builders/remote/erased.js @@ -2,10 +2,9 @@ const _ = require('lodash') +const metadata = require('../../../../core/metadata') const { ROOT_DIR_ID } = require('../../../../core/remote/constants') const { remoteJsonToRemoteDoc } = require('../../../../core/remote/document') -const metadata = require('../../../../core/metadata') - const dbBuilders = require('../db') /*:: diff --git a/test/support/builders/remote/file.js b/test/support/builders/remote/file.js index c54104ac1..61984dd2a 100644 --- a/test/support/builders/remote/file.js +++ b/test/support/builders/remote/file.js @@ -1,17 +1,17 @@ /* @flow */ const fs = require('fs') + const _ = require('lodash') const RemoteBaseBuilder = require('./base') -const ChecksumBuilder = require('../checksum') -const cozyHelpers = require('../../helpers/cozy') - +const { FILES_DOCTYPE } = require('../../../../core/remote/constants') const { inRemoteTrash, remoteJsonToRemoteDoc } = require('../../../../core/remote/document') -const { FILES_DOCTYPE } = require('../../../../core/remote/constants') +const cozyHelpers = require('../../helpers/cozy') +const ChecksumBuilder = require('../checksum') /*:: import type stream from 'stream' @@ -48,9 +48,7 @@ const baseData = `Content of remote file ${fileNumber}` // // const file /*: FullRemoteFile */ = await builders.remoteFile().inDir(...).create() // -module.exports = class RemoteFileBuilder extends ( - RemoteBaseBuilder -) /*:: */ { +module.exports = class RemoteFileBuilder extends RemoteBaseBuilder /*:: */ { /*:: _data: string | stream.Readable | Buffer */ diff --git a/test/support/builders/remote/note.js b/test/support/builders/remote/note.js index 
f513e3c80..4c2469386 100644 --- a/test/support/builders/remote/note.js +++ b/test/support/builders/remote/note.js @@ -1,17 +1,17 @@ /* @flow */ const { posix } = require('path') + const _ = require('lodash') const RemoteBaseBuilder = require('./base') -const ChecksumBuilder = require('../checksum') -const cozyHelpers = require('../../helpers/cozy') - -const { remoteJsonToRemoteDoc } = require('../../../../core/remote/document') const { FILES_DOCTYPE, NOTE_MIME_TYPE } = require('../../../../core/remote/constants') +const { remoteJsonToRemoteDoc } = require('../../../../core/remote/document') +const cozyHelpers = require('../../helpers/cozy') +const ChecksumBuilder = require('../checksum') /*:: import type stream from 'stream' @@ -38,9 +38,7 @@ const baseMetadata = { // // const note /*: FullRemoteFile */ = await builders.remoteNote().inDir(...).create() // -module.exports = class RemoteNoteBuilder extends ( - RemoteBaseBuilder -) /*:: */ { +module.exports = class RemoteNoteBuilder extends RemoteBaseBuilder /*:: */ { /*:: _title: string _content: string diff --git a/test/support/builders/stats.js b/test/support/builders/stats.js index 8230bb8e5..119480bdd 100644 --- a/test/support/builders/stats.js +++ b/test/support/builders/stats.js @@ -4,6 +4,7 @@ */ const fs = require('fs') + const _ = require('lodash') /*:: @@ -86,7 +87,11 @@ class DefaultStatsBuilder { * hexadecimal string. */ const fileIdFromNumber = (n /*: number */) => - '0x' + n.toString(16).toUpperCase().padStart(16, '0') + '0x' + + n + .toString(16) + .toUpperCase() + .padStart(16, '0') /** Build a @gyselroth/windows-fsstat object */ class WinStatsBuilder { diff --git a/test/support/builders/stream.js b/test/support/builders/stream.js index b2bc52bb7..c1f4b5c97 100644 --- a/test/support/builders/stream.js +++ b/test/support/builders/stream.js @@ -26,7 +26,7 @@ module.exports = class StreamBuilder { build() /*: stream.Readable */ { const builder = this return new stream.Readable({ - read: function () { + read: function() { if (builder.err) { this.emit('error', builder.err) } else { diff --git a/test/support/coverage.js b/test/support/coverage.js index 491f5b336..89377110b 100644 --- a/test/support/coverage.js +++ b/test/support/coverage.js @@ -8,11 +8,13 @@ * @module test/support/coverage */ -const glob = require('glob') const path = require('path') + const fse = require('fs-extra') +const glob = require('glob') const { hookRequire } = require('istanbul-lib-hook') -const { createInstrumenter } = require('istanbul-lib-instrument') // eslint-disable-line node/no-extraneous-require +// eslint-disable-next-line import/no-extraneous-dependencies, node/no-extraneous-require +const { createInstrumenter } = require('istanbul-lib-instrument') const cov = (global.__coverage__ = {}) diff --git a/test/support/doubles/fs.js b/test/support/doubles/fs.js index 3e2ea65f0..98fbddd28 100644 --- a/test/support/doubles/fs.js +++ b/test/support/doubles/fs.js @@ -1,5 +1,5 @@ -const sinon = require('sinon') const fse = require('fs-extra') +const sinon = require('sinon') const { MissingFileError } = require('../../../core/utils/fs') diff --git a/test/support/helpers/TmpDir.js b/test/support/helpers/TmpDir.js index c64c0c702..f11ac6ecc 100644 --- a/test/support/helpers/TmpDir.js +++ b/test/support/helpers/TmpDir.js @@ -1,8 +1,9 @@ /* @flow */ -const fse = require('fs-extra') const path = require('path') +const fse = require('fs-extra') + const rootDir = path.resolve(__dirname, '../..') // Where can we put temporary stuff diff --git 
a/test/support/helpers/config.js b/test/support/helpers/config.js index 1a0b6761c..5ff68ba13 100644 --- a/test/support/helpers/config.js +++ b/test/support/helpers/config.js @@ -1,13 +1,13 @@ -const fse = require('fs-extra') -const del = require('del') const path = require('path') -const config = require('../../../core/config') +const del = require('del') +const fse = require('fs-extra') -const automatedRegistration = require('../../../dev/remote/automated_registration') -const pkg = require('../../../package.json') const { COZY_URL } = require('./cozy') const PASSPHRASE = require('./passphrase') +const config = require('../../../core/config') +const automatedRegistration = require('../../../dev/remote/automated_registration') +const pkg = require('../../../package.json') module.exports = { createConfig() { diff --git a/test/support/helpers/context_dir.js b/test/support/helpers/context_dir.js index 5457fbcde..f65abf61f 100644 --- a/test/support/helpers/context_dir.js +++ b/test/support/helpers/context_dir.js @@ -1,15 +1,16 @@ /* @flow */ +const path = require('path') + const autoBind = require('auto-bind') const Promise = require('bluebird') const fse = require('fs-extra') const _ = require('lodash') -const path = require('path') const rimraf = require('rimraf') const checksumer = require('../../../core/local/checksumer') -const stater = require('../../../core/local/stater') const { TMP_DIR_NAME } = require('../../../core/local/constants') +const stater = require('../../../core/local/stater') Promise.promisifyAll(checksumer) const rimrafAsync = Promise.promisify(rimraf) diff --git a/test/support/helpers/cozy.js b/test/support/helpers/cozy.js index 16f1e2695..0338624b7 100644 --- a/test/support/helpers/cozy.js +++ b/test/support/helpers/cozy.js @@ -5,7 +5,6 @@ require('../../../core/globals') // Setup network so that all test requests will go through `electron-fetch` const { app, session } = require('electron') -const network = require('../../../gui/js/network') /*:: import type { Config } from '../../../core/config' @@ -29,14 +28,15 @@ const resetNetwork = async () => { } setupNetwork() -const OldCozyClient = require('cozy-client-js').Client const CozyClient = require('cozy-client').default +const OldCozyClient = require('cozy-client-js').Client const { FILES_DOCTYPE, ROOT_DIR_ID, TRASH_DIR_ID } = require('../../../core/remote/constants') +const network = require('../../../gui/js/network') // The URL of the Cozy instance used for tests const COZY_URL = process.env.COZY_URL || 'http://cozy.localhost:8080' diff --git a/test/support/helpers/index.js b/test/support/helpers/index.js index 82e14b5b5..7c9cbfd0b 100644 --- a/test/support/helpers/index.js +++ b/test/support/helpers/index.js @@ -1,22 +1,22 @@ /* @flow */ -const autoBind = require('auto-bind') const path = require('path') + +const autoBind = require('auto-bind') const _ = require('lodash') const { defaults, pick } = _ const sinon = require('sinon') +const conflictHelpers = require('./conflict') +const { posixifyPath } = require('./context_dir') +const { LocalTestHelpers } = require('./local') +const { RemoteTestHelpers } = require('./remote') const { Ignore } = require('../../../core/ignore') const { Merge } = require('../../../core/merge') +const { FOLDER } = require('../../../core/metadata') const Prep = require('../../../core/prep') const { Sync } = require('../../../core/sync') const SyncState = require('../../../core/syncstate') -const { FOLDER } = require('../../../core/metadata') - -const conflictHelpers = 
require('./conflict') -const { posixifyPath } = require('./context_dir') -const { LocalTestHelpers } = require('./local') -const { RemoteTestHelpers } = require('./remote') /*:: import type { Client as OldCozyClient } from 'cozy-client-js' diff --git a/test/support/helpers/local.js b/test/support/helpers/local.js index be428a1e9..2df0fd4b5 100644 --- a/test/support/helpers/local.js +++ b/test/support/helpers/local.js @@ -1,22 +1,22 @@ /* @flow */ +const path = require('path') + const autoBind = require('auto-bind') const Promise = require('bluebird') const fse = require('fs-extra') const _ = require('lodash') -const path = require('path') const rimraf = require('rimraf') const conflictHelpers = require('./conflict') const { ContextDir } = require('./context_dir') - const { Local } = require('../../../core/local') const channelWatcher = require('../../../core/local/channel_watcher') -const { TMP_DIR_NAME } = require('../../../core/local/constants') const dispatch = require('../../../core/local/channel_watcher/dispatch') const { INITIAL_SCAN_DONE } = require('../../../core/local/channel_watcher/event') +const { TMP_DIR_NAME } = require('../../../core/local/constants') const rimrafAsync = Promise.promisify(rimraf) diff --git a/test/support/helpers/remote.js b/test/support/helpers/remote.js index b077153ff..b6000ea81 100644 --- a/test/support/helpers/remote.js +++ b/test/support/helpers/remote.js @@ -1,12 +1,12 @@ /* @flow */ +const path = require('path') + const autoBind = require('auto-bind') const _ = require('lodash') -const path = require('path') const conflictHelpers = require('./conflict') const cozyHelpers = require('./cozy') - const { Remote, dirAndName } = require('../../../core/remote') const { DIR_TYPE, diff --git a/test/support/helpers/scenarios.js b/test/support/helpers/scenarios.js index 725b5b3c7..6488c036c 100644 --- a/test/support/helpers/scenarios.js +++ b/test/support/helpers/scenarios.js @@ -4,17 +4,17 @@ * @flow */ +const fs = require('fs') +const path = require('path') + const Promise = require('bluebird') const fse = require('fs-extra') const glob = require('glob') const _ = require('lodash') -const path = require('path') -const fs = require('fs') const sinon = require('sinon') -const stater = require('../../../core/local/stater') - const { cozy } = require('./cozy') +const stater = require('../../../core/local/stater') const Builders = require('../builders') /*:: diff --git a/test/support/hooks/logging.js b/test/support/hooks/logging.js index 50089e2fd..ecf4dbd9f 100644 --- a/test/support/hooks/logging.js +++ b/test/support/hooks/logging.js @@ -19,7 +19,7 @@ baseLogger.add( }) ) -beforeEach(function () { +beforeEach(function() { errors.length = 0 // FIXME: this.currentTest is undefined on AppVeyor, not sure why if (process.env.APPVEYOR == null) { @@ -27,7 +27,7 @@ beforeEach(function () { } }) -afterEach(function () { +afterEach(function() { for (const err of errors) { // eslint-disable-next-line no-console console.log(err) diff --git a/test/support/istanbul_reporter.js b/test/support/istanbul_reporter.js index df94b09de..5915a22c5 100644 --- a/test/support/istanbul_reporter.js +++ b/test/support/istanbul_reporter.js @@ -2,8 +2,7 @@ const istanbulAPI = require('istanbul-api') const libCoverage = require('istanbul-lib-coverage') - -// eslint-disable-next-line node/no-extraneous-require +// eslint-disable-next-line import/no-extraneous-dependencies, node/no-extraneous-require const SpecReporter = require('mocha').reporters.spec const applySpecReporter = runner => new 
SpecReporter(runner) diff --git a/test/support/suppress-experimental-warnings.js b/test/support/suppress-experimental-warnings.js index 2eec847cc..f179f5620 100644 --- a/test/support/suppress-experimental-warnings.js +++ b/test/support/suppress-experimental-warnings.js @@ -12,8 +12,8 @@ const originalEmit = process.emit module.exports = { - fetch: function () { - process.emit = function (name, data) { + fetch: function() { + process.emit = function(name, data) { if ( name === 'warning' && typeof data === 'object' && diff --git a/test/unit/IdConflict.js b/test/unit/IdConflict.js index 4477dd6e3..d28bdcbb8 100644 --- a/test/unit/IdConflict.js +++ b/test/unit/IdConflict.js @@ -3,15 +3,14 @@ const should = require('should') +const IdConflict = require('../../core/IdConflict') const Builders = require('../support/builders') const { onPlatform, onPlatforms } = require('../support/helpers/platform') -const IdConflict = require('../../core/IdConflict') - const builders = new Builders() const { platform } = process -describe('IdConflict', function () { +describe('IdConflict', function() { const side = 'remote' // whatever describe('.detect()', () => { @@ -104,13 +103,19 @@ describe('IdConflict', function () { } beforeEach(() => { - existingDoc = builders.metadata().path(existingPath).build() + existingDoc = builders + .metadata() + .path(existingPath) + .build() }) describe('when change is an addition', () => { const addition = path => ({ side, - doc: builders.metadata().path(path).build() + doc: builders + .metadata() + .path(path) + .build() }) describe('to the existing path', () => { @@ -128,8 +133,14 @@ describe('IdConflict', function () { describe('when change is a move', () => { const move = ({ srcPath, dstPath }) => ({ - doc: builders.metadata().path(dstPath).build(), - was: builders.metadata().path(srcPath).build() + doc: builders + .metadata() + .path(dstPath) + .build(), + was: builders + .metadata() + .path(srcPath) + .build() }) describe('to a completely different path (should not happen)', () => { diff --git a/test/unit/app.js b/test/unit/app.js index 334919044..c097b93d1 100644 --- a/test/unit/app.js +++ b/test/unit/app.js @@ -1,47 +1,47 @@ /* eslint-env mocha */ -const fse = require('fs-extra') const os = require('os') const path = require('path') + +const fse = require('fs-extra') const should = require('should') const sinon = require('sinon') const { App } = require('../../core/app') +const { FetchError } = require('../../core/remote/cozy') const { LOG_BASENAME } = require('../../core/utils/logger') const pkg = require('../../package.json') const { version } = pkg -const { FetchError } = require('../../core/remote/cozy') - const configHelpers = require('../support/helpers/config') -describe('App', function () { - describe('parseCozyUrl', function () { - it('parses https://example.com/', function () { +describe('App', function() { + describe('parseCozyUrl', function() { + it('parses https://example.com/', function() { let parsed = App.prototype.parseCozyUrl('https://example.com') parsed.protocol.should.equal('https:') parsed.host.should.equal('example.com') }) - it('parses example.org as https://example.org', function () { + it('parses example.org as https://example.org', function() { let parsed = App.prototype.parseCozyUrl('example.org') parsed.protocol.should.equal('https:') parsed.host.should.equal('example.org') }) - it('parses zoe as https://zoe.mycozy.cloud', function () { + it('parses zoe as https://zoe.mycozy.cloud', function() { let parsed = 
App.prototype.parseCozyUrl('zoe') parsed.protocol.should.equal('https:') parsed.host.should.equal('zoe.mycozy.cloud') }) - it('parses http://localhost:9104', function () { + it('parses http://localhost:9104', function() { let parsed = App.prototype.parseCozyUrl('http://localhost:9104') parsed.protocol.should.equal('http:') parsed.hostname.should.equal('localhost') parsed.port.should.equal('9104') }) - it('parses https://toto.cozy.claude.fr:8084', function () { + it('parses https://toto.cozy.claude.fr:8084', function() { let parsed = App.prototype.parseCozyUrl( 'https://toto.cozy.claude.fr:8084' ) @@ -54,7 +54,7 @@ describe('App', function () { describe('removeRemote', () => { beforeEach(configHelpers.createConfig) - it('removes the config even if the Cozy is unreachable', async function () { + it('removes the config even if the Cozy is unreachable', async function() { // We have to call this helper here and not in a beforeEach otherwise the // next test will actually delete the test OAuth client on the Cozy and // other tests will subsequently fail. @@ -82,7 +82,7 @@ describe('App', function () { return } - it('unregisters the client', async function () { + it('unregisters the client', async function() { await configHelpers.registerOAuthClient.call(this) const configDir = path.dirname(this.config.configPath) const basePath = path.dirname(configDir) @@ -105,7 +105,7 @@ describe('App', function () { beforeEach(configHelpers.createConfig) beforeEach(configHelpers.registerClient) - it('removes everything but the logs from the config dir', async function () { + it('removes everything but the logs from the config dir', async function() { const configDir = path.dirname(this.config.configPath) const basePath = path.dirname(configDir) const app = new App(basePath) @@ -196,7 +196,7 @@ describe('App', function () { describe('stopSync', () => { let app - beforeEach('create app', function () { + beforeEach('create app', function() { configHelpers.createConfig.call(this) configHelpers.registerClient.call(this) this.config.persist() // the config helper does not persist it @@ -204,7 +204,7 @@ describe('App', function () { }) context('when we have an instanciated Sync', () => { - beforeEach('instanciate app', function () { + beforeEach('instanciate app', function() { app.instanciate() }) @@ -241,7 +241,7 @@ describe('App', function () { }) }) - it('works when app is configured', function () { + it('works when app is configured', function() { configHelpers.createConfig.call(this) configHelpers.registerClient.call(this) this.config.persist() // the config helper does not persist it @@ -262,7 +262,7 @@ describe('App', function () { }) describe('sendMailToSupport', () => { - it('sends email even without the local PouchDB tree', async function () { + it('sends email even without the local PouchDB tree', async function() { configHelpers.createConfig.call(this) configHelpers.registerClient.call(this) this.config.persist() // the config helper does not persist it diff --git a/test/unit/config.js b/test/unit/config.js index 4caac173a..da450368b 100644 --- a/test/unit/config.js +++ b/test/unit/config.js @@ -1,44 +1,45 @@ /* eslint-env mocha */ const path = require('path') -const should = require('should') + const fse = require('fs-extra') -const configHelpers = require('../support/helpers/config') -const { COZY_URL } = require('../support/helpers/cozy') +const should = require('should') const config = require('../../core/config') +const configHelpers = require('../support/helpers/config') +const { COZY_URL } = 
require('../support/helpers/cozy') -describe('core/config', function () { +describe('core/config', function() { describe('.Config', () => { beforeEach('instanciate config', configHelpers.createConfig) afterEach('clean config directory', configHelpers.cleanConfig) - describe('read', function () { - context('when a tmp config file exists', function () { - beforeEach('create tmp config file', function () { + describe('read', function() { + context('when a tmp config file exists', function() { + beforeEach('create tmp config file', function() { fse.ensureFileSync(this.config.tmpConfigPath) }) - afterEach('remove tmp config file', function () { + afterEach('remove tmp config file', function() { if (fse.existsSync(this.config.tmpConfigPath)) { fse.unlinkSync(this.config.tmpConfigPath) } }) - context('and it has a valid JSON content', function () { + context('and it has a valid JSON content', function() { const fileConfig = { url: 'https://cozy.test/' } - beforeEach('write valid content', function () { + beforeEach('write valid content', function() { fse.writeFileSync( this.config.tmpConfigPath, JSON.stringify(fileConfig, null, 2) ) }) - it('reads the tmp config', function () { + it('reads the tmp config', function() { should(this.config.read()).match(fileConfig) }) - it('persists the tmp config file as the new config file', function () { + it('persists the tmp config file as the new config file', function() { this.config.read() const fileConfigPersisted = fse.readJSONSync(this.config.configPath) @@ -46,13 +47,13 @@ describe('core/config', function () { }) }) - context('and it does not have a valid JSON content', function () { - beforeEach('write invalid content', function () { + context('and it does not have a valid JSON content', function() { + beforeEach('write invalid content', function() { fse.writeFileSync(this.config.tmpConfigPath, '\0') this.config.persist() }) - it('reads the existing config', function () { + it('reads the existing config', function() { const fileConfig = this.config.read() should(fileConfig).be.an.Object() should(fileConfig.url).eql(COZY_URL) @@ -60,28 +61,28 @@ describe('core/config', function () { }) }) - context('when no tmp config files exist', function () { - beforeEach('remove any tmp config file', function () { + context('when no tmp config files exist', function() { + beforeEach('remove any tmp config file', function() { if (fse.existsSync(this.config.tmpConfigPath)) { fse.unlinkSync(this.config.tmpConfigPath) } this.config.persist() }) - it('reads the existing config', function () { + it('reads the existing config', function() { const fileConfig = this.config.read() should(fileConfig).be.an.Object() should(fileConfig.url).eql(COZY_URL) }) }) - context('when the read config is empty', function () { - beforeEach('empty local config', function () { + context('when the read config is empty', function() { + beforeEach('empty local config', function() { fse.ensureFileSync(this.config.configPath) fse.writeFileSync(this.config.configPath, '') }) - it('creates a new empty one', function () { + it('creates a new empty one', function() { const fileConfig = this.config.read() should(fileConfig).be.an.Object() should(fileConfig).be.empty() @@ -89,69 +90,69 @@ describe('core/config', function () { }) }) - describe('safeLoad', function () { - context('when the file content is valid JSON', function () { + describe('safeLoad', function() { + context('when the file content is valid JSON', function() { const fileConfig = { url: 'https://cozy.test/' } - beforeEach('write valid 
content', function () { + beforeEach('write valid content', function() { fse.writeFileSync( this.config.configPath, JSON.stringify(fileConfig, null, 2) ) }) - it('returns an object matching the file content', function () { + it('returns an object matching the file content', function() { const newFileConfig = config.loadOrDeleteFile(this.config.configPath) newFileConfig.should.be.an.Object() newFileConfig.url.should.eql(fileConfig.url) }) }) - context('when the file does not exist', function () { - beforeEach('remove config file', function () { + context('when the file does not exist', function() { + beforeEach('remove config file', function() { if (fse.existsSync(this.config.configPath)) { fse.unlinkSync(this.config.configPath) } }) - it('throws an error', function () { + it('throws an error', function() { ;(() => { config.loadOrDeleteFile(this.config.configPath) }).should.throw() }) }) - context('when the file is empty', function () { - beforeEach('create empty file', function () { + context('when the file is empty', function() { + beforeEach('create empty file', function() { fse.writeFileSync(this.config.configPath, '') }) - it('returns an empty object', function () { + it('returns an empty object', function() { should(config.loadOrDeleteFile(this.config.configPath)).deepEqual({}) }) - it('does not delete it', function () { + it('does not delete it', function() { config.loadOrDeleteFile(this.config.configPath) should(fse.existsSync(this.config.configPath)).be.true() }) }) - context('when the file content is not valid JSON', function () { - beforeEach('write invalid content', function () { + context('when the file content is not valid JSON', function() { + beforeEach('write invalid content', function() { fse.writeFileSync(this.config.configPath, '\0') }) - it('does not throw any errors', function () { + it('does not throw any errors', function() { ;(() => { config.loadOrDeleteFile(this.config.configPath) }).should.not.throw() }) - it('returns an empty object', function () { + it('returns an empty object', function() { should(config.loadOrDeleteFile(this.config.configPath)).deepEqual({}) }) - it('deletes the file', function () { + it('deletes the file', function() { fse.existsSync(this.config.configPath).should.be.true() config.loadOrDeleteFile(this.config.configPath) fse.existsSync(this.config.configPath).should.be.false() @@ -159,8 +160,8 @@ describe('core/config', function () { }) }) - describe('persist', function () { - it('saves last changes made on the config', function () { + describe('persist', function() { + it('saves last changes made on the config', function() { const url = 'http://cozy.local:8080/' this.config.cozyUrl = url this.config.persist() @@ -169,51 +170,51 @@ describe('core/config', function () { }) }) - describe('SyncPath', function () { - it('returns the set sync path', function () { + describe('SyncPath', function() { + it('returns the set sync path', function() { this.config.syncPath = '/path/to/sync/dir' should(this.config.syncPath).equal('/path/to/sync/dir') }) }) - describe('CozyUrl', function () { - it('returns the set Cozy URL', function () { + describe('CozyUrl', function() { + it('returns the set Cozy URL', function() { this.config.cozyUrl = 'https://cozy.example.com' should(this.config.cozyUrl).equal('https://cozy.example.com') }) }) describe('gui', () => { - it('returns an empty hash by default', function () { + it('returns an empty hash by default', function() { should(this.config.gui).deepEqual({}) }) - it('returns GUI configuration if any', function () { 
+ it('returns GUI configuration if any', function() { const guiConfig = { foo: 'bar' } this.config.fileConfig.gui = guiConfig should(this.config.gui).deepEqual(guiConfig) }) }) - describe('Client', function () { - it('can set a client', function () { + describe('Client', function() { + it('can set a client', function() { this.config.client = { clientName: 'test' } should(this.config.isValid()).be.true() should(this.config.client.clientName).equal('test') }) - it('has no client after a reset', function () { + it('has no client after a reset', function() { this.config.reset() should(this.config.isValid()).be.false() }) }) describe('flags', () => { - it('returns an empty hash by default', function () { + it('returns an empty hash by default', function() { should(this.config.flags).deepEqual({}) }) - it('returns GUI configuration if any', function () { + it('returns GUI configuration if any', function() { const flagsConfig = { 'settings.partial-desktop-sync.show-synced-folders-selection': true } @@ -227,13 +228,13 @@ describe('core/config', function () { }) }) - describe('#watcherType', function () { - it('returns valid watcher type from file config if any', function () { + describe('#watcherType', function() { + it('returns valid watcher type from file config if any', function() { this.config.fileConfig.watcherType = 'channel' should(this.config.watcherType).equal('channel') }) - it('is the same as core/config.watcherType() otherwise', function () { + it('is the same as core/config.watcherType() otherwise', function() { should(this.config.watcherType).equal(config.watcherType()) }) }) diff --git a/test/unit/gui/lastfiles.js b/test/unit/gui/lastfiles.js index 10b06dd1a..e83c9ab98 100644 --- a/test/unit/gui/lastfiles.js +++ b/test/unit/gui/lastfiles.js @@ -1,9 +1,10 @@ /* @flow */ /* eslint-env mocha */ -const should = require('should') const path = require('path') +const should = require('should') + const lastfiles = require('../../../gui/js/lastfiles') const buildFile = fpath => ({ diff --git a/test/unit/gui/network.js b/test/unit/gui/network.js index d90d3b1b4..26553977f 100644 --- a/test/unit/gui/network.js +++ b/test/unit/gui/network.js @@ -1,20 +1,20 @@ /* eslint-env mocha */ -const { app, session } = require('electron') -const faker = require('faker') const fs = require('fs') const http = require('http') const https = require('https') const path = require('path') const process = require('process') -const should = require('should') const { URL } = require('url') -const cozyHelpers = require('../../support/helpers/cozy') +const { app, session } = require('electron') +const faker = require('faker') +const should = require('should') const network = require('../../../gui/js/network') +const cozyHelpers = require('../../support/helpers/cozy') -describe('gui/js/network', function () { +describe('gui/js/network', function() { const emptyConfig = { 'login-by-realm': undefined, 'proxy-bypassrules': undefined, diff --git a/test/unit/helpers_merge.js b/test/unit/helpers_merge.js index 2471651a7..c0123562e 100644 --- a/test/unit/helpers_merge.js +++ b/test/unit/helpers_merge.js @@ -1,25 +1,25 @@ /* eslint-env mocha */ -const _ = require('lodash') const path = require('path') + +const _ = require('lodash') const should = require('should') const sinon = require('sinon') -const conflicts = require('../../core/utils/conflicts') const { Merge } = require('../../core/merge') const metadata = require('../../core/metadata') - -const configHelpers = require('../support/helpers/config') -const pouchHelpers = 
require('../support/helpers/pouch') +const conflicts = require('../../core/utils/conflicts') const Builders = require('../support/builders') const stubSide = require('../support/doubles/side') +const configHelpers = require('../support/helpers/config') +const pouchHelpers = require('../support/helpers/pouch') -describe('Merge Helpers', function () { +describe('Merge Helpers', function() { let builders before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('instanciate merge', function () { + beforeEach('instanciate merge', function() { this.side = 'local' this.merge = new Merge(this.pouch) this.merge.putFolderAsync = sinon.stub() @@ -37,22 +37,28 @@ describe('Merge Helpers', function () { return conflict.local }) }) - beforeEach('prepare builders', function () { + beforeEach('prepare builders', function() { builders = new Builders(this) }) afterEach('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) - describe('resolveConflict', function () { - it('does not change the original doc path', async function () { - const doc = builders.metadir().path('foo/bar').build() + describe('resolveConflict', function() { + it('does not change the original doc path', async function() { + const doc = builders + .metadir() + .path('foo/bar') + .build() await this.merge.resolveConflictAsync(this.side, doc) should(this.merge.local.resolveConflict).have.been.called() should(doc.path).eql(path.normalize('foo/bar')) }) - it('appends -conflict- and the date to the path', async function () { - const doc = builders.metadir().path('foo/bar').build() + it('appends -conflict- and the date to the path', async function() { + const doc = builders + .metadir() + .path('foo/bar') + .build() const dstDoc = await this.merge.resolveConflictAsync(this.side, doc) should(this.merge.local.resolveConflict).have.been.called() should(dstDoc.path) @@ -60,8 +66,11 @@ describe('Merge Helpers', function () { .and.match(conflicts.CONFLICT_REGEXP) }) - it('preserves the extension', async function () { - const doc = builders.metafile().path('foo/bar.jpg').build() + it('preserves the extension', async function() { + const doc = builders + .metafile() + .path('foo/bar.jpg') + .build() const dstDoc = await this.merge.resolveConflictAsync(this.side, doc) should(this.merge.local.resolveConflict).have.been.called() should(dstDoc.path) @@ -70,7 +79,7 @@ describe('Merge Helpers', function () { .and.endWith('.jpg') }) - it('do not chain conflicts', async function () { + it('do not chain conflicts', async function() { const doc = builders .metafile() .path('foo/baz-conflict-2018-11-08T01_02_03.004Z.jpg') diff --git a/test/unit/ignore.js b/test/unit/ignore.js index f5315c624..618572a42 100644 --- a/test/unit/ignore.js +++ b/test/unit/ignore.js @@ -2,16 +2,16 @@ const fs = require('fs') const path = require('path') + const should = require('should') const sinon = require('sinon') const { Ignore, loadSync } = require('../../core/ignore') const metadata = require('../../core/metadata') - -const { onPlatform } = require('../support/helpers/platform') const TmpDir = require('../support/helpers/TmpDir') +const { onPlatform } = require('../support/helpers/platform') -describe('Ignore', function () { +describe('Ignore', function() { describe('.loadSync()', () => { let tmpDir @@ -34,7 +34,7 @@ describe('Ignore', function () { }) describe('Removal of unnecessary lines', () => { - it('remove blank lines or comments', function () { + 
it('remove blank lines or comments', function() { const ignore = new Ignore([ 'foo', '', // removed @@ -47,26 +47,26 @@ describe('Ignore', function () { }) describe('Ignored patterns', () => { - it("don't ignore file name not matching to the pattern", function () { + it("don't ignore file name not matching to the pattern", function() { const ignore = new Ignore(['foo']) ignore .isIgnored({ relativePath: 'bar', isFolder: false }) .should.be.false() }) - it('ignore file name matching to the pattern', function () { + it('ignore file name matching to the pattern', function() { const ignore = new Ignore(['foo']) ignore .isIgnored({ relativePath: 'foo', isFolder: false }) .should.be.true() }) - it('ignore folder name matching to the pattern', function () { + it('ignore folder name matching to the pattern', function() { const ignore = new Ignore(['foo']) ignore.isIgnored({ relativePath: 'foo', isFolder: true }).should.be.true() }) - it("don't ignore file name when the pattern match folders", function () { + it("don't ignore file name when the pattern match folders", function() { const ignore = new Ignore(['foo/']) ignore .isIgnored({ relativePath: 'foo', isFolder: false }) @@ -76,14 +76,14 @@ describe('Ignore', function () { }) describe('Patterns operators', () => { - it('match to the glob with *', function () { + it('match to the glob with *', function() { const ignore = new Ignore(['*.txt']) ignore .isIgnored({ relativePath: 'foo.txt', isFolder: false }) .should.be.true() }) - it('match to the glob with ?', function () { + it('match to the glob with ?', function() { const ignore = new Ignore(['ba?']) ignore .isIgnored({ relativePath: 'bar', isFolder: false }) @@ -99,7 +99,7 @@ describe('Ignore', function () { .should.be.false() }) - it('match braces {p1,p2}', function () { + it('match braces {p1,p2}', function() { const ignore = new Ignore(['{bar,baz}.txt']) ignore .isIgnored({ relativePath: 'bar.txt', isFolder: false }) @@ -112,7 +112,7 @@ describe('Ignore', function () { .should.be.false() }) - it('match to the glob with range [a-c]', function () { + it('match to the glob with range [a-c]', function() { const ignore = new Ignore(['foo[a-c]']) ignore .isIgnored({ relativePath: 'fooa', isFolder: false }) @@ -130,7 +130,7 @@ describe('Ignore', function () { }) describe('Path patterns', () => { - it('ignore files in subdirectory', function () { + it('ignore files in subdirectory', function() { new Ignore(['foo']) .isIgnored({ relativePath: 'bar/foo', isFolder: false }) .should.be.true() @@ -139,7 +139,7 @@ describe('Ignore', function () { .should.be.false() }) - it('ignore files in a ignored directory', function () { + it('ignore files in a ignored directory', function() { new Ignore(['foo']) .isIgnored({ relativePath: 'foo/bar', isFolder: false }) .should.be.true() @@ -148,14 +148,14 @@ describe('Ignore', function () { .should.be.true() }) - it('ignore folders in a ignored directory', function () { + it('ignore folders in a ignored directory', function() { const ignore = new Ignore(['foo']) ignore .isIgnored({ relativePath: 'foo/bar', isFolder: true }) .should.be.true() }) - it('match leading slash pattern', function () { + it('match leading slash pattern', function() { const ignore = new Ignore(['/foo']) ignore.isIgnored({ relativePath: 'foo', isFolder: true }).should.be.true() ignore @@ -163,21 +163,21 @@ describe('Ignore', function () { .should.be.false() }) - it('match nested file with leading **', function () { + it('match nested file with leading **', function() { const ignore = new 
Ignore(['**/baz']) ignore .isIgnored({ relativePath: 'foo/bar/baz', isFolder: false }) .should.be.true() }) - it('match nested files with trailing **', function () { + it('match nested files with trailing **', function() { const ignore = new Ignore(['foo/**']) ignore .isIgnored({ relativePath: 'foo/bar/baz', isFolder: false }) .should.be.true() }) - it('match nested files with middle **', function () { + it('match nested files with middle **', function() { const ignore = new Ignore(['a/**/b']) ignore .isIgnored({ relativePath: 'a/foo/bar/b', isFolder: false }) @@ -187,7 +187,7 @@ describe('Ignore', function () { .should.be.true() }) - it("doen't match misnested file with middle **", function () { + it("doen't match misnested file with middle **", function() { const ignore = new Ignore(['a/**/b']) ignore .isIgnored({ relativePath: 'foo/a/b', isFolder: false }) @@ -196,14 +196,14 @@ describe('Ignore', function () { }) describe('Escaping', () => { - it('escapes the comment character', function () { + it('escapes the comment character', function() { const ignore = new Ignore(['\\#foo']) ignore .isIgnored({ relativePath: '#foo', isFolder: false }) .should.be.true() }) - it('escapes the negation character', function () { + it('escapes the negation character', function() { const ignore = new Ignore(['\\!foo']) ignore .isIgnored({ relativePath: '!foo', isFolder: false }) @@ -219,7 +219,7 @@ describe('Ignore', function () { .should.be.false() }) - it('can negate a previous rule', function () { + it('can negate a previous rule', function() { const ignore = new Ignore(['*.foo', '!bar.foo']) ignore .isIgnored({ relativePath: 'bar.foo', isFolder: false }) @@ -229,7 +229,7 @@ describe('Ignore', function () { .should.be.true() }) - it('can negate a more complex previous rules organization', function () { + it('can negate a more complex previous rules organization', function() { const ignore = new Ignore(['/*', '!/foo', '/foo/*', '!/foo/bar']) ignore .isIgnored({ relativePath: 'foo/bar', isFolder: false }) @@ -244,7 +244,7 @@ describe('Ignore', function () { }) describe('Default rules', () => { - it('has some defaults rules for dropbox', function () { + it('has some defaults rules for dropbox', function() { const ignore = new Ignore([]) ignore.addDefaultRules() ignore @@ -252,7 +252,7 @@ describe('Ignore', function () { .should.be.true() }) - it('has some defaults rules for editors', function () { + it('has some defaults rules for editors', function() { const ignore = new Ignore([]) ignore.addDefaultRules() ignore @@ -260,7 +260,7 @@ describe('Ignore', function () { .should.be.true() }) - it('has some defaults rules for OSes', function () { + it('has some defaults rules for OSes', function() { const ignore = new Ignore([]) ignore.addDefaultRules() ignore @@ -268,7 +268,7 @@ describe('Ignore', function () { .should.be.true() }) - it('does ignore Icon', function () { + it('does ignore Icon', function() { const ignore = new Ignore([]) ignore.addDefaultRules() ignore @@ -276,7 +276,7 @@ describe('Ignore', function () { .should.be.true() }) - it('does ignore any hidden file or directory', function () { + it('does ignore any hidden file or directory', function() { const ignore = new Ignore([]) ignore.addDefaultRules() ignore @@ -284,7 +284,7 @@ describe('Ignore', function () { .should.be.true() }) - it('ignores Microsoft Office temporary files', function () { + it('ignores Microsoft Office temporary files', function() { const ignore = new Ignore([]) ignore.addDefaultRules() ignore diff --git 
a/test/unit/incompatibilities/platform.js b/test/unit/incompatibilities/platform.js index cf4b8e573..da1cd6800 100644 --- a/test/unit/incompatibilities/platform.js +++ b/test/unit/incompatibilities/platform.js @@ -3,8 +3,10 @@ const should = require('should') const platformIncompatibilities = require('../../../core/incompatibilities/platform') -const { detectNameIncompatibilities, detectPathLengthIncompatibility } = - platformIncompatibilities +const { + detectNameIncompatibilities, + detectPathLengthIncompatibility +} = platformIncompatibilities describe('core/incompatibilities/platform', () => { describe('detectNameIncompatibilities', () => { @@ -150,9 +152,11 @@ describe('core/incompatibilities/platform', () => { it('is incompatible when dir name is longer than win.dirNameMaxBytes', () => { const name = 'x'.repeat(dirNameMaxBytes + 1) - should(detectNameIncompatibilities(name, 'folder', platform)).deepEqual( - [{ type: 'dirNameMaxBytes', name, dirNameMaxBytes, platform }] - ) + should( + detectNameIncompatibilities(name, 'folder', platform) + ).deepEqual([ + { type: 'dirNameMaxBytes', name, dirNameMaxBytes, platform } + ]) }) it('is incompatible when name contains any of win.reservedChars', () => { diff --git a/test/unit/local/channel_watcher/add_checksum.js b/test/unit/local/channel_watcher/add_checksum.js index e5cff2e59..6d07c4c6b 100644 --- a/test/unit/local/channel_watcher/add_checksum.js +++ b/test/unit/local/channel_watcher/add_checksum.js @@ -1,22 +1,22 @@ /* eslint-env mocha */ /* @flow */ -const should = require('should') -const sinon = require('sinon') const path = require('path') -const configHelpers = require('../../../support/helpers/config') -const { onPlatforms } = require('../../../support/helpers/platform') +const should = require('should') +const sinon = require('sinon') -const checksumer = require('../../../../core/local/checksumer') const addChecksum = require('../../../../core/local/channel_watcher/add_checksum') const Channel = require('../../../../core/local/channel_watcher/channel') +const checksumer = require('../../../../core/local/checksumer') +const configHelpers = require('../../../support/helpers/config') +const { onPlatforms } = require('../../../support/helpers/platform') onPlatforms(['linux', 'win32'], () => { describe('core/local/channel_watcher/add_checksum.loop()', () => { let dirpath, filepath, opts before(configHelpers.createConfig) - before(function () { + before(function() { dirpath = path.basename(__dirname) filepath = path.join(dirpath, path.basename(__filename)) @@ -37,7 +37,9 @@ onPlatforms(['linux', 'win32'], () => { channel.push(batch) const enhancedChannel = addChecksum.loop(channel, opts) const enhancedBatch = await enhancedChannel.pop() - should(enhancedBatch).be.an.Array().and.length(batch.length) + should(enhancedBatch) + .be.an.Array() + .and.length(batch.length) should.exist(enhancedBatch[0].md5sum) }) @@ -53,7 +55,9 @@ onPlatforms(['linux', 'win32'], () => { channel.push(batch) const enhancedChannel = addChecksum.loop(channel, opts) const enhancedBatch = await enhancedChannel.pop() - should(enhancedBatch).be.an.Array().and.length(batch.length) + should(enhancedBatch) + .be.an.Array() + .and.length(batch.length) should.not.exist(enhancedBatch[0].md5sum) }) @@ -70,7 +74,9 @@ onPlatforms(['linux', 'win32'], () => { channel.push(batch) const enhancedChannel = addChecksum.loop(channel, opts) const enhancedBatch = await enhancedChannel.pop() - should(enhancedBatch).be.an.Array().and.length(batch.length) + should(enhancedBatch) + 
.be.an.Array() + .and.length(batch.length) should(enhancedBatch[0]).have.property('md5sum', 'checksum') }) diff --git a/test/unit/local/channel_watcher/add_infos.js b/test/unit/local/channel_watcher/add_infos.js index 5a5dcd061..8431feae5 100644 --- a/test/unit/local/channel_watcher/add_infos.js +++ b/test/unit/local/channel_watcher/add_infos.js @@ -1,16 +1,16 @@ /* eslint-env mocha */ /* @flow */ -const should = require('should') const path = require('path') -const Builders = require('../../../support/builders') -const configHelpers = require('../../../support/helpers/config') -const pouchHelpers = require('../../../support/helpers/pouch') -const { onPlatforms } = require('../../../support/helpers/platform') +const should = require('should') const addInfos = require('../../../../core/local/channel_watcher/add_infos') const Channel = require('../../../../core/local/channel_watcher/channel') +const Builders = require('../../../support/builders') +const configHelpers = require('../../../support/helpers/config') +const { onPlatforms } = require('../../../support/helpers/platform') +const pouchHelpers = require('../../../support/helpers/pouch') onPlatforms(['linux', 'win32'], () => { describe('core/local/channel_watcher/add_infos.loop()', () => { @@ -20,10 +20,10 @@ onPlatforms(['linux', 'win32'], () => { before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('instanciate builders', async function () { + beforeEach('instanciate builders', async function() { builders = new Builders({ pouch: this.pouch }) }) - beforeEach('create step opts', async function () { + beforeEach('create step opts', async function() { this.config.syncPath = path.dirname(__dirname) opts = this filepath = path.basename(__filename) @@ -42,7 +42,9 @@ onPlatforms(['linux', 'win32'], () => { channel.push(batch) const enhancedChannel = addInfos.loop(channel, opts) const enhancedBatch = await enhancedChannel.pop() - should(enhancedBatch).be.an.Array().and.have.length(batch.length) + should(enhancedBatch) + .be.an.Array() + .and.have.length(batch.length) }) it('adds specific infos for specific events', async () => { @@ -81,8 +83,11 @@ onPlatforms(['linux', 'win32'], () => { const channel = new Channel() channel.push(batch) const enhancedChannel = addInfos.loop(channel, opts) - const [deletedEvent, ignoredEvent, ...otherEvents] = - await enhancedChannel.pop() + const [ + deletedEvent, + ignoredEvent, + ...otherEvents + ] = await enhancedChannel.pop() should(deletedEvent).eql({ action: batch[0].action, kind: 'directory', @@ -102,14 +107,19 @@ onPlatforms(['linux', 'win32'], () => { context('when deleted event kind is unknown', () => { context('and document exists in Pouch', () => { let file, dir - beforeEach('populate Pouch with documents', async function () { + beforeEach('populate Pouch with documents', async function() { file = await builders .metafile() .path('file') .ino(1) .upToDate() .create() - dir = await builders.metadir().path('dir').ino(2).upToDate().create() + dir = await builders + .metadir() + .path('dir') + .ino(2) + .upToDate() + .create() }) it('looks up existing document doctype from Pouch', async () => { @@ -188,7 +198,7 @@ onPlatforms(['linux', 'win32'], () => { 'when deleted document has different remote & synced path in Pouch', () => { let file, dir - beforeEach('populate Pouch with documents', async function () { + beforeEach('populate Pouch with documents', async function() { file = await builders .metafile() .path('file') @@ 
-200,7 +210,12 @@ onPlatforms(['linux', 'win32'], () => { .path('other-file') .changedSide('remote') .create() - dir = await builders.metadir().path('dir').ino(2).upToDate().create() + dir = await builders + .metadir() + .path('dir') + .ino(2) + .upToDate() + .create() await builders .metadir(dir) .path('other-dir') diff --git a/test/unit/local/channel_watcher/await_write_finish.js b/test/unit/local/channel_watcher/await_write_finish.js index d54c3c58f..18e401f41 100644 --- a/test/unit/local/channel_watcher/await_write_finish.js +++ b/test/unit/local/channel_watcher/await_write_finish.js @@ -4,11 +4,10 @@ const _ = require('lodash') const should = require('should') -const { onPlatforms } = require('../../../support/helpers/platform') - const awaitWriteFinish = require('../../../../core/local/channel_watcher/await_write_finish') const Channel = require('../../../../core/local/channel_watcher/channel') const Builders = require('../../../support/builders') +const { onPlatforms } = require('../../../support/helpers/platform') const lastEventToCheckEmptyness = { action: 'initial-scan-done', diff --git a/test/unit/local/channel_watcher/channel.js b/test/unit/local/channel_watcher/channel.js index 4c2e9eace..d4f074729 100644 --- a/test/unit/local/channel_watcher/channel.js +++ b/test/unit/local/channel_watcher/channel.js @@ -7,7 +7,6 @@ const should = require('should') const sinon = require('sinon') const Channel = require('../../../../core/local/channel_watcher/channel') - const Builders = require('../../../support/builders') const { onPlatforms } = require('../../../support/helpers/platform') @@ -21,7 +20,7 @@ import type { const builders = new Builders() onPlatforms(['linux', 'win32'], () => { - describe('core/local/channel_watcher/Channel', function () { + describe('core/local/channel_watcher/Channel', function() { this.timeout(100) describe('Basics', () => { diff --git a/test/unit/local/channel_watcher/dispatch.js b/test/unit/local/channel_watcher/dispatch.js index 5955e5457..dc3826b7a 100644 --- a/test/unit/local/channel_watcher/dispatch.js +++ b/test/unit/local/channel_watcher/dispatch.js @@ -9,20 +9,19 @@ type DispatchedCalls = { } */ +const { Promise } = require('bluebird') +const _ = require('lodash') const should = require('should') const sinon = require('sinon') -const _ = require('lodash') -const { Promise } = require('bluebird') +const Channel = require('../../../../core/local/channel_watcher/channel') +const dispatch = require('../../../../core/local/channel_watcher/dispatch') +const Prep = require('../../../../core/prep') +const SyncState = require('../../../../core/syncstate') const Builders = require('../../../support/builders') const configHelpers = require('../../../support/helpers/config') -const pouchHelpers = require('../../../support/helpers/pouch') const { onPlatforms } = require('../../../support/helpers/platform') - -const SyncState = require('../../../../core/syncstate') -const Prep = require('../../../../core/prep') -const Channel = require('../../../../core/local/channel_watcher/channel') -const dispatch = require('../../../../core/local/channel_watcher/dispatch') +const pouchHelpers = require('../../../support/helpers/pouch') function dispatchedCalls(obj /*: Stub */) /*: DispatchedCalls */ { const methods = Object.getOwnPropertyNames(obj).filter( @@ -51,7 +50,7 @@ function dispatchedCalls(obj /*: Stub */) /*: DispatchedCalls */ { } onPlatforms(['linux', 'win32'], () => { - describe('core/local/channel_watcher/dispatch.loop()', function () { + 
describe('core/local/channel_watcher/dispatch.loop()', function() { let builders let channel let events @@ -60,7 +59,7 @@ onPlatforms(['linux', 'win32'], () => { before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('populate pouch with documents', async function () { + beforeEach('populate pouch with documents', async function() { builders = new Builders({ pouch: this.pouch }) channel = new Channel() @@ -80,10 +79,15 @@ onPlatforms(['linux', 'win32'], () => { context('when channel contains an initial-scan-done event', () => { beforeEach(() => { - channel.push([builders.event().action('initial-scan-done').build()]) + channel.push([ + builders + .event() + .action('initial-scan-done') + .build() + ]) }) - it('emits an initial-scan-done event via the emitter', async function () { + it('emits an initial-scan-done event via the emitter', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(events)).containDeep({ @@ -91,7 +95,7 @@ onPlatforms(['linux', 'win32'], () => { }) }) - it('does not emit a sync-target event via the emitter', async function () { + it('does not emit a sync-target event via the emitter', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(events)).not.containDeep({ @@ -99,7 +103,7 @@ onPlatforms(['linux', 'win32'], () => { }) }) - it('does not call any Prep method', async function () { + it('does not call any Prep method', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({}) @@ -108,16 +112,21 @@ onPlatforms(['linux', 'win32'], () => { context('when channel contains an ignored event', () => { beforeEach(() => { - channel.push([builders.event().action('ignored').build()]) + channel.push([ + builders + .event() + .action('ignored') + .build() + ]) }) - it('does not call any Prep method', async function () { + it('does not call any Prep method', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({}) }) - it('does not emit a sync-target event via the emitter', async function () { + it('does not emit a sync-target event via the emitter', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(events)).not.containDeep({ @@ -130,16 +139,36 @@ onPlatforms(['linux', 'win32'], () => { let changeEvents beforeEach(() => { changeEvents = [ - builders.event().action('created').kind('file').build(), - builders.event().action('created').kind('file').build(), - builders.event().action('ignored').kind('file').build(), // No events for this one - builders.event().action('created').kind('file').build(), - builders.event().action('created').kind('file').build() + builders + .event() + .action('created') + .kind('file') + .build(), + builders + .event() + .action('created') + .kind('file') + .build(), + builders + .event() + .action('ignored') + .kind('file') + .build(), // No events for this one + builders + .event() + .action('created') + .kind('file') + .build(), + builders + .event() + .action('created') + .kind('file') + .build() ] channel.push(changeEvents) }) - it('emits sync-target events via the emitter', async function () { + it('emits sync-target events via the emitter', async function() { await dispatch.loop(channel, stepOptions).pop() // Make sure we emit exactly 4 sync-target events, one for each @@ -167,7 +196,7 @@ onPlatforms(['linux', 'win32'], () => { context('when channel contains 
multiple batches', () => { context('processed in less than a second', () => { - it('emits a local-start event for each batch via the emitter', async function () { + it('emits a local-start event for each batch via the emitter', async function() { const outChannel = dispatch.loop(channel, stepOptions) channel.push([builders.event().build()]) @@ -180,7 +209,7 @@ onPlatforms(['linux', 'win32'], () => { }) }) - it('emits only one local-end event via the emitter', async function () { + it('emits only one local-end event via the emitter', async function() { const outChannel = dispatch.loop(channel, stepOptions) channel.push([builders.event().build()]) @@ -202,7 +231,7 @@ onPlatforms(['linux', 'win32'], () => { }) context('processed in more than a second', () => { - it('emits a local-start event for each batch via the emitter', async function () { + it('emits a local-start event for each batch via the emitter', async function() { const outChannel = dispatch.loop(channel, stepOptions) channel.push([builders.event().build()]) @@ -219,7 +248,7 @@ onPlatforms(['linux', 'win32'], () => { }) }) - it('emits one local-end event for each batch via the emitter', async function () { + it('emits one local-end event for each batch via the emitter', async function() { const outChannel = dispatch.loop(channel, stepOptions) channel.push([builders.event().build()]) @@ -259,7 +288,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('triggers a call to addFileAsync with a file Metadata object', async function () { + it('triggers a call to addFileAsync with a file Metadata object', async function() { const doc = builders .metafile() .path(filePath) @@ -305,7 +334,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('triggers a call to putFolderAsync with a directory Metadata object', async function () { + it('triggers a call to putFolderAsync with a directory Metadata object', async function() { const doc = builders .metadir() .path(directoryPath) @@ -342,7 +371,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('triggers a call to addFileAsync with a file Metadata object', async function () { + it('triggers a call to addFileAsync with a file Metadata object', async function() { const doc = builders .metafile() .path(filePath) @@ -388,7 +417,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('triggers a call to putFolderAsync with a directory Metadata object', async function () { + it('triggers a call to putFolderAsync with a directory Metadata object', async function() { const doc = builders .metadir() .path(directoryPath) @@ -425,7 +454,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('triggers a call to updateFileAsync with a file Metadata object', async function () { + it('triggers a call to updateFileAsync with a file Metadata object', async function() { const doc = builders .metafile() .path(filePath) @@ -471,7 +500,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('triggers a call to putFolderAsync with a directory Metadata object', async function () { + it('triggers a call to putFolderAsync with a directory Metadata object', async function() { const doc = builders .metadir() .path(directoryPath) @@ -524,7 +553,7 @@ onPlatforms(['linux', 'win32'], () => { .create() }) - it('triggers a call to moveFileAsync with a file Metadata object', async function () { + it('triggers a call to moveFileAsync with a file Metadata object', async function() { const doc = builders .metafile() .path(newFilePath) @@ -563,7 +592,7 @@ onPlatforms(['linux', 'win32'], () => { .create() }) - it('does not 
call moveFileAsync', async function () { + it('does not call moveFileAsync', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({}) @@ -572,7 +601,7 @@ onPlatforms(['linux', 'win32'], () => { }) context('for a propagated remote move', () => { - beforeEach('build records for moved doc', async function () { + beforeEach('build records for moved doc', async function() { const src = await builders .metafile() .path(filePath) @@ -593,7 +622,7 @@ onPlatforms(['linux', 'win32'], () => { this.pouch.put(dst) }) - it('does not trigger any call to prep', async function () { + it('does not trigger any call to prep', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({}) @@ -649,7 +678,7 @@ onPlatforms(['linux', 'win32'], () => { .create() }) - it('triggers a call to moveFileAsync with an overwriting file Metadata object', async function () { + it('triggers a call to moveFileAsync with an overwriting file Metadata object', async function() { const doc = builders .metafile() .path(newFilePath) @@ -682,7 +711,7 @@ onPlatforms(['linux', 'win32'], () => { }) context('without existing documents at the event oldPath', () => { - it('triggers a call to addFileAsync with a file Metadata object', async function () { + it('triggers a call to addFileAsync with a file Metadata object', async function() { const doc = builders .metafile() .path(newFilePath) @@ -707,7 +736,7 @@ onPlatforms(['linux', 'win32'], () => { }) }) - it('removes the event oldPath', async function () { + it('removes the event oldPath', async function() { const batch = await dispatch.loop(channel, stepOptions).pop() should(batch).have.length(1) @@ -750,7 +779,7 @@ onPlatforms(['linux', 'win32'], () => { .create() }) - it('triggers a call to moveFolderAsync with a directory Metadata object', async function () { + it('triggers a call to moveFolderAsync with a directory Metadata object', async function() { const doc = builders .metadir() .path(newDirectoryPath) @@ -778,7 +807,7 @@ onPlatforms(['linux', 'win32'], () => { .create() }) - it('does not call moveFolderAsync', async function () { + it('does not call moveFolderAsync', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({}) @@ -787,7 +816,7 @@ onPlatforms(['linux', 'win32'], () => { }) context('without existing documents at the event oldPath', () => { - it('triggers a call to putFolderAsync with a directory Metadata object', async function () { + it('triggers a call to putFolderAsync with a directory Metadata object', async function() { const doc = builders .metadir() .path(newDirectoryPath) @@ -804,7 +833,7 @@ onPlatforms(['linux', 'win32'], () => { }) }) - it('removes the event oldPath', async function () { + it('removes the event oldPath', async function() { const batch = await dispatch.loop(channel, stepOptions).pop() should(batch).have.length(1) @@ -813,7 +842,7 @@ onPlatforms(['linux', 'win32'], () => { }) context('for a propagated remote move', () => { - beforeEach('build records for moved doc', async function () { + beforeEach('build records for moved doc', async function() { const src = await builders .metadir() .path(directoryPath) @@ -834,7 +863,7 @@ onPlatforms(['linux', 'win32'], () => { this.pouch.put(dst) }) - it('does not trigger any call to prep', async function () { + it('does not trigger any call to prep', async function() { await dispatch.loop(channel, stepOptions).pop() 
should(dispatchedCalls(prep)).deepEqual({}) @@ -869,7 +898,7 @@ onPlatforms(['linux', 'win32'], () => { .create() }) - it('triggers a call to trashFileAsync with the existing document', async function () { + it('triggers a call to trashFileAsync with the existing document', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({ @@ -879,7 +908,7 @@ onPlatforms(['linux', 'win32'], () => { }) context('without existing documents at the event path', () => { - it('ignores the event', async function () { + it('ignores the event', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({}) @@ -914,7 +943,7 @@ onPlatforms(['linux', 'win32'], () => { .create() }) - it('triggers a call to trashFolderAsync with the existing document', async function () { + it('triggers a call to trashFolderAsync with the existing document', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({ @@ -924,7 +953,7 @@ onPlatforms(['linux', 'win32'], () => { }) context('without existing documents at the event path', () => { - it('ignores the event', async function () { + it('ignores the event', async function() { await dispatch.loop(channel, stepOptions).pop() should(dispatchedCalls(prep)).deepEqual({}) diff --git a/test/unit/local/channel_watcher/filter_ignored.js b/test/unit/local/channel_watcher/filter_ignored.js index 9a8380f69..f1af258cc 100644 --- a/test/unit/local/channel_watcher/filter_ignored.js +++ b/test/unit/local/channel_watcher/filter_ignored.js @@ -1,19 +1,18 @@ /* eslint-env mocha */ /* @flow */ +const _ = require('lodash') const should = require('should') const sinon = require('sinon') -const _ = require('lodash') - -const { onPlatforms } = require('../../../support/helpers/platform') -const Builders = require('../../../support/builders') const { Ignore } = require('../../../../core/ignore') +const Channel = require('../../../../core/local/channel_watcher/channel') const { INITIAL_SCAN_DONE } = require('../../../../core/local/channel_watcher/event') -const Channel = require('../../../../core/local/channel_watcher/channel') const filterIgnored = require('../../../../core/local/channel_watcher/filter_ignored') +const Builders = require('../../../support/builders') +const { onPlatforms } = require('../../../support/helpers/platform') onPlatforms(['linux', 'win32'], () => { describe('core/local/channel_watcher/filter_ignored.loop()', () => { diff --git a/test/unit/local/channel_watcher/incomplete_fixer.js b/test/unit/local/channel_watcher/incomplete_fixer.js index b26cb4df5..02ad3dce2 100644 --- a/test/unit/local/channel_watcher/incomplete_fixer.js +++ b/test/unit/local/channel_watcher/incomplete_fixer.js @@ -1,20 +1,20 @@ /* eslint-env mocha */ /* @flow */ -const _ = require('lodash') const path = require('path') + +const _ = require('lodash') const should = require('should') const sinon = require('sinon') +const Channel = require('../../../../core/local/channel_watcher/channel') +const incompleteFixer = require('../../../../core/local/channel_watcher/incomplete_fixer') +const stater = require('../../../../core/local/stater') const Builders = require('../../../support/builders') -const { ContextDir } = require('../../../support/helpers/context_dir') const configHelpers = require('../../../support/helpers/config') -const pouchHelpers = require('../../../support/helpers/pouch') +const { ContextDir } = require('../../../support/helpers/context_dir') const 
{ onPlatforms } = require('../../../support/helpers/platform') - -const stater = require('../../../../core/local/stater') -const Channel = require('../../../../core/local/channel_watcher/channel') -const incompleteFixer = require('../../../../core/local/channel_watcher/incomplete_fixer') +const pouchHelpers = require('../../../support/helpers/pouch') const CHECKSUM = 'checksum' const checksumer = { @@ -33,7 +33,7 @@ onPlatforms(['linux', 'win32'], () => { before('create config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('create helpers', function () { + beforeEach('create helpers', function() { syncDir = new ContextDir(this.syncPath) builders = new Builders({ pouch: this.pouch }) @@ -41,13 +41,13 @@ onPlatforms(['linux', 'win32'], () => { opts = { config, checksumer, pouch, fatal: sinon.spy() } }) afterEach('clean pouch', pouchHelpers.cleanDatabase) - afterEach('clean files', function () { + afterEach('clean files', function() { syncDir.clean() }) after('cleanup config', configHelpers.cleanConfig) describe('.loop()', () => { - it('pushes the result of step() into the output Channel', async function () { + it('pushes the result of step() into the output Channel', async function() { const src = 'missing' const dst = path.basename(__filename) await syncDir.ensureFile(dst) @@ -82,7 +82,7 @@ onPlatforms(['linux', 'win32'], () => { describe('.step()', () => { context('without any complete "renamed" event', () => { - it('drops incomplete events', async function () { + it('drops incomplete events', async function() { const inputBatch = [ builders .event() @@ -102,7 +102,12 @@ onPlatforms(['linux', 'win32'], () => { .action('deleted') .path('foo3') .build(), - builders.event().incomplete().action('scan').path('foo4').build() + builders + .event() + .incomplete() + .action('scan') + .path('foo4') + .build() ] const incompletes = [] @@ -115,13 +120,22 @@ onPlatforms(['linux', 'win32'], () => { }) context('with a complete "renamed" event', () => { - it('leaves complete events untouched', async function () { + it('leaves complete events untouched', async function() { const src = 'file' const dst = 'foo' await syncDir.ensureFile(dst) const inputBatch = [ - builders.event().action('created').path(src).build(), - builders.event().action('renamed').oldPath(src).path(dst).build() + builders + .event() + .action('created') + .path(src) + .build(), + builders + .event() + .action('renamed') + .oldPath(src) + .path(dst) + .build() ] const incompletes = [] @@ -132,7 +146,7 @@ onPlatforms(['linux', 'win32'], () => { should(outputBatch).deepEqual(inputBatch) }) - it('rebuilds the all incomplete events matching the "renamed" event old path', async function () { + it('rebuilds the all incomplete events matching the "renamed" event old path', async function() { const { config } = this await syncDir.makeTree([ @@ -235,7 +249,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('drops incomplete ignored events matching the "renamed" event old path', async function () { + it('drops incomplete ignored events matching the "renamed" event old path', async function() { await syncDir.makeTree(['dst/', 'dst/file']) const ignoredEvent = builders .event() @@ -258,7 +272,7 @@ onPlatforms(['linux', 'win32'], () => { should(outputBatch).deepEqual([renamedEvent]) }) - it('replaces the completing event if its path is the same as the rebuilt one', async function () { + it('replaces the completing event if its path is the same as the rebuilt one', async function() { const 
{ config } = this const src = 'missing' @@ -300,7 +314,7 @@ onPlatforms(['linux', 'win32'], () => { }) describe('file renamed then deleted', () => { - it('is deleted at its original path', async function () { + it('is deleted at its original path', async function() { const src = 'src' const dst = 'dst' const renamedEvent = builders @@ -346,7 +360,7 @@ onPlatforms(['linux', 'win32'], () => { }) describe('file renamed twice', () => { - it('is renamed once as a whole', async function () { + it('is renamed once as a whole', async function() { const { config } = this const src = 'src' @@ -407,7 +421,7 @@ onPlatforms(['linux', 'win32'], () => { }) describe('file renamed three times', () => { - it('is renamed once as a whole', async function () { + it('is renamed once as a whole', async function() { const { config } = this const src = 'src' @@ -471,7 +485,7 @@ onPlatforms(['linux', 'win32'], () => { }) describe('file renamed and then renamed back to its previous name', () => { - it('results in no events at all', async function () { + it('results in no events at all', async function() { const src = 'src' const dst = 'dst' await syncDir.ensureFile(src) @@ -509,7 +523,7 @@ onPlatforms(['linux', 'win32'], () => { }) describe('file renamed to backup location and replaced by new file', () => { - it('is modified once and not deleted', async function () { + it('is modified once and not deleted', async function() { const src = 'src' const tmp = 'src.tmp' await syncDir.ensureFile(src) @@ -580,11 +594,15 @@ onPlatforms(['linux', 'win32'], () => { const src = 'src' const dst = 'dst' - beforeEach(async function () { - await builders.metafile().path(src).sides({ local: 1 }).create() + beforeEach(async function() { + await builders + .metafile() + .path(src) + .sides({ local: 1 }) + .create() }) - it('results in the renamed event', async function () { + it('results in the renamed event', async function() { await syncDir.ensureFile(dst) const createdEvent = builders .event() @@ -619,11 +637,15 @@ onPlatforms(['linux', 'win32'], () => { const src = 'src' const dst = 'dst' - beforeEach(async function () { - await builders.metafile().path(src).sides({ local: 1 }).create() + beforeEach(async function() { + await builders + .metafile() + .path(src) + .sides({ local: 1 }) + .create() }) - it('results in the renamed event followed by the rebuilt modified event', async function () { + it('results in the renamed event followed by the rebuilt modified event', async function() { const { config } = this await syncDir.ensureFile(dst) @@ -677,11 +699,15 @@ onPlatforms(['linux', 'win32'], () => { const dst1 = 'dst1' const dst2 = 'dst2' - beforeEach(async function () { - await builders.metafile().path(src).sides({ local: 1 }).create() + beforeEach(async function() { + await builders + .metafile() + .path(src) + .sides({ local: 1 }) + .create() }) - it('results in one renamed event followed by the rebuilt modified event', async function () { + it('results in one renamed event followed by the rebuilt modified event', async function() { const { config } = this await syncDir.ensureFile(dst2) diff --git a/test/unit/local/channel_watcher/index.js b/test/unit/local/channel_watcher/index.js index b679b43ec..e95652941 100644 --- a/test/unit/local/channel_watcher/index.js +++ b/test/unit/local/channel_watcher/index.js @@ -9,11 +9,10 @@ const { stepsInitialState } = require('../../../../core/local/channel_watcher') const initialDiff = require('../../../../core/local/channel_watcher/initial_diff') - const configHelpers = 
require('../../../support/helpers/config') -const pouchHelpers = require('../../../support/helpers/pouch') const TestHelpers = require('../../../support/helpers/index') const { onPlatforms } = require('../../../support/helpers/platform') +const pouchHelpers = require('../../../support/helpers/pouch') onPlatforms(['linux', 'win32'], () => { describe('core/local/channel_watcher/watcher', () => { @@ -24,7 +23,7 @@ onPlatforms(['linux', 'win32'], () => { after('clean config directory', configHelpers.cleanConfig) describe('.stepsInitialState()', () => { - it('includes initial diff state key', async function () { + it('includes initial diff state key', async function() { const state = {} const initialState = await stepsInitialState(state, this) should(state).have.property(initialDiff.STEP_NAME) @@ -34,12 +33,12 @@ onPlatforms(['linux', 'win32'], () => { describe('start', () => { let helpers - beforeEach('init helpers', async function () { + beforeEach('init helpers', async function() { helpers = TestHelpers.init(this) }) context('when producer.start() rejects', () => { - it('should reject with the same error', async function () { + it('should reject with the same error', async function() { const watcher = new ChannelWatcher({ ...helpers, config: this.config, diff --git a/test/unit/local/channel_watcher/initial_diff.js b/test/unit/local/channel_watcher/initial_diff.js index aaad761e4..6b8b15db0 100644 --- a/test/unit/local/channel_watcher/initial_diff.js +++ b/test/unit/local/channel_watcher/initial_diff.js @@ -1,19 +1,19 @@ /* eslint-env mocha */ /* @flow */ -const _ = require('lodash') const path = require('path') +const _ = require('lodash') const should = require('should') -const Builders = require('../../../support/builders') -const configHelpers = require('../../../support/helpers/config') -const pouchHelpers = require('../../../support/helpers/pouch') -const { onPlatforms } = require('../../../support/helpers/platform') -const { FOLDER } = require('../../../../core/metadata') const { WINDOWS_DATE_MIGRATION_FLAG } = require('../../../../core/config') const Channel = require('../../../../core/local/channel_watcher/channel') const initialDiff = require('../../../../core/local/channel_watcher/initial_diff') +const { FOLDER } = require('../../../../core/metadata') +const Builders = require('../../../support/builders') +const configHelpers = require('../../../support/helpers/config') +const { onPlatforms } = require('../../../support/helpers/platform') +const pouchHelpers = require('../../../support/helpers/pouch') const kind = doc => (doc.docType === FOLDER ? 
'directory' : 'file') @@ -25,14 +25,14 @@ onPlatforms(['linux', 'win32'], () => { before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('create builders', function () { + beforeEach('create builders', function() { builders = new Builders({ pouch: this.pouch }) }) afterEach('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) describe('.initialState()', () => { - it('returns initial state referenced by initial diff step name', async function () { + it('returns initial state referenced by initial diff step name', async function() { const foo = await builders .metadir() .path('foo') @@ -51,7 +51,11 @@ onPlatforms(['linux', 'win32'], () => { .ino(3) .sides({ local: 1 }) .create() - await builders.metafile().path('baz').sides({ remote: 1 }).create() + await builders + .metafile() + .path('baz') + .sides({ remote: 1 }) + .create() const state = await initialDiff.initialState(this) should(state).have.property(initialDiff.STEP_NAME, { @@ -69,13 +73,22 @@ onPlatforms(['linux', 'win32'], () => { }) describe('.clearState()', () => { - it('removes every item from all initialDiff state collections', function () { - const doc = builders.metadata().path('foo').ino(1).upToDate().build() + it('removes every item from all initialDiff state collections', function() { + const doc = builders + .metadata() + .path('foo') + .ino(1) + .upToDate() + .build() const waiting = [ { batch: [], nbCandidates: 0, timeout: setTimeout(() => {}, 0) } ] const renamedEvents = [ - builders.event().path('foo').oldPath('bar').build() + builders + .event() + .path('foo') + .oldPath('bar') + .build() ] const scannedPaths = new Set(['foo']) const byInode = new Map([[doc.fileid || doc.ino || '', doc]]) // Flow thinks doc.ino can be null @@ -109,7 +122,7 @@ onPlatforms(['linux', 'win32'], () => { const inputBatch = batch => channel.push(_.cloneDeep(batch)) - beforeEach(function () { + beforeEach(function() { channel = new Channel() initialScanDone = builders .event() @@ -119,8 +132,13 @@ onPlatforms(['linux', 'win32'], () => { .build() }) - it('forwards events untouched when initial scan is done', async function () { - await builders.metadir().path('foo').ino(1).upToDate().create() + it('forwards events untouched when initial scan is done', async function() { + await builders + .metadir() + .path('foo') + .ino(1) + .upToDate() + .create() const state = await initialDiff.initialState({ pouch: this.pouch }) initialDiff.clearState(state) @@ -161,7 +179,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('clears the state after initial-scan-done is received', async function () { + it('clears the state after initial-scan-done is received', async function() { const state = await initialDiff.initialState({ pouch: this.pouch }) const outChannel = initialDiff.loop(channel, { config: this.config, @@ -180,18 +198,32 @@ onPlatforms(['linux', 'win32'], () => { inputBatch([fooScan]) await outChannel.pop() - should(state.initialDiff).have.property('initialScanDone').be.false() + should(state.initialDiff) + .have.property('initialScanDone') + .be.false() // Send initial-scan-done inputBatch([initialScanDone]) await outChannel.pop() - should(state.initialDiff).have.property('initialScanDone').be.true() + should(state.initialDiff) + .have.property('initialScanDone') + .be.true() }) - it('detects documents moved while client was stopped', async function () { - await builders.metadir().path('foo').ino(1).upToDate().create() - 
await builders.metafile().path('fizz').ino(2).upToDate().create() + it('detects documents moved while client was stopped', async function() { + await builders + .metadir() + .path('foo') + .ino(1) + .upToDate() + .create() + await builders + .metafile() + .path('fizz') + .ino(2) + .upToDate() + .create() const state = await initialDiff.initialState({ pouch: this.pouch }) @@ -232,10 +264,25 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('detects documents moved while client is doing initial scan', async function () { - await builders.metadir().path('foo').ino(1).upToDate().create() - await builders.metafile().path('foo/baz').ino(2).upToDate().create() - await builders.metadir().path('bar').ino(3).upToDate().create() + it('detects documents moved while client is doing initial scan', async function() { + await builders + .metadir() + .path('foo') + .ino(1) + .upToDate() + .create() + await builders + .metafile() + .path('foo/baz') + .ino(2) + .upToDate() + .create() + await builders + .metadir() + .path('bar') + .ino(3) + .upToDate() + .create() const state = await initialDiff.initialState({ pouch: this.pouch }) @@ -292,11 +339,31 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('detects documents replaced by another one of a different kind while client was stopped', async function () { - await builders.metadir().path('foo').ino(1).upToDate().create() - await builders.metafile().path('bar').ino(2).upToDate().create() - await builders.metadir().path('fizz').ino(3).upToDate().create() - await builders.metafile().path('buzz').ino(4).upToDate().create() + it('detects documents replaced by another one of a different kind while client was stopped', async function() { + await builders + .metadir() + .path('foo') + .ino(1) + .upToDate() + .create() + await builders + .metafile() + .path('bar') + .ino(2) + .upToDate() + .create() + await builders + .metadir() + .path('fizz') + .ino(3) + .upToDate() + .create() + await builders + .metafile() + .path('buzz') + .ino(4) + .upToDate() + .create() const state = await initialDiff.initialState({ pouch: this.pouch }) @@ -339,9 +406,19 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('detects documents replaced by another one with a different ino while client was stopped', async function () { - await builders.metadir().path('foo').ino(1).upToDate().create() - await builders.metafile().path('bar').ino(2).upToDate().create() + it('detects documents replaced by another one with a different ino while client was stopped', async function() { + await builders + .metadir() + .path('foo') + .ino(1) + .upToDate() + .create() + await builders + .metafile() + .path('bar') + .ino(2) + .upToDate() + .create() const state = await initialDiff.initialState({ pouch: this.pouch }) @@ -374,9 +451,19 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('detects documents replaced by another one of a different kind with the same ino while client was stopped', async function () { - await builders.metadir().path('foo').ino(1).upToDate().create() - await builders.metafile().path('bar').ino(2).upToDate().create() + it('detects documents replaced by another one of a different kind with the same ino while client was stopped', async function() { + await builders + .metadir() + .path('foo') + .ino(1) + .upToDate() + .create() + await builders + .metafile() + .path('bar') + .ino(2) + .upToDate() + .create() const state = await initialDiff.initialState({ pouch: this.pouch }) @@ -409,7 +496,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('detects documents removed 
while client was stopped', async function () { + it('detects documents removed while client was stopped', async function() { const foo = await builders .metadir() .path('foo') @@ -460,7 +547,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('reuses the checksum of untouched files', async function () { + it('reuses the checksum of untouched files', async function() { const stillEmptyFile = await builders .metafile() .path('stillEmptyFile') @@ -511,7 +598,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('does not try to reuse the checksum of a directory', async function () { + it('does not try to reuse the checksum of a directory', async function() { const dir = await builders .metadir() .path('dir') @@ -536,7 +623,7 @@ onPlatforms(['linux', 'win32'], () => { should(events).deepEqual([dirScan, initialScanDone]) }) - it('does not reuse the checksum of modified files', async function () { + it('does not reuse the checksum of modified files', async function() { const updatedContent = await builders .metafile() .path('updatedContent') @@ -564,14 +651,14 @@ onPlatforms(['linux', 'win32'], () => { }) context('when WINDOWS_DATE_MIGRATION_FLAG is active', () => { - before(function () { + before(function() { this.config.setFlag(WINDOWS_DATE_MIGRATION_FLAG, true) }) - after(function () { + after(function() { this.config.setFlag(WINDOWS_DATE_MIGRATION_FLAG, false) }) - it('reuses the checksum of untouched files with a same second modification date', async function () { + it('reuses the checksum of untouched files with a same second modification date', async function() { const emptyFileUpdateDate = new Date() const stillEmptyFile = await builders .metafile() @@ -628,7 +715,7 @@ onPlatforms(['linux', 'win32'], () => { }) context('when WINDOWS_DATE_MIGRATION_FLAG is inactive', () => { - it('does not reuse the checksum of untouched files with a same second modification date', async function () { + it('does not reuse the checksum of untouched files with a same second modification date', async function() { const updatedContentUpdateDate = new Date() const updatedContent = await builders .metafile() @@ -657,8 +744,13 @@ onPlatforms(['linux', 'win32'], () => { }) }) - it('ignores events for unapplied moves', async function () { - const wasDir = builders.metadir().path('foo').ino(1).upToDate().build() + it('ignores events for unapplied moves', async function() { + const wasDir = builders + .metadir() + .path('foo') + .ino(1) + .upToDate() + .build() await builders .metadir() .moveFrom(wasDir) @@ -715,9 +807,19 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('fixes renamed after parent renamed', async function () { - await builders.metadir().path('parent').ino(1).upToDate().create() - await builders.metadir().path('parent/foo').ino(2).upToDate().create() + it('fixes renamed after parent renamed', async function() { + await builders + .metadir() + .path('parent') + .ino(1) + .upToDate() + .create() + await builders + .metadir() + .path('parent/foo') + .ino(2) + .upToDate() + .create() await builders .metadir() .path('parent/foo/bar') @@ -789,9 +891,19 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('fixes deleted after parent renamed', async function () { - await builders.metadir().path('parent').ino(1).upToDate().create() - await builders.metadir().path('parent/foo').ino(2).upToDate().create() + it('fixes deleted after parent renamed', async function() { + await builders + .metadir() + .path('parent') + .ino(1) + .upToDate() + .create() + await builders + .metadir() + .path('parent/foo') 
+ .ino(2) + .upToDate() + .create() const missingDoc = await builders .metadir() .path('parent/foo/bar') @@ -864,8 +976,13 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('does not swallow possible changes on move descendants', async function () { - await builders.metadir().path('parent').ino(1).upToDate().create() + it('does not swallow possible changes on move descendants', async function() { + await builders + .metadir() + .path('parent') + .ino(1) + .upToDate() + .create() await builders .metafile() .path('parent/foo') @@ -920,8 +1037,13 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('does not delete replaced file after parent move', async function () { - await builders.metadir().path('parent').ino(1).upToDate().create() + it('does not delete replaced file after parent move', async function() { + await builders + .metadir() + .path('parent') + .ino(1) + .upToDate() + .create() await builders .metafile() .path('parent/foo') @@ -965,7 +1087,7 @@ onPlatforms(['linux', 'win32'], () => { ]) }) - it('does not delete unsynced remote additions', async function () { + it('does not delete unsynced remote additions', async function() { await builders .metadir() .path('dir') diff --git a/test/unit/local/channel_watcher/overwrite.js b/test/unit/local/channel_watcher/overwrite.js index 3fcf82a44..3b7cd8179 100644 --- a/test/unit/local/channel_watcher/overwrite.js +++ b/test/unit/local/channel_watcher/overwrite.js @@ -6,7 +6,6 @@ const should = require('should') const Channel = require('../../../../core/local/channel_watcher/channel') const overwrite = require('../../../../core/local/channel_watcher/overwrite') - const Builders = require('../../../support/builders') const { onPlatforms } = require('../../../support/helpers/platform') @@ -207,7 +206,11 @@ onPlatforms(['linux', 'win32'], () => { it(`forwards ${action} ${kind} (${ oldPath ? 
oldPath + ' -> ' : '' }${path}) after .DELAY`, async () => { - let event = builders.event().action(action).kind(kind).path(path) + let event = builders + .event() + .action(action) + .kind(kind) + .path(path) if (oldPath) event.oldPath(oldPath) const batch = [event.build()] diff --git a/test/unit/local/channel_watcher/parcel_producer.js b/test/unit/local/channel_watcher/parcel_producer.js index 52c9c70d1..b442d1ca3 100644 --- a/test/unit/local/channel_watcher/parcel_producer.js +++ b/test/unit/local/channel_watcher/parcel_producer.js @@ -1,15 +1,15 @@ /* eslint-env mocha */ /* @flow */ +const EventEmitter = require('events') +const path = require('path') + const _ = require('lodash') const should = require('should') -const path = require('path') -const Producer = require('../../../../core/local/channel_watcher/parcel_producer') const { Ignore } = require('../../../../core/ignore') +const Producer = require('../../../../core/local/channel_watcher/parcel_producer') const stater = require('../../../../core/local/stater') -const EventEmitter = require('events') - const configHelpers = require('../../../support/helpers/config') const { ContextDir } = require('../../../support/helpers/context_dir') const { onPlatforms } = require('../../../support/helpers/platform') @@ -23,7 +23,7 @@ onPlatforms(['linux', 'win32'], () => { let producer beforeEach('instanciate config', configHelpers.createConfig) - beforeEach(function () { + beforeEach(function() { config = this.config syncDir = new ContextDir(config.syncPath) ignore = new Ignore([]) @@ -35,13 +35,13 @@ onPlatforms(['linux', 'win32'], () => { context('on readdir error on dir', () => { beforeEach( 'create content with missing read permission', - async function () { + async function() { await syncDir.makeTree(['dirA/fileA', 'dirB/fileB', 'dirC/fileC']) await syncDir.chmod('dirB', 0o220) } ) - it('should not reject', async function () { + it('should not reject', async function() { await should(producer.start()).be.fulfilled() await producer.stop() }) diff --git a/test/unit/local/channel_watcher/scan_folder.js b/test/unit/local/channel_watcher/scan_folder.js index 91dff53ba..07b8a3c71 100644 --- a/test/unit/local/channel_watcher/scan_folder.js +++ b/test/unit/local/channel_watcher/scan_folder.js @@ -4,9 +4,8 @@ const should = require('should') const sinon = require('sinon') -const scanFolder = require('../../../../core/local/channel_watcher/scan_folder') const Channel = require('../../../../core/local/channel_watcher/channel') - +const scanFolder = require('../../../../core/local/channel_watcher/scan_folder') const { onPlatforms } = require('../../../support/helpers/platform') const setup = batch => { diff --git a/test/unit/local/channel_watcher/win_identical_renaming.js b/test/unit/local/channel_watcher/win_identical_renaming.js index e62c4c154..f8a157594 100644 --- a/test/unit/local/channel_watcher/win_identical_renaming.js +++ b/test/unit/local/channel_watcher/win_identical_renaming.js @@ -7,7 +7,6 @@ const should = require('should') const Channel = require('../../../../core/local/channel_watcher/channel') const winIdenticalRenaming = require('../../../../core/local/channel_watcher/win_identical_renaming') const metadata = require('../../../../core/metadata') - const Builders = require('../../../support/builders') /*:: @@ -25,8 +24,14 @@ if (process.platform === 'win32') { beforeEach(() => { builders = new Builders() const docs = { - DIR: builders.metadir().path('dir').build(), - FILE: builders.metafile().path('file').build() + DIR: builders + 
.metadir() + .path('dir') + .build(), + FILE: builders + .metafile() + .path('file') + .build() } inputChannel = new Channel() outputChannel = winIdenticalRenaming.loop(inputChannel, { @@ -179,7 +184,11 @@ if (process.platform === 'win32') { ] const buildEvent = ({ action, kind, path, oldPath }) => { - let event = builders.event().action(action).kind(kind).path(path) + let event = builders + .event() + .action(action) + .kind(kind) + .path(path) if (oldPath) event.oldPath(oldPath) return event.build() } diff --git a/test/unit/local/checksumer.js b/test/unit/local/checksumer.js index c68dca385..404574b4c 100644 --- a/test/unit/local/checksumer.js +++ b/test/unit/local/checksumer.js @@ -2,9 +2,10 @@ /* @flow */ const fs = require('fs') +const { Readable } = require('stream') + const should = require('should') const sinon = require('sinon') -const { Readable } = require('stream') const { init } = require('../../../core/local/checksumer') @@ -62,7 +63,7 @@ describe('local/checksumer', () => { ).be.fulfilledWith('+HBGS7uN4XdB0blqLv5tFQ==') }) - it.skip('fails on successive errors', async function () { + it.skip('fails on successive errors', async function() { this.timeout(60000) createReadStream.callsFake(() => { return busyStream() diff --git a/test/unit/local/chokidar/analysis.js b/test/unit/local/chokidar/analysis.js index d6eb37be8..401ae2ada 100644 --- a/test/unit/local/chokidar/analysis.js +++ b/test/unit/local/chokidar/analysis.js @@ -1,11 +1,11 @@ /* eslint-env mocha */ +const path = require('path') + const _ = require('lodash') const should = require('should') -const path = require('path') const analysis = require('../../../../core/local/chokidar/analysis') - const Builders = require('../../../support/builders') const { onPlatform } = require('../../../support/helpers/platform') @@ -16,7 +16,7 @@ import type { Metadata } from '../../../../core/metadata' */ onPlatform('darwin', () => { - describe('core/local/chokidar/analysis', function () { + describe('core/local/chokidar/analysis', function() { const sideName = 'local' const builders = new Builders() @@ -353,7 +353,10 @@ onPlatform('darwin', () => { describe('FileMove(src => dst)', () => { describe('unlink(src) + add(dst)', () => { it('is the most common case', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const stats = { ino: 1 } const { md5sum } = old const events /*: LocalEvent[] */ = [ @@ -384,7 +387,10 @@ onPlatform('darwin', () => { describe('unlinkDir(src) + add(dst)', () => { it('is a chokidar bug', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const stats = { ino: 1 } const { md5sum } = old const events /*: LocalEvent[] */ = [ @@ -413,7 +419,10 @@ onPlatform('darwin', () => { describe('add(tmp) + unlink(src) + add(dst) + flush + unlink(tmp)', () => { it('is already complete on first flush', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const stats = { ino: 1 } const { md5sum } = old const events /*: LocalEvent[] */ = [ @@ -456,7 +465,10 @@ onPlatform('darwin', () => { describe('unlink(src) + add(tmp) + dropped unlink(tmp) + wip add(dst)', () => { it('is incomplete', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const stats = { 
ino: 1 } const events /*: LocalEvent[] */ = [ { type: 'unlink', path: 'src', old }, @@ -489,7 +501,10 @@ onPlatform('darwin', () => { describe('unlink(src) + wip add(tmp) + add(dst)', () => { it('is complete', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const stats = { ino: 1 } const { md5sum } = old const events /*: LocalEvent[] */ = [ @@ -628,7 +643,10 @@ onPlatform('darwin', () => { describe('FileMove.update(src => dst)', () => { describe('unlink(src) + add(dst) + change(dst)', () => { it('happens when there is sufficient delay betwen move & change', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const addStats = { ino: old.ino, mtime: new Date(old.local.updated_at) @@ -672,7 +690,10 @@ onPlatform('darwin', () => { describe('unlink(src, ino=1) + add(dst, ino=1) + change(dst, ino=2)', () => { it('does not include the change into the move', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const addStats = { ino: old.ino, mtime: new Date(old.local.updated_at) @@ -718,7 +739,10 @@ onPlatform('darwin', () => { describe('unlink(src) + add(dst) with different md5sum but same update date', () => { it('does not mark the move as an update', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const stats = { ino: old.ino, mtime: new Date(old.local.updated_at) } const events /*: LocalEvent[] */ = [ { type: 'unlink', path: 'src', old }, @@ -748,7 +772,10 @@ onPlatform('darwin', () => { describe('unlink(src) + add(dst) with different md5sum and update date', () => { it('marks the move as an update', () => { - const old /*: Metadata */ = builders.metafile().ino(1).build() + const old /*: Metadata */ = builders + .metafile() + .ino(1) + .build() const stats = { ino: old.ino, mtime: new Date(new Date(old.local.updated_at).getTime() + 1000) @@ -832,7 +859,11 @@ onPlatform('darwin', () => { it('is a FileUpdate(a) not to be confused with', () => { const partiallyAddedPath = 'partially-added-file' const changedPath = 'changed-file' - const old = builders.metafile().path(changedPath).ino(111).build() + const old = builders + .metafile() + .path(changedPath) + .ino(111) + .build() const ino = 222 const md5sum = 'changedSum' const events /*: LocalEvent[] */ = [ @@ -1039,7 +1070,10 @@ onPlatform('darwin', () => { describe('DirMove(src => dst)', () => { describe('unlinkDir(src) + addDir(dst)', () => { it('is the most common case', () => { - const old /*: Metadata */ = builders.metadir().ino(1).build() + const old /*: Metadata */ = builders + .metadir() + .ino(1) + .build() const stats = { ino: 1 } const events /*: LocalEvent[] */ = [ { type: 'unlinkDir', path: 'src', old }, @@ -1068,7 +1102,10 @@ onPlatform('darwin', () => { describe('addDir(dst) + unlinkDir(src)', () => { it('may happen with this reversed order on some platforms', () => { - const old /*: Metadata */ = builders.metadir().ino(1).build() + const old /*: Metadata */ = builders + .metadir() + .ino(1) + .build() const stats = { ino: 1 } const events /*: LocalEvent[] */ = [ { type: 'addDir', path: 'dst', stats }, @@ -1097,7 +1134,10 @@ onPlatform('darwin', () => { describe('unlinkDir(src) + wip addDir(tmp) + addDir(dst)', () => { it('ignores the intermediate 
move', () => { - const old /*: Metadata */ = builders.metadir().ino(1).build() + const old /*: Metadata */ = builders + .metadir() + .ino(1) + .build() const stats = { ino: 1 } const events /*: LocalEvent[] */ = [ { type: 'unlinkDir', path: 'src', old }, @@ -1128,7 +1168,10 @@ onPlatform('darwin', () => { describe('unlinkDir(src) + addDir(tmp) + wip addDir(dst)', () => { it('is incomplete, waiting for an upcoming unlinkDir(tmp)', () => { - const old /*: Metadata */ = builders.metadir().ino(1).build() + const old /*: Metadata */ = builders + .metadir() + .ino(1) + .build() const stats = { ino: 1 } const events /*: LocalEvent[] */ = [ { type: 'unlinkDir', path: 'src', old }, @@ -1903,9 +1946,17 @@ onPlatform('darwin', () => { describe('Move squashing', () => { it('move into moved folder', () => { const dirStats = { ino: 1 } - const dir = builders.metadir().path('src/dir').ino(dirStats.ino).build() + const dir = builders + .metadir() + .path('src/dir') + .ino(dirStats.ino) + .build() const fileStats = { ino: 2 } - const file = builders.metafile().path('file').ino(fileStats.ino).build() + const file = builders + .metafile() + .path('file') + .ino(fileStats.ino) + .build() const events /*: LocalEvent[] */ = [ { type: 'unlinkDir', path: dir.path, old: dir }, @@ -1934,7 +1985,11 @@ onPlatform('darwin', () => { it('child move', () => { const dirStats = { ino: 1 } - const dir = builders.metadir().path('src/dir').ino(dirStats.ino).build() + const dir = builders + .metadir() + .path('src/dir') + .ino(dirStats.ino) + .build() const fileStats = { ino: 2 } const file = builders .metafile() @@ -1966,7 +2021,11 @@ onPlatform('darwin', () => { it('child moved out of moved folder', () => { const dirStats = { ino: 1 } - const dir = builders.metadir().path('src/dir').ino(dirStats.ino).build() + const dir = builders + .metadir() + .path('src/dir') + .ino(dirStats.ino) + .build() const fileStats = { ino: 2 } const file = builders .metafile() @@ -2001,7 +2060,11 @@ onPlatform('darwin', () => { it('child moved within moved dir', () => { const dirStats = { ino: 1 } - const dir = builders.metadir().path('src/dir').ino(dirStats.ino).build() + const dir = builders + .metadir() + .path('src/dir') + .ino(dirStats.ino) + .build() const fileStats = { ino: 2 } const file = builders .metafile() @@ -2166,13 +2229,25 @@ onPlatform('darwin', () => { const otherFileStats = { ino: 4 } const otherDirStats = { ino: 5 } const dirMetadata /*: Metadata */ = normalizer( - builders.metadir().path('src').ino(dirStats.ino).build() + builders + .metadir() + .path('src') + .ino(dirStats.ino) + .build() ) const subdirMetadata /*: Metadata */ = normalizer( - builders.metadir().path('src/subdir').ino(subdirStats.ino).build() + builders + .metadir() + .path('src/subdir') + .ino(subdirStats.ino) + .build() ) const fileMetadata /*: Metadata */ = normalizer( - builders.metafile().path('src/file').ino(fileStats.ino).build() + builders + .metafile() + .path('src/file') + .ino(fileStats.ino) + .build() ) const otherFileMetadata /*: Metadata */ = normalizer( builders diff --git a/test/unit/local/chokidar/initial_scan.js b/test/unit/local/chokidar/initial_scan.js index 9e9c068da..dd00f7331 100644 --- a/test/unit/local/chokidar/initial_scan.js +++ b/test/unit/local/chokidar/initial_scan.js @@ -6,11 +6,10 @@ const { detectOfflineUnlinkEvents } = require('../../../../core/local/chokidar/initial_scan') const metadata = require('../../../../core/metadata') - const Builders = require('../../../support/builders') const configHelpers = 
require('../../../support/helpers/config') -const pouchHelpers = require('../../../support/helpers/pouch') const { onPlatform } = require('../../../support/helpers/platform') +const pouchHelpers = require('../../../support/helpers/pouch') const { platform } = process @@ -21,17 +20,21 @@ onPlatform('darwin', () => { before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('set up builders', function () { + beforeEach('set up builders', function() { builders = new Builders({ pouch: this.pouch }) }) afterEach('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) - describe('.detectOfflineUnlinkEvents()', function () { - it('detects deleted files and folders', async function () { + describe('.detectOfflineUnlinkEvents()', function() { + it('detects deleted files and folders', async function() { // Folder still exists - await builders.metadir().path('folder1').upToDate().create() + await builders + .metadir() + .path('folder1') + .upToDate() + .create() // Folder does not exist anymore const folder2 = await builders .metadir() @@ -65,7 +68,11 @@ onPlatform('darwin', () => { .changedSide('remote') .create() // File still exists - builders.metafile().path('file1').upToDate().create() + builders + .metafile() + .path('file1') + .upToDate() + .create() // File does not exist anymore const file2 = await builders .metafile() @@ -114,8 +121,11 @@ onPlatform('darwin', () => { }) if (platform === 'win32') { - it('ignores incompatible docs', async function () { - await builders.metafile().incompatible().create() + it('ignores incompatible docs', async function() { + await builders + .metafile() + .incompatible() + .create() const initialScan = { ids: [] } const { offlineEvents } = await detectOfflineUnlinkEvents( @@ -127,8 +137,13 @@ onPlatform('darwin', () => { } }) - it('does not detect unsynced remote additions as deleted docs', async function () { - await builders.metadir().path('dir').ino(1).sides({ remote: 1 }).create() + it('does not detect unsynced remote additions as deleted docs', async function() { + await builders + .metadir() + .path('dir') + .ino(1) + .sides({ remote: 1 }) + .create() await builders .metafile() .path('file') diff --git a/test/unit/local/chokidar/normalize_paths.js b/test/unit/local/chokidar/normalize_paths.js index 10ff4d8aa..8cf77cf4f 100644 --- a/test/unit/local/chokidar/normalize_paths.js +++ b/test/unit/local/chokidar/normalize_paths.js @@ -1,14 +1,14 @@ /* eslint-env mocha */ -const should = require('should') const path = require('path') -const normalizePaths = require('../../../../core/local/chokidar/normalize_paths') +const should = require('should') +const normalizePaths = require('../../../../core/local/chokidar/normalize_paths') const Builders = require('../../../support/builders') const configHelpers = require('../../../support/helpers/config') -const pouchHelpers = require('../../../support/helpers/pouch') const { onPlatform } = require('../../../support/helpers/platform') +const pouchHelpers = require('../../../support/helpers/pouch') const stepOptions = self => ({ pouch: self.pouch @@ -21,7 +21,7 @@ onPlatform('darwin', () => { before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('set up builders', function () { + beforeEach('set up builders', function() { builders = new Builders({ pouch: this.pouch }) }) @@ -34,7 +34,7 @@ onPlatform('darwin', () => { 
context('when parent is saved with NFC encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(dirPath.normalize('NFC')) @@ -42,7 +42,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing parent path', async function () { + it('reuses the existing parent path', async function() { const changes = [ { type: 'FileAddition', @@ -58,7 +58,7 @@ onPlatform('darwin', () => { }) context('when parent is saved with NFD encoded path in Pouch', () => { - beforeEach(async function () { + beforeEach(async function() { await builders .metadir() .path(dirPath.normalize('NFD')) @@ -66,7 +66,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new path', async function () { + it('does not normalize the new path', async function() { const changes = [ { type: 'FileAddition', @@ -89,7 +89,7 @@ onPlatform('darwin', () => { const dirPath = (dirFirst + dirSecond).normalize('NFD') let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(dirFirst.normalize('NFD') + dirSecond.normalize('NFC')) @@ -97,7 +97,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing parent path', async function () { + it('reuses the existing parent path', async function() { const changes = [ { type: 'FileAddition', @@ -123,7 +123,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFC encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(dirPath.normalize('NFC')) @@ -133,7 +133,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFC'))) @@ -142,7 +142,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file path', async function () { + it('reuses the existing file path', async function() { const changes = [ { type: 'FileUpdate', @@ -177,7 +177,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFD'))) @@ -186,7 +186,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file path', async function () { + it('reuses the existing file path', async function() { const changes = [ { type: 'FileUpdate', @@ -222,7 +222,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFD encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(dirPath.normalize('NFD')) @@ -232,7 +232,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFC'))) @@ -241,7 +241,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file path', async function () { + it('reuses the existing file path', async function() { const changes = [ { type: 'FileUpdate', @@ -276,7 +276,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + 
beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFD'))) @@ -285,7 +285,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file path', async function () { + it('reuses the existing file path', async function() { const changes = [ { type: 'FileUpdate', @@ -327,7 +327,7 @@ onPlatform('darwin', () => { const dirPath = (dirFirst + dirSecond).normalize('NFD') let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(dirFirst.normalize('NFD') + dirSecond.normalize('NFC')) @@ -337,7 +337,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFC'))) @@ -346,7 +346,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file path', async function () { + it('reuses the existing file path', async function() { const changes = [ { type: 'FileUpdate', @@ -381,7 +381,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFD'))) @@ -390,7 +390,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file path', async function () { + it('reuses the existing file path', async function() { const changes = [ { type: 'FileUpdate', @@ -433,7 +433,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFC encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(srcDirPath.normalize('NFC')) @@ -443,7 +443,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFC'))) @@ -452,7 +452,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -486,7 +486,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFD'))) @@ -495,7 +495,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -530,7 +530,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFD encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(srcDirPath.normalize('NFD')) @@ -540,7 +540,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFC'))) @@ -549,7 +549,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async 
function() { const changes = [ { type: 'DirMove', @@ -583,7 +583,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFD'))) @@ -592,7 +592,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -633,13 +633,17 @@ onPlatform('darwin', () => { const dstDirPath = 'Énoncés / corrigés'.normalize('NFD') let dir - beforeEach(async function () { - dir = await builders.metadir().path(dirPath).upToDate().create() + beforeEach(async function() { + dir = await builders + .metadir() + .path(dirPath) + .upToDate() + .create() }) context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFC'))) @@ -648,7 +652,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -682,7 +686,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, filename.normalize('NFD'))) @@ -691,7 +695,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -733,7 +737,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFC encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(dirPath.normalize('NFC')) @@ -743,7 +747,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFC'))) @@ -752,7 +756,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing parent path', async function () { + it('reuses the existing parent path', async function() { const changes = [ { type: 'FileMove', @@ -775,7 +779,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFD'))) @@ -784,7 +788,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing parent path', async function () { + it('reuses the existing parent path', async function() { const changes = [ { type: 'FileMove', @@ -808,7 +812,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFD encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(dirPath.normalize('NFD')) @@ -818,7 +822,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders 
.metafile() .path(path.join(dir.path, srcFilename.normalize('NFC'))) @@ -827,7 +831,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing parent path', async function () { + it('reuses the existing parent path', async function() { const changes = [ { type: 'FileMove', @@ -850,7 +854,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFD'))) @@ -859,7 +863,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new paths', async function () { + it('does not normalize the new paths', async function() { const changes = [ { type: 'FileMove', @@ -889,7 +893,7 @@ onPlatform('darwin', () => { const dirPath = (dirFirst + dirSecond).normalize('NFD') let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(dirFirst.normalize('NFD') + dirSecond.normalize('NFC')) @@ -899,7 +903,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFC'))) @@ -908,7 +912,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing parent path', async function () { + it('reuses the existing parent path', async function() { const changes = [ { type: 'FileMove', @@ -931,7 +935,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFD'))) @@ -940,7 +944,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing parent path', async function () { + it('reuses the existing parent path', async function() { const changes = [ { type: 'FileMove', @@ -972,7 +976,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFC encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(srcDirPath.normalize('NFC')) @@ -982,7 +986,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFC'))) @@ -991,7 +995,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new paths', async function () { + it('does not normalize the new paths', async function() { const changes = [ { type: 'DirMove', @@ -1025,7 +1029,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFD'))) @@ -1034,7 +1038,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new paths', async function () { + it('does not normalize the new paths', async function() { const changes = [ { type: 'DirMove', @@ -1069,7 +1073,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFD encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders 
.metadir() .path(srcDirPath.normalize('NFD')) @@ -1079,7 +1083,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFC'))) @@ -1088,7 +1092,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new paths', async function () { + it('does not normalize the new paths', async function() { const changes = [ { type: 'DirMove', @@ -1122,7 +1126,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFD'))) @@ -1131,7 +1135,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new paths', async function () { + it('does not normalize the new paths', async function() { const changes = [ { type: 'DirMove', @@ -1172,13 +1176,17 @@ onPlatform('darwin', () => { const dstDirPath = 'Énoncés / corrigés'.normalize('NFD') let dir - beforeEach(async function () { - dir = await builders.metadir().path(srcDirPath).upToDate().create() + beforeEach(async function() { + dir = await builders + .metadir() + .path(srcDirPath) + .upToDate() + .create() }) context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFC'))) @@ -1187,7 +1195,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new paths', async function () { + it('does not normalize the new paths', async function() { const changes = [ { type: 'DirMove', @@ -1221,7 +1229,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(path.join(dir.path, srcFilename.normalize('NFD'))) @@ -1230,7 +1238,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new paths', async function () { + it('does not normalize the new paths', async function() { const changes = [ { type: 'DirMove', @@ -1270,7 +1278,7 @@ onPlatform('darwin', () => { const dstFilename = 'Échec inconséquent'.normalize('NFD') let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(srcFilename) @@ -1279,7 +1287,7 @@ onPlatform('darwin', () => { .create() }) - it('does not normalize the new paths', async function () { + it('does not normalize the new paths', async function() { const changes = [ { type: 'DirMove', @@ -1322,7 +1330,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFC encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(srcDirPath.normalize('NFC')) @@ -1332,7 +1340,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(filename.normalize('NFC')) @@ -1341,7 +1349,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 
'DirMove', @@ -1375,7 +1383,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(filename.normalize('NFD')) @@ -1384,7 +1392,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -1424,7 +1432,7 @@ onPlatform('darwin', () => { const filename = (filenameFirst + filenameSecond).normalize('NFD') let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path( @@ -1436,7 +1444,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -1472,7 +1480,7 @@ onPlatform('darwin', () => { context('when parent is saved with NFD encoded path in Pouch', () => { let dir - beforeEach(async function () { + beforeEach(async function() { dir = await builders .metadir() .path(srcDirPath.normalize('NFD')) @@ -1482,7 +1490,7 @@ onPlatform('darwin', () => { context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(filename.normalize('NFC')) @@ -1491,7 +1499,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -1525,7 +1533,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(filename.normalize('NFD')) @@ -1534,7 +1542,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -1574,7 +1582,7 @@ onPlatform('darwin', () => { const filename = (filenameFirst + filenameSecond).normalize('NFD') let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path( @@ -1586,7 +1594,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -1629,13 +1637,17 @@ onPlatform('darwin', () => { 'Énoncés'.normalize('NFD') + '/ corrigés'.normalize('NFC') let dir - beforeEach(async function () { - dir = await builders.metadir().path(dirPath).upToDate().create() + beforeEach(async function() { + dir = await builders + .metadir() + .path(dirPath) + .upToDate() + .create() }) context('when file is saved with NFC encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() { file = await builders .metafile() .path(filename.normalize('NFC')) @@ -1644,7 +1656,7 @@ onPlatform('darwin', () => { .create() }) - it('reuses the existing file name', async function () { + it('reuses the existing file name', async function() { const changes = [ { type: 'DirMove', @@ -1678,7 +1690,7 @@ onPlatform('darwin', () => { context('when file is saved with NFD encoded name in Pouch', () => { let file - beforeEach(async function () { + beforeEach(async function() 
{
          file = await builders
            .metafile()
            .path(filename.normalize('NFD'))
@@ -1687,7 +1699,7 @@ onPlatform('darwin', () => {
            .create()
        })

-        it('reuses the existing file name', async function () {
+        it('reuses the existing file name', async function() {
          const changes = [
            {
              type: 'DirMove',
@@ -1728,7 +1740,7 @@ onPlatform('darwin', () => {
          'Échec'.normalize('NFD') + ' inconséquent'.normalize('NFC')
        let file

-      beforeEach(async function () {
+      beforeEach(async function() {
        file = await builders
          .metafile()
          .path(srcFilename)
@@ -1737,7 +1749,7 @@ onPlatform('darwin', () => {
          .create()
      })

-      it('does not normalize the new paths', async function () {
+      it('does not normalize the new paths', async function() {
        const changes = [
          {
            type: 'DirMove',
diff --git a/test/unit/local/chokidar/prepare_events.js b/test/unit/local/chokidar/prepare_events.js
index 079ec73b5..06b633cf6 100644
--- a/test/unit/local/chokidar/prepare_events.js
+++ b/test/unit/local/chokidar/prepare_events.js
@@ -4,11 +4,10 @@
 const should = require('should')
 const sinon = require('sinon')

 const prepareEvents = require('../../../../core/local/chokidar/prepare_events')
-
 const Builders = require('../../../support/builders')
 const configHelpers = require('../../../support/helpers/config')
-const pouchHelpers = require('../../../support/helpers/pouch')
 const { onPlatform } = require('../../../support/helpers/platform')
+const pouchHelpers = require('../../../support/helpers/pouch')

 onPlatform('darwin', () => {
   describe('core/local/chokidar_steps/prepare_events', () => {
@@ -17,7 +16,7 @@ onPlatform('darwin', () => {
     before('instanciate config', configHelpers.createConfig)
     before('instanciate pouch', pouchHelpers.createDatabase)

-    beforeEach('set up builders', function () {
+    beforeEach('set up builders', function() {
       builders = new Builders({ pouch: this.pouch })
     })

@@ -25,8 +24,11 @@ onPlatform('darwin', () => {
     after('clean config directory', configHelpers.cleanConfig)

     describe('#oldMetadata()', () => {
-      it('resolves with the metadata whose id matches the event path', async function () {
-        const old = await builders.metadata().upToDate().create()
+      it('resolves with the metadata whose id matches the event path', async function() {
+        const old = await builders
+          .metadata()
+          .upToDate()
+          .create()
         const resultByEventType = {}
         for (let type of ['add', 'addDir', 'change', 'unlink', 'unlinkDir']) {
           resultByEventType[type] = await prepareEvents.oldMetadata(
@@ -48,7 +50,7 @@ onPlatform('darwin', () => {
     })

     describe('#step()', () => {
-      it('does not compute checksum of untouched file', async function () {
+      it('does not compute checksum of untouched file', async function() {
        const untouched = await builders
          .metafile()
          .path('untouched')
@@ -90,7 +92,7 @@ onPlatform('darwin', () => {
         should(checksum).not.have.been.called()
       })

-      it('does not compute checksum after only a path normalization change', async function () {
+      it('does not compute checksum after only a path normalization change', async function() {
         const old = await builders
           .metafile()
           .path('énoncé'.normalize('NFC'))
diff --git a/test/unit/local/chokidar/watcher.js b/test/unit/local/chokidar/watcher.js
index 272249d60..990fffa15 100644
--- a/test/unit/local/chokidar/watcher.js
+++ b/test/unit/local/chokidar/watcher.js
@@ -1,17 +1,17 @@
 /* eslint-env mocha */

+const EventEmitter = require('events')
 const fs = require('fs')
-const fse = require('fs-extra')
 const path = require('path')
-const sinon = require('sinon')
+
+const fse = require('fs-extra')
 const should =
require('should') -const EventEmitter = require('events') +const sinon = require('sinon') -const { FOLDER } = require('../../../../core/metadata') -const { TMP_DIR_NAME } = require('../../../../core/local/constants') -const Watcher = require('../../../../core/local/chokidar/watcher') const chokidarEvent = require('../../../../core/local/chokidar/event') - +const Watcher = require('../../../../core/local/chokidar/watcher') +const { TMP_DIR_NAME } = require('../../../../core/local/constants') +const { FOLDER } = require('../../../../core/metadata') const Builders = require('../../../support/builders') const configHelpers = require('../../../support/helpers/config') const { ContextDir } = require('../../../support/helpers/context_dir') @@ -19,12 +19,12 @@ const { onPlatform } = require('../../../support/helpers/platform') const pouchHelpers = require('../../../support/helpers/pouch') onPlatform('darwin', () => { - describe('ChokidarWatcher Tests', function () { + describe('ChokidarWatcher Tests', function() { let builders before('instanciate config', configHelpers.createConfig) before('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('instanciate local watcher', function () { + beforeEach('instanciate local watcher', function() { builders = new Builders({ pouch: this.pouch }) this.prep = {} this.watcher = new Watcher( @@ -34,7 +34,7 @@ onPlatform('darwin', () => { sinon.createStubInstance(EventEmitter) ) }) - afterEach('stop watcher and clean path', function (done) { + afterEach('stop watcher and clean path', function(done) { this.watcher.stop(true) this.watcher.checksumer.kill() fse.emptyDir(this.syncPath, done) @@ -42,12 +42,12 @@ onPlatform('darwin', () => { after('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) - describe('start', function () { - it('calls the callback when initial scan is done', function () { + describe('start', function() { + it('calls the callback when initial scan is done', function() { this.watcher.start() }) - it('calls addFile/putFolder for files that are aleady here', async function () { + it('calls addFile/putFolder for files that are aleady here', async function() { fse.ensureDirSync(path.join(this.syncPath, 'aa')) fse.ensureFileSync(path.join(this.syncPath, 'aa/ab')) this.prep.putFolderAsync = sinon.stub().resolves() @@ -63,7 +63,7 @@ onPlatform('darwin', () => { ) }) - it('only recomputes checksums of changed files', async function () { + it('only recomputes checksums of changed files', async function() { const unchangedFilename = 'unchanged-file.txt' const changedFilename = 'changed-file.txt' const unchangedPath = path.join(this.syncPath, unchangedFilename) @@ -110,7 +110,7 @@ onPlatform('darwin', () => { } }) - it('ignores the temporary directory', async function () { + it('ignores the temporary directory', async function() { fse.ensureDirSync(path.join(this.syncPath, TMP_DIR_NAME)) fse.ensureFileSync(path.join(this.syncPath, TMP_DIR_NAME, 'ac')) this.prep.putFolder = sinon.spy() @@ -125,8 +125,12 @@ onPlatform('darwin', () => { describe('stop', () => { context('when initial scan events have not been flushed yet', () => { - beforeEach(async function () { - await builders.metafile().path('no-event').upToDate().create() + beforeEach(async function() { + await builders + .metafile() + .path('no-event') + .upToDate() + .create() this.watcher.initialScanParams = { paths: [], @@ -139,7 +143,7 @@ onPlatform('darwin', () => { this.watcher.watcher = { close: sinon.stub().resolves() } }) - it('clears 
the buffer and does not flush any event', async function () { + it('clears the buffer and does not flush any event', async function() { const onFlushSpy = sinon.spy(this.watcher, 'onFlush') try { this.watcher.buffer.push({ @@ -160,8 +164,12 @@ onPlatform('darwin', () => { }) context('when intial scan events have already been flushed', () => { - beforeEach(async function () { - await builders.metafile().path('no-event').upToDate().create() + beforeEach(async function() { + await builders + .metafile() + .path('no-event') + .upToDate() + .create() this.watcher.initialScanParams = { paths: [], @@ -174,7 +182,7 @@ onPlatform('darwin', () => { this.watcher.watcher = { close: sinon.stub().resolves() } }) - it('tries to flush buffered events before stopping', async function () { + it('tries to flush buffered events before stopping', async function() { const onFlushSpy = sinon.spy(this.watcher, 'onFlush') try { this.watcher.buffer.push({ @@ -199,18 +207,18 @@ onPlatform('darwin', () => { const relpath = 'foo.txt' let abspath - beforeEach(function () { + beforeEach(function() { abspath = path.join(this.syncPath, relpath) }) - it('resolves with the md5sum for the given relative path', async function () { + it('resolves with the md5sum for the given relative path', async function() { await fse.outputFile(abspath, 'foo') await should(this.watcher.checksum(relpath)).be.fulfilledWith( 'rL0Y20zC+Fzt72VPzMSk2A==' ) // foo }) - it('does not swallow errors', async function () { + it('does not swallow errors', async function() { await should(this.watcher.checksum(relpath)).be.rejectedWith({ code: 'ENOENT' }) @@ -218,11 +226,11 @@ onPlatform('darwin', () => { }) describe('onFlush', () => { - beforeEach(function () { + beforeEach(function() { this.prep.addFileAsync = sinon.stub().resolves() this.prep.putFolderAsync = sinon.stub().resolves() }) - afterEach(function () { + afterEach(function() { delete this.prep.addFileAsync delete this.prep.putFolderAsync }) @@ -230,7 +238,7 @@ onPlatform('darwin', () => { context( 'when processing the initial events of an empty sync directory', () => { - it('calls the initial scan step', async function () { + it('calls the initial scan step', async function() { sinon.spy(this.watcher.pouch, 'initialScanDocs') try { @@ -263,7 +271,7 @@ onPlatform('darwin', () => { context('while an initial scan is being processed', () => { const trigger = new EventEmitter() const SECOND_FLUSH_TRIGGER = 'second-flush' - beforeEach(function () { + beforeEach(function() { // Make sure we're in initial scan mode this.watcher.initialScanParams = { paths: [], @@ -295,11 +303,11 @@ onPlatform('darwin', () => { }) this.watcher.buffer.flush() }) - afterEach(function () { + afterEach(function() { this.watcher.pouch.initialScanDocs.restore() }) - it('does not trigger a new initial scan', async function () { + it('does not trigger a new initial scan', async function() { this.watcher.buffer.push({ type: 'add', path: __filename, @@ -317,9 +325,9 @@ onPlatform('darwin', () => { }) describe('onAddFile', () => { - it('detects when a file is created', function () { + it('detects when a file is created', function() { return this.watcher.start().then(() => { - this.prep.addFileAsync = function (side, doc) { + this.prep.addFileAsync = function(side, doc) { side.should.equal('local') doc.should.have.properties({ path: 'aaa.jpg', @@ -336,7 +344,7 @@ onPlatform('darwin', () => { }) }) - it('does not skip checksum computation when an identity conflict could occur during initial scan', async function () { + it('does not 
skip checksum computation when an identity conflict could occur during initial scan', async function() { const syncDir = new ContextDir(this.syncPath) const existing = await builders .metafile() @@ -356,10 +364,10 @@ onPlatform('darwin', () => { }) }) - describe('onAddDir', function () { - it('detects when a folder is created', function () { + describe('onAddDir', function() { + it('detects when a folder is created', function() { return this.watcher.start().then(() => { - this.prep.putFolderAsync = function (side, doc) { + this.prep.putFolderAsync = function(side, doc) { side.should.equal('local') doc.should.have.properties({ path: 'aba', @@ -373,11 +381,11 @@ onPlatform('darwin', () => { }) }) - it('detects when a sub-folder is created', function () { + it('detects when a sub-folder is created', function() { return this.watcher.start().then(() => { this.prep.putFolderAsync = () => { // For abb folder - this.prep.putFolderAsync = function (side, doc) { + this.prep.putFolderAsync = function(side, doc) { side.should.equal('local') doc.should.have.properties({ path: path.normalize('abb/abc'), @@ -396,14 +404,14 @@ onPlatform('darwin', () => { }) describe('onUnlinkFile', () => { - it('detects when a file is deleted', function () { + it('detects when a file is deleted', function() { // This test does not create the file in pouchdb. // the watcher will not find a inode number for the unlink // and therefore discard it. fse.ensureFileSync(path.join(this.syncPath, 'aca')) this.prep.addFileAsync = () => { // For aca file - this.prep.trashFileAsync = function (side, doc) { + this.prep.trashFileAsync = function(side, doc) { side.should.equal('local') doc.should.have.properties({ path: 'aca' @@ -418,14 +426,14 @@ onPlatform('darwin', () => { }) describe('onUnlinkDir', () => { - it('detects when a folder is deleted', function () { + it('detects when a folder is deleted', function() { // This test does not create the file in pouchdb. // the watcher will not find a inode number for the unlink // and therefore discard it. fse.mkdirSync(path.join(this.syncPath, 'ada')) this.prep.putFolderAsync = () => { // For ada folder - this.prep.trashFolderAsync = function (side, doc) { + this.prep.trashFolderAsync = function(side, doc) { side.should.equal('local') doc.should.have.properties({ path: 'ada' @@ -440,12 +448,12 @@ onPlatform('darwin', () => { }) describe('onChange', () => - it('detects when a file is changed', function () { + it('detects when a file is changed', function() { let src = path.join(__dirname, '../../../fixtures/chat-mignon.jpg') let dst = path.join(this.syncPath, 'aea.jpg') fse.copySync(src, dst) this.prep.addFileAsync = () => { - this.prep.updateFileAsync = function (side, doc) { + this.prep.updateFileAsync = function(side, doc) { side.should.equal('local') doc.should.have.properties({ path: 'aea.jpg', @@ -464,8 +472,8 @@ onPlatform('darwin', () => { this.watcher.start() })) - describe('when a file is moved', function () { - it('deletes the source and adds the destination', function () { + describe('when a file is moved', function() { + it('deletes the source and adds the destination', function() { // This test does not create the file in pouchdb. // the watcher will not find a inode number for the unlink // and therefore discard it. 
@@ -508,11 +516,11 @@ onPlatform('darwin', () => { }) }) - describe('when a directory is moved', function () { + describe('when a directory is moved', function() { beforeEach('instanciate pouch', pouchHelpers.createDatabase) afterEach('clean pouch', pouchHelpers.cleanDatabase) - it.skip('deletes the source and adds the destination', function () { + it.skip('deletes the source and adds the destination', function() { // This test does not create the file in pouchdb. // the watcher will not find a inode number for the unlink // and therefore discard it. @@ -567,12 +575,15 @@ onPlatform('darwin', () => { }) }) - describe('when a rescan request event is fired', function () { - it('drops buffered events', async function () { + describe('when a rescan request event is fired', function() { + it('drops buffered events', async function() { await this.watcher.start() const filePath = path.join(this.syncPath, 'added') - const stats = builders.stats().kind('file').build() + const stats = builders + .stats() + .kind('file') + .build() this.watcher.watcher.emit('add', filePath, stats) should(this.watcher.buffer.events).deepEqual([ @@ -594,7 +605,7 @@ onPlatform('darwin', () => { should(this.watcher.buffer.events).be.empty() }) - it('restarts the watcher', async function () { + it('restarts the watcher', async function() { await this.watcher.start() sinon.spy(this.watcher, 'stop') diff --git a/test/unit/local/index.js b/test/unit/local/index.js index 01a3486d5..1ee6a127a 100644 --- a/test/unit/local/index.js +++ b/test/unit/local/index.js @@ -1,38 +1,42 @@ /* eslint-env mocha */ /* @flow */ +const path = require('path') + const Promise = require('bluebird') const fse = require('fs-extra') -const path = require('path') -const sinon = require('sinon') const should = require('should') +const sinon = require('sinon') const { Local } = require('../../../core/local') const { TMP_DIR_NAME } = require('../../../core/local/constants') - const Builders = require('../../support/builders') +const { createTrashMock } = require('../../support/doubles/fs') const configHelpers = require('../../support/helpers/config') const { ContextDir } = require('../../support/helpers/context_dir') const { WINDOWS_DEFAULT_MODE } = require('../../support/helpers/platform') const pouchHelpers = require('../../support/helpers/pouch') -const { createTrashMock } = require('../../support/doubles/fs') const CHAT_MIGNON_MOD_PATH = 'test/fixtures/chat-mignon-mod.jpg' const streamer = (doc, content, err) => ({ createReadStreamAsync(docToStream) { docToStream.should.equal(doc) - const stream = new Builders().stream().push(content).error(err).build() + const stream = new Builders() + .stream() + .push(content) + .error(err) + .build() return Promise.resolve(stream) } }) -describe('Local', function () { +describe('Local', function() { let builders, syncDir, trashMock before('instanciate config', configHelpers.createConfig) before('instanciate pouch', pouchHelpers.createDatabase) - before('instanciate local', function () { + before('instanciate local', function() { trashMock = createTrashMock() this.prep = {} @@ -45,30 +49,30 @@ describe('Local', function () { after('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) - describe('constructor', function () { - it('has a base path', function () { + describe('constructor', function() { + it('has a base path', function() { this.local.syncPath.should.equal(this.syncPath) }) - it('has a tmp path', function () { + it('has a tmp path', function() { let 
tmpPath = syncDir.abspath(TMP_DIR_NAME) this.local.tmpPath.should.equal(tmpPath) }) - it('has a side name', function () { + it('has a side name', function() { should(this.local.name).eql('local') }) }) - describe('createReadStream', function () { - it('throws an error if no file for this document', async function () { + describe('createReadStream', function() { + it('throws an error if no file for this document', async function() { let doc = { path: 'no-such-file' } await should(this.local.createReadStreamAsync(doc)).be.rejectedWith( /ENOENT/ ) }) - it('creates a readable stream for the document', async function () { + it('creates a readable stream for the document', async function() { const image = await fse.readFile(CHAT_MIGNON_MOD_PATH) const src = CHAT_MIGNON_MOD_PATH @@ -89,7 +93,7 @@ describe('Local', function () { }) describe('updateMetadataAsync', () => { - it('chmod -x for a non-executable file', async function () { + it('chmod -x for a non-executable file', async function() { const doc = { docType: 'file', path: 'non-exec-file' @@ -103,7 +107,7 @@ describe('Local', function () { ) }) - it('chmod +x for an executable file', async function () { + it('chmod +x for an executable file', async function() { let date = new Date('2015-11-09T05:06:07Z') let filePath = syncDir.abspath('exec-file') fse.ensureFileSync(filePath) @@ -123,7 +127,7 @@ describe('Local', function () { } }) - it('updates mtime for a file', async function () { + it('updates mtime for a file', async function() { let date = new Date('2015-10-09T05:06:07Z') let filePath = syncDir.abspath('utimes-file') fse.ensureFileSync(filePath) @@ -137,7 +141,7 @@ describe('Local', function () { should(+mtime).equal(+date) }) - it('updates mtime for a directory', async function () { + it('updates mtime for a directory', async function() { let date = new Date('2015-10-09T05:06:07Z') let folderPath = syncDir.abspath('utimes-folder') fse.ensureDirSync(folderPath) @@ -159,7 +163,7 @@ describe('Local', function () { fullPath = doc => syncDir.abspath(doc.path) }) - it('sets ino for a file', function (done) { + it('sets ino for a file', function(done) { const doc /*: { path: string, ino?: number } */ = { path: 'file-needs-ino' } @@ -172,7 +176,7 @@ describe('Local', function () { }) }) - it('sets ino for a directory', function (done) { + it('sets ino for a directory', function(done) { const doc /*: { path: string, ino?: number } */ = { path: 'dir-needs-ino' } @@ -187,7 +191,7 @@ describe('Local', function () { }) describe('fileExistsLocally', () => { - it('checks file existence as a binary in the db and on disk', async function () { + it('checks file existence as a binary in the db and on disk', async function() { const filePath = path.resolve(this.syncPath, 'folder', 'testfile') await should(this.local.fileExistsLocally('deadcafe')).be.fulfilledWith( false @@ -210,15 +214,15 @@ describe('Local', function () { }) }) - describe('addFile', function () { - beforeEach(function () { + describe('addFile', function() { + beforeEach(function() { sinon.spy(this.events, 'emit') }) - afterEach(function () { + afterEach(function() { this.events.emit.restore() }) - it('creates the file by downloading it', async function () { + it('creates the file by downloading it', async function() { const content = 'foobar' const doc = builders .metafile() @@ -244,7 +248,7 @@ describe('Local', function () { } }) - it('creates the file from another file with same checksum', async function () { + it('creates the file from another file with same checksum', async 
function() { sinon.spy(this.local, 'fileExistsLocally') const content = 'foo bar baz' @@ -281,7 +285,7 @@ describe('Local', function () { } }) - it('can create a file in the root', async function () { + it('can create a file in the root', async function() { const content = 'foobaz' const doc = builders .metafile() @@ -306,7 +310,7 @@ describe('Local', function () { } }) - it('aborts when the download is incorrect', async function () { + it('aborts when the download is incorrect', async function() { const content = 'foo' const invalidContent = 'bar' const doc = builders @@ -328,7 +332,7 @@ describe('Local', function () { } }) - it('adds write permission to existing read-only Cozy Note', async function () { + it('adds write permission to existing read-only Cozy Note', async function() { const doc = { docType: 'file', mime: 'text/vnd.cozy.note+markdown', @@ -361,29 +365,29 @@ describe('Local', function () { let doc beforeEach('set up doc', () => { - doc = builders.metafile().data(corruptData).build() + doc = builders + .metafile() + .data(corruptData) + .build() doc.size = validData.length }) - beforeEach( - 'stub #createReadStreamAsync() on the other side', - function () { - this.local.other = streamer(doc, corruptData) - } - ) + beforeEach('stub #createReadStreamAsync() on the other side', function() { + this.local.other = streamer(doc, corruptData) + }) afterEach( 'restore #createReadStreamAsync() on the other side', - function () { + function() { this.local.other = null } ) - it('rejects', async function () { + it('rejects', async function() { await should(this.local.addFileAsync(doc)).be.rejectedWith(message) }) - const addFileRejection = async function () { + const addFileRejection = async function() { await this.local.addFileAsync(doc).catch(() => {}) } @@ -392,7 +396,7 @@ describe('Local', function () { afterEach(() => syncDir.unlink(doc)) beforeEach(addFileRejection) - it('is not overridden to prevent valid data loss', async function () { + it('is not overridden to prevent valid data loss', async function() { await should(syncDir.readFile(doc)).be.fulfilledWith(validData) }) }) @@ -400,7 +404,7 @@ describe('Local', function () { describe('missing local file', () => { beforeEach(addFileRejection) - it('is not downloaded to prevent confusion', async function () { + it('is not downloaded to prevent confusion', async function() { await should(syncDir.exists(doc)).be.fulfilledWith(false) }) }) @@ -412,31 +416,31 @@ describe('Local', function () { let doc beforeEach('set up doc', () => { - doc = builders.metafile().data(data).build() + doc = builders + .metafile() + .data(data) + .build() }) - beforeEach( - 'stub #createReadStreamAsync() on the other side', - function () { - this.local.other = streamer(doc, data, new Error(message)) - } - ) + beforeEach('stub #createReadStreamAsync() on the other side', function() { + this.local.other = streamer(doc, data, new Error(message)) + }) afterEach( 'restore #createReadStreamAsync() on the other side', - function () { + function() { this.local.other = null } ) - it('rejects', async function () { + it('rejects', async function() { await should(this.local.addFileAsync(doc)).be.rejectedWith(message) }) }) }) - describe('addFolder', function () { - it('creates the folder', async function () { + describe('addFolder', function() { + it('creates the folder', async function() { const doc = builders .metadir() .path('parent/folder-to-create') @@ -452,7 +456,7 @@ describe('Local', function () { should(doc.ino).be.a.Number() }) - it('updates mtime if the 
folder already exists', async function () { + it('updates mtime if the folder already exists', async function() { const doc = builders .metadir() .path('parent/folder-to-create') @@ -470,7 +474,7 @@ describe('Local', function () { }) describe('overwriteFile', () => { - it('writes the new content of a file', async function () { + it('writes the new content of a file', async function() { const newContent = 'Hello world' const doc = builders .metafile() @@ -499,7 +503,7 @@ describe('Local', function () { }) describe('updateFileMetadata', () => { - it('updates metadata', async function () { + it('updates metadata', async function() { const doc = builders .metafile() .path('file-to-update') @@ -514,8 +518,11 @@ describe('Local', function () { }) describe('updateFolder', () => { - it('calls addFolder', async function () { - const doc = builders.metadir().path('a-folder-to-update').build() + it('calls addFolder', async function() { + const doc = builders + .metadir() + .path('a-folder-to-update') + .build() sinon.stub(this.local, 'addFolderAsync').resolves() await this.local.updateFolderAsync(doc) should(this.local.addFolderAsync).be.calledWith(doc) @@ -523,12 +530,15 @@ describe('Local', function () { }) }) - describe('move', function () { - context('with file', function () { + describe('move', function() { + context('with file', function() { let dstFile, srcFile beforeEach(async () => { - srcFile = builders.metafile().path('src/file').build() + srcFile = builders + .metafile() + .path('src/file') + .build() dstFile = builders .metafile() .path('dst/file') @@ -538,7 +548,7 @@ describe('Local', function () { await fse.emptyDir(syncDir.root) }) - it('moves the file and updates its mtime', async function () { + it('moves the file and updates its mtime', async function() { await syncDir.outputFile(srcFile, 'foobar') await syncDir.ensureParentDir(dstFile) @@ -551,7 +561,7 @@ describe('Local', function () { should(await syncDir.readFile(dstFile)).equal('foobar') }) - it('throws ENOENT on missing source', async function () { + it('throws ENOENT on missing source', async function() { await syncDir.emptyDir(path.dirname(srcFile.path)) await syncDir.emptyDir(path.dirname(dstFile.path)) @@ -562,7 +572,7 @@ describe('Local', function () { should(await syncDir.tree()).deepEqual(['dst/', 'src/']) }) - it('throws ENOENT on missing destination parent', async function () { + it('throws ENOENT on missing destination parent', async function() { await syncDir.outputFile(srcFile, 'foobar') await syncDir.removeParentDir(dstFile) @@ -573,7 +583,7 @@ describe('Local', function () { should(await syncDir.tree()).deepEqual(['src/', 'src/file']) }) - it('throws a custom Error on existing destination', async function () { + it('throws a custom Error on existing destination', async function() { await syncDir.outputFile(srcFile, 'src/file content') await syncDir.outputFile(dstFile, 'dst/file content') @@ -589,7 +599,7 @@ describe('Local', function () { ]) }) - it('throws a custom Error on existing destination (and missing source)', async function () { + it('throws a custom Error on existing destination (and missing source)', async function() { await syncDir.ensureParentDir(srcFile) await syncDir.outputFile(dstFile, 'dst/file content') @@ -601,17 +611,24 @@ describe('Local', function () { }) }) - context('with folder', function () { + context('with folder', function() { let dstDir, srcDir beforeEach(async () => { - srcDir = builders.metadir().path('src/dir').build() - dstDir = 
builders.metadir().path('dst/dir').olderThan(srcDir).build() + srcDir = builders + .metadir() + .path('src/dir') + .build() + dstDir = builders + .metadir() + .path('dst/dir') + .olderThan(srcDir) + .build() await fse.emptyDir(syncDir.root) }) - it('moves the folder and updates its mtime', async function () { + it('moves the folder and updates its mtime', async function() { await syncDir.ensureDir(srcDir) await syncDir.ensureParentDir(dstDir) @@ -623,7 +640,7 @@ describe('Local', function () { ) }) - it('throws ENOENT on missing source', async function () { + it('throws ENOENT on missing source', async function() { await syncDir.ensureParentDir(srcDir) await syncDir.ensureParentDir(dstDir) @@ -634,7 +651,7 @@ describe('Local', function () { should(await syncDir.tree()).deepEqual(['dst/', 'src/']) }) - it('throws ENOENT on missing destination parent', async function () { + it('throws ENOENT on missing destination parent', async function() { await syncDir.ensureDir(srcDir) await should(this.local.moveAsync(dstDir, srcDir)).be.rejectedWith({ @@ -644,7 +661,7 @@ describe('Local', function () { should(await syncDir.tree()).deepEqual(['src/', 'src/dir/']) }) - it('throws a custom Error on existing destination', async function () { + it('throws a custom Error on existing destination', async function() { await syncDir.ensureDir(srcDir) await syncDir.ensureDir(dstDir) @@ -660,7 +677,7 @@ describe('Local', function () { ]) }) - it('throws a custom Error on existing destination (and missing source)', async function () { + it('throws a custom Error on existing destination (and missing source)', async function() { await syncDir.ensureParentDir(srcDir) await syncDir.ensureDir(dstDir) @@ -674,7 +691,7 @@ describe('Local', function () { }) describe('trash', () => { - it('deletes a file from the local filesystem', async function () { + it('deletes a file from the local filesystem', async function() { const doc = await builders .metafile() .path('FILE-TO-DELETE') @@ -688,7 +705,7 @@ describe('Local', function () { await should(fse.exists(filePath)).be.fulfilledWith(false) }) - it('deletes a folder from the local filesystem', async function () { + it('deletes a folder from the local filesystem', async function() { const doc = await builders .metadir() .path('FOLDER-TO-DELETE') @@ -703,7 +720,7 @@ describe('Local', function () { }) context('when the document is missing on the filesystem', () => { - it('does not throw', async function () { + it('does not throw', async function() { const doc = await builders .metafile() .path('FILE-TO-DELETE') @@ -719,7 +736,7 @@ describe('Local', function () { }) context('when the document cannot be sent to the trash', () => { - it('permanently deletes it', async function () { + it('permanently deletes it', async function() { trashMock.withFailingTrash() try { diff --git a/test/unit/merge.js b/test/unit/merge.js index b93ff28c0..8278a9074 100644 --- a/test/unit/merge.js +++ b/test/unit/merge.js @@ -1,23 +1,23 @@ /* @flow */ /* eslint-env mocha */ +const path = require('path') + const _ = require('lodash') -const sinon = require('sinon') const should = require('should') -const path = require('path') +const sinon = require('sinon') const { Merge } = require('../../core/merge') const metadata = require('../../core/metadata') const { otherSide } = require('../../core/side') const pathUtils = require('../../core/utils/path') const timestamp = require('../../core/utils/timestamp') - +const Builders = require('../support/builders') +const stubSide = require('../support/doubles/side') 
const configHelpers = require('../support/helpers/config') const cozyHelpers = require('../support/helpers/cozy') const { onPlatform, onPlatforms } = require('../support/helpers/platform') const pouchHelpers = require('../support/helpers/pouch') -const Builders = require('../support/builders') -const stubSide = require('../support/doubles/side') const win32 = ( win32Data /*: Object */, @@ -105,12 +105,12 @@ function trashedSide(side, sideName) { : { ...side, trashed: true } } -describe('Merge', function () { +describe('Merge', function() { let builders before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('instanciate merge', function () { + beforeEach('instanciate merge', function() { this.side = 'local' this.merge = new Merge(this.pouch) this.merge.local = stubSide('local') @@ -140,8 +140,8 @@ describe('Merge', function () { afterEach('clean remote', cozyHelpers.deleteAll) after('clean config directory', configHelpers.cleanConfig) - describe('addFile', function () { - it('saves the new file', async function () { + describe('addFile', function() { + it('saves the new file', async function() { const doc = builders .metafile() .path('new-file') @@ -167,13 +167,13 @@ describe('Merge', function () { }) }) - context('remote', function () { + context('remote', function() { context( 'when an unsynced local file record with the same path but different content exists', () => { const filepath = 'BUZZ.JPG' - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { await builders .metafile() .path(filepath) @@ -182,7 +182,7 @@ describe('Merge', function () { .create() }) - it('creates a remote conflict', async function () { + it('creates a remote conflict', async function() { const newRemoteFile = await builders .remoteFile() .inRootDir() @@ -217,7 +217,7 @@ describe('Merge', function () { const filepath = 'BUZZ.JPG' let file - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { file = await builders .metafile() .path(filepath) @@ -226,7 +226,7 @@ describe('Merge', function () { .create() }) - it('updates the record with the remote metadata', async function () { + it('updates the record with the remote metadata', async function() { const newRemoteFile = await builders .remoteFile() .inRootDir() @@ -283,7 +283,7 @@ describe('Merge', function () { const filepath = 'BUZZ.JPG' let remoteFile, deleted - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { remoteFile = await builders .remoteFile() .inRootDir() @@ -304,7 +304,7 @@ describe('Merge', function () { .create() }) - it('replaces the existing record with a new remote file record', async function () { + it('replaces the existing record with a new remote file record', async function() { const newRemoteFile = await builders .remoteFile(remoteFile) .data('updated content') @@ -341,7 +341,7 @@ describe('Merge', function () { const filepath = 'BUZZ.JPG' let file - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { file = await builders .metafile() .path(filepath) @@ -351,7 +351,7 @@ describe('Merge', function () { .create() }) - it('updates the existing record', async function () { + it('updates the existing record', async function() { const newRemoteFile = await builders .remoteFile() .inRootDir() @@ -388,14 +388,14 @@ describe('Merge', function () { ) }) - context('local', function () { + 
context('local', function() { const filepath = 'BUZZ.JPG' context( 'when an unsynced remote file record with the same path but different content exists', () => { let file - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { const remoteFile = await builders .remoteFile() .inRootDir() @@ -411,7 +411,7 @@ describe('Merge', function () { .create() }) - it('creates a local conflict', async function () { + it('creates a local conflict', async function() { const doc = await builders .metafile() .path(filepath) @@ -438,7 +438,7 @@ describe('Merge', function () { 'when an unsynced remote file record with the same path and content exists', () => { let file - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { const remoteFile = await builders .remoteFile() .inRootDir() @@ -454,7 +454,7 @@ describe('Merge', function () { .create() }) - it('updates the record with the local metadata', async function () { + it('updates the record with the local metadata', async function() { const doc = await builders .metafile() .path(filepath) @@ -497,7 +497,7 @@ describe('Merge', function () { 'when a deleted remote file record with the same path and content exists', () => { let synced - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { const remoteFile = await builders .remoteFile() .inRootDir() @@ -518,7 +518,7 @@ describe('Merge', function () { .create() }) - it('does not overwrite the existing record', async function () { + it('does not overwrite the existing record', async function() { const doc = await builders .metafile(synced) .unmerged('local') @@ -540,7 +540,7 @@ describe('Merge', function () { 'when a record with an unsynced remote deletion and different content exists', () => { let synced, deleted - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { const remoteFile = await builders .remoteFile() .inRootDir() @@ -561,7 +561,7 @@ describe('Merge', function () { .create() }) - it('updates the existing record as a new local file', async function () { + it('updates the existing record as a new local file', async function() { const doc = await builders .metafile(synced) .data('local content') @@ -595,7 +595,7 @@ describe('Merge', function () { 'when an up-to-date file record with the same path exists', () => { let file - beforeEach('create a file', async function () { + beforeEach('create a file', async function() { file = await builders .metafile() .path(filepath) @@ -605,7 +605,7 @@ describe('Merge', function () { .create() }) - it('updates the existing record', async function () { + it('updates the existing record', async function() { const doc = builders .metafile() .path(filepath) @@ -641,10 +641,10 @@ describe('Merge', function () { ) }) - context('when the path was used in the past', function () { + context('when the path was used in the past', function() { const path = 'file-created-deleted-and-then-recreated' - beforeEach(async function () { + beforeEach(async function() { const was = await builders .metafile() .path(path) @@ -654,7 +654,7 @@ describe('Merge', function () { await this.pouch.remove(was) }) - it('saves the new file with the correct side number', async function () { + it('saves the new file with the correct side number', async function() { const doc = builders .metafile() .path(path) @@ -681,9 +681,17 @@ describe('Merge', function () { }) onPlatforms(['win32', 'darwin'], () => { - it('resolves an identity 
conflict with an existing file', async function () { - await builders.metafile().path('bar').upToDate().create() - const doc = builders.metafile().path('BAR').unmerged('remote').build() + it('resolves an identity conflict with an existing file', async function() { + await builders + .metafile() + .path('bar') + .upToDate() + .create() + const doc = builders + .metafile() + .path('BAR') + .unmerged('remote') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.addFileAsync('remote', _.cloneDeep(doc)) @@ -697,9 +705,17 @@ describe('Merge', function () { }) onPlatform('linux', () => { - it('does not have identity conflicts', async function () { - await builders.metafile().path('bar').upToDate().create() - const doc = builders.metafile().path('BAR').unmerged('remote').build() + it('does not have identity conflicts', async function() { + await builders + .metafile() + .path('bar') + .upToDate() + .create() + const doc = builders + .metafile() + .path('BAR') + .unmerged('remote') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.addFileAsync('remote', _.cloneDeep(doc)) @@ -719,7 +735,7 @@ describe('Merge', function () { }) }) - it('resolves a conflict with an existing dir', async function () { + it('resolves a conflict with an existing dir', async function() { const existingLocalDir = await builders .metadir() .sides({ local: 1 }) @@ -742,9 +758,15 @@ describe('Merge', function () { }) }) - it('does nothing for an already merged file (aka idempotence)', async function () { - const mergedFile = await builders.metafile().sides({ remote: 1 }).create() - const sameFile = builders.metafile(mergedFile).unmerged('remote').build() + it('does nothing for an already merged file (aka idempotence)', async function() { + const mergedFile = await builders + .metafile() + .sides({ remote: 1 }) + .create() + const sameFile = builders + .metafile(mergedFile) + .unmerged('remote') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.addFileAsync('remote', _.cloneDeep(sameFile)) @@ -757,7 +779,7 @@ describe('Merge', function () { }) context('when content is the same as an existing file', () => { - it('updates the PouchDB record without marking changes from a local update', async function () { + it('updates the PouchDB record without marking changes from a local update', async function() { const mergedFile = await builders .metafile() .updatedAt(new Date(2020, 5, 19, 11, 9, 0)) @@ -787,7 +809,7 @@ describe('Merge', function () { }) }) - it('sets the local metadata when it is missing', async function () { + it('sets the local metadata when it is missing', async function() { const mergedFile = await builders .metafile() .updatedAt(new Date(2020, 5, 19, 11, 9, 0)) @@ -799,7 +821,10 @@ describe('Merge', function () { const { rev } = await this.pouch.db.put(mergedFile) mergedFile._rev = rev - const sameFile = builders.metafile(mergedFile).unmerged('local').build() + const sameFile = builders + .metafile(mergedFile) + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.addFileAsync('local', _.cloneDeep(sameFile)) @@ -819,7 +844,7 @@ describe('Merge', function () { }) }) - it('keeps an existing local metadata when it is not present in the new doc', async function () { + it('keeps an existing local metadata when it is not present in the new doc', async function() { const oldRemoteFile = await builders .remoteFile() .updatedAt(2020, 5, 19, 11, 9, 0, 0) @@ -858,8 +883,8 @@ describe('Merge', function () { }) 
}) - context('on initial scan', function () { - it('saves an offline update after an unsynced local addition', async function () { + context('on initial scan', function() { + it('saves an offline update after an unsynced local addition', async function() { const initialFile = await builders .metafile() .sides({ local: 1 }) @@ -892,7 +917,7 @@ describe('Merge', function () { }) }) - it('saves an offline update after an unsynced local update', async function () { + it('saves an offline update after an unsynced local update', async function() { const initial = await builders .metafile() .path('yafile') @@ -931,7 +956,7 @@ describe('Merge', function () { }) }) - it('does nothing for an locally untouched file after an unsynced remote update', async function () { + it('does nothing for an locally untouched file after an unsynced remote update', async function() { const synced = await builders .metafile() .data('previous content') @@ -942,7 +967,10 @@ describe('Merge', function () { .data('remote update') .changedSide('remote') .create() - const sameAsSynced = builders.metafile(synced).unmerged('local').build() + const sameAsSynced = builders + .metafile(synced) + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.addFileAsync('local', _.cloneDeep(sameAsSynced)) @@ -956,7 +984,7 @@ describe('Merge', function () { // XXX: This sides are increased on the remote update to make sure it will // get synced. - it('creates a conflict for an oflline local update after an unsynced remote update', async function () { + it('creates a conflict for an oflline local update after an unsynced remote update', async function() { const synced = await builders .metafile() .data('initial content') @@ -997,7 +1025,7 @@ describe('Merge', function () { }) }) - it('cancels the local trashing of a file if found later', async function () { + it('cancels the local trashing of a file if found later', async function() { const synced = await builders .metafile() .data('initial content') @@ -1008,7 +1036,10 @@ describe('Merge', function () { .trashed() .changedSide('local') .create() - const localScan = builders.metafile(synced).unmerged('local').build() + const localScan = builders + .metafile(synced) + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.addFileAsync('local', _.cloneDeep(localScan)) @@ -1033,7 +1064,7 @@ describe('Merge', function () { describe('updateFile', () => { let file - beforeEach('create synced file', async function () { + beforeEach('create synced file', async function() { file = await builders .metafile() .path('FIZZBUZZ.JPG') @@ -1046,7 +1077,7 @@ describe('Merge', function () { .create() }) - it('creates the file if it does not exist', async function () { + it('creates the file if it does not exist', async function() { const doc = builders .metafile() .path('NEW-FILE') @@ -1073,7 +1104,7 @@ describe('Merge', function () { }) context('when content is the same', () => { - it('updates the PouchDB record without marking changes from a remote update', async function () { + it('updates the PouchDB record without marking changes from a remote update', async function() { const doc = builders .metafile(file) .tags('bar', 'baz') @@ -1103,7 +1134,7 @@ describe('Merge', function () { // XXX: Here we don't increase the sides as we don't want to propagate a // simple change of modification date. 
- it('updates the PouchDB record without marking changes from a local update', async function () { + it('updates the PouchDB record without marking changes from a local update', async function() { const doc = builders .metafile(file) .updatedAt(new Date()) @@ -1129,7 +1160,7 @@ describe('Merge', function () { }) }) - it('does nothing when the modification date is the same', async function () { + it('does nothing when the modification date is the same', async function() { const doc = builders .metafile(file) .md5sum('xxx') @@ -1148,13 +1179,16 @@ describe('Merge', function () { }) }) - it('sets the local metadata when it is missing', async function () { + it('sets the local metadata when it is missing', async function() { // Remove local attribute for the test delete file.local const { rev } = await this.pouch.db.put(file) file._rev = rev - const doc = builders.metafile(file).unmerged('local').build() + const doc = builders + .metafile(file) + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.updateFileAsync('local', _.cloneDeep(doc)) @@ -1173,7 +1207,7 @@ describe('Merge', function () { }) }) - it('removes an existing errors counter', async function () { + it('removes an existing errors counter', async function() { const inError = await builders .metafile(file) .errors(2) @@ -1207,7 +1241,7 @@ describe('Merge', function () { }) }) - it('keeps an existing local metadata for a remote update', async function () { + it('keeps an existing local metadata for a remote update', async function() { const doc = builders .metafile(file) .data('updated content') @@ -1234,7 +1268,7 @@ describe('Merge', function () { }) }) - it('keeps an existing remote metadata for a local update', async function () { + it('keeps an existing remote metadata for a local update', async function() { const doc = builders .metafile(file) .data('new content') @@ -1261,9 +1295,12 @@ describe('Merge', function () { }) }) - it('keeps the overwrite attribute if it exists', async function () { + it('keeps the overwrite attribute if it exists', async function() { // Overwrite file with a move - const src = await builders.metafile().changedSide(this.side).create() + const src = await builders + .metafile() + .changedSide(this.side) + .create() const dst = await builders .metafile() .moveFrom(src) @@ -1301,7 +1338,7 @@ describe('Merge', function () { }) }) - it('keeps the overwrite attribute if it exists', async function () { + it('keeps the overwrite attribute if it exists', async function() { const firstUpdateDate = new Date() const firstUpdate = await builders .metafile(file) @@ -1340,7 +1377,7 @@ describe('Merge', function () { }) }) - it('rejects an unresolvable conflict with an existing directory', async function () { + it('rejects an unresolvable conflict with an existing directory', async function() { // FIXME: Why don't we resolve the conflict like everywhere else? 
const existingLocalDir = await builders .metadir() @@ -1357,14 +1394,17 @@ describe('Merge', function () { ).be.rejectedWith(/conflict/) }) - it('resolves a conflict between a new remote update and a previous local update', async function () { + it('resolves a conflict between a new remote update and a previous local update', async function() { const initial = await builders .metafile() .sides({ local: 1 }) .ino(456) .data('initial content') .create() - const synced = await builders.metafile(initial).upToDate().create() + const synced = await builders + .metafile(initial) + .upToDate() + .create() const mergedLocalUpdate = await builders .metafile(synced) .changedSide('local') @@ -1399,7 +1439,7 @@ describe('Merge', function () { }) }) - it('does not overwrite an unsynced remote update with a locally unchanged file', async function () { + it('does not overwrite an unsynced remote update with a locally unchanged file', async function() { const synced = await builders .metafile() .data('initial content') @@ -1412,7 +1452,10 @@ describe('Merge', function () { .changedSide('remote') .noRecord() // XXX: Prevent Pouch conflict from reusing `synced`'s _id .create() - const unchangedLocal = builders.metafile(synced).unmerged('local').build() + const unchangedLocal = builders + .metafile(synced) + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.updateFileAsync('local', _.cloneDeep(unchangedLocal)) @@ -1424,7 +1467,7 @@ describe('Merge', function () { }) }) - it('does not overwrite an unsynced remote update with a locally updated file and creates a local conflict', async function () { + it('does not overwrite an unsynced remote update with a locally updated file and creates a local conflict', async function() { const synced = await builders .metafile() .data('initial content') @@ -1465,7 +1508,7 @@ describe('Merge', function () { }) }) - it('overwrites a versioned unsynced local version with a remote update', async function () { + it('overwrites a versioned unsynced local version with a remote update', async function() { const local = await builders .metafile() .path('file') @@ -1513,7 +1556,7 @@ describe('Merge', function () { }) }) - it('cancels an already versioned local update with a previous remote update', async function () { + it('cancels an already versioned local update with a previous remote update', async function() { const synced = await builders .metafile() .path('file') @@ -1564,7 +1607,7 @@ describe('Merge', function () { }) context('when existing file is the same and up-to-date', () => { - it('updates the PouchDB record without marking changes', async function () { + it('updates the PouchDB record without marking changes', async function() { const initial = await builders .metafile() .data('initial content') @@ -1602,7 +1645,7 @@ describe('Merge', function () { }) context('when the file is a Cozy Note export', () => { - it('does not create a conflict', async function () { + it('does not create a conflict', async function() { const remoteNote = await builders .remoteNote() .name('my-note.cozy-note') @@ -1642,7 +1685,7 @@ describe('Merge', function () { }) describe('putFolder', () => { - it('saves the new folder', async function () { + it('saves the new folder', async function() { const doc = builders .metadir() .path('NEW-FOLDER') @@ -1667,7 +1710,7 @@ describe('Merge', function () { }) }) - it('saves a new version of an existing folder', async function () { + it('saves a new version of an existing folder', async function() { const old = 
await builders .metadir() .path('existing-folder') @@ -1700,14 +1743,20 @@ describe('Merge', function () { }) context('on initial scan', () => { - it('cancels the local trashing of a folder if found later', async function () { - const synced = await builders.metadir().upToDate().create() + it('cancels the local trashing of a folder if found later', async function() { + const synced = await builders + .metadir() + .upToDate() + .create() const localTrashing = await builders .metadir(synced) .trashed() .changedSide('local') .create() - const localScan = builders.metadir(synced).unmerged('local').build() + const localScan = builders + .metadir(synced) + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.putFolderAsync('local', _.cloneDeep(localScan)) @@ -1728,10 +1777,10 @@ describe('Merge', function () { }) }) - context('when the path was used in the past', function () { + context('when the path was used in the past', function() { const path = 'folder-created-deleted-and-then-recreated' - beforeEach(async function () { + beforeEach(async function() { const was = await builders .metadir() .path(path) @@ -1740,8 +1789,12 @@ describe('Merge', function () { await this.pouch.remove(was) }) - it('saves the new folder with the correct side number', async function () { - const doc = builders.metadir().path(path).unmerged(this.side).build() + it('saves the new folder with the correct side number', async function() { + const doc = builders + .metadir() + .path(path) + .unmerged(this.side) + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.putFolderAsync(this.side, _.cloneDeep(doc)) @@ -1761,13 +1814,16 @@ describe('Merge', function () { }) }) - it('does nothing when existing folder is up to date', async function () { + it('does nothing when existing folder is up to date', async function() { const old = await builders .metadir() .path('up-to-date-folder') .upToDate() .create() - const doc = builders.metadir(old).unmerged(this.side).build() + const doc = builders + .metadir(old) + .unmerged(this.side) + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.putFolderAsync(this.side, _.cloneDeep(doc)) @@ -1786,8 +1842,11 @@ describe('Merge', function () { // issues). // Until we find a way to mark specific events as obsolete, our only // recourse is to discard these modification date changes. 
- it('does nothing when only the modification date has changed', async function () { - const mergedFolder = await builders.metadir().upToDate().create() + it('does nothing when only the modification date has changed', async function() { + const mergedFolder = await builders + .metadir() + .upToDate() + .create() const sameFolder = builders .metadir(mergedFolder) .updatedAt(new Date()) @@ -1804,7 +1863,7 @@ describe('Merge', function () { }) }) - it('resolves a conflict with an existing file', async function () { + it('resolves a conflict with an existing file', async function() { const existingLocalFile = await builders .metafile() .sides({ local: 1 }) @@ -1827,7 +1886,7 @@ describe('Merge', function () { }) }) - it('removes an existing errors counter', async function () { + it('removes an existing errors counter', async function() { const inError = await builders .metadir() .errors(2) @@ -1861,7 +1920,7 @@ describe('Merge', function () { }) }) - it('sets the local metadata when it is missing', async function () { + it('sets the local metadata when it is missing', async function() { const mergedFolder = await builders .metadir() .updatedAt(new Date(2020, 5, 19, 11, 9, 0)) @@ -1896,7 +1955,7 @@ describe('Merge', function () { }) }) - it('keeps existing local metadata when it is not present in the new doc', async function () { + it('keeps existing local metadata when it is not present in the new doc', async function() { const oldRemoteDir = await builders .remoteDir() .updatedAt(2020, 5, 19, 11, 9, 0, 0) @@ -1941,7 +2000,7 @@ describe('Merge', function () { () => { let Alfred - beforeEach(async function () { + beforeEach(async function() { await builders .metadir() .path('alfred') @@ -1955,7 +2014,7 @@ describe('Merge', function () { }) onPlatforms(['win32', 'darwin'], () => { - it('resolves the conflict', async function () { + it('resolves the conflict', async function() { const sideEffects = await mergeSideEffects(this, () => this.merge.putFolderAsync(this.side, _.cloneDeep(Alfred)) ) @@ -1970,7 +2029,7 @@ describe('Merge', function () { }) onPlatform('linux', () => { - it('saves the doc as a new doc', async function () { + it('saves the doc as a new doc', async function() { const sideEffects = await mergeSideEffects(this, () => this.merge.putFolderAsync(this.side, _.cloneDeep(Alfred)) ) @@ -1992,13 +2051,17 @@ describe('Merge', function () { ) }) - describe('moveFileAsync', function () { - beforeEach('create parent folder', async function () { - await builders.metadir().path('FOO').upToDate().create() + describe('moveFileAsync', function() { + beforeEach('create parent folder', async function() { + await builders + .metadir() + .path('FOO') + .upToDate() + .create() }) context('local', () => { - it('saves the moved file', async function () { + it('saves the moved file', async function() { const was = await builders .metafile() .path('FOO/OLD') @@ -2041,7 +2104,7 @@ describe('Merge', function () { }) context('remote', () => { - it('saves the moved file', async function () { + it('saves the moved file', async function() { const oldRemoteFile = await builders .remoteFile() .inRootDir() @@ -2093,7 +2156,7 @@ describe('Merge', function () { }) }) - it('removes an existing errors counter', async function () { + it('removes an existing errors counter', async function() { const inError = await builders .metafile() .path('src') @@ -2133,7 +2196,7 @@ describe('Merge', function () { }) }) - it('adds missing fields', async function () { + it('adds missing fields', async function() { const was = await 
builders .metafile() .path('FOO/OLD-MISSING-FIELDS.JPG') @@ -2174,7 +2237,7 @@ describe('Merge', function () { context('when the destination exists', () => { let existing - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metafile() .path('DST_FILE') @@ -2183,7 +2246,7 @@ describe('Merge', function () { .create() }) - it('erases the existing destination record and saves the moved file', async function () { + it('erases the existing destination record and saves the moved file', async function() { const was = await builders .metafile() .path('SRC_FILE') @@ -2225,7 +2288,7 @@ describe('Merge', function () { }) }) - it('keeps the overwrite attribute if it exists', async function () { + it('keeps the overwrite attribute if it exists', async function() { const overwritten = await builders .metafile(existing) .overwrite(existing) @@ -2276,7 +2339,7 @@ describe('Merge', function () { }) context('and we have unapplied modifications on the remote side', () => { - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metafile(existing) .data('new content') @@ -2287,7 +2350,7 @@ describe('Merge', function () { context( 'and these modifications were already versioned for the moved file', () => { - it('overwrites the modified version with the locally moved file', async function () { + it('overwrites the modified version with the locally moved file', async function() { const was = await builders .metafile() .path('SRC_FILE') @@ -2342,7 +2405,7 @@ describe('Merge', function () { ) context('and these modifications were not versioned', () => { - it('resolves a conflict', async function () { + it('resolves a conflict', async function() { const was = await builders .metafile() .path('SRC_FILE') @@ -2390,7 +2453,7 @@ describe('Merge', function () { }) context('and we have unapplied modifications on the local side', () => { - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metafile(existing) .data('new content') @@ -2401,7 +2464,7 @@ describe('Merge', function () { context( 'and these modifications were already versioned for the moved file', () => { - it('overwrites the modified version with the remotely moved file', async function () { + it('overwrites the modified version with the remotely moved file', async function() { const was = await builders .metafile() .path('SRC_FILE') @@ -2456,7 +2519,7 @@ describe('Merge', function () { ) context('and these modifications were not versioned', () => { - it('resolves a conflict', async function () { + it('resolves a conflict', async function() { const was = await builders .metafile() .path('SRC_FILE') @@ -2507,7 +2570,7 @@ describe('Merge', function () { context('when the destination has existed', () => { const path = 'DST_FILE' - beforeEach(async function () { + beforeEach(async function() { const previous = await builders .metafile() .path(path) @@ -2516,7 +2579,7 @@ describe('Merge', function () { await this.pouch.remove(previous) }) - it('saves the moved file with the correct side', async function () { + it('saves the moved file with the correct side', async function() { const was = await builders .metafile() .path('SRC_FILE') @@ -2554,7 +2617,7 @@ describe('Merge', function () { }) context('for a local-only file', () => { - it('converts the move into a local addition', async function () { + it('converts the move into a local addition', async function() { const was = await builders .metafile() .path('FOO/OLD') @@ -2585,7 +2648,7 @@ 
describe('Merge', function () { }) }) - it('converts an overwriting move into a local update', async function () { + it('converts an overwriting move into a local update', async function() { const existing = await builders .metafile() .path('FOO/NEW') @@ -2632,7 +2695,7 @@ describe('Merge', function () { }) context('for a remote-only file', () => { - it('converts the move into a remote addition ', async function () { + it('converts the move into a remote addition ', async function() { const oldRemoteFile = builders .remoteFile() .inRootDir() @@ -2671,7 +2734,7 @@ describe('Merge', function () { }) }) - it('converts an overwriting move into a remote update', async function () { + it('converts an overwriting move into a remote update', async function() { const existing = await builders .metafile() .path('FOO/NEW') @@ -2716,8 +2779,12 @@ describe('Merge', function () { }) }) - it('does not identify the child file move following another unsynced move as an addition', async function () { - const src = await builders.metadir().path('SRC').upToDate().create() + it('does not identify the child file move following another unsynced move as an addition', async function() { + const src = await builders + .metadir() + .path('SRC') + .upToDate() + .create() const file = await builders .metafile() .path('SRC/FILE') @@ -2735,7 +2802,11 @@ describe('Merge', function () { ) const was = await this.pouch.bySyncedPath(file2.path) - const dst = builders.metadir(src).path('DST').unmerged('local').build() + const dst = builders + .metadir(src) + .path('DST') + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.moveFolderAsync('local', _.cloneDeep(dst), _.cloneDeep(src)) @@ -2775,10 +2846,22 @@ describe('Merge', function () { }) }) - it('does not identify the local move of a file following an unsynced child move as an addition', async function () { - const src = await builders.metadir().path('SRC').upToDate().create() - await builders.metafile().path('SRC/FILE').upToDate().create() - const dst = builders.metadir(src).path('DST').unmerged('local').build() + it('does not identify the local move of a file following an unsynced child move as an addition', async function() { + const src = await builders + .metadir() + .path('SRC') + .upToDate() + .create() + await builders + .metafile() + .path('SRC/FILE') + .upToDate() + .create() + const dst = builders + .metadir(src) + .path('DST') + .unmerged('local') + .build() await this.merge.moveFolderAsync( 'local', _.cloneDeep(dst), @@ -2813,7 +2896,7 @@ describe('Merge', function () { }) onPlatforms(['win32', 'darwin'], () => { - it('does not identify an identical renaming as a conflict', async function () { + it('does not identify an identical renaming as a conflict', async function() { const banana = await builders .metafile() .path('banana') @@ -2849,9 +2932,17 @@ describe('Merge', function () { }) }) - it('resolves an identity conflict with an existing file', async function () { - await builders.metafile().path('QUX').upToDate().create() - const was = await builders.metafile().path('baz').upToDate().create() + it('resolves an identity conflict with an existing file', async function() { + await builders + .metafile() + .path('QUX') + .upToDate() + .create() + const was = await builders + .metafile() + .path('baz') + .upToDate() + .create() const doc = builders .metafile(was) .path('qux') @@ -2876,7 +2967,7 @@ describe('Merge', function () { }) onPlatform('linux', () => { - it('does not identify an identical renaming as a 
conflict', async function () { + it('does not identify an identical renaming as a conflict', async function() { const banana = await builders .metafile() .path('banana') @@ -2912,9 +3003,17 @@ describe('Merge', function () { }) }) - it('does not have identity conflicts', async function () { - const was = await builders.metafile().path('baz').upToDate().create() - await builders.metafile().path('QUX').upToDate().create() + it('does not have identity conflicts', async function() { + const was = await builders + .metafile() + .path('baz') + .upToDate() + .create() + await builders + .metafile() + .path('QUX') + .upToDate() + .create() const doc = builders .metafile(was) .path('qux') @@ -2947,9 +3046,9 @@ describe('Merge', function () { }) }) - describe('moveFolderAsync', function () { + describe('moveFolderAsync', function() { context('local', () => { - it('saves the new folder and deletes the old one with hints for writers', async function () { + it('saves the new folder and deletes the old one with hints for writers', async function() { const was = await builders .metadir() .path('OLD') @@ -2957,7 +3056,11 @@ describe('Merge', function () { .tags('courge', 'quux') .upToDate() .create() - const doc = builders.metadir(was).path('NEW').unmerged('local').build() + const doc = builders + .metadir(was) + .path('NEW') + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.moveFolderAsync( @@ -2984,7 +3087,7 @@ describe('Merge', function () { }) context('when the folder has children marked for deletion', () => { - it('does not move them', async function () { + it('does not move them', async function() { const was = await builders .metadir() .path('OLD') @@ -3031,7 +3134,7 @@ describe('Merge', function () { context('when the folder does not exist remotely', () => { let was, child - beforeEach(async function () { + beforeEach(async function() { was = await builders .metadir() .path('OLD') @@ -3046,7 +3149,7 @@ describe('Merge', function () { .create() }) - it('saves a local folder addition', async function () { + it('saves a local folder addition', async function() { const doc = builders .metadir(was) .path('NEW') @@ -3088,7 +3191,7 @@ describe('Merge', function () { context('and the destination exists', () => { let existing context('and it is up-to-date', () => { - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metadir() .path('NEW') @@ -3096,7 +3199,7 @@ describe('Merge', function () { .create() }) - it('overwrites the destination', async function () { + it('overwrites the destination', async function() { const doc = builders .metadir(was) .path(existing.path) @@ -3151,7 +3254,7 @@ describe('Merge', function () { context( 'and it is not at least up-to-date on the movement side', () => { - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metadir() .path('NEW') @@ -3159,7 +3262,7 @@ describe('Merge', function () { .create() }) - it('resolves a conflict', async function () { + it('resolves a conflict', async function() { const doc = builders .metadir(was) .path(existing.path) @@ -3213,7 +3316,7 @@ describe('Merge', function () { }) context('remote', () => { - it('saves the moved folder', async function () { + it('saves the moved folder', async function() { const oldRemoteDir = builders .remoteDir() .inRootDir() @@ -3264,8 +3367,12 @@ describe('Merge', function () { context('when the folder does not exist locally', () => { let oldRemoteDir, was, child - beforeEach(async 
function () { - oldRemoteDir = builders.remoteDir().inRootDir().name('OLD').build() + beforeEach(async function() { + oldRemoteDir = builders + .remoteDir() + .inRootDir() + .name('OLD') + .build() was = await builders .metadir() .fromRemote(oldRemoteDir) @@ -3283,7 +3390,7 @@ describe('Merge', function () { .create() }) - it('saves a remote folder addition', async function () { + it('saves a remote folder addition', async function() { const newRemoteDir = builders .remoteDir(oldRemoteDir) .name('NEW') @@ -3331,7 +3438,7 @@ describe('Merge', function () { context( 'and platform incompatibilities of the folder are solved', () => { - it('updates the incompatibilities of its children', async function () { + it('updates the incompatibilities of its children', async function() { const was = await builders .metadir() .incompatible() @@ -3388,7 +3495,7 @@ describe('Merge', function () { context('and the destination exists', () => { let existing context('and it is up-to-date', () => { - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metadir() .path('NEW') @@ -3396,7 +3503,7 @@ describe('Merge', function () { .create() }) - it('overwrites the destination', async function () { + it('overwrites the destination', async function() { const newRemoteDir = builders .remoteDir(oldRemoteDir) .name('NEW') @@ -3459,7 +3566,7 @@ describe('Merge', function () { context( 'and it is not at least up-to-date on the movement side', () => { - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metadir() .path('NEW') @@ -3467,7 +3574,7 @@ describe('Merge', function () { .create() }) - it('resolves a conflict', async function () { + it('resolves a conflict', async function() { const newRemoteDir = builders .remoteDir(oldRemoteDir) .name('NEW') @@ -3532,7 +3639,7 @@ describe('Merge', function () { let existing context('and it is up-to-date', () => { - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metadir() .path('DST_DIR') @@ -3540,7 +3647,7 @@ describe('Merge', function () { .create() }) - it('overwrites the destination', async function () { + it('overwrites the destination', async function() { const was = await builders .metadir() .path('SRC_DIR') @@ -3584,7 +3691,7 @@ describe('Merge', function () { }) context('and it is not at least up-to-date on the movement side', () => { - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metadir() .path('DST_DIR') @@ -3592,7 +3699,7 @@ describe('Merge', function () { .create() }) - it('resolves a conflict', async function () { + it('resolves a conflict', async function() { const was = await builders .metadir() .path('SRC_DIR') @@ -3639,14 +3746,26 @@ describe('Merge', function () { context('when the destination has existed', () => { const path = 'DST_DIR' - beforeEach(async function () { - const previous = await builders.metadir().path(path).upToDate().create() + beforeEach(async function() { + const previous = await builders + .metadir() + .path(path) + .upToDate() + .create() await this.pouch.remove(previous) }) - it('saves the new directory with the correct side', async function () { - const was = await builders.metadir().path('SRC_DIR').upToDate().create() - const doc = builders.metadir(was).path(path).unmerged(this.side).build() + it('saves the new directory with the correct side', async function() { + const was = await builders + .metadir() + .path('SRC_DIR') + .upToDate() + .create() + const doc = builders + 
.metadir(was) + .path(path) + .unmerged(this.side) + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.moveFolderAsync( @@ -3673,13 +3792,17 @@ describe('Merge', function () { }) }) - it('does not create conflict for local-only existing folder.', async function () { + it('does not create conflict for local-only existing folder.', async function() { const existing = await builders .metadir() .path('DST_DIR') .sides({ [this.side]: 1 }) .create() - const was = await builders.metadir().path('SRC_DIR').upToDate().create() + const was = await builders + .metadir() + .path('SRC_DIR') + .upToDate() + .create() const doc = builders .metadir(was) .path(existing.path) @@ -3715,7 +3838,7 @@ describe('Merge', function () { }) }) - it('removes existing errors counters', async function () { + it('removes existing errors counters', async function() { const inError = await builders .metadir() .path('src') @@ -3784,8 +3907,12 @@ describe('Merge', function () { }) onPlatforms(['win32', 'darwin'], () => { - it('does not identify an identical renaming as a conflict', async function () { - const apple = await builders.metadir().path('apple').upToDate().create() + it('does not identify an identical renaming as a conflict', async function() { + const apple = await builders + .metadir() + .path('apple') + .upToDate() + .create() const APPLE = builders .metadir(apple) .path('APPLE') @@ -3816,8 +3943,12 @@ describe('Merge', function () { }) }) - it('resolves an identity conflict when moving a synced folder to an existing path', async function () { - await builders.metadir().path('LINUX').upToDate().create() + it('resolves an identity conflict when moving a synced folder to an existing path', async function() { + await builders + .metadir() + .path('LINUX') + .upToDate() + .create() const torvalds = await builders .metadir() .path('torvalds') @@ -3845,8 +3976,12 @@ describe('Merge', function () { }) onPlatform('linux', () => { - it('does not identify an identical renaming as a conflict', async function () { - const apple = await builders.metadir().path('apple').upToDate().create() + it('does not identify an identical renaming as a conflict', async function() { + const apple = await builders + .metadir() + .path('apple') + .upToDate() + .create() const APPLE = builders .metadir(apple) .path('APPLE') @@ -3877,9 +4012,17 @@ describe('Merge', function () { }) }) - it('does not have identity conflicts', async function () { - await builders.metadir().path('NUKEM').upToDate().create() - const duke = await builders.metadir().path('duke').upToDate().create() + it('does not have identity conflicts', async function() { + await builders + .metadir() + .path('NUKEM') + .upToDate() + .create() + const duke = await builders + .metadir() + .path('duke') + .upToDate() + .create() const nukem = builders .metadir(duke) .path('nukem') @@ -3911,14 +4054,22 @@ describe('Merge', function () { }) }) - it('handles overwritten descendants', async function () { - const srcDir = await builders.metadir().path('src').upToDate().create() + it('handles overwritten descendants', async function() { + const srcDir = await builders + .metadir() + .path('src') + .upToDate() + .create() const srcFile = await builders .metafile() .path('src/file') .upToDate() .create() - const oldDst = await builders.metadir().path('dst').upToDate().create() + const oldDst = await builders + .metadir() + .path('dst') + .upToDate() + .create() const oldDstFile = await builders .metafile() .path('dst/file') @@ -3981,9 +4132,13 @@ describe('Merge', 
function () { }) }) - describe('moveFolderRecursively', function () { - it('moves the folder and files/folders inside it', async function () { - const was = await builders.metadir().path('my-folder').upToDate().create() + describe('moveFolderRecursively', function() { + it('moves the folder and files/folders inside it', async function() { + const was = await builders + .metadir() + .path('my-folder') + .upToDate() + .create() const subdir = await builders .metadir() .path('my-folder/folder-9') @@ -4059,7 +4214,7 @@ describe('Merge', function () { }) context('when platform incompatibilities of the folder are solved', () => { - it('updates the incompatibilities of its children', async function () { + it('updates the incompatibilities of its children', async function() { const was = await builders .metadir() .incompatible() @@ -4113,7 +4268,7 @@ describe('Merge', function () { }) context('local with an unsynced remote file', () => { - it('adds the remote file to the destination folder', async function () { + it('adds the remote file to the destination folder', async function() { const was = await builders .metadir() .path('ADDED_DIR') @@ -4170,7 +4325,7 @@ describe('Merge', function () { }) context('remote with an unsynced local file', () => { - it('adds the local file to the destination folder', async function () { + it('adds the local file to the destination folder', async function() { const oldRemoteDir = builders .remoteDir() .inRootDir() @@ -4233,14 +4388,26 @@ describe('Merge', function () { context('when the destination has existed', () => { const path = 'DST_DIR' - beforeEach(async function () { - const previous = await builders.metadir().path(path).upToDate().create() + beforeEach(async function() { + const previous = await builders + .metadir() + .path(path) + .upToDate() + .create() await this.pouch.remove(previous) }) - it('saves the new directory with the correct side', async function () { - const was = await builders.metadir().path('SRC_DIR').upToDate().create() - const doc = builders.metadir(was).path(path).unmerged(this.side).build() + it('saves the new directory with the correct side', async function() { + const was = await builders + .metadir() + .path('SRC_DIR') + .upToDate() + .create() + const doc = builders + .metadir(was) + .path(path) + .unmerged(this.side) + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.moveFolderRecursivelyAsync( @@ -4271,7 +4438,7 @@ describe('Merge', function () { const parentPath = 'DST_DIR' const childName = 'CHILD' - beforeEach(async function () { + beforeEach(async function() { const previous = await builders .metadata() .path(`${parentPath}/${childName}`) @@ -4280,8 +4447,12 @@ describe('Merge', function () { await this.pouch.remove(previous) }) - it('saves the new child with the correct side', async function () { - const was = await builders.metadir().path('SRC_DIR').upToDate().create() + it('saves the new child with the correct side', async function() { + const was = await builders + .metadir() + .path('SRC_DIR') + .upToDate() + .create() const child = await builders .metadata() .path(`SRC_DIR/${childName}`) @@ -4340,7 +4511,7 @@ describe('Merge', function () { context( 'when the parent normalization differs in its children paths', () => { - it('correctly replaces the NFD parent part in the children paths', async function () { + it('correctly replaces the NFD parent part in the children paths', async function() { const nfdParentPath = 'Énoncés'.normalize('NFD') const nfcParentPath = nfdParentPath.normalize('NFC') 
const was = await builders @@ -4420,7 +4591,7 @@ describe('Merge', function () { }) }) - it('correctly replaces the NFC parent part in the children paths', async function () { + it('correctly replaces the NFC parent part in the children paths', async function() { const nfdParentPath = 'Énoncés'.normalize('NFD') const nfcParentPath = nfdParentPath.normalize('NFC') const was = await builders @@ -4506,7 +4677,7 @@ describe('Merge', function () { describe('deleteFileAsync', () => { context('when a record is found in Pouch', () => { - it('deletes a file', async function () { + it('deletes a file', async function() { const doc = await builders .metafile() .path('FILE') @@ -4532,7 +4703,7 @@ describe('Merge', function () { }) }) - it('removes move hints', async function () { + it('removes move hints', async function() { const old = await builders .metafile() .path('FILE') @@ -4564,7 +4735,7 @@ describe('Merge', function () { }) }) - it('removes an existing errors counter', async function () { + it('removes an existing errors counter', async function() { const inError = await builders .metafile() .errors(2) @@ -4593,14 +4764,17 @@ describe('Merge', function () { }) context('when a record marked for deletion is found in Pouch', () => { - it('keeps the deletion marker and updates sides info', async function () { + it('keeps the deletion marker and updates sides info', async function() { const was = await builders .metafile() .path('FILE') .trashed() .changedSide(otherSide(this.side)) .create() - const doc = builders.metafile(was).unmerged(this.side).build() + const doc = builders + .metafile(was) + .unmerged(this.side) + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.deleteFileAsync(this.side, _.cloneDeep(doc)) @@ -4625,18 +4799,18 @@ describe('Merge', function () { }) }) - describe('deleteFolderAsync', function () { - before(function () { + describe('deleteFolderAsync', function() { + before(function() { // XXX: deleteFolderAsync is only used for remote deletions this.side = 'remote' }) - after(function () { + after(function() { // XXX: 'local' is the current side value but it could change this.side = 'local' }) context('when a record is found in Pouch', () => { - it('marks the folder for deletion on the local filesystem', async function () { + it('marks the folder for deletion on the local filesystem', async function() { const doc = await builders .metadir() .path('FOLDER') @@ -4662,7 +4836,7 @@ describe('Merge', function () { }) context('and it has children', () => { - it('marks children for deletion on the local filesystem', async function () { + it('marks children for deletion on the local filesystem', async function() { const doc = await builders .metadir() .path('FOLDER') @@ -4724,7 +4898,7 @@ describe('Merge', function () { }) }) - it('removes existing errors counters', async function () { + it('removes existing errors counters', async function() { const inError = await builders .metadir() .path('dir') @@ -4770,7 +4944,7 @@ describe('Merge', function () { context( 'and child was moved into the folder on the local filesystem', () => { - it('marks it for deletion on the remote Cozy', async function () { + it('marks it for deletion on the remote Cozy', async function() { const doc = await builders .metadir() .path('folder') @@ -4823,7 +4997,7 @@ describe('Merge', function () { }) context('and it was moved on the local filesystem', () => { - it('removes move hints', async function () { + it('removes move hints', async function() { const old = await builders .metadir() 
.path('FOLDER') @@ -4857,7 +5031,7 @@ describe('Merge', function () { }) context('with children', () => { - it('marks the children for deletion on the local filesystem', async function () { + it('marks the children for deletion on the local filesystem', async function() { const dir = await builders .metadir() .path('folder') @@ -4935,7 +5109,7 @@ describe('Merge', function () { }) context('overwriting another document', () => { - it('marks the overwritten document for deletion on the remote Cozy', async function () { + it('marks the overwritten document for deletion on the remote Cozy', async function() { const overwritten = await builders .metafile() .path('MOVED') @@ -4989,7 +5163,7 @@ describe('Merge', function () { }) context('and it was moved on the remote Cozy', () => { - it('marks the previous location for deletion on the local filesystem', async function () { + it('marks the previous location for deletion on the local filesystem', async function() { const old = await builders .metadir() .path('FOLDER') @@ -5026,7 +5200,7 @@ describe('Merge', function () { }) context('when a record marked for deletion is found in Pouch', () => { - it('keeps the deletion marker and updates sides info', async function () { + it('keeps the deletion marker and updates sides info', async function() { const was = await builders .metadir() .path('FOLDER') @@ -5059,13 +5233,16 @@ describe('Merge', function () { describe('trashFileAsync', () => { for (const side of ['local', 'remote']) { context(`when trashed on ${side} side`, () => { - before(function () { + before(function() { this.side = side }) context('when record is found in Pouch', () => { - it('marks it for deletion and updates sides info', async function () { - const was = await builders.metafile().upToDate().create() + it('marks it for deletion and updates sides info', async function() { + const was = await builders + .metafile() + .upToDate() + .create() const doc = builders .metafile(was) .trashed() @@ -5095,7 +5272,7 @@ describe('Merge', function () { }) }) - it('removes an existing errors counter', async function () { + it('removes an existing errors counter', async function() { const inError = await builders .metafile() .errors(2) @@ -5136,7 +5313,7 @@ describe('Merge', function () { context('when a record marked for deletion is found in Pouch', () => { context('and the record was modified on the other side', () => { - it('completely erases the document from PouchDB', async function () { + it('completely erases the document from PouchDB', async function() { const was = await builders .metafile() .trashed() @@ -5170,7 +5347,7 @@ describe('Merge', function () { }) context('and the record was modified on the same side', () => { - it('does nothing', async function () { + it('does nothing', async function() { const was = await builders .metafile() .trashed() @@ -5199,8 +5376,11 @@ describe('Merge', function () { }) context('when no records are found in Pouch', () => { - it('does nothing', async function () { - const was = await builders.metafile().upToDate().create() + it('does nothing', async function() { + const was = await builders + .metafile() + .upToDate() + .create() const doc = builders .metafile(was) .trashed() @@ -5226,8 +5406,11 @@ describe('Merge', function () { }) context('when docType of found record does not match', () => { - it('does nothing', async function () { - const was = await builders.metafile().upToDate().create() + it('does nothing', async function() { + const was = await builders + .metafile() + .upToDate() + .create() 
const doc = builders .metadir() .path(was.path) @@ -5251,7 +5434,7 @@ }) context('when found record was not synced', () => { - it('marks it for deletion and upadtes sides info', async function () { + it('marks it for deletion and updates sides info', async function() { const was = await builders .metafile() .sides({ [this.side]: 1 }) @@ -5288,7 +5471,7 @@ context('when found record was moved on the same side', () => { let initial, src - beforeEach(async function () { + beforeEach(async function() { initial = await builders .metafile() .path('initial') @@ -5297,7 +5480,7 @@ src = await builders.metafile(initial).create() }) - it('marks the moved document for deletion', async function () { + it('marks the moved document for deletion', async function() { const was = await builders .metafile() .path('moved') @@ -5335,7 +5518,7 @@ context('and the move was overwriting an existing doc', () => { let existing - beforeEach(async function () { + beforeEach(async function() { existing = await builders .metafile() .path('moved') @@ -5343,7 +5526,7 @@ .create() }) - it('marks the moved document for deletion', async function () { + it('marks the moved document for deletion', async function() { const was = await builders .metafile() .path('moved') @@ -5394,7 +5577,7 @@ }) context('when found record was modified on the same side', () => { - it('marks it for deletion and updates sides info', async function () { + it('marks it for deletion and updates sides info', async function() { const initial = await builders .metafile() .data('initial') @@ -5436,7 +5619,7 @@ }) context('when found record was modified on the other side', () => { - it('dissociates the record from the trashed side which is not saved', async function () { + it('dissociates the record from the trashed side which is not saved', async function() { const initial = await builders .metafile() .data('initial') @@ -5479,7 +5662,7 @@ context('when trashed on local side', () => { context('and found record was moved on the remote side', () => { - it('dissociates the record from the local side so it can be downloaded again', async function () { + it('dissociates the record from the local side so it can be downloaded again', async function() { const initial = await builders .metafile() .path('initial') @@ -5491,7 +5674,11 @@ .moveFrom(src) .changedSide('remote') .create() - const doc = builders.metafile(was).trashed().unmerged('local').build() + const doc = builders + .metafile(was) + .trashed() + .unmerged('local') + .build() const sideEffects = await mergeSideEffects(this, () => this.merge.trashFileAsync( @@ -5523,7 +5710,7 @@ // its `trashed` attribute. // We need to find a solution for this (e.g. restore the file before // moving it to its destination).
- it('updates the record remote metadata so it can be restored', async function () { + it('updates the record remote metadata so it can be restored', async function() { const src = await builders .metafile() .path('initial') @@ -5569,13 +5756,16 @@ describe('Merge', function () { describe('trashFolderAsync', () => { for (const side of ['local', 'remote']) { context(`when trashed on ${side} side`, () => { - before(function () { + before(function() { this.side = side }) context('when record is found in Pouch', () => { - it('marks it for deletion and updates sides info', async function () { - const was = await builders.metadir().upToDate().create() + it('marks it for deletion and updates sides info', async function() { + const was = await builders + .metadir() + .upToDate() + .create() const doc = builders .metadir(was) .trashed() @@ -5605,7 +5795,7 @@ describe('Merge', function () { }) }) - it('removes existing errors counters', async function () { + it('removes existing errors counters', async function() { const inError = await builders .metadir() .path('dir') @@ -5666,7 +5856,7 @@ describe('Merge', function () { context('when a record marked for deletion is found in Pouch', () => { context('and the record was modified on the other side', () => { - it('completely erases the record from PouchDB', async function () { + it('completely erases the record from PouchDB', async function() { const was = await builders .metadir() .trashed() @@ -5700,7 +5890,7 @@ describe('Merge', function () { }) context('and the record was modified on the same side', () => { - it('does nothing', async function () { + it('does nothing', async function() { const was = await builders .metadir() .trashed() @@ -5729,8 +5919,11 @@ describe('Merge', function () { }) context('when no records are found in Pouch', () => { - it('does nothing', async function () { - const was = await builders.metadir().upToDate().create() + it('does nothing', async function() { + const was = await builders + .metadir() + .upToDate() + .create() const doc = builders .metadir(was) .trashed() @@ -5756,8 +5949,11 @@ describe('Merge', function () { }) context('when docType of found record does not match', () => { - it('does nothing', async function () { - const was = await builders.metafile().upToDate().create() + it('does nothing', async function() { + const was = await builders + .metafile() + .upToDate() + .create() const doc = builders .metadir() .path(was.path) @@ -5781,7 +5977,7 @@ describe('Merge', function () { }) context('when found record was not synced', () => { - it('marks it for deletion and updates sides info', async function () { + it('marks it for deletion and updates sides info', async function() { const was = await builders .metadir() .sides({ [this.side]: 1 }) diff --git a/test/unit/metadata.js b/test/unit/metadata.js index 8ca272270..be87cb4e4 100644 --- a/test/unit/metadata.js +++ b/test/unit/metadata.js @@ -1,17 +1,15 @@ /* @flow */ /* eslint-env mocha */ -const _ = require('lodash') -const fse = require('fs-extra') const path = require('path') + +const fse = require('fs-extra') +const _ = require('lodash') const should = require('should') const sinon = require('sinon') -const configHelpers = require('../support/helpers/config') -const pouchHelpers = require('../support/helpers/pouch') -const Builders = require('../support/builders') -const { onPlatform, onPlatforms } = require('../support/helpers/platform') - +const { Ignore } = require('../../core/ignore') +const stater = require('../../core/local/stater') const metadata = 
require('../../core/metadata') const { assignMaxDate, @@ -32,11 +30,13 @@ const { outOfDateSide, createConflictingDoc } = metadata -const { Ignore } = require('../../core/ignore') -const stater = require('../../core/local/stater') const { DIR_TYPE, NOTE_MIME_TYPE } = require('../../core/remote/constants') const pathUtils = require('../../core/utils/path') const timestamp = require('../../core/utils/timestamp') +const Builders = require('../support/builders') +const configHelpers = require('../support/helpers/config') +const { onPlatform, onPlatforms } = require('../support/helpers/platform') +const pouchHelpers = require('../support/helpers/pouch') /*:: import type { Metadata, MetadataRemoteFile, MetadataRemoteDir, MetadataLocalInfo } from '../../core/metadata' @@ -45,12 +45,12 @@ import type { RemoteBase } from '../../core/remote/document' const { platform } = process -describe('metadata', function () { +describe('metadata', function() { let builders before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach(function () { + beforeEach(function() { builders = new Builders({ pouch: this.pouch }) }) afterEach('clean pouch', pouchHelpers.cleanDatabase) @@ -144,8 +144,8 @@ describe('metadata', function () { }) }) - describe('invalidPath', function () { - should.Assertion.add('invalidPath', function () { + describe('invalidPath', function() { + should.Assertion.add('invalidPath', function() { this.params = { operator: 'to make metadata.invalidPath() return', expected: true @@ -153,7 +153,7 @@ describe('metadata', function () { should(invalidPath(this.obj)).be.exactly(true) }) - it('returns true if the path is incorrect', function () { + it('returns true if the path is incorrect', function() { should({ path: path.sep }).have.invalidPath() should({ path: '/' }).have.invalidPath() should({ path: '' }).have.invalidPath() @@ -164,64 +164,118 @@ describe('metadata', function () { should({ path: 'f/../oo/../../bar/./baz' }).have.invalidPath() }) - it('returns false if everything is OK', function () { + it('returns false if everything is OK', function() { should({ path: 'foo' }).not.have.invalidPath() should({ path: 'foo/bar' }).not.have.invalidPath() should({ path: 'foo/bar/baz.jpg' }).not.have.invalidPath() }) - it('returns false for paths with a leading slash', function () { + it('returns false for paths with a leading slash', function() { should({ path: '/foo/bar' }).not.have.invalidPath() should({ path: '/foo/bar/baz.bmp' }).not.have.invalidPath() }) }) - describe('invalidChecksum', function () { - it('returns true if the checksum is missing for a file', function () { + describe('invalidChecksum', function() { + it('returns true if the checksum is missing for a file', function() { const missingMd5sum = builders.metafile().build() delete missingMd5sum.md5sum should(invalidChecksum(missingMd5sum)).be.true() should( - invalidChecksum(builders.metafile().md5sum(null).build()) + invalidChecksum( + builders + .metafile() + .md5sum(null) + .build() + ) ).be.true() should( - invalidChecksum(builders.metafile().md5sum(undefined).build()) + invalidChecksum( + builders + .metafile() + .md5sum(undefined) + .build() + ) ).be.true() }) - it('returns false if the checksum is missing for a folder', function () { + it('returns false if the checksum is missing for a folder', function() { should(invalidChecksum(builders.metadir().build())).be.false() }) - it('returns true if the checksum is incorrect', function () { - 
should(invalidChecksum(builders.metafile().md5sum('').build())).be.true() + it('returns true if the checksum is incorrect', function() { should( - invalidChecksum(builders.metafile().md5sum('f00').build()) + invalidChecksum( + builders + .metafile() + .md5sum('') + .build() + ) + ).be.true() + should( + invalidChecksum( + builders + .metafile() + .md5sum('f00') + .build() + ) ).be.true() const sha1 = '68b329da9893e34099c7d8ad5cb9c94068b329da' should( - invalidChecksum(builders.metafile().md5sum(sha1).build()) + invalidChecksum( + builders + .metafile() + .md5sum(sha1) + .build() + ) ).be.true() const md5hex = 'adc83b19e793491b1c6ea0fd8b46cd9f' should( - invalidChecksum(builders.metafile().md5sum(md5hex).build()) + invalidChecksum( + builders + .metafile() + .md5sum(md5hex) + .build() + ) ).be.true() const md5base64truncated = 'rcg7GeeTSRscbqD9i0bNn' should( - invalidChecksum(builders.metafile().md5sum(md5base64truncated).build()) + invalidChecksum( + builders + .metafile() + .md5sum(md5base64truncated) + .build() + ) ).be.true() const sha1base64 = 'aLMp2piT40CZx9itXLnJQGizKdo=' should( - invalidChecksum(builders.metafile().md5sum(sha1base64).build()) + invalidChecksum( + builders + .metafile() + .md5sum(sha1base64) + .build() + ) ).be.true() const md5base64NonPadded = 'rcg7GeeTSRscbqD9i0bNnw' should( - invalidChecksum(builders.metafile().md5sum(md5base64NonPadded).build()) + invalidChecksum( + builders + .metafile() + .md5sum(md5base64NonPadded) + .build() + ) ).be.true() }) - it('returns false if the checksum is OK', function () { - should(invalidChecksum(builders.metafile().data('').build())).be.false() + it('returns false if the checksum is OK', function() { + should( + invalidChecksum( + builders + .metafile() + .data('') + .build() + ) + ).be.false() }) }) @@ -229,18 +283,32 @@ describe('metadata', function () { const syncPath = ';' it('adds incompatibilities to given doc if any', () => { - const incompatible = builders.metafile().incompatible().build() - const doc = builders.metafile().path('foo/bar').build() + const incompatible = builders + .metafile() + .incompatible() + .build() + const doc = builders + .metafile() + .path('foo/bar') + .build() doc.path = incompatible.path assignPlatformIncompatibilities(doc, syncPath) - should(doc).have.property('incompatibilities').and.not.be.empty() + should(doc) + .have.property('incompatibilities') + .and.not.be.empty() }) it('removes incompatibilities from given doc if none', () => { - const incompatible = builders.metafile().incompatible().build() - const doc = builders.metafile().path('foo/bar').build() + const incompatible = builders + .metafile() + .incompatible() + .build() + const doc = builders + .metafile() + .path('foo/bar') + .build() incompatible.path = doc.path assignPlatformIncompatibilities(incompatible, syncPath) @@ -252,14 +320,20 @@ describe('metadata', function () { describe('detectIncompatibilities', () => { const syncPath = ';' - it('is null when all names in the path are compatible', function () { - const doc = builders.metafile().path('foo/bar').build() + it('is null when all names in the path are compatible', function() { + const doc = builders + .metafile() + .path('foo/bar') + .build() should(detectIncompatibilities(doc, syncPath)).deepEqual([]) }) onPlatform('win32', () => { - it('lists platform incompatibilities for all names in the path', function () { - const doc = builders.metafile().path('f?o:o\\ba|r\\baz\\q"ux').build() + it('lists platform incompatibilities for all names in the path', function() { + const 
doc = builders + .metafile() + .path('f?o:o\\ba|r\\baz\\q"ux') + .build() should(detectIncompatibilities(doc, syncPath)).deepEqual([ { type: 'reservedChars', @@ -291,26 +365,29 @@ describe('metadata', function () { onPlatforms(['darwin', 'linux'], () => { it('does not list Windows incompatibilities', () => { - const doc = builders.metadir().path('foo/b:ar/qux').build() + const doc = builders + .metadir() + .path('foo/b:ar/qux') + .build() should(detectIncompatibilities(doc, syncPath)).deepEqual([]) }) }) }) - describe('extractRevNumber', function () { - it('extracts the revision number', function () { + describe('extractRevNumber', function() { + it('extracts the revision number', function() { const infos = { _rev: '42-0123456789' } should(extractRevNumber(infos)).equal(42) }) - it('returns 0 if not found', function () { + it('returns 0 if not found', function() { // $FlowFixMe the _rev attribute is missing on purpose should(extractRevNumber({})).equal(0) }) }) describe('isUpToDate', () => { - it('is false when the given side is undefined in doc', function () { + it('is false when the given side is undefined in doc', function() { const doc = builders .metafile() .rev('1-0123456') @@ -319,7 +396,7 @@ describe('metadata', function () { should(metadata.isUpToDate('local', doc)).be.false() }) - it('is true when the given side equals the target in doc', function () { + it('is true when the given side equals the target in doc', function() { const doc = builders .metafile() .rev('2-0123456') @@ -328,7 +405,7 @@ describe('metadata', function () { should(metadata.isUpToDate('local', doc)).be.true() }) - it('is false when the given side is lower than the target in doc', function () { + it('is false when the given side is lower than the target in doc', function() { const doc = builders .metafile() .rev('3-0123456') @@ -337,7 +414,7 @@ describe('metadata', function () { should(metadata.isUpToDate('local', doc)).be.false() }) - it('is true when the given side is the only one', function () { + it('is true when the given side is the only one', function() { const doc = builders .metafile() .rev('3-0123456') @@ -348,15 +425,18 @@ describe('metadata', function () { // XXX: We implemented the same workaround as in `isAtLeastUpToDate()` // although we haven't encountered the same issue yet but it is possible. 
- it('is true when the given side is the only one and lower than the target', function () { - const doc = builders.metafile().rev('3-0123456').build() + it('is true when the given side is the only one and lower than the target', function() { + const doc = builders + .metafile() + .rev('3-0123456') + .build() doc.sides = { local: 2, target: 35 } should(metadata.isUpToDate('local', doc)).be.true() }) }) describe('isAtLeastUpToDate', () => { - it('is false when the given side is undefined in doc', function () { + it('is false when the given side is undefined in doc', function() { const doc = builders .metafile() .rev('1-0123456') @@ -365,7 +445,7 @@ describe('metadata', function () { should(metadata.isAtLeastUpToDate('local', doc)).be.false() }) - it('is true when the given side equals the target in doc', function () { + it('is true when the given side equals the target in doc', function() { const doc = builders .metafile() .rev('2-0123456') @@ -374,7 +454,7 @@ describe('metadata', function () { should(metadata.isAtLeastUpToDate('local', doc)).be.true() }) - it('is true when the given side is greater than the target in doc', function () { + it('is true when the given side is greater than the target in doc', function() { const doc = builders .metafile() .rev('3-0123456') @@ -383,7 +463,7 @@ describe('metadata', function () { should(metadata.isAtLeastUpToDate('local', doc)).be.true() }) - it('is false when the given side is lower than the target in doc', function () { + it('is false when the given side is lower than the target in doc', function() { const doc = builders .metafile() .rev('3-0123456') @@ -392,7 +472,7 @@ describe('metadata', function () { should(metadata.isAtLeastUpToDate('local', doc)).be.false() }) - it('is true when the given side is the only one', function () { + it('is true when the given side is the only one', function() { const doc = builders .metafile() .rev('3-0123456') @@ -404,17 +484,23 @@ describe('metadata', function () { // XXX: It is yet unknown how we end up in this situation but it seems like // it can happen when we have sync errors and maybe some side dissociation. // Until we figure out the root cause, we try to prevent its consequences. 
- it('is true when the given side is the only one and lower than the target', function () { - const doc = builders.metafile().rev('3-0123456').build() + it('is true when the given side is the only one and lower than the target', function() { + const doc = builders + .metafile() + .rev('3-0123456') + .build() doc.sides = { local: 2, target: 35 } should(metadata.isAtLeastUpToDate('local', doc)).be.true() }) }) describe('assignMaxDate', () => { - it('assigns the previous timestamp to the doc when it is more recent than the current one to prevent updated_at < created_at errors on remote sync', function () { + it('assigns the previous timestamp to the doc when it is more recent than the current one to prevent updated_at < created_at errors on remote sync', function() { const was = builders.metafile().build() - const doc = builders.metafile().olderThan(was).build() + const doc = builders + .metafile() + .olderThan(was) + .build() should(() => { assignMaxDate(doc, was) }).changeOnly(doc, { @@ -422,34 +508,49 @@ describe('metadata', function () { }) }) - it('does nothing when the doc has no previous version', function () { + it('does nothing when the doc has no previous version', function() { const doc = builders.metafile().build() should(() => { assignMaxDate(doc) }).not.change(doc) }) - it('does nothing when both current and previous timestamps are the same', function () { + it('does nothing when both current and previous timestamps are the same', function() { const was = builders.metafile().build() - const doc = builders.metafile().updatedAt(was.updated_at).build() + const doc = builders + .metafile() + .updatedAt(was.updated_at) + .build() should(() => { assignMaxDate(doc, was) }).not.change(doc) }) - it('does nothing when the current timestamp is more recent than the previous one', function () { + it('does nothing when the current timestamp is more recent than the previous one', function() { const was = builders.metafile().build() - const doc = builders.metafile().newerThan(was).build() + const doc = builders + .metafile() + .newerThan(was) + .build() should(() => { assignMaxDate(doc, was) }).not.change(doc) }) - it('nevers changes the previous doc', function () { + it('nevers changes the previous doc', function() { const was = builders.metafile().build() - const sameDateDoc = builders.metafile().updatedAt(was.updated_at).build() - const newerDoc = builders.metafile().newerThan(was).build() - const olderDoc = builders.metafile().olderThan(was).build() + const sameDateDoc = builders + .metafile() + .updatedAt(was.updated_at) + .build() + const newerDoc = builders + .metafile() + .newerThan(was) + .build() + const olderDoc = builders + .metafile() + .olderThan(was) + .build() should(() => { assignMaxDate(sameDateDoc, was) }).not.change(was) @@ -464,7 +565,7 @@ describe('metadata', function () { describe('equivalent', () => { describe('with folders', () => { - it('returns true if the folders are the same', function () { + it('returns true if the folders are the same', function() { const a = builders .metadir() .ino(234) @@ -543,7 +644,7 @@ describe('metadata', function () { ).be.true() }) - it('does not fail when a property is absent on one side and undefined on the other', function () { + it('does not fail when a property is absent on one side and undefined on the other', function() { const a = builders .metadir() .path('foo/bar') @@ -586,7 +687,7 @@ describe('metadata', function () { }) describe('with files', () => { - it('returns true if the files are the same', function () { + it('returns 
true if the files are the same', function() { const a = builders .metafile() .path('foo/bar') @@ -654,7 +755,10 @@ describe('metadata', function () { .metafile(a) .ino(a.ino + 1) .build() - const h = builders.metafile(a).remoteId('321').build() + const h = builders + .metafile(a) + .remoteId('321') + .build() should(equivalent(a, a)).be.true() should(equivalent(a, b)).be.false() should(equivalent(a, c)).be.false() @@ -692,7 +796,7 @@ describe('metadata', function () { ) ).be.true() }) - it('does not fail when a property is absent on one side and undefined on the other', function () { + it('does not fail when a property is absent on one side and undefined on the other', function() { const a = builders .metafile() .path('foo/bar') @@ -739,8 +843,8 @@ describe('metadata', function () { }) }) - describe('sameBinary', function () { - it('returns true for two docs with the same checksum', function () { + describe('sameBinary', function() { + it('returns true for two docs with the same checksum', function() { const one = builders .metafile() .md5sum('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc') @@ -752,7 +856,7 @@ describe('metadata', function () { should(sameBinary(one, two)).be.true() }) - it('returns false for two docs with different checksums', function () { + it('returns false for two docs with different checksums', function() { const one = builders .metafile() .md5sum('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc') @@ -765,26 +869,26 @@ describe('metadata', function () { }) }) - describe('markSide', function () { + describe('markSide', function() { const path = 'path' for (const kind of ['File', 'Dir']) { let stats - beforeEach(async function () { + beforeEach(async function() { stats = kind === 'File' ? await stater.stat(__filename) : await stater.stat(__dirname) }) - it(`marks local: 1 for a new ${kind}`, async function () { + it(`marks local: 1 for a new ${kind}`, async function() { const doc = metadata[`build${kind}`](path, stats) markSide('local', doc) should(doc).have.properties({ sides: { target: 1, local: 1 } }) }) - it(`increments the side from the _rev of an already existing ${kind}`, async function () { + it(`increments the side from the _rev of an already existing ${kind}`, async function() { const prev = metadata[`build${kind}`](path, stats) prev.sides = { target: 5, local: 3, remote: 5 } prev._rev = '5-0123' @@ -806,23 +910,46 @@ describe('metadata', function () { it('increments existing sides by 1 in-place', () => { should(sidesAfterInc({})).deepEqual(undefined) - should(sidesAfterInc(builders.metadata().sides({}).build())).deepEqual({ + should( + sidesAfterInc( + builders + .metadata() + .sides({}) + .build() + ) + ).deepEqual({ target: 0 }) should( - sidesAfterInc(builders.metadata().sides({ local: 1 }).build()) + sidesAfterInc( + builders + .metadata() + .sides({ local: 1 }) + .build() + ) ).deepEqual({ target: 2, local: 2 }) should( - sidesAfterInc(builders.metadata().sides({ remote: 1 }).build()) + sidesAfterInc( + builders + .metadata() + .sides({ remote: 1 }) + .build() + ) ).deepEqual({ target: 2, remote: 2 }) should( sidesAfterInc( - builders.metadata().sides({ local: 2, remote: 2 }).build() + builders + .metadata() + .sides({ local: 2, remote: 2 }) + .build() ) ).deepEqual({ target: 3, local: 3, remote: 3 }) should( sidesAfterInc( - builders.metadata().sides({ local: 3, remote: 2 }).build() + builders + .metadata() + .sides({ local: 3, remote: 2 }) + .build() ) ).deepEqual({ target: 4, local: 4, remote: 3 }) }) @@ -832,7 +959,10 @@ describe('metadata', function () { 
it('returns `local` if `remote` side is absent', () => { should( metadata.detectSingleSide( - builders.metadata().sides({ local: 1 }).build() + builders + .metadata() + .sides({ local: 1 }) + .build() ) ).equal('local') }) @@ -840,14 +970,22 @@ describe('metadata', function () { it('returns `remote` if `local` side is absent', () => { should( metadata.detectSingleSide( - builders.metadata().sides({ remote: 1 }).build() + builders + .metadata() + .sides({ remote: 1 }) + .build() ) ).equal('remote') }) it('returns undefined if both sides are absent', () => { should( - metadata.detectSingleSide(builders.metadata().sides({}).build()) + metadata.detectSingleSide( + builders + .metadata() + .sides({}) + .build() + ) ).be.undefined() }) @@ -857,8 +995,8 @@ describe('metadata', function () { }) }) - describe('buildFile', function () { - it('creates a document for an existing file', async function () { + describe('buildFile', function() { + it('creates a document for an existing file', async function() { const stats = await fse.stat( path.join(__dirname, '../fixtures/chat-mignon.jpg') ) @@ -887,7 +1025,7 @@ describe('metadata', function () { ).deepEqual(remote) }) - it('sets the correct MIME type for Cozy Notes', async function () { + it('sets the correct MIME type for Cozy Notes', async function() { const stats = await fse.stat( path.join(__dirname, '../fixtures/chat-mignon.jpg') ) @@ -917,7 +1055,7 @@ describe('metadata', function () { }) if (platform !== 'win32') { - it('sets the executable bit', async function () { + it('sets the executable bit', async function() { const filePath = path.join(__dirname, '../../tmp/test/executable') const whateverChecksum = '1B2M2Y8AsgTpgAmY7PhCfg==' await fse.ensureFile(filePath) @@ -930,7 +1068,7 @@ describe('metadata', function () { }) describe('buildDir', () => { - it('creates a document for an existing folder', async function () { + it('creates a document for an existing folder', async function() { const stats = await fse.stat(path.join(__dirname, '../fixtures')) const doc = buildDir('fixtures', stats) should(doc).have.properties({ @@ -954,13 +1092,37 @@ describe('metadata', function () { const ino = 123 should( - buildDir(path, builders.stats().ino(ino).mtime(d1).ctime(d1).build()) + buildDir( + path, + builders + .stats() + .ino(ino) + .mtime(d1) + .ctime(d1) + .build() + ) ).have.property('updated_at', d1.toISOString()) should( - buildDir(path, builders.stats().ino(ino).mtime(d1).ctime(d2).build()) + buildDir( + path, + builders + .stats() + .ino(ino) + .mtime(d1) + .ctime(d2) + .build() + ) ).have.property('updated_at', d1.toISOString()) should( - buildDir(path, builders.stats().ino(ino).mtime(d2).ctime(d1).build()) + buildDir( + path, + builders + .stats() + .ino(ino) + .mtime(d2) + .ctime(d1) + .build() + ) ).have.property('updated_at', d2.toISOString()) }) @@ -970,7 +1132,12 @@ describe('metadata', function () { const remote = builders.remoteDir().build() const doc = buildDir( path, - builders.stats().ctime(ctime).mtime(ctime).ino(123).build(), + builders + .stats() + .ctime(ctime) + .mtime(ctime) + .ino(123) + .build(), metadata.serializableRemote(remote) ) should(doc.remote).deepEqual(remote) @@ -979,8 +1146,12 @@ describe('metadata', function () { describe('invariants', () => { let doc - beforeEach(function () { - doc = builders.metadata().remoteId('badbeef').upToDate().build() + beforeEach(function() { + doc = builders + .metadata() + .remoteId('badbeef') + .upToDate() + .build() }) it('throws when trying to put bad doc (no sides)', () => { @@ 
-997,8 +1168,12 @@ describe('metadata', function () { ) }) - it('throws when trying to put bad doc (no md5sum)', function () { - doc = builders.metafile().remoteId('badbeef').upToDate().build() + it('throws when trying to put bad doc (no md5sum)', function() { + doc = builders + .metafile() + .remoteId('badbeef') + .upToDate() + .build() // $FlowFixMe md5sum is null on purpose should(() => invariants(Object.assign(doc, { md5sum: null }))).throw( /checksum/ @@ -1024,7 +1199,11 @@ describe('metadata', function () { describe('markAsUpToDate', () => { let doc beforeEach(async () => { - doc = await builders.metadata().notUpToDate().remoteId('badbeef').build() + doc = await builders + .metadata() + .notUpToDate() + .remoteId('badbeef') + .build() }) it('increments the doc target', () => { @@ -1038,13 +1217,17 @@ describe('metadata', function () { it('returns the new target', () => { const target = markAsUpToDate(doc) - should(target).be.a.Number().and.eql(doc.sides.target) + should(target) + .be.a.Number() + .and.eql(doc.sides.target) }) it('sets both sides to the new target', () => { markAsUpToDate(doc) - should(doc.sides.local).eql(doc.sides.remote).and.eql(doc.sides.target) + should(doc.sides.local) + .eql(doc.sides.remote) + .and.eql(doc.sides.target) }) it('removes errors', () => { @@ -1058,39 +1241,59 @@ describe('metadata', function () { describe('outOfDateSide', () => { it('returns nothing if sides are not set', () => { - const doc1 = builders.metadata().sides({}).build() + const doc1 = builders + .metadata() + .sides({}) + .build() should(outOfDateSide(doc1)).be.undefined() - const doc2 = builders.metadata().sides().build() + const doc2 = builders + .metadata() + .sides() + .build() should(outOfDateSide(doc2)).be.undefined() }) it('returns nothing if sides are equal', () => { - const doc = builders.metadata().sides({ local: 1, remote: 1 }).build() + const doc = builders + .metadata() + .sides({ local: 1, remote: 1 }) + .build() should(outOfDateSide(doc)).be.undefined() }) it('returns "local" if the local side is smaller than the remote one', () => { - const doc = builders.metadata().sides({ local: 1, remote: 2 }).build() + const doc = builders + .metadata() + .sides({ local: 1, remote: 2 }) + .build() should(outOfDateSide(doc)).equal('local') }) it('returns "remote" if the remote side is smaller than the local one', () => { - const doc = builders.metadata().sides({ local: 2, remote: 1 }).build() + const doc = builders + .metadata() + .sides({ local: 2, remote: 1 }) + .build() should(outOfDateSide(doc)).equal('remote') }) }) - describe('createConflictingDoc', function () { + describe('createConflictingDoc', function() { const filepath = 'parent/dir/file.txt' let doc - beforeEach(function () { - doc = builders.metafile().path(filepath).build() + beforeEach(function() { + doc = builders + .metafile() + .path(filepath) + .build() }) it('returns a doc with a different path', () => { const newDoc = createConflictingDoc(doc) - should(newDoc.path).be.a.String().and.not.equal(filepath) + should(newDoc.path) + .be.a.String() + .and.not.equal(filepath) }) it('does not change the other attributes', () => { @@ -1111,13 +1314,19 @@ describe('metadata', function () { isIgnored.restore() }) - it('calls isIgnored with the document normalized path', function () { + it('calls isIgnored with the document normalized path', function() { metadata.shouldIgnore( - builders.metadir().path('échange/nourriture').build(), + builders + .metadir() + .path('échange/nourriture') + .build(), ignore ) 
metadata.shouldIgnore( - builders.metafile().path('échange/nourriture').build(), + builders + .metafile() + .path('échange/nourriture') + .build(), ignore ) @@ -1125,7 +1334,10 @@ describe('metadata', function () { }) it('returns true when document is a folder', () => { - const doc = builders.metadir().path('échange/nourriture').build() + const doc = builders + .metadir() + .path('échange/nourriture') + .build() metadata.shouldIgnore(doc, ignore) should(isIgnored.calledOnce).be.true() @@ -1134,8 +1346,11 @@ describe('metadata', function () { ]) }) - it('returns false when document is a file', function () { - const doc = builders.metafile().path('échange/nourriture').build() + it('returns false when document is a file', function() { + const doc = builders + .metafile() + .path('échange/nourriture') + .build() metadata.shouldIgnore(doc, ignore) should(isIgnored.args[0]).deepEqual([ @@ -1269,8 +1484,13 @@ describe('metadata', function () { }) describe('updateLocal', () => { - it('adds the local attribute if it is missing', function () { - const doc = builders.metafile().ino(1).unmerged('local').noLocal().build() + it('adds the local attribute if it is missing', function() { + const doc = builders + .metafile() + .ino(1) + .unmerged('local') + .noLocal() + .build() const expectedAttributes = _.without( metadata.LOCAL_ATTRIBUTES, 'trashed', @@ -1287,7 +1507,7 @@ describe('metadata', function () { should(doc.local).have.property('trashed') }) - it('fetches the local attributes from the main doc', function () { + it('fetches the local attributes from the main doc', function() { const file1 = builders .metafile() .ino(1) @@ -1323,7 +1543,9 @@ describe('metadata', function () { metadata.updateLocal(file2) - should(file2.local).have.property('executable').be.false() + should(file2.local) + .have.property('executable') + .be.false() const dir = builders .metadir() @@ -1349,7 +1571,7 @@ describe('metadata', function () { ]) }) - it('prefers the provided local attributes', function () { + it('prefers the provided local attributes', function() { const file = builders .metafile() .ino(1) @@ -1386,9 +1608,13 @@ describe('metadata', function () { }) describe('updateRemote', () => { - it('adds the remote attribute if it is missing', function () { + it('adds the remote attribute if it is missing', function() { const remoteFile = builders.remoteFile().build() - const doc = builders.metafile().unmerged('remote').noRemote().build() + const doc = builders + .metafile() + .unmerged('remote') + .noRemote() + .build() metadata.updateRemote(doc, remoteFile) @@ -1405,7 +1631,7 @@ describe('metadata', function () { ]) }) - it('keeps non-overwritten remote attributes', function () { + it('keeps non-overwritten remote attributes', function() { const file = builders .metafile() .path('parent/OLD') @@ -1433,14 +1659,24 @@ describe('metadata', function () { }) }) - describe('comparators', function () { + describe('comparators', function() { let file, folder - beforeEach(async function () { - file = await builders.metafile().ino(1).tags('qux').upToDate().create() - folder = await builders.metadir().ino(1).tags('qux').upToDate().create() + beforeEach(async function() { + file = await builders + .metafile() + .ino(1) + .tags('qux') + .upToDate() + .create() + folder = await builders + .metadir() + .ino(1) + .tags('qux') + .upToDate() + .create() }) - context('when doc is up-to-date', function () { + context('when doc is up-to-date', function() { it('equivalentLocal returns true when comparing doc to its local side', () => { 
should(equivalentLocal(file, file.local)).be.true() should(equivalentLocal(folder, folder.local)).be.true() @@ -1452,8 +1688,8 @@ describe('metadata', function () { }) }) - context('when local only attribute changed', function () { - beforeEach(async function () { + context('when local only attribute changed', function() { + beforeEach(async function() { file = await builders .metafile(file) .ino(2) @@ -1477,8 +1713,8 @@ describe('metadata', function () { }) }) - context('when local attribute changed', function () { - beforeEach(async function () { + context('when local attribute changed', function() { + beforeEach(async function() { file = await builders .metafile(file) .path('newPath') @@ -1502,8 +1738,8 @@ describe('metadata', function () { }) }) - context('when remote only attribute changed', function () { - beforeEach(async function () { + context('when remote only attribute changed', function() { + beforeEach(async function() { file = await builders .metafile(file) .tags('foo') @@ -1527,8 +1763,8 @@ describe('metadata', function () { }) }) - context('when remote attribute changed', function () { - beforeEach(async function () { + context('when remote attribute changed', function() { + beforeEach(async function() { file = await builders .metafile(file) .path('newPath') @@ -1552,8 +1788,8 @@ describe('metadata', function () { }) }) - context('when local updated_at attribute changed', function () { - beforeEach(async function () { + context('when local updated_at attribute changed', function() { + beforeEach(async function() { file = await builders .metafile(file) .updatedAt(new Date()) @@ -1577,8 +1813,8 @@ describe('metadata', function () { }) }) - context('when remote updated_at attribute changed', function () { - beforeEach(async function () { + context('when remote updated_at attribute changed', function() { + beforeEach(async function() { file = await builders .metafile(file) .updatedAt(new Date()) diff --git a/test/unit/migrations/index.js b/test/unit/migrations/index.js index 6b913e130..869f87c7f 100644 --- a/test/unit/migrations/index.js +++ b/test/unit/migrations/index.js @@ -1,11 +1,12 @@ /* @flow */ /* eslint-env mocha */ +const path = require('path') + const should = require('should') const sinon = require('sinon') -const path = require('path') -const { PouchError } = require('../../../core/pouch/error') +const metadata = require('../../../core/metadata') const { MigrationFailedError, currentSchemaVersion, @@ -19,8 +20,7 @@ const { SCHEMA_DOC_ID, SCHEMA_INITIAL_VERSION } = require('../../../core/migrations/constants') -const metadata = require('../../../core/metadata') - +const { PouchError } = require('../../../core/pouch/error') const configHelpers = require('../../support/helpers/config') const pouchHelpers = require('../../support/helpers/pouch') @@ -29,14 +29,14 @@ import type { Migration } from '../../../core/migrations/migrations' import type { SavedMetadata } from '../../../core/metadata' */ -describe('core/migrations', function () { +describe('core/migrations', function() { before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) afterEach('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) let createdDocs - beforeEach('create folders and files', async function () { + beforeEach('create folders and files', async function() { createdDocs = [await pouchHelpers.createParentFolder(this.pouch)] for (let i of [1, 2, 3]) { createdDocs.push( @@ -57,7 +57,7 @@ 
describe('core/migrations', function () { describe('runMigrations', () => { let currentVersion /* number */ let availableMigrations /*: Migration[] */ - beforeEach('create migrations', async function () { + beforeEach('create migrations', async function() { currentVersion = await currentSchemaVersion(this.pouch.db) availableMigrations = [ { @@ -102,7 +102,7 @@ describe('core/migrations', function () { ] }) - it('runs all given migrations', async function () { + it('runs all given migrations', async function() { await runMigrations(availableMigrations, this) const docs = await this.pouch.byRecursivePath('') @@ -113,7 +113,7 @@ describe('core/migrations', function () { }) }) - it('retries failed migrations', async function () { + it('retries failed migrations', async function() { let calls = 0 const migrationFailingOnce = { baseSchemaVersion: availableMigrations[1].baseSchemaVersion, @@ -144,7 +144,7 @@ describe('core/migrations', function () { }) }) - it('throws a MigrationFailedError in case both attempts failed', async function () { + it('throws a MigrationFailedError in case both attempts failed', async function() { const migrationFailing = { baseSchemaVersion: availableMigrations[1].baseSchemaVersion, targetSchemaVersion: availableMigrations[1].targetSchemaVersion, @@ -179,13 +179,13 @@ describe('core/migrations', function () { describe('currentSchemaVersion()', () => { context('without schema', () => { - beforeEach('remove schema', async function () { + beforeEach('remove schema', async function() { if (await this.pouch.byIdMaybe(SCHEMA_DOC_ID)) { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, _deleted: true }) } }) - it('returns SCHEMA_INITIAL_VERSION', async function () { + it('returns SCHEMA_INITIAL_VERSION', async function() { await should(currentSchemaVersion(this.pouch.db)).be.fulfilledWith( SCHEMA_INITIAL_VERSION ) @@ -193,11 +193,11 @@ describe('core/migrations', function () { }) context('with a schema missing its version', () => { - beforeEach('corrupt schema', async function () { + beforeEach('corrupt schema', async function() { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, version: undefined }) }) - it('returns SCHEMA_INITIAL_VERSION', async function () { + it('returns SCHEMA_INITIAL_VERSION', async function() { await should(currentSchemaVersion(this.pouch.db)).be.fulfilledWith( SCHEMA_INITIAL_VERSION ) @@ -207,11 +207,11 @@ describe('core/migrations', function () { context('with a valid schema', () => { const version = 12 - beforeEach('create schema', async function () { + beforeEach('create schema', async function() { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, version }) }) - it('returns the version of the schema', async function () { + it('returns the version of the schema', async function() { await should(currentSchemaVersion(this.pouch.db)).be.fulfilledWith( version ) @@ -223,13 +223,13 @@ describe('core/migrations', function () { const version = 12 context('without schema', () => { - beforeEach('remove schema', async function () { + beforeEach('remove schema', async function() { if (await this.pouch.byIdMaybe(SCHEMA_DOC_ID)) { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, _deleted: true }) } }) - it('creates the schema with the given version', async function () { + it('creates the schema with the given version', async function() { await should(updateSchemaVersion(version, this.pouch.db)).be.fulfilled() await should(currentSchemaVersion(this.pouch.db)).be.fulfilledWith( version @@ -238,11 +238,11 @@ describe('core/migrations', function () { }) context('with a schema 
missing its version', () => { - beforeEach('corrupt schema', async function () { + beforeEach('corrupt schema', async function() { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, version: undefined }) }) - it('creates the schema with the given version', async function () { + it('creates the schema with the given version', async function() { await should(updateSchemaVersion(version, this.pouch.db)).be.fulfilled() await should(currentSchemaVersion(this.pouch.db)).be.fulfilledWith( version @@ -253,11 +253,11 @@ describe('core/migrations', function () { context('with a valid schema', () => { const version = 12 - beforeEach('create schema', async function () { + beforeEach('create schema', async function() { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, version }) }) - it('updates the version of the schema', async function () { + it('updates the version of the schema', async function() { const newVersion = version + 1 await should( updateSchemaVersion(newVersion, this.pouch.db) @@ -294,19 +294,19 @@ describe('core/migrations', function () { context( 'when the current schema version is lower than the migration base schema version', () => { - beforeEach('set schema version', async function () { + beforeEach('set schema version', async function() { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, version: migration.baseSchemaVersion - 1 }) }) - it('does not run the migration', async function () { + it('does not run the migration', async function() { await migrate(migration, this) should(migration.run).not.have.been.called() }) - it('does not update the schema version', async function () { + it('does not update the schema version', async function() { const previousSchemaVersion = await currentSchemaVersion( this.pouch.db ) @@ -322,19 +322,19 @@ describe('core/migrations', function () { context( 'when the current schema version is higher than the migration base schema version', () => { - beforeEach('set schema version', async function () { + beforeEach('set schema version', async function() { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, version: migration.baseSchemaVersion + 1 }) }) - it('does not run the migration', async function () { + it('does not run the migration', async function() { await migrate(migration, this) should(migration.run).not.have.been.called() }) - it('does not update the schema version', async function () { + it('does not update the schema version', async function() { const previousSchemaVersion = await currentSchemaVersion( this.pouch.db ) @@ -350,7 +350,7 @@ describe('core/migrations', function () { context( 'when the current schema version equals the migration base schema version', () => { - beforeEach('set schema version', async function () { + beforeEach('set schema version', async function() { await this.pouch.db.put({ _id: SCHEMA_DOC_ID, version: migration.baseSchemaVersion @@ -369,7 +369,7 @@ describe('core/migrations', function () { migrationAffectedDocs.restore() }) - it('does not save any docs', async function () { + it('does not save any docs', async function() { await migrate(migration, this) const docs = await this.pouch.allDocs() @@ -377,7 +377,7 @@ describe('core/migrations', function () { should(migratedDocs).be.empty() }) - it('sets the schema version to the migration target schema version', async function () { + it('sets the schema version to the migration target schema version', async function() { await migrate(migration, this) await should(currentSchemaVersion(this.pouch.db)).be.fulfilledWith( migration.targetSchemaVersion @@ -386,7 +386,7 @@ 
describe('core/migrations', function () { }) context('and some docs needed to be migrated', () => { - it('runs the migration on all affected docs', async function () { + it('runs the migration on all affected docs', async function() { const docs = await this.pouch.allDocs() await migrate(migration, { pouch: this.pouch, remote: this.remote }) @@ -397,7 +397,7 @@ describe('core/migrations', function () { ]) }) - it('saves the migrated docs', async function () { + it('saves the migrated docs', async function() { await migrate(migration, this) const docs = await this.pouch.allDocs() @@ -406,14 +406,14 @@ describe('core/migrations', function () { }) context('and the docs were successfully saved', () => { - it('sets the schema version to the migration target schema version', async function () { + it('sets the schema version to the migration target schema version', async function() { await migrate(migration, this) await should( currentSchemaVersion(this.pouch.db) ).be.fulfilledWith(migration.targetSchemaVersion) }) - it('sets the localSeq to the last change seq', async function () { + it('sets the localSeq to the last change seq', async function() { const expected = await this.pouch.db.changes({ since: 0 }) await migrate(migration, this) await should(this.pouch.getLocalSeq()).be.fulfilledWith( @@ -421,14 +421,14 @@ describe('core/migrations', function () { ) }) - it('does not update the remoteSeq', async function () { + it('does not update the remoteSeq', async function() { const expected = await this.pouch.getRemoteSeq() await migrate(migration, this) await should(this.pouch.getRemoteSeq()).be.fulfilledWith(expected) }) - it('does not prevent synchronizing merged changes', async function () { + it('does not prevent synchronizing merged changes', async function() { // We should have 7 unsynced docs, created in the main beforeEach const unsyncedDocIds = createdDocs.map(d => d._id) @@ -464,14 +464,14 @@ describe('core/migrations', function () { ) }) - it('reverts all changes', async function () { + it('reverts all changes', async function() { const docs = await this.pouch.allDocs() await migrate(migration, this) await should(this.pouch.allDocs()).be.fulfilledWith(docs) }) - it('does not update the schema version', async function () { + it('does not update the schema version', async function() { const previousSchemaVersion = await currentSchemaVersion( this.pouch.db ) @@ -482,14 +482,14 @@ describe('core/migrations', function () { ).be.fulfilledWith(previousSchemaVersion) }) - it('does not update the localSeq', async function () { + it('does not update the localSeq', async function() { const expected = await this.pouch.getLocalSeq() await migrate(migration, this) await should(this.pouch.getLocalSeq()).be.fulfilledWith(expected) }) - it('does not update the remoteSeq', async function () { + it('does not update the remoteSeq', async function() { const expected = await this.pouch.getRemoteSeq() await migrate(migration, this) @@ -503,7 +503,7 @@ describe('core/migrations', function () { describe('save()', () => { context('with no docs', () => { - it('returns a MigrationNoop result', async function () { + it('returns a MigrationNoop result', async function() { await should(save([], this.pouch.db)).be.fulfilledWith({ type: 'MigrationNoop', errors: [] @@ -513,21 +513,21 @@ describe('core/migrations', function () { context('with only valid docs', () => { let docs - beforeEach('fetch and update docs', async function () { + beforeEach('fetch and update docs', async function() { docs = await 
this.pouch.allDocs() docs.forEach(d => { d.migrated = true }) }) - it('returns a MigrationComplete result', async function () { + it('returns a MigrationComplete result', async function() { await should(save(docs, this.pouch.db)).be.fulfilledWith({ type: 'MigrationComplete', errors: [] }) }) - it('saves the new version of all documents', async function () { + it('saves the new version of all documents', async function() { await save(docs, this.pouch.db) const savedDocs = await this.pouch.allDocs() @@ -540,7 +540,7 @@ describe('core/migrations', function () { const isCorruptedDoc = index => index % 2 === 1 let docs - beforeEach('fetch and update docs', async function () { + beforeEach('fetch and update docs', async function() { docs = await this.pouch.allDocs() docs.forEach((d, index) => { d.migrated = true @@ -548,7 +548,7 @@ describe('core/migrations', function () { }) }) - it('returns a MigrationFailed result', async function () { + it('returns a MigrationFailed result', async function() { await should(save(docs, this.pouch.db)).be.fulfilledWith({ type: 'MigrationFailed', errors: docs @@ -564,7 +564,7 @@ describe('core/migrations', function () { }) }) - it('saves the new version of all valid documents', async function () { + it('saves the new version of all valid documents', async function() { await save(docs, this.pouch.db) const maybeMigratedDocs = await this.pouch.allDocs() @@ -583,7 +583,7 @@ describe('core/migrations', function () { const migration = migrations[0] describe('affectedDocs()', () => { - it('returns an empty array when all docs have sides.target', async function () { + it('returns an empty array when all docs have sides.target', async function() { const docs = (await this.pouch.allDocs()).map(doc => { doc.sides.target = 2 return doc @@ -591,7 +591,7 @@ describe('core/migrations', function () { should(migration.affectedDocs(docs)).be.empty() }) - it('returns only docs missing sides.target', async function () { + it('returns only docs missing sides.target', async function() { const docs = await this.pouch.allDocs() const incompleteDocs = docs.filter((doc, index) => index % 2 === 0) docs @@ -605,7 +605,7 @@ describe('core/migrations', function () { }) describe('run()', () => { - it('sets sides.target with the short rev extracted from _rev', async function () { + it('sets sides.target with the short rev extracted from _rev', async function() { const docs = await this.pouch.allDocs() const expected = docs.map(doc => ({ ...doc, diff --git a/test/unit/move.js b/test/unit/move.js index a22a0dc6a..982849e8c 100644 --- a/test/unit/move.js +++ b/test/unit/move.js @@ -4,18 +4,18 @@ const _ = require('lodash') const should = require('should') +const move = require('../../core/move') +const { otherSide } = require('../../core/side') +const pathUtils = require('../../core/utils/path') +const Builders = require('../support/builders') const configHelpers = require('../support/helpers/config') const pouchHelpers = require('../support/helpers/pouch') -const Builders = require('../support/builders') -const pathUtils = require('../../core/utils/path') -const { otherSide } = require('../../core/side') -const move = require('../../core/move') describe('move', () => { let builders before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('prepare builders', function () { + beforeEach('prepare builders', function() { builders = new Builders({ pouch: this.pouch }) }) afterEach('clean pouch', pouchHelpers.cleanDatabase) @@ 
-67,7 +67,9 @@ describe('move', () => { move.child('local', src, dst) - should(dst).have.propertyByPath('moveFrom', 'childMove').eql(true) + should(dst) + .have.propertyByPath('moveFrom', 'childMove') + .eql(true) }) }) @@ -76,7 +78,11 @@ describe('move', () => { let src, dst beforeEach(async () => { - src = await builders.metadata().path('src').upToDate().create() + src = await builders + .metadata() + .path('src') + .upToDate() + .create() dst = builders .metadata() .moveFrom(src) diff --git a/test/unit/pouch/index.js b/test/unit/pouch/index.js index 84efaf749..0e3935c0d 100644 --- a/test/unit/pouch/index.js +++ b/test/unit/pouch/index.js @@ -1,28 +1,28 @@ /* eslint-env mocha */ /* @flow */ -const Promise = require('bluebird') const path = require('path') -const should = require('should') -const sinon = require('sinon') + +const Promise = require('bluebird') const _ = require('lodash') const { REV_CONFLICT } = require('pouchdb') +const should = require('should') +const sinon = require('sinon') const metadata = require('../../../core/metadata') const { sortByPath, createBatches } = require('../../../core/pouch') - const Builders = require('../../support/builders') const configHelpers = require('../../support/helpers/config') const pouchHelpers = require('../../support/helpers/pouch') -describe('Pouch', function () { +describe('Pouch', function() { before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) afterEach('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) let createdDocs - beforeEach('create folders and files', async function () { + beforeEach('create folders and files', async function() { createdDocs = [await pouchHelpers.createParentFolder(this.pouch)] for (let i of [1, 2, 3]) { createdDocs.push( @@ -41,7 +41,7 @@ describe('Pouch', function () { }) describe('lock', () => { - it('ensures nobody else accesses Pouch until released', async function () { + it('ensures nobody else accesses Pouch until released', async function() { const promiseLock1 = this.pouch.lock('lock1') await should(promiseLock1).be.fulfilled() const releaseLock1 = promiseLock1.value() @@ -61,18 +61,22 @@ describe('Pouch', function () { }) }) - describe('ODM', function () { + describe('ODM', function() { describe('put', () => { let doc, old - beforeEach(async function () { + beforeEach(async function() { const builders = new Builders({ pouch: this.pouch }) - old = await builders.metafile().path('doc').upToDate().create() + old = await builders + .metafile() + .path('doc') + .upToDate() + .create() doc = _.cloneDeep(old) }) - it('does not update doc without sides', async function () { + it('does not update doc without sides', async function() { _.unset(doc, 'sides') await should(this.pouch.put(doc)).be.rejected() @@ -80,11 +84,11 @@ describe('Pouch', function () { }) context('when doc is not deleted', () => { - beforeEach(function () { + beforeEach(function() { doc._deleted = false }) - it('does not update doc with a remote side and no remote', async function () { + it('does not update doc with a remote side and no remote', async function() { _.assign(doc, { remote: undefined, sides: { remote: 1 } }) await should(this.pouch.put(doc)).be.rejected() @@ -93,12 +97,12 @@ describe('Pouch', function () { }) context('when doc is not up to date', () => { - beforeEach(function () { + beforeEach(function() { doc.sides.local = 1 doc.sides.remote = 2 }) - it('does not update doc with a remote side and no remote', 
async function () { + it('does not update doc with a remote side and no remote', async function() { _.assign(doc, { remote: undefined }) await should(this.pouch.put(doc)).be.rejected() @@ -110,14 +114,18 @@ describe('Pouch', function () { describe('remove', () => { let doc, old - beforeEach(async function () { + beforeEach(async function() { const builders = new Builders({ pouch: this.pouch }) - old = await builders.metafile().path('doc').upToDate().create() + old = await builders + .metafile() + .path('doc') + .upToDate() + .create() doc = _.clone(old) }) - it('updates the _deleted attribute of the doc', async function () { + it('updates the _deleted attribute of the doc', async function() { await (() => { this.pouch.remove(doc) }).should.not.throw() @@ -133,26 +141,40 @@ describe('Pouch', function () { describe('bulkDocs', () => { let builders, doc1, doc2, old1, old2 - beforeEach(async function () { + beforeEach(async function() { builders = new Builders({ pouch: this.pouch }) - old1 = await builders.metafile().path('doc1').upToDate().create() - old2 = await builders.metafile().path('doc2').upToDate().create() + old1 = await builders + .metafile() + .path('doc1') + .upToDate() + .create() + old2 = await builders + .metafile() + .path('doc2') + .upToDate() + .create() doc1 = _.clone(old1) doc2 = _.clone(old2) }) - it('adds missing ids', async function () { + it('adds missing ids', async function() { await should( this.pouch.bulkDocs([ - builders.metafile().upToDate().build(), - builders.metadir().upToDate().build() + builders + .metafile() + .upToDate() + .build(), + builders + .metadir() + .upToDate() + .build() ]) ).be.fulfilled() }) - it(`does not save two docs swallowing error on first one`, async function () { + it(`does not save two docs swallowing error on first one`, async function() { doc1._rev = '2-badbeef' await should(this.pouch.bulkDocs([doc1, doc2])).be.rejectedWith({ status: 409 @@ -161,7 +183,7 @@ describe('Pouch', function () { should((await this.pouch.db.get(doc2._id))._rev).not.equal(old2._rev) }) - it(`does not save two docs swallowing error on second one`, async function () { + it(`does not save two docs swallowing error on second one`, async function() { doc2._rev = '2-badbeef' await should(this.pouch.bulkDocs([doc1, doc2])).be.rejectedWith({ status: 409 @@ -172,7 +194,7 @@ describe('Pouch', function () { }) describe('getAll', () => { - it('returns all the documents matching the query', async function () { + it('returns all the documents matching the query', async function() { const params = { startkey: [metadata.id('my-folder') + path.sep, ''], endkey: [metadata.id('my-folder') + path.sep, '\ufff0'], @@ -187,7 +209,7 @@ describe('Pouch', function () { }) describe('byIdMaybe', () => { - it('resolves with a doc matching the given _id if any', async function () { + it('resolves with a doc matching the given _id if any', async function() { const existing = await this.pouch.db.post({ docType: metadata.FOLDER, path: 'my-folder' @@ -199,12 +221,12 @@ describe('Pouch', function () { }) }) - it('resolves with nothing otherwise', async function () { + it('resolves with nothing otherwise', async function() { const doc = await this.pouch.byIdMaybe('not-found') should(doc).be.undefined() }) - it('does not swallow non-404 errors', async function () { + it('does not swallow non-404 errors', async function() { const get = sinon.stub(this.pouch.db, 'get').rejects(REV_CONFLICT) try { await should( @@ -217,20 +239,20 @@ describe('Pouch', function () { }) describe('bySyncedPath', () => 
{ - it('resolves with the doc whose path attribute matches the given path', async function () { + it('resolves with the doc whose path attribute matches the given path', async function() { for (const doc of createdDocs) { await should(this.pouch.bySyncedPath(doc.path)).be.fulfilledWith(doc) } }) - it('resolves with nothing otherwise', async function () { + it('resolves with nothing otherwise', async function() { const doc = await this.pouch.bySyncedPath('not-found') should(doc).be.undefined() }) }) describe('byChecksum', () => { - it('gets all the files with this checksum', async function () { + it('gets all the files with this checksum', async function() { const filePath = path.join('my-folder', 'file-1') const checksum = `111111111111111111111111111111111111111${filePath}` const docs = await this.pouch.byChecksum(checksum) @@ -240,8 +262,8 @@ describe('Pouch', function () { }) }) - describe('byPath', function () { - it('gets all the files and folders in this path', async function () { + describe('byPath', function() { + it('gets all the files and folders in this path', async function() { const docs = await this.pouch.byPath(metadata.id('my-folder')) should(docs).have.length(6) should(docs).containDeep( @@ -249,7 +271,7 @@ describe('Pouch', function () { ) }) - it('gets only files and folders in the first level', async function () { + it('gets only files and folders in the first level', async function() { createdDocs.push( await pouchHelpers.createFile( this.pouch, @@ -265,14 +287,14 @@ describe('Pouch', function () { }) }) - it('ignores design documents', async function () { + it('ignores design documents', async function() { const docs = await this.pouch.byPath('_design') docs.length.should.be.equal(0) }) }) - describe('byRecurivePath', function () { - it('gets the files and folders in this path recursively', async function () { + describe('byRecurivePath', function() { + it('gets the files and folders in this path recursively', async function() { const docs = await this.pouch.byRecursivePath('my-folder') docs.length.should.be.equal(6) for (let i = 1; i <= 3; i++) { @@ -289,7 +311,7 @@ describe('Pouch', function () { } }) - it('gets the files and folders from root', async function () { + it('gets the files and folders from root', async function() { const docs = await this.pouch.byRecursivePath('') docs.length.should.be.equal(7) docs[0].should.have.properties({ @@ -312,7 +334,7 @@ describe('Pouch', function () { }) context('in descending mode', () => { - it('sorts the results in descending path order', async function () { + it('sorts the results in descending path order', async function() { const docs = await this.pouch.byRecursivePath('', { descending: true }) @@ -327,7 +349,7 @@ describe('Pouch', function () { }) }) - it('does not return the content of other folders starting with the same path', async function () { + it('does not return the content of other folders starting with the same path', async function() { // create my-folder/folder-11 const similarFolderPath = path.join('my-folder', 'folder-1 other') await pouchHelpers.createFolder(this.pouch, similarFolderPath) @@ -346,8 +368,8 @@ describe('Pouch', function () { }) }) - describe('byRemoteId', function () { - it('gets all the file with this remote id', async function () { + describe('byRemoteId', function() { + it('gets all the file with this remote id', async function() { const filePath = path.join('my-folder', 'file-1') const id = `1234567890-${filePath}` const doc = await this.pouch.byRemoteId(id) @@ -356,7 +378,7 @@ 
describe('Pouch', function () { should.exist(doc.docType) }) - it('returns a 404 error if no file matches', async function () { + it('returns a 404 error if no file matches', async function() { let id = 'abcdef' await should(this.pouch.byRemoteId(id)).be.rejectedWith({ status: 404 @@ -364,8 +386,8 @@ describe('Pouch', function () { }) }) - describe('byRemoteIdMaybe', function () { - it('does the same as byRemoteId() when document exists', async function () { + describe('byRemoteIdMaybe', function() { + it('does the same as byRemoteId() when document exists', async function() { const filePath = path.join('my-folder', 'file-1') const id = `1234567890-${filePath}` const doc = await this.pouch.byRemoteIdMaybe(id) @@ -374,13 +396,13 @@ describe('Pouch', function () { should.exist(doc.docType) }) - it('returns null when document does not exist', async function () { + it('returns null when document does not exist', async function() { let id = 'abcdef' const doc = await this.pouch.byRemoteIdMaybe(id) should.equal(null, doc) }) - it('returns any non-404 error', async function () { + it('returns any non-404 error', async function() { const otherError = new Error('not a 404') sinon.stub(this.pouch, 'byRemoteId').throws(otherError) @@ -393,7 +415,7 @@ describe('Pouch', function () { describe('#allByRemoteIds()', () => { let dir, file - beforeEach(async function () { + beforeEach(async function() { const builders = new Builders({ pouch: this.pouch }) dir = await builders .metadir() @@ -407,14 +429,14 @@ describe('Pouch', function () { .create() }) - it('resolves with docs matching the given remoteIds, in the same order', async function () { + it('resolves with docs matching the given remoteIds, in the same order', async function() { const expectedDocs = [file, dir] const remoteIds = expectedDocs.map(doc => doc.remote._id) const docs = await this.pouch.allByRemoteIds(remoteIds) should(docs).deepEqual(expectedDocs) }) - it('resolves with matching docs except missing ones', async function () { + it('resolves with matching docs except missing ones', async function() { const docs = await this.pouch.allByRemoteIds([ dir.remote._id, 'missing', @@ -423,23 +445,23 @@ describe('Pouch', function () { should(docs).deepEqual([dir, file]) }) - it('resolves to an empty Array when given a single missing remote id', async function () { + it('resolves to an empty Array when given a single missing remote id', async function() { const docs = await this.pouch.allByRemoteIds(['missing']) should(docs).deepEqual([]) }) - it('resolves to an empty Array when given an empty Array', async function () { + it('resolves to an empty Array when given an empty Array', async function() { const docs = await this.pouch.allByRemoteIds([]) should(docs).deepEqual([]) }) - it('does not care about duplicate ids & docs', async function () { + it('does not care about duplicate ids & docs', async function() { const id = dir.remote._id const docs = await this.pouch.allByRemoteIds([id, id]) should(docs).deepEqual([dir, dir]) }) - it('can take a Set of remoteIds instead of an Array', async function () { + it('can take a Set of remoteIds instead of an Array', async function() { const expectedDocs = [dir, file] const remoteIds = new Set(expectedDocs.map(doc => doc.remote._id)) const docs = await this.pouch.allByRemoteIds(remoteIds) @@ -448,9 +470,13 @@ describe('Pouch', function () { }) describe('initialScanDocs', () => { - it('returns only existing docs with local side and metadata', async function () { + it('returns only existing docs with local 
side and metadata', async function() { const builders = new Builders({ pouch: this.pouch }) - const dir = await builders.metadir().path('dir').upToDate().create() + const dir = await builders + .metadir() + .path('dir') + .upToDate() + .create() const file = await builders .metafile() .path('file') @@ -487,15 +513,15 @@ describe('Pouch', function () { }) }) - describe('Views', function () { - describe('createDesignDoc', function () { + describe('Views', function() { + describe('createDesignDoc', function() { const query = `function (doc) { if (doc.docType === 'file') { emit(doc._id) } }` - it('creates a new design doc', async function () { + it('creates a new design doc', async function() { await this.pouch.createDesignDoc('file', query) const docs = await this.pouch.getAll('file') should(docs).have.length(3) @@ -504,7 +530,7 @@ describe('Pouch', function () { } }) - it('does not update the same design doc', async function () { + it('does not update the same design doc', async function() { await this.pouch.createDesignDoc('file', query) const was = await this.pouch.db.get('_design/file') await this.pouch.createDesignDoc('file', query) @@ -513,7 +539,7 @@ describe('Pouch', function () { designDoc._rev.should.equal(was._rev) }) - it('updates the design doc if the query change', async function () { + it('updates the design doc if the query change', async function() { await this.pouch.createDesignDoc('file', query) const was = await this.pouch.db.get('_design/file') let newQuery = query.replace('file', 'File') @@ -526,7 +552,7 @@ describe('Pouch', function () { }) describe('addByPathView', () => { - it('creates the byPath view', async function () { + it('creates the byPath view', async function() { await this.pouch.addByPathView() const doc = await this.pouch.db.get('_design/byPath') should.exist(doc) @@ -534,7 +560,7 @@ describe('Pouch', function () { }) describe('addByChecksumView', () => { - it('creates the byChecksum view', async function () { + it('creates the byChecksum view', async function() { await this.pouch.addByChecksumView() const doc = await this.pouch.db.get('_design/byChecksum') should.exist(doc) @@ -542,7 +568,7 @@ describe('Pouch', function () { }) describe('addByRemoteIdView', () => { - it('creates the byRemoteId view', async function () { + it('creates the byRemoteId view', async function() { await this.pouch.addByRemoteIdView() const doc = await this.pouch.db.get('_design/byRemoteId') should.exist(doc) @@ -550,7 +576,7 @@ describe('Pouch', function () { }) describe('removeDesignDoc', () => { - it('removes given view', async function () { + it('removes given view', async function() { let query = `function (doc) { if (doc.docType === '${metadata.FOLDER}') { emit(doc._id); @@ -566,9 +592,9 @@ describe('Pouch', function () { }) }) - describe('Helpers', function () { + describe('Helpers', function() { describe('getPreviousRev', () => { - it('retrieves previous document informations', async function () { + it('retrieves previous document informations', async function() { const dirPath = path.join('my-folder', 'folder-1') const doc = await this.pouch.bySyncedPath(dirPath) @@ -602,17 +628,17 @@ describe('Pouch', function () { describe('localTree', () => { let builders - beforeEach(async function () { + beforeEach(async function() { builders = new Builders({ pouch: this.pouch }) }) - it('returns the local paths of all saved documents', async function () { + it('returns the local paths of all saved documents', async function() { await should(this.pouch.localTree()).be.fulfilledWith( 
createdDocs.map(d => d.local.path).sort() ) }) - it('does not return the paths of remote only documents', async function () { + it('does not return the paths of remote only documents', async function() { await builders .metafile() .path('my-folder/remote-file') @@ -624,7 +650,7 @@ describe('Pouch', function () { ) }) - it('resturns the paths of local only documents', async function () { + it('resturns the paths of local only documents', async function() { const localFile = await builders .metafile() .path('my-folder/local-file') @@ -641,15 +667,15 @@ describe('Pouch', function () { }) }) - describe('Sequence numbers', function () { + describe('Sequence numbers', function() { describe('getLocalSeq', () => { - it('gets 0 when the local seq number is not initialized', async function () { + it('gets 0 when the local seq number is not initialized', async function() { await should(this.pouch.getLocalSeq()).be.fulfilledWith(0) }) }) describe('setLocalSeq', () => { - it('saves the local sequence number', async function () { + it('saves the local sequence number', async function() { await this.pouch.setLocalSeq(21) await should(this.pouch.getLocalSeq()).be.fulfilledWith(21) await this.pouch.setLocalSeq(22) @@ -658,20 +684,20 @@ describe('Pouch', function () { }) describe('getRemoteSeq', () => { - it('gets 0 when the remote seq number is not initialized', async function () { + it('gets 0 when the remote seq number is not initialized', async function() { await should(this.pouch.getRemoteSeq()).be.fulfilledWith('0') }) }) - describe('setRemoteSeq', function () { - it('saves the remote sequence number', async function () { + describe('setRemoteSeq', function() { + it('saves the remote sequence number', async function() { await this.pouch.setRemoteSeq('31') await should(this.pouch.getRemoteSeq()).be.fulfilledWith('31') await this.pouch.setRemoteSeq('32') await should(this.pouch.getRemoteSeq()).be.fulfilledWith('32') }) - it('can be called multiple times in parallel', async function () { + it('can be called multiple times in parallel', async function() { await Promise.map( _.range(1, 101), seq => this.pouch.setRemoteSeq(String(seq)), @@ -681,32 +707,32 @@ describe('Pouch', function () { }) }) - describe('unsyncedDocIds', function () { - it('returns the list of changed docs since the current local sequence', async function () { + describe('unsyncedDocIds', function() { + it('returns the list of changed docs since the current local sequence', async function() { const changedDocIds = createdDocs.map(d => d._id) await should(this.pouch.unsyncedDocIds()).be.fulfilledWith(changedDocIds) }) - it('can be called multiple times in a row', async function () { + it('can be called multiple times in a row', async function() { const unsyncedDocIds = await this.pouch.unsyncedDocIds() await should(this.pouch.unsyncedDocIds()).be.fulfilledWith(unsyncedDocIds) }) }) - describe('touchDocs', function () { - it('does nothing when no document ids are given', async function () { + describe('touchDocs', function() { + it('does nothing when no document ids are given', async function() { await should(this.pouch.touchDocs([])).be.fulfilledWith([]) }) - it('does nothing when no documents exist with the given ids', async function () { + it('does nothing when no documents exist with the given ids', async function() { await should( this.pouch.touchDocs(['inexistant-doc-id']) ).be.fulfilledWith([]) }) - it('updates the _rev value of all existing documents with the given ids', async function () { + it('updates the _rev value of all 
existing documents with the given ids', async function() { const touchResult = await this.pouch.touchDocs( createdDocs.map(d => d._id) ) diff --git a/test/unit/prep.js b/test/unit/prep.js index ca638c3a4..8529d53da 100644 --- a/test/unit/prep.js +++ b/test/unit/prep.js @@ -1,15 +1,15 @@ /* eslint-env mocha */ -const sinon = require('sinon') -const should = require('should') const _ = require('lodash') +const should = require('should') +const sinon = require('sinon') -const { FOLDER } = require('../../core/metadata') const { Ignore } = require('../../core/ignore') +const { FOLDER } = require('../../core/metadata') const Prep = require('../../core/prep') -describe('Prep', function () { - beforeEach('instanciate prep', function () { +describe('Prep', function() { + beforeEach('instanciate prep', function() { this.side = 'local' this.merge = { addFileAsync: sinon.stub(), @@ -28,15 +28,15 @@ describe('Prep', function () { this.prep = new Prep(this.merge, this.ignore) }) - describe('Put', function () { - describe('addFile', function () { - it('expects a doc with a valid path', async function () { + describe('Put', function() { + describe('addFile', function() { + it('expects a doc with a valid path', async function() { await should( this.prep.addFileAsync(this.side, { path: '/' }) ).be.rejectedWith('Invalid path') }) - it('rejects a doc with no checksum', async function () { + it('rejects a doc with no checksum', async function() { this.merge.addFileAsync.resolves() let doc = { path: 'no-checksum', @@ -47,7 +47,7 @@ describe('Prep', function () { ) }) - it('rejects doc with an invalid checksum', async function () { + it('rejects doc with an invalid checksum', async function() { let doc = { path: 'invalid-checksum', md5sum: 'foobar' @@ -57,7 +57,7 @@ describe('Prep', function () { ) }) - it('calls Merge with the correct fields', async function () { + it('calls Merge with the correct fields', async function() { this.merge.addFileAsync.resolves() let doc = { path: 'foo/missing-fields', @@ -69,7 +69,7 @@ describe('Prep', function () { // FIXME: should.exist(doc.updated_at) }) - it('does nothing for ignored paths on local', async function () { + it('does nothing for ignored paths on local', async function() { let doc = { path: 'ignored', md5sum: 'rcg7GeeTSRscbqD9i0bNnw==' @@ -79,14 +79,14 @@ describe('Prep', function () { }) }) - describe('updateFile', function () { - it('expects a doc with a valid path', async function () { + describe('updateFile', function() { + it('expects a doc with a valid path', async function() { await should( this.prep.updateFileAsync(this.side, { path: '/' }) ).be.rejectedWith('Invalid path') }) - it('rejects doc with no checksum', async function () { + it('rejects doc with no checksum', async function() { this.merge.updateFileAsync.resolves() let doc = { path: 'no-checksum', @@ -97,7 +97,7 @@ describe('Prep', function () { ) }) - it('rejects doc with an invalid checksum', async function () { + it('rejects doc with an invalid checksum', async function() { let doc = { path: 'no-checksum', md5sum: 'foobar' @@ -107,7 +107,7 @@ describe('Prep', function () { ) }) - it('calls Merge with the correct fields', async function () { + it('calls Merge with the correct fields', async function() { this.merge.updateFileAsync.resolves() let doc = { path: 'foobar/missing-fields', @@ -119,7 +119,7 @@ describe('Prep', function () { // FIXME: should.exist(doc.updated_at) }) - it('does nothing for ignored paths on local', async function () { + it('does nothing for ignored paths on local', async 
function() { let doc = { path: 'ignored', md5sum: 'rcg7GeeTSRscbqD9i0bNnw==' @@ -129,14 +129,14 @@ describe('Prep', function () { }) }) - describe('putFolder', function () { - it('expects a doc with a valid path', async function () { + describe('putFolder', function() { + it('expects a doc with a valid path', async function() { await should( this.prep.putFolderAsync(this.side, { path: '..' }) ).be.rejectedWith('Invalid path') }) - it('calls Merge with the correct fields', async function () { + it('calls Merge with the correct fields', async function() { this.merge.putFolderAsync.resolves() let doc = { path: 'foo/folder-missing-fields' } await this.prep.putFolderAsync(this.side, doc) @@ -145,7 +145,7 @@ describe('Prep', function () { // FIXME: should.exist(doc.updated_at) }) - it('does nothing for ignored paths on local', async function () { + it('does nothing for ignored paths on local', async function() { let doc = { path: 'ignored' } await this.prep.putFolderAsync('local', doc) this.merge.putFolderAsync.called.should.be.false() @@ -153,9 +153,9 @@ describe('Prep', function () { }) }) - describe('Move', function () { - describe('moveFile', function () { - it('expects a doc with a valid path', async function () { + describe('Move', function() { + describe('moveFile', function() { + it('expects a doc with a valid path', async function() { let doc = { path: '' } let was = { path: 'foo/baz' } await should( @@ -163,7 +163,7 @@ describe('Prep', function () { ).be.rejectedWith('Invalid path') }) - it('expects a was with a valid path', async function () { + it('expects a was with a valid path', async function() { let doc = { path: 'foo/bar' } let was = { path: '' } await should( @@ -171,7 +171,7 @@ describe('Prep', function () { ).be.rejectedWith('Invalid path') }) - it('expects a doc with a valid checksum', async function () { + it('expects a doc with a valid checksum', async function() { let doc = { path: 'foo/bar', docType: 'file', @@ -183,7 +183,7 @@ describe('Prep', function () { ).be.rejectedWith('Invalid checksum') }) - it('expects a revision for was', async function () { + it('expects a revision for was', async function() { let doc = { path: 'foo/bar', docType: 'file', @@ -199,7 +199,7 @@ describe('Prep', function () { ).be.rejectedWith('Missing rev') }) - it('calls updateFileAsync if src and dst paths are the same', async function () { + it('calls updateFileAsync if src and dst paths are the same', async function() { sinon.spy(this.prep, 'updateFileAsync') let doc = { @@ -218,7 +218,7 @@ describe('Prep', function () { this.prep.updateFileAsync.restore() }) - it('calls trashFileAsync if dst path is ignored', async function () { + it('calls trashFileAsync if dst path is ignored', async function() { sinon.spy(this.prep, 'trashFileAsync') const updated_at = new Date() @@ -260,7 +260,7 @@ describe('Prep', function () { this.prep.trashFileAsync.restore() }) - it('calls Merge with the correct fields', async function () { + it('calls Merge with the correct fields', async function() { this.merge.moveFileAsync.resolves() let doc = { path: 'FOO/new-missing-fields.jpg', @@ -286,8 +286,8 @@ describe('Prep', function () { }) }) - describe('moveFolder', function () { - it('expects a doc with a valid path', async function () { + describe('moveFolder', function() { + it('expects a doc with a valid path', async function() { let doc = { path: '' } let was = { path: 'foo/baz' } await should( @@ -295,7 +295,7 @@ describe('Prep', function () { ).be.rejectedWith('Invalid path') }) - it('expects a was with a 
valid id', async function () { + it('expects a was with a valid id', async function() { let doc = { path: 'foo/bar' } let was = { path: '' } await should( @@ -303,7 +303,7 @@ describe('Prep', function () { ).be.rejectedWith('Invalid path') }) - it('expects a revision for was', async function () { + it('expects a revision for was', async function() { let doc = { path: 'foo/bar', docType: FOLDER @@ -317,7 +317,7 @@ describe('Prep', function () { ).be.rejectedWith('Missing rev') }) - it('calls putFolderAsync if src and dst paths are the same', async function () { + it('calls putFolderAsync if src and dst paths are the same', async function() { sinon.spy(this.prep, 'putFolderAsync') let doc = { @@ -334,7 +334,7 @@ describe('Prep', function () { this.prep.putFolderAsync.restore() }) - it('calls trashFolderAsync if dst path is ignored', async function () { + it('calls trashFolderAsync if dst path is ignored', async function() { sinon.spy(this.prep, 'trashFolderAsync') const updated_at = new Date() @@ -368,7 +368,7 @@ describe('Prep', function () { this.prep.trashFolderAsync.restore() }) - it('calls Merge with the correct fields', async function () { + it('calls Merge with the correct fields', async function() { this.merge.moveFolderAsync.resolves() let doc = { path: 'FOOBAR/new-missing-fields' } let was = { @@ -388,15 +388,15 @@ describe('Prep', function () { }) }) - describe('Delete', function () { - describe('deleteFile', function () { - it('expects a doc with a valid path', async function () { + describe('Delete', function() { + describe('deleteFile', function() { + it('expects a doc with a valid path', async function() { await should( this.prep.deleteFileAsync(this.side, { path: '/' }) ).be.rejectedWith('Invalid path') }) - it('calls Merge with the correct fields', async function () { + it('calls Merge with the correct fields', async function() { this.merge.deleteFileAsync.resolves() let doc = { path: 'kill/file' } await this.prep.deleteFileAsync(this.side, doc) @@ -404,21 +404,21 @@ describe('Prep', function () { doc.docType.should.equal('file') }) - it('does nothing for ignored paths on local', async function () { + it('does nothing for ignored paths on local', async function() { let doc = { path: 'ignored' } await this.prep.deleteFileAsync('local', doc) this.merge.deleteFileAsync.called.should.be.false() }) }) - describe('deleteFolder', function () { - it('expects a doc with a valid path', async function () { + describe('deleteFolder', function() { + it('expects a doc with a valid path', async function() { await should( this.prep.deleteFolderAsync(this.side, { path: '/' }) ).be.rejectedWith('Invalid path') }) - it('calls Merge with the correct fields', async function () { + it('calls Merge with the correct fields', async function() { this.merge.deleteFolderAsync.resolves() let doc = { path: 'kill/folder' } await this.prep.deleteFolderAsync(this.side, doc) @@ -426,7 +426,7 @@ describe('Prep', function () { doc.docType.should.equal(FOLDER) }) - it('does nothing for ignored paths on local', async function () { + it('does nothing for ignored paths on local', async function() { let doc = { path: 'ignored' } await this.prep.deleteFolderAsync('local', doc) this.merge.deleteFolderAsync.called.should.be.false() @@ -435,7 +435,7 @@ describe('Prep', function () { }) describe('trashFileAsync', () => { - it('throws when the trashed path is invalid', async function () { + it('throws when the trashed path is invalid', async function() { const doc = { path: '/' } return 
this.prep.trashFileAsync(this.side, doc).then( @@ -446,7 +446,7 @@ describe('Prep', function () { context('locally with no trashed doc', () => { context('and no local side', () => { - it('calls Merge with the existing record and a copy marked as trashed', async function () { + it('calls Merge with the existing record and a copy marked as trashed', async function() { const was = { path: 'file-to-be-trashed', md5sum: 'rcg7GeeTSRscbqD9i0bNnw==' @@ -464,7 +464,7 @@ describe('Prep', function () { }) context('but a local side', () => { - it('calls Merge with the existing record and a copy marked as trashed', async function () { + it('calls Merge with the existing record and a copy marked as trashed', async function() { const was = { path: 'file-to-be-trashed', md5sum: 'rcg7GeeTSRscbqD9i0bNnw==', @@ -494,7 +494,7 @@ describe('Prep', function () { }) // FIXME - xit('does nothing for ignored paths on local', async function () { + xit('does nothing for ignored paths on local', async function() { const doc = { path: 'ignored' } await this.prep.trashFileAsync(this.side, doc) @@ -504,7 +504,7 @@ describe('Prep', function () { }) describe('trashFolderAsync', () => { - it('throws when the trashed path is invalid', async function () { + it('throws when the trashed path is invalid', async function() { const doc = { path: '/' } return this.prep.trashFolderAsync(this.side, doc).then( @@ -515,7 +515,7 @@ describe('Prep', function () { context('locally with no trashed doc', () => { context('and no local side', () => { - it('calls Merge with the existing record and a copy marked as trashed', async function () { + it('calls Merge with the existing record and a copy marked as trashed', async function() { const was = { path: 'folder-to-be-trashed' } await this.prep.trashFolderAsync(this.side, was) @@ -530,7 +530,7 @@ describe('Prep', function () { }) context('but a local side', () => { - it('calls Merge with the existing record and a copy marked as trashed', async function () { + it('calls Merge with the existing record and a copy marked as trashed', async function() { const was = { path: 'folder-to-be-trashed', local: { path: 'folder-to-be-trashed' } @@ -556,7 +556,7 @@ describe('Prep', function () { }) // FIXME - xit('does nothing for ignored paths on local', async function () { + xit('does nothing for ignored paths on local', async function() { const doc = { path: 'ignored' } await this.prep.trashFolderAsync(this.side, doc) diff --git a/test/unit/regressions/850.js b/test/unit/regressions/850.js index feefeebd2..70accf6af 100644 --- a/test/unit/regressions/850.js +++ b/test/unit/regressions/850.js @@ -2,39 +2,39 @@ /* eslint-env mocha */ /* @flow */ -const fse = require('fs-extra') -const path = require('path') -const sinon = require('sinon') const EventEmitter = require('events') +const path = require('path') + const { Promise } = require('bluebird') +const fse = require('fs-extra') +const sinon = require('sinon') // import { TMP_DIR_NAME } from '../../../core/local/constants' +const { Ignore } = require('../../../core/ignore') const ChokidarEvent = require('../../../core/local/chokidar/event') const Watcher = require('../../../core/local/chokidar/watcher') const { Merge } = require('../../../core/merge') +const metadata = require('../../../core/metadata') const Prep = require('../../../core/prep') -const { Ignore } = require('../../../core/ignore') const { Sync } = require('../../../core/sync') -const metadata = require('../../../core/metadata') - -const configHelpers = require('../../support/helpers/config') 
-const pouchHelpers = require('../../support/helpers/pouch') -const { onPlatform } = require('../../support/helpers/platform') const Builders = require('../../support/builders') const stubSide = require('../../support/doubles/side') +const configHelpers = require('../../support/helpers/config') +const { onPlatform } = require('../../support/helpers/platform') +const pouchHelpers = require('../../support/helpers/pouch') onPlatform('darwin', () => { - describe('issue 850', function () { + describe('issue 850', function() { this.timeout(10000) let builders before('instanciate config', configHelpers.createConfig) before('instanciate pouch', pouchHelpers.createDatabase) - before('prepare builders', function () { + before('prepare builders', function() { builders = new Builders({ pouch: this.pouch }) }) - before('instanciate local watcher', function () { + before('instanciate local watcher', function() { this.merge = new Merge(this.pouch) this.local = stubSide('local') this.remote = stubSide('remote') @@ -60,7 +60,7 @@ onPlatform('darwin', () => { this.events ) }) - after('stop watcher and clean path', async function () { + after('stop watcher and clean path', async function() { this.watcher.stop(true) this.watcher.checksumer.kill() await fse.emptyDir(this.syncPath) @@ -68,15 +68,20 @@ onPlatform('darwin', () => { after('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) - before('create dst dir', async function () { + before('create dst dir', async function() { const dirPath = path.join(this.syncPath, 'dst') await fse.mkdirp(dirPath) const stat = await fse.stat(dirPath) - await builders.metadir().path(dirPath).ino(stat.ino).upToDate().create() + await builders + .metadir() + .path(dirPath) + .ino(stat.ino) + .upToDate() + .create() await this.sync.sync() }) - it('is fixed', async function () { + it('is fixed', async function() { let filePath = path.join(this.syncPath, 'file') let dstPath = path.join(this.syncPath, 'dst', 'file') await fse.outputFile(filePath, 'whatever') @@ -110,7 +115,12 @@ onPlatform('darwin', () => { _rev: '1-fakeRev' } return metadata.fromRemoteDoc( - builders.remoteFile().inRootDir().name('file').size('8').build() + builders + .remoteFile() + .inRootDir() + .name('file') + .size('8') + .build() ) } diff --git a/test/unit/remote/change.js b/test/unit/remote/change.js index 7044ed5d7..4386e6a7d 100644 --- a/test/unit/remote/change.js +++ b/test/unit/remote/change.js @@ -1,11 +1,12 @@ /* eslint-env mocha */ const path = require('path') + const should = require('should') const remoteChange = require('../../../core/remote/change') -const { onPlatforms } = require('../../support/helpers/platform') const Builders = require('../../support/builders') +const { onPlatforms } = require('../../support/helpers/platform') const builders = new Builders() @@ -13,19 +14,31 @@ describe('sorter()', () => { describe('with identical additions', () => { it('sorts FOO before foo', () => { const subdirAdd = { - doc: builders.metadir().path('FOO/subdir').build(), + doc: builders + .metadir() + .path('FOO/subdir') + .build(), type: 'DirAddition' } const otherDirAdd = { - doc: builders.metadir().path('foo').build(), + doc: builders + .metadir() + .path('foo') + .build(), type: 'DirAddition' } const fileAdd = { - doc: builders.metafile().path('FOO/subdir/file').build(), + doc: builders + .metafile() + .path('FOO/subdir/file') + .build(), type: 'FileAddition' } const dirAdd = { - doc: builders.metadir().path('FOO').build(), + doc: builders + .metadir() 
+ .path('FOO') + .build(), type: 'DirAddition' } @@ -39,13 +52,25 @@ describe('sorter()', () => { it('sorts child move out of parent before parent move', () => { const parentMove = { type: 'DirMove', - doc: builders.metadir().path('moved').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('moved') + .build(), + was: builders + .metadir() + .path('dir') + .build() } const childMove = { type: 'FileMove', - doc: builders.metafile().path('file').build(), - was: builders.metafile().path('dir/file').build() + doc: builders + .metafile() + .path('file') + .build(), + was: builders + .metafile() + .path('dir/file') + .build() } should(remoteChange.sort([parentMove, childMove])).deepEqual([ @@ -63,13 +88,25 @@ describe('sorter()', () => { it('sorts move of replaced before move of replacing', () => { const moveReplacing = { type: 'DirMove', - doc: builders.metadir().path('dirA').build(), - was: builders.metadir().path('dirB').build() + doc: builders + .metadir() + .path('dirA') + .build(), + was: builders + .metadir() + .path('dirB') + .build() } const moveReplaced = { type: 'DirMove', - doc: builders.metadir().path('dirC').build(), - was: builders.metadir().path('dirA').build() + doc: builders + .metadir() + .path('dirC') + .build(), + was: builders + .metadir() + .path('dirA') + .build() } should(remoteChange.sort([moveReplacing, moveReplaced])).deepEqual([ @@ -85,13 +122,25 @@ describe('sorter()', () => { it('sorts move of replaced before child move of replacing', () => { const moveReplacing = { type: 'DescendantChange', - doc: builders.metadir().path('dirA/dir/subdir/empty-subsubdir').build(), - was: builders.metadir().path('dirB/dir/subdir/empty-subsubdir').build() + doc: builders + .metadir() + .path('dirA/dir/subdir/empty-subsubdir') + .build(), + was: builders + .metadir() + .path('dirB/dir/subdir/empty-subsubdir') + .build() } const moveReplaced = { type: 'DirMove', - doc: builders.metadir().path('dirC').build(), - was: builders.metadir().path('dirA').build() + doc: builders + .metadir() + .path('dirC') + .build(), + was: builders + .metadir() + .path('dirA') + .build() } should(remoteChange.sort([moveReplacing, moveReplaced])).deepEqual([ @@ -107,13 +156,25 @@ describe('sorter()', () => { it('sorts child move of replaced before move of replacing', () => { const moveReplacing = { type: 'DirMove', - doc: builders.metadir().path('dirA').build(), - was: builders.metadir().path('dirB').build() + doc: builders + .metadir() + .path('dirA') + .build(), + was: builders + .metadir() + .path('dirB') + .build() } const moveReplaced = { type: 'DescendantChange', - doc: builders.metadir().path('dirC/dir/empty-subdir-a').build(), - was: builders.metadir().path('dirA/dir/empty-subdir-a').build() + doc: builders + .metadir() + .path('dirC/dir/empty-subdir-a') + .build(), + was: builders + .metadir() + .path('dirA/dir/empty-subdir-a') + .build() } should(remoteChange.sort([moveReplacing, moveReplaced])).deepEqual([ @@ -129,13 +190,25 @@ describe('sorter()', () => { it('sorts child move of replaced and child move of replacing by deleted path', () => { const moveReplacing = { type: 'DescendantChange', - doc: builders.metadir().path('dirA/dir/subdir').build(), - was: builders.metadir().path('dirB/dir/subdir').build() + doc: builders + .metadir() + .path('dirA/dir/subdir') + .build(), + was: builders + .metadir() + .path('dirB/dir/subdir') + .build() } const moveReplaced = { type: 'DescendantChange', - doc: builders.metadir().path('dirC/dir/empty-subdir').build(), - 
was: builders.metadir().path('dirA/dir/empty-subdir').build() + doc: builders + .metadir() + .path('dirC/dir/empty-subdir') + .build(), + was: builders + .metadir() + .path('dirA/dir/empty-subdir') + .build() } should(remoteChange.sort([moveReplacing, moveReplaced])).deepEqual([ @@ -154,12 +227,21 @@ describe('sorter()', () => { it('sorts tashing before addition when addition has greater path', () => { const trashing = { type: 'DirTrashing', - doc: builders.metadir().path('.cozy_trash/DIR').build(), - was: builders.metadir().path('dst/DIR').build() + doc: builders + .metadir() + .path('.cozy_trash/DIR') + .build(), + was: builders + .metadir() + .path('dst/DIR') + .build() } const addition = { type: 'DirAddition', - doc: builders.metadir().path('dst/dir').build() + doc: builders + .metadir() + .path('dst/dir') + .build() } should(remoteChange.sort([trashing, addition])).deepEqual([ trashing, @@ -174,12 +256,21 @@ describe('sorter()', () => { it('sorts tashing before addition when addition has lower path', () => { const trashing = { type: 'DirTrashing', - doc: builders.metadir().path('.cozy_trash/dir').build(), - was: builders.metadir().path('dst/dir').build() + doc: builders + .metadir() + .path('.cozy_trash/dir') + .build(), + was: builders + .metadir() + .path('dst/dir') + .build() } const addition = { type: 'DirAddition', - doc: builders.metadir().path('dst/DIR').build() + doc: builders + .metadir() + .path('dst/DIR') + .build() } should(remoteChange.sort([trashing, addition])).deepEqual([ trashing, @@ -196,13 +287,25 @@ describe('sorter()', () => { it('sorts tashing before move when moved change has greater path', () => { const trashing = { type: 'DirTrashing', - doc: builders.metadir().path('.cozy_trash/DIR').build(), - was: builders.metadir().path('dst/DIR').build() + doc: builders + .metadir() + .path('.cozy_trash/DIR') + .build(), + was: builders + .metadir() + .path('dst/DIR') + .build() } const move = { type: 'DirMove', - doc: builders.metadir().path('dst/dir').build(), - was: builders.metadir().path('src/dir').build() + doc: builders + .metadir() + .path('dst/dir') + .build(), + was: builders + .metadir() + .path('src/dir') + .build() } should(remoteChange.sort([trashing, move])).deepEqual([trashing, move]) should(remoteChange.sort([move, trashing])).deepEqual([trashing, move]) @@ -211,13 +314,25 @@ describe('sorter()', () => { it('sorts tashing before move when moved change has lower path', () => { const trashing = { type: 'DirTrashing', - doc: builders.metadir().path('.cozy_trash/dir').build(), - was: builders.metadir().path('dst/dir').build() + doc: builders + .metadir() + .path('.cozy_trash/dir') + .build(), + was: builders + .metadir() + .path('dst/dir') + .build() } const move = { type: 'DirMove', - doc: builders.metadir().path('dst/DIR').build(), - was: builders.metadir().path('src/DIR').build() + doc: builders + .metadir() + .path('dst/DIR') + .build(), + was: builders + .metadir() + .path('src/DIR') + .build() } should(remoteChange.sort([trashing, move])).deepEqual([trashing, move]) should(remoteChange.sort([move, trashing])).deepEqual([trashing, move]) @@ -233,17 +348,32 @@ describe('sorter()', () => { .metafile() .path('parent/dst/dir/subdir/filerenamed2') .build(), - was: builders.metafile().path('parent/dst/dir/subdir/file2').build() + was: builders + .metafile() + .path('parent/dst/dir/subdir/file2') + .build() } const emptySubdirMove = { type: 'DescendantChange', - doc: builders.metadir().path('parent/dst/dir/empty-subdir').build(), - was: 
builders.metadir().path('parent/src/dir/empty-subdir').build() + doc: builders + .metadir() + .path('parent/dst/dir/empty-subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/empty-subdir') + .build() } const subdirMove = { type: 'DescendantChange', - doc: builders.metadir().path('parent/dst/dir/subdir').build(), - was: builders.metadir().path('parent/src/dir/subdir').build() + doc: builders + .metadir() + .path('parent/dst/dir/subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/subdir') + .build() } const fileMove = { type: 'FileMove', @@ -251,12 +381,21 @@ describe('sorter()', () => { .metafile() .path('parent/dst/dir/subdir/filerenamed') .build(), - was: builders.metafile().path('parent/dst/dir/subdir/file').build() + was: builders + .metafile() + .path('parent/dst/dir/subdir/file') + .build() } const dirMove = { type: 'DirMove', - doc: builders.metadir().path('parent/dst/dir').build(), - was: builders.metadir().path('parent/src/dir').build() + doc: builders + .metadir() + .path('parent/dst/dir') + .build(), + was: builders + .metadir() + .path('parent/src/dir') + .build() } const expected = [ @@ -302,12 +441,18 @@ describe('sorter()', () => { describe('sorts deleted before created for the same path', () => { const deleted = { type: 'FileDeletion', - doc: builders.metafile().path('parent/file').build() + doc: builders + .metafile() + .path('parent/file') + .build() } const created = { type: 'FileAddition', - doc: builders.metafile().path('parent/file').build() + doc: builders + .metafile() + .path('parent/file') + .build() } it('when deleted comes before created', () => { @@ -339,12 +484,21 @@ describe('sorter()', () => { } const fileTrash = { type: 'FileTrashing', - doc: builders.metafile().path('.cozy_trash/fichier.pptx').build(), - was: builders.metafile().path('1_Dossier/fichier.pptx').build() + doc: builders + .metafile() + .path('.cozy_trash/fichier.pptx') + .build(), + was: builders + .metafile() + .path('1_Dossier/fichier.pptx') + .build() } const replacingFileAdd = { type: 'FileAddition', - doc: builders.metafile().path('1_Dossier/fichier.pptx').build() + doc: builders + .metafile() + .path('1_Dossier/fichier.pptx') + .build() } should( @@ -356,18 +510,36 @@ describe('sorter()', () => { it('sorts replacing move after move of replaced', () => { const emptySubdiraMove = { type: 'DescendantChange', - doc: builders.metadir().path('dirC/dir/empty-subdir-a').build(), - was: builders.metadir().path('dirA/dir/empty-subdir-a').build() + doc: builders + .metadir() + .path('dirC/dir/empty-subdir-a') + .build(), + was: builders + .metadir() + .path('dirA/dir/empty-subdir-a') + .build() } const subdirMoveA = { type: 'DescendantChange', - doc: builders.metadir().path('dirC/dir/subdir').build(), - was: builders.metadir().path('dirA/dir/subdir').build() + doc: builders + .metadir() + .path('dirC/dir/subdir') + .build(), + was: builders + .metadir() + .path('dirA/dir/subdir') + .build() } const emptySubdirMoveA = { type: 'DescendantChange', - doc: builders.metadir().path('dirC/dir/empty-subdir').build(), - was: builders.metadir().path('dirA/dir/empty-subdir').build() + doc: builders + .metadir() + .path('dirC/dir/empty-subdir') + .build(), + was: builders + .metadir() + .path('dirA/dir/empty-subdir') + .build() } const emptySubsubdirMoveA = { type: 'DescendantChange', @@ -382,13 +554,25 @@ describe('sorter()', () => { } const dirMoveA = { type: 'DescendantChange', - doc: builders.metadir().path('dirC/dir').build(), - was: 
builders.metadir().path('dirA/dir').build() + doc: builders + .metadir() + .path('dirC/dir') + .build(), + was: builders + .metadir() + .path('dirA/dir') + .build() } const parentMoveA = { type: 'DirMove', - doc: builders.metadir().path('dirC').build(), - was: builders.metadir().path('dirA').build() + doc: builders + .metadir() + .path('dirC') + .build(), + was: builders + .metadir() + .path('dirA') + .build() } const emptySubsubdirMoveB = { type: 'DescendantChange', @@ -403,28 +587,58 @@ describe('sorter()', () => { } const dirMoveB = { type: 'DescendantChange', - doc: builders.metadir().path('dirA/dir').build(), - was: builders.metadir().path('dirB/dir').build() + doc: builders + .metadir() + .path('dirA/dir') + .build(), + was: builders + .metadir() + .path('dirB/dir') + .build() } const emptySubdirMoveB = { type: 'DescendantChange', - doc: builders.metadir().path('dirA/dir/empty-subdir').build(), - was: builders.metadir().path('dirB/dir/empty-subdir').build() + doc: builders + .metadir() + .path('dirA/dir/empty-subdir') + .build(), + was: builders + .metadir() + .path('dirB/dir/empty-subdir') + .build() } const emptySubdirbMove = { type: 'DescendantChange', - doc: builders.metadir().path('dirA/dir/empty-subdir-b').build(), - was: builders.metadir().path('dirB/dir/empty-subdir-b').build() + doc: builders + .metadir() + .path('dirA/dir/empty-subdir-b') + .build(), + was: builders + .metadir() + .path('dirB/dir/empty-subdir-b') + .build() } const subdirMoveB = { type: 'DescendantChange', - doc: builders.metadir().path('dirA/dir/subdir').build(), - was: builders.metadir().path('dirB/dir/subdir').build() + doc: builders + .metadir() + .path('dirA/dir/subdir') + .build(), + was: builders + .metadir() + .path('dirB/dir/subdir') + .build() } const parentMoveB = { type: 'DirMove', - doc: builders.metadir().path('dirA').build(), - was: builders.metadir().path('dirB').build() + doc: builders + .metadir() + .path('dirA') + .build(), + was: builders + .metadir() + .path('dirB') + .build() } should( @@ -465,13 +679,25 @@ describe('sorter()', () => { it('sorts parent deletion before child deletion', () => { const dirTrashing = { type: 'DirTrashing', - doc: builders.metadir().path('.cozy_trash/dir').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('.cozy_trash/dir') + .build(), + was: builders + .metadir() + .path('dir') + .build() } const fileTrashing = { type: 'FileTrashing', - doc: builders.metafile().path('.cozy_trash/dir/file').build(), - was: builders.metafile().path('dir/file').build() + doc: builders + .metafile() + .path('.cozy_trash/dir/file') + .build(), + was: builders + .metafile() + .path('dir/file') + .build() } should(remoteChange.sort([dirTrashing, fileTrashing])).deepEqual([ dirTrashing, @@ -488,13 +714,25 @@ describe('sorter()', () => { it('sorts parent deletion before child deletion', () => { const dirTrashing = { type: 'DirTrashing', - doc: builders.metadir().path('.cozy_trash/dir').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('.cozy_trash/dir') + .build(), + was: builders + .metadir() + .path('dir') + .build() } const fileTrashing = { type: 'FileTrashing', - doc: builders.metafile().path('.cozy_trash/file').build(), - was: builders.metafile().path('dir/file').build() + doc: builders + .metafile() + .path('.cozy_trash/file') + .build(), + was: builders + .metafile() + .path('dir/file') + .build() } should(remoteChange.sort([dirTrashing, fileTrashing])).deepEqual([ dirTrashing, @@ -514,13 
+752,25 @@ describe('sorter()', () => { it('sorts parent deletion before child deletion', () => { const dirTrashing = { type: 'DirTrashing', - doc: builders.metadir().path('.cozy_trash/dir').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('.cozy_trash/dir') + .build(), + was: builders + .metadir() + .path('dir') + .build() } const fileTrashing = { type: 'FileTrashing', - doc: builders.metafile().path('.cozy_trash/dir/file').build(), - was: builders.metafile().path('dir/file').build() + doc: builders + .metafile() + .path('.cozy_trash/dir/file') + .build(), + was: builders + .metafile() + .path('dir/file') + .build() } should(remoteChange.sort([dirTrashing, fileTrashing])).deepEqual([ dirTrashing, @@ -536,13 +786,25 @@ describe('sorter()', () => { it('sorts child move out of parent before parent deletion', () => { const dirTrashing = { type: 'DirTrashing', - doc: builders.metadir().path('.cozy_trash/dir').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('.cozy_trash/dir') + .build(), + was: builders + .metadir() + .path('dir') + .build() } const fileMove = { type: 'FileMove', - doc: builders.metafile().path('file').build(), - was: builders.metafile().path('dir/file').build() + doc: builders + .metafile() + .path('file') + .build(), + was: builders + .metafile() + .path('dir/file') + .build() } should(remoteChange.sort([dirTrashing, fileMove])).deepEqual([ fileMove, @@ -560,13 +822,25 @@ describe('isChildSource(p, c)', () => { it('returns true if p src path is parent of c src path', () => { const parent = { type: 'DirMove', - doc: builders.metadir().path('parent/dst/subdir').build(), - was: builders.metadir().path('parent/src/dir/subdir').build() + doc: builders + .metadir() + .path('parent/dst/subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/subdir') + .build() } const child = { type: 'FileMove', - doc: builders.metafile().path('parent/dst2/file').build(), - was: builders.metafile().path('parent/src/dir/subdir/file').build() + doc: builders + .metafile() + .path('parent/dst2/file') + .build(), + was: builders + .metafile() + .path('parent/src/dir/subdir/file') + .build() } should(remoteChange.isChildSource(parent, child)).be.true() @@ -575,13 +849,25 @@ describe('isChildSource(p, c)', () => { it('returns false if p src path is not parent of c src path', () => { const parent = { type: 'DirMove', - doc: builders.metadir().path('parent/dst/subdir').build(), - was: builders.metadir().path('parent/src/dir/subdir').build() + doc: builders + .metadir() + .path('parent/dst/subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/subdir') + .build() } const child = { type: 'FileMove', - doc: builders.metafile().path('parent/dst/subdir/file').build(), - was: builders.metafile().path('parent/src2/file').build() + doc: builders + .metafile() + .path('parent/dst/subdir/file') + .build(), + was: builders + .metafile() + .path('parent/src2/file') + .build() } should(remoteChange.isChildSource(parent, child)).be.false() @@ -592,13 +878,25 @@ describe('isChildDestination(p, c)', () => { it('returns true if p dst path is parent of c dst path', () => { const parent = { type: 'DirMove', - doc: builders.metadir().path('parent/dst/subdir').build(), - was: builders.metadir().path('parent/src/dir/subdir').build() + doc: builders + .metadir() + .path('parent/dst/subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/subdir') + .build() } const child = { type: 
'FileMove', - doc: builders.metafile().path('parent/dst/subdir/file').build(), - was: builders.metafile().path('parent/src2/file').build() + doc: builders + .metafile() + .path('parent/dst/subdir/file') + .build(), + was: builders + .metafile() + .path('parent/src2/file') + .build() } should(remoteChange.isChildDestination(parent, child)).be.true() @@ -607,13 +905,25 @@ describe('isChildDestination(p, c)', () => { it('returns false if p dst path is not parent of c dst path', () => { const parent = { type: 'DirMove', - doc: builders.metadir().path('parent/dst/subdir').build(), - was: builders.metadir().path('parent/src/dir/subdir').build() + doc: builders + .metadir() + .path('parent/dst/subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/subdir') + .build() } const child = { type: 'FileMove', - doc: builders.metafile().path('parent/dst2/file').build(), - was: builders.metafile().path('parent/src/dir/subdir/file').build() + doc: builders + .metafile() + .path('parent/dst2/file') + .build(), + was: builders + .metafile() + .path('parent/src/dir/subdir/file') + .build() } should(remoteChange.isChildDestination(parent, child)).be.false() @@ -624,13 +934,25 @@ describe('isChildMove(p, c)', () => { it('returns true if p src path is parent of c src path', () => { const parent = { type: 'DirMove', - doc: builders.metadir().path('parent/dst/subdir').build(), - was: builders.metadir().path('parent/src/dir/subdir').build() + doc: builders + .metadir() + .path('parent/dst/subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/subdir') + .build() } const child = { type: 'FileMove', - doc: builders.metafile().path('parent/dst2/file').build(), - was: builders.metafile().path('parent/src/dir/subdir/file').build() + doc: builders + .metafile() + .path('parent/dst2/file') + .build(), + was: builders + .metafile() + .path('parent/src/dir/subdir/file') + .build() } should(remoteChange.isChildSource(parent, child)).be.true() @@ -639,13 +961,25 @@ describe('isChildMove(p, c)', () => { it('returns true if p dst path is parent of c dst path', () => { const parent = { type: 'DirMove', - doc: builders.metadir().path('parent/dst/subdir').build(), - was: builders.metadir().path('parent/src/dir/subdir').build() + doc: builders + .metadir() + .path('parent/dst/subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/subdir') + .build() } const child = { type: 'FileMove', - doc: builders.metafile().path('parent/dst/subdir/file').build(), - was: builders.metafile().path('parent/src2/file').build() + doc: builders + .metafile() + .path('parent/dst/subdir/file') + .build(), + was: builders + .metafile() + .path('parent/src2/file') + .build() } should(remoteChange.isChildDestination(parent, child)).be.true() @@ -654,13 +988,25 @@ describe('isChildMove(p, c)', () => { it('returns true if p src and dst paths are parents of c src and dst paths', () => { const parent = { type: 'DirMove', - doc: builders.metadir().path('parent/dst2/subdir').build(), - was: builders.metadir().path('parent/src/dir/subdir').build() + doc: builders + .metadir() + .path('parent/dst2/subdir') + .build(), + was: builders + .metadir() + .path('parent/src/dir/subdir') + .build() } const child = { type: 'FileMove', - doc: builders.metafile().path('parent/dst2/subdir/file').build(), - was: builders.metafile().path('parent/src/dir/subdir/file').build() + doc: builders + .metafile() + .path('parent/dst2/subdir/file') + .build(), + was: builders + .metafile() + .path('parent/src/dir/subdir/file') + 
.build() } should(remoteChange.isChildMove(parent, child)).be.true() @@ -670,21 +1016,33 @@ describe('isChildMove(p, c)', () => { describe('isOnlyChildMove(p, c)', () => { const p = { type: 'DirMove', - doc: builders.metadir().path('dst').build(), - was: builders.metadir().path('src').build() + doc: builders + .metadir() + .path('dst') + .build(), + was: builders + .metadir() + .path('src') + .build() } it('returns false if c is not a move', () => { const c1 = { type: 'FileDeletion', - doc: builders.metafile().path('dst/file').build() + doc: builders + .metafile() + .path('dst/file') + .build() } should(remoteChange.isOnlyChildMove(p, c1)).be.false() const c2 = { type: 'FileDeletion', - doc: builders.metafile().path('src/file').build() + doc: builders + .metafile() + .path('src/file') + .build() } should(remoteChange.isOnlyChildMove(p, c2)).be.false() @@ -693,48 +1051,84 @@ describe('isOnlyChildMove(p, c)', () => { it('returns false if c is not a move of a child of p', () => { const c1 = { type: 'FileMove', - doc: builders.metafile().path('dir/file').build(), - was: builders.metafile().path('file').build() + doc: builders + .metafile() + .path('dir/file') + .build(), + was: builders + .metafile() + .path('file') + .build() } should(remoteChange.isOnlyChildMove(p, c1)).be.false() const c2 = { type: 'FileMove', - doc: builders.metafile().path('dst/file').build(), - was: builders.metafile().path('file').build() + doc: builders + .metafile() + .path('dst/file') + .build(), + was: builders + .metafile() + .path('file') + .build() } should(remoteChange.isOnlyChildMove(p, c2)).be.false() const c3 = { type: 'FileMove', - doc: builders.metafile().path('src/file').build(), - was: builders.metafile().path('file').build() + doc: builders + .metafile() + .path('src/file') + .build(), + was: builders + .metafile() + .path('file') + .build() } should(remoteChange.isOnlyChildMove(p, c3)).be.false() const c4 = { type: 'DirMove', - doc: builders.metadir().path('src/dir').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('src/dir') + .build(), + was: builders + .metadir() + .path('dir') + .build() } should(remoteChange.isOnlyChildMove(p, c4)).be.false() const c5 = { type: 'DirMove', - doc: builders.metadir().path('dst/dir').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('dst/dir') + .build(), + was: builders + .metadir() + .path('dir') + .build() } should(remoteChange.isOnlyChildMove(p, c5)).be.false() const c6 = { type: 'DirMove', - doc: builders.metadir().path('parent/dir').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('parent/dir') + .build(), + was: builders + .metadir() + .path('dir') + .build() } should(remoteChange.isOnlyChildMove(p, c6)).be.false() @@ -743,32 +1137,56 @@ describe('isOnlyChildMove(p, c)', () => { it('returns false if c is a move of a child of p outside p', () => { const c1 = { type: 'FileMove', - doc: builders.metafile().path('file').build(), - was: builders.metafile().path('src/file').build() + doc: builders + .metafile() + .path('file') + .build(), + was: builders + .metafile() + .path('src/file') + .build() } should(remoteChange.isOnlyChildMove(p, c1)).be.false() const c2 = { type: 'FileMove', - doc: builders.metafile().path('file').build(), - was: builders.metafile().path('dst/file').build() + doc: builders + .metafile() + .path('file') + .build(), + was: builders + .metafile() + .path('dst/file') + .build() } should(remoteChange.isOnlyChildMove(p, 
c2)).be.false() const c3 = { type: 'DirMove', - doc: builders.metadir().path('dir').build(), - was: builders.metadir().path('src/dir').build() + doc: builders + .metadir() + .path('dir') + .build(), + was: builders + .metadir() + .path('src/dir') + .build() } should(remoteChange.isOnlyChildMove(p, c3)).be.false() const c4 = { type: 'DirMove', - doc: builders.metadir().path('dir').build(), - was: builders.metadir().path('dst/dir').build() + doc: builders + .metadir() + .path('dir') + .build(), + was: builders + .metadir() + .path('dst/dir') + .build() } should(remoteChange.isOnlyChildMove(p, c4)).be.false() @@ -777,32 +1195,56 @@ describe('isOnlyChildMove(p, c)', () => { it('returns false if c is a renaming of a child of p within p', () => { const c1 = { type: 'FileMove', - doc: builders.metafile().path('dst/file2').build(), - was: builders.metafile().path('src/file').build() + doc: builders + .metafile() + .path('dst/file2') + .build(), + was: builders + .metafile() + .path('src/file') + .build() } should(remoteChange.isOnlyChildMove(p, c1)).be.false() const c2 = { type: 'DirMove', - doc: builders.metadir().path('dst/dir2').build(), - was: builders.metadir().path('src/dir').build() + doc: builders + .metadir() + .path('dst/dir2') + .build(), + was: builders + .metadir() + .path('src/dir') + .build() } should(remoteChange.isOnlyChildMove(p, c2)).be.false() const c3 = { type: 'FileMove', - doc: builders.metafile().path('dst/file2').build(), - was: builders.metafile().path('dst/file').build() + doc: builders + .metafile() + .path('dst/file2') + .build(), + was: builders + .metafile() + .path('dst/file') + .build() } should(remoteChange.isOnlyChildMove(p, c3)).be.false() const c4 = { type: 'DirMove', - doc: builders.metadir().path('dst/dir2').build(), - was: builders.metadir().path('dst/dir').build() + doc: builders + .metadir() + .path('dst/dir2') + .build(), + was: builders + .metadir() + .path('dst/dir') + .build() } should(remoteChange.isOnlyChildMove(p, c4)).be.false() @@ -811,16 +1253,28 @@ describe('isOnlyChildMove(p, c)', () => { it('returns true if c is a child move of p', () => { const c1 = { type: 'FileMove', - doc: builders.metafile().path('dst/file').build(), - was: builders.metafile().path('src/file').build() + doc: builders + .metafile() + .path('dst/file') + .build(), + was: builders + .metafile() + .path('src/file') + .build() } should(remoteChange.isOnlyChildMove(p, c1)).be.true() const c2 = { type: 'DirMove', - doc: builders.metadir().path('dst/dir').build(), - was: builders.metadir().path('src/dir').build() + doc: builders + .metadir() + .path('dst/dir') + .build(), + was: builders + .metadir() + .path('src/dir') + .build() } should(remoteChange.isOnlyChildMove(p, c2)).be.true() @@ -829,16 +1283,28 @@ describe('isOnlyChildMove(p, c)', () => { it('returns false if c is a child move of a move of a child of p', () => { const c1 = { type: 'FileMove', - doc: builders.metafile().path('dst/dir2/file').build(), - was: builders.metafile().path('src/dir/file').build() + doc: builders + .metafile() + .path('dst/dir2/file') + .build(), + was: builders + .metafile() + .path('src/dir/file') + .build() } should(remoteChange.isOnlyChildMove(p, c1)).be.false() const c2 = { type: 'DirMove', - doc: builders.metadir().path('dst/parent2/dir').build(), - was: builders.metadir().path('src/parent/dir').build() + doc: builders + .metadir() + .path('dst/parent2/dir') + .build(), + was: builders + .metadir() + .path('src/parent/dir') + .build() } should(remoteChange.isOnlyChildMove(p, c2)).be.false() 
@@ -849,26 +1315,50 @@ describe('sortByPath', () => { it('sorts changes by ascending alphanumerical destination path order', () => { const one = { type: 'FileMove', - doc: builders.metafile().path('dst/dir/file').build(), - was: builders.metafile().path('dir/file').build() + doc: builders + .metafile() + .path('dst/dir/file') + .build(), + was: builders + .metafile() + .path('dir/file') + .build() } const two = { type: 'DirMove', - doc: builders.metadir().path('dst/dir').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('dst/dir') + .build(), + was: builders + .metadir() + .path('dir') + .build() } const three = { type: 'FileMove', - doc: builders.metafile().path('dst/dir/file2').build(), - was: builders.metafile().path('a/file2').build() + doc: builders + .metafile() + .path('dst/dir/file2') + .build(), + was: builders + .metafile() + .path('a/file2') + .build() } const four = { type: 'FileAddition', - doc: builders.metafile().path('dst/dir/spreadsheet').build() + doc: builders + .metafile() + .path('dst/dir/spreadsheet') + .build() } const five = { type: 'FileAddition', - doc: builders.metafile().path('doc').build() + doc: builders + .metafile() + .path('doc') + .build() } should(remoteChange.sortByPath([one, two, three, four, five])).deepEqual([ @@ -893,17 +1383,29 @@ describe('sortByPath', () => { 'file' ) }, - was: builders.metafile().path('dir/file').build() + was: builders + .metafile() + .path('dir/file') + .build() } const two = { type: 'DirMove', - doc: builders.metadir().path('décibels').build(), - was: builders.metadir().path('dir').build() + doc: builders + .metadir() + .path('décibels') + .build(), + was: builders + .metadir() + .path('dir') + .build() } const three = { type: 'FileMove', doc: { path: path.normalize('décibels/hélice/file2'.normalize('NFC')) }, - was: builders.metafile().path('a/file2').build() + was: builders + .metafile() + .path('a/file2') + .build() } const four = { type: 'FileAddition', @@ -929,17 +1431,26 @@ describe('sortByPath', () => { const one = { type: 'IgnoredChange', doc: { _id: 'whatever', _rev: '2-xxx', _deleted: true }, - was: builders.metafile().path('spreadsheet').build(), + was: builders + .metafile() + .path('spreadsheet') + .build(), detail: 'Deleted document' } const two = { type: 'FileAddition', - doc: builders.metafile().path('doc').build() + doc: builders + .metafile() + .path('doc') + .build() } const three = { type: 'IgnoredChange', doc: { _id: 'whatever', _rev: '2-xxx', _deleted: true }, - was: builders.metadir().path('dir').build(), + was: builders + .metadir() + .path('dir') + .build(), detail: 'Deleted document' } diff --git a/test/unit/remote/cozy.js b/test/unit/remote/cozy.js index 925da114b..0fe4dcdbf 100644 --- a/test/unit/remote/cozy.js +++ b/test/unit/remote/cozy.js @@ -1,15 +1,18 @@ /* eslint-env mocha */ /* @flow weak */ -const _ = require('lodash') const path = require('path') + +const electronFetch = require('electron-fetch') +const _ = require('lodash') const should = require('should') const sinon = require('sinon') -const electronFetch = require('electron-fetch') -const { FetchError } = require('cozy-stack-client') -const OldCozyClient = require('cozy-client-js').Client + const CozyClient = require('cozy-client').default +const OldCozyClient = require('cozy-client-js').Client +const { FetchError } = require('cozy-stack-client') +const metadata = require('../../../core/metadata') const { DIR_TYPE, ROOT_DIR_ID, @@ -23,13 +26,11 @@ const { const { RemoteCozy } = 
require('../../../core/remote/cozy') const { withDefaultValues } = require('../../../core/remote/document') const { DirectoryNotFound } = require('../../../core/remote/errors') -const metadata = require('../../../core/metadata') - +const Builders = require('../../support/builders') +const CozyStackDouble = require('../../support/doubles/cozy_stack') const configHelpers = require('../../support/helpers/config') const cozyHelpers = require('../../support/helpers/cozy') const { COZY_URL, cozy, deleteAll } = require('../../support/helpers/cozy') -const CozyStackDouble = require('../../support/doubles/cozy_stack') -const Builders = require('../../support/builders') const cozyStackDouble = new CozyStackDouble() const builders = new Builders({ cozy }) @@ -45,7 +46,7 @@ const CHROMIUM_ERROR = new electronFetch.FetchError( new Error('mojo result not ok') ) -describe('RemoteCozy', function () { +describe('RemoteCozy', function() { before(() => cozyStackDouble.start()) beforeEach(deleteAll) before('instanciate config', configHelpers.createConfig) @@ -56,7 +57,7 @@ describe('RemoteCozy', function () { let remoteCozy - beforeEach(function () { + beforeEach(function() { this.config.cozyUrl = COZY_URL remoteCozy = new RemoteCozy(this.config) // Use real OAuth client @@ -98,7 +99,10 @@ describe('RemoteCozy', function () { describe('createFile', () => { context('when the name starts or ends with a space', () => { it('creates the file with the given name', async () => { - const data = builders.stream().push('').build() + const data = builders + .stream() + .push('') + .build() const checksum = builders.checksum('').build() should( @@ -139,7 +143,11 @@ describe('RemoteCozy', function () { }) it('rejects with a 409 FetchError if a doc with the same path exists', async () => { - await builders.remoteDir().inRootDir().name('foo').create() + await builders + .remoteDir() + .inRootDir() + .name('foo') + .create() stubFetch() await should( @@ -162,16 +170,22 @@ describe('RemoteCozy', function () { stubFetch() await should( - remoteCozy.createFile(builders.stream().push(data).build(), { - name: 'foo', - dirID: ROOT_DIR_ID, - contentType: 'text/plain', - contentLength: data.length - 1, - checksum, - executable: false, - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString() - }) + remoteCozy.createFile( + builders + .stream() + .push(data) + .build(), + { + name: 'foo', + dirID: ROOT_DIR_ID, + contentType: 'text/plain', + contentLength: data.length - 1, + checksum, + executable: false, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString() + } + ) ).be.rejectedWith(FetchError, { status: 412 }) }) @@ -181,16 +195,22 @@ describe('RemoteCozy', function () { stubFetch() await should( - remoteCozy.createFile(builders.stream().push(data).build(), { - name: 'foo', - dirID: ROOT_DIR_ID, - contentType: 'text/plain', - contentLength: data.length + 1, - checksum, - executable: false, - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString() - }) + remoteCozy.createFile( + builders + .stream() + .push(data) + .build(), + { + name: 'foo', + dirID: ROOT_DIR_ID, + contentType: 'text/plain', + contentLength: data.length + 1, + checksum, + executable: false, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString() + } + ) ).be.rejectedWith(FetchError, { status: 412 }) }) @@ -237,16 +257,22 @@ describe('RemoteCozy', function () { stubFetch() await should( - remoteCozy.createFile(builders.stream().push(data).build(), { - name: 'foo', - dirID: ROOT_DIR_ID, 
- contentType: 'text/plain', - contentLength: data.length, - checksum: 'md5sum', // Force a request failure with a bad checksum - executable: false, - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString() - }) + remoteCozy.createFile( + builders + .stream() + .push(data) + .build(), + { + name: 'foo', + dirID: ROOT_DIR_ID, + contentType: 'text/plain', + contentLength: data.length, + checksum: 'md5sum', // Force a request failure with a bad checksum + executable: false, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString() + } + ) ).be.rejectedWith(CHROMIUM_ERROR) }) }) @@ -280,7 +306,10 @@ describe('RemoteCozy', function () { .data('initial content') .create() - const data = builders.stream().push('').build() + const data = builders + .stream() + .push('') + .build() const checksum = builders.checksum('').build() should( @@ -363,31 +392,48 @@ describe('RemoteCozy', function () { }) }) - describe('changes', function () { - context('when no seq given', function () { + describe('changes', function() { + context('when no seq given', function() { // XXX: This test might timeout if a lot of changes were made on the // remote Cozy as we're doing an initial fetch here and thus cannot speed // it up by ignoring the previous changes. - it('resolves only with non trashed, non deleted docs', async function () { + it('resolves only with non trashed, non deleted docs', async function() { const dir = await builders.remoteDir().create() - const file = await builders.remoteFile().inDir(dir).create() - const deletedFile = await builders.remoteFile().inDir(dir).create() + const file = await builders + .remoteFile() + .inDir(dir) + .create() + const deletedFile = await builders + .remoteFile() + .inDir(dir) + .create() await builders.remoteErased(deletedFile).create() - const trashedFile = await builders.remoteFile().inDir(dir).create() - await builders.remoteFile(trashedFile).trashed().update() + const trashedFile = await builders + .remoteFile() + .inDir(dir) + .create() + await builders + .remoteFile(trashedFile) + .trashed() + .update() const { docs } = await remoteCozy.changes() const ids = docs.map(doc => doc._id) - should(ids).containDeep([dir._id, file._id]).and.have.length(2) + should(ids) + .containDeep([dir._id, file._id]) + .and.have.length(2) }) }) - it('resolves with changes since the given seq', async function () { + it('resolves with changes since the given seq', async function() { const last_seq = await remoteCozy.fetchLastSeq() const dir = await builders.remoteDir().create() - const file = await builders.remoteFile().inDir(dir).create() + const file = await builders + .remoteFile() + .inDir(dir) + .create() const { docs } = await remoteCozy.changes(last_seq) const ids = docs.map(doc => doc._id) @@ -395,16 +441,24 @@ describe('RemoteCozy', function () { should(ids.sort()).eql([file._id, dir._id].sort()) }) - it('resolves with docs ordered by path asc', async function () { + it('resolves with docs ordered by path asc', async function() { const last_seq = await remoteCozy.fetchLastSeq() - const dirB = await builders.remoteDir().inRootDir().name('dirB').create() + const dirB = await builders + .remoteDir() + .inRootDir() + .name('dirB') + .create() const fileB = await builders .remoteFile() .inRootDir() .name('fileB') .create() - const dirA = await builders.remoteDir().inRootDir().name('dirA').create() + const dirA = await builders + .remoteDir() + .inRootDir() + .name('dirA') + .create() const fileA = await builders .remoteFile() .inDir(dirA) @@ 
-418,7 +472,7 @@ describe('RemoteCozy', function () { ) }) - it('does not swallow errors', function () { + it('does not swallow errors', function() { this.config.cozyUrl = cozyStackDouble.url() const remoteCozy = new RemoteCozy(this.config) @@ -486,11 +540,19 @@ describe('RemoteCozy', function () { } }) - it('returns documents with a path attribute', async function () { + it('returns documents with a path attribute', async function() { const last_seq = await remoteCozy.fetchLastSeq() - await builders.remoteDir().inRootDir().name('dir').create() - await builders.remoteFile().inRootDir().name('file').create() + await builders + .remoteDir() + .inRootDir() + .name('dir') + .create() + await builders + .remoteFile() + .inRootDir() + .name('file') + .create() const { docs } = await remoteCozy.changes(last_seq) @@ -499,14 +561,14 @@ describe('RemoteCozy', function () { }) }) - describe('find', function () { - it('fetches a remote directory matching the given id', async function () { + describe('find', function() { + it('fetches a remote directory matching the given id', async function() { const remoteDir = await builders.remoteDir().create() await should(remoteCozy.find(remoteDir._id)).be.fulfilledWith(remoteDir) }) - it('fetches a remote root file including its path', async function () { + it('fetches a remote root file including its path', async function() { const remoteFile = await builders .remoteFile() .inRootDir() @@ -518,7 +580,7 @@ describe('RemoteCozy', function () { ) }) - it('fetches a remote non-root file including its path', async function () { + it('fetches a remote non-root file including its path', async function() { const remoteDir = await builders .remoteDir() .name('foo') @@ -535,15 +597,15 @@ describe('RemoteCozy', function () { ) }) - it('throws an error when directory is not found', async function () { + it('throws an error when directory is not found', async function() { await should(remoteCozy.find('missing')).be.rejectedWith({ status: 404 }) }) }) - describe('findMaybe', function () { - it('does the same as find() when file or directory exists', async function () { + describe('findMaybe', function() { + it('does the same as find() when file or directory exists', async function() { const remoteDir = await builders.remoteDir().create() await should(remoteCozy.findMaybe(remoteDir._id)).be.fulfilledWith( @@ -551,13 +613,13 @@ describe('RemoteCozy', function () { ) }) - it('returns null when file or directory is not found', async function () { + it('returns null when file or directory is not found', async function() { await should(remoteCozy.findMaybe('missing')).be.fulfilledWith(null) }) }) - describe('findDir', function () { - it('fetches a remote directory matching the given id', async function () { + describe('findDir', function() { + it('fetches a remote directory matching the given id', async function() { const remoteDir = await builders.remoteDir().create() await should(remoteCozy.findDir(remoteDir._id)).be.fulfilledWith( @@ -565,7 +627,7 @@ describe('RemoteCozy', function () { ) }) - it('throws an error if a remote file matches the given id', async function () { + it('throws an error if a remote file matches the given id', async function() { const remoteFile = await builders.remoteFile().create() await should(remoteCozy.findDir(remoteFile._id)).be.rejectedWith( @@ -573,15 +635,15 @@ describe('RemoteCozy', function () { ) }) - it('throws an error when directory is not found', async function () { + it('throws an error when directory is not found', async function() { 
await should(remoteCozy.findDir('missing')).be.rejectedWith({ status: 404 }) }) }) - describe('findDirMaybe', function () { - it('does the same as findDir() when directory exists', async function () { + describe('findDirMaybe', function() { + it('does the same as findDir() when directory exists', async function() { const remoteDir = await builders.remoteDir().create() await should(remoteCozy.findDirMaybe(remoteDir._id)).be.fulfilledWith( @@ -589,7 +651,7 @@ describe('RemoteCozy', function () { ) }) - it('does the same as findDir() when file exists', async function () { + it('does the same as findDir() when file exists', async function() { const remoteFile = await builders.remoteFile().create() await should(remoteCozy.findDirMaybe(remoteFile._id)).be.rejectedWith( @@ -597,20 +659,28 @@ describe('RemoteCozy', function () { ) }) - it('returns null when directory is not found', async function () { + it('returns null when directory is not found', async function() { await should(remoteCozy.findDirMaybe('missing')).be.fulfilledWith(null) }) }) - describe('isNameTaken', function () { + describe('isNameTaken', function() { it('returns true when a doc with the given name exists in the given directory', async () => { const remoteDir = await builders .remoteDir() .name('foo') .inRootDir() .create() - await builders.remoteFile().name('bar').inDir(remoteDir).create() - await builders.remoteDir().name('baz').inDir(remoteDir).create() + await builders + .remoteFile() + .name('bar') + .inDir(remoteDir) + .create() + await builders + .remoteDir() + .name('baz') + .inDir(remoteDir) + .create() await should( remoteCozy.isNameTaken({ name: 'bar', dir_id: remoteDir._id }) @@ -638,7 +708,10 @@ describe('RemoteCozy', function () { .name('foo') .inRootDir() .create() - await builders.remoteFile().name('bar').create() + await builders + .remoteFile() + .name('bar') + .create() await should( remoteCozy.isNameTaken({ name: 'bar', dir_id: remoteDir._id }) @@ -646,10 +719,13 @@ describe('RemoteCozy', function () { }) }) - describe('findDirectoryByPath', function () { - it('resolves when the directory exists remotely', async function () { + describe('findDirectoryByPath', function() { + it('resolves when the directory exists remotely', async function() { const dir = await builders.remoteDir().create() - const subdir = await builders.remoteDir().inDir(dir).create() + const subdir = await builders + .remoteDir() + .inDir(dir) + .create() const foundDir = await remoteCozy.findDirectoryByPath(dir.path) should(foundDir).have.properties(metadata.serializableRemote(dir)) @@ -658,8 +734,12 @@ describe('RemoteCozy', function () { should(foundSubdir).have.properties(metadata.serializableRemote(subdir)) }) - it('rejects when the directory does not exist remotely', async function () { - await builders.remoteFile().name('existing').inRootDir().create() + it('rejects when the directory does not exist remotely', async function() { + await builders + .remoteFile() + .name('existing') + .inRootDir() + .create() for (let path of ['/missing', '/existing/missing']) { await remoteCozy @@ -668,8 +748,12 @@ describe('RemoteCozy', function () { } }) - it('rejects when the path matches a file', async function () { - await builders.remoteFile().name('foo').inRootDir().create() + it('rejects when the path matches a file', async function() { + await builders + .remoteFile() + .name('foo') + .inRootDir() + .create() await remoteCozy .findDirectoryByPath('/foo') @@ -678,7 +762,7 @@ describe('RemoteCozy', function () { }) describe('trashById', () => 
{ - it('resolves with a RemoteDoc representing the newly trashed item', async function () { + it('resolves with a RemoteDoc representing the newly trashed item', async function() { const orig = await builders .remoteFile() .createdAt(2017, 1, 1, 1, 1, 1, 0) @@ -703,38 +787,50 @@ describe('RemoteCozy', function () { }) describe('isEmpty', () => { - it('is true when the folder with the given id is empty', async function () { + it('is true when the folder with the given id is empty', async function() { const dir = await builders.remoteDir().create() should(await remoteCozy.isEmpty(dir._id)).be.true() - const subdir = await builders.remoteDir().inDir(dir).create() + const subdir = await builders + .remoteDir() + .inDir(dir) + .create() should(await remoteCozy.isEmpty(dir._id)).be.false() should(await remoteCozy.isEmpty(subdir._id)).be.true() - await builders.remoteFile().inDir(dir).create() + await builders + .remoteFile() + .inDir(dir) + .create() should(await remoteCozy.isEmpty(dir._id)).be.false() should(await remoteCozy.isEmpty(subdir._id)).be.true() - await builders.remoteFile().inDir(subdir).create() + await builders + .remoteFile() + .inDir(subdir) + .create() should(await remoteCozy.isEmpty(dir._id)).be.false() should(await remoteCozy.isEmpty(subdir._id)).be.false() }) - it('rejects when given a file id', async function () { + it('rejects when given a file id', async function() { const file = await builders.remoteFile().create() await should(remoteCozy.isEmpty(file._id)).be.rejectedWith(/wrong type/) }) - it('rejects when no document matches the id', async function () { + it('rejects when no document matches the id', async function() { await should(remoteCozy.isEmpty('missing')).be.rejectedWith({ status: 404 }) }) }) - describe('downloadBinary', function () { - it('resolves with a Readable stream of the file content', async function () { - const remoteFile = await builders.remoteFile().data('foo').create() + describe('downloadBinary', function() { + it('resolves with a Readable stream of the file content', async function() { + const remoteFile = await builders + .remoteFile() + .data('foo') + .create() const stream = await remoteCozy.downloadBinary(remoteFile._id) @@ -749,7 +845,7 @@ describe('RemoteCozy', function () { }) describe('#warnings()', () => { - beforeEach(function () { + beforeEach(function() { this.config.cozyUrl = cozyStackDouble.url() remoteCozy = new RemoteCozy(this.config) }) @@ -802,7 +898,7 @@ describe('RemoteCozy', function () { }) describe('#capabilities', () => { - beforeEach(async function () { + beforeEach(async function() { this.config.cozyUrl = cozyStackDouble.url() remoteCozy = new RemoteCozy(this.config) remoteCozy.client.oauth = true @@ -854,7 +950,7 @@ describe('RemoteCozy', function () { }) describe('#getDirectoryContent', () => { - beforeEach(function () { + beforeEach(function() { remoteCozy.client = new OldCozyClient({ version: 3, cozyURL: this.config.cozyUrl, @@ -913,7 +1009,10 @@ describe('RemoteCozy', function () { }) it('does not fail on an empty directory', async () => { - const dir = await builders.remoteDir().name('dir').create() + const dir = await builders + .remoteDir() + .name('dir') + .create() await should(remoteCozy.getDirectoryContent(dir)).be.fulfilledWith([]) }) @@ -977,7 +1076,10 @@ describe('RemoteCozy', function () { }) it('returns a list of the old versions of the given remote file', async () => { - const original = await builders.remoteFile().data('original').create() + const original = await builders + .remoteFile() + 
.data('original') + .create() const modified = await builders .remoteFile(original) .data('modified') @@ -1011,7 +1113,7 @@ describe('RemoteCozy.getClient', () => { afterEach('clean config directory', configHelpers.cleanConfig) let webappCozy - beforeEach(function () { + beforeEach(function() { webappCozy = new RemoteCozy(this.config) webappCozy.client = cozyHelpers.cozy }) @@ -1027,7 +1129,7 @@ describe('RemoteCozy.getClient', () => { afterEach('clean config directory', configHelpers.cleanConfig) let oauthCozy - beforeEach(async function () { + beforeEach(async function() { oauthCozy = new RemoteCozy(this.config) oauthCozy.client = await cozyHelpers.oauthCozy(this.config) }) @@ -1037,7 +1139,7 @@ describe('RemoteCozy.getClient', () => { }) context('when the client was not authorized yet', () => { - it('handles OAuth cozy-client-js clients without credentials', async function () { + it('handles OAuth cozy-client-js clients without credentials', async function() { oauthCozy.client = new OldCozyClient({ version: 3, cozyURL: this.config.cozyUrl, diff --git a/test/unit/remote/index.js b/test/unit/remote/index.js index 7da6dff69..7e4863026 100644 --- a/test/unit/remote/index.js +++ b/test/unit/remote/index.js @@ -1,18 +1,18 @@ /* @flow */ /* eslint-env mocha */ -const Promise = require('bluebird') const EventEmitter = require('events') +const path = require('path') + +const Promise = require('bluebird') const fse = require('fs-extra') const _ = require('lodash') -const path = require('path') -const sinon = require('sinon') const should = require('should') +const sinon = require('sinon') const metadata = require('../../../core/metadata') const Prep = require('../../../core/prep') const remote = require('../../../core/remote') -const { DirectoryNotFound } = require('../../../core/remote/errors') const { DIR_TYPE, ROOT_DIR_ID, @@ -20,13 +20,13 @@ const { } = require('../../../core/remote/constants') const { FetchError } = require('../../../core/remote/cozy') const { remoteJsonToRemoteDoc } = require('../../../core/remote/document') -const timestamp = require('../../../core/utils/timestamp') +const { DirectoryNotFound } = require('../../../core/remote/errors') const { CONFLICT_REGEXP } = require('../../../core/utils/conflicts') - +const timestamp = require('../../../core/utils/timestamp') +const Builders = require('../../support/builders') const configHelpers = require('../../support/helpers/config') -const pouchHelpers = require('../../support/helpers/pouch') const { cozy, deleteAll } = require('../../support/helpers/cozy') -const Builders = require('../../support/builders') +const pouchHelpers = require('../../support/helpers/pouch') /*:: import type { Metadata, SavedMetadata } from '../../../core/metadata' @@ -34,16 +34,16 @@ import type { RemoteDoc, RemoteJsonDoc } from '../../../core/remote/document' */ const CHAT_MIGNON_MOD_PATH = 'test/fixtures/chat-mignon-mod.jpg' -describe('remote.Remote', function () { +describe('remote.Remote', function() { let builders, couchdbFolder before('instanciate config', configHelpers.createConfig) before('register OAuth client', configHelpers.registerClient) beforeEach('instanciate pouch', pouchHelpers.createDatabase) - beforeEach('prepare builders', function () { + beforeEach('prepare builders', function() { builders = new Builders({ cozy, pouch: this.pouch }) }) - beforeEach('instanciate remote', function () { + beforeEach('instanciate remote', function() { this.prep = sinon.createStubInstance(Prep) this.events = new EventEmitter() this.remote = new 
remote.Remote(this) @@ -53,30 +53,34 @@ describe('remote.Remote', function () { this.remote.remoteCozy.client = cozy }) beforeEach(deleteAll) - beforeEach('create the couchdb folder', async function () { + beforeEach('create the couchdb folder', async function() { couchdbFolder = await builders .remoteDir() .name('couchdb-folder') .inRootDir() .create() - await builders.metadir().fromRemote(couchdbFolder).upToDate().create() + await builders + .metadir() + .fromRemote(couchdbFolder) + .upToDate() + .create() }) afterEach('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) describe('constructor', () => { - it('has a remoteCozy and a watcher', function () { + it('has a remoteCozy and a watcher', function() { should.exist(this.remote.remoteCozy) should.exist(this.remote.watcher) }) - it('has a side name', function () { + it('has a side name', function() { should(this.remote.name).eql('remote') }) }) describe('createReadStream', () => { - it('create a readable stream from a remote binary', async function () { + it('create a readable stream from a remote binary', async function() { const expectedChecksum = '2NqmrnZqa1zTER40NtPGJg==' const fixture = 'test/fixtures/cool-pillow.jpg' @@ -98,13 +102,13 @@ describe('remote.Remote', function () { }) }) - describe('addFileAsync', function () { + describe('addFileAsync', function() { let image before('read image', async () => { image = await fse.readFile(CHAT_MIGNON_MOD_PATH) }) - it('adds a file to the remote Cozy', async function () { + it('adds a file to the remote Cozy', async function() { const doc = await builders .metafile() .path('cat2.jpg') @@ -138,7 +142,7 @@ describe('remote.Remote', function () { ) }) - it('fails if the md5sum does not match the content', async function () { + it('fails if the md5sum does not match the content', async function() { const doc = await builders .metafile() .path('cat2b.jpg') @@ -160,7 +164,7 @@ describe('remote.Remote', function () { }) }) - it('does not throw if the file does not exists locally anymore', async function () { + it('does not throw if the file does not exists locally anymore', async function() { const doc /*: Metadata */ = builders .metafile() .path('foo') @@ -172,10 +176,12 @@ describe('remote.Remote', function () { } } await this.remote.addFileAsync(doc) - should(doc).have.property('trashed').and.not.have.property('remote') + should(doc) + .have.property('trashed') + .and.not.have.property('remote') }) - it('rejects with a DirectoryNotFound error if its parent is missing on the Cozy', async function () { + it('rejects with a DirectoryNotFound error if its parent is missing on the Cozy', async function() { const doc /*: Metadata */ = builders .metafile() .path('dir/foo') @@ -192,7 +198,7 @@ describe('remote.Remote', function () { ) }) - it('rejects if there is not enough space on the Cozy', async function () { + it('rejects if there is not enough space on the Cozy', async function() { sinon .stub(this.remote.remoteCozy, 'createFile') .rejects( @@ -227,7 +233,7 @@ describe('remote.Remote', function () { }) describe('addFolderAsync', () => { - it('adds a folder on the remote Cozy', async function () { + it('adds a folder on the remote Cozy', async function() { const doc = builders .metadir() .path('folder-1') @@ -250,7 +256,7 @@ describe('remote.Remote', function () { ) }) - it('throws an error if a conflicting folder exists', async function () { + it('throws an error if a conflicting folder exists', async function() { const remoteDir = await 
builders .remoteDir() .inRootDir() @@ -267,7 +273,7 @@ describe('remote.Remote', function () { await should(this.remote.addFolderAsync(doc)).be.rejectedWith(/Conflict/) }) - it('throws an error if the parent folder is missing', async function () { + it('throws an error if the parent folder is missing', async function() { const doc /*: Metadata */ = builders .metadir() .path(path.join('foo', 'bar', 'qux')) @@ -281,8 +287,8 @@ describe('remote.Remote', function () { if (process.platform === 'win32' && process.env.CI) { it.skip('overwrites the binary content (unstable on AppVeyor)', () => {}) } else { - describe('overwriteFileAsync', function () { - it('overwrites the binary content', async function () { + describe('overwriteFileAsync', function() { + it('overwrites the binary content', async function() { const created = await builders .remoteFile() .data('foo') @@ -305,7 +311,10 @@ describe('remote.Remote', function () { this.remote.other = { createReadStreamAsync(localDoc) { localDoc.should.equal(doc) - const stream = builders.stream().push('bar').build() + const stream = builders + .stream() + .push('bar') + .build() return Promise.resolve(stream) } } @@ -325,8 +334,11 @@ describe('remote.Remote', function () { should(doc.remote._rev).equal(file._rev) }) - it('throws an error if the checksum is invalid', async function () { - const created = await builders.remoteFile().data('foo').create() + it('throws an error if the checksum is invalid', async function() { + const created = await builders + .remoteFile() + .data('foo') + .create() const old = await builders .metafile() .fromRemote(created) @@ -341,7 +353,10 @@ describe('remote.Remote', function () { this.remote.other = { createReadStreamAsync() { - const stream = builders.stream().push('bar').build() + const stream = builders + .stream() + .push('bar') + .build() return Promise.resolve(stream) } } @@ -356,7 +371,7 @@ describe('remote.Remote', function () { }) }) - it('does not throw if the file does not exists locally anymore', async function () { + it('does not throw if the file does not exists locally anymore', async function() { const doc /*: Metadata */ = builders .metafile() .path('foo') @@ -374,7 +389,7 @@ describe('remote.Remote', function () { .and.not.have.propertyByPath('remote') }) - it('sends a request if the file is a Cozy Note', async function () { + it('sends a request if the file is a Cozy Note', async function() { const created = await builders .remoteNote() .name('My Note.cozy-note') @@ -396,7 +411,10 @@ describe('remote.Remote', function () { this.remote.other = { createReadStreamAsync(localDoc) { should(localDoc).deepEqual(doc) - const stream = builders.stream().push('bar').build() + const stream = builders + .stream() + .push('bar') + .build() return Promise.resolve(stream) } } @@ -418,7 +436,7 @@ describe('remote.Remote', function () { ) }) - it('rejects if there is not enough space on the Cozy', async function () { + it('rejects if there is not enough space on the Cozy', async function() { sinon .stub(this.remote.remoteCozy, 'updateFileById') .rejects( @@ -447,7 +465,10 @@ describe('remote.Remote', function () { this.remote.other = { createReadStreamAsync(localDoc) { localDoc.should.equal(doc) - const stream = builders.stream().push('bar').build() + const stream = builders + .stream() + .push('bar') + .build() return Promise.resolve(stream) } } @@ -462,7 +483,7 @@ describe('remote.Remote', function () { } }) - it('sends the most recent modification date', async function () { + it('sends the most recent modification 
date', async function() { const created = await builders .remoteFile() .data('foo') @@ -487,7 +508,10 @@ describe('remote.Remote', function () { this.remote.other = { createReadStreamAsync(localDoc) { localDoc.should.equal(doc1) - const stream = builders.stream().push('bar').build() + const stream = builders + .stream() + .push('bar') + .build() return Promise.resolve(stream) } } @@ -513,7 +537,10 @@ describe('remote.Remote', function () { this.remote.other = { createReadStreamAsync(localDoc) { localDoc.should.equal(doc2) - const stream = builders.stream().push('baz').build() + const stream = builders + .stream() + .push('baz') + .build() return Promise.resolve(stream) } } @@ -551,7 +578,10 @@ describe('remote.Remote', function () { this.remote.other = { createReadStreamAsync(localDoc) { localDoc.should.equal(doc3) - const stream = builders.stream().push('boom').build() + const stream = builders + .stream() + .push('boom') + .build() return Promise.resolve(stream) } } @@ -568,8 +598,11 @@ describe('remote.Remote', function () { } describe('updateFileMetadataAsync', () => { - it('makes the remote file executable when the local one was made too', async function () { - const oldRemote = await builders.remoteFile().executable(false).create() + it('makes the remote file executable when the local one was made too', async function() { + const oldRemote = await builders + .remoteFile() + .executable(false) + .create() const doc = builders .metafile() .fromRemote(oldRemote) @@ -579,15 +612,20 @@ describe('remote.Remote', function () { await this.remote.updateFileMetadataAsync(doc) - should(doc).have.propertyByPath('remote', '_rev').not.eql(oldRemote._rev) + should(doc) + .have.propertyByPath('remote', '_rev') + .not.eql(oldRemote._rev) const newRemote = await cozy.files.statById(oldRemote._id) should(newRemote) .have.propertyByPath('attributes', 'executable') .eql(true) }) - it('makes the remote file non-executable when the local one is not anymore', async function () { - const oldRemote = await builders.remoteFile().executable(true).create() + it('makes the remote file non-executable when the local one is not anymore', async function() { + const oldRemote = await builders + .remoteFile() + .executable(true) + .create() const doc = builders .metafile() .fromRemote(oldRemote) @@ -597,15 +635,20 @@ describe('remote.Remote', function () { await this.remote.updateFileMetadataAsync(doc) - should(doc).have.propertyByPath('remote', '_rev').not.eql(oldRemote._rev) + should(doc) + .have.propertyByPath('remote', '_rev') + .not.eql(oldRemote._rev) const newRemote = await cozy.files.statById(oldRemote._id) should(newRemote) .have.propertyByPath('attributes', 'executable') .eql(false) }) - it('updates the last modification date of the remote file', async function () { - const dir = await builders.remoteDir().name('dir').create() + it('updates the last modification date of the remote file', async function() { + const dir = await builders + .remoteDir() + .name('dir') + .create() const created = await builders .remoteFile() .name('file-7') @@ -634,8 +677,8 @@ describe('remote.Remote', function () { }) }) - describe('updateFolder', function () { - it('updates the metadata of a folder', async function () { + describe('updateFolder', function() { + it('updates the metadata of a folder', async function() { const created = await builders .remoteDir() .inRootDir() @@ -667,7 +710,7 @@ describe('remote.Remote', function () { }) }) - it('throws an error if the directory does not exist', async function () { + it('throws an 
error if the directory does not exist', async function() { const deletedDir = await builders .remoteDir() .name('deleted-dir') @@ -686,7 +729,7 @@ describe('remote.Remote', function () { ) }) - it('throws an error if it has no remote info', async function () { + it('throws an error if it has no remote info', async function() { const remoteDir = await builders .remoteDir() .name('foo') @@ -712,20 +755,28 @@ describe('remote.Remote', function () { describe('moveAsync', () => { context('with a file', () => { - it('moves the file', async function () { + it('moves the file', async function() { const dstDir = await builders .remoteDir() .name('moved-to') .inRootDir() .create() - await builders.metadir().fromRemote(dstDir).upToDate().create() + await builders + .metadir() + .fromRemote(dstDir) + .upToDate() + .create() const remoteDoc = await builders .remoteFile() .name('cat6.jpg') .data('meow') .create() - const old = builders.metafile().fromRemote(remoteDoc).upToDate().build() + const old = builders + .metafile() + .fromRemote(remoteDoc) + .upToDate() + .build() const doc = builders .metafile() .moveFrom(old) @@ -752,8 +803,8 @@ describe('remote.Remote', function () { }) }) - context('with a folder', function () { - it('moves the folder in the Cozy', async function () { + context('with a folder', function() { + it('moves the folder in the Cozy', async function() { const remoteDoc = await builders .remoteDir() .name('folder-4') @@ -817,8 +868,11 @@ describe('remote.Remote', function () { .create() }) - it('moves the file on the Cozy', async function () { - const old = builders.metafile(file).changedSide('local').build() + it('moves the file on the Cozy', async function() { + const old = builders + .metafile(file) + .changedSide('local') + .build() const doc = builders .metafile() .moveFrom(old) @@ -868,7 +922,7 @@ describe('remote.Remote', function () { .create() }) - it('moves the file on the Cozy', async function () { + it('moves the file on the Cozy', async function() { const doc = builders .metafile(file) // XXX: Necessary to replace the default updated_at .moveFrom(file) @@ -893,7 +947,7 @@ describe('remote.Remote', function () { } ) - context('when overwriting an existing file', function () { + context('when overwriting an existing file', function() { const existingRefs = [{ _id: 'blah', _type: 'io.cozy.photos.albums' }] let existingRemote @@ -908,7 +962,11 @@ describe('remote.Remote', function () { .name('moved-to') .inRootDir() .create() - await builders.metadir().fromRemote(newDir).upToDate().create() + await builders + .metadir() + .fromRemote(newDir) + .upToDate() + .create() existingRemote = await builders .remoteFile() @@ -923,7 +981,11 @@ describe('remote.Remote', function () { .name('cat6.jpg') .data('meow') .create() - old = await builders.metafile().fromRemote(remote2).upToDate().create() + old = await builders + .metafile() + .fromRemote(remote2) + .upToDate() + .create() }) const saveMetadata = async () => { @@ -945,7 +1007,7 @@ describe('remote.Remote', function () { .build() } - it('moves the file', async function () { + it('moves the file', async function() { await saveMetadata() await this.remote.moveAsync(doc, old) @@ -968,7 +1030,7 @@ describe('remote.Remote', function () { ) }) - it('trashes the existing file at target location', async function () { + it('trashes the existing file at target location', async function() { await saveMetadata() await this.remote.moveAsync(doc, old) @@ -978,7 +1040,7 @@ describe('remote.Remote', function () { .be.true() }) - it('transfers 
the existing file references to the moved one', async function () { + it('transfers the existing file references to the moved one', async function() { await saveMetadata() await this.remote.moveAsync(doc, old) @@ -988,7 +1050,7 @@ describe('remote.Remote', function () { .eql(existingRefs.map(ref => ({ id: ref._id, type: ref._type }))) }) - it('updates the remote attribute', async function () { + it('updates the remote attribute', async function() { await saveMetadata() await this.remote.moveAsync(doc, old) @@ -1010,7 +1072,7 @@ describe('remote.Remote', function () { .update() }) - it('successfuly moves the file', async function () { + it('successfuly moves the file', async function() { await saveMetadata() await this.remote.moveAsync(doc, old) @@ -1027,7 +1089,7 @@ describe('remote.Remote', function () { }) }) - it('transfers the existing file references to the moved one', async function () { + it('transfers the existing file references to the moved one', async function() { await saveMetadata() await this.remote.moveAsync(doc, old) @@ -1039,11 +1101,11 @@ describe('remote.Remote', function () { }) context('when the overwritten file does not exist anymore', () => { - beforeEach(async function () { + beforeEach(async function() { await cozy.files.destroyById(existingRemote._id) }) - it('successfuly moves the file', async function () { + it('successfuly moves the file', async function() { await saveMetadata() await this.remote.moveAsync(doc, old) @@ -1060,7 +1122,7 @@ describe('remote.Remote', function () { }) }) - it('does not transfer the deleted file references', async function () { + it('does not transfer the deleted file references', async function() { await saveMetadata() await this.remote.moveAsync(doc, old) @@ -1074,7 +1136,7 @@ describe('remote.Remote', function () { }) describe('trash', () => { - it('moves the file or folder to the Cozy trash', async function () { + it('moves the file or folder to the Cozy trash', async function() { const folder = await builders.remoteDir().create() const doc = builders .metadir() @@ -1090,7 +1152,7 @@ describe('remote.Remote', function () { .eql(TRASH_DIR_ID) }) - it('does nothing when file or folder does not exist anymore', async function () { + it('does nothing when file or folder does not exist anymore', async function() { const folder = await builders.remoteDir().build() const doc = builders .metadir() @@ -1107,7 +1169,7 @@ describe('remote.Remote', function () { }) describe('assignNewRemote', () => { - it('updates the remote attribute of a moved document', async function () { + it('updates the remote attribute of a moved document', async function() { const remoteSrc = await builders .remoteDir() .name('src-dir') @@ -1118,13 +1180,21 @@ describe('remote.Remote', function () { .name('foo') .inDir(remoteSrc) .create() - const file = builders.metafile().fromRemote(remoteFile).upToDate().build() + const file = builders + .metafile() + .fromRemote(remoteFile) + .upToDate() + .build() const remoteDir = await builders .remoteDir() .name('foo-dir') .inDir(remoteSrc) .create() - const dir = builders.metadir().fromRemote(remoteDir).upToDate().build() + const dir = builders + .metadir() + .fromRemote(remoteDir) + .upToDate() + .build() await this.remote.remoteCozy.updateAttributesById(remoteSrc._id, { name: 'dst-dir' @@ -1149,19 +1219,19 @@ describe('remote.Remote', function () { }) describe('ping', () => { - beforeEach(function () { + beforeEach(function() { sinon.stub(this.remote.remoteCozy, 'diskUsage') }) - afterEach(function () { + 
afterEach(function() { this.remote.remoteCozy.diskUsage.restore() }) - it('resolves to true if we can successfuly fetch the remote disk usage', async function () { + it('resolves to true if we can successfuly fetch the remote disk usage', async function() { this.remote.remoteCozy.diskUsage.resolves() await should(this.remote.ping()).be.fulfilledWith(true) }) - it('resolves to false if we cannot successfuly fetch the remote disk usage', async function () { + it('resolves to false if we cannot successfuly fetch the remote disk usage', async function() { this.remote.remoteCozy.diskUsage.rejects() await should(this.remote.ping()).be.fulfilledWith(false) }) @@ -1169,8 +1239,11 @@ describe('remote.Remote', function () { describe('findDirectoryByPath', () => { let oldRemoteDir, newRemoteDir, oldDir, dir - beforeEach(async function () { - oldRemoteDir = await builders.remoteDir().name('old').create() + beforeEach(async function() { + oldRemoteDir = await builders + .remoteDir() + .name('old') + .create() oldDir = await builders .metadir() .fromRemote(oldRemoteDir) @@ -1182,10 +1255,13 @@ describe('remote.Remote', function () { .path('dir') .changedSide('local') .create() - newRemoteDir = await builders.remoteDir().name('dir').create() + newRemoteDir = await builders + .remoteDir() + .name('dir') + .create() }) - it('returns the directory metadata saved in PouchDB', async function () { + it('returns the directory metadata saved in PouchDB', async function() { await should(this.remote.findDirectoryByPath('dir')).be.fulfilledWith( metadata.serializableRemote(dir.remote) ) @@ -1199,7 +1275,7 @@ describe('remote.Remote', function () { }) }) - it('handles different local and remote paths formats', async function () { + it('handles different local and remote paths formats', async function() { // XXX: The synced path of this directory on Windows will be // `whatever\childDir` and since we search by synced path, this tests that // we handle the conversion. 
@@ -1214,7 +1290,7 @@ describe('remote.Remote', function () { ).be.fulfilledWith(childDir.remote) }) - it('returns the remote root directory for path .', async function () { + it('returns the remote root directory for path .', async function() { // $FlowFixMe Root is a directory const root /*: RemoteDir */ = remoteJsonToRemoteDoc( // XXX: We call the cozy-client-js method directly to increase the @@ -1231,24 +1307,35 @@ describe('remote.Remote', function () { }) }) - it('returns a DirectoryNotFound error if the directory cannot be found in PouchDB', async function () { - await builders.remoteDir().name('missing').create() + it('returns a DirectoryNotFound error if the directory cannot be found in PouchDB', async function() { + await builders + .remoteDir() + .name('missing') + .create() await should(this.remote.findDirectoryByPath('missing')).be.rejectedWith( DirectoryNotFound ) }) - it('returns a DirectoryNotFound error if the local document is not a directory', async function () { - await builders.metafile().path('wrong-type').upToDate().create() + it('returns a DirectoryNotFound error if the local document is not a directory', async function() { + await builders + .metafile() + .path('wrong-type') + .upToDate() + .create() await should( this.remote.findDirectoryByPath('wrong-type') ).be.rejectedWith(DirectoryNotFound) }) - it('returns a DirectoryNotFound error if the directory has no remote side', async function () { - await builders.metadir().path('no-remote').sides({ local: 1 }).create() + it('returns a DirectoryNotFound error if the directory has no remote side', async function() { + await builders + .metadir() + .path('no-remote') + .sides({ local: 1 }) + .create() await should( this.remote.findDirectoryByPath('no-remote') @@ -1258,8 +1345,11 @@ describe('remote.Remote', function () { describe('resolveConflict', () => { let remoteFile, file - beforeEach(async function () { - remoteFile = await builders.remoteFile().name('file.txt').create() + beforeEach(async function() { + remoteFile = await builders + .remoteFile() + .name('file.txt') + .create() file = await builders .metafile() .fromRemote(remoteFile) @@ -1267,21 +1357,24 @@ describe('remote.Remote', function () { .create() }) - it('fails if there are no remote documents with the given path', async function () { + it('fails if there are no remote documents with the given path', async function() { await this.remote.remoteCozy.destroyById(remoteFile._id) await should(this.remote.resolveConflict(file)).be.rejected() }) - it('renames the remote document with a conflict suffix', async function () { + it('renames the remote document with a conflict suffix', async function() { await this.remote.resolveConflict(file) should(await this.remote.remoteCozy.find(remoteFile._id)) .have.property('name') .match(CONFLICT_REGEXP) }) - it('fails with a 412 error if file changes on remote Cozy during the call', async function () { - await builders.remoteFile(remoteFile).data('update').update() + it('fails with a 412 error if file changes on remote Cozy during the call', async function() { + await builders + .remoteFile(remoteFile) + .data('update') + .update() await should(this.remote.resolveConflict(file)).be.rejectedWith({ name: 'FetchError', @@ -1291,8 +1384,11 @@ describe('remote.Remote', function () { }) describe('fileContentWasVersioned', () => { - it('returns false if the given remote file has no old versions', async function () { - const file = await builders.remoteFile().data('original').create() + it('returns false if the given remote 
file has no old versions', async function() { + const file = await builders + .remoteFile() + .data('original') + .create() const { md5sum, size } = file await should( @@ -1303,8 +1399,11 @@ describe('remote.Remote', function () { ).be.fulfilledWith(false) }) - it('returns true if the given remote file has an old version with the given content', async function () { - const original = await builders.remoteFile().data('original').create() + it('returns true if the given remote file has an old version with the given content', async function() { + const original = await builders + .remoteFile() + .data('original') + .create() const modified = await builders .remoteFile(original) .data('modified') @@ -1319,8 +1418,11 @@ describe('remote.Remote', function () { ).be.fulfilledWith(true) }) - it('returns false if the given remote file has no old versions with the given content', async function () { - const original = await builders.remoteFile().data('original').create() + it('returns false if the given remote file has no old versions with the given content', async function() { + const original = await builders + .remoteFile() + .data('original') + .create() const modified = await builders .remoteFile(original) .data('modified') @@ -1336,9 +1438,9 @@ describe('remote.Remote', function () { }) }) -describe('remote', function () { +describe('remote', function() { describe('.dirAndName()', () => { - it('returns the remote path and name', function () { + it('returns the remote path and name', function() { should(remote.dirAndName('foo')).deepEqual(['.', 'foo']) should(remote.dirAndName(path.normalize('foo/bar'))).deepEqual([ 'foo', diff --git a/test/unit/remote/offline.js b/test/unit/remote/offline.js index 37b18196f..220e02675 100644 --- a/test/unit/remote/offline.js +++ b/test/unit/remote/offline.js @@ -2,18 +2,18 @@ /* eslint-env mocha */ const EventEmitter = require('events') -const sinon = require('sinon') -const should = require('should') + const { FetchError } = require('electron-fetch') +const should = require('should') +const sinon = require('sinon') const Prep = require('../../../core/prep') const { Remote } = require('../../../core/remote') const remoteErrors = require('../../../core/remote/errors') - +const Builders = require('../../support/builders') const configHelpers = require('../../support/helpers/config') -const pouchHelpers = require('../../support/helpers/pouch') const cozyHelpers = require('../../support/helpers/cozy') -const Builders = require('../../support/builders') +const pouchHelpers = require('../../support/helpers/pouch') const builders = new Builders({ cozy: cozyHelpers.cozy }) /*:: @@ -21,11 +21,11 @@ import type { Metadata } from '../../../core/metadata' import type { RemoteDoc } from '../../../core/remote/document' */ -describe('Remote', function () { +describe('Remote', function() { before('instanciate config', configHelpers.createConfig) before('register OAuth client', configHelpers.registerClient) before('instanciate pouch', pouchHelpers.createDatabase) - before('instanciate remote', function () { + before('instanciate remote', function() { this.prep = sinon.createStubInstance(Prep) this.prep.config = this.config this.events = new EventEmitter() @@ -34,14 +34,18 @@ describe('Remote', function () { this.remote.remoteCozy.client = cozyHelpers.cozy }) beforeEach(cozyHelpers.deleteAll) - beforeEach('create the couchdb folder', async function () { - await builders.remoteDir().name('couchdb-folder').inRootDir().create() + beforeEach('create the couchdb folder', async 
function() { + await builders + .remoteDir() + .name('couchdb-folder') + .inRootDir() + .create() }) after('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) describe('offline management', () => { - it('The remote can be started when offline ', async function () { + it('The remote can be started when offline ', async function() { const fetchStub = sinon .stub(global, 'fetch') .rejects(new FetchError('net::ERR_INTERNET_DISCONNECTED')) @@ -52,7 +56,9 @@ describe('Remote', function () { try { should(this.events.emit).have.been.calledWithMatch( 'RemoteWatcher:error', - { code: remoteErrors.UNREACHABLE_COZY_CODE } + { + code: remoteErrors.UNREACHABLE_COZY_CODE + } ) fetchStub.restore() diff --git a/test/unit/remote/realtime_manager.js b/test/unit/remote/realtime_manager.js index b683f9abb..5d54fb973 100644 --- a/test/unit/remote/realtime_manager.js +++ b/test/unit/remote/realtime_manager.js @@ -1,17 +1,16 @@ /* @flow */ /* eslint-env mocha */ -const sinon = require('sinon') const should = require('should') - -const configHelpers = require('../../support/helpers/config') -const cozyHelpers = require('../../support/helpers/cozy') -const pouchHelpers = require('../../support/helpers/pouch') +const sinon = require('sinon') const { FILES_DOCTYPE } = require('../../../core/remote/constants') const { RealtimeManager } = require('../../../core/remote/watcher/realtime_manager') +const configHelpers = require('../../support/helpers/config') +const cozyHelpers = require('../../support/helpers/cozy') +const pouchHelpers = require('../../support/helpers/pouch') const setup = async () => { const client = await cozyHelpers.newClient() @@ -27,13 +26,13 @@ const setup = async () => { } } -describe('RealtimeManager', function () { +describe('RealtimeManager', function() { before('instanciate config', configHelpers.createConfig) before('register OAuth client', configHelpers.registerClient) beforeEach(pouchHelpers.createDatabase) describe('start', () => { - it('subscribes to all io.cozy.files realtime events', async function () { + it('subscribes to all io.cozy.files realtime events', async function() { const { teardown, realtime, realtimeManager } = await setup() const subscribeSpy = sinon.spy(realtime, 'subscribe') @@ -64,7 +63,7 @@ describe('RealtimeManager', function () { }) describe('stop', () => { - it('removes all subscriptions', async function () { + it('removes all subscriptions', async function() { const { teardown, realtime, realtimeManager } = await setup() const unsubscribeSpy = sinon.spy(realtime, 'unsubscribe') @@ -95,7 +94,7 @@ describe('RealtimeManager', function () { }) describe('onCreated', () => { - it('calls event handler for a created realtime event', async function () { + it('calls event handler for a created realtime event', async function() { const { teardown, eventHandler, realtimeManager } = await setup() try { @@ -110,7 +109,7 @@ describe('RealtimeManager', function () { }) describe('onUpdated', () => { - it('calls event handler for an updated realtime event', async function () { + it('calls event handler for an updated realtime event', async function() { const { teardown, eventHandler, realtimeManager } = await setup() try { @@ -125,7 +124,7 @@ describe('RealtimeManager', function () { }) describe('onDeleted', () => { - it('calls event handler for a deleted realtime event', async function () { + it('calls event handler for a deleted realtime event', async function() { const { teardown, eventHandler, realtimeManager } = await setup() 
try { diff --git a/test/unit/remote/registration.js b/test/unit/remote/registration.js index f2e8a1877..b458120c6 100644 --- a/test/unit/remote/registration.js +++ b/test/unit/remote/registration.js @@ -3,25 +3,24 @@ const should = require('should') const Registration = require('../../../core/remote/registration') - const configHelpers = require('../../support/helpers/config') -describe('Registration', function () { +describe('Registration', function() { before('instanciate config', configHelpers.createConfig) after('clean config directory', configHelpers.cleanConfig) - before('create a registration', function () { + before('create a registration', function() { this.registration = new Registration(this.config.cozyUrl, this.config) }) - it('generates a unique device name', function () { + it('generates a unique device name', function() { const params = this.registration.clientParams({}) should(params.clientName).not.be.empty() const otherName = this.registration.clientParams({}).clientName should(params.clientName).should.not.equal(otherName) }) - it('configures correctly the OAuth client', function () { + it('configures correctly the OAuth client', function() { const pkg = { homepage: 'https//github.com/cozy-labs/cozy-desktop', logo: 'https://cozy.io/cozy-desktop.logo', diff --git a/test/unit/remote/warning_poller.js b/test/unit/remote/warning_poller.js index d396361a6..e4780d0c0 100644 --- a/test/unit/remote/warning_poller.js +++ b/test/unit/remote/warning_poller.js @@ -4,6 +4,7 @@ const should = require('should') const sinon = require('sinon') +const remoteErrors = require('../../../core/remote/errors') const { DEFAULT_TICKS, MODE, @@ -12,8 +13,6 @@ const { shiftTicks, ticks } = require('../../../core/remote/warning_poller') -const remoteErrors = require('../../../core/remote/errors') - const Builders = require('../../support/builders') /*:: diff --git a/test/unit/remote/watcher.js b/test/unit/remote/watcher.js index 46c6f0829..43ab77318 100644 --- a/test/unit/remote/watcher.js +++ b/test/unit/remote/watcher.js @@ -2,28 +2,20 @@ /* eslint-env mocha */ const EventEmitter = require('events') +const path = require('path') + +const async = require('async') +const { Promise } = require('bluebird') const faker = require('faker') const _ = require('lodash') -const path = require('path') -const sinon = require('sinon') const should = require('should') +const sinon = require('sinon') + const CozyClient = require('cozy-client-js').Client const { FetchError } = require('cozy-stack-client') -const async = require('async') -const { Promise } = require('bluebird') - -const configHelpers = require('../../support/helpers/config') -const { posixifyPath } = require('../../support/helpers/context_dir') -const { onPlatform, onPlatforms } = require('../../support/helpers/platform') -const pouchHelpers = require('../../support/helpers/pouch') -const cozyHelpers = require('../../support/helpers/cozy') -const Builders = require('../../support/builders') const metadata = require('../../../core/metadata') -const { ensureValidPath } = metadata const Prep = require('../../../core/prep') -const { RemoteCozy } = require('../../../core/remote/cozy') -const remoteErrors = require('../../../core/remote/errors') const { FILE_TYPE, DIR_TYPE, @@ -32,8 +24,17 @@ const { REMOTE_WATCHER_ERROR_EVENT, REMOTE_WATCHER_FATAL_EVENT } = require('../../../core/remote/constants') +const { RemoteCozy } = require('../../../core/remote/cozy') +const remoteErrors = require('../../../core/remote/errors') const { RemoteWatcher } = 
require('../../../core/remote/watcher') const timestamp = require('../../../core/utils/timestamp') +const Builders = require('../../support/builders') +const configHelpers = require('../../support/helpers/config') +const { posixifyPath } = require('../../support/helpers/context_dir') +const cozyHelpers = require('../../support/helpers/cozy') +const { onPlatform, onPlatforms } = require('../../support/helpers/platform') +const pouchHelpers = require('../../support/helpers/pouch') +const { ensureValidPath } = metadata /*:: import type { @@ -63,14 +64,22 @@ const saveTree = async (remoteTree, builders) => { for (const key in remoteTree) { const remoteDoc = remoteTree[key] if (remoteDoc.type === DIR_TYPE) { - await builders.metadir().fromRemote(remoteDoc).upToDate().create() + await builders + .metadir() + .fromRemote(remoteDoc) + .upToDate() + .create() } else { - await builders.metafile().fromRemote(remoteDoc).upToDate().create() + await builders + .metafile() + .fromRemote(remoteDoc) + .upToDate() + .create() } } } -describe('RemoteWatcher', function () { +describe('RemoteWatcher', function() { let builders, clock let remoteTree /*: Object */ @@ -92,7 +101,7 @@ describe('RemoteWatcher', function () { this.watcher = new RemoteWatcher(this) builders = new Builders({ cozy: cozyHelpers.cozy, pouch: this.pouch }) }) - beforeEach(async function () { + beforeEach(async function() { await async.retry({ times: 2 }, async () => { try { remoteTree = await builders.createRemoteTree([ @@ -110,7 +119,7 @@ describe('RemoteWatcher', function () { } }) }) - afterEach(async function () { + afterEach(async function() { await this.watcher.stop() }) afterEach(function removeEventListeners() { @@ -119,15 +128,15 @@ describe('RemoteWatcher', function () { afterEach(function restoreTimers() { clock.restore() }) - afterEach(async function () { + afterEach(async function() { await pouchHelpers.cleanDatabase() }) - afterEach(async function () { + afterEach(async function() { await cozyHelpers.deleteAll() }) after(configHelpers.cleanConfig) - describe('start', function () { + describe('start', function() { const fatalError = new remoteErrors.RemoteError({ code: remoteErrors.COZY_CLIENT_REVOKED_CODE, message: remoteErrors.COZY_CLIENT_REVOKED_MESSAGE, @@ -135,16 +144,16 @@ describe('RemoteWatcher', function () { }) const nonFatalError = new Error('from watch') - beforeEach(function () { + beforeEach(function() { sinon.stub(this.watcher, 'watch').resolves() sinon.spy(this.events, 'emit') }) - afterEach(function () { + afterEach(function() { this.watcher.watch.restore() this.events.emit.restore() }) - it('starts the watch loop', async function () { + it('starts the watch loop', async function() { const requestRunSpy = sinon.spy(this.watcher, 'requestRun') try { await this.watcher.start() @@ -157,7 +166,7 @@ describe('RemoteWatcher', function () { } }) - it('can be called multiple times without resetting the clock', async function () { + it('can be called multiple times without resetting the clock', async function() { await this.watcher.start() const intervalID = this.watcher.watchInterval.ref() await this.watcher.start() @@ -165,7 +174,7 @@ describe('RemoteWatcher', function () { should(this.watcher.watchInterval.ref()).eql(intervalID) }) - it('emits a REMOTE_WATCHER_FATAL_EVENT event on fatal error during first watch()', async function () { + it('emits a REMOTE_WATCHER_FATAL_EVENT event on fatal error during first watch()', async function() { this.watcher.watch.rejects(fatalError) await this.watcher.start() @@ -175,7 
+184,7 @@ describe('RemoteWatcher', function () { ) }) - it('emits a REMOTE_WATCHER_FATAL_EVENT event on fatal error during second watch()', async function () { + it('emits a REMOTE_WATCHER_FATAL_EVENT event on fatal error during second watch()', async function() { this.watcher.watch .onFirstCall() .resolves() @@ -197,7 +206,7 @@ describe('RemoteWatcher', function () { await done }) - it('emits a REMOTE_WATCHER_ERROR_EVENT event on non-fatal error during first watch()', async function () { + it('emits a REMOTE_WATCHER_ERROR_EVENT event on non-fatal error during first watch()', async function() { this.watcher.watch.rejects(nonFatalError) await this.watcher.start() @@ -207,7 +216,7 @@ describe('RemoteWatcher', function () { ) }) - it('emits a REMOTE_WATCHER_ERROR_EVENT event on non-fatal error during second watch()', async function () { + it('emits a REMOTE_WATCHER_ERROR_EVENT event on non-fatal error during second watch()', async function() { this.watcher.watch .onFirstCall() .resolves() @@ -230,23 +239,23 @@ describe('RemoteWatcher', function () { }) }) - describe('stop', function () { - beforeEach(function () { + describe('stop', function() { + beforeEach(function() { sinon.stub(this.watcher, 'watch').resolves() }) - afterEach(function () { + afterEach(function() { this.watcher.watch.restore() }) - it('ensures watch is not called anymore', async function () { + it('ensures watch is not called anymore', async function() { await this.watcher.start() await this.watcher.stop() should(this.watcher.running).be.false() should(this.watcher.watchInterval).be.null() }) - it('can be called multiple times', async function () { + it('can be called multiple times', async function() { await this.watcher.start() await this.watcher.stop() await this.watcher.stop() @@ -255,8 +264,8 @@ describe('RemoteWatcher', function () { }) }) - describe('watch loop', function () { - it('requests run at REMOTE_HEARTBEAT interval only if the queue is idle', async function () { + describe('watch loop', function() { + it('requests run at REMOTE_HEARTBEAT interval only if the queue is idle', async function() { this.watcher.watch = sinon.stub().callsFake(async () => { await Promise.delay(REMOTE_HEARTBEAT + 1) }) @@ -280,8 +289,8 @@ describe('RemoteWatcher', function () { }) }) - describe('requestRun', function () { - beforeEach(async function () { + describe('requestRun', function() { + beforeEach(async function() { sinon.stub(this.watcher, 'watch') sinon.spy(this.events, 'emit') @@ -290,13 +299,13 @@ describe('RemoteWatcher', function () { this.watcher.watch.resetHistory() }) - afterEach(async function () { + afterEach(async function() { this.events.emit.restore() this.watcher.watch.restore() await this.watcher.stop() }) - it('clears enqueued watch runs', async function () { + it('clears enqueued watch runs', async function() { this.watcher.watch.resolves() // We start with an empty queue @@ -327,11 +336,11 @@ describe('RemoteWatcher', function () { }) context('when the watcher is stopped', () => { - beforeEach(function () { + beforeEach(function() { this.watcher.running = false }) - it('does not call watch()', async function () { + it('does not call watch()', async function() { await this.watcher.requestRun() should(this.watcher.watch).not.have.been.called() }) @@ -342,7 +351,7 @@ describe('RemoteWatcher', function () { let err context('when next #watch() has no chance to work anymore', () => { - beforeEach(function () { + beforeEach(function() { err = new remoteErrors.RemoteError({ code: 
remoteErrors.COZY_CLIENT_REVOKED_CODE, message: remoteErrors.COZY_CLIENT_REVOKED_MESSAGE, @@ -351,12 +360,12 @@ describe('RemoteWatcher', function () { this.watcher.watch.rejects(err) }) - it('stops the watcher', async function () { + it('stops the watcher', async function() { await this.watcher.requestRun() should(this.watcher.running).be.false() }) - it('emits a REMOTE_WATCHER_FATAL_EVENT event', async function () { + it('emits a REMOTE_WATCHER_FATAL_EVENT event', async function() { await this.watcher.requestRun() await should(this.events.emit).have.been.calledWith( REMOTE_WATCHER_FATAL_EVENT, @@ -366,7 +375,7 @@ describe('RemoteWatcher', function () { }) context('when next #watch() could work', () => { - beforeEach(function () { + beforeEach(function() { err = new remoteErrors.RemoteError({ code: remoteErrors.UNREACHABLE_COZY_CODE, message: 'Cannot reach remote Cozy', @@ -375,12 +384,12 @@ describe('RemoteWatcher', function () { this.watcher.watch.rejects(err) }) - it('does not stop the watcher', async function () { + it('does not stop the watcher', async function() { await this.watcher.requestRun() should(this.watcher.running).be.true() }) - it('emits a REMOTE_WATCHER_ERROR_EVENT event', async function () { + it('emits a REMOTE_WATCHER_ERROR_EVENT event', async function() { await this.watcher.requestRun() await should(this.events.emit).have.been.calledWith( REMOTE_WATCHER_ERROR_EVENT, @@ -391,12 +400,12 @@ describe('RemoteWatcher', function () { }) }) - describe('watch', function () { + describe('watch', function() { const lastLocalSeq = '123' const lastRemoteSeq = lastLocalSeq + '456' let changes - beforeEach(function () { + beforeEach(function() { changes = { isInitialFetch: false, last_seq: String(Number(lastRemoteSeq) + 2), // XXX: Include the two changes returned @@ -404,7 +413,7 @@ describe('RemoteWatcher', function () { } }) - beforeEach(function () { + beforeEach(function() { sinon.stub(this.pouch, 'getRemoteSeq') sinon.stub(this.pouch, 'setRemoteSeq') sinon.stub(this.watcher, 'processRemoteChanges') @@ -417,7 +426,7 @@ describe('RemoteWatcher', function () { this.remoteCozy.changes.resolves(changes) }) - afterEach(function () { + afterEach(function() { this.events.emit.restore() this.remoteCozy.changes.restore() this.watcher.processRemoteChanges.restore() @@ -426,14 +435,14 @@ describe('RemoteWatcher', function () { this.pouch.getRemoteSeq.restore() }) - it('pulls the changed files/dirs', async function () { + it('pulls the changed files/dirs', async function() { await this.watcher.watch() should(this.watcher.processRemoteChanges) .have.been.calledOnce() .and.be.calledWithExactly(changes.docs) }) - it('updates the last update sequence in local db', async function () { + it('updates the last update sequence in local db', async function() { await this.watcher.watch() should(this.pouch.setRemoteSeq) .have.been.calledOnce() @@ -444,7 +453,7 @@ describe('RemoteWatcher', function () { const randomMessage = faker.random.words let err - beforeEach(function () { + beforeEach(function() { const response = {} // FetchError objects defined in `cozy-stack-client` have the same // signature as FetchError objects defined in `cozy-client-js`. 
@@ -452,7 +461,7 @@ describe('RemoteWatcher', function () { this.remoteCozy.changes.rejects(err) }) - it('resolves with a higher-level error', async function () { + it('resolves with a higher-level error', async function() { err.status = 400 // Revoked await should(this.watcher.watch()).be.rejectedWith( new remoteErrors.RemoteError({ @@ -483,11 +492,11 @@ describe('RemoteWatcher', function () { } } - beforeEach(function () { + beforeEach(function() { this.watcher.processRemoteChanges.throws(reservedIdsError) }) - it('does not return client revoked error', async function () { + it('does not return client revoked error', async function() { await should(this.watcher.watch()).be.rejectedWith({ code: remoteErrors.UNKNOWN_REMOTE_ERROR_CODE }) @@ -495,17 +504,17 @@ describe('RemoteWatcher', function () { }) context('when a fetched directory has been modified more than once', () => { - beforeEach(function () { + beforeEach(function() { this.prep.putFolderAsync.callsFake(async (side, doc) => { metadata.markSide(side, doc, doc) await this.pouch.put(doc) }) }) - afterEach(function () { + afterEach(function() { this.prep.putFolderAsync.restore() }) - it('it fetches its content as a potentially re-included directory', async function () { + it('it fetches its content as a potentially re-included directory', async function() { const remoteDocs = [ builders.remoteFile().build(), builders @@ -529,14 +538,16 @@ describe('RemoteWatcher', function () { const spy = sinon.spy(this.remoteCozy, 'getDirectoryContent') try { await this.watcher.watch() - should(spy).have.been.calledOnce().and.calledWith(remoteDocs[1]) + should(spy) + .have.been.calledOnce() + .and.calledWith(remoteDocs[1]) } finally { spy.restore() } }) context('when fetching changes for the first time', () => { - it('does not fetch the content of modified directories', async function () { + it('does not fetch the content of modified directories', async function() { // Restored in a "parent" afterEach this.pouch.getRemoteSeq.resolves(INITIAL_SEQ) // Restored in a "parent" afterEach @@ -569,21 +580,21 @@ describe('RemoteWatcher', function () { }) context('when watcher is not running', () => { - beforeEach(function () { + beforeEach(function() { this.watcher.running = false }) - afterEach(function () { + afterEach(function() { this.watcher.running = true }) - it('returns without fetching changes', async function () { + it('returns without fetching changes', async function() { await this.watcher.watch() should(this.remoteCozy.changes).not.have.been.called() }) - it('still tries to get hold of the PouchDB lock', async function () { + it('still tries to get hold of the PouchDB lock', async function() { sinon.spy(this.pouch, 'lock') try { @@ -605,11 +616,11 @@ describe('RemoteWatcher', function () { return doc } - describe('processRemoteChanges', function () { + describe('processRemoteChanges', function() { let apply let findMaybe let remoteDocs - beforeEach(function () { + beforeEach(function() { apply = sinon.stub(this.watcher, 'apply') findMaybe = sinon.stub(this.remoteCozy, 'findMaybe') remoteDocs = [ @@ -618,12 +629,12 @@ describe('RemoteWatcher', function () { ] }) - afterEach(function () { + afterEach(function() { apply.restore() findMaybe.restore() }) - it('pulls many changed files/dirs given their ids', async function () { + it('pulls many changed files/dirs given their ids', async function() { apply.resolves() await this.watcher.processRemoteChanges(remoteDocs, { @@ -638,19 +649,17 @@ describe('RemoteWatcher', function () { 
should(apply.args[1][0].doc).deepEqual(remoteDocs[1]) }) - context('when apply() returns an error for some file/dir', function () { - beforeEach(function () { - apply.callsFake( - async ( - change /*: RemoteChange */ - ) /*: Promise */ => { - if (change.type === 'FileAddition') - return { change, err: new Error(change.doc) } - } - ) + context('when apply() returns an error for some file/dir', function() { + beforeEach(function() { + apply.callsFake(async ( + change /*: RemoteChange */ + ) /*: Promise */ => { + if (change.type === 'FileAddition') + return { change, err: new Error(change.doc) } + }) }) - it('rejects with the first error', async function () { + it('rejects with the first error', async function() { await should( this.watcher.processRemoteChanges(remoteDocs, { isInitialFetch: false @@ -658,7 +667,7 @@ describe('RemoteWatcher', function () { ).be.rejectedWith(new Error(remoteDocs[0])) }) - it('still tries to pull other files/dirs', async function () { + it('still tries to pull other files/dirs', async function() { await this.watcher .processRemoteChanges(remoteDocs, { isInitialFetch: false }) .catch(() => {}) @@ -672,7 +681,7 @@ describe('RemoteWatcher', function () { }) }) - it('retries failed changes application until none can be applied', async function () { + it('retries failed changes application until none can be applied', async function() { const remoteDocs = [ builders.remoteFile().build(), builders.remoteErased().build(), @@ -700,7 +709,7 @@ describe('RemoteWatcher', function () { }) }) - it('releases the Pouch lock', async function () { + it('releases the Pouch lock', async function() { await this.watcher .processRemoteChanges(remoteDocs, { isInitialFetch: false }) .catch(() => {}) @@ -708,7 +717,7 @@ describe('RemoteWatcher', function () { await should(nextLockPromise).be.fulfilled() }) - it('does not update the remote sequence', async function () { + it('does not update the remote sequence', async function() { const remoteSeq = await this.pouch.getRemoteSeq() await this.watcher .processRemoteChanges(remoteDocs, { isInitialFetch: false }) @@ -717,8 +726,11 @@ describe('RemoteWatcher', function () { }) }) - it('applies the changes when the document still exists on remote', async function () { - const remoteDoc = builders.remoteFile().name('whatever').build() + it('applies the changes when the document still exists on remote', async function() { + const remoteDoc = builders + .remoteFile() + .name('whatever') + .build() await this.watcher.processRemoteChanges([remoteDoc], { isInitialFetch: false @@ -728,7 +740,7 @@ describe('RemoteWatcher', function () { should(apply.args[0][0].doc).deepEqual(validMetadata(remoteDoc)) }) - it('tries to apply a deletion otherwise', async function () { + it('tries to apply a deletion otherwise', async function() { const remoteDeletion /*: CouchDBDeletion */ = { _id: 'missing', _rev: 'whatever', @@ -746,8 +758,11 @@ describe('RemoteWatcher', function () { describe('analyse', () => { describe('case-only renaming', () => { - it('is identified as a move', async function () { - const oldRemote = builders.remoteFile().name('foo').build() + it('is identified as a move', async function() { + const oldRemote = builders + .remoteFile() + .name('foo') + .build() const oldDoc = metadata.fromRemoteDoc(oldRemote) metadata.ensureValidPath(oldDoc) const newRemote = _.defaults( @@ -762,15 +777,19 @@ describe('RemoteWatcher', function () { const changes = await this.watcher.analyse([newRemote], [oldDoc]) should(changes.map(c => 
c.type)).deepEqual(['FileMove']) - should(changes[0]).have.propertyByPath('doc', 'path').eql('FOO') - should(changes[0]).have.propertyByPath('was', 'path').eql('foo') + should(changes[0]) + .have.propertyByPath('doc', 'path') + .eql('FOO') + should(changes[0]) + .have.propertyByPath('was', 'path') + .eql('foo') }) }) onPlatform('darwin', () => { describe('file update', () => { context('at root with normalization change', () => { - it('is not identified as a move', async function () { + it('is not identified as a move', async function() { const oldRemote = builders .remoteFile() .name('énoncé'.normalize('NFC')) @@ -795,7 +814,7 @@ describe('RemoteWatcher', function () { }) context('in accented folder with normalization change', () => { - it('is not identified as a move', async function () { + it('is not identified as a move', async function() { const oldRemoteDir = builders .remoteDir() .name('énoncés'.normalize('NFD')) @@ -842,7 +861,7 @@ describe('RemoteWatcher', function () { context( 'in accented folder with different local/remote normalizations', () => { - it('is not identified as a move', async function () { + it('is not identified as a move', async function() { const oldRemoteDir = builders .remoteDir() .name('énoncés'.normalize('NFC')) @@ -885,7 +904,7 @@ describe('RemoteWatcher', function () { context( 'in renamed accented folder with different local/remote normalizations', () => { - it('is identified as a descendant change within current parent path', async function () { + it('is identified as a descendant change within current parent path', async function() { const oldRemoteDir = builders .remoteDir() .name('énoncés'.normalize('NFC')) @@ -945,7 +964,7 @@ describe('RemoteWatcher', function () { context( 'in accented folder with different local/remote normalizations', () => { - it('is identified as an addition with old parent normalization', async function () { + it('is identified as an addition with old parent normalization', async function() { const oldRemoteDir = builders .remoteDir() .name('énoncés'.normalize('NFC')) @@ -980,7 +999,7 @@ describe('RemoteWatcher', function () { context( 'in created folder in accented folder with different local/remote normalizations', () => { - it('is identified as an addition with old ancestor normalization', async function () { + it('is identified as an addition with old ancestor normalization', async function() { const remoteParentDir = builders .remoteDir() .name('énoncés'.normalize('NFC')) @@ -1023,7 +1042,7 @@ describe('RemoteWatcher', function () { context( 'with the folder creation ordered after the file creation', () => { - it('is identified as an addition with old ancestor normalization', async function () { + it('is identified as an addition with old ancestor normalization', async function() { const remoteParentDir = builders .remoteDir() .name('énoncés'.normalize('NFC')) @@ -1076,7 +1095,7 @@ describe('RemoteWatcher', function () { context( 'with different local/remote normalizations to accented folder with different local/remote normalizations', () => { - it('is identified as move with old normalization and new parent normalization', async function () { + it('is identified as move with old normalization and new parent normalization', async function() { const oldRemoteDir = builders .remoteDir() .name('énoncés'.normalize('NFC')) @@ -1184,7 +1203,7 @@ describe('RemoteWatcher', function () { return props }) - it('is detected when moved source is first', async function () { + it('is detected when moved source is first', async function() 
{ const remoteDocs = [srcFileMoved, dstFileTrashed] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1201,7 +1220,7 @@ describe('RemoteWatcher', function () { ]) }) - it('is detected when trashed destination is first', async function () { + it('is detected when trashed destination is first', async function() { const remoteDocs = [dstFileTrashed, srcFileMoved] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1274,7 +1293,7 @@ describe('RemoteWatcher', function () { describe('when moved source is first', () => { onPlatforms(['win32', 'darwin'], () => { - it('sorts the trashing before the move to prevent id confusion', async function () { + it('sorts the trashing before the move to prevent id confusion', async function() { const remoteDocs = [srcFileMoved, dstFileTrashed] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1293,7 +1312,7 @@ describe('RemoteWatcher', function () { }) onPlatform('linux', () => { - it('sorts the move before the trashing', async function () { + it('sorts the move before the trashing', async function() { const remoteDocs = [srcFileMoved, dstFileTrashed] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1314,7 +1333,7 @@ describe('RemoteWatcher', function () { describe('when trashed destination is first', () => { onPlatforms(['win32', 'darwin'], () => { - it('sorts the trashing before the move to prevent id confusion', async function () { + it('sorts the trashing before the move to prevent id confusion', async function() { const remoteDocs = [dstFileTrashed, srcFileMoved] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1333,7 +1352,7 @@ describe('RemoteWatcher', function () { }) onPlatform('linux', () => { - it('sorts the move before the trashing', async function () { + it('sorts the move before the trashing', async function() { const remoteDocs = [dstFileTrashed, srcFileMoved] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1406,7 +1425,7 @@ describe('RemoteWatcher', function () { return props }) - it('is detected when moved source is first', async function () { + it('is detected when moved source is first', async function() { const remoteDocs = [srcMoved, dstTrashed] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1423,7 +1442,7 @@ describe('RemoteWatcher', function () { ]) }) - it('is detected when trashed destination is first', async function () { + it('is detected when trashed destination is first', async function() { const remoteDocs = [dstTrashed, srcMoved] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1496,7 +1515,7 @@ describe('RemoteWatcher', function () { describe('when moved source is first', () => { onPlatforms(['win32', 'darwin'], () => { - it('sorts the trashing before the move to prevent id confusion', async function () { + it('sorts the trashing before the move to prevent id confusion', async function() { const remoteDocs = [srcMoved, dstTrashed] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1515,7 +1534,7 @@ describe('RemoteWatcher', function () { }) 
onPlatform('linux', () => { - it('sorts the trashing before the move ', async function () { + it('sorts the trashing before the move ', async function() { const remoteDocs = [srcMoved, dstTrashed] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1536,7 +1555,7 @@ describe('RemoteWatcher', function () { describe('when trashed destination is first', () => { onPlatforms(['win32', 'darwin'], () => { - it('sorts the trashing before the move to prevent id confusion', async function () { + it('sorts the trashing before the move to prevent id confusion', async function() { const remoteDocs = [dstTrashed, srcMoved] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1555,7 +1574,7 @@ describe('RemoteWatcher', function () { }) onPlatform('linux', () => { - it('sorts the trashing before the move', async function () { + it('sorts the trashing before the move', async function() { const remoteDocs = [dstTrashed, srcMoved] const changes = await this.watcher.analyse(remoteDocs, olds) should(relevantChangesProps(changes)).deepEqual([ @@ -1576,8 +1595,11 @@ describe('RemoteWatcher', function () { }) describe('descendantMoves', () => { - it('handles correctly descendantMoves', async function () { - const remoteDir1 = builders.remoteDir().name('src').build() + it('handles correctly descendantMoves', async function() { + const remoteDir1 = builders + .remoteDir() + .name('src') + .build() const remoteDir2 = builders .remoteDir() .name('parent') @@ -1679,7 +1701,7 @@ describe('RemoteWatcher', function () { }) describe('identifyAll', () => { - it('identifies all descendant moves', function () { + it('identifies all descendant moves', function() { const remotePaths = [ ['parent/', 1], ['parent/src/', 1], @@ -1799,7 +1821,7 @@ describe('RemoteWatcher', function () { ]) }) - it('identifies move from inside move', function () { + it('identifies move from inside move', function() { const remotePaths = [ ['parent/', 1], ['parent/src/', 1], @@ -1982,7 +2004,7 @@ describe('RemoteWatcher', function () { ] }) - it('sorts correctly order1', function () { + it('sorts correctly order1', function() { const order1 = [ remoteDocsByPath['parent/dst/dir2/'], remoteDocsByPath['parent/dst/'], @@ -2021,7 +2043,7 @@ describe('RemoteWatcher', function () { ]) }) - it('sorts correctly order2', function () { + it('sorts correctly order2', function() { const order2 = [ remoteDocsByPath['parent/dst/dir2/subdir/'], remoteDocsByPath['parent/dst/'], @@ -2062,9 +2084,12 @@ describe('RemoteWatcher', function () { }) }) - describe('identifyChange', function () { - it('does not fail when the path is missing', function () { - const remoteDoc = builders.remoteFile().name('whatever').build() + describe('identifyChange', function() { + it('does not fail when the path is missing', function() { + const remoteDoc = builders + .remoteFile() + .name('whatever') + .build() remoteDoc.path = '' const change /*: RemoteInvalidChange */ = this.watcher.identifyChange( @@ -2081,8 +2106,11 @@ describe('RemoteWatcher', function () { // TODO: file without checksum onPlatform('win32', () => { - it('detects path/platform incompatibilities if any', async function () { - const remoteDir = builders.remoteDir().name('f:oo').build() + it('detects path/platform incompatibilities if any', async function() { + const remoteDir = builders + .remoteDir() + .name('f:oo') + .build() const remoteDoc = builders .remoteFile() .inDir(remoteDir) @@ -2118,7 
+2146,7 @@ describe('RemoteWatcher', function () { ]) }) - it('does not detect any when file/dir is in the trash', async function () { + it('does not detect any when file/dir is in the trash', async function() { const remoteDoc = builders .remoteFile() .name('f:oo/br') @@ -2136,8 +2164,11 @@ describe('RemoteWatcher', function () { }) onPlatform('darwin', () => { - it('does not mistakenly assume a new file is incompatible', async function () { - const remoteDir = builders.remoteDir().name('f:oo').build() + it('does not mistakenly assume a new file is incompatible', async function() { + const remoteDir = builders + .remoteDir() + .name('f:oo') + .build() const remoteDoc = builders .remoteFile() .inDir(remoteDir) @@ -2155,7 +2186,7 @@ describe('RemoteWatcher', function () { }) }) - it('calls addDoc for a new doc', async function () { + it('calls addDoc for a new doc', async function() { this.prep.addFileAsync = sinon.stub() this.prep.addFileAsync.resolves(null) const remoteDoc = builders @@ -2185,7 +2216,7 @@ describe('RemoteWatcher', function () { should(change.doc).not.have.properties(['_rev', 'path', 'name']) }) - it('calls updateDoc when tags are updated', async function () { + it('calls updateDoc when tags are updated', async function() { this.prep.updateFileAsync = sinon.stub() this.prep.updateFileAsync.resolves(null) const remoteDoc = builders @@ -2218,7 +2249,7 @@ describe('RemoteWatcher', function () { }) }) - it('calls updateDoc when content is overwritten', async function () { + it('calls updateDoc when content is overwritten', async function() { this.prep.updateFileAsync = sinon.stub().resolves(null) const remoteDoc = builders @@ -2252,7 +2283,7 @@ describe('RemoteWatcher', function () { should(change.doc).not.have.properties(['_rev', 'path', 'name']) }) - it('calls moveFile when file is renamed', async function () { + it('calls moveFile when file is renamed', async function() { this.prep.moveFileAsync = sinon.stub() this.prep.moveFileAsync.resolves(null) const remoteDoc = builders @@ -2287,10 +2318,13 @@ describe('RemoteWatcher', function () { should(change.doc).not.have.properties(['_rev', 'path', 'name']) }) - it('calls moveFile when file is moved', async function () { + it('calls moveFile when file is moved', async function() { this.prep.moveFileAsync = sinon.stub() this.prep.moveFileAsync.resolves(null) - const remoteDir = builders.remoteDir().name('other-folder').build() + const remoteDir = builders + .remoteDir() + .name('other-folder') + .build() const remoteDoc = builders .remoteFile(remoteTree['my-folder/file-2']) .inDir(remoteDir) @@ -2324,7 +2358,7 @@ describe('RemoteWatcher', function () { should(change.doc).not.have.properties(['_rev', 'path', 'name']) }) - it('detects when file was both moved and updated', async function () { + it('detects when file was both moved and updated', async function() { const file = await builders .remoteFile() .name('meow.txt') @@ -2348,12 +2382,20 @@ describe('RemoteWatcher', function () { type: 'FileMove', update: true }) - should(change).have.propertyByPath('was', 'path').eql(was.path) - should(change).have.propertyByPath('doc', 'path').eql(file.name) + should(change) + .have.propertyByPath('was', 'path') + .eql(was.path) + should(change) + .have.propertyByPath('doc', 'path') + .eql(file.name) }) - it('is invalid when local or remote file is corrupt', async function () { - const remoteDoc = builders.remoteFile().size('123').shortRev(1).build() + it('is invalid when local or remote file is corrupt', async function() { + const remoteDoc 
= builders + .remoteFile() + .size('123') + .shortRev(1) + .build() const was /*: Metadata */ = builders .metafile() .fromRemote(remoteDoc) @@ -2373,17 +2415,23 @@ describe('RemoteWatcher', function () { should(change.error).match(/corrupt/) }) - xit('calls deleteDoc & addDoc when trashed', async function () { + xit('calls deleteDoc & addDoc when trashed', async function() { this.prep.deleteFolderAsync = sinon.stub() this.prep.deleteFolderAsync.returnsPromise().resolves(null) this.prep.putFolderAsync = sinon.stub() this.prep.putFolderAsync.returnsPromise().resolves(null) - const oldDir = builders.remoteDir().name('foo').build() + const oldDir = builders + .remoteDir() + .name('foo') + .build() const oldMeta /*: Metadata */ = await builders .metadir() .fromRemote(oldDir) .create() - const newDir = builders.remoteDir(oldDir).trashed().build() + const newDir = builders + .remoteDir(oldDir) + .trashed() + .build() this.watcher.identifyChange(newDir, null, [], []) @@ -2399,16 +2447,23 @@ describe('RemoteWatcher', function () { should(addArgs[1]).have.properties(metadata.fromRemoteDoc(newDir)) }) - xit('calls deleteDoc & addDoc when restored', async function () { + xit('calls deleteDoc & addDoc when restored', async function() { this.prep.deleteFolder = sinon.stub() this.prep.deleteFolder.returnsPromise().resolves(null) this.prep.putFolderAsync = sinon.stub() this.prep.putFolderAsync.returnsPromise().resolves(null) - const oldDir = builders.remoteDir().name('foo').trashed().build() + const oldDir = builders + .remoteDir() + .name('foo') + .trashed() + .build() const oldMeta /*: Metadata */ = await builders.metadir .fromRemote(oldDir) .create() - const newDir = builders.remoteDir(oldDir).restored().build() + const newDir = builders + .remoteDir(oldDir) + .restored() + .build() this.watcher.identifyChange(newDir, null, [], []) @@ -2425,7 +2480,7 @@ describe('RemoteWatcher', function () { }) describe('restored file before trashing was synced', () => { - it('returns a FileAddition', function () { + it('returns a FileAddition', function() { const origFile = builders .remoteFile() .name('foo') @@ -2456,15 +2511,21 @@ describe('RemoteWatcher', function () { }) describe('file moved while deleted on local filesystem', () => { - it('returns a FileMove', function () { - const origFile = builders.remoteFile().name('foo').build() + it('returns a FileMove', function() { + const origFile = builders + .remoteFile() + .name('foo') + .build() const trashedFile = builders .metafile() .fromRemote(origFile) .trashed() .changedSide('local') .build() - const movedFile = builders.remoteFile(origFile).name('bar').build() + const movedFile = builders + .remoteFile(origFile) + .name('bar') + .build() const doc = metadata.fromRemoteDoc(movedFile) @@ -2515,7 +2576,7 @@ describe('RemoteWatcher', function () { should(metadata.extractRevNumber(was.remote)).equal(2) }) - it('assumes the file is up-to-date since remote rev number is lower', async function () { + it('assumes the file is up-to-date since remote rev number is lower', async function() { const change = this.watcher.identifyChange(remoteDoc, was, [], []) should(change.type).equal('UpToDate') }) diff --git a/test/unit/sync/dependency_graph.js b/test/unit/sync/dependency_graph.js index 1af8021fc..640f28123 100644 --- a/test/unit/sync/dependency_graph.js +++ b/test/unit/sync/dependency_graph.js @@ -3,13 +3,14 @@ const should = require('should') const { DependencyGraph } = require('../../../core/sync/dependency_graph') // The dependencies are defined as a map of Path -> 
Path dependencies -const dependencyBasedCompare = - dependencies => - ({ doc: { path: pathA } }, { doc: { path: pathB } }) => { - if (dependencies[pathA] && dependencies[pathA].includes(pathB)) return 1 - if (dependencies[pathB] && dependencies[pathB].includes(pathA)) return -1 - return 0 - } +const dependencyBasedCompare = dependencies => ( + { doc: { path: pathA } }, + { doc: { path: pathB } } +) => { + if (dependencies[pathA] && dependencies[pathA].includes(pathB)) return 1 + if (dependencies[pathB] && dependencies[pathB].includes(pathA)) return -1 + return 0 +} describe('DependencyGraph', () => { describe('toArray', () => { diff --git a/test/unit/sync/index.js b/test/unit/sync/index.js index f9b55da16..11de5834b 100644 --- a/test/unit/sync/index.js +++ b/test/unit/sync/index.js @@ -1,25 +1,26 @@ /* eslint-env mocha */ /* @flow */ -const _ = require('lodash') -const sinon = require('sinon') -const should = require('should') const EventEmitter = require('events') + const { Promise } = require('bluebird') +const _ = require('lodash') +const should = require('should') +const sinon = require('sinon') + const { FetchError } = require('cozy-stack-client') const { Ignore } = require('../../../core/ignore') const metadata = require('../../../core/metadata') +const remoteErrors = require('../../../core/remote/errors') const { otherSide } = require('../../../core/side') const { Sync, compareChanges } = require('../../../core/sync') -const remoteErrors = require('../../../core/remote/errors') const syncErrors = require('../../../core/sync/errors') - +const Builders = require('../../support/builders') +const dbBuilders = require('../../support/builders/db') const stubSide = require('../../support/doubles/side') const configHelpers = require('../../support/helpers/config') const pouchHelpers = require('../../support/helpers/pouch') -const Builders = require('../../support/builders') -const dbBuilders = require('../../support/builders/db') /*:: import type { SavedMetadata } from '../../../core/metadata' @@ -45,13 +46,13 @@ const remoteSyncError = (msg, doc) => doc }) -describe('Sync', function () { +describe('Sync', function() { before('instanciate config', configHelpers.createConfig) beforeEach('instanciate pouch', pouchHelpers.createDatabase) afterEach('clean pouch', pouchHelpers.cleanDatabase) after('clean config directory', configHelpers.cleanConfig) - beforeEach('instanciate sync', function () { + beforeEach('instanciate sync', function() { this.local = stubSide('local') this.remote = stubSide('remote') this.ignore = new Ignore(['ignored']) @@ -66,17 +67,17 @@ describe('Sync', function () { ) }) - afterEach(async function () { + afterEach(async function() { await this.sync.stop() }) let builders - beforeEach('prepare builders', function () { + beforeEach('prepare builders', function() { builders = new Builders(this) }) - describe('start', function () { - beforeEach('instanciate sync', function () { + describe('start', function() { + beforeEach('instanciate sync', function() { const events = new EventEmitter() this.local.start = sinon.stub().resolves() @@ -102,7 +103,7 @@ describe('Sync', function () { sinon.spy(this.sync.events, 'emit') }) - it('starts the metadata replication of both sides', async function () { + it('starts the metadata replication of both sides', async function() { this.sync.start() await this.sync.started() should(this.local.start).have.been.calledOnce() @@ -111,89 +112,89 @@ describe('Sync', function () { }) context('if local watcher fails to start', () => { - 
beforeEach(function () { + beforeEach(function() { this.local.start = sinon.stub().rejects(new Error('failed')) }) - it('does not start replication', async function () { + it('does not start replication', async function() { await this.sync.start() should(this.sync.sync).not.have.been.called() }) - it('does not start remote watcher', async function () { + it('does not start remote watcher', async function() { await this.sync.start() should(this.remote.start).not.have.been.called() }) - it('stops local watcher', async function () { + it('stops local watcher', async function() { await this.sync.start() should(this.local.stop).have.been.calledOnce() }) - it('emits a Sync:fatal event', async function () { + it('emits a Sync:fatal event', async function() { await this.sync.start() should(this.sync.events.emit).have.been.calledWith('Sync:fatal') }) }) context('if remote watcher throws fatal error during start', () => { - beforeEach(function () { + beforeEach(function() { this.remote.start = sinon.stub().callsFake(() => { this.remote.watcher.fatal(new Error('failed')) }) }) - it('starts local watcher', async function () { + it('starts local watcher', async function() { await this.sync.start() should(this.local.start).have.been.calledOnce() }) - it('stops local watcher', async function () { + it('stops local watcher', async function() { await this.sync.start() should(this.local.stop).have.been.calledOnce() }) - it('stops remote watcher', async function () { + it('stops remote watcher', async function() { await this.sync.start() should(this.remote.stop).have.been.calledOnce() }) - it('emits a Sync:fatal event', async function () { + it('emits a Sync:fatal event', async function() { await this.sync.start() should(this.sync.events.emit).have.been.calledWith('Sync:fatal') }) - it('stops replication', async function () { + it('stops replication', async function() { await this.sync.start() should(this.sync.stop).have.been.called() }) }) context('if local watcher rejects while running', () => { - beforeEach(async function () { + beforeEach(async function() { this.sync.start() await this.sync.started() }) - it('stops replication', async function () { + it('stops replication', async function() { this.local.watcher.fatal(new Error('failed')) await this.sync.stopped() should(this.sync.stop).have.been.calledOnce() }) - it('stops local watcher', async function () { + it('stops local watcher', async function() { this.local.watcher.fatal(new Error('failed')) await this.sync.stopped() should(this.local.stop).have.been.calledOnce() }) - it('stops remote watcher', async function () { + it('stops remote watcher', async function() { this.local.watcher.fatal(new Error('failed')) await this.sync.stopped() should(this.remote.stop).have.been.calledOnce() }) - it('emits a Sync:fatal event', async function () { + it('emits a Sync:fatal event', async function() { this.local.watcher.fatal(new Error('failed')) await this.sync.stopped() should(this.sync.events.emit).have.been.calledWith('Sync:fatal') @@ -203,19 +204,19 @@ describe('Sync', function () { // TODO: Test lock request/acquisition/release - describe('sync', function () { + describe('sync', function() { let eventsStub - beforeEach('stub lifecycle', function () { + beforeEach('stub lifecycle', function() { eventsStub = sinon.stub(this.sync, 'events').returns(new EventEmitter()) this.sync.lifecycle.transitionTo('done-start') }) - afterEach('restore lifecycle', function () { + afterEach('restore lifecycle', function() { this.sync.events.emit('stopped') 
eventsStub.restore() this.sync.lifecycle.transitionTo('done-stop') }) - it('waits for and applies available changes', async function () { + it('waits for and applies available changes', async function() { const apply = sinon.stub(this.sync, 'apply') apply.callsFake(change => this.pouch.setLocalSeq(change.seq)) @@ -238,8 +239,8 @@ describe('Sync', function () { }) }) - describe('apply', function () { - it('does nothing for an ignored document', async function () { + describe('apply', function() { + it('does nothing for an ignored document', async function() { const doc = await builders .metadir() .path('ignored') @@ -257,8 +258,12 @@ describe('Sync', function () { should(this.sync.applyDoc).have.not.been.called() }) - it('does nothing for an up-to-date document', async function () { - const doc = await builders.metadir().path('foo').upToDate().create() + it('does nothing for an up-to-date document', async function() { + const doc = await builders + .metadir() + .path('foo') + .upToDate() + .create() const change /*: Change */ = { changes: [{ rev: doc._rev }], doc, @@ -271,7 +276,7 @@ describe('Sync', function () { should(this.sync.applyDoc).have.not.been.called() }) - it('does nothing for an up-to-date _deleted document', async function () { + it('does nothing for an up-to-date _deleted document', async function() { const doc = await builders .metadir() .path('foo') @@ -290,7 +295,7 @@ describe('Sync', function () { should(this.sync.applyDoc).have.not.been.called() }) - it('trashes a locally deleted file', async function () { + it('trashes a locally deleted file', async function() { const doc = await builders .metafile() .path('foo') @@ -321,7 +326,7 @@ describe('Sync', function () { } }) - it('trashes a locally deleted folder with content', async function () { + it('trashes a locally deleted folder with content', async function() { const deletedChild = await builders .metadata() .path('foo/bar') @@ -359,7 +364,7 @@ describe('Sync', function () { } }) - it('skips trashing a locally deleted file if its parent is deleted', async function () { + it('skips trashing a locally deleted file if its parent is deleted', async function() { const deletedChild = await builders .metadata() .path('foo/bar') @@ -397,7 +402,7 @@ describe('Sync', function () { } }) - it('calls applyDoc for a modified file', async function () { + it('calls applyDoc for a modified file', async function() { const initial = await builders .metafile() .path('foo/bar') @@ -432,7 +437,7 @@ describe('Sync', function () { should(await this.pouch.getLocalSeq()).equal(123) }) - it('calls applyDoc for a modified folder', async function () { + it('calls applyDoc for a modified folder', async function() { const initial = await builders .metadir() .path('foo/baz') @@ -464,7 +469,7 @@ describe('Sync', function () { should(await this.pouch.getLocalSeq()).equal(124) }) - it('calls addFileAsync for an added file', async function () { + it('calls addFileAsync for an added file', async function() { const doc = await builders .metafile() .path('foo/bar') @@ -475,7 +480,7 @@ describe('Sync', function () { should(this.remote.addFileAsync).have.been.calledWith(doc) }) - it('calls overwriteFileAsync for an overwritten file', async function () { + it('calls overwriteFileAsync for an overwritten file', async function() { const initial = await builders .metafile() .path('overwrite/foo/bar') @@ -501,7 +506,7 @@ describe('Sync', function () { let file, merged, change /*: Change */ - beforeEach('set up merged local file update', async function () { + 
beforeEach('set up merged local file update', async function() { file = await builders .metafile() .upToDate() @@ -525,17 +530,17 @@ describe('Sync', function () { } sinon.stub(this.sync, 'getNextChanges').returns([change]) }) - afterEach(function () { + afterEach(function() { this.sync.getNextChanges.restore() }) describe('when apply throws a NEEDS_REMOTE_MERGE_CODE error', () => { - beforeEach(function () { + beforeEach(function() { sinon.stub(this.sync, 'blockSyncFor').callsFake(() => { this.sync.lifecycle.transitionTo('done-stop') }) }) - beforeEach('simulate error', async function () { + beforeEach('simulate error', async function() { this.sync.lifecycle.transitionTo('done-start') sinon.stub(this.sync, 'apply').rejects( new syncErrors.SyncError({ @@ -553,15 +558,15 @@ describe('Sync', function () { await this.sync.syncBatch() this.sync.apply.restore() }) - afterEach(function () { + afterEach(function() { this.sync.blockSyncFor.restore() }) - it('removes moveFrom and overwrite attributes', async function () { + it('removes moveFrom and overwrite attributes', async function() { should(change.doc).not.have.properties(['moveFrom', 'overwrite']) }) - it('blocks the synchronization so we can retry applying the change', async function () { + it('blocks the synchronization so we can retry applying the change', async function() { should(this.sync.blockSyncFor).have.been.calledOnce() should(this.sync.blockSyncFor).have.been.calledWithMatch({ err: { code: remoteErrors.NEEDS_REMOTE_MERGE_CODE }, @@ -571,7 +576,7 @@ describe('Sync', function () { }) }) - it('calls updateFileMetadataAsync with previous revision for updated file metadata', async function () { + it('calls updateFileMetadataAsync with previous revision for updated file metadata', async function() { const doc = await builders .metafile() .path('udpate/foo/without-errors') @@ -594,7 +599,7 @@ describe('Sync', function () { should(this.remote.updateFileMetadataAsync).have.been.calledWith(updated) }) - it('calls moveAsync for a moved file', async function () { + it('calls moveAsync for a moved file', async function() { const was = await builders .metafile() .path('foo/bar') @@ -614,7 +619,7 @@ describe('Sync', function () { should(this.remote.moveAsync).have.been.calledWith(doc, was) }) - it('calls moveAsync and overwriteFileAsync for a moved-updated file', async function () { + it('calls moveAsync and overwriteFileAsync for a moved-updated file', async function() { const was = await builders .metafile() .path('foo/bar') @@ -638,7 +643,7 @@ describe('Sync', function () { should(this.remote.overwriteFileAsync).have.been.calledWith(doc) }) - it('calls trashAsync for a deleted synced file', async function () { + it('calls trashAsync for a deleted synced file', async function() { const doc = await builders .metafile() .path('foo') @@ -649,7 +654,7 @@ describe('Sync', function () { should(this.local.trashAsync).have.been.calledWith(doc) }) - it('does nothing for a deleted file that was not synced', async function () { + it('does nothing for a deleted file that was not synced', async function() { const doc = await builders .metafile() .path('tmp/fooz') @@ -660,7 +665,7 @@ describe('Sync', function () { should(this.remote.trashAsync).not.have.been.called() }) - it('calls addFolderAsync for an added folder', async function () { + it('calls addFolderAsync for an added folder', async function() { const doc = await builders .metadir() .path('foobar/bar') @@ -672,7 +677,7 @@ describe('Sync', function () { 
should(this.remote.addFolderAsync).have.been.calledWith(doc) }) - it('does not call updateFolderAsync for an updated folder', async function () { + it('does not call updateFolderAsync for an updated folder', async function() { const initial = await builders .metadir() .path('foobar/baz') @@ -687,7 +692,7 @@ describe('Sync', function () { should(this.local.updateFolderAsync).not.have.been.calledWith(doc) }) - it('calls moveAsync for a moved folder', async function () { + it('calls moveAsync for a moved folder', async function() { const was = await builders .metadir() .path('foobar/bar') @@ -707,7 +712,7 @@ describe('Sync', function () { should(this.remote.moveAsync).have.been.calledWith(doc, was) }) - it('calls trashAsync for a deleted synced folder', async function () { + it('calls trashAsync for a deleted synced folder', async function() { const doc = await builders .metadir() .path('baz') @@ -718,7 +723,7 @@ describe('Sync', function () { should(this.local.trashAsync).have.been.calledWith(doc) }) - it('does nothing for a deleted folder that was not added', async function () { + it('does nothing for a deleted folder that was not added', async function() { const doc = await builders .metadir() .path('tmp/foobaz') @@ -730,8 +735,8 @@ describe('Sync', function () { }) }) - describe('updateErrors', function () { - it('retries on first local -> remote sync error', async function () { + describe('updateErrors', function() { + it('retries on first local -> remote sync error', async function() { const doc = await builders .metadata() .path('first/failure') @@ -748,7 +753,7 @@ describe('Sync', function () { should(actual._rev).not.equal(doc._rev) }) - it('retries on second remote -> local sync error', async function () { + it('retries on second remote -> local sync error', async function() { const doc = await builders .metadata() .path('second/failure') @@ -768,7 +773,7 @@ describe('Sync', function () { }) for (const syncSide of ['local', 'remote']) { - describe(`updateRevs at end of ${syncSide} Sync`, function () { + describe(`updateRevs at end of ${syncSide} Sync`, function() { const mergedSide = otherSide(syncSide) const updateRevs = ({ sync }, doc) => @@ -776,7 +781,7 @@ describe('Sync', function () { let doc, upToDate, syncedTarget, mergedTarget - beforeEach(async function () { + beforeEach(async function() { upToDate = await builders .metadata() .upToDate() // 2, 2 @@ -792,8 +797,8 @@ describe('Sync', function () { .create() // rev == 3 }) - context('without changes merged during Sync', function () { - it('marks doc as up-to-date', async function () { + context('without changes merged during Sync', function() { + it('marks doc as up-to-date', async function() { await updateRevs(this, _.cloneDeep(doc)) const updated = await this.pouch.bySyncedPath(doc.path) @@ -805,10 +810,10 @@ describe('Sync', function () { for (const extraChanges of [1, 2]) { context( `with ${extraChanges} ${mergedSide} changes merged during Sync`, - function () { + function() { let updated - beforeEach(async function () { + beforeEach(async function() { await builders .metadata(doc) .sides({ @@ -822,7 +827,7 @@ describe('Sync', function () { updated = await this.pouch.bySyncedPath(doc.path) }) - it(`keeps ${syncSide} out-of-date information`, async function () { + it(`keeps ${syncSide} out-of-date information`, async function() { should(metadata.outOfDateSide(updated)).equal(syncSide) }) @@ -832,7 +837,7 @@ describe('Sync', function () { ) }) - it(`keeps the doc rev coherent with its ${mergedSide} side`, async function () 
{ + it(`keeps the doc rev coherent with its ${mergedSide} side`, async function() { should(metadata.target(updated)).equal( metadata.side(updated, mergedSide) ) @@ -843,8 +848,8 @@ describe('Sync', function () { }) } - describe('selectSide', function () { - it('selects the local side if remote is up-to-date', function () { + describe('selectSide', function() { + it('selects the local side if remote is up-to-date', function() { const doc1 = builders .metafile() .path('selectSide/1') @@ -860,7 +865,7 @@ describe('Sync', function () { should(this.sync.selectSide({ doc: doc2 })).eql(this.sync.local) }) - it('selects the remote side if local is up-to-date', function () { + it('selects the remote side if local is up-to-date', function() { const doc1 = builders .metafile() .path('selectSide/3') @@ -876,7 +881,7 @@ describe('Sync', function () { should(this.sync.selectSide({ doc: doc2 })).eql(this.sync.remote) }) - it('returns an empty array if both sides are up-to-date', function () { + it('returns an empty array if both sides are up-to-date', function() { const doc = builders .metafile() .path('selectSide/5') @@ -885,7 +890,7 @@ describe('Sync', function () { should(this.sync.selectSide({ doc })).be.null() }) - it('returns an empty array if a local only doc is erased', function () { + it('returns an empty array if a local only doc is erased', function() { const doc = builders .metafile() .path('selectSide/5') @@ -895,7 +900,7 @@ describe('Sync', function () { should(this.sync.selectSide({ doc })).be.null() }) - it('returns an empty array if a remote only doc is erased', function () { + it('returns an empty array if a remote only doc is erased', function() { const doc = builders .metafile() .path('selectSide/5') @@ -907,14 +912,14 @@ describe('Sync', function () { }) describe('blockSyncFor', () => { - beforeEach(function () { + beforeEach(function() { sinon.spy(this.events, 'emit') this.remote.watcher = { start: sinon.stub().returns(), stop: sinon.stub().returns() } }) - afterEach(function () { + afterEach(function() { delete this.remote.watcher this.events.emit.restore() }) @@ -931,7 +936,7 @@ describe('Sync', function () { ), 'remote' ) - beforeEach(function () { + beforeEach(function() { this.sync.blockSyncFor({ err: unknownSyncError }) @@ -940,7 +945,7 @@ describe('Sync', function () { ) }) - it('replaces the old reason with the new one', async function () { + it('replaces the old reason with the new one', async function() { this.sync.blockSyncFor({ err: unreachableSyncError }) @@ -961,17 +966,17 @@ describe('Sync', function () { ), 'remote' ) - beforeEach(function () { + beforeEach(function() { this.sync.blockSyncFor({ err: unreachableSyncError }) }) - it('emits offline event', function () { + it('emits offline event', function() { should(this.events.emit).have.been.calledWith('offline') }) - it('stops the remote watcher', function () { + it('stops the remote watcher', function() { should(this.remote.watcher.stop).have.been.called() }) @@ -982,7 +987,7 @@ describe('Sync', function () { // new interval without clearing the one created by the Sync error. // It this case we could have an endless Sync error retry loop. This test // checks that this does not occur. 
- it('does not allow multiple retry intervals', async function () { + it('does not allow multiple retry intervals', async function() { const unreachableRemoteError = remoteErrors.wrapError( new FetchError( { type: 'system', code: 'ENOTFOUND', errno: 'ENOTFOUND' }, @@ -1010,7 +1015,7 @@ describe('Sync', function () { describe('retry', () => { context('after Cozy is reachable again', () => { - beforeEach(async function () { + beforeEach(async function() { // Reset calls history this.events.emit.resetHistory() @@ -1023,17 +1028,17 @@ describe('Sync', function () { await Promise.delay(1000) }) - it('emits online event', async function () { + it('emits online event', async function() { should(this.events.emit).have.been.calledWith('online') }) - it('restarts the remote watcher', function () { + it('restarts the remote watcher', function() { should(this.remote.watcher.start).have.been.called() }) }) context('while Cozy is still unreachable', () => { - beforeEach(async function () { + beforeEach(async function() { // Reset calls history this.events.emit.resetHistory() @@ -1046,11 +1051,11 @@ describe('Sync', function () { await Promise.delay(1000) }) - it('emits offline event', async function () { + it('emits offline event', async function() { should(this.events.emit).have.been.calledWith('offline') }) - it('does not restart the remote watcher', function () { + it('does not restart the remote watcher', function() { should(this.remote.watcher.start).not.have.been.called() }) }) @@ -1064,7 +1069,7 @@ describe('Sync', function () { const seq = 2 beforeEach( 'set up merged local overwriting file move with update', - async function () { + async function() { const overwritten = await builders .metafile() .path('dst') @@ -1102,7 +1107,7 @@ describe('Sync', function () { } ) - beforeEach(function () { + beforeEach(function() { this.sync.blockSyncFor({ err: syncErrors.wrapError( remoteErrors.wrapError( @@ -1119,7 +1124,7 @@ describe('Sync', function () { }) describe('retry', () => { - beforeEach(async function () { + beforeEach(async function() { // Reset calls history this.events.emit.resetHistory() @@ -1129,23 +1134,23 @@ describe('Sync', function () { await Promise.delay(1000) }) - it('increases the record errors counter', async function () { + it('increases the record errors counter', async function() { const errors = merged.errors || 0 const synced = await this.pouch.bySyncedPath(merged.path) should(synced.errors).equal(errors + 1) }) - it('does not skip the change by saving seq', async function () { + it('does not skip the change by saving seq', async function() { should(await this.pouch.getLocalSeq()).equal(previousSeq) }) - it('keeps the out-of-date side', async function () { + it('keeps the out-of-date side', async function() { const outOfDateSide = metadata.outOfDateSide(merged) const synced = await this.pouch.bySyncedPath(merged.path) should(metadata.outOfDateSide(synced)).equal(outOfDateSide) }) - it('removes moveFrom and overwrite attributes', async function () { + it('removes moveFrom and overwrite attributes', async function() { // It actually only saves the record and the attributes need to be // removed before. // But this is the goal. 
@@ -1183,13 +1188,17 @@ describe('Sync', function () { } context('when one of the changes has no side', () => { - it('returns 0', async function () { + it('returns 0', async function() { const docA = await builders .metafile() .path('dir/file') .upToDate() .create() - const docB = await builders.metadir().path('dir').upToDate().create() + const docB = await builders + .metadir() + .path('dir') + .upToDate() + .create() const docC = await builders .metadir() .path('dir/subdir') @@ -1218,7 +1227,7 @@ describe('Sync', function () { }) context('when passing the same change twice', () => { - it('returns 0', async function () { + it('returns 0', async function() { const add = await builders .metafile() .path('add') @@ -1230,7 +1239,11 @@ describe('Sync', function () { .trashed() .changedSide('local') .create() - const src = await builders.metafile().path('src').upToDate().create() + const src = await builders + .metafile() + .path('src') + .upToDate() + .create() const move = await builders .metafile() .moveFrom(src) @@ -1252,7 +1265,7 @@ describe('Sync', function () { 'with a move outside a directory and the deletion of said directory', () => { let move, del - beforeEach(async function () { + beforeEach(async function() { const dir = await builders .metadir() .path('dir') @@ -1289,7 +1302,7 @@ describe('Sync', function () { 'with a move within a directory and the deletion of said directory', () => { let move, del - beforeEach(async function () { + beforeEach(async function() { const dir = await builders .metadir() .path('dir') @@ -1323,7 +1336,7 @@ describe('Sync', function () { 'with a move into a directory and the deletion of said directory', () => { let move, del - beforeEach(async function () { + beforeEach(async function() { const dir = await builders .metadir() .path('dir') @@ -1355,8 +1368,12 @@ describe('Sync', function () { context('with a directory move and an addition into said directory', () => { let move, add - beforeEach(async function () { - const srcDir = await builders.metadir().path('src').upToDate().create() + beforeEach(async function() { + const srcDir = await builders + .metadir() + .path('src') + .upToDate() + .create() const dstDir = await builders .metadir() .moveFrom(srcDir) @@ -1386,7 +1403,7 @@ describe('Sync', function () { 'with a directory addition and an addition into said directory', () => { let addDir, addFile - beforeEach(async function () { + beforeEach(async function() { const dir = await builders .metadir() .path('dir') @@ -1414,7 +1431,7 @@ describe('Sync', function () { context('with a directory addition and a move into said directory', () => { let addDir, moveFile - beforeEach(async function () { + beforeEach(async function() { const dir = await builders .metadir() .path('dir') @@ -1449,7 +1466,7 @@ describe('Sync', function () { 'with a directory deletion and a deletion within said directory', () => { let delDir, delFile - beforeEach(async function () { + beforeEach(async function() { const dir = await builders .metadir() .path('dir') @@ -1479,8 +1496,12 @@ describe('Sync', function () { context('with a directory move and the move of one of its children', () => { let moveDir, moveFile - beforeEach(async function () { - const srcDir = await builders.metadir().path('src').upToDate().create() + beforeEach(async function() { + const srcDir = await builders + .metadir() + .path('src') + .upToDate() + .create() const dstDir = await builders .metadir() .moveFrom(srcDir) @@ -1519,7 +1540,7 @@ describe('Sync', function () { 'with a directory addition and an 
addition into said directory', () => { let addDir, addFile - beforeEach(async function () { + beforeEach(async function() { const dir = await builders .metadir() .path('dir') diff --git a/test/unit/utils/conflicts.js b/test/unit/utils/conflicts.js index d008cc8f4..554609fa0 100644 --- a/test/unit/utils/conflicts.js +++ b/test/unit/utils/conflicts.js @@ -1,9 +1,10 @@ /* @flow */ /* eslint-env mocha */ -const should = require('should') const path = require('path') +const should = require('should') + const { CONFLICT_REGEXP, generateConflictPath @@ -22,12 +23,16 @@ describe('Conflicts.generateConflictPath()', () => { it('returns a path with a conflict suffix', () => { const conflictPath = generateConflictPath(filepath) - should(conflictPath).be.a.String().and.match(CONFLICT_REGEXP) + should(conflictPath) + .be.a.String() + .and.match(CONFLICT_REGEXP) }) it('returns a path within the same parent', () => { const conflictPath = generateConflictPath(filepath) - should(conflictPath).be.a.String().and.startWith(ancestors) + should(conflictPath) + .be.a.String() + .and.startWith(ancestors) if (ancestors !== '') should(conflictPath).not.containEql(ancestors + ancestors) @@ -35,7 +40,9 @@ describe('Conflicts.generateConflictPath()', () => { it('returns a path with the same extension', () => { const conflictPath = generateConflictPath(filepath) - should(conflictPath).be.a.String().and.endWith(ext) + should(conflictPath) + .be.a.String() + .and.endWith(ext) }) it('returns a path with up to the first 180 characters of the original path', () => { @@ -111,7 +118,8 @@ describe('Conflicts.generateConflictPath()', () => { context('with long file name', () => { runSharedExamples({ - base: 'Lorem ipsum dolor sit amet consectetur adipiscing elit Nam a velit at dolor euismod tincidunt sit amet id ante Cras vehicula lectus purus In lobortis risus lectus vitae rhoncus quam porta nullam', + base: + 'Lorem ipsum dolor sit amet consectetur adipiscing elit Nam a velit at dolor euismod tincidunt sit amet id ante Cras vehicula lectus purus In lobortis risus lectus vitae rhoncus quam porta nullam', ext: '.pdf' }) }) diff --git a/test/unit/utils/fs.js b/test/unit/utils/fs.js index 49359b92c..63c7ba451 100644 --- a/test/unit/utils/fs.js +++ b/test/unit/utils/fs.js @@ -1,14 +1,14 @@ /* @flow */ /* eslint-env mocha */ -const Promise = require('bluebird') const childProcess = require('child_process') -const fse = require('fs-extra') const path = require('path') + +const Promise = require('bluebird') +const fse = require('fs-extra') const should = require('should') const { hideOnWindows, sendToTrash } = require('../../../core/utils/fs') - const configHelpers = require('../../support/helpers/config') Promise.promisifyAll(childProcess) @@ -22,7 +22,7 @@ describe('utils/fs', () => { const dirName = '.dir-to-hide' let parentPath, dirPath, missingPath - before(async function () { + before(async function() { parentPath = this.syncPath dirPath = path.join(parentPath, dirName) missingPath = path.join(parentPath, 'missing') @@ -49,7 +49,7 @@ describe('utils/fs', () => { }) describe('sendToTrash', () => { - it('removes the given file from the sync directory', async function () { + it('removes the given file from the sync directory', async function() { const fullpath = p => path.join(this.syncPath, p) await fse.ensureDir(fullpath('dir')) await fse.ensureFile(fullpath('dir/file')) @@ -59,7 +59,7 @@ describe('utils/fs', () => { await should(fse.exists(fullpath('dir/file'))).be.fulfilledWith(false) }) - it('removes the given directory and 
its content from the sync directory', async function () {
+    it('removes the given directory and its content from the sync directory', async function() {
       const fullpath = p => path.join(this.syncPath, p)
       await fse.ensureDir(fullpath('dir'))
       await fse.ensureFile(fullpath('dir/file'))
@@ -69,7 +69,7 @@ describe('utils/fs', () => {
       await should(fse.exists(fullpath('dir/file'))).be.fulfilledWith(false)
     })
 
-    it('throws an error with code ENOENT when the document is missing', async function () {
+    it('throws an error with code ENOENT when the document is missing', async function() {
       const fullpath = p => path.join(this.syncPath, p)
       try {
         await fse.remove(fullpath('doc'))
diff --git a/test/unit/utils/mime.js b/test/unit/utils/mime.js
index 996adc323..0b0da7fad 100644
--- a/test/unit/utils/mime.js
+++ b/test/unit/utils/mime.js
@@ -4,9 +4,8 @@
 const sysMime = require('mime')
 const should = require('should')
 
-const mime = require('../../../core/utils/mime')
-
 const { NOTE_MIME_TYPE } = require('../../../core/remote/constants')
+const mime = require('../../../core/utils/mime')
 
 describe('utils/mime', () => {
   it('detects the Cozy Notes mime type', () => {
diff --git a/test/unit/utils/notes.js b/test/unit/utils/notes.js
index 4bba3b208..4d173211f 100644
--- a/test/unit/utils/notes.js
+++ b/test/unit/utils/notes.js
@@ -1,18 +1,18 @@
 /* @flow */
 /* eslint-env mocha */
 
-const should = require('should')
-const path = require('path')
 const os = require('os')
+const path = require('path')
+const should = require('should')
+
+const { findNote, localDoc, remoteDoc } = require('../../../core/utils/notes')
+const Builders = require('../../support/builders')
 const configHelpers = require('../../support/helpers/config')
-const pouchHelpers = require('../../support/helpers/pouch')
 const cozyHelpers = require('../../support/helpers/cozy')
 const { LocalTestHelpers } = require('../../support/helpers/local')
+const pouchHelpers = require('../../support/helpers/pouch')
 const { RemoteTestHelpers } = require('../../support/helpers/remote')
-const Builders = require('../../support/builders')
-
-const { findNote, localDoc, remoteDoc } = require('../../../core/utils/notes')
 
 const cozy = cozyHelpers.cozy
 
@@ -23,17 +23,21 @@ describe('utils/notes', () => {
     afterEach('clean pouch', pouchHelpers.cleanDatabase)
     after('clean config directory', configHelpers.cleanConfig)
 
-    it('returns the Metadata with the given path', async function () {
+    it('returns the Metadata with the given path', async function() {
       const docPath = 'Notes/Some interesting stuff.cozy-note'
       const filePath = path.join(this.config.syncPath, docPath)
 
       const builders = new Builders(this)
-      const doc = await builders.metafile().path(docPath).upToDate().create()
+      const doc = await builders
+        .metafile()
+        .path(docPath)
+        .upToDate()
+        .create()
 
       await should(localDoc(filePath, this)).be.fulfilledWith(doc)
     })
 
-    it('throws a CozyNoteError with code CozyDocumentMissingError if no doc exist with the given path', async function () {
+    it('throws a CozyNoteError with code CozyDocumentMissingError if no doc exist with the given path', async function() {
       const docPath = 'Notes/Some interesting stuff.cozy-note'
       const filePath = path.join(this.config.syncPath, docPath)
 
@@ -42,12 +46,16 @@ describe('utils/notes', () => {
       })
     })
 
-    it('throws a CozyNoteError with code CozyDocumentMissingError if the note is not within the synced folder', async function () {
+    it('throws a CozyNoteError with code CozyDocumentMissingError if the note is not within the synced folder', async function() {
       const docPath = 'Notes/Some interesting stuff.cozy-note'
       const filePath = path.join(os.tmpdir(), docPath)
 
       const builders = new Builders(this)
-      await builders.metafile().path(docPath).upToDate().create()
+      await builders
+        .metafile()
+        .path(docPath)
+        .upToDate()
+        .create()
 
       await should(localDoc(filePath, this)).be.rejectedWith({
         code: 'CozyDocumentMissingError'
@@ -61,12 +69,15 @@ describe('utils/notes', () => {
     beforeEach('clean remote cozy', cozyHelpers.deleteAll)
     after('clean config directory', configHelpers.cleanConfig)
 
-    it('fetches the remote io.cozy.files document associated with the given local doc', async function () {
+    it('fetches the remote io.cozy.files document associated with the given local doc', async function() {
       const docPath = 'Some interesting stuff.cozy-note'
       const remoteHelpers = new RemoteTestHelpers(this)
 
       const builders = new Builders({ cozy })
-      const remote = await builders.remoteFile().name(docPath).create()
+      const remote = await builders
+        .remoteFile()
+        .name(docPath)
+        .create()
       const doc = await builders
         .metafile()
         .fromRemote(remote)
@@ -78,7 +89,7 @@ describe('utils/notes', () => {
       ).be.fulfilledWith(remote)
     })
 
-    it('throws a CozyNoteError with code CozyDocumentMissingError if no remote doc exist for the given local doc', async function () {
+    it('throws a CozyNoteError with code CozyDocumentMissingError if no remote doc exist for the given local doc', async function() {
       const docPath = 'Some interesting stuff.cozy-note'
       const remoteHelpers = new RemoteTestHelpers(this)
 
@@ -95,13 +106,20 @@ describe('utils/notes', () => {
       ).be.rejectedWith({ code: 'CozyDocumentMissingError' })
     })
 
-    it('throws a CozyNoteError with code CozyDocumentMissingError if the local doc is not associated with a remote doc', async function () {
+    it('throws a CozyNoteError with code CozyDocumentMissingError if the local doc is not associated with a remote doc', async function() {
      const docPath = 'Some interesting stuff.cozy-note'
       const remoteHelpers = new RemoteTestHelpers(this)
 
       const builders = new Builders({ cozy })
-      await builders.remoteFile().name(docPath).create()
-      const doc = await builders.metafile().path(docPath).upToDate().build()
+      await builders
+        .remoteFile()
+        .name(docPath)
+        .create()
+      const doc = await builders
+        .metafile()
+        .path(docPath)
+        .upToDate()
+        .build()
 
       await should(
         remoteDoc(doc, { config: this.config, remote: remoteHelpers.side })
@@ -117,7 +135,7 @@ describe('utils/notes', () => {
     afterEach('clean pouch', pouchHelpers.cleanDatabase)
     after('clean config directory', configHelpers.cleanConfig)
 
-    it('throws an Error when filePath does not correspond to a synced note', async function () {
+    it('throws an Error when filePath does not correspond to a synced note', async function() {
       const docPath = 'Notes/Some interesting stuff.cozy-note'
       const filePath = path.join(this.config.syncPath, docPath)
 
@@ -126,14 +144,18 @@ describe('utils/notes', () => {
       )
     })
 
-    it('throws a CozyNoteError with code CozyDocumentMissingError if the synced note does not exist anymore on the Cozy', async function () {
+    it('throws a CozyNoteError with code CozyDocumentMissingError if the synced note does not exist anymore on the Cozy', async function() {
       const docPath = 'Some interesting stuff.cozy-note'
       const filePath = path.join(this.config.syncPath, docPath)
       const localHelpers = new LocalTestHelpers(this)
       await localHelpers.syncDir.outputFile(docPath, 'Note content')
 
       const builders = new Builders({ cozy })
-      await builders.metafile().path(docPath).remoteId('3232').build()
+      await builders
+        .metafile()
+        .path(docPath)
+        .remoteId('3232')
+        .build()
 
       await should(findNote(filePath, this)).be.rejectedWith({
         code: 'CozyDocumentMissingError'
diff --git a/test/unit/utils/path.js b/test/unit/utils/path.js
index f8b4bc0c9..72ab28c47 100644
--- a/test/unit/utils/path.js
+++ b/test/unit/utils/path.js
@@ -2,6 +2,7 @@
 /* eslint-env mocha */
 
 const path = require('path')
+
 const should = require('should')
 
 const { localToRemote, remoteToLocal } = require('../../../core/utils/path')
diff --git a/test/unit/utils/sentry.js b/test/unit/utils/sentry.js
index 3f2f434d2..3717dd0b9 100644
--- a/test/unit/utils/sentry.js
+++ b/test/unit/utils/sentry.js
@@ -1,10 +1,11 @@
 /* eslint-env mocha */
 
-const should = require('should')
 const fs = require('fs')
 
-const sentry = require('../../../core/utils/sentry')
 const { FetchError } = require('electron-fetch')
+const should = require('should')
+
+const sentry = require('../../../core/utils/sentry')
 
 // This class is a copy of the `cozy-client-js` package's `FetchError` as it is
 // not exported and could therefore not be imported.
@@ -28,9 +29,9 @@ class CozyClientFetchError extends Error {
   }
 }
 
-describe('Sentry', function () {
-  describe('toSentryContext', function () {
-    it('properly parse all urls', function () {
+describe('Sentry', function() {
+  describe('toSentryContext', function() {
+    it('properly parse all urls', function() {
       sentry
         .toSentryContext('https://somedevcozy.cozy.localhost:8080')
         .should.deepEqual({
@@ -53,7 +54,7 @@ describe('Sentry', function () {
     })
   })
 
-  describe('formatError', function () {
+  describe('formatError', function() {
     it('formats Node system errors', () => {
       try {
         fs.readFileSync(`${__filename}.missing-file`)
diff --git a/test/unit/utils/timestamp.js b/test/unit/utils/timestamp.js
index 2c0b4f8be..2e2f1915e 100644
--- a/test/unit/utils/timestamp.js
+++ b/test/unit/utils/timestamp.js
@@ -66,7 +66,7 @@ describe('timestamp', () => {
   })
 
   describe('almostSameDate', () => {
-    it('returns true if the date are nearly the same', function () {
+    it('returns true if the date are nearly the same', function() {
       let a = '2015-12-01T11:22:56.517Z'
       let b = '2015-12-01T11:22:56.000Z'
       let c = '2015-12-01T11:22:57.000Z'
@@ -95,7 +95,7 @@ describe('timestamp', () => {
       should(maxDate(d1, d1)).deepEqual(d1)
     })
 
-    it('increments the most recent date by 1 millisecond if it has more than 3 millisecond digits', function () {
+    it('increments the most recent date by 1 millisecond if it has more than 3 millisecond digits', function() {
       const d1 = '2015-12-31T23:59:59.999232345Z'
       const d2 = '2015-12-31T23:59:59.999Z'
 
@@ -111,24 +111,24 @@ describe('timestamp', () => {
   })
 
   describe('roundedRemoteDate', () => {
-    it('adds the milliseconds when they are missing', function () {
+    it('adds the milliseconds when they are missing', function() {
       const time = '2015-12-31T23:59:59Z'
       should(roundedRemoteDate(time)).equal('2015-12-31T23:59:59.000Z')
     })
 
-    it('pads the milliseconds with 0s if they have less than 3 digits', function () {
+    it('pads the milliseconds with 0s if they have less than 3 digits', function() {
       const a = '2015-12-31T23:59:59.5Z'
       const b = '2015-12-31T23:59:59.54Z'
       should(roundedRemoteDate(a)).equal('2015-12-31T23:59:59.500Z')
       should(roundedRemoteDate(b)).equal('2015-12-31T23:59:59.540Z')
     })
 
-    it('increments the time by 1 millisecond if they have more than 3 digits', function () {
+    it('increments the time by 1 millisecond if they have more than 3 digits', function() {
       const time = '2015-12-31T23:59:59.999232345Z'
       should(roundedRemoteDate(time)).equal('2016-01-01T00:00:00.000Z')
     })
 
-    it('handles dates with timezones other than UTC', function () {
+    it('handles dates with timezones other than UTC', function() {
       // All previous examples with a different timezone
       const a = '2020-04-05T19:50:06+02:00'
       const b = '2020-04-05T19:50:06.029+02:00'
diff --git a/test/world/case_and_encoding.js b/test/world/case_and_encoding.js
index 0b9395af4..c4f88a7a7 100644
--- a/test/world/case_and_encoding.js
+++ b/test/world/case_and_encoding.js
@@ -1,13 +1,14 @@
 /* @flow */
 /* eslint-env mocha */
 
-const fse = require('fs-extra')
 const path = require('path')
+
+const fse = require('fs-extra')
 const should = require('should')
 
 const MacOSRelease = require('../support/helpers/MacOSRelease')
 
-should.Assertion.add('hex', function (expectedPretty) {
+should.Assertion.add('hex', function(expectedPretty) {
   const expected = expectedPretty.trim().split(/\s+/)
   const actual = Buffer.from(this.obj)
     .toString('hex')
diff --git a/test/world/file_systems.js b/test/world/file_systems.js
index 8fed8aa78..10f6c2879 100644
--- a/test/world/file_systems.js
+++ b/test/world/file_systems.js
@@ -3,14 +3,14 @@
 
 const should = require('should')
 
-const { ContextDir } = require('../support/helpers/context_dir')
 const macOSRelease = require('../support/helpers/MacOSRelease')
+const TmpDir = require('../support/helpers/TmpDir')
+const { ContextDir } = require('../support/helpers/context_dir')
 const {
   onMacOSAtLeast,
   onMacOSAtMost,
   onPlatforms
 } = require('../support/helpers/platform')
-const TmpDir = require('../support/helpers/TmpDir')
 
 describe('File systems', () => {
   let dir