diff --git a/CHANGELOG.md b/CHANGELOG.md index 7d9629c46..c1926cfce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,14 @@ # Changelog + +## 2019/01/11 3.11.38 +(#761) Add new file format (SST_V2) and methods to get/parse remote locations. +(#761) Upload files from SnapshotMetaService in backup version 2.0, if enabled. +(#761) Process older SNAPSHOT_V2 at the restart of Priam. +(#767) Backup Verification for Backup 2.0. +(#767) Restore for Backup 2.0. +(#767) Some API changes for Snapshot Verification. +(#767) Remove deprecated code like flush hour or snapshot hour. + ## 2018/10/29 3.11.37 * Bug Fix: SnapshotMetaService can leave snapshots if there is any error. * Bug Fix: SnapshotMetaService should continue building snapshot even if an unexpected file is found in snapshot. diff --git a/build.gradle b/build.gradle index a9dd2bf9b..f7554e4f6 100644 --- a/build.gradle +++ b/build.gradle @@ -30,9 +30,9 @@ allprojects { } dependencies { - compile 'org.apache.commons:commons-lang3:3.5' + compile 'org.apache.commons:commons-lang3:3.8.1' compile 'commons-logging:commons-logging:1.2' - compile 'org.apache.commons:commons-collections4:4.1' + compile 'org.apache.commons:commons-collections4:4.2' compile 'commons-io:commons-io:2.6' compile 'commons-cli:commons-cli:1.4' compile 'commons-httpclient:commons-httpclient:3.1' @@ -42,7 +42,7 @@ allprojects { compile 'com.sun.jersey.contribs:jersey-guice:1.19.4' compile 'com.google.guava:guava:21.0' compile 'com.google.code.findbugs:jsr305:3.0.2' - compile 'com.amazonaws:aws-java-sdk:1.11.467' + compile 'com.amazonaws:aws-java-sdk:1.11.475' compile 'com.google.inject:guice:4.2.2' compile 'com.google.inject.extensions:guice-servlet:4.2.2' compile 'org.quartz-scheduler:quartz:2.3.0' @@ -52,11 +52,11 @@ allprojects { compile 'org.apache.cassandra:cassandra-all:3.0.17' compile 'javax.ws.rs:jsr311-api:1.1.1' compile 'joda-time:joda-time:2.10.1' - compile 'org.apache.commons:commons-configuration2:2.1.1' + compile 
'org.apache.commons:commons-configuration2:2.4' compile 'xerces:xercesImpl:2.12.0' - compile 'net.java.dev.jna:jna:4.4.0' - compile 'org.apache.httpcomponents:httpclient:4.5.3' - compile 'org.apache.httpcomponents:httpcore:4.4.6' + compile 'net.java.dev.jna:jna:5.2.0' + compile 'org.apache.httpcomponents:httpclient:4.5.6' + compile 'org.apache.httpcomponents:httpcore:4.4.10' compile 'com.ning:compress-lzf:1.0.4' compile 'com.google.code.gson:gson:2.8.5' compile 'org.slf4j:slf4j-api:1.7.25' @@ -66,9 +66,9 @@ allprojects { compile ('com.google.appengine.tools:appengine-gcs-client:0.7') { exclude module: 'guava' } - compile 'com.google.apis:google-api-services-storage:v1-rev100-1.22.0' + compile 'com.google.apis:google-api-services-storage:v1-rev141-1.25.0' compile 'com.google.http-client:google-http-client-jackson2:1.22.0' - compile 'com.netflix.spectator:spectator-api:0.81.2' + compile 'com.netflix.spectator:spectator-api:0.82.0' compileOnly 'javax.servlet:javax.servlet-api:3.1.0' testCompile 'org.jmockit:jmockit:1.31' testCompile "org.spockframework:spock-core:1.1-groovy-2.4" diff --git a/priam/src/main/java/com/netflix/priam/aws/RemoteBackupPath.java b/priam/src/main/java/com/netflix/priam/aws/RemoteBackupPath.java index 43140cc64..b1e59e3d5 100644 --- a/priam/src/main/java/com/netflix/priam/aws/RemoteBackupPath.java +++ b/priam/src/main/java/com/netflix/priam/aws/RemoteBackupPath.java @@ -22,6 +22,7 @@ import com.netflix.priam.config.IConfiguration; import com.netflix.priam.cryptography.IFileCryptography; import com.netflix.priam.identity.InstanceIdentity; +import com.netflix.priam.utils.DateUtil; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Instant; @@ -128,7 +129,11 @@ private void parseV2Location(String remoteFile) { } private Path getV1Location() { - Path path = Paths.get(getV1Prefix().toString(), formatDate(time), type.toString()); + Path path = + Paths.get( + getV1Prefix().toString(), + DateUtil.formatyyyyMMddHHmm(time), + 
type.toString()); if (BackupFileType.isDataFile(type)) path = Paths.get(path.toString(), keyspace, columnFamily); return Paths.get(path.toString(), fileName); @@ -141,7 +146,7 @@ private void parseV1Location(Path remoteFilePath) { String.format( "Too few elements (expected: [%d]) in path: %s", 7, remoteFilePath)); - time = parseDate(remoteFilePath.getName(4).toString()); + time = DateUtil.getDate(remoteFilePath.getName(4).toString()); type = BackupFileType.valueOf(remoteFilePath.getName(5).toString()); if (BackupFileType.isDataFile(type)) { keyspace = remoteFilePath.getName(6).toString(); diff --git a/priam/src/main/java/com/netflix/priam/backup/AbstractBackup.java b/priam/src/main/java/com/netflix/priam/backup/AbstractBackup.java index cec5c53c9..3f17facd8 100644 --- a/priam/src/main/java/com/netflix/priam/backup/AbstractBackup.java +++ b/priam/src/main/java/com/netflix/priam/backup/AbstractBackup.java @@ -106,7 +106,6 @@ protected List upload( true); bps.add(bp); - addToRemotePath(bp.getRemotePath()); } } @@ -183,7 +182,4 @@ private boolean isValidBackupDir(File keyspaceDir, File backupDir) { return true; } - - /** Adds Remote path to the list of Remote Paths */ - protected abstract void addToRemotePath(String remotePath); } diff --git a/priam/src/main/java/com/netflix/priam/backup/AbstractBackupPath.java b/priam/src/main/java/com/netflix/priam/backup/AbstractBackupPath.java index ad0cf036e..c3dd2afe2 100644 --- a/priam/src/main/java/com/netflix/priam/backup/AbstractBackupPath.java +++ b/priam/src/main/java/com/netflix/priam/backup/AbstractBackupPath.java @@ -22,23 +22,19 @@ import com.netflix.priam.config.IConfiguration; import com.netflix.priam.cryptography.IFileCryptography; import com.netflix.priam.identity.InstanceIdentity; +import com.netflix.priam.utils.DateUtil; import java.io.File; import java.nio.file.Path; import java.text.ParseException; import java.time.Instant; import java.util.Date; import org.apache.commons.lang3.StringUtils; -import 
org.joda.time.DateTime; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @ImplementedBy(RemoteBackupPath.class) public abstract class AbstractBackupPath implements Comparable { private static final Logger logger = LoggerFactory.getLogger(AbstractBackupPath.class); - private static final String FMT = "yyyyMMddHHmm"; - private static final DateTimeFormatter DATE_FORMAT = DateTimeFormat.forPattern(FMT); public static final char PATH_SEP = File.separatorChar; public enum BackupFileType { @@ -82,18 +78,6 @@ public AbstractBackupPath(IConfiguration config, InstanceIdentity instanceIdenti this.config = config; } - // TODO: This is so wrong as it completely depends on the timezone where application is running. - // Hopefully everyone running Priam has their clocks set to UTC. - public static String formatDate(Date d) { - return new DateTime(d).toString(FMT); - } - - // TODO: This is so wrong as it completely depends on the timezone where application is running. - // Hopefully everyone running Priam has their clocks set to UTC. - public Date parseDate(String s) { - return DATE_FORMAT.parseDateTime(s).toDate(); - } - public void parseLocal(File file, BackupFileType type) throws ParseException { this.backupFile = file; @@ -117,7 +101,7 @@ public void parseLocal(File file, BackupFileType type) throws ParseException { 2. This is to ensure that all the files from the snapshot are uploaded under single directory in remote file system. 3. 
For META file we always override the time field via @link{Metadata#decorateMetaJson} */ - if (type == BackupFileType.SNAP) time = parseDate(elements[3]); + if (type == BackupFileType.SNAP) time = DateUtil.getDate(elements[3]); this.lastModified = Instant.ofEpochMilli(file.lastModified()); this.fileName = file.getName(); @@ -126,8 +110,8 @@ public void parseLocal(File file, BackupFileType type) throws ParseException { /** Given a date range, find a common string prefix Eg: 20120212, 20120213 = 2012021 */ protected String match(Date start, Date end) { - String sString = formatDate(start); - String eString = formatDate(end); + String sString = DateUtil.formatyyyyMMddHHmm(start); // formatDate(start); + String eString = DateUtil.formatyyyyMMddHHmm(end); // formatDate(end); int diff = StringUtils.indexOfDifference(sString, eString); if (diff < 0) return sString; return sString.substring(0, diff); diff --git a/priam/src/main/java/com/netflix/priam/backup/AbstractFileSystem.java b/priam/src/main/java/com/netflix/priam/backup/AbstractFileSystem.java index e2b79298a..c8126daff 100644 --- a/priam/src/main/java/com/netflix/priam/backup/AbstractFileSystem.java +++ b/priam/src/main/java/com/netflix/priam/backup/AbstractFileSystem.java @@ -115,8 +115,8 @@ public Future asyncDownloadFile( public void downloadFile(final Path remotePath, final Path localPath, final int retry) throws BackupRestoreException { // TODO: Should we download the file if localPath already exists? 
- if (remotePath == null) return; - + if (remotePath == null || localPath == null) return; + localPath.toFile().getParentFile().mkdirs(); logger.info("Downloading file: {} to location: {}", remotePath, localPath); try { new BoundedExponentialRetryCallable(500, 10000, retry) { diff --git a/priam/src/main/java/com/netflix/priam/backup/BackupVerification.java b/priam/src/main/java/com/netflix/priam/backup/BackupVerification.java index 0c7dbf45d..13e8d881c 100644 --- a/priam/src/main/java/com/netflix/priam/backup/BackupVerification.java +++ b/priam/src/main/java/com/netflix/priam/backup/BackupVerification.java @@ -14,150 +14,58 @@ package com.netflix.priam.backup; import com.google.inject.Inject; +import com.google.inject.Provider; import com.google.inject.Singleton; import com.google.inject.name.Named; -import com.netflix.priam.config.IConfiguration; -import com.netflix.priam.utils.DateUtil; -import java.io.FileReader; -import java.nio.file.FileSystems; +import com.netflix.priam.backupv2.IMetaProxy; import java.nio.file.Path; import java.nio.file.Paths; import java.util.*; -import java.util.stream.Collectors; -import org.apache.commons.collections4.CollectionUtils; -import org.json.simple.parser.JSONParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by aagrawal on 2/16/17. This class validates the backup by doing listing of files in the * backup destination and comparing with meta.json by downloading from the location. Input: - * BackupMetadata that needs to be verified. Since one backupmetadata can have multiple start time, - * provide one startTime if interested in verifying one particular backup. Leave startTime as null - * to get the latest snapshot for the provided BackupMetadata. + * BackupMetadata that needs to be verified. 
*/ @Singleton public class BackupVerification { private static final Logger logger = LoggerFactory.getLogger(BackupVerification.class); - private final IBackupFileSystem bkpStatusFs; - private final IConfiguration config; + private final IMetaProxy metaProxy; + private final Provider abstractBackupPathProvider; @Inject - BackupVerification(@Named("backup") IBackupFileSystem bkpStatusFs, IConfiguration config) { - this.bkpStatusFs = bkpStatusFs; - this.config = config; + BackupVerification( + @Named("v1") IMetaProxy metaProxy, + Provider abstractBackupPathProvider) { + this.metaProxy = metaProxy; + this.abstractBackupPathProvider = abstractBackupPathProvider; } - public BackupVerificationResult verifyBackup(List metadata, Date startTime) { - BackupVerificationResult result = new BackupVerificationResult(); - - if (metadata == null || metadata.isEmpty()) return result; - - result.snapshotAvailable = true; - // All the dates should be same. - result.selectedDate = metadata.get(0).getSnapshotDate(); - - List backups = - metadata.stream() - .map( - backupMetadata -> - DateUtil.formatyyyyMMddHHmm(backupMetadata.getStart())) - .collect(Collectors.toList()); - logger.info("Snapshots found for {} : [{}]", result.selectedDate, backups); - - // find the latest date (default) or verify if one provided - Date latestDate = null; - for (BackupMetadata backupMetadata : metadata) { - if (latestDate == null || latestDate.before(backupMetadata.getStart())) - latestDate = backupMetadata.getStart(); - - if (startTime != null - && DateUtil.formatyyyyMMddHHmm(backupMetadata.getStart()) - .equals(DateUtil.formatyyyyMMddHHmm(startTime))) { - latestDate = startTime; - break; - } - } - - result.snapshotTime = DateUtil.formatyyyyMMddHHmm(latestDate); - logger.info( - "Latest/Requested snapshot date found: {}, for selected/provided date: {}", - result.snapshotTime, - result.selectedDate); - - // Get Backup File Iterator - String prefix = config.getBackupPrefix(); - logger.info("Looking for 
meta file in the location: {}", prefix); - - Date strippedMsSnapshotTime = DateUtil.getDate(result.snapshotTime); - Iterator backupfiles = - bkpStatusFs.list(prefix, strippedMsSnapshotTime, strippedMsSnapshotTime); - // Return validation fail if backup filesystem listing failed. - if (!backupfiles.hasNext()) { - logger.warn( - "ERROR: No files available while doing backup filesystem listing. Declaring the verification failed."); - return result; - } - - result.backupFileListAvail = true; - - List metas = new LinkedList<>(); - List s3Listing = new ArrayList<>(); - - while (backupfiles.hasNext()) { - AbstractBackupPath path = backupfiles.next(); - if (path.getFileName().equalsIgnoreCase("meta.json")) metas.add(path); - else s3Listing.add(path.getRemotePath()); - } - - if (metas.size() == 0) { - logger.error( - "No meta found for snapshotdate: {}", DateUtil.formatyyyyMMddHHmm(latestDate)); - return result; - } - - result.metaFileFound = true; - // Download meta.json from backup location and uncompress it. 
- List metaFileList = new ArrayList<>(); - try { - Path metaFileLocation = - FileSystems.getDefault().getPath(config.getDataFileLocation(), "tmp_meta.json"); - bkpStatusFs.downloadFile(Paths.get(metas.get(0).getRemotePath()), metaFileLocation, 5); - logger.info( - "Meta file successfully downloaded to localhost: {}", - metaFileLocation.toString()); + public Optional getLatestBackupMetaData(List metadata) { + return metadata.stream() + .filter(backupMetadata -> backupMetadata != null) + .filter(backupMetadata -> backupMetadata.getStatus() == Status.FINISHED) + .sorted(Comparator.comparing(BackupMetadata::getStart).reversed()) + .findFirst(); + } - JSONParser jsonParser = new JSONParser(); - org.json.simple.JSONArray fileList = - (org.json.simple.JSONArray) - jsonParser.parse(new FileReader(metaFileLocation.toFile())); - for (Object aFileList : fileList) metaFileList.add(aFileList.toString()); + public Optional verifyBackup(List metadata) { + if (metadata == null || metadata.isEmpty()) return Optional.empty(); - } catch (Exception e) { - logger.error("Error while fetching meta.json from path: {}", metas.get(0), e); - return result; - } + Optional latestBackupMetaData = getLatestBackupMetaData(metadata); - if (metaFileList.isEmpty() && s3Listing.isEmpty()) { - logger.info( - "Uncommon Scenario: Both meta file and backup filesystem listing is empty. Considering this as success"); - result.valid = true; - return result; + if (!latestBackupMetaData.isPresent()) { + logger.error("No backup found which finished during the time provided."); + return Optional.empty(); } - // Atleast meta file or s3 listing contains some file. 
- result.filesInS3Only = new ArrayList<>(s3Listing); - result.filesInS3Only.removeAll(metaFileList); - result.filesInMetaOnly = new ArrayList<>(metaFileList); - result.filesInMetaOnly.removeAll(s3Listing); - result.filesMatched = - (ArrayList) CollectionUtils.intersection(metaFileList, s3Listing); - - // There could be a scenario that backupfilesystem has more files than meta file. e.g. some - // leftover objects - if (result.filesInMetaOnly.size() == 0) result.valid = true; - - return result; + Path metadataLocation = Paths.get(latestBackupMetaData.get().getSnapshotLocation()); + metadataLocation = metadataLocation.subpath(1, metadataLocation.getNameCount()); + AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get(); + abstractBackupPath.parseRemote(metadataLocation.toString()); + return Optional.of((metaProxy.isMetaFileValid(abstractBackupPath))); } } diff --git a/priam/src/main/java/com/netflix/priam/backup/BackupVerificationResult.java b/priam/src/main/java/com/netflix/priam/backup/BackupVerificationResult.java index 2bb0dd948..1076eb66a 100644 --- a/priam/src/main/java/com/netflix/priam/backup/BackupVerificationResult.java +++ b/priam/src/main/java/com/netflix/priam/backup/BackupVerificationResult.java @@ -13,6 +13,9 @@ */ package com.netflix.priam.backup; +import com.netflix.priam.utils.GsonJsonSerializer; +import java.time.Instant; +import java.util.ArrayList; import java.util.List; /** @@ -20,13 +23,15 @@ * are all null and false. 
*/ public class BackupVerificationResult { - public boolean snapshotAvailable = false; public boolean valid = false; - public boolean metaFileFound = false; - public boolean backupFileListAvail = false; - public String selectedDate = null; - public String snapshotTime = null; - public List filesInMetaOnly = null; - public List filesInS3Only = null; - public List filesMatched = null; + public String remotePath = null; + public Instant snapshotInstant = null; + public boolean manifestAvailable = false; + public List filesInMetaOnly = new ArrayList<>(); + public int filesMatched = 0; + + @Override + public String toString() { + return GsonJsonSerializer.getGson().toJson(this); + } } diff --git a/priam/src/main/java/com/netflix/priam/backup/CommitLogBackup.java b/priam/src/main/java/com/netflix/priam/backup/CommitLogBackup.java index 04fff0880..c667d3a54 100644 --- a/priam/src/main/java/com/netflix/priam/backup/CommitLogBackup.java +++ b/priam/src/main/java/com/netflix/priam/backup/CommitLogBackup.java @@ -18,6 +18,7 @@ import com.google.inject.Provider; import com.google.inject.name.Named; import com.netflix.priam.backup.AbstractBackupPath.BackupFileType; +import com.netflix.priam.utils.DateUtil; import java.io.File; import java.nio.file.Paths; import java.util.List; @@ -29,7 +30,6 @@ public class CommitLogBackup { private static final Logger logger = LoggerFactory.getLogger(CommitLogBackup.class); private final Provider pathFactory; - private static final List observers = Lists.newArrayList(); private final List clRemotePaths = Lists.newArrayList(); private final IBackupFileSystem fs; @@ -64,7 +64,7 @@ public List upload(String archivedDir, final String snapshot AbstractBackupPath bp = pathFactory.get(); bp.parseLocal(file, BackupFileType.CL); - if (snapshotName != null) bp.time = bp.parseDate(snapshotName); + if (snapshotName != null) bp.time = DateUtil.getDate(snapshotName); fs.uploadFile( Paths.get(bp.getBackupFile().getAbsolutePath()), @@ -84,24 +84,6 @@ public 
List upload(String archivedDir, final String snapshot return bps; } - public static void addObserver(IMessageObserver observer) { - observers.add(observer); - } - - public static void removeObserver(IMessageObserver observer) { - observers.remove(observer); - } - - public void notifyObservers() { - for (IMessageObserver observer : observers) - if (observer != null) { - logger.debug("Updating CommitLog observers now ..."); - observer.update(IMessageObserver.BACKUP_MESSAGE_TYPE.COMMITLOG, this.clRemotePaths); - } else { - logger.debug("Observer is Null, hence can not notify ..."); - } - } - private void addToRemotePath(String remotePath) { this.clRemotePaths.add(remotePath); } diff --git a/priam/src/main/java/com/netflix/priam/backup/CommitLogBackupTask.java b/priam/src/main/java/com/netflix/priam/backup/CommitLogBackupTask.java index 1f7ba4766..18ca2566c 100644 --- a/priam/src/main/java/com/netflix/priam/backup/CommitLogBackupTask.java +++ b/priam/src/main/java/com/netflix/priam/backup/CommitLogBackupTask.java @@ -16,13 +16,10 @@ import com.google.inject.Inject; import com.google.inject.Provider; import com.google.inject.Singleton; -import com.netflix.priam.backup.IMessageObserver.BACKUP_MESSAGE_TYPE; import com.netflix.priam.config.IConfiguration; import com.netflix.priam.scheduler.SimpleTimer; import com.netflix.priam.scheduler.TaskTimer; import java.io.File; -import java.util.ArrayList; -import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,8 +29,6 @@ public class CommitLogBackupTask extends AbstractBackup { public static final String JOBNAME = "CommitLogBackup"; private static final Logger logger = LoggerFactory.getLogger(CommitLogBackupTask.class); - private final List clRemotePaths = new ArrayList<>(); - private static final List observers = new ArrayList<>(); private final CommitLogBackup clBackup; @Inject @@ -67,31 +62,9 @@ public static TaskTimer getTimer(IConfiguration config) { return new SimpleTimer(JOBNAME, 60L * 1000); 
// every 1 min } - public static void addObserver(IMessageObserver observer) { - observers.add(observer); - } - - public static void removeObserver(IMessageObserver observer) { - observers.remove(observer); - } - - public void notifyObservers() { - for (IMessageObserver observer : observers) { - if (observer != null) { - logger.debug("Updating CL observers now ..."); - observer.update(BACKUP_MESSAGE_TYPE.COMMITLOG, clRemotePaths); - } else logger.info("Observer is Null, hence can not notify ..."); - } - } - @Override protected void processColumnFamily(String keyspace, String columnFamily, File backupDir) throws Exception { // Do nothing. } - - @Override - protected void addToRemotePath(String remotePath) { - clRemotePaths.add(remotePath); - } } diff --git a/priam/src/main/java/com/netflix/priam/backup/IMessageObserver.java b/priam/src/main/java/com/netflix/priam/backup/IMessageObserver.java deleted file mode 100644 index 58af87dca..000000000 --- a/priam/src/main/java/com/netflix/priam/backup/IMessageObserver.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2013 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -package com.netflix.priam.backup; - -import java.util.List; - -public interface IMessageObserver { - - enum BACKUP_MESSAGE_TYPE { - SNAPSHOT, - INCREMENTAL, - COMMITLOG, - META - } - - enum RESTORE_MESSAGE_TYPE { - SNAPSHOT, - INCREMENTAL, - COMMITLOG, - META - } - - enum RESTORE_MESSAGE_STATUS { - UPLOADED, - DOWNLOADED, - STREAMED - } - - void update(BACKUP_MESSAGE_TYPE bkpMsgType, List remotePathNames); - - void update( - RESTORE_MESSAGE_TYPE rstMsgType, - List remotePathNames, - RESTORE_MESSAGE_STATUS rstMsgStatus); - - void update( - RESTORE_MESSAGE_TYPE rstMsgType, - String remotePath, - String fileDiskPath, - RESTORE_MESSAGE_STATUS rstMsgStatus); -} diff --git a/priam/src/main/java/com/netflix/priam/backup/IncrementalBackup.java b/priam/src/main/java/com/netflix/priam/backup/IncrementalBackup.java index 0dafbd699..627c303e5 100644 --- a/priam/src/main/java/com/netflix/priam/backup/IncrementalBackup.java +++ b/priam/src/main/java/com/netflix/priam/backup/IncrementalBackup.java @@ -20,12 +20,12 @@ import com.google.inject.Provider; import com.google.inject.Singleton; import com.netflix.priam.backup.AbstractBackupPath.BackupFileType; -import com.netflix.priam.backup.IMessageObserver.BACKUP_MESSAGE_TYPE; +import com.netflix.priam.config.IBackupRestoreConfig; import com.netflix.priam.config.IConfiguration; import com.netflix.priam.scheduler.SimpleTimer; import com.netflix.priam.scheduler.TaskTimer; +import com.netflix.priam.utils.DateUtil; import java.io.File; -import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,14 +37,14 @@ public class IncrementalBackup extends AbstractBackup { private static final Logger logger = LoggerFactory.getLogger(IncrementalBackup.class); public static final String JOBNAME = "IncrementalBackup"; - private final List incrementalRemotePaths = new ArrayList<>(); private final IncrementalMetaData metaData; private final BackupRestoreUtil backupRestoreUtil; - private 
static final List observers = new ArrayList<>(); + private final IBackupRestoreConfig backupRestoreConfig; @Inject public IncrementalBackup( IConfiguration config, + IBackupRestoreConfig backupRestoreConfig, Provider pathFactory, IFileSystemContext backupFileSystemCtx, IncrementalMetaData metaData) { @@ -52,6 +52,7 @@ public IncrementalBackup( // a means to upload audit trail (via meta_cf_yyyymmddhhmm.json) of files successfully // uploaded) this.metaData = metaData; + this.backupRestoreConfig = backupRestoreConfig; backupRestoreUtil = new BackupRestoreUtil( config.getIncrementalIncludeCFList(), config.getIncrementalExcludeCFList()); @@ -60,11 +61,7 @@ public IncrementalBackup( @Override public void execute() throws Exception { // Clearing remotePath List - incrementalRemotePaths.clear(); initiateBackup(INCREMENTAL_BACKUP_FOLDER, backupRestoreUtil); - if (incrementalRemotePaths.size() > 0) { - notifyObservers(); - } } /** Run every 10 Sec */ @@ -77,33 +74,19 @@ public String getName() { return JOBNAME; } - public static void addObserver(IMessageObserver observer) { - observers.add(observer); - } - - public static void removeObserver(IMessageObserver observer) { - observers.remove(observer); - } - - private void notifyObservers() { - for (IMessageObserver observer : observers) { - if (observer != null) { - logger.debug("Updating incremental observers now ..."); - observer.update(BACKUP_MESSAGE_TYPE.INCREMENTAL, incrementalRemotePaths); - } else logger.info("Observer is Null, hence can not notify ..."); - } - } - @Override protected void processColumnFamily(String keyspace, String columnFamily, File backupDir) throws Exception { + BackupFileType fileType = BackupFileType.SST; + if (backupRestoreConfig.enableV2Backups()) fileType = BackupFileType.SST_V2; + List uploadedFiles = - upload(backupDir, BackupFileType.SST, config.enableAsyncIncremental(), true); + upload(backupDir, fileType, config.enableAsyncIncremental(), true); if (!uploadedFiles.isEmpty()) { // format of 
yyyymmddhhmm (e.g. 201505060901) String incrementalUploadTime = - AbstractBackupPath.formatDate(uploadedFiles.get(0).getTime()); + DateUtil.formatyyyyMMddHHmm(uploadedFiles.get(0).getTime()); String metaFileName = "meta_" + columnFamily + "_" + incrementalUploadTime; logger.info("Uploading meta file for incremental backup: {}", metaFileName); this.metaData.setMetaFileName(metaFileName); @@ -111,9 +94,4 @@ protected void processColumnFamily(String keyspace, String columnFamily, File ba logger.info("Uploaded meta file for incremental backup: {}", metaFileName); } } - - @Override - protected void addToRemotePath(String remotePath) { - incrementalRemotePaths.add(remotePath); - } } diff --git a/priam/src/main/java/com/netflix/priam/backup/MetaData.java b/priam/src/main/java/com/netflix/priam/backup/MetaData.java index a5cbb8737..be7a0bdab 100644 --- a/priam/src/main/java/com/netflix/priam/backup/MetaData.java +++ b/priam/src/main/java/com/netflix/priam/backup/MetaData.java @@ -16,14 +16,12 @@ */ package com.netflix.priam.backup; -import com.google.common.collect.Lists; import com.google.inject.Inject; import com.google.inject.Provider; import com.netflix.priam.backup.AbstractBackupPath.BackupFileType; -import com.netflix.priam.backup.IMessageObserver.BACKUP_MESSAGE_TYPE; import com.netflix.priam.config.IConfiguration; +import com.netflix.priam.utils.DateUtil; import java.io.File; -import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.nio.file.Paths; @@ -32,7 +30,6 @@ import java.util.List; import org.apache.commons.io.FileUtils; import org.json.simple.JSONArray; -import org.json.simple.parser.JSONParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,7 +40,6 @@ public class MetaData { private static final Logger logger = LoggerFactory.getLogger(MetaData.class); private final Provider pathFactory; - private static final List observers = new ArrayList<>(); private final List metaRemotePaths = new ArrayList<>(); 
private final IBackupFileSystem fs; @@ -73,10 +69,6 @@ public AbstractBackupPath set(List bps, String snapshotName) 10, true); addToRemotePath(backupfile.getRemotePath()); - if (metaRemotePaths.size() > 0) { - notifyObservers(); - } - return backupfile; } @@ -87,7 +79,7 @@ public AbstractBackupPath decorateMetaJson(File metafile, String snapshotName) throws ParseException { AbstractBackupPath backupfile = pathFactory.get(); backupfile.parseLocal(metafile, BackupFileType.META); - backupfile.setTime(backupfile.parseDate(snapshotName)); + backupfile.setTime(DateUtil.getDate(snapshotName)); return backupfile; } @@ -119,50 +111,7 @@ public File createTmpMetaFile() throws IOException { return destFile; } - public static void addObserver(IMessageObserver observer) { - observers.add(observer); - } - - public static void removeObserver(IMessageObserver observer) { - observers.remove(observer); - } - - private void notifyObservers() { - for (IMessageObserver observer : observers) { - if (observer != null) { - logger.debug("Updating snapshot observers now ..."); - observer.update(BACKUP_MESSAGE_TYPE.META, metaRemotePaths); - } else logger.info("Observer is Null, hence can not notify ..."); - } - } - private void addToRemotePath(String remotePath) { metaRemotePaths.add(remotePath); } - - public List toJson(File input) { - List files = Lists.newArrayList(); - try { - JSONArray jsonObj = (JSONArray) new JSONParser().parse(new FileReader(input)); - for (Object aJsonObj : jsonObj) { - AbstractBackupPath p = pathFactory.get(); - p.parseRemote((String) aJsonObj); - files.add(p); - } - - } catch (Exception ex) { - throw new RuntimeException( - "Error transforming file " - + input.getAbsolutePath() - + " to JSON format. Msg:" - + ex.getLocalizedMessage(), - ex); - } - - logger.debug( - "Transformed file {} to JSON. 
Number of JSON elements: {}", - input.getAbsolutePath(), - files.size()); - return files; - } } diff --git a/priam/src/main/java/com/netflix/priam/backup/SnapshotBackup.java b/priam/src/main/java/com/netflix/priam/backup/SnapshotBackup.java index 3afc226b6..ff5efd488 100644 --- a/priam/src/main/java/com/netflix/priam/backup/SnapshotBackup.java +++ b/priam/src/main/java/com/netflix/priam/backup/SnapshotBackup.java @@ -21,15 +21,17 @@ import com.google.inject.Provider; import com.google.inject.Singleton; import com.netflix.priam.backup.AbstractBackupPath.BackupFileType; -import com.netflix.priam.backup.IMessageObserver.BACKUP_MESSAGE_TYPE; +import com.netflix.priam.backupv2.ForgottenFilesManager; import com.netflix.priam.config.IConfiguration; import com.netflix.priam.defaultimpl.CassandraOperations; import com.netflix.priam.identity.InstanceIdentity; import com.netflix.priam.scheduler.CronTimer; import com.netflix.priam.scheduler.TaskTimer; import com.netflix.priam.utils.CassandraMonitor; +import com.netflix.priam.utils.DateUtil; import com.netflix.priam.utils.ThreadSleeper; import java.io.File; +import java.time.Instant; import java.util.*; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; @@ -42,14 +44,14 @@ public class SnapshotBackup extends AbstractBackup { private static final Logger logger = LoggerFactory.getLogger(SnapshotBackup.class); public static final String JOBNAME = "SnapshotBackup"; private final MetaData metaData; - private final List snapshotRemotePaths = new ArrayList<>(); - private static final List observers = new ArrayList<>(); private final ThreadSleeper sleeper = new ThreadSleeper(); private static final long WAIT_TIME_MS = 60 * 1000 * 10; private final InstanceIdentity instanceIdentity; private final IBackupStatusMgr snapshotStatusMgr; private final BackupRestoreUtil backupRestoreUtil; + private final ForgottenFilesManager forgottenFilesManager; private String snapshotName = null; + private Instant 
snapshotInstant = DateUtil.getInstant(); private List abstractBackupPaths = null; private final CassandraOperations cassandraOperations; private static final Lock lock = new ReentrantLock(); @@ -62,7 +64,8 @@ public SnapshotBackup( IFileSystemContext backupFileSystemCtx, IBackupStatusMgr snapshotStatusMgr, InstanceIdentity instanceIdentity, - CassandraOperations cassandraOperations) { + CassandraOperations cassandraOperations, + ForgottenFilesManager forgottenFilesManager) { super(config, backupFileSystemCtx, pathFactory); this.metaData = metaData; this.snapshotStatusMgr = snapshotStatusMgr; @@ -71,6 +74,7 @@ public SnapshotBackup( backupRestoreUtil = new BackupRestoreUtil( config.getSnapshotIncludeCFList(), config.getSnapshotExcludeCFList()); + this.forgottenFilesManager = forgottenFilesManager; } @Override @@ -100,7 +104,8 @@ public void execute() throws Exception { private void executeSnapshot() throws Exception { Date startTime = Calendar.getInstance(TimeZone.getTimeZone("GMT")).getTime(); - snapshotName = pathFactory.get().formatDate(startTime); + snapshotName = DateUtil.formatyyyyMMddHHmm(startTime); + snapshotInstant = DateUtil.getInstant(); String token = instanceIdentity.getInstance().getToken(); // Save start snapshot status @@ -109,8 +114,6 @@ private void executeSnapshot() throws Exception { try { logger.info("Starting snapshot {}", snapshotName); - // Clearing remotePath List - snapshotRemotePaths.clear(); cassandraOperations.takeSnapshot(snapshotName); // Collect all snapshot dir's under keyspace dir's @@ -132,11 +135,6 @@ private void executeSnapshot() throws Exception { backupMetadata.setSnapshotLocation( config.getBackupPrefix() + File.separator + metaJson.getRemotePath()); snapshotStatusMgr.finish(backupMetadata); - - if (snapshotRemotePaths.size() > 0) { - notifyObservers(); - } - } catch (Exception e) { logger.error( "Exception occurred while taking snapshot: {}. 
Exception: {}", @@ -169,43 +167,7 @@ public static boolean isBackupEnabled(IConfiguration config) throws Exception { } public static TaskTimer getTimer(IConfiguration config) throws Exception { - CronTimer cronTimer = null; - switch (config.getBackupSchedulerType()) { - case HOUR: - if (config.getBackupHour() < 0) - logger.info( - "Skipping {} as it is disabled via backup hour: {}", - JOBNAME, - config.getBackupHour()); - else { - cronTimer = new CronTimer(JOBNAME, config.getBackupHour(), 1, 0); - logger.info( - "Starting snapshot backup with backup hour: {}", - config.getBackupHour()); - } - break; - case CRON: - cronTimer = CronTimer.getCronTimer(JOBNAME, config.getBackupCronExpression()); - break; - } - return cronTimer; - } - - public static void addObserver(IMessageObserver observer) { - observers.add(observer); - } - - public static void removeObserver(IMessageObserver observer) { - observers.remove(observer); - } - - private void notifyObservers() { - for (IMessageObserver observer : observers) { - if (observer != null) { - logger.debug("Updating snapshot observers now ..."); - observer.update(BACKUP_MESSAGE_TYPE.SNAPSHOT, snapshotRemotePaths); - } else logger.info("Observer is Null, hence can not notify ..."); - } + return CronTimer.getCronTimer(JOBNAME, config.getBackupCronExpression()); } @Override @@ -219,74 +181,9 @@ protected void processColumnFamily(String keyspace, String columnFamily, File ba return; } + forgottenFilesManager.findAndMoveForgottenFiles(snapshotInstant, snapshotDir); // Add files to this dir abstractBackupPaths.addAll( upload(snapshotDir, BackupFileType.SNAP, config.enableAsyncSnapshot(), true)); } - - // private void findForgottenFiles(File snapshotDir) { - // try { - // Collection snapshotFiles = FileUtils.listFiles(snapshotDir, - // FileFilterUtils.fileFileFilter(), null); - // File columnfamilyDir = snapshotDir.getParentFile().getParentFile(); - // - // //Find all the files in columnfamily folder which is : - // // 1. 
Not a temp file. - // // 2. Is a file. (we don't care about directories) - // // 3. Is older than snapshot time, as new files keep getting created after taking - // a snapshot. - // IOFileFilter tmpFileFilter1 = FileFilterUtils.suffixFileFilter(TMP_EXT); - // IOFileFilter tmpFileFilter2 = FileFilterUtils.asFileFilter(pathname -> - // tmpFilePattern.matcher(pathname.getName()).matches()); - // IOFileFilter tmpFileFilter = FileFilterUtils.or(tmpFileFilter1, tmpFileFilter2); - // // Here we are allowing files which were more than - // @link{IConfiguration#getForgottenFileGracePeriodDays}. We do this to allow cassandra to - // // clean up any files which were generated as part of repair/compaction and - // cleanup thread has not already deleted. - // // Refer to https://issues.apache.org/jira/browse/CASSANDRA-6756 and - // https://issues.apache.org/jira/browse/CASSANDRA-7066 - // // for more information. - // IOFileFilter ageFilter = - // FileFilterUtils.ageFileFilter(snapshotInstant.minus(config.getForgottenFileGracePeriodDays(), - // ChronoUnit.DAYS).toEpochMilli()); - // IOFileFilter fileFilter = - // FileFilterUtils.and(FileFilterUtils.notFileFilter(tmpFileFilter), - // FileFilterUtils.fileFileFilter(), ageFilter); - // - // Collection columnfamilyFiles = FileUtils.listFiles(columnfamilyDir, - // fileFilter, null); - // - // //Remove the SSTable(s) which are part of snapshot from the CF file list. - // //This cannot be a simple removeAll as snapshot files have "different" file folder - // prefix. - // for (File file : snapshotFiles) { - // //Get its parent directory file based on this file. - // File originalFile = new File(columnfamilyDir, file.getName()); - // columnfamilyFiles.remove(originalFile); - // } - // - // //If there are no "extra" SSTables in CF data folder, we are done. 
- // if (columnfamilyFiles.size() == 0) - // return; - // - // columnfamilyFiles.parallelStream().forEach(file -> logger.info("Forgotten file: {} - // found for CF: {}", file.getAbsolutePath(), columnfamilyDir.getName())); - // - // //TODO: The eventual plan is to move the forgotten files to a lost+found directory - // and clean the directory after 'x' amount of time. This behavior should be configurable. - // backupMetrics.incrementForgottenFiles(columnfamilyFiles.size()); - // logger.warn("# of forgotten files: {} found for CF: {}", columnfamilyFiles.size(), - // columnfamilyDir.getName()); - // } catch (Exception e) { - // //Eat the exception, if there, for any reason. This should not stop the snapshot - // for any reason. - // logger.error("Exception occurred while trying to find forgottenFile. Ignoring the - // error and continuing with remaining backup", e); - // } - // } - - @Override - protected void addToRemotePath(String remotePath) { - snapshotRemotePaths.add(remotePath); - } } diff --git a/priam/src/main/java/com/netflix/priam/backupv2/BackupValidator.java b/priam/src/main/java/com/netflix/priam/backupv2/BackupValidator.java index 583bbad41..2b7a7d3e4 100644 --- a/priam/src/main/java/com/netflix/priam/backupv2/BackupValidator.java +++ b/priam/src/main/java/com/netflix/priam/backupv2/BackupValidator.java @@ -17,21 +17,12 @@ package com.netflix.priam.backupv2; -import com.google.inject.Provider; import com.netflix.priam.backup.*; -import com.netflix.priam.config.IConfiguration; import com.netflix.priam.utils.DateUtil; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; import java.util.List; +import java.util.Optional; import javax.inject.Inject; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; +import javax.inject.Named; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -41,57 +32,11 @@ */ public class BackupValidator { private static final Logger logger = LoggerFactory.getLogger(BackupVerification.class); - private final IBackupFileSystem fs; - private final Provider abstractBackupPathProvider; - private boolean isBackupValid; + private IMetaProxy metaProxy; @Inject - public BackupValidator( - IConfiguration configuration, - IFileSystemContext backupFileSystemCtx, - Provider abstractBackupPathProvider) { - fs = backupFileSystemCtx.getFileStrategy(configuration); - this.abstractBackupPathProvider = abstractBackupPathProvider; - } - - /** - * Fetch the list of all META_V2 files on the remote file system for the provided valid - * daterange. - * - * @param dateRange the time period to scan in the remote file system for meta files. - * @return List of all the META_V2 files from the remote file system. - */ - public List findMetaFiles(DateUtil.DateRange dateRange) { - ArrayList metas = new ArrayList<>(); - String prefix = getMetaPrefix(dateRange); - String marker = getMetaPrefix(new DateUtil.DateRange(dateRange.getStartTime(), null)); - logger.info( - "Listing filesystem with prefix: {}, marker: {}, daterange: {}", - prefix, - marker, - dateRange); - Iterator iterator = fs.listFileSystem(prefix, null, marker); - - while (iterator.hasNext()) { - AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get(); - abstractBackupPath.parseRemote(iterator.next()); - logger.debug("Meta file found: {}", abstractBackupPath); - if (abstractBackupPath.getLastModified().toEpochMilli() - >= dateRange.getStartTime().toEpochMilli() - && abstractBackupPath.getLastModified().toEpochMilli() - <= dateRange.getEndTime().toEpochMilli()) { - metas.add(abstractBackupPath); - } - } - - Collections.sort(metas, Collections.reverseOrder()); - - if (metas.size() == 0) { - logger.info( - "No meta file found on remote file system for the time period: {}", dateRange); - } - - return metas; + public BackupValidator(@Named("v2") 
IMetaProxy metaProxy) { + this.metaProxy = metaProxy; } /** @@ -99,105 +44,22 @@ public List findMetaFiles(DateUtil.DateRange dateRange) { * file via AbstractBackupPath object. * * @param dateRange the time period to scan in the remote file system for meta files. - * @return the AbstractBackupPath denoting the "local" file which is valid or null. Caller needs - * to delete the file. + * @return the BackupVerificationResult containing the details of the valid meta file. If none + * is found, an empty Optional is returned. * @throws BackupRestoreException if there is issue contacting remote file system, fetching the * file etc. */ - public AbstractBackupPath findLatestValidMetaFile(DateUtil.DateRange dateRange) + public Optional findLatestValidMetaFile(DateUtil.DateRange dateRange) throws BackupRestoreException { - List metas = findMetaFiles(dateRange); + List metas = metaProxy.findMetaFiles(dateRange); logger.info("Meta files found: {}", metas); for (AbstractBackupPath meta : metas) { - Path localFile = downloadMetaFile(meta); - boolean isValid = isMetaFileValid(localFile); - logger.info("Meta: {}, isValid: {}", meta, isValid); - if (!isValid) FileUtils.deleteQuietly(localFile.toFile()); - else return meta; - } - - return null; - } - - /** - * Download the meta file to disk. - * - * @param meta AbstractBackupPath denoting the meta file on remote file system. - * @return the location of the meta file on disk after downloading from remote file system. - * @throws BackupRestoreException if unable to download for any reason. - */ - public Path downloadMetaFile(AbstractBackupPath meta) throws BackupRestoreException { - Path localFile = Paths.get(meta.newRestoreFile().getAbsolutePath()); - fs.downloadFile(Paths.get(meta.getRemotePath()), localFile, 10); - return localFile; - } - - /** - * Get the prefix for the META_V2 file. This will depend on the configuration, if restore prefix - * is set. - * - * @param dateRange date range for which we are trying to find META_V2 files. 
- * @return prefix for the META_V2 files. - */ - public String getMetaPrefix(DateUtil.DateRange dateRange) { - Path location = fs.getPrefix(); - AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get(); - String match = StringUtils.EMPTY; - if (dateRange != null) match = dateRange.match(); - return Paths.get( - abstractBackupPath - .remoteV2Prefix(location, AbstractBackupPath.BackupFileType.META_V2) - .toString(), - match) - .toString(); - } - - /** - * Validate that all the files mentioned in the meta file actually exists on remote file system. - * - * @param metaFile Path to the local uncompressed/unencrypted meta file - * @return true if all the files mentioned in meta file are present on remote file system. It - * will return false in case of any error. - */ - public boolean isMetaFileValid(Path metaFile) { - try { - isBackupValid = true; - new MetaFileBackupValidator().readMeta(metaFile); - } catch (FileNotFoundException fne) { - isBackupValid = false; - logger.error(fne.getLocalizedMessage()); - } catch (IOException ioe) { - isBackupValid = false; - logger.error( - "IO Error while processing meta file: " + metaFile, ioe.getLocalizedMessage()); - ioe.printStackTrace(); + BackupVerificationResult result = metaProxy.isMetaFileValid(meta); + logger.info("BackupVerificationResult: {}", result); + if (result.valid) return Optional.of(result); } - return isBackupValid; - } - private class MetaFileBackupValidator extends MetaFileReader { - @Override - public void process(ColumnfamilyResult columnfamilyResult) { - for (ColumnfamilyResult.SSTableResult ssTableResult : - columnfamilyResult.getSstables()) { - for (FileUploadResult fileUploadResult : ssTableResult.getSstableComponents()) { - if (!isBackupValid) { - break; - } - - try { - isBackupValid = - isBackupValid - && fs.doesRemoteFileExist( - Paths.get(fileUploadResult.getBackupPath())); - } catch (BackupRestoreException e) { - // For any error, mark that file is not available. 
- isBackupValid = false; - break; - } - } - } - } + return Optional.empty(); } } diff --git a/priam/src/main/java/com/netflix/priam/backupv2/ForgottenFilesManager.java b/priam/src/main/java/com/netflix/priam/backupv2/ForgottenFilesManager.java new file mode 100644 index 000000000..4c8884008 --- /dev/null +++ b/priam/src/main/java/com/netflix/priam/backupv2/ForgottenFilesManager.java @@ -0,0 +1,147 @@ +/* + * Copyright 2019 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package com.netflix.priam.backupv2; + +import com.google.inject.Inject; +import com.netflix.priam.config.IConfiguration; +import com.netflix.priam.merics.BackupMetrics; +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Collection; +import java.util.regex.Pattern; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.filefilter.FileFilterUtils; +import org.apache.commons.io.filefilter.IOFileFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Created by aagrawal on 1/1/19. 
*/ +public class ForgottenFilesManager { + private static final Logger logger = LoggerFactory.getLogger(ForgottenFilesManager.class); + + private BackupMetrics backupMetrics; + private IConfiguration config; + private static final String TMP_EXT = ".tmp"; + + @SuppressWarnings("Annotator") + private static final Pattern tmpFilePattern = + Pattern.compile("^((.*)\\-(.*)\\-)?tmp(link)?\\-((?:l|k).)\\-(\\d)*\\-(.*)$"); + + protected static final String LOST_FOUND = "lost+found"; + + @Inject + public ForgottenFilesManager(IConfiguration configuration, BackupMetrics backupMetrics) { + this.config = configuration; + this.backupMetrics = backupMetrics; + } + + public void findAndMoveForgottenFiles(Instant snapshotInstant, File snapshotDir) { + try { + Collection snapshotFiles = + FileUtils.listFiles(snapshotDir, FileFilterUtils.fileFileFilter(), null); + File columnfamilyDir = snapshotDir.getParentFile().getParentFile(); + Collection columnfamilyFiles = + getColumnfamilyFiles(snapshotInstant, columnfamilyDir); + + // Remove the SSTable(s) which are part of snapshot from the CF file list. + // This cannot be a simple removeAll as snapshot files have "different" file folder + // prefix. + for (File file : snapshotFiles) { + // Get its parent directory file based on this file. + File originalFile = new File(columnfamilyDir, file.getName()); + columnfamilyFiles.remove(originalFile); + } + + // If there are no "extra" SSTables in CF data folder, we are done. + if (columnfamilyFiles.size() == 0) return; + + logger.warn( + "# of forgotten files: {} found for CF: {}", + columnfamilyFiles.size(), + columnfamilyDir.getName()); + backupMetrics.incrementForgottenFiles(columnfamilyFiles.size()); + + // Move the files to lost_found directory if configured. + moveForgottenFiles(columnfamilyDir, columnfamilyFiles); + + } catch (Exception e) { + // Eat the exception, if there, for any reason. This should not stop the snapshot for + // any reason. 
+ logger.error( + "Exception occurred while trying to find forgottenFile. Ignoring the error and continuing with remaining backup", + e); + e.printStackTrace(); + } + } + + protected Collection getColumnfamilyFiles(Instant snapshotInstant, File columnfamilyDir) { + // Find all the files in columnfamily folder which is : + // 1. Not a temp file. + // 2. Is a file. (we don't care about directories) + // 3. Is older than snapshot time, as new files keep getting created after taking a + // snapshot. + IOFileFilter tmpFileFilter1 = FileFilterUtils.suffixFileFilter(TMP_EXT); + IOFileFilter tmpFileFilter2 = + FileFilterUtils.asFileFilter( + pathname -> tmpFilePattern.matcher(pathname.getName()).matches()); + IOFileFilter tmpFileFilter = FileFilterUtils.or(tmpFileFilter1, tmpFileFilter2); + // Here we are allowing files which were more than + // @link{IConfiguration#getForgottenFileGracePeriodDays}. We do this to allow cassandra + // to clean up any files which were generated as part of repair/compaction and cleanup + // thread has not already deleted. + // Refer to https://issues.apache.org/jira/browse/CASSANDRA-6756 and + // https://issues.apache.org/jira/browse/CASSANDRA-7066 + // for more information. 
+ IOFileFilter ageFilter = + FileFilterUtils.ageFileFilter( + snapshotInstant + .minus(config.getForgottenFileGracePeriodDays(), ChronoUnit.DAYS) + .toEpochMilli()); + IOFileFilter fileFilter = + FileFilterUtils.and( + FileFilterUtils.notFileFilter(tmpFileFilter), + FileFilterUtils.fileFileFilter(), + ageFilter); + + return FileUtils.listFiles(columnfamilyDir, fileFilter, null); + } + + protected void moveForgottenFiles(File columnfamilyDir, Collection columnfamilyFiles) { + final Path destDir = Paths.get(columnfamilyDir.getAbsolutePath(), LOST_FOUND); + for (File file : columnfamilyFiles) { + logger.warn( + "Forgotten file: {} found for CF: {}", + file.getAbsolutePath(), + columnfamilyDir.getName()); + if (config.isForgottenFileMoveEnabled()) { + try { + FileUtils.moveFileToDirectory(file, destDir.toFile(), true); + } catch (IOException e) { + logger.error( + "Exception occurred while trying to move forgottenFile: {}. Ignoring the error and continuing with remaining backup/forgotten files.", + file); + e.printStackTrace(); + } + } + } + } +} diff --git a/priam/src/main/java/com/netflix/priam/backupv2/IMetaProxy.java b/priam/src/main/java/com/netflix/priam/backupv2/IMetaProxy.java new file mode 100644 index 000000000..99c52abb2 --- /dev/null +++ b/priam/src/main/java/com/netflix/priam/backupv2/IMetaProxy.java @@ -0,0 +1,97 @@ +/* + * Copyright 2018 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package com.netflix.priam.backupv2; + +import com.netflix.priam.backup.AbstractBackupPath; +import com.netflix.priam.backup.BackupRestoreException; +import com.netflix.priam.backup.BackupVerificationResult; +import com.netflix.priam.utils.DateUtil; +import java.nio.file.Path; +import java.util.Iterator; +import java.util.List; + +/** Proxy to do management tasks for meta files. Created by aagrawal on 12/18/18. */ +public interface IMetaProxy { + + /** + * Path on the local file system where meta file should be stored for processing. + * + * @return location on local file system. + */ + Path getLocalMetaFileDirectory(); + + /** + * Get the prefix for the manifest file. This will depend on the configuration, if restore + * prefix is set. + * + * @param dateRange date range for which we are trying to find manifest files. + * @return prefix for the manifest files. + */ + String getMetaPrefix(DateUtil.DateRange dateRange); + + /** + * Fetch the list of all manifest files on the remote file system for the provided valid + * daterange. + * + * @param dateRange the time period to scan in the remote file system for meta files. + * @return List of all the manifest files from the remote file system. + */ + List findMetaFiles(DateUtil.DateRange dateRange); + + /** + * Download the meta file to disk. + * + * @param meta AbstractBackupPath denoting the meta file on remote file system. + * @return the location of the meta file on disk after downloading from remote file system. + * @throws BackupRestoreException if unable to download for any reason. + */ + Path downloadMetaFile(AbstractBackupPath meta) throws BackupRestoreException; + + /** + * Read the manifest file and give the contents of the file (all the sstable components) as + * list. + * + * @param localMetaPath location of the manifest file on disk. + * @return list containing all the remote locations of sstable components. + * @throws Exception if file is not found on local system or is corrupt. 
+ */ + List getSSTFilesFromMeta(Path localMetaPath) throws Exception; + + /** + * Get the list of incremental files given the daterange. + * + * @param dateRange the time period to scan in the remote file system for incremental files. + * @return iterator containing the list of path on the remote file system satisfying criteria. + * @throws BackupRestoreException if there is an issue contacting remote file system. + */ + Iterator getIncrementals(DateUtil.DateRange dateRange) + throws BackupRestoreException; + + /** + * Validate that all the files mentioned in the meta file actually exists on remote file system. + * + * @param metaBackupPath Path to the remote meta file. + * @return backupVerificationResult containing the information like valid - if all the files + * mentioned in meta file are present on remote file system. It will return false in case of + * any error. + */ + BackupVerificationResult isMetaFileValid(AbstractBackupPath metaBackupPath); + + /** Delete the old meta files, if any present in the metaFileDirectory */ + void cleanupOldMetaFiles(); +} diff --git a/priam/src/main/java/com/netflix/priam/backupv2/LocalDBReaderWriter.java b/priam/src/main/java/com/netflix/priam/backupv2/LocalDBReaderWriter.java deleted file mode 100644 index 154c25af1..000000000 --- a/priam/src/main/java/com/netflix/priam/backupv2/LocalDBReaderWriter.java +++ /dev/null @@ -1,302 +0,0 @@ -/* - * Copyright 2018 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package com.netflix.priam.backupv2; - -import com.google.inject.Inject; -import com.netflix.priam.config.IConfiguration; -import com.netflix.priam.utils.GsonJsonSerializer; -import java.io.File; -import java.io.FileReader; -import java.io.FileWriter; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.time.Instant; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class is used to create a local DB entry for SSTable components. This will be used to - * identify when a version of a SSTable component was last uploaded or last referenced. This db - * entry will be used to enforce the TTL of a version of a SSTable component as we will not be - * relying on backup file system level TTL. Local DB should be copied over to new instance replacing - * this token in case of instance replacement. If no local DB is found then Priam will try to - * re-create the local DB using meta files uploaded to backup file system. The check operation for - * local DB is done at every start of Priam and when operator requests to re-build the local DB. - * Created by aagrawal on 8/31/18. 
- */ -public class LocalDBReaderWriter { - private static final Logger logger = LoggerFactory.getLogger(LocalDBReaderWriter.class); - private final IConfiguration configuration; - public static final String LOCAL_DB = "localdb"; - - @Inject - public LocalDBReaderWriter(IConfiguration configuration) { - this.configuration = configuration; - } - - public synchronized LocalDB upsertLocalDBEntry(final LocalDBEntry localDBEntry) - throws Exception { - // validate the localDBEntry first - if (localDBEntry.getTimeLastReferenced() == null) - throw new Exception("Time last referenced in localDB can never be null"); - - if (localDBEntry.getBackupTime() == null) - throw new Exception("Backup time for localDB can never be null"); - - final Path localDBFile = getLocalDBPath(localDBEntry.getFileUploadResult()); - - localDBFile.getParent().toFile().mkdirs(); - - LocalDB localDB = readAndGetLocalDB(localDBEntry.getFileUploadResult()); - - if (localDB == null) localDB = new LocalDB(new ArrayList<>()); - - // Verify again if someone beat you to write the entry. - LocalDBEntry entry = getLocalDBEntry(localDBEntry.getFileUploadResult(), localDB); - // Write entry as it might be either - - // 1. new component entry - // 2. new version of file (change in compression type or file is modified e.g. stats file) - if (entry == null) { - localDB.getLocalDBEntries().add(localDBEntry); - writeLocalDB(localDBFile, localDB); - } else { - // An entry already exists. Maybe last referenced time or backup time changed. We want - // to write the last time referenced. 
- entry.setBackupTime(localDBEntry.getBackupTime()); - entry.setTimeLastReferenced(localDBEntry.getTimeLastReferenced()); - writeLocalDB(localDBFile, localDB); - } - - return localDB; - } - - private LocalDB readAndGetLocalDB(final FileUploadResult fileUploadResult) throws Exception { - final Path localDbPath = getLocalDBPath(fileUploadResult); - return readLocalDB(localDbPath); - } - - /** - * Get the local database entry for a given file upload result. - * - * @param fileUploadResult File upload result for which local db is required. - * @return LocalDBEntry if one exists. - * @throws Exception if there is any error in getting local db file. - */ - public LocalDBEntry getLocalDBEntry(final FileUploadResult fileUploadResult) throws Exception { - LocalDB localDB = readAndGetLocalDB(fileUploadResult); - return getLocalDBEntry(fileUploadResult, localDB); - } - - private LocalDBEntry getLocalDBEntry( - final FileUploadResult fileUploadResult, final LocalDB localDB) throws Exception { - if (localDB == null - || localDB.getLocalDBEntries() == null - || localDB.getLocalDBEntries().isEmpty()) return null; - - // Get local db entry for same file and version. - List localDBEntries = - localDB.getLocalDBEntries() - .stream() - .filter( - localDBEntry -> - // Name of the file should be same. - (localDBEntry - .getFileUploadResult() - .getFileName() - .toFile() - .getName() - .toLowerCase() - .equals( - fileUploadResult - .getFileName() - .toFile() - .getName() - .toLowerCase()))) - // Should be same version (same last modified time) - .filter( - localDBEntry -> - (localDBEntry - .getFileUploadResult() - .getLastModifiedTime() - .equals(fileUploadResult.getLastModifiedTime()))) - // Same compression as before. If we switch compression technique we can - // upload again. 
- .filter( - localDBEntry -> - (localDBEntry - .getFileUploadResult() - .getCompression() - .equals(fileUploadResult.getCompression()))) - .collect(Collectors.toList()); - - if (localDBEntries.isEmpty()) return null; - - if (localDBEntries.size() == 1) { - if (logger.isDebugEnabled()) - logger.debug("Local entry found: {}", localDBEntries.get(0)); - - return localDBEntries.get(0); - } - - throw new Exception( - "Unexpected behavior: More than one entry found in local database for the same file. FileUploadResult: " - + fileUploadResult); - } - - /** - * Gets the local db path on the local file system. - * - * @param fileUploadResult This contains the SSTable component for which local db path is - * required. - * @return the local db path on local file system. - */ - public Path getLocalDBPath(final FileUploadResult fileUploadResult) { - return Paths.get( - configuration.getDataFileLocation(), - LOCAL_DB, - fileUploadResult.getKeyspaceName(), - fileUploadResult.getColumnFamilyName(), - PrefixGenerator.getSSTFileBase(fileUploadResult.getFileName().toFile().getName()) - + ".localdb"); - } - - /** - * Writes the local database to the local db file. This will do a complete replace of existing - * local database if any. - * - * @param localDBFile path to the local database file. - * @param localDB local database containing all the entries to the local database. - * @throws Exception If the path denoted is directory, write permission issues or any other - * exceptions. 
- */ - public void writeLocalDB(final Path localDBFile, final LocalDB localDB) throws Exception { - if (localDB == null || localDBFile == null || localDBFile.toFile().isDirectory()) - throw new Exception( - "Invalid Arguments: localDbFile: " + localDBFile + ", localDB: " + localDB); - - if (!localDBFile.getParent().toFile().exists()) localDBFile.getParent().toFile().mkdirs(); - - File tmpFile = - File.createTempFile( - localDBFile.toFile().getName(), ".tmp", localDBFile.getParent().toFile()); - try (FileWriter writer = new FileWriter(tmpFile)) { - writer.write(localDB.toString()); - - // Atomically swap out the new local db file for the old local db file. - if (!tmpFile.renameTo(localDBFile.toFile())) - logger.error("Failed to persist local db: {}", localDB); - } finally { - if (tmpFile != null) Files.deleteIfExists(tmpFile.toPath()); - } - } - - /** - * Reads the local database file stored on local file system. - * - * @param localDBFile path the local database. - * @return local database if file exists or empty local database. - * @throws Exception If there is any error in de-serializing the object or any other file system - * errors. 
- */ - public LocalDB readLocalDB(final Path localDBFile) throws Exception { - // Verify file exists - if (!localDBFile.toFile().exists()) return new LocalDB(new ArrayList<>()); - - try (FileReader reader = new FileReader(localDBFile.toFile())) { - LocalDB localDB = GsonJsonSerializer.getGson().fromJson(reader, LocalDB.class); - String columnfamilyName = localDBFile.getParent().toFile().getName(); - String keyspaceName = localDBFile.getParent().getParent().toFile().getName(); - localDB.getLocalDBEntries() - .forEach( - localDBEntry -> { - localDBEntry - .getFileUploadResult() - .setColumnFamilyName(columnfamilyName); - localDBEntry.getFileUploadResult().setKeyspaceName(keyspaceName); - }); - - if (logger.isDebugEnabled()) logger.debug("Local DB: {}", localDB); - return localDB; - } - } - - static class LocalDB { - private final List localDBEntries; - - public LocalDB(List localDBEntries) { - this.localDBEntries = localDBEntries; - } - - public List getLocalDBEntries() { - - return localDBEntries; - } - - @Override - public String toString() { - return GsonJsonSerializer.getGson().toJson(this); - } - } - - static class LocalDBEntry { - private final FileUploadResult fileUploadResult; - private Instant timeLastReferenced; - private Instant backupTime; - - public LocalDBEntry(FileUploadResult fileUploadResult) { - this.fileUploadResult = fileUploadResult; - } - - public FileUploadResult getFileUploadResult() { - return fileUploadResult; - } - - public Instant getTimeLastReferenced() { - return timeLastReferenced; - } - - public void setTimeLastReferenced(Instant timeLastReferenced) { - this.timeLastReferenced = timeLastReferenced; - } - - public Instant getBackupTime() { - return backupTime; - } - - public void setBackupTime(Instant backupTime) { - this.backupTime = backupTime; - } - - public LocalDBEntry( - FileUploadResult fileUploadResult, Instant timeLastReferenced, Instant backupTime) { - - this.fileUploadResult = fileUploadResult; - this.timeLastReferenced = 
timeLastReferenced; - this.backupTime = backupTime; - } - - @Override - public String toString() { - return GsonJsonSerializer.getGson().toJson(this); - } - } -} diff --git a/priam/src/main/java/com/netflix/priam/backupv2/MetaFileManager.java b/priam/src/main/java/com/netflix/priam/backupv2/MetaFileManager.java deleted file mode 100644 index 415dd5e97..000000000 --- a/priam/src/main/java/com/netflix/priam/backupv2/MetaFileManager.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2018 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package com.netflix.priam.backupv2; - -import com.netflix.priam.config.IConfiguration; -import java.io.File; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collection; -import javax.inject.Inject; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.filefilter.FileFilterUtils; -import org.apache.commons.io.filefilter.IOFileFilter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** Do any management task for meta files. Created by aagrawal on 8/2/18. 
*/ -public class MetaFileManager { - private static final Logger logger = LoggerFactory.getLogger(MetaFileManager.class); - private final Path metaFileDirectory; - - @Inject - MetaFileManager(IConfiguration configuration) { - metaFileDirectory = Paths.get(configuration.getDataFileLocation()); - } - - public Path getMetaFileDirectory() { - return metaFileDirectory; - } - - /** Delete the old meta files, if any present in the metaFileDirectory */ - public void cleanupOldMetaFiles() { - logger.info("Deleting any old META_V2 files if any"); - IOFileFilter fileNameFilter = - FileFilterUtils.and( - FileFilterUtils.prefixFileFilter(MetaFileInfo.META_FILE_PREFIX), - FileFilterUtils.or( - FileFilterUtils.suffixFileFilter(MetaFileInfo.META_FILE_SUFFIX), - FileFilterUtils.suffixFileFilter( - MetaFileInfo.META_FILE_SUFFIX + ".tmp"))); - Collection files = - FileUtils.listFiles(metaFileDirectory.toFile(), fileNameFilter, null); - files.stream() - .filter(File::isFile) - .forEach( - file -> { - logger.debug( - "Deleting old META_V2 file found: {}", file.getAbsolutePath()); - file.delete(); - }); - } -} diff --git a/priam/src/main/java/com/netflix/priam/backupv2/MetaFileWriterBuilder.java b/priam/src/main/java/com/netflix/priam/backupv2/MetaFileWriterBuilder.java index 750f00029..4e0c259c9 100644 --- a/priam/src/main/java/com/netflix/priam/backupv2/MetaFileWriterBuilder.java +++ b/priam/src/main/java/com/netflix/priam/backupv2/MetaFileWriterBuilder.java @@ -31,6 +31,7 @@ import java.util.ArrayList; import java.util.List; import javax.inject.Inject; +import javax.inject.Named; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -76,8 +77,9 @@ public static class MetaFileWriter implements StartStep, DataStep, UploadStep { private final IBackupFileSystem backupFileSystem; private final MetaFileInfo metaFileInfo; - private final MetaFileManager metaFileManager; + private final IMetaProxy metaProxy; private JsonWriter jsonWriter; + private Instant snapshotInstant; private Path 
metaFilePath; @Inject @@ -86,10 +88,10 @@ private MetaFileWriter( InstanceIdentity instanceIdentity, Provider pathFactory, IFileSystemContext backupFileSystemCtx, - MetaFileManager metaFileManager) { + @Named("v2") IMetaProxy metaProxy) { this.pathFactory = pathFactory; this.backupFileSystem = backupFileSystemCtx.getFileStrategy(configuration); - this.metaFileManager = metaFileManager; + this.metaProxy = metaProxy; List backupIdentifier = new ArrayList<>(); backupIdentifier.add(instanceIdentity.getInstance().getToken()); metaFileInfo = @@ -107,10 +109,11 @@ private MetaFileWriter( */ public DataStep startMetaFileGeneration(Instant snapshotInstant) throws IOException { // Compute meta file name. + this.snapshotInstant = snapshotInstant; String fileName = MetaFileInfo.getMetaFileName(snapshotInstant); - metaFilePath = Paths.get(metaFileManager.getMetaFileDirectory().toString(), fileName); + metaFilePath = Paths.get(metaProxy.getLocalMetaFileDirectory().toString(), fileName); Path tempMetaFilePath = - Paths.get(metaFileManager.getMetaFileDirectory().toString(), fileName + ".tmp"); + Paths.get(metaProxy.getLocalMetaFileDirectory().toString(), fileName + ".tmp"); logger.info("Starting to write a new meta file: {}", metaFilePath); @@ -159,11 +162,16 @@ public MetaFileWriterBuilder.UploadStep endMetaFileGeneration() throws IOExcepti Path tempMetaFilePath = Paths.get( - metaFileManager.getMetaFileDirectory().toString(), + metaProxy.getLocalMetaFileDirectory().toString(), metaFilePath.toFile().getName() + ".tmp"); // Rename the tmp file. tempMetaFilePath.toFile().renameTo(metaFilePath.toFile()); + + // Set the last modified time to snapshot time as generating manifest file may take some + // time. 
+ metaFilePath.toFile().setLastModified(snapshotInstant.toEpochMilli()); + logger.info("Finished writing to meta file: {}", metaFilePath); return this; diff --git a/priam/src/main/java/com/netflix/priam/backupv2/MetaV1Proxy.java b/priam/src/main/java/com/netflix/priam/backupv2/MetaV1Proxy.java new file mode 100644 index 000000000..cfce6ae81 --- /dev/null +++ b/priam/src/main/java/com/netflix/priam/backupv2/MetaV1Proxy.java @@ -0,0 +1,189 @@ +/* + * Copyright 2018 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package com.netflix.priam.backupv2; + +import com.google.common.collect.Lists; +import com.google.inject.Inject; +import com.netflix.priam.backup.*; +import com.netflix.priam.config.IConfiguration; +import com.netflix.priam.utils.DateUtil; +import java.io.FileReader; +import java.nio.file.InvalidPathException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.temporal.ChronoUnit; +import java.util.*; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.iterators.FilterIterator; +import org.apache.commons.io.FileUtils; +import org.json.simple.parser.JSONParser; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Created by aagrawal on 12/18/18. 
*/ +public class MetaV1Proxy implements IMetaProxy { + private static final Logger logger = LoggerFactory.getLogger(MetaV1Proxy.class); + private final IBackupFileSystem fs; + + @Inject + MetaV1Proxy(IConfiguration configuration, IFileSystemContext backupFileSystemCtx) { + fs = backupFileSystemCtx.getFileStrategy(configuration); + } + + @Override + public Path getLocalMetaFileDirectory() { + return null; + } + + @Override + public String getMetaPrefix(DateUtil.DateRange dateRange) { + return null; + } + + @Override + public List findMetaFiles(DateUtil.DateRange dateRange) { + Date startTime = new Date(dateRange.getStartTime().toEpochMilli()); + Date endTime = new Date(dateRange.getEndTime().toEpochMilli()); + String restorePrefix = fs.getPrefix().toString(); + logger.debug("Looking for snapshot meta file within restore prefix: {}", restorePrefix); + List metas = Lists.newArrayList(); + + Iterator backupfiles = fs.list(restorePrefix, startTime, endTime); + + while (backupfiles.hasNext()) { + AbstractBackupPath path = backupfiles.next(); + if (path.getType() == AbstractBackupPath.BackupFileType.META) + // Since there are now meta file for incrementals as well as snapshot, we need to + // find the correct one (i.e. the snapshot meta file (meta.json)) + if (path.getFileName().equalsIgnoreCase("meta.json")) { + metas.add(path); + } + } + + metas.sort(Collections.reverseOrder()); + + if (metas.size() == 0) { + logger.info( + "No meta v1 file found on remote file system for the time period: {}", + dateRange); + } + + return metas; + } + + @Override + public BackupVerificationResult isMetaFileValid(AbstractBackupPath metaBackupPath) { + BackupVerificationResult result = new BackupVerificationResult(); + result.remotePath = metaBackupPath.getRemotePath(); + result.snapshotInstant = metaBackupPath.getTime().toInstant(); + + try { + // Download the meta file. + Path metaFile = downloadMetaFile(metaBackupPath); + // Read the local meta file. 
+ List metaFileList = getSSTFilesFromMeta(metaFile); + FileUtils.deleteQuietly(metaFile.toFile()); + result.manifestAvailable = true; + + // List the remote file system to validate the backup. + String prefix = fs.getPrefix().toString(); + Date strippedMsSnapshotTime = + new Date(result.snapshotInstant.truncatedTo(ChronoUnit.MINUTES).toEpochMilli()); + Iterator backupfiles = + fs.list(prefix, strippedMsSnapshotTime, strippedMsSnapshotTime); + + // Return validation fail if backup filesystem listing failed. + if (!backupfiles.hasNext()) { + logger.warn( + "ERROR: No files available while doing backup filesystem listing. Declaring the verification failed."); + return result; + } + + // Convert the remote listing to String. + List remoteListing = new ArrayList<>(); + while (backupfiles.hasNext()) { + AbstractBackupPath path = backupfiles.next(); + if (path.getType() == AbstractBackupPath.BackupFileType.SNAP) + remoteListing.add(path.getRemotePath()); + } + + if (metaFileList.isEmpty() && remoteListing.isEmpty()) { + logger.info( + "Uncommon Scenario: Both meta file and backup filesystem listing is empty. Considering this as success"); + result.valid = true; + return result; + } + + ArrayList filesMatched = + (ArrayList) CollectionUtils.intersection(metaFileList, remoteListing); + result.filesMatched = filesMatched.size(); + result.filesInMetaOnly = metaFileList; + result.filesInMetaOnly.removeAll(filesMatched); + + // There could be a scenario that backupfilesystem has more files than meta file. e.g. 
+ // some leftover objects + result.valid = (result.filesInMetaOnly.isEmpty()); + } catch (Exception e) { + logger.error( + "Error while processing meta file: " + metaBackupPath, e.getLocalizedMessage()); + e.printStackTrace(); + } + + return result; + } + + @Override + public Path downloadMetaFile(AbstractBackupPath meta) throws BackupRestoreException { + Path localFile = Paths.get(meta.newRestoreFile().getAbsolutePath() + ".download"); + fs.downloadFile(Paths.get(meta.getRemotePath()), localFile, 10); + return localFile; + } + + @Override + public List getSSTFilesFromMeta(Path localMetaPath) throws Exception { + if (localMetaPath.toFile().isDirectory() || !localMetaPath.toFile().exists()) + throw new InvalidPathException( + localMetaPath.toString(), "Input path is either directory or do not exist"); + + List result = new ArrayList<>(); + JSONParser jsonParser = new JSONParser(); + org.json.simple.JSONArray fileList = + (org.json.simple.JSONArray) + jsonParser.parse(new FileReader(localMetaPath.toFile())); + fileList.forEach(entry -> result.add(entry.toString())); + return result; + } + + @Override + public Iterator getIncrementals(DateUtil.DateRange dateRange) + throws BackupRestoreException { + String prefix = fs.getPrefix().toString(); + Iterator iterator = + fs.list( + prefix, + new Date(dateRange.getStartTime().toEpochMilli()), + new Date(dateRange.getEndTime().toEpochMilli())); + return new FilterIterator<>( + iterator, + abstractBackupPath -> + abstractBackupPath.getType() == AbstractBackupPath.BackupFileType.SST); + } + + @Override + public void cleanupOldMetaFiles() {} +} diff --git a/priam/src/main/java/com/netflix/priam/backupv2/MetaV2Proxy.java b/priam/src/main/java/com/netflix/priam/backupv2/MetaV2Proxy.java new file mode 100644 index 000000000..2a4167b1f --- /dev/null +++ b/priam/src/main/java/com/netflix/priam/backupv2/MetaV2Proxy.java @@ -0,0 +1,255 @@ +/* + * Copyright 2018 Netflix, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package com.netflix.priam.backupv2; + +import com.google.inject.Provider; +import com.netflix.priam.backup.*; +import com.netflix.priam.config.IConfiguration; +import com.netflix.priam.utils.DateUtil; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.*; +import javax.inject.Inject; +import org.apache.commons.collections4.iterators.FilterIterator; +import org.apache.commons.collections4.iterators.TransformIterator; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.filefilter.FileFilterUtils; +import org.apache.commons.io.filefilter.IOFileFilter; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Do any management task for meta files. Created by aagrawal on 8/2/18. 
*/ +public class MetaV2Proxy implements IMetaProxy { + private static final Logger logger = LoggerFactory.getLogger(MetaV2Proxy.class); + private final Path metaFileDirectory; + private final IBackupFileSystem fs; + private final Provider abstractBackupPathProvider; + + @Inject + MetaV2Proxy( + IConfiguration configuration, + IFileSystemContext backupFileSystemCtx, + Provider abstractBackupPathProvider) { + fs = backupFileSystemCtx.getFileStrategy(configuration); + this.abstractBackupPathProvider = abstractBackupPathProvider; + metaFileDirectory = Paths.get(configuration.getDataFileLocation()); + } + + @Override + public Path getLocalMetaFileDirectory() { + return metaFileDirectory; + } + + @Override + public String getMetaPrefix(DateUtil.DateRange dateRange) { + return getMatch(dateRange, AbstractBackupPath.BackupFileType.META_V2); + } + + private String getMatch( + DateUtil.DateRange dateRange, AbstractBackupPath.BackupFileType backupFileType) { + Path location = fs.getPrefix(); + AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get(); + String match = StringUtils.EMPTY; + if (dateRange != null) match = dateRange.match(); + if (dateRange != null && dateRange.getEndTime() == null) + match = dateRange.getStartTime().toEpochMilli() + ""; + return Paths.get( + abstractBackupPath.remoteV2Prefix(location, backupFileType).toString(), + match) + .toString(); + } + + @Override + public Iterator getIncrementals(DateUtil.DateRange dateRange) + throws BackupRestoreException { + String incrementalPrefix = getMatch(dateRange, AbstractBackupPath.BackupFileType.SST_V2); + String marker = + getMatch( + new DateUtil.DateRange(dateRange.getStartTime(), null), + AbstractBackupPath.BackupFileType.SST_V2); + logger.info( + "Listing filesystem with prefix: {}, marker: {}, daterange: {}", + incrementalPrefix, + marker, + dateRange); + Iterator iterator = fs.listFileSystem(incrementalPrefix, null, marker); + Iterator transformIterator = + new TransformIterator<>( + 
iterator, + s -> { + AbstractBackupPath path = abstractBackupPathProvider.get(); + path.parseRemote(s); + return path; + }); + + return new FilterIterator<>( + transformIterator, + abstractBackupPath -> + (abstractBackupPath.getLastModified().isAfter(dateRange.getStartTime()) + && abstractBackupPath + .getLastModified() + .isBefore(dateRange.getEndTime())) + || abstractBackupPath + .getLastModified() + .equals(dateRange.getStartTime()) + || abstractBackupPath + .getLastModified() + .equals(dateRange.getEndTime())); + } + + @Override + public List findMetaFiles(DateUtil.DateRange dateRange) { + ArrayList metas = new ArrayList<>(); + String prefix = getMetaPrefix(dateRange); + String marker = getMetaPrefix(new DateUtil.DateRange(dateRange.getStartTime(), null)); + logger.info( + "Listing filesystem with prefix: {}, marker: {}, daterange: {}", + prefix, + marker, + dateRange); + Iterator iterator = fs.listFileSystem(prefix, null, marker); + + while (iterator.hasNext()) { + AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get(); + abstractBackupPath.parseRemote(iterator.next()); + logger.debug("Meta file found: {}", abstractBackupPath); + if (abstractBackupPath.getLastModified().toEpochMilli() + >= dateRange.getStartTime().toEpochMilli() + && abstractBackupPath.getLastModified().toEpochMilli() + <= dateRange.getEndTime().toEpochMilli()) { + metas.add(abstractBackupPath); + } + } + + metas.sort(Collections.reverseOrder()); + + if (metas.size() == 0) { + logger.info( + "No meta file found on remote file system for the time period: {}", dateRange); + } + + return metas; + } + + @Override + public Path downloadMetaFile(AbstractBackupPath meta) throws BackupRestoreException { + Path localFile = Paths.get(meta.newRestoreFile().getAbsolutePath()); + fs.downloadFile(Paths.get(meta.getRemotePath()), localFile, 10); + return localFile; + } + + @Override + public void cleanupOldMetaFiles() { + logger.info("Deleting any old META_V2 files if any"); + IOFileFilter 
fileNameFilter = + FileFilterUtils.and( + FileFilterUtils.prefixFileFilter(MetaFileInfo.META_FILE_PREFIX), + FileFilterUtils.or( + FileFilterUtils.suffixFileFilter(MetaFileInfo.META_FILE_SUFFIX), + FileFilterUtils.suffixFileFilter( + MetaFileInfo.META_FILE_SUFFIX + ".tmp"))); + Collection files = + FileUtils.listFiles(metaFileDirectory.toFile(), fileNameFilter, null); + files.stream() + .filter(File::isFile) + .forEach( + file -> { + logger.debug( + "Deleting old META_V2 file found: {}", file.getAbsolutePath()); + file.delete(); + }); + } + + @Override + public List getSSTFilesFromMeta(Path localMetaPath) throws Exception { + MetaFileBackupWalker metaFileBackupWalker = new MetaFileBackupWalker(); + metaFileBackupWalker.readMeta(localMetaPath); + return metaFileBackupWalker.backupRemotePaths; + } + + @Override + public BackupVerificationResult isMetaFileValid(AbstractBackupPath metaBackupPath) { + MetaFileBackupValidator metaFileBackupValidator = new MetaFileBackupValidator(); + BackupVerificationResult result = metaFileBackupValidator.verificationResult; + result.remotePath = metaBackupPath.getRemotePath(); + result.snapshotInstant = metaBackupPath.getLastModified(); + + Path metaFile = null; + try { + metaFile = downloadMetaFile(metaBackupPath); + result.manifestAvailable = true; + + metaFileBackupValidator.readMeta(metaFile); + result.valid = (result.filesInMetaOnly.isEmpty()); + } catch (FileNotFoundException fne) { + logger.error(fne.getLocalizedMessage()); + } catch (IOException ioe) { + logger.error( + "IO Error while processing meta file: " + metaFile, ioe.getLocalizedMessage()); + ioe.printStackTrace(); + } catch (BackupRestoreException bre) { + logger.error("Error while trying to download the manifest file: {}", metaBackupPath); + } finally { + if (metaFile != null) FileUtils.deleteQuietly(metaFile.toFile()); + } + return result; + } + + private class MetaFileBackupValidator extends MetaFileReader { + private BackupVerificationResult verificationResult = 
new BackupVerificationResult(); + + @Override + public void process(ColumnfamilyResult columnfamilyResult) { + for (ColumnfamilyResult.SSTableResult ssTableResult : + columnfamilyResult.getSstables()) { + for (FileUploadResult fileUploadResult : ssTableResult.getSstableComponents()) { + try { + if (fs.doesRemoteFileExist(Paths.get(fileUploadResult.getBackupPath()))) { + verificationResult.filesMatched++; + } else { + verificationResult.filesInMetaOnly.add( + fileUploadResult.getBackupPath()); + } + } catch (BackupRestoreException e) { + // For any error, mark that file is not available. + verificationResult.valid = false; + break; + } + } + } + } + } + + private class MetaFileBackupWalker extends MetaFileReader { + private List backupRemotePaths = new ArrayList<>(); + + @Override + public void process(ColumnfamilyResult columnfamilyResult) { + for (ColumnfamilyResult.SSTableResult ssTableResult : + columnfamilyResult.getSstables()) { + for (FileUploadResult fileUploadResult : ssTableResult.getSstableComponents()) { + backupRemotePaths.add(fileUploadResult.getBackupPath()); + } + } + } + } +} diff --git a/priam/src/main/java/com/netflix/priam/cli/Restorer.java b/priam/src/main/java/com/netflix/priam/cli/Restorer.java index 77cb6113e..742fb9166 100644 --- a/priam/src/main/java/com/netflix/priam/cli/Restorer.java +++ b/priam/src/main/java/com/netflix/priam/cli/Restorer.java @@ -16,9 +16,9 @@ */ package com.netflix.priam.cli; -import com.netflix.priam.backup.AbstractBackupPath; import com.netflix.priam.restore.Restore; -import java.util.Date; +import com.netflix.priam.utils.DateUtil; +import java.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,20 +32,17 @@ static void displayHelp() { public static void main(String[] args) { try { Application.initialize(); - - Date startTime, endTime; + Instant startTime, endTime; if (args.length < 2) { displayHelp(); return; } - AbstractBackupPath path = - 
Application.getInjector().getInstance(AbstractBackupPath.class); - startTime = path.parseDate(args[0]); - endTime = path.parseDate(args[1]); + startTime = DateUtil.parseInstant(args[0]); + endTime = DateUtil.parseInstant(args[1]); Restore restorer = Application.getInjector().getInstance(Restore.class); try { - restorer.restore(startTime, endTime); + restorer.restore(new DateUtil.DateRange(startTime, endTime)); } catch (Exception e) { logger.error("Unable to restore: ", e); } diff --git a/priam/src/main/java/com/netflix/priam/cluster/management/Flush.java b/priam/src/main/java/com/netflix/priam/cluster/management/Flush.java index 3379772de..3f53cec6c 100644 --- a/priam/src/main/java/com/netflix/priam/cluster/management/Flush.java +++ b/priam/src/main/java/com/netflix/priam/cluster/management/Flush.java @@ -18,7 +18,6 @@ import com.netflix.priam.merics.NodeToolFlushMeasurement; import com.netflix.priam.scheduler.CronTimer; import com.netflix.priam.scheduler.TaskTimer; -import com.netflix.priam.scheduler.UnsupportedTypeException; import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -105,56 +104,12 @@ private void deriveKeyspaces() throws Exception { * Timer to be used for flush interval. * * @param config {@link IConfiguration} to get configuration details from priam. - * @return the timer to be used for flush interval. - *

If {@link IConfiguration#getFlushSchedulerType()} is {@link - * com.netflix.priam.scheduler.SchedulerType#HOUR} then it expects {@link - * IConfiguration#getFlushInterval()} in the format of hour=x or daily=x - *

If {@link IConfiguration#getFlushSchedulerType()} is {@link - * com.netflix.priam.scheduler.SchedulerType#CRON} then it expects a valid CRON expression - from {@link IConfiguration#getFlushCronExpression()} - * @throws Exception if the configurations are wrong. .e.g invalid cron expression. + * @return the timer to be used for flush interval from {@link + * IConfiguration#getFlushCronExpression()} + * @throws Exception If the cron expression is invalid. */ public static TaskTimer getTimer(IConfiguration config) throws Exception { - CronTimer cronTimer = null; - switch (config.getFlushSchedulerType()) { - case HOUR: - String timerVal = config.getFlushInterval(); // e.g. hour=0 or daily=10 - if (timerVal == null) return null; - String s[] = timerVal.split("="); - if (s.length != 2) { - throw new IllegalArgumentException( - "Flush interval format is invalid. Expecting name=value, received: " - + timerVal); - } - String name = s[0].toUpperCase(); - Integer time = new Integer(s[1]); - switch (name) { - case "HOUR": - cronTimer = - new CronTimer( - Task.FLUSH.name(), time, 0); // minute, sec after each hour - break; - case "DAILY": - cronTimer = - new CronTimer( - Task.FLUSH.name(), - time, - 0, - 0); // hour, minute, sec to run on a daily basis - break; - default: - throw new UnsupportedTypeException( - "Flush interval type is invalid. 
Expecting \"hour, daily\", received: " - + name); - } - - break; - case CRON: - cronTimer = - CronTimer.getCronTimer(Task.FLUSH.name(), config.getFlushCronExpression()); - break; - } - return cronTimer; + return CronTimer.getCronTimer(Task.FLUSH.name(), config.getFlushCronExpression()); } } diff --git a/priam/src/main/java/com/netflix/priam/config/BackupRestoreConfig.java b/priam/src/main/java/com/netflix/priam/config/BackupRestoreConfig.java index a0e0957ac..aae6b6739 100644 --- a/priam/src/main/java/com/netflix/priam/config/BackupRestoreConfig.java +++ b/priam/src/main/java/com/netflix/priam/config/BackupRestoreConfig.java @@ -35,4 +35,14 @@ public String getSnapshotMetaServiceCronExpression() { public boolean enableV2Backups() { return config.get("priam.enableV2Backups", false); } + + @Override + public boolean enableV2Restore() { + return config.get("priam.enableV2Restore", false); + } + + @Override + public String getBackupTTLCronExpression() { + return config.get("priam.backupTTLCronExpression", "0 0 0/6 1/1 * ? *"); + } } diff --git a/priam/src/main/java/com/netflix/priam/config/IBackupRestoreConfig.java b/priam/src/main/java/com/netflix/priam/config/IBackupRestoreConfig.java index 86d366a0e..bfdfbca4f 100644 --- a/priam/src/main/java/com/netflix/priam/config/IBackupRestoreConfig.java +++ b/priam/src/main/java/com/netflix/priam/config/IBackupRestoreConfig.java @@ -35,8 +35,8 @@ default String getSnapshotMetaServiceCronExpression() { } /** - * Enable the backup version 2.0 in new format. This will start uploading of backups in new - * format. This is to be used for migration from backup version 1.0. + * Enable the backup version 2.0 in new format. This will start uploads of "incremental" backups + * in new format. This is to be used for migration from backup version 1.0. * * @return boolean value indicating if backups in version 2.0 should be started. 
*/ @@ -59,4 +59,14 @@ default boolean enableV2Backups() { default String getBackupTTLCronExpression() { return "0 0 0/6 1/1 * ? *"; } + + /** + * If restore is enabled and if this flag is enabled, we will try to restore using Backup V2.0. + * + * @return if restore should be using backup version 2.0. If this is false we will use backup + * version 1.0. + */ + default boolean enableV2Restore() { + return false; + } } diff --git a/priam/src/main/java/com/netflix/priam/config/IConfiguration.java b/priam/src/main/java/com/netflix/priam/config/IConfiguration.java index d473ac3c5..b5def0c87 100644 --- a/priam/src/main/java/com/netflix/priam/config/IConfiguration.java +++ b/priam/src/main/java/com/netflix/priam/config/IConfiguration.java @@ -19,7 +19,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.ImplementedBy; -import com.netflix.priam.scheduler.SchedulerType; import com.netflix.priam.scheduler.UnsupportedTypeException; import com.netflix.priam.tuner.GCType; import java.io.File; @@ -297,16 +296,6 @@ default String getCompactionExcludeCFList() { return null; } - /** - * @return Backup hour for snapshot backups (0 - 23) - * @deprecated Use the {{@link #getBackupCronExpression()}} instead. Scheduled for deletion in - * Dec 2018. - */ - @Deprecated - default int getBackupHour() { - return 12; - } - /** * Cron expression to be used for snapshot backups. * @@ -319,18 +308,6 @@ default String getBackupCronExpression() { return "0 0 12 1/1 * ? *"; } - /** - * Backup scheduler type to use for backup. - * - * @return Type of scheduler to use for backup. Note the default is TIMER based i.e. to use - * {@link #getBackupHour()}. If value of "CRON" is provided it starts using {@link - * #getBackupCronExpression()}. - * @throws UnsupportedTypeException if the scheduler type is not CRON/HOUR. 
- */ - default SchedulerType getBackupSchedulerType() throws UnsupportedTypeException { - return SchedulerType.HOUR; - } - /** * Column Family(ies), comma delimited, to include during snapshot backup. Note 1: The expected * format is keyspace.cfname. If no value is provided then snapshot contains all KS,CF(s) Note @@ -895,31 +872,6 @@ default String getFlushKeyspaces() { return StringUtils.EMPTY; } - /** - * Interval to be used for flush. - * - * @return the interval to run the flush task. Format is name=value where “name” is an enum of - * hour, daily, value is ... - * @deprecated Use the {{@link #getFlushCronExpression()} instead. This is set for deletion in - * Dec 2018. - */ - @Deprecated - default String getFlushInterval() { - return null; - } - - /** - * Scheduler type to use for flush. Default: HOUR. - * - * @return Type of scheduler to use for flush. Note the default is TIMER based i.e. to use - * {@link #getFlushInterval()}. If value of "CRON" is provided it starts using {@link - * #getFlushCronExpression()}. - * @throws UnsupportedTypeException if the scheduler type is not HOUR/CRON. - */ - default SchedulerType getFlushSchedulerType() throws UnsupportedTypeException { - return SchedulerType.HOUR; - } - /** * Cron expression to be used for flush. Use "-1" to disable the CRON. Default: -1 * @@ -1028,6 +980,19 @@ default int getForgottenFileGracePeriodDays() { return 1; } + /** + * If any forgotten file is found in Cassandra, it is usually good practice to move/delete them + * so when cassandra restarts, it does not load old data which should be removed else you may + * run into data resurrection issues. This behavior is fixed in 3.x. This configuration will + * allow Priam to move the forgotten files to a "lost_found" directory for user to review at + * later time at the same time ensuring that Cassandra does not resurrect data. + * + * @return true if Priam should move forgotten file to "lost_found" directory of that CF. 
+ */ + default boolean isForgottenFileMoveEnabled() { + return false; + } + /** * A method for allowing access to outside programs to Priam configuration when paired with the * Priam configuration HTTP endpoint at /v1/config/structured/all/property diff --git a/priam/src/main/java/com/netflix/priam/config/PriamConfiguration.java b/priam/src/main/java/com/netflix/priam/config/PriamConfiguration.java index 24a356e01..09704ec6d 100644 --- a/priam/src/main/java/com/netflix/priam/config/PriamConfiguration.java +++ b/priam/src/main/java/com/netflix/priam/config/PriamConfiguration.java @@ -21,7 +21,6 @@ import com.google.inject.Singleton; import com.netflix.priam.configSource.IConfigSource; import com.netflix.priam.identity.config.InstanceInfo; -import com.netflix.priam.scheduler.SchedulerType; import com.netflix.priam.scheduler.UnsupportedTypeException; import com.netflix.priam.tuner.GCType; import java.io.File; @@ -213,24 +212,11 @@ public String getMaxDirectMemory() { (PRIAM_PRE + ".direct.memory.size.") + instanceInfo.getInstanceType(), "50G"); } - @Override - public int getBackupHour() { - return config.get(PRIAM_PRE + ".backup.hour", 12); - } - @Override public String getBackupCronExpression() { return config.get(PRIAM_PRE + ".backup.cron", "0 0 12 1/1 * ? 
*"); // Backup daily at 12 } - @Override - public SchedulerType getBackupSchedulerType() throws UnsupportedTypeException { - String schedulerType = - config.get( - PRIAM_PRE + ".backup.schedule.type", SchedulerType.HOUR.getSchedulerType()); - return SchedulerType.lookup(schedulerType); - } - @Override public GCType getGCType() throws UnsupportedTypeException { String gcType = config.get(PRIAM_PRE + ".gc.type", GCType.CMS.getGcType()); @@ -247,14 +233,6 @@ public String getJVMUpsertSet() { return config.get(PRIAM_PRE + ".jvm.options.upsert"); } - @Override - public SchedulerType getFlushSchedulerType() throws UnsupportedTypeException { - String schedulerType = - config.get( - PRIAM_PRE + ".flush.schedule.type", SchedulerType.HOUR.getSchedulerType()); - return SchedulerType.lookup(schedulerType); - } - @Override public String getFlushCronExpression() { return config.get(PRIAM_PRE + ".flush.cron", "-1"); @@ -694,11 +672,6 @@ public String getFlushKeyspaces() { return config.get(PRIAM_PRE + ".flush.keyspaces"); } - @Override - public String getFlushInterval() { - return config.get(PRIAM_PRE + ".flush.interval"); - } - @Override public String getBackupStatusFileLoc() { return config.get( @@ -765,4 +738,14 @@ public String getMergedConfigurationCronExpression() { // Every minute on the top of the minute. return config.get(PRIAM_PRE + ".configMerge.cron", "0 * * * * ? 
*"); } + + @Override + public int getForgottenFileGracePeriodDays() { + return config.get(PRIAM_PRE + ".forgottenFileGracePeriodDays", 1); + } + + @Override + public boolean isForgottenFileMoveEnabled() { + return config.get(PRIAM_PRE + ".forgottenFileMoveEnabled", false); + } } diff --git a/priam/src/main/java/com/netflix/priam/defaultimpl/PriamGuiceModule.java b/priam/src/main/java/com/netflix/priam/defaultimpl/PriamGuiceModule.java index a351ed42c..0f68962ac 100644 --- a/priam/src/main/java/com/netflix/priam/defaultimpl/PriamGuiceModule.java +++ b/priam/src/main/java/com/netflix/priam/defaultimpl/PriamGuiceModule.java @@ -25,6 +25,9 @@ import com.netflix.priam.aws.auth.IS3Credential; import com.netflix.priam.aws.auth.S3RoleAssumptionCredential; import com.netflix.priam.backup.IBackupFileSystem; +import com.netflix.priam.backupv2.IMetaProxy; +import com.netflix.priam.backupv2.MetaV1Proxy; +import com.netflix.priam.backupv2.MetaV2Proxy; import com.netflix.priam.cred.ICredential; import com.netflix.priam.cred.ICredentialGeneric; import com.netflix.priam.cryptography.IFileCryptography; @@ -67,6 +70,8 @@ protected void configure() { bind(ICredentialGeneric.class) .annotatedWith(Names.named("pgpcredential")) .to(PgpCredential.class); + bind(IMetaProxy.class).annotatedWith(Names.named("v1")).to(MetaV1Proxy.class); + bind(IMetaProxy.class).annotatedWith(Names.named("v2")).to(MetaV2Proxy.class); bind(Registry.class).toInstance(new NoopRegistry()); } } diff --git a/priam/src/main/java/com/netflix/priam/resources/BackupServlet.java b/priam/src/main/java/com/netflix/priam/resources/BackupServlet.java index 5d14c0eec..37b012a96 100644 --- a/priam/src/main/java/com/netflix/priam/resources/BackupServlet.java +++ b/priam/src/main/java/com/netflix/priam/resources/BackupServlet.java @@ -25,15 +25,11 @@ import com.netflix.priam.scheduler.PriamScheduler; import com.netflix.priam.utils.DateUtil; import com.netflix.priam.utils.SystemUtils; -import java.util.ArrayList; -import 
java.util.Date; -import java.util.Iterator; -import java.util.List; +import java.util.*; import java.util.stream.Collectors; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import org.apache.commons.io.FileUtils; import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; @@ -244,59 +240,31 @@ public Response validateSnapshotByDate(@PathParam("daterange") String daterange) throws Exception { DateUtil.DateRange dateRange = new DateUtil.DateRange(daterange); - - JSONObject jsonReply = new JSONObject(); - jsonReply.put( - "inputStartDate", - DateUtil.formatInstant(DateUtil.yyyyMMddHHmm, dateRange.getStartTime())); - jsonReply.put( - "inputEndDate", - DateUtil.formatInstant(DateUtil.yyyyMMddHHmm, dateRange.getEndTime())); - logger.info( - "Will try to validate latest backup during startTime: {}, and endTime: {}", - dateRange.getStartTime(), - dateRange.getEndTime()); - List metadata = getLatestBackupMetadata(dateRange); - BackupVerificationResult result = - backupVerification.verifyBackup(metadata, Date.from(dateRange.getStartTime())); - jsonReply.put("snapshotAvailable", result.snapshotAvailable); - jsonReply.put("valid", result.valid); - jsonReply.put("backupFileListAvailable", result.backupFileListAvail); - jsonReply.put("metaFileFound", result.metaFileFound); - jsonReply.put("selectedDate", result.selectedDate); - jsonReply.put("snapshotTime", result.snapshotTime); - jsonReply.put("filesInMetaOnly", result.filesInMetaOnly); - jsonReply.put("filesInS3Only", result.filesInS3Only); - jsonReply.put("filesMatched", result.filesMatched); - return Response.ok(jsonReply.toString()).build(); + Optional result = backupVerification.verifyBackup(metadata); + if (!result.isPresent()) { + return Response.noContent() + .entity("No valid meta found for provided time range") + .build(); + } + + return Response.ok(result.get().toString()).build(); } @GET 
@Path("/validate/snapshot/v2/{daterange}") public Response validateV2SnapshotByDate(@PathParam("daterange") String daterange) throws Exception { - try { - DateUtil.DateRange dateRange = new DateUtil.DateRange(daterange); - AbstractBackupPath validMeta = backupValidator.findLatestValidMetaFile(dateRange); - if (validMeta == null) { - return Response.noContent() - .entity("No valid meta found for provided time range") - .build(); - } - - // Delete the file as we don't need it anymore. - FileUtils.deleteQuietly(validMeta.newRestoreFile()); - JSONObject jsonObject = new JSONObject(); - jsonObject.put("daterange", daterange); - jsonObject.put("remoteMetaPath", validMeta.getRemotePath()); - jsonObject.put("valid", true); - return Response.ok(jsonObject.toString()).build(); - } catch (BackupRestoreException ex) { - logger.error( - "Error while trying to fetch the latest valid meta file: {}", ex.getMessage()); - return Response.serverError().build(); + DateUtil.DateRange dateRange = new DateUtil.DateRange(daterange); + Optional result = + backupValidator.findLatestValidMetaFile(dateRange); + if (!result.isPresent()) { + return Response.noContent() + .entity("No valid meta found for provided time range") + .build(); } + + return Response.ok(result.get().toString()).build(); } /* diff --git a/priam/src/main/java/com/netflix/priam/resources/RestoreServlet.java b/priam/src/main/java/com/netflix/priam/resources/RestoreServlet.java index 6b987798c..9451eb33e 100644 --- a/priam/src/main/java/com/netflix/priam/resources/RestoreServlet.java +++ b/priam/src/main/java/com/netflix/priam/resources/RestoreServlet.java @@ -17,7 +17,6 @@ import com.netflix.priam.health.InstanceState; import com.netflix.priam.restore.Restore; import com.netflix.priam.utils.DateUtil; -import java.util.Date; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; @@ -66,7 +65,7 @@ public Response restore(@QueryParam("daterange") String daterange) throws Except "Parameters: {startTime: 
[{}], endTime: [{}]}", dateRange.getStartTime().toString(), dateRange.getEndTime().toString()); - restoreObj.restore(Date.from(dateRange.getStartTime()), Date.from(dateRange.getEndTime())); + restoreObj.restore(dateRange); return Response.ok("[\"ok\"]", MediaType.APPLICATION_JSON).build(); } } diff --git a/priam/src/main/java/com/netflix/priam/restore/AbstractRestore.java b/priam/src/main/java/com/netflix/priam/restore/AbstractRestore.java index 47225d070..574005351 100644 --- a/priam/src/main/java/com/netflix/priam/restore/AbstractRestore.java +++ b/priam/src/main/java/com/netflix/priam/restore/AbstractRestore.java @@ -16,11 +16,12 @@ */ package com.netflix.priam.restore; -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; +import com.google.inject.Inject; import com.google.inject.Provider; import com.netflix.priam.backup.*; import com.netflix.priam.backup.AbstractBackupPath.BackupFileType; +import com.netflix.priam.backupv2.IMetaProxy; +import com.netflix.priam.config.IBackupRestoreConfig; import com.netflix.priam.config.IConfiguration; import com.netflix.priam.defaultimpl.ICassandraProcess; import com.netflix.priam.health.InstanceState; @@ -32,9 +33,13 @@ import java.io.IOException; import java.math.BigInteger; import java.nio.file.Path; +import java.time.Instant; import java.time.LocalDateTime; +import java.time.ZoneId; import java.util.*; import java.util.concurrent.Future; +import java.util.stream.Collectors; +import javax.inject.Named; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; @@ -48,9 +53,6 @@ * thread pool to execute the restores. */ public abstract class AbstractRestore extends Task implements IRestoreStrategy { - // keeps track of the last few download which was executed. 
- // TODO fix the magic number of 1000 => the idea of 80% of 1000 files limit per s3 query - protected static final FifoQueue tracker = new FifoQueue<>(800); private static final Logger logger = LoggerFactory.getLogger(AbstractRestore.class); private static final String JOBNAME = "AbstractRestore"; private static final String SYSTEM_KEYSPACE = "system"; @@ -66,7 +68,17 @@ public abstract class AbstractRestore extends Task implements IRestoreStrategy { private final MetaData metaData; private final IPostRestoreHook postRestoreHook; - AbstractRestore( + @Inject + @Named("v1") + IMetaProxy metaV1Proxy; + + @Inject + @Named("v2") + IMetaProxy metaV2Proxy; + + @Inject IBackupRestoreConfig backupRestoreConfig; + + public AbstractRestore( IConfiguration config, IBackupFileSystem fs, String name, @@ -106,15 +118,10 @@ public void setRestoreConfiguration(String restoreIncludeCFList, String restoreE } private List> download( - Iterator fsIterator, - BackupFileType bkupFileType, - boolean waitForCompletion) - throws Exception { + Iterator fsIterator, boolean waitForCompletion) throws Exception { List> futureList = new ArrayList<>(); while (fsIterator.hasNext()) { AbstractBackupPath temp = fsIterator.next(); - if (temp.getType() == BackupFileType.SST && tracker.contains(temp)) continue; - if (backupRestoreUtil.isFiltered( temp.getKeyspace(), temp.getColumnFamily())) { // is filtered? 
logger.info( @@ -125,16 +132,14 @@ private List> download( continue; } - if (temp.getType() == bkupFileType) { - File localFileHandler = temp.newRestoreFile(); - if (logger.isDebugEnabled()) - logger.debug( - "Created local file name: " - + localFileHandler.getAbsolutePath() - + File.pathSeparator - + localFileHandler.getName()); - futureList.add(downloadFile(temp, localFileHandler)); - } + File localFileHandler = temp.newRestoreFile(); + if (logger.isDebugEnabled()) + logger.debug( + "Created local file name: " + + localFileHandler.getAbsolutePath() + + File.pathSeparator + + localFileHandler.getName()); + futureList.add(downloadFile(temp, localFileHandler)); } // Wait for all download to finish that were started from this method. @@ -148,81 +153,35 @@ private void waitForCompletion(List> futureList) throws Exception { } private List> downloadCommitLogs( - Iterator fsIterator, - BackupFileType filter, - int lastN, - boolean waitForCompletion) + Iterator fsIterator, int lastN, boolean waitForCompletion) throws Exception { if (fsIterator == null) return null; BoundedList bl = new BoundedList(lastN); while (fsIterator.hasNext()) { AbstractBackupPath temp = fsIterator.next(); - if (temp.getType() == BackupFileType.SST && tracker.contains(temp)) continue; - - if (temp.getType() == filter) { + if (temp.getType() == BackupFileType.CL) { bl.add(temp); } } - return download(bl.iterator(), filter, waitForCompletion); + return download(bl.iterator(), waitForCompletion); } private void stopCassProcess() throws IOException { cassProcess.stop(true); } - private String getRestorePrefix() { - String prefix; - - if (StringUtils.isNotBlank(config.getRestorePrefix())) prefix = config.getRestorePrefix(); - else prefix = config.getBackupPrefix(); - - return prefix; - } - - /* - * Fetches meta.json used to store snapshots metadata. 
- */ - private List fetchSnapshotMetaFile( - String restorePrefix, Date startTime, Date endTime) throws IllegalStateException { - logger.debug("Looking for snapshot meta file within restore prefix: {}", restorePrefix); - List metas = Lists.newArrayList(); - - Iterator backupfiles = fs.list(restorePrefix, startTime, endTime); - if (!backupfiles.hasNext()) { - throw new IllegalStateException( - "meta.json not found, restore prefix: " + restorePrefix); - } - - while (backupfiles.hasNext()) { - AbstractBackupPath path = backupfiles.next(); - if (path.getType() == BackupFileType.META) - // Since there are now meta file for incrementals as well as snapshot, we need to - // find the correct one (i.e. the snapshot meta file (meta.json)) - if (path.getFileName().equalsIgnoreCase("meta.json")) { - metas.add(path); - } - } - - // Sort the meta files in ascending order. - Collections.sort(metas); - - return metas; - } - @Override public void execute() throws Exception { if (!isRestoreEnabled(config, instanceIdentity.getInstanceInfo())) return; logger.info("Starting restore for {}", config.getRestoreSnapshot()); - String[] restore = config.getRestoreSnapshot().split(","); - final Date startTime = DateUtil.getDate(restore[0]); - final Date endTime = DateUtil.getDate(restore[1]); + final DateUtil.DateRange dateRange = new DateUtil.DateRange(config.getRestoreSnapshot()); new RetryableCallable() { public Void retriableCall() throws Exception { logger.info("Attempting restore"); - restore(startTime, endTime); + restore(dateRange); logger.info("Restore completed"); // Wait for other server init to complete @@ -232,15 +191,38 @@ public Void retriableCall() throws Exception { }.call(); } - public void restore(Date startTime, Date endTime) throws Exception { + private Optional getLatestValidMetaPath( + IMetaProxy metaProxy, DateUtil.DateRange dateRange) { + // Get a list of manifest files. + List metas = metaProxy.findMetaFiles(dateRange); + + // Find a valid manifest file. 
+ for (AbstractBackupPath meta : metas) { + BackupVerificationResult result = metaProxy.isMetaFileValid(meta); + if (result.valid) { + return Optional.of(meta); + } + } + + return Optional.empty(); + } + + public void restore(DateUtil.DateRange dateRange) throws Exception { // fail early if post restore hook has invalid parameters if (!postRestoreHook.hasValidParameters()) { throw new PostRestoreHookException("Invalid PostRestoreHook parameters"); } + Date endTime = new Date(dateRange.getEndTime().toEpochMilli()); + IMetaProxy metaProxy = metaV1Proxy; + if (backupRestoreConfig.enableV2Restore()) metaProxy = metaV2Proxy; + // Set the restore status. instanceState.getRestoreStatus().resetStatus(); - instanceState.getRestoreStatus().setStartDateRange(DateUtil.convert(startTime)); + instanceState + .getRestoreStatus() + .setStartDateRange( + LocalDateTime.ofInstant(dateRange.getStartTime(), ZoneId.of("UTC"))); instanceState.getRestoreStatus().setEndDateRange(DateUtil.convert(endTime)); instanceState.getRestoreStatus().setExecutionStartTime(LocalDateTime.now()); instanceState.setRestoreStatus(Status.STARTED); @@ -248,7 +230,10 @@ public void restore(Date startTime, Date endTime) throws Exception { try { if (config.isRestoreClosestToken()) { - restoreToken = tokenSelector.getClosestToken(new BigInteger(origToken), startTime); + restoreToken = + tokenSelector.getClosestToken( + new BigInteger(origToken), + new Date(dateRange.getStartTime().toEpochMilli())); instanceIdentity.getInstance().setToken(restoreToken.toString()); } @@ -259,39 +244,60 @@ public void restore(Date startTime, Date endTime) throws Exception { File dataDir = new File(config.getDataFileLocation()); if (dataDir.exists() && dataDir.isDirectory()) FileUtils.cleanDirectory(dataDir); - // Try and read the Meta file. - String prefix = getRestorePrefix(); - List metas = fetchSnapshotMetaFile(prefix, startTime, endTime); + // Find latest valid meta file. 
+ Optional latestValidMetaFile = + getLatestValidMetaPath(metaProxy, dateRange); - if (metas.size() == 0) { - logger.info("[cass_backup] No snapshot meta file found, Restore Failed."); + if (!latestValidMetaFile.isPresent()) { + logger.info("No valid snapshot meta file found, Restore Failed."); instanceState.getRestoreStatus().setExecutionEndTime(LocalDateTime.now()); - instanceState.setRestoreStatus(Status.FINISHED); + instanceState.setRestoreStatus(Status.FAILED); return; } - AbstractBackupPath meta = Iterators.getLast(metas.iterator()); - logger.info("Snapshot Meta file for restore {}", meta.getRemotePath()); - instanceState.getRestoreStatus().setSnapshotMetaFile(meta.getRemotePath()); + logger.info( + "Snapshot Meta file for restore {}", latestValidMetaFile.get().getRemotePath()); + instanceState + .getRestoreStatus() + .setSnapshotMetaFile(latestValidMetaFile.get().getRemotePath()); // Download the meta.json file. - ArrayList metaFile = new ArrayList<>(); - metaFile.add(meta); - download(metaFile.iterator(), BackupFileType.META, true); - - List> futureList = new ArrayList<>(); + Path metaFile = metaProxy.downloadMetaFile(latestValidMetaFile.get()); // Parse meta.json file to find the files required to download from this snapshot. - List snapshots = metaData.toJson(meta.newRestoreFile()); + List snapshots = + metaProxy + .getSSTFilesFromMeta(metaFile) + .stream() + .map( + value -> { + AbstractBackupPath path = pathProvider.get(); + path.parseRemote(value); + return path; + }) + .collect(Collectors.toList()); + + FileUtils.deleteQuietly(metaFile.toFile()); // Download snapshot which is listed in the meta file. - futureList.addAll(download(snapshots.iterator(), BackupFileType.SNAP, false)); + List> futureList = new ArrayList<>(); + futureList.addAll(download(snapshots.iterator(), false)); logger.info("Downloading incrementals"); + // Download incrementals (SST) after the snapshot meta file. 
- Iterator incrementals = fs.list(prefix, meta.getTime(), endTime); - futureList.addAll(download(incrementals, BackupFileType.SST, false)); + Instant snapshotTime; + if (backupRestoreConfig.enableV2Restore()) + snapshotTime = latestValidMetaFile.get().getLastModified(); + else snapshotTime = latestValidMetaFile.get().getTime().toInstant(); + + DateUtil.DateRange incrementalDateRange = + new DateUtil.DateRange(snapshotTime, dateRange.getEndTime()); + Iterator incrementals = + metaProxy.getIncrementals(incrementalDateRange); + futureList.addAll(download(incrementals, false)); // Downloading CommitLogs + // Note for Backup V2.0 we do not backup commit logs, as saving them is cost-expensive. if (config.isBackingUpCommitLogs()) { logger.info( "Delete all backuped commitlog files in {}", @@ -300,15 +306,12 @@ public void restore(Date startTime, Date endTime) throws Exception { logger.info("Delete all commitlog files in {}", config.getCommitLogLocation()); SystemUtils.cleanupDir(config.getCommitLogLocation(), null); - + String prefix = fs.getPrefix().toString(); Iterator commitLogPathIterator = - fs.list(prefix, meta.getTime(), endTime); + fs.list(prefix, latestValidMetaFile.get().getTime(), endTime); futureList.addAll( downloadCommitLogs( - commitLogPathIterator, - BackupFileType.CL, - config.maxCommitLogsRestore(), - false)); + commitLogPathIterator, config.maxCommitLogsRestore(), false)); } // Wait for all the futures to finish. 
diff --git a/priam/src/main/java/com/netflix/priam/restore/EncryptedRestoreBase.java b/priam/src/main/java/com/netflix/priam/restore/EncryptedRestoreBase.java index 300ad9e45..ee9ebfc6a 100755 --- a/priam/src/main/java/com/netflix/priam/restore/EncryptedRestoreBase.java +++ b/priam/src/main/java/com/netflix/priam/restore/EncryptedRestoreBase.java @@ -106,7 +106,6 @@ public Path retriableCall() throws Exception { Paths.get(path.getRemotePath()), Paths.get(tempFile.getAbsolutePath()), 0); - tracker.adjustAndAdd(path); } catch (Exception ex) { // This behavior is retryable; therefore, lets get to a clean state // before each retry. diff --git a/priam/src/main/java/com/netflix/priam/restore/Restore.java b/priam/src/main/java/com/netflix/priam/restore/Restore.java index a7a5d2d6c..ac0a27f66 100644 --- a/priam/src/main/java/com/netflix/priam/restore/Restore.java +++ b/priam/src/main/java/com/netflix/priam/restore/Restore.java @@ -72,7 +72,6 @@ public Restore( @Override protected final Future downloadFile( final AbstractBackupPath path, final File restoreLocation) throws Exception { - tracker.adjustAndAdd(path); return fs.asyncDownloadFile( Paths.get(path.getRemotePath()), Paths.get(restoreLocation.getAbsolutePath()), 5); } diff --git a/priam/src/main/java/com/netflix/priam/scheduler/SchedulerType.java b/priam/src/main/java/com/netflix/priam/scheduler/SchedulerType.java deleted file mode 100644 index 0663f5455..000000000 --- a/priam/src/main/java/com/netflix/priam/scheduler/SchedulerType.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright 2013 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -package com.netflix.priam.scheduler; - -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** Created by aagrawal on 3/8/17. */ -public enum SchedulerType { - HOUR("HOUR"), - CRON("CRON"); - - private static final Logger logger = LoggerFactory.getLogger(SchedulerType.class); - private final String schedulerType; - - SchedulerType(String schedulerType) { - this.schedulerType = schedulerType.toUpperCase(); - } - - /* - * Helper method to find the scheduler type - case insensitive as user may put value which are not right case. - * This returns the ScheulerType if one is found. Refer to table below to understand the use-case. - * - * SchedulerTypeValue|acceptNullorEmpty|acceptIllegalValue|Result - * Valid value |NA |NA |SchedulerType - * Empty string |True |NA |NULL - * NULL |True |NA |NULL - * Empty string |False |NA |UnsupportedTypeException - * NULL |False |NA |UnsupportedTypeException - * Illegal value |NA |True |NULL - * Illegal value |NA |False |UnsupportedTypeException - */ - - public static SchedulerType lookup( - String schedulerType, boolean acceptNullOrEmpty, boolean acceptIllegalValue) - throws UnsupportedTypeException { - if (StringUtils.isEmpty(schedulerType)) - if (acceptNullOrEmpty) return null; - else { - String message = - String.format( - "%s is not a supported SchedulerType. 
Supported values are %s", - schedulerType, getSupportedValues()); - logger.error(message); - throw new UnsupportedTypeException(message); - } - - try { - return SchedulerType.valueOf(schedulerType.toUpperCase()); - } catch (IllegalArgumentException ex) { - String message = - String.format( - "%s is not a supported SchedulerType. Supported values are %s", - schedulerType, getSupportedValues()); - - if (acceptIllegalValue) { - message = - message - + ". Since acceptIllegalValue is set to True, returning NULL instead."; - logger.error(message); - return null; - } - - logger.error(message); - throw new UnsupportedTypeException(message, ex); - } - } - - private static String getSupportedValues() { - StringBuilder supportedValues = new StringBuilder(); - boolean first = true; - for (SchedulerType type : SchedulerType.values()) { - if (!first) supportedValues.append(","); - supportedValues.append(type); - first = false; - } - - return supportedValues.toString(); - } - - public static SchedulerType lookup(String schedulerType) throws UnsupportedTypeException { - return lookup(schedulerType, false, false); - } - - public String getSchedulerType() { - return schedulerType; - } -} diff --git a/priam/src/main/java/com/netflix/priam/services/BackupTTLService.java b/priam/src/main/java/com/netflix/priam/services/BackupTTLService.java index a4f770d70..a3170255e 100644 --- a/priam/src/main/java/com/netflix/priam/services/BackupTTLService.java +++ b/priam/src/main/java/com/netflix/priam/services/BackupTTLService.java @@ -24,8 +24,8 @@ import com.netflix.priam.backup.BackupRestoreException; import com.netflix.priam.backup.IBackupFileSystem; import com.netflix.priam.backup.IFileSystemContext; -import com.netflix.priam.backupv2.BackupValidator; import com.netflix.priam.backupv2.ColumnfamilyResult; +import com.netflix.priam.backupv2.IMetaProxy; import com.netflix.priam.backupv2.MetaFileReader; import com.netflix.priam.config.IBackupRestoreConfig; import 
com.netflix.priam.config.IConfiguration; @@ -40,6 +40,8 @@ import java.util.*; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import javax.inject.Named; +import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,7 +60,7 @@ public class BackupTTLService extends Task { private static final Logger logger = LoggerFactory.getLogger(BackupTTLService.class); private IBackupRestoreConfig backupRestoreConfig; - private BackupValidator backupValidator; + private IMetaProxy metaProxy; private IBackupFileSystem fileSystem; private Provider abstractBackupPathProvider; public static final String JOBNAME = "BackupTTLService"; @@ -72,12 +74,12 @@ public class BackupTTLService extends Task { public BackupTTLService( IConfiguration configuration, IBackupRestoreConfig backupRestoreConfig, - BackupValidator backupValidator, + @Named("v2") IMetaProxy metaProxy, IFileSystemContext backupFileSystemCtx, Provider abstractBackupPathProvider) { super(configuration); this.backupRestoreConfig = backupRestoreConfig; - this.backupValidator = backupValidator; + this.metaProxy = metaProxy; this.fileSystem = backupFileSystemCtx.getFileStrategy(configuration); this.abstractBackupPathProvider = abstractBackupPathProvider; } @@ -110,7 +112,7 @@ public void execute() throws Exception { // Find the snapshot just after this date. List metas = - backupValidator.findMetaFiles( + metaProxy.findMetaFiles( new DateUtil.DateRange(dateToTtl, DateUtil.getInstant())); if (metas.size() == 0) { @@ -123,13 +125,17 @@ public void execute() throws Exception { AbstractBackupPath metaFile = metas.get(metas.size() - 1); // Download the meta file to local file system. - Path localFile = backupValidator.downloadMetaFile(metaFile); + Path localFile = metaProxy.downloadMetaFile(metaFile); // Walk over the file system iterator and if not in map, it is eligible for delete. 
new MetaFileWalker().readMeta(localFile); + if (logger.isDebugEnabled()) logger.debug("Files in meta file: {}", filesInMeta.keySet().toString()); + // Delete the meta file downloaded locally + FileUtils.deleteQuietly(localFile.toFile()); + Iterator remoteFileLocations = fileSystem.listFileSystem(getSSTPrefix(), null, null); @@ -159,7 +165,7 @@ public void execute() throws Exception { // all the META files. // This feature did not exist in Jan 2018. metas = - backupValidator.findMetaFiles( + metaProxy.findMetaFiles( new DateUtil.DateRange( start_of_feature, dateToTtl.minus(1, ChronoUnit.MINUTES))); diff --git a/priam/src/main/java/com/netflix/priam/services/SnapshotMetaService.java b/priam/src/main/java/com/netflix/priam/services/SnapshotMetaService.java index 43daf9d7e..1cf62e885 100644 --- a/priam/src/main/java/com/netflix/priam/services/SnapshotMetaService.java +++ b/priam/src/main/java/com/netflix/priam/services/SnapshotMetaService.java @@ -30,6 +30,7 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import javax.inject.Inject; +import javax.inject.Named; import javax.inject.Singleton; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.FileFilterUtils; @@ -60,11 +61,10 @@ public class SnapshotMetaService extends AbstractBackup { private static final String SNAPSHOT_PREFIX = "snap_v2_"; private static final String CASSANDRA_MANIFEST_FILE = "manifest.json"; private static final String CASSANDRA_SCHEMA_FILE = "schema.cql"; - private final IBackupRestoreConfig backupRestoreConfig; private final BackupRestoreUtil backupRestoreUtil; private final MetaFileWriterBuilder metaFileWriter; private MetaFileWriterBuilder.DataStep dataStep; - private final MetaFileManager metaFileManager; + private final IMetaProxy metaProxy; private final CassandraOperations cassandraOperations; private String snapshotName = null; private static final Lock lock = new ReentrantLock(); @@ -79,20 +79,18 @@ private enum 
MetaStep { @Inject SnapshotMetaService( IConfiguration config, - IBackupRestoreConfig backupRestoreConfig, IFileSystemContext backupFileSystemCtx, Provider pathFactory, MetaFileWriterBuilder metaFileWriter, - MetaFileManager metaFileManager, + @Named("v2") IMetaProxy metaProxy, CassandraOperations cassandraOperations) { super(config, backupFileSystemCtx, pathFactory); - this.backupRestoreConfig = backupRestoreConfig; this.cassandraOperations = cassandraOperations; backupRestoreUtil = new BackupRestoreUtil( config.getSnapshotIncludeCFList(), config.getSnapshotExcludeCFList()); this.metaFileWriter = metaFileWriter; - this.metaFileManager = metaFileManager; + this.metaProxy = metaProxy; } /** @@ -177,7 +175,7 @@ public void execute() throws Exception { // These files may be leftover // 1) when Priam shutdown in middle of this service and may not be full JSON // 2) No permission to upload to backup file system. - metaFileManager.cleanupOldMetaFiles(); + metaProxy.cleanupOldMetaFiles(); // Take a new snapshot cassandraOperations.takeSnapshot(snapshotName); @@ -246,8 +244,7 @@ private void uploadAllFiles( if (!snapshotDirectory.getName().startsWith(SNAPSHOT_PREFIX) || !snapshotDirectory.isDirectory()) continue; - if (snapshotDirectory.list().length == 0 - || !backupRestoreConfig.enableV2Backups()) { + if (snapshotDirectory.list().length == 0) { FileUtils.cleanDirectory(snapshotDirectory); FileUtils.deleteDirectory(snapshotDirectory); continue; @@ -368,11 +365,6 @@ private void generateMetaFile( columnfamilyResult.getColumnfamilyName()); } - @Override - protected void addToRemotePath(String remotePath) { - // Do nothing - } - // For testing purposes only. 
void setSnapshotName(String snapshotName) { this.snapshotName = snapshotName; diff --git a/priam/src/main/java/com/netflix/priam/utils/DateUtil.java b/priam/src/main/java/com/netflix/priam/utils/DateUtil.java index edcee23af..ff28cf96f 100644 --- a/priam/src/main/java/com/netflix/priam/utils/DateUtil.java +++ b/priam/src/main/java/com/netflix/priam/utils/DateUtil.java @@ -218,5 +218,12 @@ public Instant getEndTime() { public String toString() { return GsonJsonSerializer.getGson().toJson(this); } + + @Override + public boolean equals(Object obj) { + return obj.getClass().equals(this.getClass()) + && (startTime.toEpochMilli() == ((DateRange) obj).startTime.toEpochMilli()) + && (endTime.toEpochMilli() == ((DateRange) obj).endTime.toEpochMilli()); + } } } diff --git a/priam/src/test/groovy/com.netflix.priam.scheduler/TestFlushTask.groovy b/priam/src/test/groovy/com.netflix.priam.scheduler/TestFlushTask.groovy deleted file mode 100644 index e03a147a9..000000000 --- a/priam/src/test/groovy/com.netflix.priam.scheduler/TestFlushTask.groovy +++ /dev/null @@ -1,82 +0,0 @@ -package com.netflix.priam.scheduler - -import com.netflix.priam.config.FakeConfiguration -import com.netflix.priam.cluster.management.Flush -import spock.lang.Specification -import spock.lang.Unroll - -/** - Created by aagrawal on 7/15/17. - */ -@Unroll -class TestFlushTask extends Specification { - - def "Exception for value #flushSchedulerType, #flushCronExpression, #flushInterval"() { - when: - Flush.getTimer(new FlushConfiguration(flushSchedulerType, flushCronExpression, flushInterval)) - - then: - thrown(expectedException) - - where: - flushSchedulerType | flushCronExpression | flushInterval || expectedException - "sdf" | null | null || UnsupportedTypeException - "hour" | null | "2" || IllegalArgumentException - "hour" | "0 0 2 * * ?" 
| "2" || IllegalArgumentException - "cron" | "abc" | null || IllegalArgumentException - "cron" | "abc" | "daily=2" || IllegalArgumentException - "cron" | null | "daily=2" || IllegalArgumentException - "hour" | null | "hour=2,daily=2" || IllegalArgumentException - } - - def "SchedulerType for value #flushSchedulerType, #flushCronExpression, #flushInterval is null"() { - expect: - Flush.getTimer(new FlushConfiguration(flushSchedulerType, flushCronExpression, flushInterval)) == result - - where: - flushSchedulerType | flushCronExpression | flushInterval || result - "hour" | null | null || null - "cron" | "-1" | null || null - "hour" | "abc" | null || null - "cron" | "-1" | "abc" || null - } - - def "SchedulerType for value #flushSchedulerType, #flushCronExpression, #flushInterval is #result"() { - expect: - Flush.getTimer(new FlushConfiguration(flushSchedulerType, flushCronExpression, flushInterval)).getCronExpression() == result - - where: - flushSchedulerType | flushCronExpression | flushInterval || result - "hour" | null | "daily=2" || "0 0 2 * * ?" - "hour" | null | "hour=2" || "0 2 0/1 * * ?" - "cron" | "0 0 0/1 1/1 * ? *" | null || "0 0 0/1 1/1 * ? *" - "cron" | "0 0 0/1 1/1 * ? *" | "daily=2" || "0 0 0/1 1/1 * ? 
*" - } - - - private class FlushConfiguration extends FakeConfiguration { - private String flushSchedulerType, flushCronExpression, flushInterval - - FlushConfiguration(String flushSchedulerType, String flushCronExpression, String flushInterval) { - this.flushCronExpression = flushCronExpression - this.flushSchedulerType = flushSchedulerType - this.flushInterval = flushInterval - } - - @Override - SchedulerType getFlushSchedulerType() throws UnsupportedTypeException { - return SchedulerType.lookup(flushSchedulerType) - } - - @Override - String getFlushCronExpression() { - return flushCronExpression - } - - @Override - String getFlushInterval() { - return flushInterval - } - } - -} diff --git a/priam/src/test/groovy/com.netflix.priam.scheduler/TestSchedulerType.groovy b/priam/src/test/groovy/com.netflix.priam.scheduler/TestSchedulerType.groovy deleted file mode 100644 index 2c8429045..000000000 --- a/priam/src/test/groovy/com.netflix.priam.scheduler/TestSchedulerType.groovy +++ /dev/null @@ -1,52 +0,0 @@ -package com.netflix.priam.scheduler - -/** - * Created by aagrawal on 3/16/17. - * This is used to test SchedulerType with all the values you might get. 
- */ -import spock.lang.* - -@Unroll -class TestSchedulerType extends Specification{ - - def "Exception for value #schedulerType , #acceptNullorEmpty , #acceptIllegalValue"() { - when: - SchedulerType.lookup(schedulerType, acceptNullorEmpty, acceptIllegalValue) - - then: - thrown(expectedException) - - where: - schedulerType | acceptNullorEmpty | acceptIllegalValue || expectedException - "sdf" | true | false || UnsupportedTypeException - "" | false | true || UnsupportedTypeException - null | false | true || UnsupportedTypeException - - } - - def "SchedulerType for value #schedulerType , #acceptNullorEmpty , #acceptIllegalValue is #result"() { - expect: - SchedulerType.lookup(schedulerType, acceptNullorEmpty, acceptIllegalValue) == result - - where: - schedulerType | acceptNullorEmpty | acceptIllegalValue || result - "hour" | true | true || SchedulerType.HOUR - "Hour" | true | true || SchedulerType.HOUR - "HOUR" | true | true || SchedulerType.HOUR - "hour" | true | false || SchedulerType.HOUR - "Hour" | true | false || SchedulerType.HOUR - "HOUR" | true | false || SchedulerType.HOUR - "hour" | false | false || SchedulerType.HOUR - "Hour" | false | false || SchedulerType.HOUR - "HOUR" | false | false || SchedulerType.HOUR - "hour" | false | true || SchedulerType.HOUR - "Hour" | false | true || SchedulerType.HOUR - "HOUR" | false | true || SchedulerType.HOUR - "" | true | false || null - null | true | false || null - "sdf" | false | true || null - "sdf" | true | true || null - } - - -} \ No newline at end of file diff --git a/priam/src/test/groovy/com.netflix.priam/backup/TestBackupScheduler.groovy b/priam/src/test/groovy/com.netflix.priam/backup/TestBackupScheduler.groovy index 45c8f89e0..22a391a68 100644 --- a/priam/src/test/groovy/com.netflix.priam/backup/TestBackupScheduler.groovy +++ b/priam/src/test/groovy/com.netflix.priam/backup/TestBackupScheduler.groovy @@ -1,8 +1,6 @@ package com.netflix.priam.backup import com.netflix.priam.config.FakeConfiguration -import 
com.netflix.priam.scheduler.SchedulerType -import com.netflix.priam.scheduler.UnsupportedTypeException import spock.lang.Specification import spock.lang.Unroll @@ -11,23 +9,19 @@ import spock.lang.Unroll */ @Unroll class TestBackupScheduler extends Specification { - def "IsBackupEnabled for SchedulerType #schedulerType with hour #configHour and CRON #configCRON is #result"() { + def "IsBackupEnabled CRON #configCRON is #result"() { expect: - SnapshotBackup.isBackupEnabled(new BackupConfiguration(schedulerType, configCRON, configHour)) == result + SnapshotBackup.isBackupEnabled(new BackupConfiguration(configCRON)) == result where: - schedulerType | configCRON | configHour || result - "hour" | null | -1 || false - "hour" | "0 0 9 1/1 * ? *" | -1 || false - "hour" | null | 1 || true - "cron" | "-1" | 1 || false - "cron" | "-1" | -1 || false - "cron" | "0 0 9 1/1 * ? *" | -1 || true + configCRON || result + "-1" || false + "0 0 9 1/1 * ? *" || true } def "Exception for illegal value of Snapshot CRON expression , #configCRON"() { when: - SnapshotBackup.isBackupEnabled(new BackupConfiguration("cron", configCRON, 1)) + SnapshotBackup.isBackupEnabled(new BackupConfiguration(configCRON)) then: thrown(expectedException) @@ -38,40 +32,26 @@ class TestBackupScheduler extends Specification { "0 9 1/1 * ? *"|| Exception } - def "Validate CRON for backup for SchedulerType #schedulerType with hour #configHour and CRON #configCRON is #result"() { + def "Validate CRON for backup CRON #configCRON is #result"() { expect: - SnapshotBackup.getTimer(new BackupConfiguration(schedulerType, configCRON, configHour)).cronExpression == result + SnapshotBackup.getTimer(new BackupConfiguration(configCRON)).cronExpression == result where: - schedulerType | configCRON | configHour || result - "hour" | null | 1 || "0 1 1 * * ?" - "cron" | "0 0 9 1/1 * ? *" | -1 || "0 0 9 1/1 * ? *" + configCRON || result + "0 0 9 1/1 * ? *" || "0 0 9 1/1 * ? 
*" } private class BackupConfiguration extends FakeConfiguration { - private String backupSchedulerType, backupCronExpression - private int backupHour + private String backupCronExpression - BackupConfiguration(String backupSchedulerType, String backupCronExpression, int backupHour) { + BackupConfiguration(String backupCronExpression) { this.backupCronExpression = backupCronExpression - this.backupSchedulerType = backupSchedulerType - this.backupHour = backupHour - } - - @Override - SchedulerType getBackupSchedulerType() throws UnsupportedTypeException { - return SchedulerType.lookup(backupSchedulerType) } @Override String getBackupCronExpression() { return backupCronExpression } - - @Override - int getBackupHour() { - return backupHour - } } } diff --git a/priam/src/test/groovy/com/netflix/priam/cluser/management/TestFlushTask.groovy b/priam/src/test/groovy/com/netflix/priam/cluser/management/TestFlushTask.groovy new file mode 100644 index 000000000..85a6ec12d --- /dev/null +++ b/priam/src/test/groovy/com/netflix/priam/cluser/management/TestFlushTask.groovy @@ -0,0 +1,80 @@ +/* + * Copyright 2018 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package com.netflix.priam.cluser.management + +import com.google.inject.Guice +import com.netflix.priam.backup.BRTestModule +import com.netflix.priam.cluster.management.Compaction +import com.netflix.priam.cluster.management.Flush +import com.netflix.priam.config.FakeConfiguration +import com.netflix.priam.defaultimpl.CassandraOperations +import mockit.Mock +import mockit.MockUp +import spock.lang.Shared +import spock.lang.Specification +import spock.lang.Unroll + +/** + Created by aagrawal on 7/15/17. + */ +@Unroll +class TestFlushTask extends Specification { + def "Exception for value #flushCronExpression"() { + when: + Flush.getTimer(new FlushConfiguration(flushCronExpression)) + + then: + thrown(expectedException) + + where: + flushCronExpression || expectedException + "abc" || IllegalArgumentException + null || IllegalArgumentException + } + + def "SchedulerType for value #flushSchedulerType, #flushCronExpression, #flushInterval is null"() { + expect: + Flush.getTimer(new FlushConfiguration(flushCronExpression)) == result + + where: + flushCronExpression || result + "-1" || null + } + + def "SchedulerType for value #flushCronExpression is #result"() { + expect: + Flush.getTimer(new FlushConfiguration(flushCronExpression)).getCronExpression() == result + + where: + flushCronExpression || result + "0 0 0/1 1/1 * ? *" || "0 0 0/1 1/1 * ? 
*" + } + + private class FlushConfiguration extends FakeConfiguration { + private String flushCronExpression + + FlushConfiguration(String flushCronExpression) { + this.flushCronExpression = flushCronExpression + } + + @Override + String getFlushCronExpression() { + return flushCronExpression + } + } +} \ No newline at end of file diff --git a/priam/src/test/java/com/netflix/priam/aws/MockAmazonS3Client.java b/priam/src/test/java/com/netflix/priam/aws/MockAmazonS3Client.java deleted file mode 100644 index d80501a51..000000000 --- a/priam/src/test/java/com/netflix/priam/aws/MockAmazonS3Client.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2018 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package com.netflix.priam.aws; - -import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.AmazonS3Client; -import com.amazonaws.services.s3.model.ListObjectsRequest; -import com.amazonaws.services.s3.model.ObjectListing; -import mockit.Mock; -import mockit.MockUp; - -/** Created by aagrawal on 12/6/18. 
*/ -public class MockAmazonS3Client extends MockUp { - @Mock - public ObjectListing listObjects(ListObjectsRequest listObjectsRequest) - throws AmazonClientException { - ObjectListing listing = new ObjectListing(); - listing.setBucketName(listObjectsRequest.getBucketName()); - listing.setPrefix(listObjectsRequest.getPrefix()); - return listing; - } - - @Mock - public ObjectListing listNextBatchOfObjects(ObjectListing previousObjectListing) - throws AmazonClientException { - ObjectListing listing = new ObjectListing(); - listing.setBucketName(previousObjectListing.getBucketName()); - listing.setPrefix(previousObjectListing.getPrefix()); - return new ObjectListing(); - } -} diff --git a/priam/src/test/java/com/netflix/priam/aws/TestRemoteBackupPath.java b/priam/src/test/java/com/netflix/priam/aws/TestRemoteBackupPath.java index 90fca2b3a..67e1d0dc7 100644 --- a/priam/src/test/java/com/netflix/priam/aws/TestRemoteBackupPath.java +++ b/priam/src/test/java/com/netflix/priam/aws/TestRemoteBackupPath.java @@ -112,7 +112,7 @@ public void testV1BackupPathsSnap() throws ParseException { Assert.assertEquals(BackupFileType.SNAP, abstractBackupPath.getType()); Assert.assertEquals(path.toFile(), abstractBackupPath.getBackupFile()); Assert.assertEquals( - "201801011201", AbstractBackupPath.formatDate(abstractBackupPath.getTime())); + "201801011201", DateUtil.formatyyyyMMddHHmm(abstractBackupPath.getTime())); // Verify toRemote and parseRemote. 
String remotePath = abstractBackupPath.getRemotePath(); diff --git a/priam/src/test/java/com/netflix/priam/backup/BRTestModule.java b/priam/src/test/java/com/netflix/priam/backup/BRTestModule.java index 176b610d5..29c87b051 100644 --- a/priam/src/test/java/com/netflix/priam/backup/BRTestModule.java +++ b/priam/src/test/java/com/netflix/priam/backup/BRTestModule.java @@ -22,6 +22,9 @@ import com.google.inject.name.Names; import com.netflix.priam.aws.auth.IS3Credential; import com.netflix.priam.aws.auth.S3RoleAssumptionCredential; +import com.netflix.priam.backupv2.IMetaProxy; +import com.netflix.priam.backupv2.MetaV1Proxy; +import com.netflix.priam.backupv2.MetaV2Proxy; import com.netflix.priam.config.FakeBackupRestoreConfig; import com.netflix.priam.config.FakeConfiguration; import com.netflix.priam.config.IBackupRestoreConfig; @@ -78,5 +81,7 @@ protected void configure() { bind(ICassandraProcess.class).to(FakeCassandraProcess.class); bind(IPostRestoreHook.class).to(FakePostRestoreHook.class); bind(Registry.class).toInstance(new DefaultRegistry()); + bind(IMetaProxy.class).annotatedWith(Names.named("v1")).to(MetaV1Proxy.class); + bind(IMetaProxy.class).annotatedWith(Names.named("v2")).to(MetaV2Proxy.class); } } diff --git a/priam/src/test/java/com/netflix/priam/backup/FakeBackupFileSystem.java b/priam/src/test/java/com/netflix/priam/backup/FakeBackupFileSystem.java index 91dd7b101..b55d385c5 100644 --- a/priam/src/test/java/com/netflix/priam/backup/FakeBackupFileSystem.java +++ b/priam/src/test/java/com/netflix/priam/backup/FakeBackupFileSystem.java @@ -49,7 +49,6 @@ public FakeBackupFileSystem( } public void setupTest(List files) { - System.out.println("Setting up: " + files); clearTest(); for (String file : files) { AbstractBackupPath path = pathProvider.get(); @@ -104,8 +103,6 @@ public Iterator listFileSystem(String prefix, String delimiter, String m if (abstractBackupPath.getRemotePath().startsWith(prefix)) items.add(abstractBackupPath.getRemotePath()); }); - 
System.out.println(flist); - System.out.println(items); return items.iterator(); } @@ -154,7 +151,8 @@ protected void downloadFileImpl(Path remotePath, Path localPath) throws BackupRe try (FileWriter fr = new FileWriter(localPath.toFile())) { JSONArray jsonObj = new JSONArray(); for (AbstractBackupPath filePath : flist) { - if (filePath.type == AbstractBackupPath.BackupFileType.SNAP) { + if (filePath.type == AbstractBackupPath.BackupFileType.SNAP + && filePath.time.equals(path.time)) { jsonObj.add(filePath.getRemotePath()); } } diff --git a/priam/src/test/java/com/netflix/priam/backup/TestAbstractFileSystem.java b/priam/src/test/java/com/netflix/priam/backup/TestAbstractFileSystem.java index b05fe0d78..7bfeab6df 100644 --- a/priam/src/test/java/com/netflix/priam/backup/TestAbstractFileSystem.java +++ b/priam/src/test/java/com/netflix/priam/backup/TestAbstractFileSystem.java @@ -139,7 +139,7 @@ public void testUpload() throws Exception { @Test public void testDownload() throws Exception { // Dummy download - myFileSystem.downloadFile(Paths.get(""), null, 2); + myFileSystem.downloadFile(Paths.get(""), Paths.get(configuration.getDataFileLocation()), 2); // Verify the success metric for download is incremented. Assert.assertEquals(1, (int) backupMetrics.getValidDownloads().actualCount()); } @@ -254,7 +254,9 @@ public void testAsyncUploadFailure() throws Exception { @Test public void testAsyncDownload() throws Exception { // Testing single async download. - Future future = myFileSystem.asyncDownloadFile(Paths.get(""), null, 2); + Future future = + myFileSystem.asyncDownloadFile( + Paths.get(""), Paths.get(configuration.getDataFileLocation()), 2); future.get(); // 1. Verify the success metric for download is incremented. 
Assert.assertEquals(1, (int) backupMetrics.getValidDownloads().actualCount()); @@ -269,7 +271,9 @@ public void testAsyncDownloadBulk() throws Exception { int totalFiles = 1000; List> futureList = new ArrayList<>(); for (int i = 0; i < totalFiles; i++) - futureList.add(myFileSystem.asyncDownloadFile(Paths.get("" + i), null, 2)); + futureList.add( + myFileSystem.asyncDownloadFile( + Paths.get("" + i), Paths.get(configuration.getDataFileLocation()), 2)); // Ensure processing is finished. for (Future future1 : futureList) { diff --git a/priam/src/test/java/com/netflix/priam/backup/TestBackupFile.java b/priam/src/test/java/com/netflix/priam/backup/TestBackupFile.java index 365c38110..52478f3f6 100644 --- a/priam/src/test/java/com/netflix/priam/backup/TestBackupFile.java +++ b/priam/src/test/java/com/netflix/priam/backup/TestBackupFile.java @@ -22,6 +22,7 @@ import com.netflix.priam.aws.RemoteBackupPath; import com.netflix.priam.backup.AbstractBackupPath.BackupFileType; import com.netflix.priam.identity.InstanceIdentity; +import com.netflix.priam.utils.DateUtil; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; @@ -101,7 +102,7 @@ public void testIncBackupFileCreation() throws ParseException { Assert.assertEquals("fake-app", backupfile.clusterName); Assert.assertEquals(region, backupfile.region); Assert.assertEquals("casstestbackup", backupfile.baseDir); - String datestr = AbstractBackupPath.formatDate(new Date(bfile.lastModified())); + String datestr = DateUtil.formatyyyyMMddHHmm(new Date(bfile.lastModified())); Assert.assertEquals( "casstestbackup/" + region @@ -118,7 +119,7 @@ public void testMetaFileCreation() throws ParseException { File bfile = new File(filestr); RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class); backupfile.parseLocal(bfile, BackupFileType.META); - backupfile.setTime(backupfile.parseDate("201108082320")); + backupfile.setTime(DateUtil.getDate("201108082320")); 
Assert.assertEquals(BackupFileType.META, backupfile.type); Assert.assertEquals("1234567", backupfile.token); Assert.assertEquals("fake-app", backupfile.clusterName); diff --git a/priam/src/test/java/com/netflix/priam/backup/TestBackupVerification.java b/priam/src/test/java/com/netflix/priam/backup/TestBackupVerification.java new file mode 100644 index 000000000..3b6c0d512 --- /dev/null +++ b/priam/src/test/java/com/netflix/priam/backup/TestBackupVerification.java @@ -0,0 +1,86 @@ +/* + * Copyright 2018 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package com.netflix.priam.backup; + +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.netflix.priam.utils.DateUtil; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** Created by aagrawal on 12/21/18. 
*/ +public class TestBackupVerification { + + private static BackupVerification backupVerification; + private final String backupDate = "201812011000"; + private List backupMetadataList = new ArrayList<>(); + + public TestBackupVerification() { + Injector injector = Guice.createInjector(new BRTestModule()); + backupVerification = injector.getInstance(BackupVerification.class); + } + + @Before + public void prepare() throws Exception { + backupMetadataList.clear(); + Instant start = DateUtil.parseInstant(backupDate); + backupMetadataList.add(getBackupMetaData(start, Status.FINISHED)); + backupMetadataList.add(getBackupMetaData(start.plus(2, ChronoUnit.HOURS), Status.FAILED)); + backupMetadataList.add(getBackupMetaData(start.plus(4, ChronoUnit.HOURS), Status.FINISHED)); + backupMetadataList.add(getBackupMetaData(start.plus(6, ChronoUnit.HOURS), Status.FAILED)); + backupMetadataList.add(getBackupMetaData(start.plus(8, ChronoUnit.HOURS), Status.FAILED)); + } + + @Test + public void getLatestBackup() { + Optional backupMetadata = + backupVerification.getLatestBackupMetaData(backupMetadataList); + Instant start = DateUtil.parseInstant(backupDate); + Assert.assertEquals( + start.plus(4, ChronoUnit.HOURS), backupMetadata.get().getStart().toInstant()); + } + + @Test + public void getLatestBackupFailure() throws Exception { + Optional backupMetadata = + backupVerification.getLatestBackupMetaData(new ArrayList<>()); + Assert.assertFalse(backupMetadata.isPresent()); + + List failList = new ArrayList<>(); + failList.add(getBackupMetaData(DateUtil.getInstant(), Status.FAILED)); + backupMetadata = backupVerification.getLatestBackupMetaData(failList); + Assert.assertFalse(backupMetadata.isPresent()); + } + + private BackupMetadata getBackupMetaData(Instant startTime, Status status) throws Exception { + BackupMetadata backupMetadata = + new BackupMetadata("123", new Date(startTime.toEpochMilli())); + backupMetadata.setCompleted( + new Date(startTime.plus(30, 
ChronoUnit.MINUTES).toEpochMilli())); + backupMetadata.setStatus(status); + backupMetadata.setSnapshotLocation("file.txt"); + return backupMetadata; + } +} diff --git a/priam/src/test/java/com/netflix/priam/backup/TestCompression.java b/priam/src/test/java/com/netflix/priam/backup/TestCompression.java index 4de6c36f3..63fdb0dbd 100644 --- a/priam/src/test/java/com/netflix/priam/backup/TestCompression.java +++ b/priam/src/test/java/com/netflix/priam/backup/TestCompression.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import com.netflix.priam.compress.ICompression; import com.netflix.priam.compress.SnappyCompression; import com.netflix.priam.utils.SystemUtils; import java.io.*; @@ -106,11 +107,16 @@ public void zipTest() throws IOException { @Test public void snappyTest() throws IOException { - SnappyCompression compress = new SnappyCompression(); - File compressedOutputFile = new File("/tmp/test1.snp"); + ICompression compress = new SnappyCompression(); + testCompressor(compress); + } + + private void testCompressor(ICompression compress) throws IOException { + File compressedOutputFile = new File("/tmp/test1.compress"); File decompressedTempOutput = new File("/tmp/compress-test-out.txt"); long chunkSize = 5L * 1024 * 1024; try { + Iterator it = compress.compress(new FileInputStream(randomContentFile), chunkSize); try (FileOutputStream ostream = new FileOutputStream(compressedOutputFile)) { @@ -118,6 +124,7 @@ public void snappyTest() throws IOException { byte[] chunk = it.next(); ostream.write(chunk); } + ostream.flush(); } assertTrue(randomContentFile.length() > compressedOutputFile.length()); diff --git a/priam/src/test/java/com/netflix/priam/backup/TestFileIterator.java b/priam/src/test/java/com/netflix/priam/backup/TestFileIterator.java index bb63946ee..e79b45f51 100644 --- a/priam/src/test/java/com/netflix/priam/backup/TestFileIterator.java +++ 
b/priam/src/test/java/com/netflix/priam/backup/TestFileIterator.java @@ -17,15 +17,16 @@ package com.netflix.priam.backup; +import com.amazonaws.AmazonClientException; import com.amazonaws.services.s3.AmazonS3Client; +import com.amazonaws.services.s3.model.ListObjectsRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; import com.google.inject.Guice; import com.google.inject.Injector; -import com.netflix.priam.aws.MockAmazonS3Client; import com.netflix.priam.aws.S3FileSystem; -import com.netflix.priam.config.IConfiguration; import com.netflix.priam.identity.InstanceIdentity; +import com.netflix.priam.utils.DateUtil; import java.io.IOException; import java.util.*; import mockit.Mock; @@ -41,35 +42,46 @@ * @author Praveen Sadhu */ public class TestFileIterator { - private static Injector injector; private static Date startTime, endTime; - private static Calendar cal; - private static AmazonS3Client s3client; private static S3FileSystem s3FileSystem; - private static IConfiguration conf; - private static InstanceIdentity factory; private static String region; private static String bucket = "TESTBUCKET"; @BeforeClass public static void setup() throws InterruptedException, IOException { - s3client = new MockAmazonS3Client().getMockInstance(); + AmazonS3Client s3client = new MockAmazonS3Client().getMockInstance(); new MockObjectListing(); - injector = Guice.createInjector(new BRTestModule()); - conf = injector.getInstance(IConfiguration.class); - factory = injector.getInstance(InstanceIdentity.class); + Injector injector = Guice.createInjector(new BRTestModule()); + InstanceIdentity factory = injector.getInstance(InstanceIdentity.class); region = factory.getInstanceInfo().getRegion(); s3FileSystem = injector.getInstance(S3FileSystem.class); s3FileSystem.setS3Client(s3client); - cal = Calendar.getInstance(); - cal.set(2011, 7, 11, 0, 30, 0); - cal.set(Calendar.MILLISECOND, 0); - startTime = cal.getTime(); - 
cal.add(Calendar.HOUR, 5); - endTime = cal.getTime(); + DateUtil.DateRange dateRange = new DateUtil.DateRange("201108110030,201108110530"); + startTime = new Date(dateRange.getStartTime().toEpochMilli()); + endTime = new Date(dateRange.getEndTime().toEpochMilli()); + } + + static class MockAmazonS3Client extends MockUp { + @Mock + public ObjectListing listObjects(ListObjectsRequest listObjectsRequest) + throws AmazonClientException { + ObjectListing listing = new ObjectListing(); + listing.setBucketName(listObjectsRequest.getBucketName()); + listing.setPrefix(listObjectsRequest.getPrefix()); + return listing; + } + + @Mock + public ObjectListing listNextBatchOfObjects(ObjectListing previousObjectListing) + throws AmazonClientException { + ObjectListing listing = new ObjectListing(); + listing.setBucketName(previousObjectListing.getBucketName()); + listing.setPrefix(previousObjectListing.getPrefix()); + return new ObjectListing(); + } } // MockObjectListing class @@ -102,11 +114,9 @@ public boolean isTruncated() { @Test public void testIteratorEmptySet() { - cal.set(2011, 7, 11, 6, 1, 0); - cal.set(Calendar.MILLISECOND, 0); - Date stime = cal.getTime(); - cal.add(Calendar.HOUR, 5); - Date etime = cal.getTime(); + DateUtil.DateRange dateRange = new DateUtil.DateRange("201107110601,201107111101"); + Date stime = new Date(dateRange.getStartTime().toEpochMilli()); + Date etime = new Date(dateRange.getEndTime().toEpochMilli()); Iterator fileIterator = s3FileSystem.list(bucket, stime, etime); Set files = new HashSet<>(); diff --git a/priam/src/test/java/com/netflix/priam/backup/TestS3FileSystem.java b/priam/src/test/java/com/netflix/priam/backup/TestS3FileSystem.java index c78e818fa..e8f2f92e2 100644 --- a/priam/src/test/java/com/netflix/priam/backup/TestS3FileSystem.java +++ b/priam/src/test/java/com/netflix/priam/backup/TestS3FileSystem.java @@ -134,6 +134,7 @@ public void testFileUploadCompleteFailure() throws Exception { MockS3PartUploader.setup(); 
MockS3PartUploader.completionFailure = true; S3FileSystem fs = injector.getInstance(S3FileSystem.class); + fs.setS3Client(new MockAmazonS3Client().getMockInstance()); String snapshotfile = "target/data/Keyspace1/Standard1/backups/201108082320/Keyspace1-Standard1-ia-1-Data.db"; RemoteBackupPath backupfile = injector.getInstance(RemoteBackupPath.class); diff --git a/priam/src/test/java/com/netflix/priam/backupv2/TestForgottenFileManager.java b/priam/src/test/java/com/netflix/priam/backupv2/TestForgottenFileManager.java new file mode 100644 index 000000000..2f566d328 --- /dev/null +++ b/priam/src/test/java/com/netflix/priam/backupv2/TestForgottenFileManager.java @@ -0,0 +1,169 @@ +/* + * Copyright 2019 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package com.netflix.priam.backupv2; + +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.netflix.priam.backup.BRTestModule; +import com.netflix.priam.config.FakeConfiguration; +import com.netflix.priam.config.IConfiguration; +import com.netflix.priam.merics.BackupMetrics; +import com.netflix.priam.utils.DateUtil; +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.commons.io.FileUtils; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** Created by aagrawal on 1/1/19. */ +public class TestForgottenFileManager { + private ForgottenFilesManager forgottenFilesManager; + private TestBackupUtils testBackupUtils; + private IConfiguration configuration; + private List allFiles = new ArrayList<>(); + private Instant snapshotInstant; + private Path snapshotDir; + + public TestForgottenFileManager() { + Injector injector = Guice.createInjector(new BRTestModule()); + BackupMetrics backupMetrics = injector.getInstance(BackupMetrics.class); + configuration = new ForgottenFilesConfiguration(); + forgottenFilesManager = new ForgottenFilesManager(configuration, backupMetrics); + testBackupUtils = injector.getInstance(TestBackupUtils.class); + } + + @Before + public void prep() throws Exception { + cleanup(); + Instant now = DateUtil.getInstant(); + snapshotInstant = now; + Path file1 = Paths.get(testBackupUtils.createFile("file1", now.minus(5, ChronoUnit.DAYS))); + Path file2 = Paths.get(testBackupUtils.createFile("file2", now.minus(3, ChronoUnit.DAYS))); + Path file3 = Paths.get(testBackupUtils.createFile("file3", now.minus(2, ChronoUnit.DAYS))); + Path file4 = Paths.get(testBackupUtils.createFile("file4", now.minus(6, 
ChronoUnit.HOURS))); + Path file5 = + Paths.get(testBackupUtils.createFile("file5", now.minus(1, ChronoUnit.MINUTES))); + Path file6 = + Paths.get( + testBackupUtils.createFile( + "tmplink-lb-59516-big-Index.db", now.minus(3, ChronoUnit.DAYS))); + Path file7 = + Paths.get(testBackupUtils.createFile("file7.tmp", now.minus(3, ChronoUnit.DAYS))); + + allFiles.add(file1); + allFiles.add(file2); + allFiles.add(file3); + allFiles.add(file4); + allFiles.add(file5); + allFiles.add(file6); + allFiles.add(file7); + + // Create a snapshot with file2, file3, file4. + Path columnfamilyDir = file1.getParent(); + snapshotDir = + Paths.get( + columnfamilyDir.toString(), + "snapshot", + "snap_v2_" + DateUtil.formatInstant(DateUtil.yyyyMMddHHmm, now)); + snapshotDir.toFile().mkdirs(); + Files.createLink(Paths.get(snapshotDir.toString(), file2.getFileName().toString()), file2); + Files.createLink(Paths.get(snapshotDir.toString(), file3.getFileName().toString()), file3); + Files.createLink(Paths.get(snapshotDir.toString(), file4.getFileName().toString()), file4); + } + + @After + public void cleanup() throws Exception { + String dataDir = configuration.getDataFileLocation(); + org.apache.commons.io.FileUtils.cleanDirectory(new File(dataDir)); + } + + @Test + public void testMoveForgottenFiles() { + Collection files = allFiles.stream().map(Path::toFile).collect(Collectors.toList()); + forgottenFilesManager.moveForgottenFiles( + new File(configuration.getDataFileLocation()), files); + Path lostFoundDir = + Paths.get(configuration.getDataFileLocation(), forgottenFilesManager.LOST_FOUND); + Collection movedFiles = FileUtils.listFiles(lostFoundDir.toFile(), null, false); + Assert.assertEquals(allFiles.size(), movedFiles.size()); + for (Path file : allFiles) + Assert.assertTrue( + movedFiles.contains( + Paths.get(lostFoundDir.toString(), file.getFileName().toString()) + .toFile())); + } + + @Test + public void getColumnfamilyFiles() { + Path columnfamilyDir = allFiles.get(0).getParent(); + 
Collection columnfamilyFiles = + forgottenFilesManager.getColumnfamilyFiles( + snapshotInstant, columnfamilyDir.toFile()); + Assert.assertEquals(3, columnfamilyFiles.size()); + Assert.assertTrue(columnfamilyFiles.contains(allFiles.get(0).toFile())); + Assert.assertTrue(columnfamilyFiles.contains(allFiles.get(1).toFile())); + Assert.assertTrue(columnfamilyFiles.contains(allFiles.get(2).toFile())); + } + + @Test + public void findAndMoveForgottenFiles() { + Path lostFoundDir = + Paths.get(allFiles.get(0).getParent().toString(), forgottenFilesManager.LOST_FOUND); + forgottenFilesManager.findAndMoveForgottenFiles(snapshotInstant, snapshotDir.toFile()); + + // Only one forgotten file - file1. + Collection movedFiles = FileUtils.listFiles(lostFoundDir.toFile(), null, false); + Assert.assertEquals(1, movedFiles.size()); + Assert.assertTrue( + movedFiles + .iterator() + .next() + .getName() + .equals(allFiles.get(0).getFileName().toString())); + + // All other files still remain in columnfamily dir. + Collection cfFiles = + FileUtils.listFiles(new File(allFiles.get(0).getParent().toString()), null, false); + Assert.assertEquals(6, cfFiles.size()); + int temp_file_name = 1; + for (File file : cfFiles) { + file.getName().equals(allFiles.get(temp_file_name++).getFileName().toString()); + } + + // Snapshot is untouched. + Collection snapshotFiles = FileUtils.listFiles(snapshotDir.toFile(), null, false); + Assert.assertEquals(3, snapshotFiles.size()); + } + + private class ForgottenFilesConfiguration extends FakeConfiguration { + @Override + public boolean isForgottenFileMoveEnabled() { + return true; + } + } +} diff --git a/priam/src/test/java/com/netflix/priam/backupv2/TestLocalDBReaderWriter.java b/priam/src/test/java/com/netflix/priam/backupv2/TestLocalDBReaderWriter.java deleted file mode 100644 index 1b9a0124d..000000000 --- a/priam/src/test/java/com/netflix/priam/backupv2/TestLocalDBReaderWriter.java +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Copyright 2018 Netflix, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package com.netflix.priam.backupv2; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.netflix.priam.backup.BRTestModule; -import com.netflix.priam.config.IConfiguration; -import com.netflix.priam.utils.DateUtil; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.EnumSet; -import java.util.List; -import java.util.Random; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import org.apache.cassandra.io.sstable.Component; -import org.apache.commons.io.FileUtils; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** Created by aagrawal on 9/4/18. 
*/ -public class TestLocalDBReaderWriter { - private static final Logger logger = - LoggerFactory.getLogger(TestLocalDBReaderWriter.class.getName()); - private static IConfiguration configuration; - private static LocalDBReaderWriter localDBReaderWriter; - private static Path dummyDataDirectoryLocation; - - @Before - public void setUp() { - Injector injector = Guice.createInjector(new BRTestModule()); - - if (configuration == null) configuration = injector.getInstance(IConfiguration.class); - - if (localDBReaderWriter == null) - localDBReaderWriter = injector.getInstance(LocalDBReaderWriter.class); - - dummyDataDirectoryLocation = Paths.get(configuration.getDataFileLocation()); - cleanupDir(dummyDataDirectoryLocation); - } - - @After - public void destroy() { - cleanupDir(dummyDataDirectoryLocation); - } - - private void cleanupDir(Path dir) { - if (dir.toFile().exists()) - try { - FileUtils.cleanDirectory(dir.toFile()); - } catch (IOException e) { - e.printStackTrace(); - } - } - - @Test - public void readWriteLocalDB() throws Exception { - int noOfKeyspaces = 2; - int noOfCf = 1; - int noOfSstables = 2; - Path localDbPath = - Paths.get(dummyDataDirectoryLocation.toString(), LocalDBReaderWriter.LOCAL_DB); - List localDBList = - generateDummyLocalDB(noOfKeyspaces, noOfCf, noOfSstables); - - localDBList.forEach( - localDB -> { - FileUploadResult fileUploadResult = - localDB.getLocalDBEntries().get(0).getFileUploadResult(); - final Path localDBPath = localDBReaderWriter.getLocalDBPath(fileUploadResult); - try { - localDBReaderWriter.writeLocalDB(localDBPath, localDB); - } catch (Exception e) { - logger.error("Error while writing to local DB: " + e.getMessage(), e); - } - }); - - // Verify the write succeeded for each KS/CF/SStable. 
- Assert.assertEquals(localDbPath.toFile().listFiles().length, noOfKeyspaces); - Path cfLocalDBPath = localDbPath.toFile().listFiles()[0].listFiles()[0].toPath(); - Assert.assertEquals(noOfSstables, cfLocalDBPath.toFile().listFiles().length); - - // Read the database. - LocalDBReaderWriter.LocalDB localDB = - localDBReaderWriter.readLocalDB(cfLocalDBPath.toFile().listFiles()[0].toPath()); - Assert.assertEquals( - EnumSet.allOf(Component.Type.class).size(), localDB.getLocalDBEntries().size()); - } - - @Test - public void upsertLocalDB() throws Exception { - LocalDBReaderWriter.LocalDB localDB = generateDummyLocalDB(1, 1, 1).get(0); - - // Lets do write with each LocalDBEntry first. - localDB.getLocalDBEntries() - .forEach( - localDBEntry -> { - try { - localDBReaderWriter.upsertLocalDBEntry(localDBEntry); - } catch (Exception e) { - e.printStackTrace(); - } - }); - - // Verify the write has happened. - LocalDBReaderWriter.LocalDBEntry localDBEntry = - localDBReaderWriter.getLocalDBEntry( - localDB.getLocalDBEntries().get(0).getFileUploadResult()); - Assert.assertNotNull(localDBEntry); - - // Now lets see if we can write the same entry again?? - LocalDBReaderWriter.LocalDB localDBUpsert = - localDBReaderWriter.upsertLocalDBEntry(localDBEntry); - Assert.assertEquals( - localDB.getLocalDBEntries().size(), localDBUpsert.getLocalDBEntries().size()); - - // Now lets change the localDBEntry and see if upsert succeeds. - localDBEntry.setTimeLastReferenced(DateUtil.getInstant()); - localDBUpsert = localDBReaderWriter.upsertLocalDBEntry(localDBEntry); - LocalDBReaderWriter.LocalDBEntry localDBEntryUpsert = - localDBReaderWriter.getLocalDBEntry(localDBEntry.getFileUploadResult()); - Assert.assertEquals( - localDBEntryUpsert.getTimeLastReferenced(), localDBEntry.getTimeLastReferenced()); - Assert.assertEquals( - localDB.getLocalDBEntries().size(), localDBUpsert.getLocalDBEntries().size()); - - // Now change the file modification time. This should end up creating a new DB Entry. 
- localDBEntry.getFileUploadResult().setLastModifiedTime(DateUtil.getInstant()); - localDBUpsert = localDBReaderWriter.upsertLocalDBEntry(localDBEntry); - localDBEntryUpsert = - localDBReaderWriter.getLocalDBEntry(localDBEntry.getFileUploadResult()); - Assert.assertEquals( - localDB.getLocalDBEntries().size() + 1, localDBUpsert.getLocalDBEntries().size()); - Assert.assertEquals( - localDBEntry.getFileUploadResult().getLastModifiedTime(), - localDBEntryUpsert.getFileUploadResult().getLastModifiedTime()); - } - - @Test - public void readConcurrentLocalDB() throws Exception { - List localDBList = generateDummyLocalDB(1, 1, 1); - localDBList.forEach( - localDB -> { - FileUploadResult fileUploadResult = - localDB.getLocalDBEntries().get(0).getFileUploadResult(); - final Path localDBPath = localDBReaderWriter.getLocalDBPath(fileUploadResult); - try { - localDBReaderWriter.writeLocalDB(localDBPath, localDB); - } catch (Exception e) { - logger.error("Error while writing to local DB: " + e.getMessage(), e); - } - }); - - FileUploadResult sample = - localDBList.get(0).getLocalDBEntries().get(0).getFileUploadResult(); - int size = 5; - - ExecutorService threads = Executors.newFixedThreadPool(size); - List> torun = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - torun.add(() -> localDBReaderWriter.getLocalDBEntry(sample) != null); - } - - // all tasks executed in different threads, at 'once'. - List> futures = threads.invokeAll(torun); - - // no more need for the threadpool - threads.shutdown(); - // check the results of the tasks. 
- int noOfBadRun = 0; - for (Future fut : futures) { - if (!fut.get()) noOfBadRun++; - } - - Assert.assertEquals(0, noOfBadRun); - } - - @Test - public void writeConcurrentLocalDB() throws Exception { - LocalDBReaderWriter.LocalDB localDB = generateDummyLocalDB(1, 1, 1).get(0); - int size = localDB.getLocalDBEntries().size(); - ExecutorService threads = Executors.newFixedThreadPool(size); - List> torun = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - int finalI = i; - torun.add( - () -> - localDBReaderWriter.upsertLocalDBEntry( - localDB.getLocalDBEntries().get(finalI))); - } - // all tasks executed in different threads, at 'once'. - List> futures = threads.invokeAll(torun); - - // no more need for the threadpool - threads.shutdown(); - // check the results of the tasks. - int noOfBadRun = 0; - for (Future fut : futures) { - // We expect exception here. - try { - fut.get(); - } catch (Exception e) { - noOfBadRun++; - } - } - - Assert.assertEquals(0, noOfBadRun); - LocalDBReaderWriter.LocalDB localDBRead = - localDBReaderWriter.readLocalDB( - localDBReaderWriter.getLocalDBPath( - localDB.getLocalDBEntries().get(0).getFileUploadResult())); - Assert.assertEquals( - localDB.getLocalDBEntries().size(), localDBRead.getLocalDBEntries().size()); - } - - private List generateDummyLocalDB( - int noOfKeyspaces, int noOfCf, int noOfSstables) { - - // Clean the dummy directory - cleanupDir(dummyDataDirectoryLocation); - List localDBList = new ArrayList<>(); - - Random random = new Random(); - - for (int i = 1; i <= noOfKeyspaces; i++) { - String keyspaceName = "sample" + i; - - for (int j = 1; j <= noOfCf; j++) { - String columnfamilyname = "cf" + j; - - for (int k = 1; k <= noOfSstables; k++) { - String prefixName = "mc-" + k + "-big"; - LocalDBReaderWriter.LocalDB localDB = - new LocalDBReaderWriter.LocalDB(new ArrayList<>()); - localDBList.add(localDB); - for (Component.Type type : EnumSet.allOf(Component.Type.class)) { - Path componentPath = - Paths.get( - 
dummyDataDirectoryLocation.toFile().getAbsolutePath(), - keyspaceName, - columnfamilyname, - prefixName + "-" + type.name() + ".db"); - FileUploadResult fileUploadResult = - new FileUploadResult( - componentPath, - keyspaceName, - columnfamilyname, - DateUtil.getInstant(), - DateUtil.getInstant(), - random.nextLong()); - LocalDBReaderWriter.LocalDBEntry localDBEntry = - new LocalDBReaderWriter.LocalDBEntry( - fileUploadResult, - DateUtil.getInstant(), - DateUtil.getInstant()); - localDB.getLocalDBEntries().add(localDBEntry); - } - } - } - } - - return localDBList; - } -} diff --git a/priam/src/test/java/com/netflix/priam/backupv2/TestMetaFileManager.java b/priam/src/test/java/com/netflix/priam/backupv2/TestMetaFileManager.java deleted file mode 100644 index a1dd5482a..000000000 --- a/priam/src/test/java/com/netflix/priam/backupv2/TestMetaFileManager.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright 2018 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package com.netflix.priam.backupv2; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.netflix.priam.backup.BRTestModule; -import com.netflix.priam.config.IConfiguration; -import com.netflix.priam.utils.DateUtil; -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.time.temporal.ChronoUnit; -import org.apache.commons.io.FileUtils; -import org.junit.After; -import org.junit.Assert; -import org.junit.Test; - -/** Created by aagrawal on 11/28/18. */ -public class TestMetaFileManager { - - private MetaFileManager metaFileManager; - private IConfiguration configuration; - - public TestMetaFileManager() { - Injector injector = Guice.createInjector(new BRTestModule()); - metaFileManager = injector.getInstance(MetaFileManager.class); - configuration = injector.getInstance(IConfiguration.class); - } - - @After - public void cleanup() throws IOException { - FileUtils.cleanDirectory(new File(configuration.getDataFileLocation())); - } - - @Test - public void testCleanupOldMetaFiles() throws IOException { - generateDummyMetaFiles(); - Path dataDir = Paths.get(configuration.getDataFileLocation()); - Assert.assertEquals(4, dataDir.toFile().listFiles().length); - - // clean the directory - metaFileManager.cleanupOldMetaFiles(); - - Assert.assertEquals(1, dataDir.toFile().listFiles().length); - Path dummy = Paths.get(dataDir.toString(), "dummy.tmp"); - Assert.assertTrue(dummy.toFile().exists()); - } - - private void generateDummyMetaFiles() throws IOException { - Path dataDir = Paths.get(configuration.getDataFileLocation()); - FileUtils.write( - Paths.get( - configuration.getDataFileLocation(), - MetaFileInfo.getMetaFileName(DateUtil.getInstant())) - .toFile(), - "dummy", - "UTF-8"); - - FileUtils.write( - Paths.get( - configuration.getDataFileLocation(), - MetaFileInfo.getMetaFileName( - DateUtil.getInstant().minus(10, ChronoUnit.MINUTES))) - .toFile(), - "dummy", - 
"UTF-8"); - - FileUtils.write( - Paths.get( - configuration.getDataFileLocation(), - MetaFileInfo.getMetaFileName(DateUtil.getInstant()) + ".tmp") - .toFile(), - "dummy", - "UTF-8"); - - FileUtils.write( - Paths.get(configuration.getDataFileLocation(), "dummy.tmp").toFile(), - "dummy", - "UTF-8"); - } -} diff --git a/priam/src/test/java/com/netflix/priam/backupv2/TestBackupValidator.java b/priam/src/test/java/com/netflix/priam/backupv2/TestMetaV2Proxy.java similarity index 60% rename from priam/src/test/java/com/netflix/priam/backupv2/TestBackupValidator.java rename to priam/src/test/java/com/netflix/priam/backupv2/TestMetaV2Proxy.java index 0a52afc2d..f434204d7 100644 --- a/priam/src/test/java/com/netflix/priam/backupv2/TestBackupValidator.java +++ b/priam/src/test/java/com/netflix/priam/backupv2/TestMetaV2Proxy.java @@ -19,63 +19,76 @@ import com.google.inject.Guice; import com.google.inject.Injector; +import com.google.inject.Provider; import com.netflix.priam.backup.AbstractBackupPath; import com.netflix.priam.backup.BRTestModule; import com.netflix.priam.backup.BackupRestoreException; import com.netflix.priam.backup.FakeBackupFileSystem; import com.netflix.priam.config.IConfiguration; import com.netflix.priam.utils.DateUtil; +import java.io.File; +import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; import org.apache.commons.io.FileUtils; +import org.junit.After; import org.junit.Assert; import org.junit.Test; /** Created by aagrawal on 12/5/18. 
*/ -public class TestBackupValidator { +public class TestMetaV2Proxy { private FakeBackupFileSystem fs; private IConfiguration configuration; - private BackupValidator backupValidator; private TestBackupUtils backupUtils; + private IMetaProxy metaProxy; + private Provider abstractBackupPathProvider; - public TestBackupValidator() { + public TestMetaV2Proxy() { Injector injector = Guice.createInjector(new BRTestModule()); configuration = injector.getInstance(IConfiguration.class); fs = injector.getInstance(FakeBackupFileSystem.class); fs.setupTest(getRemoteFakeFiles()); - backupValidator = injector.getInstance(BackupValidator.class); backupUtils = new TestBackupUtils(); + metaProxy = injector.getInstance(MetaV2Proxy.class); + abstractBackupPathProvider = injector.getProvider(AbstractBackupPath.class); } @Test public void testMetaPrefix() { // Null date range - Assert.assertEquals(getPrefix() + "/META_V2", backupValidator.getMetaPrefix(null)); + Assert.assertEquals(getPrefix() + "/META_V2", metaProxy.getMetaPrefix(null)); + Instant now = Instant.now(); // No end date. 
Assert.assertEquals( - getPrefix() + "/META_V2", - backupValidator.getMetaPrefix(new DateUtil.DateRange(Instant.now(), null))); + getPrefix() + "/META_V2/" + now.toEpochMilli(), + metaProxy.getMetaPrefix(new DateUtil.DateRange(now, null))); // No start date Assert.assertEquals( getPrefix() + "/META_V2", - backupValidator.getMetaPrefix(new DateUtil.DateRange(null, Instant.now()))); + metaProxy.getMetaPrefix(new DateUtil.DateRange(null, Instant.now()))); long start = 1834567890L; long end = 1834877776L; Assert.assertEquals( getPrefix() + "/META_V2/1834", - backupValidator.getMetaPrefix( + metaProxy.getMetaPrefix( new DateUtil.DateRange( Instant.ofEpochSecond(start), Instant.ofEpochSecond(end)))); } @Test public void testIsMetaFileValid() throws Exception { - Path metaPath = backupUtils.createMeta(getRemoteFakeFiles(), DateUtil.getInstant()); - Assert.assertTrue(backupValidator.isMetaFileValid(metaPath)); + Instant snapshotInstant = DateUtil.getInstant(); + Path metaPath = backupUtils.createMeta(getRemoteFakeFiles(), snapshotInstant); + AbstractBackupPath abstractBackupPath = abstractBackupPathProvider.get(); + abstractBackupPath.parseLocal(metaPath.toFile(), AbstractBackupPath.BackupFileType.META_V2); + + Assert.assertTrue(metaProxy.isMetaFileValid(abstractBackupPath).valid); FileUtils.deleteQuietly(metaPath.toFile()); List fileToAdd = getRemoteFakeFiles(); @@ -91,18 +104,40 @@ public void testIsMetaFileValid() throws Exception { "file9.Data.db") .toString()); - metaPath = backupUtils.createMeta(fileToAdd, DateUtil.getInstant()); - Assert.assertFalse(backupValidator.isMetaFileValid(metaPath)); + metaPath = backupUtils.createMeta(fileToAdd, snapshotInstant); + Assert.assertFalse(metaProxy.isMetaFileValid(abstractBackupPath).valid); FileUtils.deleteQuietly(metaPath.toFile()); - metaPath = Paths.get(configuration.getDataFileLocation(), "meta_v2_201901010000.json"); - Assert.assertFalse(backupValidator.isMetaFileValid(metaPath)); + metaPath = 
Paths.get(configuration.getDataFileLocation(), "meta_v2_201801010000.json"); + Assert.assertFalse(metaProxy.isMetaFileValid(abstractBackupPath).valid); + } + + @Test + public void testGetSSTFilesFromMeta() throws Exception { + Instant snapshotInstant = DateUtil.getInstant(); + List remoteFiles = getRemoteFakeFiles(); + Path metaPath = backupUtils.createMeta(remoteFiles, snapshotInstant); + List filesFromMeta = metaProxy.getSSTFilesFromMeta(metaPath); + filesFromMeta.removeAll(remoteFiles); + Assert.assertTrue(filesFromMeta.isEmpty()); + } + + @Test + public void testGetIncrementalFiles() throws Exception { + DateUtil.DateRange dateRange = new DateUtil.DateRange("202812071820,20281229"); + Iterator incrementals = metaProxy.getIncrementals(dateRange); + int i = 0; + while (incrementals.hasNext()) { + System.out.println(incrementals.next()); + i++; + } + Assert.assertEquals(3, i); } @Test public void testFindMetaFiles() throws BackupRestoreException { List metas = - backupValidator.findMetaFiles( + metaProxy.findMetaFiles( new DateUtil.DateRange( Instant.ofEpochMilli(1859824860000L), Instant.ofEpochMilli(1859828420000L))); @@ -111,7 +146,7 @@ public void testFindMetaFiles() throws BackupRestoreException { Assert.assertTrue(fs.doesRemoteFileExist(Paths.get(metas.get(0).getRemotePath()))); metas = - backupValidator.findMetaFiles( + metaProxy.findMetaFiles( new DateUtil.DateRange( Instant.ofEpochMilli(1859824860000L), Instant.ofEpochMilli(1859828460000L))); @@ -196,4 +231,57 @@ private List getRemoteFakeFiles() { "meta_v2_202812071901.json")); return files.stream().map(Path::toString).collect(Collectors.toList()); } + + @After + public void cleanup() throws IOException { + FileUtils.cleanDirectory(new File(configuration.getDataFileLocation())); + } + + @Test + public void testCleanupOldMetaFiles() throws IOException { + generateDummyMetaFiles(); + Path dataDir = Paths.get(configuration.getDataFileLocation()); + Assert.assertEquals(4, dataDir.toFile().listFiles().length); 
+ + // clean the directory + metaProxy.cleanupOldMetaFiles(); + + Assert.assertEquals(1, dataDir.toFile().listFiles().length); + Path dummy = Paths.get(dataDir.toString(), "dummy.tmp"); + Assert.assertTrue(dummy.toFile().exists()); + } + + private void generateDummyMetaFiles() throws IOException { + Path dataDir = Paths.get(configuration.getDataFileLocation()); + FileUtils.cleanDirectory(dataDir.toFile()); + FileUtils.write( + Paths.get( + configuration.getDataFileLocation(), + MetaFileInfo.getMetaFileName(DateUtil.getInstant())) + .toFile(), + "dummy", + "UTF-8"); + + FileUtils.write( + Paths.get( + configuration.getDataFileLocation(), + MetaFileInfo.getMetaFileName( + DateUtil.getInstant().minus(10, ChronoUnit.MINUTES))) + .toFile(), + "dummy", + "UTF-8"); + + FileUtils.write( + Paths.get( + configuration.getDataFileLocation(), + MetaFileInfo.getMetaFileName(DateUtil.getInstant()) + ".tmp") + .toFile(), + "dummy", + "UTF-8"); + + FileUtils.write( + Paths.get(configuration.getDataFileLocation(), "dummy.tmp").toFile(), + "dummy", + "UTF-8"); + } } diff --git a/priam/src/test/java/com/netflix/priam/config/FakeBackupRestoreConfig.java b/priam/src/test/java/com/netflix/priam/config/FakeBackupRestoreConfig.java index 7339d5b04..246a91a86 100644 --- a/priam/src/test/java/com/netflix/priam/config/FakeBackupRestoreConfig.java +++ b/priam/src/test/java/com/netflix/priam/config/FakeBackupRestoreConfig.java @@ -24,4 +24,9 @@ public String getSnapshotMetaServiceCronExpression() { public boolean enableV2Backups() { return true; } + + @Override + public boolean enableV2Restore() { + return false; + } } diff --git a/priam/src/test/java/com/netflix/priam/resources/BackupServletTest.java b/priam/src/test/java/com/netflix/priam/resources/BackupServletTest.java index 793292ffb..ce1ae0fb5 100644 --- a/priam/src/test/java/com/netflix/priam/resources/BackupServletTest.java +++ b/priam/src/test/java/com/netflix/priam/resources/BackupServletTest.java @@ -21,18 +21,13 @@ import 
com.google.inject.Guice; import com.google.inject.Injector; -import com.google.inject.Provider; -import com.netflix.priam.PriamServer; import com.netflix.priam.backup.*; import com.netflix.priam.config.IConfiguration; -import com.netflix.priam.defaultimpl.ICassandraProcess; import com.netflix.priam.health.InstanceState; -import com.netflix.priam.identity.IPriamInstanceFactory; import com.netflix.priam.identity.config.InstanceInfo; import com.netflix.priam.restore.Restore; -import com.netflix.priam.tuner.ICassandraTuner; import com.netflix.priam.utils.DateUtil; -import java.util.Date; +import java.time.Instant; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import mockit.Expectations; @@ -45,19 +40,11 @@ @RunWith(JMockit.class) public class BackupServletTest { - private @Mocked PriamServer priamServer; private IConfiguration config; - private @Mocked IBackupFileSystem bkpFs; private @Mocked Restore restoreObj; - private @Mocked Provider pathProvider; - private @Mocked ICassandraTuner tuner; private @Mocked SnapshotBackup snapshotBackup; - private @Mocked IPriamInstanceFactory factory; - private @Mocked ICassandraProcess cassProcess; - private @Mocked BackupStatusMgr bkupStatusMgr; private BackupServlet resource; private RestoreServlet restoreResource; - private BackupVerification backupVerification; private InstanceInfo instanceInfo; @Before @@ -66,9 +53,8 @@ public void setUp() { config = injector.getInstance(IConfiguration.class); InstanceState instanceState = injector.getInstance(InstanceState.class); instanceInfo = injector.getInstance(InstanceInfo.class); - resource = - new BackupServlet(config, bkpFs, snapshotBackup, bkupStatusMgr, backupVerification); - restoreResource = new RestoreServlet(restoreObj, instanceState); + resource = injector.getInstance(BackupServlet.class); + restoreResource = injector.getInstance(RestoreServlet.class); } @Test @@ -95,7 +81,7 @@ public void restore_minimal() throws Exception { instanceInfo.getRegion(); 
result = oldRegion; - restoreObj.restore((Date) any, (Date) any); // TODO: test default value + restoreObj.restore(new DateUtil.DateRange((Instant) any, (Instant) any)); } }; @@ -121,9 +107,7 @@ public void restore_withDateRange() throws Exception { DateUtil.getDate(dateRange.split(",")[1]); result = new DateTime(2011, 12, 31, 23, 59).toDate(); times = 1; - restoreObj.restore( - DateUtil.getDate(dateRange.split(",")[0]), - DateUtil.getDate(dateRange.split(",")[1])); + restoreObj.restore(new DateUtil.DateRange(dateRange)); } }; diff --git a/priam/src/test/java/com/netflix/priam/backup/TestRestore.java b/priam/src/test/java/com/netflix/priam/restore/TestRestore.java similarity index 52% rename from priam/src/test/java/com/netflix/priam/backup/TestRestore.java rename to priam/src/test/java/com/netflix/priam/restore/TestRestore.java index b603f3dd0..06a21b1a6 100644 --- a/priam/src/test/java/com/netflix/priam/backup/TestRestore.java +++ b/priam/src/test/java/com/netflix/priam/restore/TestRestore.java @@ -1,5 +1,5 @@ /* - * Copyright 2017 Netflix, Inc. + * Copyright 2018 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,45 +15,40 @@ * */ -package com.netflix.priam.backup; +package com.netflix.priam.restore; import com.google.inject.Guice; import com.google.inject.Injector; -import com.google.inject.Key; -import com.google.inject.name.Names; +import com.netflix.priam.backup.BRTestModule; +import com.netflix.priam.backup.FakeBackupFileSystem; +import com.netflix.priam.backup.Status; import com.netflix.priam.config.FakeConfiguration; import com.netflix.priam.config.IConfiguration; +import com.netflix.priam.health.InstanceState; import com.netflix.priam.identity.config.InstanceInfo; -import com.netflix.priam.restore.Restore; -import java.io.File; +import com.netflix.priam.utils.DateUtil; import java.io.IOException; import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; public class TestRestore { private static FakeBackupFileSystem filesystem; - private static ArrayList fileList; - private static Calendar cal; + private static ArrayList fileList = new ArrayList<>(); private static FakeConfiguration conf; private static String region; private static Restore restore; + private static InstanceState instanceState; @BeforeClass public static void setup() throws InterruptedException, IOException { Injector injector = Guice.createInjector(new BRTestModule()); - filesystem = - (FakeBackupFileSystem) - injector.getInstance( - Key.get(IBackupFileSystem.class, Names.named("backup"))); - conf = (FakeConfiguration) injector.getInstance(IConfiguration.class); + if (filesystem == null) filesystem = injector.getInstance(FakeBackupFileSystem.class); + if (conf == null) conf = (FakeConfiguration) injector.getInstance(IConfiguration.class); region = injector.getInstance(InstanceInfo.class).getRegion(); - restore = injector.getInstance(Restore.class); - fileList = new ArrayList<>(); - cal = Calendar.getInstance(); + if (restore == null) restore = injector.getInstance(Restore.class); + if 
(instanceState == null) instanceState = injector.getInstance(InstanceState.class); } private static void populateBackupFileSystem(String baseDir) { @@ -74,73 +69,93 @@ private static void populateBackupFileSystem(String baseDir) { @Test public void testRestore() throws Exception { populateBackupFileSystem("test_backup"); - File tmpdir = new File(conf.getDataFileLocation() + "/test"); - tmpdir.mkdir(); - Assert.assertTrue(tmpdir.exists()); - cal.set(2011, Calendar.AUGUST, 11, 0, 30, 0); - cal.set(Calendar.MILLISECOND, 0); - Date startTime = cal.getTime(); - cal.add(Calendar.HOUR, 5); - restore.restore(startTime, cal.getTime()); + String dateRange = "201108110030,201108110530"; + restore.restore(new DateUtil.DateRange(dateRange)); Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(0))); Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(1))); Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(2))); Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(3))); Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(4))); Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(5))); - tmpdir = new File(conf.getDataFileLocation() + "/test"); - Assert.assertFalse(tmpdir.exists()); + Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus()); + } + + @Test + public void testRestoreWithIncremental() throws Exception { + populateBackupFileSystem("test_backup"); + String dateRange = "201108110030,201108110730"; + restore.restore(new DateUtil.DateRange(dateRange)); + Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(0))); + Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(1))); + Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(2))); + Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(3))); + Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(4))); + 
Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(5))); + Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus()); + } + + @Test + public void testRestoreLatestWithEmptyMeta() throws Exception { + populateBackupFileSystem("test_backup"); + String metafile = + "test_backup/" + region + "/fakecluster/123456/201108110130/META/meta.json"; + filesystem.addFile(metafile); + String dateRange = "201108110030,201108110530"; + restore.restore(new DateUtil.DateRange(dateRange)); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(0))); + Assert.assertTrue(filesystem.downloadedFiles.contains(metafile)); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(1))); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(2))); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(3))); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(4))); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(5))); + Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus()); + Assert.assertEquals(metafile, instanceState.getRestoreStatus().getSnapshotMetaFile()); } - // Pick latest file @Test public void testRestoreLatest() throws Exception { populateBackupFileSystem("test_backup"); String metafile = "test_backup/" + region + "/fakecluster/123456/201108110130/META/meta.json"; filesystem.addFile(metafile); - cal.set(2011, Calendar.AUGUST, 11, 0, 30, 0); - cal.set(Calendar.MILLISECOND, 0); - Date startTime = cal.getTime(); - cal.add(Calendar.HOUR, 5); - restore.restore(startTime, cal.getTime()); + String snapFile = + "test_backup/" + region + "/fakecluster/123456/201108110130/SNAP/ks1/cf1/f9.db"; + filesystem.addFile(snapFile); + String dateRange = "201108110030,201108110530"; + restore.restore(new DateUtil.DateRange(dateRange)); Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(0))); 
Assert.assertTrue(filesystem.downloadedFiles.contains(metafile)); - Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(1))); - Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(2))); - Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(3))); + Assert.assertTrue(filesystem.downloadedFiles.contains(snapFile)); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(1))); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(2))); + Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(3))); Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(4))); Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(5))); + Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus()); + Assert.assertEquals(metafile, instanceState.getRestoreStatus().getSnapshotMetaFile()); } @Test public void testNoSnapshots() throws Exception { - try { - filesystem.setupTest(fileList); - cal.set(2011, Calendar.SEPTEMBER, 11, 0, 30); - Date startTime = cal.getTime(); - cal.add(Calendar.HOUR, 5); - restore.restore(startTime, cal.getTime()); - Assert.assertFalse(true); // No exception thrown - } catch (IllegalStateException e) { - // We are ok. No snapshot found. 
- Assert.assertTrue(true); - } + populateBackupFileSystem("test_backup"); + filesystem.setupTest(fileList); + String dateRange = "201109110030,201109110530"; + restore.restore(new DateUtil.DateRange(dateRange)); + Assert.assertEquals(Status.FAILED, instanceState.getRestoreStatus().getStatus()); } @Test public void testRestoreFromDiffCluster() throws Exception { populateBackupFileSystem("test_backup_new"); - cal.set(2011, Calendar.AUGUST, 11, 0, 30, 0); - cal.set(Calendar.MILLISECOND, 0); - Date startTime = cal.getTime(); - cal.add(Calendar.HOUR, 5); - restore.restore(startTime, cal.getTime()); + String dateRange = "201108110030,201108110530"; + restore.restore(new DateUtil.DateRange(dateRange)); Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(0))); Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(1))); Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(2))); Assert.assertTrue(filesystem.downloadedFiles.contains(fileList.get(3))); Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(4))); Assert.assertFalse(filesystem.downloadedFiles.contains(fileList.get(5))); + Assert.assertEquals(Status.FINISHED, instanceState.getRestoreStatus().getStatus()); } } diff --git a/priam/src/test/java/com/netflix/priam/services/TestSnapshotMetaService.java b/priam/src/test/java/com/netflix/priam/services/TestSnapshotMetaService.java index ce0bc81b8..b65480f17 100644 --- a/priam/src/test/java/com/netflix/priam/services/TestSnapshotMetaService.java +++ b/priam/src/test/java/com/netflix/priam/services/TestSnapshotMetaService.java @@ -103,6 +103,9 @@ private void test(int noOfSstables, int noOfKeyspaces, int noOfCf) throws Except Assert.assertNotNull(metaFileLocation); Assert.assertTrue(metaFileLocation.toFile().exists()); Assert.assertTrue(metaFileLocation.toFile().isFile()); + Assert.assertEquals( + snapshotInstant.getEpochSecond(), + (metaFileLocation.toFile().lastModified() / 1000)); // Try reading meta file. 
metaFileReader.setNoOfSstables(noOfSstables + 1);