initial commit

commit b2a8a83988
2020-11-01 23:24:20 -06:00
11 changed files with 949 additions and 0 deletions

Entry.java

@@ -0,0 +1,49 @@
package net.locusworks.s3sync;
import java.nio.file.Paths;
import net.locusworks.logger.ApplicationLogger;
import net.locusworks.logger.ApplicationLoggerFactory;
import net.locusworks.logger.ApplicationLoggerInitializer;
import net.locusworks.logger.LogLevel;
import net.locusworks.s3sync.client.S3Client;
import net.locusworks.s3sync.conf.ConfigurationManager;
public class Entry {
public static void main(String[] args) {
ConfigurationManager conf = ConfigurationManager.getInstance();
ApplicationLoggerFactory.init(new ApplicationLoggerInitializer() {
@Override
public LogLevel initialize() {
return conf.getLogLevel();
}
});
ApplicationLogger logger = ApplicationLoggerFactory.getLogger(Entry.class);
if (System.getenv("AWS_ACCESS_KEY_ID") == null) {
logger.error("AWS_ACCESS_KEY_ID is not set in environment variables");
System.exit(-1);
}
if (System.getenv("AWS_SECRET_ACCESS_KEY") == null) {
logger.error("AWS_SECRET_ACCESS_KEY is not set in environment variables");
System.exit(-1);
}
logger.info("Starting S3 Sync");
try {
S3Client client = new S3Client(ConfigurationManager.getInstance());
client.uploadFile(Paths.get("D:\\OneDrive\\Documents\\config.docx"));
} catch (Exception | Error e) {
logger.error(e);
System.exit(-1);
}
}
}

FileDetail.java

@@ -0,0 +1,59 @@
package net.locusworks.s3sync.client;
import java.nio.file.Path;
public class FileDetail {
private Path file;
private String hash;
private boolean uploaded;
public FileDetail() {}
/**
* @param file the file being tracked for upload
* @param hash hash of the file's contents
* @param uploaded whether the file has already been uploaded to S3
*/
public FileDetail(Path file, String hash, boolean uploaded) {
this.file = file;
this.hash = hash;
this.uploaded = uploaded;
}
/**
* @return the file
*/
public synchronized final Path getFile() {
return file;
}
/**
* @param file the file to set
*/
public synchronized final void setFile(Path file) {
this.file = file;
}
/**
* @return the hash
*/
public synchronized final String getHash() {
return hash;
}
/**
* @param hash the hash to set
*/
public synchronized final void setHash(String hash) {
this.hash = hash;
}
/**
* @return the uploaded
*/
public synchronized final boolean isUploaded() {
return uploaded;
}
/**
* @param uploaded the uploaded to set
*/
public synchronized final void setUploaded(boolean uploaded) {
this.uploaded = uploaded;
}
}

S3Client.java

@@ -0,0 +1,76 @@
package net.locusworks.s3sync.client;
import java.nio.file.Path;
import java.util.concurrent.ExecutorService;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.client.builder.ExecutorFactory;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.transfer.MultipleFileUpload;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;
import net.locusworks.logger.ApplicationLogger;
import net.locusworks.logger.ApplicationLoggerFactory;
import net.locusworks.s3sync.conf.ConfigurationManager;
public class S3Client {
private ApplicationLogger logger = ApplicationLoggerFactory.getLogger(S3Client.class);
private AmazonS3 s3Client;
private String bucket;
private Path synchFolder;
public S3Client(ConfigurationManager conf) {
String region = conf.getRegion();
this.s3Client = AmazonS3ClientBuilder.standard().withRegion(Regions.fromName(region)).build();
Bucket bucket = s3Client.listBuckets().stream()
.filter(b -> b.getName().equals(conf.getBucketName())).findFirst().orElse(null);
if (bucket == null) {
logger.error("Unable to find bucket with name: %s%nExiting", conf.getBucketName());
System.exit(-1);
}
logger.info("Found Bucket: %s", bucket);
this.bucket = conf.getBucketName();
this.synchFolder = conf.getSyncFolder();
}
public void uploadFile(Path file) {
logger.info("Uploading file: %s", file);
TransferManager xferMgr = TransferManagerBuilder.standard().withS3Client(s3Client).build();
try {
Upload xfer = xferMgr.upload(bucket, file.getFileName().toString(), file.toFile());
// loop with Transfer.isDone()
XferMgrProgress.showTransferProgress(xfer);
// or block with Transfer.waitForCompletion()
XferMgrProgress.waitForCompletion(xfer);
logger.info("Done uploading %s", file);
} catch (AmazonServiceException e) {
logger.error(e.getErrorMessage());
System.exit(1);
}
xferMgr.shutdownNow();
}
public void syncFolder() {
TransferManager xferMgr = TransferManagerBuilder.standard()
.withS3Client(s3Client)
.build();
// keep the returned transfer, wait for it to finish, then release the transfer manager's threads;
// otherwise the directory upload is silently abandoned when this method returns
MultipleFileUpload xfer = xferMgr.uploadDirectory(bucket, null, synchFolder.toFile(), true);
XferMgrProgress.showMultiUploadProgress(xfer);
XferMgrProgress.waitForCompletion(xfer);
xferMgr.shutdownNow();
}
}

XferMgrProgress.java

@@ -0,0 +1,270 @@
//snippet-sourcedescription:[XferMgrProgress.java demonstrates how to use the S3 transfermanager to upload files to a bucket and show progress of the upload.]
//snippet-keyword:[Java]
//snippet-sourcesyntax:[java]
//snippet-keyword:[Code Sample]
//snippet-keyword:[Amazon S3]
//snippet-keyword:[TransferProgress]
//snippet-keyword:[TransferManager]
//snippet-service:[s3]
//snippet-sourcetype:[full-example]
//snippet-sourcedate:[]
//snippet-sourceauthor:[soo-aws]
/*
Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
This file is licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License. A copy of
the License is located at
http://aws.amazon.com/apache2.0/
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/
package net.locusworks.s3sync.client;
// snippet-start:[s3.java1.s3_xfer_mgr_progress.import]
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressListener;
import com.amazonaws.services.s3.transfer.*;
import com.amazonaws.services.s3.transfer.Transfer.TransferState;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
// snippet-end:[s3.java1.s3_xfer_mgr_progress.import]
// snippet-start:[s3.java1.s3_xfer_mgr_progress.complete]
public class XferMgrProgress {
// waits for the transfer to complete, catching any exceptions that occur.
public static void waitForCompletion(Transfer xfer) {
// snippet-start:[s3.java1.s3_xfer_mgr_progress.wait_for_transfer]
try {
xfer.waitForCompletion();
} catch (AmazonServiceException e) {
System.err.println("Amazon service error: " + e.getMessage());
System.exit(1);
} catch (AmazonClientException e) {
System.err.println("Amazon client error: " + e.getMessage());
System.exit(1);
} catch (InterruptedException e) {
System.err.println("Transfer interrupted: " + e.getMessage());
System.exit(1);
}
// snippet-end:[s3.java1.s3_xfer_mgr_progress.wait_for_transfer]
}
// Prints progress while waiting for the transfer to finish.
public static void showTransferProgress(Transfer xfer) {
// snippet-start:[s3.java1.s3_xfer_mgr_progress.poll]
// print the transfer's human-readable description
System.out.println(xfer.getDescription());
// print an empty progress bar...
printProgressBar(0.0);
// update the progress bar while the xfer is ongoing.
do {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
return;
}
// Note: so_far and total aren't used, they're just for
// documentation purposes.
TransferProgress progress = xfer.getProgress();
long so_far = progress.getBytesTransferred();
long total = progress.getTotalBytesToTransfer();
double pct = progress.getPercentTransferred();
eraseProgressBar();
printProgressBar(pct);
} while (xfer.isDone() == false);
// print the final state of the transfer.
TransferState xfer_state = xfer.getState();
System.out.println(": " + xfer_state);
// snippet-end:[s3.java1.s3_xfer_mgr_progress.poll]
}
// Prints progress of a multiple file upload while waiting for it to finish.
public static void showMultiUploadProgress(MultipleFileUpload multi_upload) {
// print the upload's human-readable description
System.out.println(multi_upload.getDescription());
// snippet-start:[s3.java1.s3_xfer_mgr_progress.substranferes]
Collection<? extends Upload> sub_xfers = new ArrayList<Upload>();
sub_xfers = multi_upload.getSubTransfers();
do {
System.out.println("\nSubtransfer progress:\n");
for (Upload u : sub_xfers) {
System.out.println(" " + u.getDescription());
if (u.isDone()) {
TransferState xfer_state = u.getState();
System.out.println(" " + xfer_state);
} else {
TransferProgress progress = u.getProgress();
double pct = progress.getPercentTransferred();
printProgressBar(pct);
System.out.println();
}
}
// wait a bit before the next update.
try {
Thread.sleep(200);
} catch (InterruptedException e) {
return;
}
} while (multi_upload.isDone() == false);
// print the final state of the transfer.
TransferState xfer_state = multi_upload.getState();
System.out.println("\nMultipleFileUpload " + xfer_state);
// snippet-end:[s3.java1.s3_xfer_mgr_progress.substranferes]
}
// prints a simple text progressbar: [##### ]
public static void printProgressBar(double pct) {
// if bar_size changes, then change erase_bar (in eraseProgressBar) to
// match.
final int bar_size = 40;
final String empty_bar = " ";
final String filled_bar = "########################################";
int amt_full = (int) (bar_size * (pct / 100.0));
System.out.format(" [%s%s]", filled_bar.substring(0, amt_full),
empty_bar.substring(0, bar_size - amt_full));
}
// erases the progress bar.
public static void eraseProgressBar() {
// erase_bar is bar_size (from printProgressBar) + 4 chars.
final String erase_bar = "\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b";
System.out.format(erase_bar);
}
public static void uploadFileWithListener(String file_path,
String bucket_name, String key_prefix, boolean pause) {
System.out.println("file: " + file_path +
(pause ? " (pause)" : ""));
String key_name = null;
if (key_prefix != null) {
key_name = key_prefix + '/' + file_path;
} else {
key_name = file_path;
}
// snippet-start:[s3.java1.s3_xfer_mgr_progress.progress_listener]
File f = new File(file_path);
TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
try {
Upload u = xfer_mgr.upload(bucket_name, key_name, f);
// print an empty progress bar...
printProgressBar(0.0);
u.addProgressListener(new ProgressListener() {
public void progressChanged(ProgressEvent e) {
double pct = e.getBytesTransferred() * 100.0 / e.getBytes();
eraseProgressBar();
printProgressBar(pct);
}
});
// block with Transfer.waitForCompletion()
XferMgrProgress.waitForCompletion(u);
// print the final state of the transfer.
TransferState xfer_state = u.getState();
System.out.println(": " + xfer_state);
} catch (AmazonServiceException e) {
System.err.println(e.getErrorMessage());
System.exit(1);
}
xfer_mgr.shutdownNow();
// snippet-end:[s3.java1.s3_xfer_mgr_progress.progress_listener]
}
public static void uploadDirWithSubprogress(String dir_path,
String bucket_name, String key_prefix, boolean recursive,
boolean pause) {
System.out.println("directory: " + dir_path + (recursive ?
" (recursive)" : "") + (pause ? " (pause)" : ""));
TransferManager xfer_mgr = new TransferManager();
try {
MultipleFileUpload multi_upload = xfer_mgr.uploadDirectory(
bucket_name, key_prefix, new File(dir_path), recursive);
// loop with Transfer.isDone()
XferMgrProgress.showMultiUploadProgress(multi_upload);
// or block with Transfer.waitForCompletion()
XferMgrProgress.waitForCompletion(multi_upload);
} catch (AmazonServiceException e) {
System.err.println(e.getErrorMessage());
System.exit(1);
}
xfer_mgr.shutdownNow();
}
public static void main(String[] args) {
final String USAGE = "\n" +
"Usage:\n" +
" XferMgrProgress [--recursive] [--pause] <s3_path> <local_path>\n\n" +
"Where:\n" +
" --recursive - Only applied if local_path is a directory.\n" +
" Copies the contents of the directory recursively.\n\n" +
" --pause - Attempt to pause+resume the upload. This may not work for\n" +
" small files.\n\n" +
" s3_path - The S3 destination (bucket/path) to upload the file(s) to.\n\n" +
" local_path - The path to a local file or directory path to upload to S3.\n\n" +
"Examples:\n" +
" XferMgrProgress public_photos/cat_happy.png my_photos/funny_cat.png\n" +
" XferMgrProgress public_photos my_photos/cat_sad.png\n" +
" XferMgrProgress public_photos my_photos\n\n";
if (args.length < 2) {
System.out.println(USAGE);
System.exit(1);
}
int cur_arg = 0;
boolean recursive = false;
boolean pause = false;
// first, parse any switches
while (args[cur_arg].startsWith("--")) {
if (args[cur_arg].equals("--recursive")) {
recursive = true;
} else if (args[cur_arg].equals("--pause")) {
pause = true;
} else {
System.out.println("Unknown argument: " + args[cur_arg]);
System.out.println(USAGE);
System.exit(1);
}
cur_arg += 1;
}
// only the first '/' character is of interest to get the bucket name.
// Subsequent ones are part of the key name.
String[] s3_path = args[cur_arg].split("/", 2);
cur_arg += 1;
String bucket_name = s3_path[0];
String key_prefix = null;
if (s3_path.length > 1) {
key_prefix = s3_path[1];
}
String local_path = args[cur_arg];
// check to see if local path is a directory or file...
File f = new File(args[cur_arg]);
if (f.exists() == false) {
System.out.println("Input path doesn't exist: " + args[cur_arg]);
System.exit(1);
} else if (f.isDirectory()) {
uploadDirWithSubprogress(local_path, bucket_name, key_prefix,
recursive, pause);
} else {
uploadFileWithListener(local_path, bucket_name, key_prefix, pause);
}
}
}
// snippet-end:[s3.java1.s3_xfer_mgr_progress.complete]

ConfigurationManager.java

@@ -0,0 +1,64 @@
package net.locusworks.s3sync.conf;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;
import net.locusworks.logger.LogLevel;
public class ConfigurationManager {
private static final String CONF_FILE= "s3.properties";
private Properties conf;
private static ConfigurationManager confManager;
private ConfigurationManager() {
init();
}
private void init() {
try {
Properties defaultProps = PropertiesManager.loadConfiguration(ConfigurationManager.class, CONF_FILE);
Path confFile = Paths.get(CONF_FILE);
if (Files.notExists(confFile)) {
PropertiesManager.saveConfiguration(defaultProps, confFile, "AWS S3 Configuration");
conf = defaultProps;
} else {
conf = PropertiesManager.loadConfiguration(confFile);
PropertiesManager.addConfiguration(conf, defaultProps);
PropertiesManager.removeConfiguration(conf, defaultProps);
PropertiesManager.saveConfiguration(conf, confFile, "AWS S3 Configuration");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public String getRegion() {
return conf.getProperty("region");
}
public String getBucketName() {
return conf.getProperty("bucketName");
}
public Path getSyncFolder() {
return Paths.get(conf.getProperty("syncFolder"));
}
public LogLevel getLogLevel() {
return LogLevel.getEnum(conf.getProperty("logLevel"));
}
public static ConfigurationManager getInstance() {
if (confManager == null) {
confManager = new ConfigurationManager();
}
return confManager;
}
}
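ConfigurationManager reads four keys out of s3.properties, falling back to the defaults bundled on the classpath when no local file exists. A minimal sketch of that file with placeholder values; the real defaults ship inside the jar and are not part of this diff, and the logLevel value must be something net.locusworks.logger.LogLevel.getEnum accepts, which this commit does not show:

# AWS S3 Configuration
region=us-east-1
bucketName=my-sync-bucket
syncFolder=/path/to/sync
logLevel=INFO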

PropertiesManager.java

@@ -0,0 +1,118 @@
package net.locusworks.s3sync.conf;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import static java.nio.charset.StandardCharsets.UTF_8;
public class PropertiesManager {
/**
* Load a configuration from a classpath resource
* @param clazz class whose loader is used to locate the resource
* @param src name of the resource to load
* @return properties, or null if the resource cannot be found
* @throws IOException Exception thrown if the resource can't be read
*/
public static Properties loadConfiguration(Class<?> clazz, String src) throws IOException {
InputStream is = clazz.getResourceAsStream(src);
if (is == null) {
is = clazz.getClassLoader().getResourceAsStream(src);
}
if (is == null) {
return null;
}
// close the stream and reader once the properties have been loaded
try (BufferedReader br = new BufferedReader(new InputStreamReader(is, UTF_8))) {
return loadConfiguration(br);
}
}
/**
* Load configuration from a file
* @param file File to load
* @return properties (empty if the file does not exist)
* @throws IOException Exception thrown if the file can't be read
*/
public static Properties loadConfiguration(Path file) throws IOException {
if (Files.notExists(file)) {
return new Properties();
}
// close the reader once the properties have been loaded
try (BufferedReader br = Files.newBufferedReader(file, UTF_8)) {
return loadConfiguration(br);
}
}
/**
* Load configuration from a buffered reader
* @param reader Buffered reader to read the properties values from
* @return properties
* @throws IOException Exception thrown if the properties can't be read from the reader
*/
public static Properties loadConfiguration(BufferedReader reader) throws IOException {
Properties config = new Properties();
config.load(reader);
return config;
}
/**
* Add configurations from one properties file to another
* @param to Properties file to copy values to
* @param from Properties file to copy values from
* @return a map of the key/value pairs that were added
*/
public static Map<String, String> addConfiguration(Properties to, Properties from) {
Map<String, String> results = from.entrySet()
.stream()
.filter(entry -> !to.containsKey(entry.getKey()))
.map(entry -> {
String key = entry.getKey().toString();
String value = entry.getValue().toString();
to.put(key, value);
return Pair.of(key, value);
})
.collect(Collectors.toMap(key -> key.getLeft(), value -> value.getRight()));
return results;
}
/**
* Removes configuration values that are not present in the comparedTo properties
* @param from Properties file to remove values from
* @param comparedTo Properties file to compare against
* @return a map of the key/value pairs that were removed
*/
public static Map<String, String> removeConfiguration(Properties from, Properties comparedTo) {
Map<String, String> results = from.keySet()
.stream()
.filter(key -> !comparedTo.containsKey(key)) //only get the items that are not in the comparedTo properties
.map(key -> Pair.of(String.valueOf(key), String.valueOf(from.get(key))))
.collect(Collectors.toList()) //Create a list of paired items (key value) of the items that were filtered
.stream()
.map(pair -> { //remove those pairs from the from properties
from.remove(pair.getLeft());
return pair;
})
.collect(Collectors.toMap(key -> key.getLeft(), value -> value.getRight())); //create a map of what was removed
return results;
}
/**
* Save the properties file to disk
* @param props Properties file to save
* @param fileToSave File to save to
* @param comment Any comments to add
*/
public static void saveConfiguration(Properties props, Path fileToSave, String comment) {
try(OutputStream fos = Files.newOutputStream(fileToSave)) {
props.store(fos, comment == null ? "" : comment);
} catch (IOException ex) {
throw new RuntimeException(ex.getMessage(), ex);
}
}
}
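PropertiesManager is what ConfigurationManager.init leans on to reconcile a user's on-disk properties with the bundled defaults. A minimal usage sketch of that flow, assuming a hypothetical defaults.properties classpath resource and local.properties file (both names are placeholders, not part of this commit):

package net.locusworks.s3sync.conf;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;

public class PropertiesManagerExample {
    public static void main(String[] args) throws Exception {
        // defaults bundled on the classpath; loadConfiguration returns null if the
        // resource is missing, so this sketch assumes it exists
        Properties defaults = PropertiesManager.loadConfiguration(PropertiesManagerExample.class, "defaults.properties");
        // whatever is currently on disk; an empty Properties if the file doesn't exist yet
        Path local = Paths.get("local.properties");
        Properties conf = PropertiesManager.loadConfiguration(local);
        PropertiesManager.addConfiguration(conf, defaults);    // copy in default keys the local file lacks
        PropertiesManager.removeConfiguration(conf, defaults); // drop keys the defaults no longer define
        PropertiesManager.saveConfiguration(conf, local, "reconciled with bundled defaults");
    }
}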