Compare commits

3 Commits: 1.0.1-RELE...feature/h2

Author | SHA1 | Date
---|---|---
 | 6d908b43d7 |
 | 6d6f0ed891 |
 | 4b61c811be |
.gitignore (vendored, 1 line changed)

@@ -5,3 +5,4 @@
 /.settings/
 **/*.csv
 *.properties
+*.db
pom.xml (202 lines changed)

@@ -13,6 +13,14 @@
|
||||
<maven.compiler.source>1.8</maven.compiler.source>
|
||||
<maven.compiler.target>1.8</maven.compiler.target>
|
||||
<dep.check.version>6.0.2</dep.check.version>
|
||||
<jackson.version>2.11.3</jackson.version>
|
||||
<flyway.version>7.3.0</flyway.version>
|
||||
<h2.version>1.4.200</h2.version>
|
||||
<hibernate.version>5.4.24.Final</hibernate.version>
|
||||
<spring.version>5.3.1</spring.version>
|
||||
<spring.boot.version>2.4.0</spring.boot.version>
|
||||
<spring.data.version>2.4.1</spring.data.version>
|
||||
<maven.enforcer.version>3.0.0-M2</maven.enforcer.version>
|
||||
</properties>
|
||||
|
||||
<build>
|
||||
@@ -20,7 +28,7 @@
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-enforcer-plugin</artifactId>
|
||||
<version>3.0.0-M2</version>
|
||||
<version>${maven.enforcer.version}</version>
|
||||
<configuration>
|
||||
<rules>
|
||||
<dependencyConvergence />
|
||||
@@ -53,6 +61,14 @@
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>versions-maven-plugin</artifactId>
|
||||
<version>2.7</version>
|
||||
<configuration>
|
||||
<generateBackupPoms>false</generateBackupPoms>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-shade-plugin</artifactId>
|
||||
@@ -68,7 +84,7 @@
|
||||
<transformers>
|
||||
<transformer
|
||||
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
|
||||
<mainClass>net.locusworks.s3sync.Entry</mainClass>
|
||||
<mainClass>net.locusworks.s3sync.S3SyncLauncher</mainClass>
|
||||
</transformer>
|
||||
</transformers>
|
||||
<filters>
|
||||
@@ -85,6 +101,39 @@
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.flywaydb</groupId>
|
||||
<artifactId>flyway-maven-plugin</artifactId>
|
||||
<version>${flyway.version}</version>
|
||||
<configuration>
|
||||
<serverId>flyway-h2</serverId>
|
||||
<user>sa</user>
|
||||
<url>jdbc:h2:file:./s3sync;MODE=MYSQL;DATABASE_TO_UPPER=false</url>
|
||||
<outOfOrder>false</outOfOrder>
|
||||
<schemas>
|
||||
<schema>s3sync</schema>
|
||||
</schemas>
|
||||
<table>_flyway_migration</table>
|
||||
<locations>
|
||||
<location>filesystem:${basedir}/src/main/resources/database/migration</location>
|
||||
</locations>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>process-sources</phase>
|
||||
<goals>
|
||||
<goal>migrate</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.h2database</groupId>
|
||||
<artifactId>h2</artifactId>
|
||||
<version>${h2.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
@@ -95,6 +144,27 @@
|
||||
<version>1.11.892</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind -->
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations -->
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
@@ -120,6 +190,134 @@
|
||||
<version>1.0.2-RELEASE</version>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/com.jcraft/jsch -->
|
||||
<dependency>
|
||||
<groupId>com.jcraft</groupId>
|
||||
<artifactId>jsch</artifactId>
|
||||
<version>0.1.55</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-core</artifactId>
|
||||
<version>${hibernate.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.jboss.logging</groupId>
|
||||
<artifactId>jboss-logging</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-entitymanager</artifactId>
|
||||
<version>${hibernate.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-hikaricp</artifactId>
|
||||
<version>${hibernate.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot</artifactId>
|
||||
<version>${spring.boot.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-context</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.logging.log4j</groupId>
|
||||
<artifactId>log4j-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-autoconfigure</artifactId>
|
||||
<version>${spring.boot.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Spring Framework jars to be shared across projects -->
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-core</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-aop</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-context</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-jdbc</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-tx</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-jpa</artifactId>
|
||||
<version>${spring.data.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/javax.annotation/javax.annotation-api -->
|
||||
<dependency>
|
||||
<groupId>javax.annotation</groupId>
|
||||
<artifactId>javax.annotation-api</artifactId>
|
||||
<version>1.3.2</version>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/org.flywaydb/flyway-core -->
|
||||
<dependency>
|
||||
<groupId>org.flywaydb</groupId>
|
||||
<artifactId>flyway-core</artifactId>
|
||||
<version>${flyway.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/com.h2database/h2 -->
|
||||
<dependency>
|
||||
<groupId>com.h2database</groupId>
|
||||
<artifactId>h2</artifactId>
|
||||
<version>${h2.version}</version>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<distributionManagement>
|
||||
|
src/main/java/net/locusworks/s3sync/Entry.java (deleted, 48 lines)

@@ -1,48 +0,0 @@
package net.locusworks.s3sync;

import net.locusworks.logger.ApplicationLogger;
import net.locusworks.logger.ApplicationLoggerFactory;
import net.locusworks.logger.ApplicationLoggerInitializer;
import net.locusworks.logger.LogLevel;
import net.locusworks.s3sync.client.FileManager;
import net.locusworks.s3sync.client.S3Client;
import net.locusworks.s3sync.conf.ConfigurationManager;

public class Entry {

  public static void main(String[] args) {

    ConfigurationManager conf = ConfigurationManager.getInstance();
    ApplicationLoggerFactory.init(new ApplicationLoggerInitializer() {

      @Override
      public LogLevel initialize() {
        return conf.getLogLevel();
      }

    });

    ApplicationLogger logger = ApplicationLoggerFactory.getLogger(Entry.class);

    if (System.getenv("AWS_ACCESS_KEY_ID") == null) {
      logger.error("AWS_ACCESS_KEY_ID is not set in environment variables");
      System.exit(-1);
    }

    if (System.getenv("AWS_SECRET_ACCESS_KEY") == null) {
      logger.error("AWS_SECRET_ACCESS_KEY is not set in environment variables");
      System.exit(-1);
    }

    logger.info("Starting S3 Sync");

    try (S3Client client = new S3Client(ConfigurationManager.getInstance())) {
      FileManager manager = FileManager.newInstance(client);
      client.syncFolder();
      manager.removeOrphanedFiles();
    } catch (Exception | Error e) {
      logger.error(e);
      System.exit(-1);
    }
  }
}
src/main/java/net/locusworks/s3sync/S3SyncLauncher.java (new file, 55 lines)

@@ -0,0 +1,55 @@
|
||||
package net.locusworks.s3sync;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.ApplicationArguments;
|
||||
import org.springframework.boot.ApplicationRunner;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.boot.builder.SpringApplicationBuilder;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.scheduling.annotation.EnableScheduling;
|
||||
import org.springframework.stereotype.Component;
|
||||
import net.locusworks.logger.ApplicationLogger;
|
||||
import net.locusworks.logger.ApplicationLoggerFactory;
|
||||
import net.locusworks.s3sync.client.S3Client;
|
||||
|
||||
|
||||
@SpringBootApplication(scanBasePackages= {"net.locusworks.s3sync"})
|
||||
@ComponentScan("net.locusworks.s3sync")
|
||||
@Component
|
||||
@EnableScheduling
|
||||
public class S3SyncLauncher implements ApplicationRunner {
|
||||
|
||||
@Autowired
|
||||
private S3Client client;
|
||||
|
||||
public static void main(String[] args) {
|
||||
new SpringApplicationBuilder(S3SyncLauncher.class).headless(false).run(args);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run(ApplicationArguments args) throws Exception {
|
||||
|
||||
ApplicationLogger logger = ApplicationLoggerFactory.getLogger(S3SyncLauncher.class);
|
||||
|
||||
if (System.getenv("AWS_ACCESS_KEY_ID") == null) {
|
||||
logger.error("AWS_ACCESS_KEY_ID is not set in environment variables");
|
||||
System.exit(-1);
|
||||
}
|
||||
|
||||
if (System.getenv("AWS_SECRET_ACCESS_KEY") == null) {
|
||||
logger.error("AWS_SECRET_ACCESS_KEY is not set in environment variables");
|
||||
System.exit(-1);
|
||||
}
|
||||
|
||||
logger.info("Starting S3 Sync");
|
||||
|
||||
try {
|
||||
client.syncFolder();
|
||||
} catch (Exception | Error e) {
|
||||
logger.error(e);
|
||||
System.exit(-1);
|
||||
} finally {
|
||||
client.close();
|
||||
}
|
||||
}
|
||||
}
|
src/main/java/net/locusworks/s3sync/client/FileDetail.java (deleted, 57 lines)

@@ -1,57 +0,0 @@
|
||||
package net.locusworks.s3sync.client;
|
||||
|
||||
public class FileDetail {
|
||||
|
||||
private String file;
|
||||
private String hash;
|
||||
private boolean uploaded;
|
||||
|
||||
public FileDetail() {}
|
||||
/**
|
||||
* @param file
|
||||
* @param hash
|
||||
* @param uploaded
|
||||
*/
|
||||
public FileDetail(String file, String hash, boolean uploaded) {
|
||||
this.file = file;
|
||||
this.hash = hash;
|
||||
this.uploaded = uploaded;
|
||||
}
|
||||
/**
|
||||
* @return the file
|
||||
*/
|
||||
public synchronized final String getFile() {
|
||||
return file;
|
||||
}
|
||||
/**
|
||||
* @param file the file to set
|
||||
*/
|
||||
public synchronized final void setFile(String file) {
|
||||
this.file = file;
|
||||
}
|
||||
/**
|
||||
* @return the hash
|
||||
*/
|
||||
public synchronized final String getHash() {
|
||||
return hash;
|
||||
}
|
||||
/**
|
||||
* @param hash the hash to set
|
||||
*/
|
||||
public synchronized final void setHash(String hash) {
|
||||
this.hash = hash;
|
||||
}
|
||||
/**
|
||||
* @return the uploaded
|
||||
*/
|
||||
public synchronized final boolean isUploaded() {
|
||||
return uploaded;
|
||||
}
|
||||
/**
|
||||
* @param uploaded the uploaded to set
|
||||
*/
|
||||
public synchronized final void setUploaded(boolean uploaded) {
|
||||
this.uploaded = uploaded;
|
||||
}
|
||||
|
||||
}
|
src/main/java/net/locusworks/s3sync/client/FileManager.java

@@ -1,130 +1,52 @@
|
||||
package net.locusworks.s3sync.client;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.Date;
|
||||
import org.apache.commons.codec.digest.DigestUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import com.amazonaws.services.s3.model.S3Object;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
import net.locusworks.s3sync.database.entities.FileInfo;
|
||||
import net.locusworks.s3sync.database.repos.FileInfoRepository;
|
||||
import net.locusworks.logger.ApplicationLogger;
|
||||
import net.locusworks.logger.ApplicationLoggerFactory;
|
||||
|
||||
public class FileManager implements AutoCloseable {
|
||||
|
||||
@Service
|
||||
public class FileManager {
|
||||
|
||||
private ApplicationLogger logger = ApplicationLoggerFactory.getLogger(FileManager.class);
|
||||
|
||||
public static final String FILE_CSV = "upload.csv";
|
||||
|
||||
private Map<String, FileDetail> detailMap;
|
||||
|
||||
private Set<String> s3Files;
|
||||
@Autowired
|
||||
private FileInfoRepository fileInfoRepo;
|
||||
|
||||
private S3Client client;
|
||||
public static final String S3SYNC_DB_FILE = "s3sync.mv.db";
|
||||
|
||||
private String bucket;
|
||||
|
||||
private static FileManager instance;
|
||||
|
||||
private FileManager(S3Client client) throws IOException {
|
||||
detailMap = new LinkedHashMap<String, FileDetail>();
|
||||
s3Files = new HashSet<String>();
|
||||
this.client = client;
|
||||
this.bucket = client.getBucket();
|
||||
readFile();
|
||||
public FileInfo addEntry(Path file, String hash, boolean uploaded) {
|
||||
return addEntry(new FileInfo(null, file.getFileName().toString(), hash, file.getParent().toString(), new Date(), uploaded));
|
||||
}
|
||||
|
||||
private void readFile() throws IOException {
|
||||
public FileInfo addEntry(FileInfo fd) {
|
||||
logger.debug("Saving file: " + fd);
|
||||
if (fd.getUploadDate() == null) fd.setUploadDate(new Date());
|
||||
return fileInfoRepo.save(fd);
|
||||
}
|
||||
|
||||
public FileInfo getFileDetail(Path file, String relativePath) throws IOException {
|
||||
String sha1 = DigestUtils.sha1Hex(Files.newInputStream(file));
|
||||
|
||||
S3Object hashFile = client.getObject(bucket, FILE_CSV);
|
||||
if (hashFile == null) return;
|
||||
Path file = Paths.get(FILE_CSV);
|
||||
FileInfo fi = fileInfoRepo.findByFileHash(sha1);
|
||||
|
||||
client.downloadFile(FILE_CSV, file);
|
||||
s3Files = client.getFileList();
|
||||
if (Files.notExists(file)) return;
|
||||
|
||||
try(BufferedReader reader = Files.newBufferedReader(file)) {
|
||||
String line = null;
|
||||
while((line = reader.readLine()) != null) {
|
||||
String[] values = line.split(",");
|
||||
if (values.length != 3) {
|
||||
logger.warn("Invalid entry detected: " + line);
|
||||
continue;
|
||||
}
|
||||
|
||||
FileDetail fd = new FileDetail(values[0], values[1], Boolean.valueOf(values[2]));
|
||||
detailMap.put(values[0], fd);
|
||||
}
|
||||
if (fi == null) {
|
||||
fi = new FileInfo();
|
||||
fi.setFileHash(sha1);
|
||||
fi.setFilePath(relativePath);
|
||||
fi.setFileName(file.getFileName().toString());
|
||||
fi.setUploaded(false);
|
||||
}
|
||||
}
|
||||
|
||||
private void saveFile() throws IOException {
|
||||
try(BufferedWriter writer = Files.newBufferedWriter(Paths.get(FILE_CSV))) {
|
||||
writer.write("FILE_NAME,HASH,STATUS\n");
|
||||
for(FileDetail v : detailMap.values()) {
|
||||
writer.write(String.format("%s,%s,%s%n", v.getFile(), v.getHash(), v.isUploaded()));
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
public boolean addEntry(String file, String hash, boolean uploaded) {
|
||||
return addEntry(new FileDetail(file, hash, uploaded));
|
||||
}
|
||||
|
||||
public boolean addEntry(FileDetail fd) {
|
||||
return detailMap.put(fd.getFile(), fd) != null;
|
||||
}
|
||||
|
||||
public FileDetail getFileDetail(Path path, String key) throws IOException {
|
||||
boolean newFile = false;
|
||||
FileDetail fd = null;
|
||||
if (detailMap.containsKey(key)) {
|
||||
fd = detailMap.get(key);
|
||||
} else {
|
||||
newFile = true;
|
||||
fd = new FileDetail(key, DigestUtils.sha1Hex(Files.newInputStream(path)), false);
|
||||
}
|
||||
|
||||
String sha1 = newFile ? fd.getHash() : DigestUtils.sha1Hex(Files.newInputStream(path));
|
||||
|
||||
if (sha1.equals(fd.getHash()) && s3Files.contains(key)) {
|
||||
fd.setUploaded(true);
|
||||
s3Files.remove(key);
|
||||
} else {
|
||||
fd.setUploaded(false);
|
||||
}
|
||||
|
||||
return fd;
|
||||
}
|
||||
|
||||
public void removeOrphanedFiles() {
|
||||
client.removeFiles(s3Files);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
saveFile();
|
||||
client.uploadFile(Paths.get(FILE_CSV));
|
||||
Files.deleteIfExists(Paths.get(FILE_CSV));
|
||||
}
|
||||
|
||||
public static FileManager newInstance(S3Client client) throws IOException {
|
||||
instance = new FileManager(client);
|
||||
return instance;
|
||||
}
|
||||
|
||||
public static FileManager getInstance() throws IOException {
|
||||
if (instance == null || instance.client == null || StringUtils.isBlank(instance.bucket)) {
|
||||
throw new IOException("a call to newInstance() was not made. Please call newInstance first");
|
||||
}
|
||||
return instance;
|
||||
return fi;
|
||||
}
|
||||
|
||||
}
|
||||
|
src/main/java/net/locusworks/s3sync/client/S3Client.java

@@ -1,10 +1,15 @@
|
||||
package net.locusworks.s3sync.client;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import javax.annotation.PostConstruct;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Component;
|
||||
import com.amazonaws.AmazonClientException;
|
||||
import com.amazonaws.AmazonServiceException;
|
||||
import com.amazonaws.regions.Regions;
|
||||
@@ -17,19 +22,33 @@ import com.amazonaws.services.s3.transfer.Download;
|
||||
import com.amazonaws.services.s3.transfer.TransferManager;
|
||||
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
|
||||
import com.amazonaws.services.s3.transfer.Upload;
|
||||
import com.jcraft.jsch.JSchException;
|
||||
import net.locusworks.logger.ApplicationLogger;
|
||||
import net.locusworks.logger.ApplicationLoggerFactory;
|
||||
import net.locusworks.s3sync.conf.ConfigurationManager;
|
||||
import net.locusworks.s3sync.conf.TransferType;
|
||||
import net.locusworks.s3sync.database.entities.FileInfo;
|
||||
import net.locusworks.s3sync.scp.AbstractSshMessage;
|
||||
import net.locusworks.s3sync.scp.LocalFileMessage;
|
||||
import net.locusworks.s3sync.scp.ScpFromMessage;
|
||||
|
||||
public class S3Client implements AutoCloseable {
|
||||
@Component
|
||||
public class S3Client {
|
||||
|
||||
private ApplicationLogger logger = ApplicationLoggerFactory.getLogger(S3Client.class);
|
||||
|
||||
private AmazonS3 s3Client;
|
||||
private String bucket;
|
||||
private Path syncFolder;
|
||||
|
||||
public S3Client(ConfigurationManager conf) {
|
||||
@Autowired
|
||||
private ConfigurationManager conf;
|
||||
|
||||
@Autowired
|
||||
private FileManager fileManager;
|
||||
|
||||
public S3Client() {}
|
||||
|
||||
@PostConstruct
|
||||
private void init() {
|
||||
String region = conf.getRegion();
|
||||
this.s3Client = AmazonS3ClientBuilder.standard().withRegion(Regions.fromName(region)).build();
|
||||
Bucket bucket = s3Client.listBuckets().stream()
|
||||
@@ -40,45 +59,40 @@ public class S3Client implements AutoCloseable {
|
||||
System.exit(-1);
|
||||
}
|
||||
logger.info("Found Bucket: %s", bucket);
|
||||
this.bucket = conf.getBucketName();
|
||||
this.syncFolder = conf.getSyncFolder();
|
||||
}
|
||||
|
||||
public String getBucket() {
|
||||
return this.bucket;
|
||||
return this.conf.getBucketName();
|
||||
}
|
||||
|
||||
public void uploadFile(Path file) {
|
||||
public void uploadFile(Path file, Path localFolder) {
|
||||
TransferManager xferMgr = TransferManagerBuilder.standard().withS3Client(s3Client).build();
|
||||
uploadFile(xferMgr, file);
|
||||
uploadFile(xferMgr, file, localFolder);
|
||||
xferMgr.shutdownNow(false);
|
||||
}
|
||||
|
||||
public void uploadFile(TransferManager xferMgr, Path file) {
|
||||
public void uploadFile(TransferManager xferMgr, Path file, Path localFolder) {
|
||||
boolean xferMgrNull = xferMgr == null;
|
||||
xferMgr = !xferMgrNull ? xferMgr : TransferManagerBuilder.standard().withS3Client(s3Client).build();
|
||||
FileDetail fd = null;
|
||||
FileInfo fd = null;
|
||||
try {
|
||||
String key = getPath(file);
|
||||
fd = FileManager.getInstance().getFileDetail(file, key);
|
||||
if (fd.isUploaded()) return;
|
||||
String key = getPath(file, localFolder);
|
||||
fd = fileManager.getFileDetail(file, key);
|
||||
if (fd.getUploaded()) return;
|
||||
logger.info("Uploading file: %s", file);
|
||||
Upload xfer = xferMgr.upload(bucket, key, file.toFile());
|
||||
Upload xfer = xferMgr.upload(getBucket(), key, file.toFile());
|
||||
xfer.waitForCompletion();
|
||||
fd.setUploaded(true);
|
||||
FileManager.getInstance().addEntry(fd);
|
||||
logger.info("Done uploading %s", file);
|
||||
} catch (AmazonClientException | InterruptedException | IOException e) {
|
||||
if (fd != null) {
|
||||
fd.setUploaded(false);
|
||||
try {
|
||||
FileManager.getInstance().addEntry(fd);
|
||||
} catch (IOException e1) {
|
||||
logger.error("Unable to save file to file manager: " + e1.getMessage());
|
||||
}
|
||||
}
|
||||
logger.error(e.getMessage());
|
||||
} finally {
|
||||
if (fd != null) {
|
||||
fileManager.addEntry(fd);
|
||||
}
|
||||
if (xferMgrNull) {
|
||||
xferMgr.shutdownNow(false);
|
||||
}
|
||||
@@ -96,7 +110,7 @@ public class S3Client implements AutoCloseable {
|
||||
xferMgr = !xferMgrNull ? xferMgr : TransferManagerBuilder.standard().withS3Client(s3Client).build();
|
||||
try {
|
||||
logger.info("Downloading file: %s", file);
|
||||
Download xfer = xferMgr.download(bucket, key, file.toFile());
|
||||
Download xfer = xferMgr.download(getBucket(), key, file.toFile());
|
||||
xfer.waitForCompletion();
|
||||
logger.info("Done downloading %s", file);
|
||||
} catch (AmazonClientException | InterruptedException e) {
|
||||
@@ -108,17 +122,41 @@ public class S3Client implements AutoCloseable {
|
||||
}
|
||||
}
|
||||
|
||||
public void syncFolder() throws IOException {
|
||||
public void syncFolder() throws IOException, JSchException {
|
||||
TransferManager xferMgr = TransferManagerBuilder.standard().withS3Client(s3Client).build();
|
||||
try (FileManager manager = FileManager.getInstance()) {
|
||||
Files.walk(syncFolder)
|
||||
.filter(f -> Files.isRegularFile(f))
|
||||
.forEach(f -> uploadFile(xferMgr, syncFolder.resolve(f)));
|
||||
Path localFolder = conf.getLocalFolder();
|
||||
if (Files.notExists(localFolder)) {
|
||||
logger.info("Creating local folder: " + localFolder);
|
||||
Files.createDirectory(localFolder);
|
||||
}
|
||||
|
||||
AbstractSshMessage aMsg = conf.getTransferType() == TransferType.REMOTE ?
|
||||
new ScpFromMessage(true, ScpFromMessage.newSession(conf.getIdentity(), conf.getUser(), conf.getHost(), conf.getPort()), conf.getRemoteFolder(), localFolder, true) :
|
||||
new LocalFileMessage(localFolder);
|
||||
|
||||
try(AbstractSshMessage msg = aMsg) {
|
||||
msg.fileCallback(p -> {
|
||||
if (Files.isRegularFile(p)) {
|
||||
uploadFile(xferMgr, p, localFolder);
|
||||
try {
|
||||
if (conf.deleteLocal())
|
||||
Files.delete(p);
|
||||
} catch (Exception ex) {
|
||||
logger.warn("Unable to delete local file %s: %s", p, ex.getMessage());
|
||||
}
|
||||
}
|
||||
});
|
||||
msg.execute();
|
||||
} catch (Exception e) {
|
||||
logger.error("Unable to load file Manager: " + e.getMessage());
|
||||
}
|
||||
|
||||
|
||||
xferMgr.shutdownNow(false);
|
||||
if (conf.deleteLocal())
|
||||
Files.walk(localFolder)
|
||||
.sorted(Comparator.reverseOrder())
|
||||
.map(Path::toFile)
|
||||
.forEach(File::delete);
|
||||
}
|
||||
|
||||
public S3Object getObject(String bucketName, String key) {
|
||||
@@ -147,13 +185,12 @@ public class S3Client implements AutoCloseable {
|
||||
}
|
||||
}
|
||||
|
||||
private String getPath(Path file) {
|
||||
private String getPath(Path file, Path localFolder) {
|
||||
if (file.getParent() == null) return file.getFileName().toString();
|
||||
Path relative = syncFolder.relativize(file);
|
||||
Path relative = localFolder.relativize(file);
|
||||
return relative.toString().replace("\\", "/");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
if (s3Client != null) {
|
||||
s3Client.shutdown();
|
||||
|
src/main/java/net/locusworks/s3sync/conf/ConfigurationManager.java

@@ -5,19 +5,17 @@ import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Properties;
|
||||
import javax.annotation.PostConstruct;
|
||||
import org.springframework.stereotype.Component;
|
||||
import net.locusworks.logger.LogLevel;
|
||||
|
||||
@Component
|
||||
public class ConfigurationManager {
|
||||
|
||||
private static final String CONF_FILE= "s3.properties";
|
||||
private Properties conf;
|
||||
|
||||
private static ConfigurationManager confManager;
|
||||
|
||||
private ConfigurationManager() {
|
||||
init();
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
private void init() {
|
||||
try {
|
||||
Properties defaultProps = PropertiesManager.loadConfiguration(ConfigurationManager.class, CONF_FILE);
|
||||
@@ -38,6 +36,14 @@ public class ConfigurationManager {
|
||||
}
|
||||
}
|
||||
|
||||
public TransferType getTransferType() {
|
||||
return TransferType.getEnum(conf.getProperty("transferType"));
|
||||
}
|
||||
|
||||
public boolean deleteLocal() {
|
||||
return Boolean.parseBoolean(conf.getProperty("deleteLocalFiles"));
|
||||
}
|
||||
|
||||
public String getRegion() {
|
||||
return conf.getProperty("region");
|
||||
}
|
||||
@@ -46,19 +52,32 @@
|
||||
return conf.getProperty("bucketName");
|
||||
}
|
||||
|
||||
public Path getSyncFolder() {
|
||||
return Paths.get(conf.getProperty("syncFolder"));
|
||||
public String getRemoteFolder() {
|
||||
return conf.getProperty("remoteFolder");
|
||||
}
|
||||
|
||||
public Path getLocalFolder() {
|
||||
return Paths.get(conf.getProperty("localFolder"));
|
||||
}
|
||||
|
||||
public String getUser() {
|
||||
return conf.getProperty("user");
|
||||
}
|
||||
|
||||
public String getIdentity() {
|
||||
return conf.getProperty("identity");
|
||||
}
|
||||
|
||||
public String getHost() {
|
||||
return conf.getProperty("host");
|
||||
}
|
||||
|
||||
public Integer getPort() {
|
||||
return Integer.valueOf(conf.getProperty("port"));
|
||||
}
|
||||
|
||||
public LogLevel getLogLevel() {
|
||||
return LogLevel.getEnum(conf.getProperty("logLevel"));
|
||||
}
|
||||
|
||||
public static ConfigurationManager getInstance() {
|
||||
if (confManager == null) {
|
||||
confManager = new ConfigurationManager();
|
||||
}
|
||||
return confManager;
|
||||
}
|
||||
|
||||
}
|
||||
|
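Note: judging from the getters ConfigurationManager exposes after this change, a complete s3.properties would need roughly the entries sketched below. The key names come from the code above; every value is a placeholder, not taken from the repository.

    # s3.properties -- illustrative values only
    logLevel=INFO
    region=us-east-1
    bucketName=my-backup-bucket
    transferType=remote
    deleteLocalFiles=true
    localFolder=./staging
    remoteFolder=/var/data/export
    host=files.example.com
    port=22
    user=sync
    identity=/home/sync/.ssh/id_rsa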
src/main/java/net/locusworks/s3sync/conf/TransferType.java (new file, 15 lines)

@@ -0,0 +1,15 @@
package net.locusworks.s3sync.conf;

public enum TransferType {

  REMOTE,
  LOCAL;

  public static TransferType getEnum(String value) {
    for (TransferType s : values()) {
      if (s.name().toLowerCase().equals(value.toLowerCase().trim())) return s;
    }
    throw new RuntimeException("No transfer type of " + value);
  }

}
src/main/java/net/locusworks/s3sync/database/S3SyncBeanConfiguration.java (new file, 195 lines)

@@ -0,0 +1,195 @@
|
||||
package net.locusworks.s3sync.database;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Properties;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
|
||||
import org.flywaydb.core.Flyway;
|
||||
import org.flywaydb.core.api.configuration.FluentConfiguration;
|
||||
import org.flywaydb.core.api.logging.Log;
|
||||
import org.flywaydb.core.api.logging.LogCreator;
|
||||
import org.flywaydb.core.api.logging.LogFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.DependsOn;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
|
||||
import org.springframework.orm.hibernate5.LocalSessionFactoryBean;
|
||||
import org.springframework.orm.jpa.JpaTransactionManager;
|
||||
import org.springframework.orm.jpa.JpaVendorAdapter;
|
||||
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
|
||||
import org.springframework.orm.jpa.vendor.Database;
|
||||
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.annotation.EnableTransactionManagement;
|
||||
|
||||
import net.locusworks.logger.ApplicationLogger;
|
||||
import net.locusworks.logger.ApplicationLoggerFactory;
|
||||
import net.locusworks.logger.ApplicationLoggerInitializer;
|
||||
import net.locusworks.logger.LogLevel;
|
||||
import net.locusworks.s3sync.conf.ConfigurationManager;
|
||||
|
||||
@Configuration(value="net.locusworks.s3sync.database.S3SyncBeanConfiguration")
|
||||
@EnableTransactionManagement
|
||||
@EnableJpaRepositories(
|
||||
basePackages = {"net.locusworks.s3sync.database.repos"},
|
||||
entityManagerFactoryRef = "s3syncEntityManagerFactory",
|
||||
transactionManagerRef = "s3syncTransactionManager"
|
||||
)
|
||||
public class S3SyncBeanConfiguration {
|
||||
|
||||
public static final String PERSISTENCE_UNIT = "s3sync_pu";
|
||||
|
||||
public static final HashSet<String> cacheNames = new HashSet<>();
|
||||
|
||||
private static final String[] PACKAGES_TO_SCAN = {"net.locusworks.s3sync.database.entities", "net.locusworks.s3sync.database.repos"};
|
||||
|
||||
private static final Properties hibernateProperties;
|
||||
static {
|
||||
hibernateProperties = new Properties();
|
||||
hibernateProperties.setProperty("hibernate.connection.zeroDateTimeBehavior", "convertToNull");
|
||||
hibernateProperties.setProperty("hibernate.dbcp.maxActive", "50");
|
||||
hibernateProperties.setProperty("hibernate.dbcp.maxIdle", "10");
|
||||
hibernateProperties.setProperty("hibernate.dbcp.maxWait", "5000");
|
||||
hibernateProperties.setProperty("hibernate.jdbc.batch_size property", "50");
|
||||
hibernateProperties.setProperty("hibernate.connection.charSet", "UTF-8");
|
||||
hibernateProperties.setProperty("hibernate.connection.characterEncoding", "UTF-8");
|
||||
hibernateProperties.setProperty("hibernate.connection.useUnicode", "true");
|
||||
}
|
||||
|
||||
@Autowired
|
||||
private DataSource dataSource;
|
||||
|
||||
/**
|
||||
* Create the entity manager factory bean used in the database connection
|
||||
* @return entityManagerFactoryBean
|
||||
*/
|
||||
@Primary
|
||||
@Bean
|
||||
@DependsOn("flyway")
|
||||
public LocalContainerEntityManagerFactoryBean s3syncEntityManagerFactory() {
|
||||
LocalContainerEntityManagerFactoryBean lef = new LocalContainerEntityManagerFactoryBean();
|
||||
lef.setDataSource(dataSource);
|
||||
lef.setJpaVendorAdapter(s3syncJpaVendorAdapter());
|
||||
lef.setPackagesToScan(PACKAGES_TO_SCAN);
|
||||
lef.setJpaProperties(hibernateProperties);
|
||||
lef.setPersistenceUnitName(PERSISTENCE_UNIT);
|
||||
return lef;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the session factory bean
|
||||
* @return sessionFactoryBean
|
||||
*/
|
||||
@Bean
|
||||
public LocalSessionFactoryBean s3syncSessionFactory() {
|
||||
LocalSessionFactoryBean sessionBean = new LocalSessionFactoryBean();
|
||||
sessionBean.setDataSource(dataSource);
|
||||
sessionBean.setPackagesToScan(PACKAGES_TO_SCAN);
|
||||
sessionBean.setHibernateProperties(hibernateProperties);
|
||||
return sessionBean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the JPA Vendor adaptor bean that is used in the entity manager factory
|
||||
* @return jpaVendorAdaptor
|
||||
*/
|
||||
@Primary
|
||||
@Bean
|
||||
public JpaVendorAdapter s3syncJpaVendorAdapter() {
|
||||
HibernateJpaVendorAdapter hibernateJpaVendorAdapter = new HibernateJpaVendorAdapter();
|
||||
hibernateJpaVendorAdapter.setShowSql(false);
|
||||
hibernateJpaVendorAdapter.setGenerateDdl(false);
|
||||
hibernateJpaVendorAdapter.setDatabase(Database.H2);
|
||||
return hibernateJpaVendorAdapter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the transaction manager bean
|
||||
* @return transactionManager
|
||||
*/
|
||||
@Primary
|
||||
@Bean
|
||||
public PlatformTransactionManager s3syncTransactionManager() {
|
||||
JpaTransactionManager manager = new JpaTransactionManager();
|
||||
manager.setEntityManagerFactory(s3syncEntityManagerFactory().getObject());
|
||||
return manager;
|
||||
}
|
||||
|
||||
@Bean(name="flyway", initMethod="migrate")
|
||||
@DependsOn({"initializer"})
|
||||
public Flyway flyway() {
|
||||
LogFactory.setLogCreator(new FlywayLogCreator());
|
||||
FluentConfiguration fc = Flyway.configure();
|
||||
fc.schemas("s3sync");
|
||||
fc.table("_flyway_migration");
|
||||
fc.locations("database/migration");
|
||||
fc.outOfOrder(false);
|
||||
fc.dataSource(dataSource);
|
||||
|
||||
return fc.load();
|
||||
}
|
||||
|
||||
@Bean(name="initializer")
|
||||
public Boolean initializer(ConfigurationManager confManager) {
|
||||
ApplicationLoggerFactory.init(new ApplicationLoggerInitializer() {
|
||||
|
||||
@Override
|
||||
public LogLevel initialize() {
|
||||
return confManager.getLogLevel();
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
private class FlywayLogCreator implements LogCreator {
|
||||
|
||||
@Override
|
||||
public Log createLogger(Class<?> clazz) {
|
||||
return new FlywayLog(clazz);
|
||||
}
|
||||
}
|
||||
|
||||
private class FlywayLog implements Log {
|
||||
|
||||
private ApplicationLogger logger;
|
||||
|
||||
public FlywayLog(Class<?> clazz) {
|
||||
logger = ApplicationLoggerFactory.getLogger(clazz);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDebugEnabled() {
|
||||
return logger.isDebugEnabled();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void debug(String message) {
|
||||
logger.debug(message);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void info(String message) {
|
||||
logger.info(message);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void warn(String message) {
|
||||
logger.warn(message);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void error(String message) {
|
||||
logger.error(message);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void error(String message, Exception e) {
|
||||
logger.error(message, e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
src/main/java/net/locusworks/s3sync/database/S3SyncDataSource.java (new file, 46 lines)

@@ -0,0 +1,46 @@
package net.locusworks.s3sync.database;

import javax.sql.DataSource;

import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;
import net.locusworks.logger.ApplicationLogger;
import net.locusworks.logger.ApplicationLoggerFactory;

@Primary
@Component
public class S3SyncDataSource {

  private static final String DRIVER = "org.h2.Driver";
  private static final String JNDI_STRING = "jdbc:h2:file:./s3sync;MODE=MYSQL;DATABASE_TO_UPPER=false";

  /**
   * Create the data source bean
   * @return the data source bean
   * @throws Exception
   */
  @Bean
  public DataSource dataSource() throws Exception {
    return getDataSource();
  }

  private DataSource getDataSource() throws Exception {
    ApplicationLogger logger = ApplicationLoggerFactory.getLogger(this.getClass());

    String url = JNDI_STRING;

    logger.debug(String.format("Database connection string: %s", url));

    return DataSourceBuilder
        .create()
        .username("sa")
        .password("")
        .url(url)
        .driverClassName(DRIVER)
        .build();
  }

}
src/main/java/net/locusworks/s3sync/database/entities/FileInfo.java (new file, 183 lines)

@@ -0,0 +1,183 @@
|
||||
package net.locusworks.s3sync.database.entities;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
import javax.persistence.Basic;
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.NamedQueries;
|
||||
import javax.persistence.NamedQuery;
|
||||
import javax.persistence.Table;
|
||||
import javax.persistence.Temporal;
|
||||
import javax.persistence.TemporalType;
|
||||
|
||||
@Entity
|
||||
@Table(name ="file_info", catalog="S3SYNC", schema="s3sync")
|
||||
@NamedQueries({
|
||||
@NamedQuery(name = "FileInfo.findAll", query="SELECT f FROM FileInfo f")
|
||||
})
|
||||
public class FileInfo implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.IDENTITY)
|
||||
@Basic(optional = false)
|
||||
@Column(name = "ID")
|
||||
private Long id;
|
||||
|
||||
@Basic(optional = false)
|
||||
@Column(name = "FILE_NAME")
|
||||
private String fileName;
|
||||
|
||||
@Basic(optional = false)
|
||||
@Column(name = "FILE_HASH")
|
||||
private String fileHash;
|
||||
|
||||
@Basic(optional = false)
|
||||
@Column(name = "FILE_PATH")
|
||||
private String filePath;
|
||||
|
||||
@Basic(optional = true)
|
||||
@Column(name = "UPLOAD_DATE")
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date uploadDate;
|
||||
|
||||
@Basic(optional = false)
|
||||
@Column(name = "UPLOADED")
|
||||
private Boolean uploaded;
|
||||
|
||||
public FileInfo() {
|
||||
}
|
||||
|
||||
public FileInfo(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param id
|
||||
* @param fileName
|
||||
* @param fileHash
|
||||
* @param filePath
|
||||
* @param uploadDate
|
||||
* @param uploaded
|
||||
*/
|
||||
public FileInfo(Long id, String fileName, String fileHash, String filePath, Date uploadDate, Boolean uploaded) {
|
||||
this.id = id;
|
||||
this.fileName = fileName;
|
||||
this.fileHash = fileHash;
|
||||
this.filePath = filePath;
|
||||
this.uploadDate = uploadDate;
|
||||
this.uploaded = uploaded;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the id
|
||||
*/
|
||||
public synchronized final Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param id the id to set
|
||||
*/
|
||||
public synchronized final void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the fileName
|
||||
*/
|
||||
public synchronized final String getFileName() {
|
||||
return fileName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param fileName the fileName to set
|
||||
*/
|
||||
public synchronized final void setFileName(String fileName) {
|
||||
this.fileName = fileName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the fileHash
|
||||
*/
|
||||
public synchronized final String getFileHash() {
|
||||
return fileHash;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param fileHash the fileHash to set
|
||||
*/
|
||||
public synchronized final void setFileHash(String fileHash) {
|
||||
this.fileHash = fileHash;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the filePath
|
||||
*/
|
||||
public synchronized final String getFilePath() {
|
||||
return filePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filePath the filePath to set
|
||||
*/
|
||||
public synchronized final void setFilePath(String filePath) {
|
||||
this.filePath = filePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the uploadDate
|
||||
*/
|
||||
public synchronized final Date getUploadDate() {
|
||||
return uploadDate;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param uploadDate the uploadDate to set
|
||||
*/
|
||||
public synchronized final void setUploadDate(Date uploadDate) {
|
||||
this.uploadDate = uploadDate;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the uploaded
|
||||
*/
|
||||
public synchronized final Boolean getUploaded() {
|
||||
return uploaded;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param uploaded the uploaded to set
|
||||
*/
|
||||
public synchronized final void setUploaded(Boolean uploaded) {
|
||||
this.uploaded = uploaded;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hash = 0;
|
||||
hash += (id != null ? id.hashCode() : 0);
|
||||
return hash;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object object) {
|
||||
// TODO: Warning - this method won't work in the case the id fields are not set
|
||||
if (!(object instanceof FileInfo)) {
|
||||
return false;
|
||||
}
|
||||
FileInfo other = (FileInfo) object;
|
||||
if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "net.locusworks.battletech.s3sync.entities.FileInfo[ id=" + id + " ]";
|
||||
}
|
||||
}
|
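Note: the Flyway setup in this branch (the flyway-maven-plugin in pom.xml and the flyway bean in S3SyncBeanConfiguration) expects migration scripts under src/main/resources/database/migration, but no migration file appears in this diff. A minimal script consistent with the FileInfo entity above and the configured s3sync schema might look like the sketch below; the column names follow the @Column annotations, while the file name, types, and lengths are assumptions.

    -- V1__create_file_info.sql (hypothetical file name, illustrative only)
    CREATE SCHEMA IF NOT EXISTS s3sync;
    CREATE TABLE s3sync.file_info (
        ID          BIGINT AUTO_INCREMENT PRIMARY KEY,
        FILE_NAME   VARCHAR(255)  NOT NULL,
        FILE_HASH   VARCHAR(64)   NOT NULL,
        FILE_PATH   VARCHAR(1024) NOT NULL,
        UPLOAD_DATE TIMESTAMP     NULL,
        UPLOADED    BOOLEAN       NOT NULL
    );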
src/main/java/net/locusworks/s3sync/database/repos/FileInfoRepository.java (new file, 26 lines)

@@ -0,0 +1,26 @@
package net.locusworks.s3sync.database.repos;

import javax.transaction.Transactional;

import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;

import net.locusworks.s3sync.database.entities.FileInfo;

/**
 *
 * @author isaac
 */
public interface FileInfoRepository extends CrudRepository<FileInfo, Long> {

  FileInfo findByFileHash(String fileHash);

  boolean existsByFileHash(String fileHash);

  @Modifying
  @Transactional
  @Query("DELETE FROM FileInfo fh WHERE fh.fileHash = ?1")
  void deleteByFileHash(String fileHash);

}
src/main/java/net/locusworks/s3sync/scp/AbstractSshMessage.java (new file, 304 lines)

@@ -0,0 +1,304 @@
|
||||
package net.locusworks.s3sync.scp;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Path;
|
||||
import java.text.NumberFormat;
|
||||
import java.util.function.Consumer;
|
||||
import com.jcraft.jsch.Channel;
|
||||
import com.jcraft.jsch.ChannelExec;
|
||||
import com.jcraft.jsch.ChannelSftp;
|
||||
import com.jcraft.jsch.JSchException;
|
||||
import com.jcraft.jsch.Session;
|
||||
import com.jcraft.jsch.SftpProgressMonitor;
|
||||
import net.locusworks.logger.ApplicationLogger;
|
||||
import net.locusworks.logger.LogLevel;
|
||||
|
||||
public abstract class AbstractSshMessage implements AutoCloseable {
|
||||
|
||||
private ApplicationLogger logger;
|
||||
|
||||
private static final double ONE_SECOND = 1000.0;
|
||||
|
||||
private Session session;
|
||||
private final boolean verbose;
|
||||
private final boolean compressed;
|
||||
|
||||
protected Consumer<Path> fileCallback;
|
||||
|
||||
/**
|
||||
* Constructor for AbstractSshMessage
|
||||
* @param session the ssh session to use
|
||||
*/
|
||||
public AbstractSshMessage(final Session session, ApplicationLogger logger) {
|
||||
this(false, session, logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for AbstractSshMessage
|
||||
* @param verbose if true do verbose logging
|
||||
* @param session the ssh session to use
|
||||
* @since Ant 1.6.2
|
||||
*/
|
||||
public AbstractSshMessage(final boolean verbose, final Session session, ApplicationLogger logger) {
|
||||
this(verbose, false, session, logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for AbstractSshMessage
|
||||
* @param verbose if true do verbose logging
|
||||
* @param compressed if true use compression
|
||||
* @param session the ssh session to use
|
||||
* @since Ant 1.9.8
|
||||
*/
|
||||
public AbstractSshMessage(boolean verbose, boolean compressed, Session session, ApplicationLogger logger) {
|
||||
this.verbose = verbose;
|
||||
this.compressed = compressed;
|
||||
this.session = session;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open an ssh channel.
|
||||
* @param command the command to use
|
||||
* @return the channel
|
||||
* @throws JSchException on error
|
||||
*/
|
||||
protected Channel openExecChannel(final String command) throws JSchException {
|
||||
final ChannelExec channel = (ChannelExec) session.openChannel("exec");
|
||||
channel.setCommand(command);
|
||||
|
||||
return channel;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open an ssh sftp channel.
|
||||
* @return the channel
|
||||
* @throws JSchException on error
|
||||
*/
|
||||
protected ChannelSftp openSftpChannel() throws JSchException {
|
||||
return (ChannelSftp) session.openChannel("sftp");
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an ack.
|
||||
* @param out the output stream to use
|
||||
* @throws IOException on error
|
||||
*/
|
||||
protected void sendAck(final OutputStream out) throws IOException {
|
||||
final byte[] buf = new byte[1];
|
||||
buf[0] = 0;
|
||||
out.write(buf);
|
||||
out.flush();
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads the response, throws a JSchException if the response
|
||||
* indicates an error.
|
||||
* @param in the input stream to use
|
||||
* @throws IOException on I/O error
|
||||
* @throws JSchException on other errors
|
||||
*/
|
||||
protected void waitForAck(final InputStream in) throws IOException, JSchException {
|
||||
final int b = in.read();
|
||||
|
||||
// b may be 0 for success,
|
||||
// 1 for error,
|
||||
// 2 for fatal error,
|
||||
|
||||
if (b == -1) {
|
||||
// didn't receive any response
|
||||
throw new JSchException("No response from server");
|
||||
}
|
||||
if (b != 0) {
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
|
||||
int c = in.read();
|
||||
while (c > 0 && c != '\n') {
|
||||
sb.append((char) c);
|
||||
c = in.read();
|
||||
}
|
||||
|
||||
if (b == 1) {
|
||||
throw new JSchException("server indicated an error: " + sb.toString());
|
||||
} else if (b == 2) {
|
||||
throw new JSchException("server indicated a fatal error: " + sb.toString());
|
||||
} else {
|
||||
throw new JSchException("unknown response, code " + b + " message: " + sb.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Carry out the transfer.
|
||||
* @throws IOException on I/O errors
|
||||
* @throws JSchException on ssh errors
|
||||
*/
|
||||
public abstract void execute() throws IOException, JSchException;
|
||||
|
||||
/**
|
||||
* Log a message to the log listener.
|
||||
* @param message the message to log
|
||||
*/
|
||||
protected void log(final String message, LogLevel logLevel) {
|
||||
switch (logLevel) {
|
||||
case DEBUG:
|
||||
logger.debug(message);
|
||||
break;
|
||||
case ERROR:
|
||||
case FATAL:
|
||||
logger.error(message);
|
||||
break;
|
||||
case INFO:
|
||||
logger.info(message);
|
||||
break;
|
||||
case WARN:
|
||||
logger.warn(message);
|
||||
break;
|
||||
case TRACE:
|
||||
logger.trace(message);
|
||||
break;
|
||||
case OFF:
|
||||
default:
|
||||
logger.trace(message);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Log transfer stats to the log listener.
|
||||
* @param timeStarted the time started
|
||||
* @param timeEnded the finishing time
|
||||
* @param totalLength the total length
|
||||
*/
|
||||
protected void logStats(final long timeStarted, final long timeEnded, final long totalLength) {
|
||||
final double duration = (timeEnded - timeStarted) / ONE_SECOND;
|
||||
final NumberFormat format = NumberFormat.getNumberInstance();
|
||||
format.setMaximumFractionDigits(2);
|
||||
format.setMinimumFractionDigits(1);
|
||||
log("File transfer time: " + format.format(duration) + " Average Rate: " + format.format(totalLength / duration) + " B/s", LogLevel.DEBUG);
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the verbose attribute set.
|
||||
* @return true if the verbose attribute is set
|
||||
* @since Ant 1.6.2
|
||||
*/
|
||||
protected final boolean getVerbose() {
|
||||
return verbose;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the compressed attribute set.
|
||||
* @return true if the compressed attribute is set
|
||||
* @since Ant 1.9.8
|
||||
*/
|
||||
protected final boolean getCompressed() {
|
||||
return compressed;
|
||||
}
|
||||
|
||||
protected final Session getSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
protected void connect() throws JSchException {
|
||||
if (session != null && !session.isConnected()) session.connect();
|
||||
}
|
||||
|
||||
protected void disconnect() {
|
||||
if (session != null && session.isConnected()) session.disconnect();
|
||||
}
|
||||
|
||||
public Consumer<Path> fileCallback(Consumer<Path> callback) {
|
||||
this.fileCallback = callback;
|
||||
return this.fileCallback;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
if (session != null && session.isConnected()) {
|
||||
session.disconnect();
|
||||
session = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Track progress every 10% if 100kb < filesize < 1Mb. For larger
|
||||
* files track progress for every percent transmitted.
|
||||
* @param filesize the size of the file being transmitted
|
||||
* @param totalLength the total transmission size
|
||||
* @param percentTransmitted the current percent transmitted
|
||||
* @return the percent that the file is of the total
|
||||
*/
|
||||
protected final int trackProgress(final long filesize, final long totalLength,
|
||||
final int percentTransmitted) {
|
||||
|
||||
// CheckStyle:MagicNumber OFF
|
||||
final int percent = (int) Math.round(Math.floor((totalLength / (double) filesize) * 100));
|
||||
|
||||
if (percent > percentTransmitted) {
|
||||
if (filesize < 1048576) {
|
||||
if (percent % 10 == 0) {
|
||||
if (percent == 100) {
|
||||
System.out.println(" 100%");
|
||||
} else {
|
||||
System.out.print("*");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (percent == 50) {
|
||||
System.out.println(" 50%");
|
||||
} else if (percent == 100) {
|
||||
System.out.println(" 100%");
|
||||
} else {
|
||||
System.out.print(".");
|
||||
}
|
||||
}
|
||||
}
|
||||
// CheckStyle:MagicNumber ON
|
||||
|
||||
return percent;
|
||||
}
|
||||
|
||||
private ProgressMonitor monitor = null;
|
||||
|
||||
/**
|
||||
* Get the progress monitor.
|
||||
* @return the progress monitor.
|
||||
*/
|
||||
protected SftpProgressMonitor getProgressMonitor() {
|
||||
if (monitor == null) {
|
||||
monitor = new ProgressMonitor();
|
||||
}
|
||||
return monitor;
|
||||
}
|
||||
|
||||
private class ProgressMonitor implements SftpProgressMonitor {
|
||||
private long initFileSize = 0;
|
||||
private long totalLength = 0;
|
||||
private int percentTransmitted = 0;
|
||||
|
||||
@Override
|
||||
public void init(final int op, final String src, final String dest, final long max) {
|
||||
initFileSize = max;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean count(final long len) {
|
||||
totalLength += len;
|
||||
percentTransmitted = trackProgress(initFileSize, totalLength, percentTransmitted);
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void end() {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public long getTotalLength() {
|
||||
return totalLength;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
src/main/java/net/locusworks/s3sync/scp/LocalFileMessage.java (new file, 29 lines)

@@ -0,0 +1,29 @@
package net.locusworks.s3sync.scp;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import com.jcraft.jsch.JSchException;
import net.locusworks.logger.ApplicationLoggerFactory;
import net.locusworks.logger.LogLevel;

public class LocalFileMessage extends AbstractSshMessage {

  private Path syncFolder;

  public LocalFileMessage(Path localFolder) {
    super(true, false, null, ApplicationLoggerFactory.getLogger(LocalFileMessage.class));
    this.syncFolder = localFolder;
  }

  @Override
  public void execute() throws IOException, JSchException {
    Files.walk(syncFolder)
      .filter(f -> Files.isRegularFile(f))
      .forEach(f -> {
        log("Found File: " + f, LogLevel.DEBUG);
        if (fileCallback != null) fileCallback.accept(syncFolder.resolve(f));
      });
  }

}
src/main/java/net/locusworks/s3sync/scp/ScpFromMessage.java (new file, 320 lines)

@@ -0,0 +1,320 @@
|
||||
package net.locusworks.s3sync.scp;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.attribute.FileTime;
|
||||
import com.jcraft.jsch.Channel;
|
||||
import com.jcraft.jsch.ChannelSftp;
|
||||
import com.jcraft.jsch.JSch;
|
||||
import com.jcraft.jsch.JSchException;
|
||||
import com.jcraft.jsch.Session;
|
||||
import com.jcraft.jsch.SftpATTRS;
|
||||
import com.jcraft.jsch.SftpException;
|
||||
import net.locusworks.logger.ApplicationLoggerFactory;
|
||||
import net.locusworks.logger.LogLevel;
|
||||
import net.locusworks.logger.ApplicationLogger;
|
||||
|
||||
public class ScpFromMessage extends AbstractSshMessage {
|
||||
|
||||
private static ApplicationLogger logger = ApplicationLoggerFactory.getLogger(ScpFromMessage.class);
|
||||
|
||||
private static final int HUNDRED_KILOBYTES = 102400;
|
||||
private static final byte LINE_FEED = 0x0a;
|
||||
private static final int BUFFER_SIZE = 100 * 1024;
|
||||
|
||||
private String remoteFile;
|
||||
private Path localFile;
|
||||
private boolean isRecursive = false;
|
||||
private boolean preserveLastModified = false;
|
||||
|
||||
/**
|
||||
* Constructor for ScpFromMessage
|
||||
* @param session the ssh session to use
|
||||
*/
|
||||
public ScpFromMessage(final Session session) {
|
||||
super(session, logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for ScpFromMessage
|
||||
* @param verbose if true do verbose logging
|
||||
* @param session the ssh session to use
|
||||
* @since Ant 1.7
|
||||
*/
|
||||
public ScpFromMessage(final boolean verbose, final Session session) {
|
||||
super(verbose, session, logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for ScpFromMessage.
|
||||
* @param verbose if true log extra information
|
||||
* @param session the Scp session to use
|
||||
* @param aRemoteFile the remote file name
|
||||
* @param aLocalFile the local file
|
||||
* @param recursive if true use recursion (-r option to scp)
|
||||
* @since Ant 1.6.2
|
||||
*/
|
||||
  public ScpFromMessage(final boolean verbose, final Session session, final String aRemoteFile,
      final Path aLocalFile, final boolean recursive) {
    this(verbose, session, aRemoteFile, aLocalFile, recursive, false);
  }

  /**
   * Constructor for ScpFromMessage.
   * @param session the Scp session to use
   * @param aRemoteFile the remote file name
   * @param aLocalFile the local file
   * @param recursive if true use recursion (-r option to scp)
   */
  public ScpFromMessage(final Session session, final String aRemoteFile, final Path aLocalFile, final boolean recursive) {
    this(false, session, aRemoteFile, aLocalFile, recursive);
  }

  /**
   * Constructor for ScpFromMessage.
   * @param verbose if true log extra information
   * @param session the Scp session to use
   * @param aRemoteFile the remote file name
   * @param aLocalFile the local file
   * @param recursive if true use recursion (-r option to scp)
   * @param preserveLastModified whether to preserve file
   * modification times
   * @since Ant 1.8.0
   */
  public ScpFromMessage(final boolean verbose, final Session session, final String aRemoteFile, final Path aLocalFile,
      final boolean recursive, final boolean preserveLastModified) {
    this(verbose, session, aRemoteFile, aLocalFile, recursive, preserveLastModified, false);
  }

  /**
   * Constructor for ScpFromMessage.
   * @param verbose if true log extra information
   * @param session the Scp session to use
   * @param aRemoteFile the remote file name
   * @param aLocalFile the local file
   * @param recursive if true use recursion (-r option to scp)
   * @param preserveLastModified whether to preserve file
   * modification times
   * @param compressed if true use compression (-C option to scp)
   * @since Ant 1.9.8
   */
  public ScpFromMessage(boolean verbose, Session session, String aRemoteFile, Path aLocalFile, boolean recursive,
      boolean preserveLastModified, boolean compressed) {
    super(verbose, compressed, session, logger);
    this.remoteFile = aRemoteFile;
    this.localFile = aLocalFile;
    this.isRecursive = recursive;
    this.preserveLastModified = preserveLastModified;
  }

  /**
   * Carry out the transfer.
   * @throws IOException on i/o errors
   * @throws JSchException on errors detected by scp
   */
  @Override
  public void execute() throws IOException, JSchException {
    connect();

    String command = "scp -f ";
    if (isRecursive) {
      command += "-r ";
    }
    if (getCompressed()) {
      command += "-C ";
    }
    command += remoteFile;
    log("Executing command: " + command, LogLevel.DEBUG);
    final Channel channel = openExecChannel(command);
    try {
      // get I/O streams for remote scp
      final OutputStream out = channel.getOutputStream();
      final InputStream in = channel.getInputStream();

      channel.connect();

      sendAck(out);
      startRemoteCpProtocol(in, out, localFile);
    } finally {
      if (channel != null) {
        channel.disconnect();
      }
    }
    log("Done\n", LogLevel.INFO);
  }

  protected boolean getPreserveLastModified() {
    return preserveLastModified;
  }

  private void startRemoteCpProtocol(final InputStream in, final OutputStream out, final Path localFile) throws IOException, JSchException {
    Path startFile = localFile;
    while (true) {
      // C0644 filesize filename - header for a regular file
      // T time 0 time 0\n - present if preserve time.
      // D directory - this is the header for a directory.
      final ByteArrayOutputStream stream = new ByteArrayOutputStream();
      while (true) {
        final int read = in.read();
        if (read < 0) {
          return;
        }
        if ((byte) read == LINE_FEED) {
          break;
        }
        stream.write(read);
      }
      final String serverResponse = stream.toString("UTF-8");
      if (serverResponse.charAt(0) == 'C') {
        parseAndFetchFile(serverResponse, startFile, out, in);
      } else if (serverResponse.charAt(0) == 'D') {
        startFile = parseAndCreateDirectory(serverResponse, startFile);
        sendAck(out);
      } else if (serverResponse.charAt(0) == 'E') {
        startFile = startFile.getParent();
        sendAck(out);
      } else if (serverResponse.charAt(0) == '\01' || serverResponse.charAt(0) == '\02') {
        // this indicates an error.
        throw new IOException(serverResponse.substring(1));
      }
    }
  }

  private Path parseAndCreateDirectory(final String serverResponse, final Path localFile) throws IOException {
    int start = serverResponse.indexOf(' ');
    // appears that the next token is not used and it's zero.
    start = serverResponse.indexOf(' ', start + 1);
    final String directoryName = serverResponse.substring(start + 1);
    if (Files.isDirectory(localFile)) {
      final Path dir = localFile.resolve(directoryName);
      Files.createDirectories(dir);
      log("Creating: " + dir, LogLevel.DEBUG);
      return dir;
    }
    return null;
  }

  private void parseAndFetchFile(final String serverResponse, final Path localFile, final OutputStream out, final InputStream in) throws IOException, JSchException {
    int start = 0;
    int end = serverResponse.indexOf(' ', start + 1);
    start = end + 1;
    end = serverResponse.indexOf(' ', start + 1);
    final long filesize = Long.parseLong(serverResponse.substring(start, end));
    final String filename = serverResponse.substring(end + 1);
    final Path transferFile = Files.isDirectory(localFile) ? localFile.resolve(filename) : localFile;
    log("Receiving: " + transferFile + " : " + filesize, LogLevel.INFO);
    fetchFile(transferFile, filesize, out, in);
    waitForAck(in);
    sendAck(out);
  }

  private void fetchFile(final Path localFile, long filesize, final OutputStream out, final InputStream in) throws IOException, JSchException {
    final byte[] buf = new byte[BUFFER_SIZE];
    sendAck(out);

    // read the content of the remote file into the local file
    final OutputStream fos = Files.newOutputStream(localFile);
    int length;
    long totalLength = 0;
    final long startTime = System.currentTimeMillis();

    // only track progress for files larger than 100kb in verbose mode
    final boolean trackProgress = getVerbose() && filesize > HUNDRED_KILOBYTES;
    // since filesize keeps on decreasing we have to store the
    // initial filesize
    final long initFilesize = filesize;
    int percentTransmitted = 0;
    boolean goodXfer = false;
    try {
      while (true) {
        length = in.read(buf, 0, BUFFER_SIZE < filesize ? BUFFER_SIZE : (int) filesize);
        if (length < 0) {
          throw new EOFException("Unexpected end of stream.");
        }
        fos.write(buf, 0, length);
        filesize -= length;
        totalLength += length;
        if (trackProgress) {
          percentTransmitted = trackProgress(initFilesize, totalLength, percentTransmitted);
        }
        if (filesize == 0) {
          goodXfer = true;
          break;
        }
      }
    } finally {
      final long endTime = System.currentTimeMillis();
      logStats(startTime, endTime, totalLength);
      fos.flush();
      fos.close();
    }

    if (getPreserveLastModified()) {
      setLastModified(localFile);
    }

    if (goodXfer && fileCallback != null) {
      fileCallback.accept(localFile);
    }
  }

  private void setLastModified(final Path localFile) throws JSchException {
    SftpATTRS fileAttributes = null;
    final ChannelSftp channel = openSftpChannel();
    channel.connect();
    try {
      fileAttributes = channel.lstat(remoteDir(remoteFile) + localFile.getFileName().toString());
    } catch (final SftpException e) {
      throw new JSchException("failed to stat remote file", e);
    } finally {
      channel.disconnect();
    }

    try {
      // getMTime() returns seconds as an int; widen to long before converting to millis
      Files.setLastModifiedTime(localFile, FileTime.fromMillis(fileAttributes.getMTime() * 1000L));
    } catch (IOException e) {
      logger.warn(e.getMessage());
    }
  }

  /**
   * returns the directory part of the remote file, if any.
   */
  private static String remoteDir(final String remoteFile) {
    int index = remoteFile.lastIndexOf('/');
    if (index < 0) {
      index = remoteFile.lastIndexOf('\\');
    }
    return index < 0 ? "" : remoteFile.substring(0, index + 1);
  }

  public static Session newSession(String identity, String user, String host, Integer port) throws JSchException {
    JSch.setConfig("StrictHostKeyChecking", "no");
    JSch jsch = new JSch();
    jsch.addIdentity(identity);
    Session session = jsch.getSession(user, host, port);
    return session;
  }
}
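A hedged usage sketch for the SCP pull path above: open a key-based session with newSession, then copy a remote directory tree into a local folder. The identity path, user, host, and folder names are placeholders echoing the sample properties further down, not values asserted by this change.

    package net.locusworks.s3sync.scp;

    import java.io.IOException;
    import java.nio.file.Paths;
    import com.jcraft.jsch.JSchException;
    import com.jcraft.jsch.Session;

    public class ScpFromMessageSketch {
      public static void main(String[] args) throws IOException, JSchException {
        Session session = ScpFromMessage.newSession(
            System.getProperty("user.home") + "/.ssh/id_rsa", "iparenteau", "s3sync.locusworks.net", 22);
        session.connect();
        try {
          // verbose, recursive copy of the remote "logs" directory into ./Downloads,
          // preserving remote modification times
          new ScpFromMessage(true, session, "logs", Paths.get("Downloads"), true, true).execute();
        } finally {
          session.disconnect();
        }
      }
    }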
8 src/main/resources/META-INF/persistence.xml Normal file
@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<persistence version="2.1" xmlns="http://xmlns.jcp.org/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/persistence http://xmlns.jcp.org/xml/ns/persistence/persistence_2_1.xsd">
  <persistence-unit name="s3sync_pu" transaction-type="RESOURCE_LOCAL">
    <provider>org.hibernate.ejb.HibernatePersistence</provider>
    <class>net.locusworks.s3sync.database.entities.FileInfo</class>
    <properties/>
  </persistence-unit>
</persistence>
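A minimal sketch of how this persistence unit could be bootstrapped with plain JPA, assuming the JDBC connection settings are supplied at runtime (the <properties/> element above is empty):

    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Persistence;

    public class PersistenceSketch {
      public static void main(String[] args) {
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("s3sync_pu");
        EntityManager em = emf.createEntityManager();
        try {
          em.getTransaction().begin();
          // persist/query net.locusworks.s3sync.database.entities.FileInfo here
          em.getTransaction().commit();
        } finally {
          em.close();
          emf.close();
        }
      }
    }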
@ -0,0 +1,10 @@
CREATE TABLE s3sync.file_info (
  ID IDENTITY NOT NULL AUTO_INCREMENT,
  FILE_NAME VARCHAR(1000) NOT NULL,
  FILE_PATH VARCHAR(1000) NOT NULL,
  FILE_HASH VARCHAR(40) NOT NULL,
  UPLOAD_DATE TIMESTAMP,
  UPLOADED BOOLEAN NOT NULL,
  CONSTRAINT FILE_INFO_PK PRIMARY KEY (ID),
  CONSTRAINT FILE_INFO_UN UNIQUE (FILE_HASH)
);
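For orientation, a hypothetical JPA mapping consistent with the file_info table above; the actual FileInfo entity referenced from persistence.xml lives elsewhere in the change set and may differ:

    package net.locusworks.s3sync.database.entities;

    import java.io.Serializable;
    import java.util.Date;
    import javax.persistence.Column;
    import javax.persistence.Entity;
    import javax.persistence.GeneratedValue;
    import javax.persistence.GenerationType;
    import javax.persistence.Id;
    import javax.persistence.Table;
    import javax.persistence.Temporal;
    import javax.persistence.TemporalType;

    @Entity
    @Table(name = "file_info", schema = "s3sync")
    public class FileInfo implements Serializable {

      private static final long serialVersionUID = 1L;

      @Id
      @GeneratedValue(strategy = GenerationType.IDENTITY)
      @Column(name = "ID")
      private Long id;

      @Column(name = "FILE_NAME", nullable = false, length = 1000)
      private String fileName;

      @Column(name = "FILE_PATH", nullable = false, length = 1000)
      private String filePath;

      @Column(name = "FILE_HASH", nullable = false, length = 40, unique = true)
      private String fileHash;

      @Temporal(TemporalType.TIMESTAMP)
      @Column(name = "UPLOAD_DATE")
      private Date uploadDate;

      @Column(name = "UPLOADED", nullable = false)
      private boolean uploaded;

      // getters and setters omitted for brevity
    }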
@ -0,0 +1 @@
CREATE SCHEMA IF NOT EXISTS s3sync AUTHORIZATION sa;
@ -1,4 +1,11 @@
region=us-east-2
bucketName=locus2k-backup
syncFolder=D:/OneDrive
logLevel=INFO
transferType=remote
remoteFolder=logs
logLevel=INFO
identity=${user.home}/.ssh/id_rsa
host=s3sync.locusworks.net
user=iparenteau
port=22
localFolder=Downloads
deleteLocalFiles=true
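The identity entry uses a ${user.home} placeholder, which java.util.Properties does not expand on its own; a sketch of loading the file and substituting it manually (the file name here is an assumption, since it is not visible in this section of the diff):

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.Properties;

    public class ConfigSketch {
      public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        try (InputStream in = Files.newInputStream(Paths.get("s3sync.properties"))) { // hypothetical file name
          props.load(in);
        }
        // expand ${user.home} manually; Properties performs no placeholder interpolation
        String identity = props.getProperty("identity", "")
            .replace("${user.home}", System.getProperty("user.home"));
        System.out.println("host=" + props.getProperty("host") + ", identity=" + identity);
      }
    }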