Compare commits: 287241f03b...master

10 Commits

| SHA1 |
|---|
| c4cf0f7f93 |
| 6dfe1f024c |
| 321af6ecb6 |
| 65d290e2cd |
| 567922657c |
| 42f7d506d2 |
| a8437ccef6 |
| ccdfc5d3f4 |
| 1dfacaee02 |
| e23a6d565c |
3  .cursorindexingignore  Normal file

@@ -0,0 +1,3 @@
# Don't index SpecStory auto-save files, but allow explicit context inclusion via @ references
.specstory/**

2  .specstory/.gitignore  vendored  Normal file

@@ -0,0 +1,2 @@
# SpecStory explanation file
/.what-is-this.md

File diff suppressed because it is too large
@@ -45,6 +45,7 @@ dependencies {
implementation 'com.formdev:flatlaf:2.2' // 2.3+ causes illegal reflective access warning on win + adoptium java 11.0.16
implementation 'com.squareup.okhttp3:okhttp:4.12.+'
implementation 'com.squareup.okhttp3:okhttp-urlconnection:4.12.+'
implementation 'com.squareup.moshi:moshi:1.15.0'
implementation 'org.slf4j:slf4j-simple:2.0.12'
implementation 'commons-io:commons-io:2.11.0'
testImplementation 'org.junit.jupiter:junit-jupiter:5.7.+'

@@ -39,6 +39,7 @@ import java.util.TimerTask;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -74,22 +75,16 @@ import com.sheepit.client.os.OS;

import com.sheepit.client.rendering.IncompatibleProcessChecker;
import com.sheepit.client.rendering.Job;
import com.sheepit.client.rendering.State;
import com.sheepit.client.ui.Gui;
import com.sheepit.client.utils.Pair;
import com.sheepit.client.utils.Utils;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.Getter;
import okhttp3.HttpUrl;

@Data public class Client {
public enum State {
DOWNLOADING,
PREPARING,
RENDERING,
UPLOADING,
UNKNOWN
}

public static final int MIN_JOB_ID = 20; //to distinguish between actual jobs and test frames
private static final Locale LOCALE = Locale.ENGLISH;
private DirectoryManager directoryManager;
@@ -97,13 +92,13 @@ import okhttp3.HttpUrl;
private ServerRequest serverRequest;
private Configuration configuration;
private Log log;
private State state;
private Job renderingJob;
private Job previousJob;
private BlockingQueue<QueuedJob> jobsToValidate;
private boolean isValidatingJob;
private long startTime;
private boolean sessionStarted;
private CompletableFuture<Void> directoryInit;

private boolean disableErrorSending;
private boolean running;
@@ -115,13 +110,12 @@ import okhttp3.HttpUrl;
private long uploadQueueVolume;
private int noJobRetryIter;

public Client(Gui gui, Configuration configuration, String url) {
public Client(Gui gui, Configuration configuration, String url, CompletableFuture<Void> directoryInit) {
this.configuration = configuration;
this.serverRequest = new ServerRequest(url, this.configuration, this);
this.log = Log.getInstance();
this.gui = gui;
this.directoryManager = new DirectoryManager(this.configuration);
this.state = State.UNKNOWN;
this.renderingJob = null;
this.previousJob = null;
this.jobsToValidate = new ArrayBlockingQueue<>(5);
@@ -138,6 +132,7 @@ import okhttp3.HttpUrl;
this.noJobRetryIter = 0;

this.sessionStarted = false;
this.directoryInit = directoryInit;
}

@Override public String toString() {
@@ -162,6 +157,8 @@ import okhttp3.HttpUrl;
step = this.log.newCheckPoint();
this.gui.status("Starting");

// Wait for the cache to finish setup
this.directoryInit.join();
Error.Type ret;
ret = this.serverRequest.getConfiguration();

@@ -722,10 +719,11 @@ import okhttp3.HttpUrl;
public Error.Type work(final Job ajob) {
Error.Type downloadRet;

ajob.setState(State.PREPARING);
gui.setRenderingProjectName(ajob.getName());

try {
this.state = State.DOWNLOADING;
ajob.setState(State.DOWNLOADING);
downloadRet = this.downloadExecutable(ajob);
if (downloadRet != Error.Type.OK) {
gui.setRenderingProjectName("");
@@ -746,7 +744,7 @@ import okhttp3.HttpUrl;
return downloadRet;
}

this.state = State.PREPARING;
ajob.setState(State.PREPARING);

int ret = this.prepareWorkingDirectory(ajob); // decompress renderer and scene archives
if (ret != 0) {
@@ -759,7 +757,7 @@ import okhttp3.HttpUrl;
}
}
catch (SheepItException e) {
this.state = State.UNKNOWN;
ajob.setState(State.ERRORING);
gui.setRenderingProjectName("");
for (String logline : directoryManager.filesystemHealthCheck()) {
log.debug(logline);
@@ -814,7 +812,7 @@ import okhttp3.HttpUrl;
}
};

this.state = State.RENDERING;
ajob.setState(State.RENDERING);
Error.Type err = ajob.render(removeSceneDirectoryOnceRenderHasStartedObserver);
gui.setRenderingProjectName("");
gui.setRemainingTime("");
@@ -853,7 +851,7 @@ import okhttp3.HttpUrl;
}
}
}
this.state = State.UNKNOWN;
ajob.setState(State.UNKNOWN);
return err;
}

@@ -974,11 +972,24 @@ import okhttp3.HttpUrl;
// unzip the archive

Instant startUnzip = Instant.now();
ret = Utils.unzipChunksIntoDirectory(
ajob.getProjectDownload().getChunks().stream().map(chunk -> this.directoryManager.getCachePathFor(chunk)).collect(Collectors.toList()),
scenePath,
ajob.getPassword(),
log);
String mirrorDir = this.configuration.getShadowExtractDirectory();
if (mirrorDir != null && !mirrorDir.isEmpty() && ajob.getPassword() != null) {
String safeProjectName = sanitizeForFolderName(ajob.getName());
String projectMirrorDir = new File(mirrorDir, safeProjectName).getAbsolutePath();
ret = Utils.unzipChunksIntoDirectoryWithMirror(
ajob.getProjectDownload().getChunks().stream().map(chunk -> this.directoryManager.getCachePathFor(chunk)).collect(Collectors.toList()),
scenePath,
projectMirrorDir,
ajob.getPassword(),
log);
}
else {
ret = Utils.unzipChunksIntoDirectory(
ajob.getProjectDownload().getChunks().stream().map(chunk -> this.directoryManager.getCachePathFor(chunk)).collect(Collectors.toList()),
scenePath,
ajob.getPassword(),
log);
}

Instant stopUnzip = Instant.now();
Duration unzipDuration = Duration.between(startUnzip, stopUnzip);
@@ -994,6 +1005,18 @@ import okhttp3.HttpUrl;
return 0;
}

private static String sanitizeForFolderName(String name) {
if (name == null) {
return "project";
}
// Replace invalid Windows characters and trim
String sanitized = name.replaceAll("[\\\\/:*?\"<>|]", "_").trim();
if (sanitized.isEmpty()) {
return "project";
}
return sanitized;
}
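
A minimal sketch of what the new sanitizeForFolderName helper does to a project name (standalone illustration, not part of the commit; the project name and shadow directory below are made up):

import java.io.File;

// Illustration of the sanitizing rule used above: Windows-invalid characters become underscores.
public class SanitizeSketch {
	public static void main(String[] args) {
		String projectName = "My Scene: final?";                              // hypothetical project name
		String safe = projectName.replaceAll("[\\\\/:*?\"<>|]", "_").trim(); // same regex as sanitizeForFolderName
		System.out.println(safe);                                            // My Scene_ final_
		System.out.println(new File("/tmp/shadow-dir", safe));               // /tmp/shadow-dir/My Scene_ final_
	}
}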

@SuppressWarnings("unused") // Suppress false positive about this.isValidatingJob - PMD rule cannot detect time-sensitive reads
protected Error.Type confirmJob(Job ajob, int checkpoint) {
String urlReal = String.format(LOCALE, "%s&rendertime=%d&preptime=%d&memoryused=%s", ajob.getValidationUrl(), ajob.getRenderProcess().getRenderDuration(), ajob.getRenderProcess().getScenePrepDuration(),
@@ -1004,6 +1027,7 @@ import okhttp3.HttpUrl;
this.log.debug(checkpoint, "Client::confirmeJob url " + urlReal);
this.log.debug(checkpoint, "path frame " + ajob.getRenderOutput().getFullImagePath());

ajob.setState(State.UPLOADING);
this.isValidatingJob = true;
int maxTries = 3;
int timeToSleep = 22_000;
@@ -1096,7 +1120,7 @@ import okhttp3.HttpUrl;
* @int checkpoint - the checkpoint associated with the job (to add any additional log to the render output)
* @Job job - the job to be validated
*/
@AllArgsConstructor private class QueuedJob {
@AllArgsConstructor @Getter public class QueuedJob {
final private int checkpoint;
final private Job job;
}

@@ -82,6 +82,7 @@ import lombok.Data;
private String theme;
private boolean disableLargeDownloads;
private String incompatibleProcess;
private String shadowExtractDirectory;

public Configuration(File cache_dir_, String login_, String password_) {
this.configFilePath = null;

@@ -0,0 +1,86 @@
package com.sheepit.client.config;

import com.sheepit.client.logger.Log;
import com.sheepit.client.utils.Utils;
import lombok.AllArgsConstructor;
import org.apache.commons.io.FilenameUtils;

import java.io.File;

@AllArgsConstructor
public class DirectoryCleaner implements Runnable {

public static final String FILE_SEPARATOR = " file: ";
static Log log = Log.getInstance();
private File directory;

@Override public void run() {
if (directory == null) {
return;
}
log.debug("DirectoryCleaner initialized. Cleaning " + directory.getName());
cleanDirectory();
}

/**
* Cleans a directory and removes files in it from the md5 cache
*/
private void cleanDirectory() {
if (directory == null) {
return;
}
final String LOG_PREFIX = "DirectoryCleaner::cleanDirectory:"+ directory.getName();
File[] files = directory.listFiles();
int removedFilesCount = 0;
if (files != null) {
log.debug(LOG_PREFIX + " found files in directory " + files.length);
for (File file : files) {
if (file.isDirectory()) {
Utils.delete(file);
}
else {
try {
String extension = FilenameUtils.getExtension(file.getName()).toLowerCase();
String name = FilenameUtils.removeExtension(file.getName());
if ("wool".equals(extension)) {
String md5Local = Utils.md5(file.getAbsolutePath());
// Check if .wool file is corrupt
if (md5Local.equals(name) == false) {
if (file.delete()) {
removedFilesCount++;
}
else {
String baseMessage = " failed to delete .wool file that is corrupt";
String message = LOG_PREFIX + baseMessage + " file: " + file.getName();
log.debug(message);
}
}
}
else {
if (file.delete()) {
removedFilesCount++;
}
else {
String baseMessage = LOG_PREFIX + " failed to delete non-wool file that is not present in cache";
String message = baseMessage + FILE_SEPARATOR + file.getName();
log.debug(message);
}
}
}
catch (IllegalArgumentException e) { // because the file does not have a . in its path
if (file.delete()) {
removedFilesCount++;
}
else {
String baseMessage = LOG_PREFIX + " failed to delete file with no extension";
String message = baseMessage + FILE_SEPARATOR + file.getName();
log.debug(message);
}
}
}
}
}
log.debug(LOG_PREFIX + " successfully removed " + removedFilesCount + " from " + directory.getName());
}
}
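
A minimal usage sketch for this Runnable, assuming only what the diff shows (DirectoryManager runs it synchronously from cleanWorkingDirectory() and wraps the whole cleanup in a CompletableFuture via cleanupDirectoryAsync()); the paths are placeholders:

import com.sheepit.client.config.DirectoryCleaner;
import java.io.File;
import java.util.concurrent.CompletableFuture;

public class DirectoryCleanerSketch {
	public static void main(String[] args) {
		// Synchronous cleanup, as DirectoryManager.cleanWorkingDirectory() does per directory.
		new DirectoryCleaner(new File("/tmp/sheepit/working")).run();

		// Or run a cleaner in the background and wait for it later.
		CompletableFuture<Void> cleanup = CompletableFuture.runAsync(new DirectoryCleaner(new File("/tmp/sheepit/wool-cache")));
		cleanup.join();
	}
}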

@@ -37,6 +37,7 @@ import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.concurrent.CompletableFuture;

@AllArgsConstructor
public class DirectoryManager {
@@ -62,6 +63,8 @@ public class DirectoryManager {
return copyFileFromSharedToCache(getSharedPathFor(chunk), getCachePathFor(chunk));
}

private static final Log log = Log.getInstance();

private boolean copyFileFromSharedToCache(String source, String target) {
Path existingArchivePath = Paths.get(source);
Path targetArchivePath = Paths.get(target);
@@ -82,12 +85,12 @@ public class DirectoryManager {
| SecurityException // user is not allowed to create hard-links
ignore) {
// Creating hardlinks might not be supported on some filesystems
Log.getInstance().debug("Failed to create hardlink, falling back to copying file to " + targetArchivePath);
log.debug("Failed to create hardlink, falling back to copying file to " + targetArchivePath);
Files.copy(existingArchivePath, targetArchivePath, StandardCopyOption.REPLACE_EXISTING);
}
}
catch (IOException e) {
Log.getInstance().error("Error while copying " + source + " from shared downloads directory to working dir");
log.error("Error while copying " + source + " from shared downloads directory to working dir");
return false;
}

@@ -98,8 +101,6 @@ public class DirectoryManager {
* Creates cache directory
*/
public void createCacheDir() {
this.removeWorkingDirectory();

this.configuration.getWorkingDirectory().mkdirs();
this.configuration.getWoolCacheDirectory().mkdirs();

@@ -107,10 +108,14 @@ public class DirectoryManager {
this.configuration.getSharedDownloadsDirectory().mkdirs();

if (this.configuration.getSharedDownloadsDirectory().exists() == false) {
System.err.println("DirectoryManager::createCacheDir Unable to create common directory " + this.configuration.getSharedDownloadsDirectory().getAbsolutePath());
log.error("DirectoryManager::createCacheDir Unable to create common directory " + this.configuration.getSharedDownloadsDirectory().getAbsolutePath());
}
}
}

public CompletableFuture<Void> cleanupDirectoryAsync() {
return CompletableFuture.runAsync(this::cleanWorkingDirectory);
}

/**
* Cleans working directory and also deletes it if the user hasn't specified a cache directory
@@ -128,51 +133,9 @@ public class DirectoryManager {
* Deletes the working and storage directories
*/
public void cleanWorkingDirectory() {
this.cleanDirectory(this.configuration.getWorkingDirectory());
this.cleanDirectory(this.configuration.getWoolCacheDirectory());
}

/**
* Cleans a directory and removes files in it from the md5 cache
* @param dir representing the directory to be cleaned
* @return false if the dir null, true otherwise
*/
private boolean cleanDirectory(File dir) {
if (dir == null) {
return false;
}

File[] files = dir.listFiles();
if (files != null) {
for (File file : files) {
if (file.isDirectory()) {
Utils.delete(file);
}
else {
try {
String extension = FilenameUtils.getExtension(file.getName()).toLowerCase();
String name = FilenameUtils.removeExtension(file.getName());
if ("wool".equals(extension)) {
// check if the md5 of the file is ok
String md5_local = Utils.md5(file.getAbsolutePath());

if (md5_local.equals(name) == false) {
file.delete();
}

// TODO: remove old one
}
else {
file.delete();
}
}
catch (IllegalArgumentException e) { // because the file does not have an . in his path
file.delete();
}
}
}
}
return true;
new DirectoryCleaner(this.configuration.getWorkingDirectory()).run();
new DirectoryCleaner(this.configuration.getWoolCacheDirectory()).run();
new DirectoryCleaner(this.configuration.getSharedDownloadsDirectory()).run();
}

/**
@@ -180,7 +143,7 @@ public class DirectoryManager {
* working, storage, and shared downloads directories as long as they are not null
*/
public List<File> getLocalCacheFiles() {
List<File> files_local = new LinkedList<File>();
List<File> filesLocal = new LinkedList<File>();
List<File> files = new LinkedList<File>();
if (this.configuration.getWorkingDirectory() != null) {
File[] filesInDirectory = this.configuration.getWorkingDirectory().listFiles();
@@ -208,10 +171,10 @@ public class DirectoryManager {
String name = FilenameUtils.removeExtension(file.getName());
if ("wool".equals(extension)) {
// check if the md5 of the file is ok
String md5_local = Utils.md5(file.getAbsolutePath());
String md5Local = Utils.md5(file.getAbsolutePath());

if (md5_local.equals(name)) {
files_local.add(file);
if (md5Local.equals(name)) {
filesLocal.add(file);
}
}
}
@@ -220,7 +183,7 @@ public class DirectoryManager {
}
}

return files_local;
return filesLocal;
}

/**

@@ -114,6 +114,7 @@ public class SettingsLoader {
public static final String ARG_HEADLESS = "--headless";
public static final String ARG_DISABLE_LARGE_DOWNLOADS = "--disable-large-downloads";
public static final String ARG_INCOMPATIBLE_PROCESS = "-incompatible-process";
public static final String ARG_SHADOW_DIR = "-shadow-dir";

private String path;

@@ -523,7 +524,6 @@ public class SettingsLoader {

if (config.isUserHasSpecifiedACacheDir() == false && cacheDir != null) {
config.setCacheDir(new File(cacheDir.getValue()));
(new DirectoryManager(config)).createCacheDir();
}

if (config.getUIType() == null && ui != null) {

@@ -40,6 +40,7 @@ import java.time.temporal.ChronoUnit;
import java.util.Calendar;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@@ -120,6 +121,8 @@ public class Worker {
@Option(name = SettingsLoader.ARG_DISABLE_LARGE_DOWNLOADS, usage = "Disable download of larger projects to preserve internet traffic", required = false) private boolean disableLargeDownloads = false;

@Option(name = SettingsLoader.ARG_INCOMPATIBLE_PROCESS, usage = "Specify a process to stop the current render job and pause while the said process is running. For example, if we take Firefox the formatting is firefox.exe on Windows and firefox on Linux.", required = false) private String incompatibleProcess = null;

@Option(name = SettingsLoader.ARG_SHADOW_DIR, usage = "Mirror extracted passworded project files to this directory", metaVar = "PATH", required = false) private String shadowDir = null;

public static void main(String[] args) {
@@ -151,6 +154,9 @@ public class Worker {
config.setPriority(priority);
config.setDetectGPUs(!no_gpu_detection);
config.setIncompatibleProcess(incompatibleProcess);
if (shadowDir != null && !shadowDir.isBlank()) {
config.setShadowExtractDirectory(shadowDir);
}

// set a temporary log instance, in case there is a log call between here and config/setting merge
Log.setInstance(config);
@@ -476,9 +482,12 @@ public class Worker {
String configFilePath = config.getConfigFilePath() != null ? config.getConfigFilePath() : OS.getOS().getDefaultConfigFilePath();
config.setLogDirectory(log_dir != null ? log_dir : (new File (configFilePath).getParent()));

(new DirectoryManager(config)).createCacheDir();
Log.setInstance(config);
DirectoryManager directoryManager = new DirectoryManager(config);

directoryManager.createCacheDir();
CompletableFuture<Void> directoryInit = directoryManager.cleanupDirectoryAsync();

Log.setInstance(config);
Log.getInstance().debug("client version " + Configuration.jarVersion);

// Hostname change will overwrite the existing one (default or read from configuration file) but changes will be lost when the client closes
@@ -524,7 +533,7 @@ public class Worker {
((GuiSwing) gui).setSettingsLoader(settingsLoader);
break;
}
Client cli = new Client(gui, config, server);
Client cli = new Client(gui, config, server, directoryInit);
gui.setClient(cli);
ShutdownHook hook = new ShutdownHook(cli);
hook.attachShutDownHook();
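
Reduced to a minimal, self-contained sketch, the startup ordering introduced here is: kick off the cache cleanup asynchronously, keep constructing the GUI and Client, and block on the future only at the point where the cache must be ready (Client.run() now does the join before its first server request). The Runnable below merely stands in for the real cleanup:

import java.util.concurrent.CompletableFuture;

public class StartupOrderingSketch {
	public static void main(String[] args) {
		// Stand-in for DirectoryManager.cleanupDirectoryAsync()
		CompletableFuture<Void> directoryInit = CompletableFuture.runAsync(
				() -> System.out.println("cleaning cache directories in the background"));

		System.out.println("building GUI and Client while the cleanup runs");

		directoryInit.join(); // same call Client.run() now makes before contacting the server
		System.out.println("cache ready, requesting configuration");
	}
}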

@@ -27,13 +27,16 @@ import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Type;
import java.nio.file.Files;
import java.net.*;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@@ -55,6 +58,9 @@ import com.sheepit.client.ui.Gui;
import com.sheepit.client.ui.Stats;
import com.sheepit.client.ui.TransferStats;
import com.sheepit.client.utils.Utils;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import com.squareup.moshi.Types;
import lombok.Getter;
import org.simpleframework.xml.core.Persister;

@@ -121,7 +127,7 @@ public class ServerRequest extends Thread {
this.log = Log.getInstance();
this.directoryManager = new DirectoryManager(this.user_config);
this.lastRequestTime = 0;
this.keepmealive_duration = 15 * 60 * 1000; // default 15min
this.keepmealive_duration = 10 * 60 * 1000; // default 10min

// OkHttp performs best when we create a single OkHttpClient instance and reuse it for all of the HTTP calls. This is because each client holds its own
// connection pool and thread pools.Reusing connections and threads reduces latency and saves memory. Conversely, creating a client for each request
@@ -142,18 +148,30 @@ public class ServerRequest extends Thread {
HttpUrl.Builder urlBuilder = Objects.requireNonNull(HttpUrl.parse(this.getPage("keepmealive"))).newBuilder();
urlBuilder.addQueryParameter("paused", String.valueOf(this.client.isSuspended()));
urlBuilder.addQueryParameter("sleeping", String.valueOf(this.client.nextJobRequest() != null));
urlBuilder.addQueryParameter("state", String.valueOf(this.client.getState()));

if (this.client != null && this.client.getRenderingJob() != null) {
Job job = this.client.getRenderingJob();

urlBuilder.addQueryParameter("frame", job.getRenderSettings().getFrameNumber()).addQueryParameter("job", job.getId());

RenderProcess process = job.getRenderProcess();
if (process != null) {
urlBuilder.addQueryParameter("rendertime", String.valueOf(process.getDuration()))
.addQueryParameter("remainingtime", String.valueOf(process.getRemainingDuration()));
if (this.client != null) {
Map<String, com.sheepit.client.rendering.State> onGoingJob = new HashMap<>();
if (this.client.getRenderingJob() != null) {
Job job = this.client.getRenderingJob();

urlBuilder.addQueryParameter("frame", job.getRenderSettings().getFrameNumber()).addQueryParameter("job", job.getId());

RenderProcess process = job.getRenderProcess();
if (process != null) {
urlBuilder.addQueryParameter("rendertime", String.valueOf(process.getDuration()))
.addQueryParameter("remainingtime", String.valueOf(process.getRemainingDuration()));
}

onGoingJob.put(job.getId(), job.getState());
}

for (Client.QueuedJob queuedJob : this.client.getJobsToValidate()) { // no blocking call
onGoingJob.put(queuedJob.getJob().getId(), queuedJob.getJob().getState());
}
Moshi moshi = new Moshi.Builder().build();
Type type = Types.newParameterizedType(Map.class, String.class, com.sheepit.client.rendering.State.class);
JsonAdapter<Map<String, com.sheepit.client.rendering.State>> adapter = moshi.adapter(type);
urlBuilder.addQueryParameter("ongoing", adapter.toJson(onGoingJob));
}

Response response = this.HTTPRequest(urlBuilder);
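
For reference, a self-contained sketch of what the new ongoing query parameter ends up carrying; the job id and the cut-down State enum below are placeholders for the real ones:

import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import com.squareup.moshi.Types;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch: serialize a job-id -> State map the same way keepmealive now does.
public class OngoingParamSketch {
	enum State { DOWNLOADING, RENDERING, UPLOADING } // stand-in for com.sheepit.client.rendering.State

	public static void main(String[] args) {
		Map<String, State> onGoingJob = new HashMap<>();
		onGoingJob.put("900123", State.RENDERING); // placeholder job id

		Moshi moshi = new Moshi.Builder().build();
		Type type = Types.newParameterizedType(Map.class, String.class, State.class);
		JsonAdapter<Map<String, State>> adapter = moshi.adapter(type);

		System.out.println(adapter.toJson(onGoingJob)); // {"900123":"RENDERING"}
	}
}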

@@ -434,11 +452,14 @@ public class ServerRequest extends Thread {
catch (SheepItException e) {
throw e;
}
catch (NoRouteToHostException e) {
throw new SheepItServerDown();
}
catch (UnknownHostException e) {
throw new SheepItServerDown();
catch (IOException e) {
if (e.getCause() instanceof NoRouteToHostException || e.getCause() instanceof UnknownHostException) {
throw new SheepItServerDown();
}
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
throw new SheepItException("error requestJob: unknown IO exception " + e + " stacktrace: " + sw.toString());
}
catch (Exception e) {
StringWriter sw = new StringWriter();
@@ -458,30 +479,7 @@ public class ServerRequest extends Thread {
}

public Response HTTPRequest(HttpUrl.Builder httpUrlBuilder, RequestBody data_) throws IOException {
String url = httpUrlBuilder.build().toString();
Request.Builder builder = new Request.Builder().addHeader("User-Agent", HTTP_USER_AGENT).url(url);

this.log.debug("Server::HTTPRequest url(" + url + ")");

if (data_ != null) {
builder.post(data_);
}

Request request = builder.build();

try {
Response response = httpClient.newCall(request).execute();

if (response.isSuccessful() == false) {
this.log.error("Received unsuccessful HTTP response " + response);
}

this.lastRequestTime = new Date().getTime();
return response;
}
catch (IOException e) {
throw new IOException("Unexpected response from HTTP Stack" + e.getMessage());
}
return HTTPRequest(httpUrlBuilder, data_, false);
}

public Response HTTPRequest(HttpUrl.Builder httpUrlBuilder, RequestBody data_, boolean checkIsSuccessful) throws IOException {
@@ -507,10 +505,10 @@ public class ServerRequest extends Thread {
return response;
}
catch (ConnectException e) {
throw new ConnectException("Unexpected response from HTTP Stack" + e.getMessage());
throw new ConnectException("Unexpected response from HTTP Stack " + e.getMessage());
}
catch (IOException e) {
throw new IOException("Unexpected response from HTTP Stack" + e.getMessage());
throw new IOException("Unexpected response from HTTP Stack " + e.getMessage(), e);
}
}

@@ -678,7 +676,7 @@ public class ServerRequest extends Thread {

return ServerCode.UNKNOWN;
}
catch (ConnectException e) {
catch (SocketTimeoutException | ConnectException e) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);

@@ -72,6 +72,8 @@ import static com.sheepit.client.rendering.RenderSettings.UPDATE_METHOD_BY_TILE;

public static final int SHOW_BASE_ICON = -1;

private State state;

private DownloadItem projectDownload;
private DownloadItem rendererDownload;
private String id;
@@ -110,6 +112,7 @@ import static com.sheepit.client.rendering.RenderSettings.UPDATE_METHOD_BY_TILE;
renderProcess = new RenderProcess(log_);
renderState = new RenderState();
renderOutput = new RenderOutput();
state = State.INITIALIZING;
}

public void block() {
@@ -158,6 +161,7 @@ import static com.sheepit.client.rendering.RenderSettings.UPDATE_METHOD_BY_TILE;
}

public Error.Type render(Observer renderStarted) {
state = State.RENDERING;
gui.status("Rendering");
RenderProcess process = getRenderProcess();
Timer timerOfMaxRenderTime = null;

12  src/main/java/com/sheepit/client/rendering/State.java  Normal file

@@ -0,0 +1,12 @@
package com.sheepit.client.rendering;

public enum State {
INITIALIZING,
DOWNLOADING,
PREPARING,
RENDERING,
RENDERING_DONE,
UPLOADING,
ERRORING,
UNKNOWN
}

@@ -44,7 +44,11 @@ import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import javax.swing.border.EmptyBorder;
import java.awt.AWTException;
import java.awt.BorderLayout;
import java.awt.Cursor;
import java.awt.Desktop;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.FontMetrics;
import java.awt.Graphics2D;
import java.awt.GridBagLayout;
@@ -56,18 +60,23 @@ import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowStateListener;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URI;
import java.net.URL;
import java.util.Timer;
import java.util.TimerTask;

public class GuiSwing extends JFrame implements Gui {
public static final String type = "swing";
private static final String logoPath = "/sheepit-logo.png";
private static final String logoSheepItPath = "/sheepit-logo.png";
private static final String logoDiscordPath = "/discord-logo.png";
private static final String logoWebPath = "/web-logo.png";
public static final int WIDTH = 540;

public static void drawVersionStringOnImage(final BufferedImage image, String versionString) {
@@ -90,18 +99,20 @@ public class GuiSwing extends JFrame implements Gui {
}

@NotNull public static JLabel createLogoWithWatermark() {
final URL logoURL = GuiSwing.class.getResource(logoPath);
final URL logoSheepItURL = GuiSwing.class.getResource(logoSheepItPath);
final URL logoDiscordURL = GuiSwing.class.getResource(logoDiscordPath);
final URL logoWebURL = GuiSwing.class.getResource(logoWebPath);

// If logo cannot be found, return dummy image
if (logoURL == null) {
System.err.println("Error: Unable to find logo " + logoPath);
if (logoSheepItURL == null || logoDiscordURL == null || logoWebURL == null) {
System.err.println("Error: Unable to find logos");
return new JLabel();
}

JLabel labelImage;
try {
// Include the version of the app as a watermark in the SheepIt logo.
final BufferedImage watermark = ImageIO.read(logoURL);
final BufferedImage watermark = ImageIO.read(logoSheepItURL);
String versionString = "v" + Configuration.jarVersion;
drawVersionStringOnImage(watermark, versionString);

@@ -109,9 +120,50 @@ public class GuiSwing extends JFrame implements Gui {
}
catch (Exception e) {
// If something fails, we just show the SheepIt logo (without any watermark)
ImageIcon image = new ImageIcon(logoURL);
ImageIcon image = new ImageIcon(logoSheepItURL);
labelImage = new JLabel(image);
}

ImageIcon logoImageDiscord = new ImageIcon(logoDiscordURL);
ImageIcon logoImageWeb = new ImageIcon(logoWebURL);

labelImage.setLayout(new BorderLayout());

JPanel overlayPanel = new JPanel(new FlowLayout(FlowLayout.LEFT, 10, 10));
overlayPanel.setOpaque(false);

JLabel logoLabelDiscord = new JLabel(logoImageDiscord);
logoLabelDiscord.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
logoLabelDiscord.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
try {
Desktop.getDesktop().browse(new URI("https://discord.gg/nZu9thzXfx"));
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
}
});

JLabel logoLabelWeb = new JLabel(logoImageWeb);
logoLabelWeb.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
logoLabelWeb.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
try {
Desktop.getDesktop().browse(new URI("https://www.sheepit-renderfarm.com"));
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
}
});

overlayPanel.add(logoLabelWeb);
overlayPanel.add(logoLabelDiscord);
labelImage.add(overlayPanel, BorderLayout.SOUTH);

return labelImage;
}

@@ -106,7 +106,10 @@ public class GuiText implements Gui {
}

@Override public void status(String msg_, boolean overwriteSuspendedMsg) {
log.debug("GUI " + msg_);
boolean statusChanged = !statusOld.equals(msg_);
if (statusChanged) {
log.debug("GUI " + msg_);
}

if (client != null && client.isSuspended()) {
if (overwriteSuspendedMsg) {
@@ -114,9 +117,8 @@ public class GuiText implements Gui {
}
}
else {
String statusNew = msg_;
if (statusOld.equals(statusNew) == false) {
statusOld = statusNew;
if (statusChanged) {
statusOld = msg_;
System.out.println(String.format("%s %s", this.df.format(new Date()), msg_));
}
}

@@ -709,6 +709,7 @@ public class Settings implements Activity {
if (fromConfig != null && fromConfig.getAbsolutePath().equals(cacheDir.getAbsolutePath()) == false) {
config.setCacheDir(cacheDir);
(new DirectoryManager(config)).createCacheDir();
(new DirectoryManager(config)).cleanWorkingDirectory();
}
else {
// do nothing because the directory is the same as before

@@ -27,15 +27,19 @@ import java.nio.file.Paths;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class Md5 {
private static Map<String, String> cache = new HashMap<>();
final class Md5 {

// Maps the file path to a MD5 hash of the file
static final Map<String, String> cache = new ConcurrentHashMap<>();

// TODO: to avoid memory increase, check if files are deleted

public String get(String path) {
private Md5() {}

static synchronized String get(String path) {
String key = getUniqueKey(path);
if (cache.containsKey(key) == false) {
generate(path);
@@ -43,19 +47,20 @@ public class Md5 {
return cache.get(key);
}

private void generate(String path) {
static synchronized void generate(String path) {
String key = getUniqueKey(path);
try {
MessageDigest md = MessageDigest.getInstance("MD5");
InputStream is = Files.newInputStream(Paths.get(path));
DigestInputStream dis = new DigestInputStream(is, md);
byte[] buffer = new byte[8192];
while (dis.read(buffer) > 0) {
// process the entire file

String data;
try (InputStream is = Files.newInputStream(Paths.get(path));
DigestInputStream dis = new DigestInputStream(is, md)) {
byte[] buffer = new byte[256 * 1024]; // 256 KiB buffer
while (dis.read(buffer) > 0) {
// process the entire file
}
data = Utils.convertBinaryToHex(md.digest());
}
String data = Utils.convertBinaryToHex(md.digest());
dis.close();
is.close();
cache.put(key, data);
}
catch (NoSuchAlgorithmException | IOException e) {
@@ -63,7 +68,7 @@ public class Md5 {
}
}

private String getUniqueKey(String path) {
static synchronized String getUniqueKey(String path) {
File file = new File(path);
return Long.toString(file.lastModified()) + '_' + path;
}
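
With the class now final, static and package-private, callers go through Utils.md5 (updated further down); a hypothetical call, assuming the sheepit client classes are on the classpath and using a made-up path:

// First call hashes the file and caches the result under "<lastModified>_<path>";
// subsequent calls for an unmodified file are served from the ConcurrentHashMap.
String woolFile = "/tmp/sheepit/cache/0123456789abcdef0123456789abcdef.wool"; // placeholder path
String first = Utils.md5(woolFile);
String second = Utils.md5(woolFile);
assert first.equals(second);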

@@ -22,16 +22,12 @@ package com.sheepit.client.utils;
import com.sheepit.client.zip.ChunkInputStream;
import com.sheepit.client.logger.Log;
import com.sheepit.client.zip.UnzipUtils;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.Paths;
@@ -63,36 +59,6 @@ public class Utils {
mimeTypes.put(".exr", "image/x-exr");
}

/**
* Extracts (and optionally decrypts) contents of a given zip file to a target directory
*
* @param zipFileName_ Path to the zipfiles
* @param destinationDirectory Path to the target directory where files will be extracted to
* @param password Optional password for decrypting the zip archive which will only be used if it's not null and if the zip is truly encrypted
* @param log The SheepIt Debug log where log messages might be logged into
* @return A status code as an integer, -1 if it encounters a ZipException, 0 otherwise
*/
public static int unzipFileIntoDirectory(String zipFileName_, String destinationDirectory, char[] password, Log log) {
try {
ZipFile zipFile = new ZipFile(zipFileName_);
// unzipParameters.setIgnoreDateTimeAttributes(true);

if (password != null && zipFile.isEncrypted()) {
zipFile.setPassword(password);
}
// zipFile.extractAll(destinationDirectory, unzipParameters);
zipFile.extractAll(destinationDirectory);
}
catch (ZipException e) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
log.debug("Utils::unzipFileIntoDirectory(" + zipFileName_ + "," + destinationDirectory + ") exception " + e + " stacktrace: " + sw.toString());
return -1;
}
return 0;
}

/**
* Takes the list of zip file chunks and combines them to a zip archive in memory to
* extract (and optionally decrypt) the contents to the target directory
@@ -117,6 +83,19 @@ public class Utils {
return -1;
}
}

public static int unzipChunksIntoDirectoryWithMirror(List<String> full_path_chunks, String destinationDirectory, String mirrorDirectory, char[] password, Log log) {
var chunks = full_path_chunks.stream().map(File::new).collect(Collectors.toList());
try {
File mirror = mirrorDirectory == null ? null : new File(mirrorDirectory);
UnzipUtils.parallelUnzip(new BufferedInputStream(new ChunkInputStream(chunks)), new File(destinationDirectory), password, mirror);
return 0;
}
catch (IOException | InterruptedException e) {
log.debug("Utils::unzipChunksIntoDirectoryWithMirror exception " + e);
return -1;
}
}
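
A minimal call sketch for the new mirror variant next to the existing one (placeholders throughout; the real call site is Client.prepareWorkingDirectory, shown earlier in this diff):

// Assumes the sheepit client classes are on the classpath; paths and password are made up.
List<String> chunkPaths = List.of("/tmp/sheepit/cache/chunk-aaaa.wool", "/tmp/sheepit/cache/chunk-bbbb.wool");
char[] password = "secret".toCharArray();

int ret = Utils.unzipChunksIntoDirectory(chunkPaths, "/tmp/sheepit/scene", password, Log.getInstance());
int retMirror = Utils.unzipChunksIntoDirectoryWithMirror(chunkPaths, "/tmp/sheepit/scene",
		"/tmp/shadow-dir/MyProject", password, Log.getInstance()); // second copy lands under the mirror directory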

/**
* Gets a MD5 checksum either from the cache or computes it and puts it into the cache
@@ -125,8 +104,7 @@ public class Utils {
* @return the string The MD5 checksum
*/
public static String md5(String path_of_file_) {
Md5 md5 = new Md5();
return md5.get(path_of_file_);
return Md5.get(path_of_file_);
}

/**

@@ -186,7 +186,7 @@ public class UnzipUtils {
*/
public static Throwable parallelUnzip(InputStream zipFile, File destination, char[] password) throws IOException, InterruptedException {
var workerCount = Math.min(MAX_IO_WRITTER_THREADS, Runtime.getRuntime().availableProcessors() + 1);
return parallelUnzip(zipFile, destination, password, workerCount);
return parallelUnzip(zipFile, destination, password, null, workerCount);
}

/**
@@ -200,7 +200,12 @@ public class UnzipUtils {
* @param workerCount the number of workers for writing the output
* @return Any error that occurred
*/
public static Throwable parallelUnzip(InputStream zipFile, File destination, char[] password, int workerCount) throws IOException, InterruptedException {
public static Throwable parallelUnzip(InputStream zipFile, File destination, char[] password, File mirrorRoot) throws IOException, InterruptedException {
var workerCount = Math.min(MAX_IO_WRITTER_THREADS, Runtime.getRuntime().availableProcessors() + 1);
return parallelUnzip(zipFile, destination, password, mirrorRoot, workerCount);
}

public static Throwable parallelUnzip(InputStream zipFile, File destination, char[] password, File mirrorRoot, int workerCount) throws IOException, InterruptedException {
if (workerCount < 1) {
throw new IllegalArgumentException("Workers must be greater or equal to 1");
}
@@ -212,7 +217,7 @@ public class UnzipUtils {
}

try {
readArchiveToQueue(zipFile, destination, password, queue);
readArchiveToQueue(zipFile, destination, mirrorRoot, password, queue);
writeWait(queue, executor);
return queue.err;
}
@@ -260,7 +265,7 @@ public class UnzipUtils {
GC_RUNNER.execute(System::gc);
}

private static void readArchiveToQueue(InputStream zipFile, File destination, char[] password, WriteQueue queue) throws IOException, InterruptedException {
private static void readArchiveToQueue(InputStream zipFile, File destination, File mirrorRoot, char[] password, WriteQueue queue) throws IOException, InterruptedException {
try (var zipInputStream = new ZipInputStream(zipFile)) {
if (password != null) {
zipInputStream.setPassword(password);
@@ -268,38 +273,58 @@ public class UnzipUtils {
LocalFileHeader fileHeader;
while ((fileHeader = zipInputStream.getNextEntry()) != null) {
var outFile = new File(destination, fileHeader.getFileName());
File mirrorFile = mirrorRoot == null ? null : new File(mirrorRoot, fileHeader.getFileName());

//Checks if the file is a directory
if (fileHeader.isDirectory()) {
queue.submit(new ZipWriteCmd.MkDir(outFile));
if (mirrorFile != null) {
queue.submit(new ZipWriteCmd.MkDir(mirrorFile));
}
continue;
}

if (fileHeader.getUncompressedSize() < WriteQueue.MAX_SIZE / 10) {
queue.submit(new ZipWriteCmd.SmallFile(outFile, zipInputStream.readAllBytes()));
byte[] data = zipInputStream.readAllBytes();
queue.submit(new ZipWriteCmd.SmallFile(outFile, data));
if (mirrorFile != null) {
queue.submit(new ZipWriteCmd.SmallFile(mirrorFile, data));
}
}
else {
readBigFile(outFile, zipInputStream, queue);
readBigFile(outFile, mirrorFile, zipInputStream, queue);
}
}
}
}

private static void readBigFile(File outFile, ZipInputStream zipInputStream, WriteQueue queue) throws IOException, InterruptedException {
private static void readBigFile(File outFile, File mirrorFile, ZipInputStream zipInputStream, WriteQueue queue) throws IOException, InterruptedException {
var bf = new ZipWriteCmd.BigFile(outFile);
queue.submit(bf);
ZipWriteCmd.BigFile mirrorBf = null;
if (mirrorFile != null) {
mirrorBf = new ZipWriteCmd.BigFile(mirrorFile);
queue.submit(mirrorBf);
}
try {
var buff = new byte[ZipWriteCmd.BigFile.CHUNK_SIZE];
int read;
while ((read = zipInputStream.read(buff)) != -1) {
var chunk = Arrays.copyOf(buff, read);
bf.chunks.put(chunk);
if (mirrorBf != null) {
mirrorBf.chunks.put(Arrays.copyOf(chunk, chunk.length));
}
}
}
finally {
bf.doneAdding = true;
if (mirrorBf != null) {
mirrorBf.doneAdding = true;
}
}
}

/**
* Creates a directory with a lock. The lock is useful here as conflicting multithreaded requests can make each other fail sometimes.

BIN  src/main/resources/discord-logo.png  Normal file
Binary file not shown. (After: 919 B)

Binary file not shown. (Before: 13 KiB, After: 21 KiB)

BIN  src/main/resources/web-logo.png  Normal file
Binary file not shown. (After: 904 B)