diff --git a/.vscode/settings.json b/.vscode/settings.json index 1fce79e..1cd218a 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -8,5 +8,12 @@ "lib/**/*.jar" ], "java.dependency.packagePresentation": "flat", - "java.configuration.updateBuildConfiguration": "automatic" + "java.configuration.updateBuildConfiguration": "automatic", + "spellright.language": [ + "de" + ], + "spellright.documentTypes": [ + "latex", + "plaintext" + ] } diff --git a/Dockerfile b/Dockerfile index 5d55021..8dd7108 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,15 +5,19 @@ RUN mvn package -DskipTests FROM adoptopenjdk/openjdk11:latest WORKDIR / + +ENV NB_TESTS=10 ENV EPSILON "0.1" ENV INSTANCE_MINJOBS 10 ENV INSTANCE_MAXJOBS 10 ENV INSTANCE_MINMACHINES 10 ENV INSTANCE_MAXMACHINES 10 +ENV INSTANCE_SEQMACHINES=5 ENV INSTANCE_MAX_SEQUENTIAL_TIME 100 -ENV ES_HOST "localhost" -ENV ES_INDEX "testdata-" +ENV ES_HOST "217.94.61.24" +ENV ES_PORT 9200 +ENV ES_INDEX "hpc-2024" -COPY --from=build /src/target/cpu-gpu-scheduling-1.0-SNAPSHOT-jar-with-dependencies.jar SchedulingAlgorithms.jar +COPY --from=build --chmod=0777 /src/target/cpu-gpu-scheduling-1.0-SNAPSHOT-jar-with-dependencies.jar /app/SchedulingAlgorithms.jar # use the Epslion ("noops") GC, because we want to measure the time of these algorithms. -ENTRYPOINT ["java", "-jar", "SchedulingAlgorithms.jar", "-XX:+UnlockExperimentalVMOptions", "-XX:+UseEpsilonGC"] \ No newline at end of file +ENTRYPOINT ["java", "-XX:+UnlockExperimentalVMOptions", "-XX:+UseEpsilonGC", "-jar", "/app/SchedulingAlgorithms.jar"] \ No newline at end of file diff --git a/README.md b/README.md index 43302ea..dfc4920 100644 --- a/README.md +++ b/README.md @@ -3,19 +3,55 @@ Algorithms for machine scheduling with malleable jobs An Implementation of the algorithms from Jansen & Land and Grage & Jansen, building upon an algorithm from MouniƩ, Rapine and Trystram. 
+- [Quick Start](#quick-start) + - [Run the local test-files](#run-the-local-test-files) +- [Execution with docker-compose](#execution-with-docker-compose) +- [Building docker images](#building-docker-images) +- [Configuration for testing](#configuration-for-testing) +- [A couple of Maven commands](#a-couple-of-maven-commands) -# A couple of Maven commands +# Quick Start +Build the docker image and run it. The `Dockerfile` contains reasonable default values for all variables. -Once you have configured your project in your IDE you can build it from there. However if you prefer you can use maven from the command line. In that case you could be interested in this short list of commands: +``` +docker build -t malleable . && docker run malleable +``` -* `mvn compile`: it will just compile the code of your application and tell you if there are errors -* `mvn test`: it will compile the code of your application and your tests. It will then run your tests (if you wrote any) and let you know if some fails -* `mvn install`: it will do everything `mvn test` does and then if everything looks file it will install the library or the application into your local maven repository (typically under /.m2). In this way you could use this library from other projects you want to build on the same machine +## Run the local test-files +``` +docker build -t malleable . && docker run -e TEST_FILE_PATH=/testfiles -v $(pwd)/TestInstances:/testfiles:ro malleable +``` +Output: +```console +ohnesorge@DESKTOP-FELIX:/mnt/d/workspace/CPU-GPU-Scheduling$ docker build -t malleable . && docker run -e TEST_FILE_PATH=/testfiles -v $(pwd)/TestInstances:/testfiles:ro malleable +[+] Building 50.9s (12/12) FINISHED +... + => => naming to docker.io/library/malleable:latest +12:12:35.857 [main] INFO de.ohnes.App - Starting Algorithm! 
+12:12:36.397 [main] INFO de.ohnes.App - Reading test file from /testfiles/TestInstance copy 2.json +12:12:36.398 [main] INFO de.ohnes.DualApproximationFramework - Starting dual approximation Framework with shelvesAlgo: CpuGpuApproach +12:12:36.434 [main] INFO de.ohnes.App - Ran instance with 3 machines and 8 jobs in 36 milliseconds. +12:12:36.437 [main] INFO de.ohnes.App - Computed Schedule: -If you need more information please take a look at this [quick tutorial](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html). +///////////////////////////////////////////0002///////////////////////////////////////////###########################################0001###########################################/////////////////////////////////////////////////////0007/////////////////////////////////////////////////////#####################################################0006##################################################### +/////////////////////////////////////////////////////0005/////////////////////////////////////////////////////#####################################################0004#####################################################///////////////////////////////////////////0003///////////////////////////////////////////###########################################0000########################################### +12:12:36.557 [main] DEBUG de.ohnes.logger.MyElasticsearchClient - Trying to push test result to Elasticsearch... +12:12:36.684 [main] WARN de.ohnes.logger.MyElasticsearchClient - Couldn't reach ES Server. Saving data locally until next try. +12:12:36.692 [main] INFO de.ohnes.App - Reading test file from /testfiles/TestInstance copy 3.json +12:12:36.692 [main] INFO de.ohnes.DualApproximationFramework - Starting dual approximation Framework with shelvesAlgo: CpuGpuApproach +12:12:36.707 [main] INFO de.ohnes.App - Ran instance with 3 machines and 5 jobs in 15 milliseconds. 
+12:12:36.707 [main] INFO de.ohnes.App - Computed Schedule: +////////////////////////////0000//////////////////////////// +///////////////////////0001/////////////////////// +/////////////////////////////////0002/////////////////////////////////##################0004################## +/////////////////////////////////0002///////////////////////////////// +////////////////////////////0003//////////////////////////// +... +``` + # Execution with docker-compose Hint: before execution docker-compose the images need to be build using docker. For this see section Docker. @@ -37,5 +73,19 @@ The docker image can be build with the following command. ``` docker build -t malleable . ``` -With the current configuration of the `Dockerfile` the `target/bachelorarbeit-1.0-SNAPSHOT-jar-with-dependencies.jar` will be used, so make sure to call `mvn package` before. -The other configuration in this file are only defaults and can be changed in the `docker-compose.yml`. \ No newline at end of file +The dockerfile is written as a `multi-stage dockerfile`. Therefore there is no need to build the maven project first or making sure you have the correct maven/java version. **Just build the docker container and go!** + +# Configuration for testing + +All parameters are tunable via the environment variables. Note that the code does support randomly generated instances but also allows reading in user-specified .json files containing an instance. (format as in `./TestInstances/`). + + +# A couple of Maven commands + +Once you have configured your project in your IDE you can build it from there. However if you prefer you can use maven from the command line. In that case you could be interested in this short list of commands: + +* `mvn compile`: it will just compile the code of your application and tell you if there are errors +* `mvn test`: it will compile the code of your application and your tests. 
It will then run your tests (if you wrote any) and let you know if some fail +* `mvn install`: it will do everything `mvn test` does and then if everything looks fine it will install the library or the application into your local maven repository (typically under /.m2). In this way you could use this library from other projects you want to build on the same machine + +If you need more information please take a look at this [quick tutorial](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html). diff --git a/docker-compose.yml b/docker-compose.yml index 8bd4726..5014438 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,15 +1,16 @@ services: scheduling: - image: cpugpu:1.0.0 + image: cpugpu:1.1.0 environment: - EPSILON=0.1 - - INSTANCE_MINJOBS=10 - - INSTANCE_MAXJOBS=50 - - INSTANCE_MINMACHINES=5 - - INSTANCE_MAXMACHINES=30 + - INSTANCE_MINJOBS=50 + - INSTANCE_MAXJOBS=120 + - INSTANCE_MINMACHINES=30 + - INSTANCE_MAXMACHINES=35 - ES_HOST=192.168.178.101 - - ES_INDEX=cpugpu-0.1 + - ES_PORT=9200 + - ES_INDEX=cpugpu2-jobs-2024-01 # depends_on: # elasticsearch: # condition: service_healthy - restart: always \ No newline at end of file + restart: unless-stopped \ No newline at end of file diff --git a/src/main/java/de/ohnes/AlgorithmicComponents/Knapsack/MDKnapsack.java b/src/main/java/de/ohnes/AlgorithmicComponents/Knapsack/MDKnapsack.java index 7107d3c..fecdbc4 100644 --- a/src/main/java/de/ohnes/AlgorithmicComponents/Knapsack/MDKnapsack.java +++ b/src/main/java/de/ohnes/AlgorithmicComponents/Knapsack/MDKnapsack.java @@ -1,5 +1,6 @@ package de.ohnes.AlgorithmicComponents.Knapsack; +import java.util.HashMap; import java.util.List; import de.ohnes.util.Job; @@ -12,11 +13,26 @@ * It provides a method to solve the problem and allocate jobs to different shelves based on their weights and costs. */ public class MDKnapsack { + /** + * solves a multi-dimensional knapsack problem. 
+ * + * @param smallItems + * @param bigItems + * @param capacity + * @param shelf1 + * @param shelf2 + * @param smallJobs + * @param seqJobs + */ + public void solve(List smallItems, List bigItems, Vector3D capacity, List shelf1, List shelf2, List smallJobs, List seqJobs) { - public void solve(List items, Vector3D capacity, List shelf1, List shelf2, List smallJobs, List seqJobs) { - Double[][][][] dp = new Double[items.size()+1][capacity.get(0)+1][capacity.get(1)+1][capacity.get(2)+1]; - - + int b = bigItems.size(); + int s = smallItems.size(); + int n = s + b; + //TODO: reduce 3rd dimension + Double[][][][] dp = new Double[b+1][capacity.get(0)+1][capacity.get(2)+1][capacity.get(1)+1]; + + //initialization for (int x1 = 0; x1 < dp[0].length; x1++) { for (int x2 = 0; x2 < dp[0][x1].length; x2++) { @@ -25,11 +41,12 @@ public void solve(List items, Vector3D capacity, List shelf } } } - + //acutal dp - for (int i = 1; i <= items.size(); i++) { - Integer[] costs = items.get(i-1).getCosts(); - Vector3D[] weights = items.get(i-1).getWeights(); + // fist solve the knapsack problem for the big items + for (int i = 1; i <= bigItems.size(); i++) { + Integer[] costs = bigItems.get(i-1).getCosts(); + Vector3D[] weights = bigItems.get(i-1).getWeights(); for (int x1 = 0; x1 < dp[0].length; x1++) { for (int x2 = 0; x2 < dp[0][x1].length; x2++) { for (int x3 = 0; x3 < dp[0][x1][x2].length; x3++) { @@ -37,8 +54,8 @@ public void solve(List items, Vector3D capacity, List shelf for (int c = 0; c < costs.length; c++) { //for the choices Vector3D w = weights[c]; int x1_ = x1 - w.get(0); - int x2_ = x2 - w.get(1); - int x3_ = x3 - w.get(2); + int x2_ = x2 - w.get(2); + int x3_ = x3 - w.get(1); if (x1_ < 0 || x2_ < 0 || x3_ < 0) { continue; } @@ -57,28 +74,112 @@ public void solve(List items, Vector3D capacity, List shelf } } - Vector3D minValue = new Vector3D(0, 0, 0); - double minCost = Double.MAX_VALUE; + //discard 2nd constraint, as the small items don't change it. 
+ + Double[][][] dp2 = new Double[s+1][capacity.get(0)+1][capacity.get(2)+1]; + HashMap map = new HashMap<>(); // a map to remember the position of the best solution for the big items + //initialization for (int x1 = 0; x1 < dp[0].length; x1++) { for (int x2 = 0; x2 < dp[0][x1].length; x2++) { - for (int x3 = 0; x3 < dp[0][x1][x2].length; x3++) { - if (dp[items.size()][x1][x2][x3] != null && dp[items.size()][x1][x2][x3] < minCost) { - minCost = dp[items.size()][x1][x2][x3]; - minValue = new Vector3D(x1, x2, x3); + // find the best solution for the big items + // dp2[0][x1][x2] = Arrays.stream(dp[bigItems.size()][x1][x2]).filter(d -> d != null).min(Double::compare).orElse(null); + // remember the position of best solution + for (int i = 0; i < dp[b][x1][x2].length; i++) { + if (dp[b][x1][x2][i] != null) { + String key = x1 + "," + x2; + if (map.containsKey(key)) { + if (dp[b][x1][x2][i] < dp2[0][x1][x2]) { + map.put(key, i); + dp2[0][x1][x2] = dp[b][x1][x2][i]; + } + } else { + map.put(key, i); + dp2[0][x1][x2] = dp[b][x1][x2][i]; + } } } } } - //reconstruction - for (int i = items.size(); i > 0; i--) { - MDKnapsackItem item = items.get(i - 1); + //acutal dp + // solve the knapsack problem for the remaining small items + for (int i = 1; i <= s; i++) { + Integer[] costs = smallItems.get(i-1).getCosts(); + Vector3D[] weights = smallItems.get(i-1).getWeights(); + for (int x1 = 0; x1 < dp2[0].length; x1++) { + for (int x2 = 0; x2 < dp2[0][x1].length; x2++) { + double minVal = Double.MAX_VALUE; + for (int c = 0; c < costs.length; c++) { //for the choices + Vector3D w = weights[c]; + int x1_ = x1 - w.get(0); + int x2_ = x2 - w.get(2); + if (x1_ < 0 || x2_ < 0) { + continue; + } + if (dp2[i-1][x1_][x2_] == null) { + continue; + } + if (dp2[i-1][x1_][x2_] + costs[c] < minVal) { + minVal = dp2[i-1][x1_][x2_] + costs[c]; + } + } + if (minVal < Double.MAX_VALUE) { + dp2[i][x1][x2] = minVal; + } + } + } + } + + + Vector3D minValue = new Vector3D(0, 0, 0); + double minCost = 
Double.MAX_VALUE; + for (int x1 = 0; x1 < dp2[0].length; x1++) { + for (int x2 = 0; x2 < dp2[0][x1].length; x2++) { + if (dp2[s][x1][x2] != null && dp2[s][x1][x2] < minCost) { + minCost = dp2[s][x1][x2]; + minValue = new Vector3D(x1, 0, x2); + } + } + } + minValue.set(1, map.get(minValue.get(0) + "," + minValue.get(2))); // set the 2nd dimension to the best solution for the big items + //reconstruction for small items + for (int i = s; i > 0; i--) { + MDKnapsackItem item = smallItems.get(i - 1); + for (KnapsackChoice choice : item.getChoices()) { + Vector3D newWeight = minValue.subtract(choice.getWeight()); + if (newWeight.get(0) < 0 || newWeight.get(1) < 0 || newWeight.get(2) < 0) { + continue; + } + if (dp2[i-1][newWeight.get(0)][newWeight.get(2)] != null) { + switch (choice.getAllotment()) { + case SMALL: + smallJobs.add(item.getJob()); + break; + case SEQUENTIAL: + seqJobs.add(item.getJob()); + break; + case SHELF1: + shelf1.add(item.getJob()); + break; + case SHELF2: + shelf2.add(item.getJob()); + break; + } + minValue = newWeight; + break; //break out of loop as soon as some allotment was found. 
+ } + } + } + + //reconstruction for big items + for (int i = b; i > 0; i--) { + MDKnapsackItem item = bigItems.get(i - 1); for (KnapsackChoice choice : item.getChoices()) { Vector3D newWeight = minValue.subtract(choice.getWeight()); if (newWeight.get(0) < 0 || newWeight.get(1) < 0 || newWeight.get(2) < 0) { continue; } - if (dp[i-1][newWeight.get(0)][newWeight.get(1)][newWeight.get(2)] != null) { + if (dp[i-1][newWeight.get(0)][newWeight.get(2)][newWeight.get(1)] != null) { switch (choice.getAllotment()) { case SMALL: smallJobs.add(item.getJob()); @@ -98,5 +199,7 @@ public void solve(List items, Vector3D capacity, List shelf } } } + // at the end we should arrive at 0.0 + assert dp[0][minValue.get(0)][minValue.get(2)][minValue.get(1)] == 0.0; } } diff --git a/src/main/java/de/ohnes/AlgorithmicComponents/Shelves/CpuGpuApproach.java b/src/main/java/de/ohnes/AlgorithmicComponents/Shelves/CpuGpuApproach.java index 8db19b0..7a4809f 100644 --- a/src/main/java/de/ohnes/AlgorithmicComponents/Shelves/CpuGpuApproach.java +++ b/src/main/java/de/ohnes/AlgorithmicComponents/Shelves/CpuGpuApproach.java @@ -34,26 +34,37 @@ public boolean solve(double d, double epsilon) { // inverted delta final int invDelta = 6; - final int n = I.getJobs().length; - final double mu = (1.0 * n * invDelta) / d; + final int n = I.getN(); + final int l = I.getL(); + final double v = (2.0 * invDelta) / d; //TODO: separate rounding for big tasks and small tasks + final double mu = (1.0 * n * invDelta) / d * l; List shelf2 = new ArrayList<>(Arrays.asList(MyMath.findBigJobs(I, d))); List smallJobs = new ArrayList<>(Arrays.asList(MyMath.findSmallJobs(I, d))); //transform to knapsack problem - List knapsackItems = new ArrayList<>(); + List smallKnapsackItems = new ArrayList<>(); + List bigKnapsackItems = new ArrayList<>(); for (Job job : smallJobs) { MDKnapsackItem knapsackItem = new MDKnapsackItem(); knapsackItem.setJob(job); //c_{i, S} knapsackItem.addChoice(MDKnapsackChoice.SMALL, 
job.getProcessingTime(1), new Vector3D(0, 0, 0)); //c_{i, 3} + int weight = 0; //if a choice would certainly violate the deadline d, we do not allow it. if (job.getSequentialProcessingTime() <= d) { - knapsackItem.addChoice(MDKnapsackChoice.SEQUENTIAL, 0, new Vector3D(0, job.getSequentialWeight(d), job.getScaledRoundedSequentialProcessingTime(mu))); + weight = job.getSequentialWeight(d); + knapsackItem.addChoice(MDKnapsackChoice.SEQUENTIAL, 0, new Vector3D(0, weight, job.getScaledRoundedSequentialProcessingTime(mu))); + } + if (weight > 0) { + // if the job is big + bigKnapsackItems.add(knapsackItem); + } else { + // if the job is small + smallKnapsackItems.add(knapsackItem); } - knapsackItems.add(knapsackItem); } for (Job job : shelf2) { MDKnapsackItem knapsackItem = new MDKnapsackItem(); @@ -73,9 +84,11 @@ public boolean solve(double d, double epsilon) { knapsackItem.addChoice(MDKnapsackChoice.SHELF2, job.getProcessingTime(dHalfAllotment) * dHalfAllotment, new Vector3D(0, 0, 0)); } //c_{i, 3} + int weight = 0; //if a choice would certainly violate the deadline d, we do not allow it. if (job.getSequentialProcessingTime() <= d) { - knapsackItem.addChoice(MDKnapsackChoice.SEQUENTIAL, 0, new Vector3D(0, job.getSequentialWeight(d), job.getScaledRoundedSequentialProcessingTime(mu))); + weight = job.getSequentialWeight(d); + knapsackItem.addChoice(MDKnapsackChoice.SEQUENTIAL, 0, new Vector3D(0, weight, job.getScaledRoundedSequentialProcessingTime(mu))); } // if there is no valid choice for some job, then we must reject the deadline d. 
@@ -83,7 +96,14 @@ public boolean solve(double d, double epsilon) { if (knapsackItem.getChoices().isEmpty()) { return false; } - knapsackItems.add(knapsackItem); + + if (weight > 0) { + // if the job is big + bigKnapsackItems.add(knapsackItem); + } else { + // if the job is small + smallKnapsackItems.add(knapsackItem); + } } @@ -94,8 +114,12 @@ public boolean solve(double d, double epsilon) { shelf2.clear(); List sequentialJobs = new ArrayList<>(); smallJobs.clear(); - Vector3D capacity = new Vector3D(I.getM(), 2* I.getL(), invDelta * I.getL()*I.getN()); - kS.solve(knapsackItems, capacity, shelf1, shelf2, smallJobs, sequentialJobs); + // 1st dimension: number of machines used by T_1 (less than m) + // 2nd dimension: weight of tasks on L (less than 2l) + // 3rd dimension: total work regarding the scaled and rounded instance on L (less than n/\delta) + // -> optimized: (less than 2l/\delta) + Vector3D capacity = new Vector3D(I.getM(), 2* l, invDelta * 2 * l + 1); + kS.solve(smallKnapsackItems, bigKnapsackItems, capacity, shelf1, shelf2, smallJobs, sequentialJobs); // calculate the work for the jobs in the shelves for the malleable machines. 
double Ws = 0; diff --git a/src/main/java/de/ohnes/App.java b/src/main/java/de/ohnes/App.java index f20a5b5..95ac05a 100644 --- a/src/main/java/de/ohnes/App.java +++ b/src/main/java/de/ohnes/App.java @@ -1,13 +1,22 @@ package de.ohnes; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.core.config.plugins.util.ResolverUtil.Test; + +import com.fasterxml.jackson.databind.ObjectMapper; import de.ohnes.AlgorithmicComponents.Approximation.TwoApproximation; import de.ohnes.AlgorithmicComponents.Shelves.CpuGpuApproach; import de.ohnes.logger.MyElasticsearchClient; +import de.ohnes.logger.printSchedule; import de.ohnes.util.Instance; import de.ohnes.util.TestResult; @@ -15,14 +24,18 @@ public class App { private static final Logger LOGGER = LogManager.getLogger(App.class); + private static Integer nbTests; private static Double epsilon; private static Integer minJobs; private static Integer maxJobs; private static Integer minMachines; private static Integer maxMachines; + private static Double seqMachines; private static Integer maxSeqTime; private static String ESHost; + private static int ESPort; private static String ESIndex; + private static String TestFilePath; /** * @param args @@ -30,36 +43,64 @@ public class App { */ public static void main(String[] args) throws Exception { Configurator.setRootLevel(Level.ALL); + readEnv(); + + LOGGER.info("Starting Algorithm!"); + MyElasticsearchClient.makeConnection(ESHost, ESPort); + // read test files if specified + if (TestFilePath != null) { + File dir = new File(TestFilePath); + File[] files = dir.listFiles(); + if(files != null) { + for(File testFile : files) { + Instance I = new Instance(); + try { + I = new ObjectMapper().readValue(testFile, Instance.class); 
+ } catch (IOException e) { + e.printStackTrace(); + } + LOGGER.info("Reading test file from {}/{}", TestFilePath, testFile.getName()); + TestResult tr = runTest(I); + LOGGER.info("Computed Schedule: \n{}", printSchedule.printMachines(I.getMachines())); + MyElasticsearchClient.pushData(ESIndex, tr); + } + } + } else { + + try { + for (int i = 0; i < nbTests; i++) { + Instance I = new Instance(); + I.generateRandomInstance(minJobs, maxJobs, minMachines, maxMachines, seqMachines, maxSeqTime); + MyElasticsearchClient.pushData(ESIndex, runTest(I)); + } + } catch (OutOfMemoryError e) { + LOGGER.error("Out of Memory Error. Exiting..."); + System.exit(1); + } + } + System.exit(0); + } + + private static void readEnv() { + nbTests = Integer.parseInt(System.getenv("NB_TESTS")); epsilon = Double.parseDouble(System.getenv("EPSILON")); minJobs = Integer.parseInt(System.getenv("INSTANCE_MINJOBS")); maxJobs = Integer.parseInt(System.getenv("INSTANCE_MAXJOBS")); minMachines = Integer.parseInt(System.getenv("INSTANCE_MINMACHINES")); maxMachines = Integer.parseInt(System.getenv("INSTANCE_MAXMACHINES")); + seqMachines = Double.parseDouble(System.getenv("INSTANCE_SEQMACHINES")); maxSeqTime = Integer.parseInt(System.getenv("INSTANCE_MAX_SEQUENTIAL_TIME")); ESHost = System.getenv("ES_HOST"); + ESPort = Integer.parseInt(System.getenv("ES_PORT")); ESIndex = System.getenv("ES_INDEX"); - - LOGGER.info("Starting Algorithm!"); - MyElasticsearchClient.makeConnection(ESHost); - try { - while(true) { - MyElasticsearchClient.pushData(ESIndex, runTest()); - } - } catch (OutOfMemoryError e) { - LOGGER.error("Out of Memory Error. 
Exiting..."); - System.exit(1); - } - + TestFilePath = System.getenv("TEST_FILE_PATH"); } - /** * @return TestResult */ - private static TestResult runTest() { - Instance I = new Instance(); - I.generateRandomInstance(minJobs, maxJobs, minMachines, maxMachines, maxSeqTime); + private static TestResult runTest(Instance I) { DualApproximationFramework dF = new DualApproximationFramework(null, new CpuGpuApproach(), new TwoApproximation(), I); @@ -67,17 +108,18 @@ private static TestResult runTest() { long startTime = System.currentTimeMillis(); double d = dF.start(epsilon); long endTime = System.currentTimeMillis(); - LOGGER.info("Ran instance with {} machines and {} jobs in {} milliseconds.", I.getM(), I.getN(), (endTime - startTime)); + LOGGER.info("Ran instance with {} malleable, {} sequential machines and {} jobs in {} milliseconds.", I.getM(), I.getL(), I.getN(), (endTime - startTime)); TestResult tr = new TestResult(); tr.setAchivedMakespan(I.getMakespan()); tr.setEstimatedOptimum(d); tr.setJobs(I.getN()); - tr.setMachines(I.getM()); + tr.setMachines(I.getM() + I.getL()); + tr.setMalMachines(I.getM()); + tr.setSeqMachines(I.getL()); tr.setMilliseconds((endTime - startTime)); tr.setInstanceID(I.getId()); return tr; } - } diff --git a/src/main/java/de/ohnes/DualApproximationFramework.java b/src/main/java/de/ohnes/DualApproximationFramework.java index 0367de6..3d2b924 100644 --- a/src/main/java/de/ohnes/DualApproximationFramework.java +++ b/src/main/java/de/ohnes/DualApproximationFramework.java @@ -29,15 +29,9 @@ public DualApproximationFramework(Algorithm fptas, Algorithm knapsack, Approxima public double start(double epsilon) { Algorithm usedAlgo; - if(I.getM() >= 8 * (I.getN() / epsilon)) { - LOGGER.info("Starting dual approximation Framework with fptas: {}", this.getFPTASName()); - usedAlgo = this.fptas; - usedAlgo.setInstance(I); - } else { - LOGGER.info("Starting dual approximation Framework with shelvesAlgo: {}", this.getShelvesAlgoName()); - usedAlgo = 
this.knapsack; - usedAlgo.setInstance(I); - } + LOGGER.info("Starting dual approximation Framework with shelvesAlgo: {}", this.getShelvesAlgoName()); + usedAlgo = this.knapsack; + usedAlgo.setInstance(I); double lowerBound = this.approx.approximate(I) / 2; //TODO this bound could be thighter. double upperBound = lowerBound * 8; //TODO add list scheduling. -> schedule twiari greedy and divide by 2. @@ -50,7 +44,7 @@ private double binarySearch(Algorithm algo, double epsilon, double l, double r) I.resetInstance(); //reset the instance because it was altered in previous attempt. if(algo.solve(mid, epsilon)) { //a schedule of length "mid" exists - if(r - mid < epsilon) { + if((1 + epsilon) * l > mid) { return mid; } diff --git a/src/main/java/de/ohnes/logger/MyElasticsearchClient.java b/src/main/java/de/ohnes/logger/MyElasticsearchClient.java index e3b6144..5ee0826 100644 --- a/src/main/java/de/ohnes/logger/MyElasticsearchClient.java +++ b/src/main/java/de/ohnes/logger/MyElasticsearchClient.java @@ -1,15 +1,12 @@ package de.ohnes.logger; import java.io.IOException; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import org.apache.http.HttpHost; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestClient; @@ -38,10 +35,10 @@ public class MyElasticsearchClient { // }; // } - public static synchronized RestHighLevelClient makeConnection(String host) { + public static synchronized RestHighLevelClient makeConnection(String host, int port) { if(restHighLevelClient == null) { restHighLevelClient = new RestHighLevelClient(RestClient.builder( - new HttpHost(host, 9200) + new HttpHost(host, port) )); } return restHighLevelClient; diff --git a/src/main/java/de/ohnes/util/Instance.java 
b/src/main/java/de/ohnes/util/Instance.java index d8e33e7..088ff1c 100644 --- a/src/main/java/de/ohnes/util/Instance.java +++ b/src/main/java/de/ohnes/util/Instance.java @@ -45,15 +45,16 @@ public Instance(int n, int m, int l, Job[] jobs) { /** - * @param minJobs - * @param maxJobs - * @param minMachines - * @param maxMachines + * @param minJobs minimal number of jobs + * @param maxJobs maximal number of jobs + * @param minMachines minimal number of malleable machines + * @param maxMachines maximal number of malleable machines + * @param seqMachines the number of sequential machines in relation to malleable machines */ - public void generateRandomInstance(int minJobs, int maxJobs, int minMachines, int maxMachines, int maxSeqTime) { + public void generateRandomInstance(int minJobs, int maxJobs, int minMachines, int maxMachines, double seqMachines, int maxSeqTime) { this.m = MyMath.getRandomNumber(minMachines, maxMachines); - this.l = MyMath.getRandomNumber(minMachines, maxMachines); //TODO different parameters?? 
+ this.l = (int) Math.floor(this.m * seqMachines); this.n = MyMath.getRandomNumber(minJobs, maxJobs); this.jobs = new Job[this.n]; diff --git a/src/main/java/de/ohnes/util/TestResult.java b/src/main/java/de/ohnes/util/TestResult.java index 3554932..446eb03 100644 --- a/src/main/java/de/ohnes/util/TestResult.java +++ b/src/main/java/de/ohnes/util/TestResult.java @@ -12,6 +12,8 @@ public class TestResult { private long InstanceID; private int jobs; private int machines; + private int seqMachines; + private int malMachines; private double estimatedOptimum; private double achivedMakespan; private long milliseconds; diff --git a/src/main/java/de/ohnes/util/Vector3D.java b/src/main/java/de/ohnes/util/Vector3D.java index 51265bb..82a1ed3 100644 --- a/src/main/java/de/ohnes/util/Vector3D.java +++ b/src/main/java/de/ohnes/util/Vector3D.java @@ -12,6 +12,10 @@ public int get(int i) { return this.values[i]; } + public void set(int i, int value) { + this.values[i] = value; + } + public boolean isSmallerElementWise(int x1, int x2, int x3) { return x1 <= this.values[0] && x2 <= this.values[1] && x3 <= this.values[2]; } diff --git a/src/test/java/de/ohnes/AppTest.java b/src/test/java/de/ohnes/AppTest.java index f8944fd..dac7351 100644 --- a/src/test/java/de/ohnes/AppTest.java +++ b/src/test/java/de/ohnes/AppTest.java @@ -67,7 +67,7 @@ public static List input() { for(int i = 0; i < 20; i++) { Instance[] args = new Instance[1]; args[0] = new Instance(0, 0, 0, null); - args[0].generateRandomInstance(100, 1000, 10, 50, 100); + args[0].generateRandomInstance(100, 1000, 10, 50, 1, 100); instances.add(args); } diff --git a/src/test/java/de/ohnes/CPUGPUTests.java b/src/test/java/de/ohnes/CPUGPUTests.java index ea65bc3..71850c9 100644 --- a/src/test/java/de/ohnes/CPUGPUTests.java +++ b/src/test/java/de/ohnes/CPUGPUTests.java @@ -61,7 +61,7 @@ public static List input() { for(int i = 0; i < 20; i++) { Instance[] args = new Instance[1]; args[0] = new Instance(0, 0, 0, null); - 
args[0].generateRandomInstance(10, 20, 5, 10, 100); + args[0].generateRandomInstance(10, 20, 5, 10, 1, 100); instances.add(args); } diff --git a/src/test/java/de/ohnes/LPTTests.java b/src/test/java/de/ohnes/LPTTests.java index f78c7c3..f29c649 100644 --- a/src/test/java/de/ohnes/LPTTests.java +++ b/src/test/java/de/ohnes/LPTTests.java @@ -62,7 +62,7 @@ public static List input() { for(int i = 0; i < 20; i++) { Instance[] args = new Instance[1]; args[0] = new Instance(0, 0, 0, null); - args[0].generateRandomInstance(100, 1000, 10, 50, 100); + args[0].generateRandomInstance(100, 1000, 10, 50, 1, 100); instances.add(args); } diff --git a/src/test/java/de/ohnes/MDKnapsackTests.java b/src/test/java/de/ohnes/MDKnapsackTests.java index d715dca..8e1db32 100644 --- a/src/test/java/de/ohnes/MDKnapsackTests.java +++ b/src/test/java/de/ohnes/MDKnapsackTests.java @@ -26,17 +26,19 @@ @RunWith(Parameterized.class) public class MDKnapsackTests { - private List items; + private List smallItems; + private List bigItems; private Vector3D capacity; List shelf1; List shelf2; List smallJobs; List seqJobs; - public MDKnapsackTests(List items, Vector3D capacity, List shelf1, List shelf2, List smallJobs, List seqJobs) { + public MDKnapsackTests(List smallItems, List bigItems, Vector3D capacity, List shelf1, List shelf2, List smallJobs, List seqJobs) { super(); this.capacity = capacity; - this.items = items; + this.smallItems = smallItems; + this.bigItems = bigItems; this.seqJobs = seqJobs; this.shelf1 = shelf1; this.shelf2 = shelf2; @@ -54,40 +56,41 @@ public static List input() { Vector3D capacity = new Vector3D(10, 10, 10); - List items = new ArrayList<>(); + List smallItems = new ArrayList<>(); + List bigItems = new ArrayList<>(); MDKnapsackItem item1 = new MDKnapsackItem(); item1.setJob(new Job(1, new int[]{10, 20, 30}, 20)); - item1.addChoice(MDKnapsackChoice.SHELF1, 1, new Vector3D(10, 0, 0)); - item1.addChoice(MDKnapsackChoice.SHELF2, 2, new Vector3D(0, 10, 0)); - 
item1.addChoice(MDKnapsackChoice.SEQUENTIAL, 3, new Vector3D(0, 0, 10)); + // item1.addChoice(MDKnapsackChoice.SHELF1, 1, new Vector3D(10, 0, 0)); + // item1.addChoice(MDKnapsackChoice.SHELF2, 2, new Vector3D(0, 10, 0)); + item1.addChoice(MDKnapsackChoice.SEQUENTIAL, 0, new Vector3D(0, 0, 10)); item1.addChoice(MDKnapsackChoice.SMALL, 4, new Vector3D(0, 0, 0)); - items.add(item1); + smallItems.add(item1); MDKnapsackItem item2 = new MDKnapsackItem(); item2.setJob(new Job(1, new int[]{10, 20, 30}, 20)); item2.addChoice(MDKnapsackChoice.SHELF1, 1, new Vector3D(10, 0, 0)); item2.addChoice(MDKnapsackChoice.SHELF2, 2, new Vector3D(0, 10, 0)); item2.addChoice(MDKnapsackChoice.SEQUENTIAL, 3, new Vector3D(0, 0, 10)); - item2.addChoice(MDKnapsackChoice.SMALL, 4, new Vector3D(0, 0, 0)); - items.add(item2); + // item2.addChoice(MDKnapsackChoice.SMALL, 4, new Vector3D(0, 0, 0)); + bigItems.add(item2); MDKnapsackItem item3 = new MDKnapsackItem(); item3.setJob(new Job(1, new int[]{10, 20, 30}, 20)); item3.addChoice(MDKnapsackChoice.SHELF1, 1, new Vector3D(10, 0, 0)); item3.addChoice(MDKnapsackChoice.SHELF2, 2, new Vector3D(0, 10, 0)); item3.addChoice(MDKnapsackChoice.SEQUENTIAL, 3, new Vector3D(0, 0, 10)); - item3.addChoice(MDKnapsackChoice.SMALL, 4, new Vector3D(0, 0, 0)); - items.add(item3); + // item3.addChoice(MDKnapsackChoice.SMALL, 4, new Vector3D(0, 0, 0)); + bigItems.add(item3); - return Arrays.asList(new Object[][] {{items, capacity, shelf1, shelf2, smallJobs, seqJobs}}); + return Arrays.asList(new Object[][] {{smallItems, bigItems, capacity, shelf1, shelf2, smallJobs, seqJobs}}); } @Test public void testMDKnapsack() { MDKnapsack kS = new MDKnapsack(); - kS.solve(items, capacity, shelf1, shelf2, smallJobs, seqJobs); - assertTrue("All jobs should be selected", shelf1.size() + shelf2.size() + smallJobs.size() + seqJobs.size() == items.size()); //length should be leq than capacity + kS.solve(smallItems, bigItems, capacity, shelf1, shelf2, smallJobs, seqJobs); + 
assertTrue("All jobs should be selected", shelf1.size() + shelf2.size() + smallJobs.size() + seqJobs.size() == smallItems.size() + bigItems.size()); //length should be leq than capacity assertTrue("No job should be chosen as small", smallJobs.size()==0); assertTrue("In Shelf1 should be 1 job.", shelf1.size()==1); assertTrue("In Shelf2 should be 1 job.", shelf2.size()==1);