9 changes: 8 additions & 1 deletion .vscode/settings.json
@@ -8,5 +8,12 @@
"lib/**/*.jar"
],
"java.dependency.packagePresentation": "flat",
"java.configuration.updateBuildConfiguration": "automatic"
"java.configuration.updateBuildConfiguration": "automatic",
"spellright.language": [
"de"
],
"spellright.documentTypes": [
"latex",
"plaintext"
]
}
12 changes: 8 additions & 4 deletions Dockerfile
@@ -5,15 +5,19 @@ RUN mvn package -DskipTests

FROM adoptopenjdk/openjdk11:latest
WORKDIR /

ENV NB_TESTS=10
ENV EPSILON "0.1"
ENV INSTANCE_MINJOBS 10
ENV INSTANCE_MAXJOBS 10
ENV INSTANCE_MINMACHINES 10
ENV INSTANCE_MAXMACHINES 10
ENV INSTANCE_SEQMACHINES=5
ENV INSTANCE_MAX_SEQUENTIAL_TIME 100
ENV ES_HOST "localhost"
ENV ES_INDEX "testdata-"
ENV ES_HOST "217.94.61.24"
ENV ES_PORT 9200
ENV ES_INDEX "hpc-2024"

COPY --from=build /src/target/cpu-gpu-scheduling-1.0-SNAPSHOT-jar-with-dependencies.jar SchedulingAlgorithms.jar
COPY --from=build --chmod=0777 /src/target/cpu-gpu-scheduling-1.0-SNAPSHOT-jar-with-dependencies.jar /app/SchedulingAlgorithms.jar
# use the Epsilon ("no-op") GC, because we want to measure the running time of these algorithms.
ENTRYPOINT ["java", "-jar", "SchedulingAlgorithms.jar", "-XX:+UnlockExperimentalVMOptions", "-XX:+UseEpsilonGC"]
ENTRYPOINT ["java", "-jar", "/app/SchedulingAlgorithms.jar", "-XX:+UnlockExperimentalVMOptions", "-XX:+UseEpsilonGC"]
66 changes: 58 additions & 8 deletions README.md
@@ -3,19 +3,55 @@ Algorithms for machine scheduling with malleable jobs

An implementation of the algorithms by Jansen & Land and by Grage & Jansen, building upon an algorithm by Mounié, Rapine and Trystram.

- [Quick Start](#quick-start)
- [Run the local test-files](#run-the-local-test-files)
- [Execution with docker-compose](#execution-with-docker-compose)
- [Building docker images](#building-docker-images)
- [Configuration for testing](#configuration-for-testing)
- [A couple of Maven commands](#a-couple-of-maven-commands)

# A couple of Maven commands
# Quick Start
Build the docker image and run it. The `Dockerfile` contains reasonable default values for all variables.

Once you have configured your project in your IDE you can build it from there. However if you prefer you can use maven from the command line. In that case you could be interested in this short list of commands:
```
docker build -t malleable . && docker run malleable
```

* `mvn compile`: it will just compile the code of your application and tell you if there are errors
* `mvn test`: it will compile the code of your application and your tests. It will then run your tests (if you wrote any) and let you know if some fails
* `mvn install`: it will do everything `mvn test` does and then if everything looks file it will install the library or the application into your local maven repository (typically under <USER FOLDER>/.m2). In this way you could use this library from other projects you want to build on the same machine
## Run the local test-files
```
docker build -t malleable . && docker run -e TEST_FILE_PATH=/testfiles -v $(pwd)/TestInstances:/testfiles:ro malleable
```
Output:
```console
ohnesorge@DESKTOP-FELIX:/mnt/d/workspace/CPU-GPU-Scheduling$ docker build -t malleable . && docker run -e TEST_FILE_PATH=/testfiles -v $(pwd)/TestInstances:/testfiles:ro malleable
[+] Building 50.9s (12/12) FINISHED
...
=> => naming to docker.io/library/malleable:latest
12:12:35.857 [main] INFO de.ohnes.App - Starting Algorithm!
12:12:36.397 [main] INFO de.ohnes.App - Reading test file from /testfiles/TestInstance copy 2.json
12:12:36.398 [main] INFO de.ohnes.DualApproximationFramework - Starting dual approximation Framework with shelvesAlgo: CpuGpuApproach
12:12:36.434 [main] INFO de.ohnes.App - Ran instance with 3 machines and 8 jobs in 36 milliseconds.
12:12:36.437 [main] INFO de.ohnes.App - Computed Schedule:

If you need more information please take a look at this [quick tutorial](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html).

///////////////////////////////////////////0002///////////////////////////////////////////###########################################0001###########################################/////////////////////////////////////////////////////0007/////////////////////////////////////////////////////#####################################################0006#####################################################
/////////////////////////////////////////////////////0005/////////////////////////////////////////////////////#####################################################0004#####################################################///////////////////////////////////////////0003///////////////////////////////////////////###########################################0000###########################################


12:12:36.557 [main] DEBUG de.ohnes.logger.MyElasticsearchClient - Trying to push test result to Elasticsearch...
12:12:36.684 [main] WARN de.ohnes.logger.MyElasticsearchClient - Couldn't reach ES Server. Saving data locally until next try.
12:12:36.692 [main] INFO de.ohnes.App - Reading test file from /testfiles/TestInstance copy 3.json
12:12:36.692 [main] INFO de.ohnes.DualApproximationFramework - Starting dual approximation Framework with shelvesAlgo: CpuGpuApproach
12:12:36.707 [main] INFO de.ohnes.App - Ran instance with 3 machines and 5 jobs in 15 milliseconds.
12:12:36.707 [main] INFO de.ohnes.App - Computed Schedule:
////////////////////////////0000////////////////////////////
///////////////////////0001///////////////////////
/////////////////////////////////0002/////////////////////////////////##################0004##################
/////////////////////////////////0002/////////////////////////////////
////////////////////////////0003////////////////////////////
...
```
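
The `WARN` line above only means that no Elasticsearch server was reachable, so the results are kept locally until the next push attempt. If you have an Elasticsearch instance running, point the container at it via the `ES_*` variables; the host below is illustrative (`host.docker.internal` resolves to the Docker host on Docker Desktop):
```
docker run \
  -e ES_HOST=host.docker.internal -e ES_PORT=9200 -e ES_INDEX=hpc-2024 \
  -e TEST_FILE_PATH=/testfiles -v $(pwd)/TestInstances:/testfiles:ro \
  malleable
```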

# Execution with docker-compose
Hint: before running docker-compose, the images need to be built with Docker. For this, see [Building docker images](#building-docker-images).

@@ -37,5 +73,19 @@ The docker image can be built with the following command.
```
docker build -t malleable .
```
With the current configuration of the `Dockerfile` the `target/bachelorarbeit-1.0-SNAPSHOT-jar-with-dependencies.jar` will be used, so make sure to call `mvn package` before.
The other configuration in this file are only defaults and can be changed in the `docker-compose.yml`.
The `Dockerfile` is written as a multi-stage Dockerfile: the jar is built in a first stage, and only the result is copied into the runtime image. Therefore there is no need to build the Maven project first or to make sure you have the correct Maven/Java version installed. **Just build the Docker container and go!** A rough sketch of the two-stage structure is shown below.
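
This sketch is abridged and partly assumed (the build-stage base image, `WORKDIR`, and `COPY` layout are illustrative); the project's actual `Dockerfile` is authoritative:
```
# stage 1 (build): compile the fat jar with Maven
FROM maven:3-openjdk-11 AS build
WORKDIR /src
COPY . .
RUN mvn package -DskipTests

# stage 2 (runtime): only a JRE and the packaged jar
FROM adoptopenjdk/openjdk11:latest
COPY --from=build /src/target/cpu-gpu-scheduling-1.0-SNAPSHOT-jar-with-dependencies.jar /app/SchedulingAlgorithms.jar
ENTRYPOINT ["java", "-jar", "/app/SchedulingAlgorithms.jar"]
```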

# Configuration for testing

All parameters are tunable via environment variables; the defaults live in the `Dockerfile` and can be overridden per run or in `docker-compose.yml`. The code supports randomly generated instances, but it can also read user-specified `.json` files containing an instance (format as in `./TestInstances/`). An example of overriding a few parameters at run time is shown below.
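
A minimal sketch, assuming the image was built as `malleable` as in the Quick Start; the chosen values are purely illustrative:
```
docker run \
  -e EPSILON=0.05 \
  -e NB_TESTS=100 \
  -e INSTANCE_MINJOBS=20 -e INSTANCE_MAXJOBS=40 \
  -e INSTANCE_MINMACHINES=5 -e INSTANCE_MAXMACHINES=10 \
  malleable
```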


# A couple of Maven commands

Once you have configured the project in your IDE, you can build it from there. However, if you prefer, you can use Maven from the command line. In that case, this short list of commands may be of interest:

* `mvn compile`: compiles the application code and reports any compilation errors
* `mvn test`: compiles the application code and the tests, then runs the tests (if you wrote any) and reports any failures
* `mvn install`: does everything `mvn test` does and, if everything looks fine, installs the library or application into your local Maven repository (typically under `<USER FOLDER>/.m2`), so that other projects built on the same machine can use it

If you need more information, please take a look at this [quick tutorial](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html).
15 changes: 8 additions & 7 deletions docker-compose.yml
@@ -1,15 +1,16 @@
services:
scheduling:
image: cpugpu:1.0.0
image: cpugpu:1.1.0
environment:
- EPSILON=0.1
- INSTANCE_MINJOBS=10
- INSTANCE_MAXJOBS=50
- INSTANCE_MINMACHINES=5
- INSTANCE_MAXMACHINES=30
- INSTANCE_MINJOBS=50
- INSTANCE_MAXJOBS=120
- INSTANCE_MINMACHINES=30
- INSTANCE_MAXMACHINES=35
- ES_HOST=192.168.178.101
- ES_INDEX=cpugpu-0.1
- ES_PORT=9200
- ES_INDEX=cpugpu2-jobs-2024-01
# depends_on:
# elasticsearch:
# condition: service_healthy
restart: always
restart: unless-stopped
143 changes: 123 additions & 20 deletions src/main/java/de/ohnes/AlgorithmicComponents/Knapsack/MDKnapsack.java
@@ -1,5 +1,6 @@
package de.ohnes.AlgorithmicComponents.Knapsack;

import java.util.HashMap;
import java.util.List;

import de.ohnes.util.Job;
@@ -12,11 +13,26 @@
* It provides a method to solve the problem and allocate jobs to different shelves based on their weights and costs.
*/
public class MDKnapsack {
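// Orientation: the solver implements a multiple-choice knapsack DP. For items
// 1..n, where item i offers several choices c with weight vector w_c and cost
// cost_c, it fills
//     dp[i][x] = min over c of ( dp[i-1][x - w_c] + cost_c ),
// first for the "big" items over three capacity dimensions, then for the
// "small" items over two dimensions, and finally walks both tables backwards
// to reconstruct the chosen allotment of every item.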
/**
* Solves a multi-dimensional (multiple-choice) knapsack problem and distributes
* the jobs according to the chosen allotments.
*
* @param smallItems the knapsack items built from the small jobs
* @param bigItems   the knapsack items built from the big jobs
* @param capacity   the three-dimensional knapsack capacity
* @param shelf1     output list, receives the jobs allotted to shelf 1
* @param shelf2     output list, receives the jobs allotted to shelf 2
* @param smallJobs  output list, receives the jobs allotted as small jobs
* @param seqJobs    output list, receives the jobs allotted as sequential jobs
*/
public void solve(List<MDKnapsackItem> smallItems, List<MDKnapsackItem> bigItems, Vector3D capacity, List<Job> shelf1, List<Job> shelf2, List<Job> smallJobs, List<Job> seqJobs) {

public void solve(List<MDKnapsackItem> items, Vector3D capacity, List<Job> shelf1, List<Job> shelf2, List<Job> smallJobs, List<Job> seqJobs) {
Double[][][][] dp = new Double[items.size()+1][capacity.get(0)+1][capacity.get(1)+1][capacity.get(2)+1];


int b = bigItems.size();
int s = smallItems.size();
int n = s + b;
//TODO: reduce 3rd dimension
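// note: the last two dimensions are deliberately swapped relative to `capacity`
// (dp is indexed [item][cap0][cap2][cap1]), so that the constraint capacity.get(1),
// which the small items do not consume, sits in the innermost dimension and can
// be dropped once the big items have been placed.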
Double[][][][] dp = new Double[b+1][capacity.get(0)+1][capacity.get(2)+1][capacity.get(1)+1];


//initialization
for (int x1 = 0; x1 < dp[0].length; x1++) {
for (int x2 = 0; x2 < dp[0][x1].length; x2++) {
@@ -25,20 +41,21 @@ public void solve(List<MDKnapsackItem> items, Vector3D capacity, List<Job> shelf
}
}
}

//acutal dp
for (int i = 1; i <= items.size(); i++) {
Integer[] costs = items.get(i-1).getCosts();
Vector3D[] weights = items.get(i-1).getWeights();
// first solve the knapsack problem for the big items
for (int i = 1; i <= bigItems.size(); i++) {
Integer[] costs = bigItems.get(i-1).getCosts();
Vector3D[] weights = bigItems.get(i-1).getWeights();
for (int x1 = 0; x1 < dp[0].length; x1++) {
for (int x2 = 0; x2 < dp[0][x1].length; x2++) {
for (int x3 = 0; x3 < dp[0][x1][x2].length; x3++) {
double minVal = Double.MAX_VALUE;
for (int c = 0; c < costs.length; c++) { //for the choices
Vector3D w = weights[c];
int x1_ = x1 - w.get(0);
int x2_ = x2 - w.get(1);
int x3_ = x3 - w.get(2);
int x2_ = x2 - w.get(2);
int x3_ = x3 - w.get(1);
if (x1_ < 0 || x2_ < 0 || x3_ < 0) {
continue;
}
@@ -57,28 +74,112 @@ public void solve(List<MDKnapsackItem> items, Vector3D capacity, List<Job> shelf
}
}

Vector3D minValue = new Vector3D(0, 0, 0);
double minCost = Double.MAX_VALUE;
//discard 2nd constraint, as the small items don't change it.

Double[][][] dp2 = new Double[s+1][capacity.get(0)+1][capacity.get(2)+1];
HashMap<String, Integer> map = new HashMap<>(); // a map to remember the position of the best solution for the big items
//initialization
for (int x1 = 0; x1 < dp[0].length; x1++) {
for (int x2 = 0; x2 < dp[0][x1].length; x2++) {
for (int x3 = 0; x3 < dp[0][x1][x2].length; x3++) {
if (dp[items.size()][x1][x2][x3] != null && dp[items.size()][x1][x2][x3] < minCost) {
minCost = dp[items.size()][x1][x2][x3];
minValue = new Vector3D(x1, x2, x3);
// find the best solution for the big items
// dp2[0][x1][x2] = Arrays.stream(dp[bigItems.size()][x1][x2]).filter(d -> d != null).min(Double::compare).orElse(null);
// remember the position of best solution
for (int i = 0; i < dp[b][x1][x2].length; i++) {
if (dp[b][x1][x2][i] != null) {
String key = x1 + "," + x2;
if (map.containsKey(key)) {
if (dp[b][x1][x2][i] < dp2[0][x1][x2]) {
map.put(key, i);
dp2[0][x1][x2] = dp[b][x1][x2][i];
}
} else {
map.put(key, i);
dp2[0][x1][x2] = dp[b][x1][x2][i];
}
}
}
}
}

//reconstruction
for (int i = items.size(); i > 0; i--) {
MDKnapsackItem item = items.get(i - 1);
//acutal dp
// solve the knapsack problem for the remaining small items
for (int i = 1; i <= s; i++) {
Integer[] costs = smallItems.get(i-1).getCosts();
Vector3D[] weights = smallItems.get(i-1).getWeights();
for (int x1 = 0; x1 < dp2[0].length; x1++) {
for (int x2 = 0; x2 < dp2[0][x1].length; x2++) {
double minVal = Double.MAX_VALUE;
for (int c = 0; c < costs.length; c++) { //for the choices
Vector3D w = weights[c];
int x1_ = x1 - w.get(0);
int x2_ = x2 - w.get(2);
if (x1_ < 0 || x2_ < 0) {
continue;
}
if (dp2[i-1][x1_][x2_] == null) {
continue;
}
if (dp2[i-1][x1_][x2_] + costs[c] < minVal) {
minVal = dp2[i-1][x1_][x2_] + costs[c];
}
}
if (minVal < Double.MAX_VALUE) {
dp2[i][x1][x2] = minVal;
}
}
}
}


Vector3D minValue = new Vector3D(0, 0, 0);
double minCost = Double.MAX_VALUE;
for (int x1 = 0; x1 < dp2[0].length; x1++) {
for (int x2 = 0; x2 < dp2[0][x1].length; x2++) {
if (dp2[s][x1][x2] != null && dp2[s][x1][x2] < minCost) {
minCost = dp2[s][x1][x2];
minValue = new Vector3D(x1, 0, x2);
}
}
}
minValue.set(1, map.get(minValue.get(0) + "," + minValue.get(2))); // set the 2nd dimension to the best solution for the big items
//reconstruction for small items
for (int i = s; i > 0; i--) {
MDKnapsackItem item = smallItems.get(i - 1);
for (KnapsackChoice choice : item.getChoices()) {
Vector3D newWeight = minValue.subtract(choice.getWeight());
if (newWeight.get(0) < 0 || newWeight.get(1) < 0 || newWeight.get(2) < 0) {
continue;
}
if (dp2[i-1][newWeight.get(0)][newWeight.get(2)] != null) {
switch (choice.getAllotment()) {
case SMALL:
smallJobs.add(item.getJob());
break;
case SEQUENTIAL:
seqJobs.add(item.getJob());
break;
case SHELF1:
shelf1.add(item.getJob());
break;
case SHELF2:
shelf2.add(item.getJob());
break;
}
minValue = newWeight;
break; //break out of loop as soon as some allotment was found.
}
}
}

//reconstruction for big items
for (int i = b; i > 0; i--) {
MDKnapsackItem item = bigItems.get(i - 1);
for (KnapsackChoice choice : item.getChoices()) {
Vector3D newWeight = minValue.subtract(choice.getWeight());
if (newWeight.get(0) < 0 || newWeight.get(1) < 0 || newWeight.get(2) < 0) {
continue;
}
if (dp[i-1][newWeight.get(0)][newWeight.get(1)][newWeight.get(2)] != null) {
if (dp[i-1][newWeight.get(0)][newWeight.get(2)][newWeight.get(1)] != null) {
switch (choice.getAllotment()) {
case SMALL:
smallJobs.add(item.getJob());
@@ -98,5 +199,7 @@ public void solve(List<MDKnapsackItem> items, Vector3D capacity, List<Job> shelf
}
}
}
// at the end we should arrive at 0.0
assert dp[0][minValue.get(0)][minValue.get(2)][minValue.get(1)] == 0.0;
}
}