summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDante Niewenhuis <d.niewenhuis@hotmail.com>2024-11-05 14:17:08 +0100
committerGitHub <noreply@github.com>2024-11-05 14:17:08 +0100
commit0f835d57b0e989e25aa0b71fe374a0fb1a94e86f (patch)
tree8d777634dbb8a3e03b14ecc4208228f92127f4b5
parentf3e578a2a43c99997dbf35e09debfde255a4ae22 (diff)
Documentation update (#261)
* Updated a lot of documentation, added a new get-started tutorial. * Applied Spotless * Applied Spotless Java * Added bitbrains workload to site
-rw-r--r--opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceLoader.kt16
-rw-r--r--opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceReader.kt6
-rw-r--r--opendc-experiments/opendc-experiments-base/build.gradle.kts6
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentFactories.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentFactories.kt)6
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentReader.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentReader.kt)4
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentWriter.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentWriter.kt)4
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/Scenario.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/Scenario.kt)14
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/AllocationPolicySpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/AllocationPolicySpec.kt)2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/CheckpointModelSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/CheckpointModelSpec.kt)2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ExperimentSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ExperimentSpec.kt)8
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ExportModelSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ExportModelSpec.kt)2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/FailureModelSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/FailureModelSpec.kt)2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/PowerModelSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/PowerModelSpec.kt)2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ScenarioSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioSpec.kt)2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ScenarioTopologySpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioTopologySpec.kt)2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/WorkloadSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/WorkloadSpec.kt)2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentCli.kt2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentRunner.kt2
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioReplayer.kt4
-rw-r--r--opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt4
-rw-r--r--opendc-experiments/opendc-experiments-base/src/test/kotlin/org/opendc/experiments/base/ScenarioIntegrationTest.kt2
-rw-r--r--opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SACli.kt2
-rw-r--r--opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SARunner.kt2
-rw-r--r--opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/scenario/M3SAFactories.kt2
-rw-r--r--opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonFragment.java (renamed from opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonFragmentNew.java)10
-rw-r--r--opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonModel.java38
-rw-r--r--opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/SimPowerSource.java3
-rw-r--r--site/docs/documentation/Input/Experiment.md175
-rw-r--r--site/docs/documentation/Input/ExperimentSchema.md (renamed from site/docs/documentation/Input/ScenarioSchema.md)0
-rw-r--r--site/docs/documentation/Input/FailureModel.md (renamed from site/docs/documentation/Input/FailureModels.md)58
-rw-r--r--site/docs/documentation/Input/Scenario.md125
-rw-r--r--site/docs/documentation/Input/Topology.md61
-rw-r--r--site/docs/documentation/Input/Traces.md26
-rw-r--r--site/docs/documentation/Input/Workload.md24
-rw-r--r--site/docs/documentation/Output.md73
-rw-r--r--site/docs/getting-started/0-installation.md40
-rw-r--r--site/docs/getting-started/1-first-experiment.md200
-rw-r--r--site/docs/getting-started/documents/experiments/simple_experiment.json13
-rw-r--r--site/docs/getting-started/documents/topologies/big.json59
-rw-r--r--site/docs/getting-started/documents/topologies/small.json22
-rw-r--r--site/docs/getting-started/documents/workloads/bitbrains-small.zipbin0 -> 573038 bytes
-rw-r--r--site/old_tutorials/0-installation.md31
-rw-r--r--site/old_tutorials/1-design.mdx (renamed from site/docs/getting-started/1-design.mdx)0
-rw-r--r--site/old_tutorials/2-experiment.mdx (renamed from site/docs/getting-started/2-experiment.mdx)0
-rw-r--r--site/old_tutorials/3-whats-next.md12
-rw-r--r--site/old_tutorials/_category_.json8
46 files changed, 752 insertions, 326 deletions
diff --git a/opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceLoader.kt b/opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceLoader.kt
index ccf1d81c..104abdca 100644
--- a/opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceLoader.kt
+++ b/opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceLoader.kt
@@ -22,7 +22,7 @@
package org.opendc.compute.carbon
-import org.opendc.simulator.compute.power.CarbonFragmentNew
+import org.opendc.simulator.compute.power.CarbonFragment
import org.opendc.trace.Trace
import org.opendc.trace.conv.CARBON_INTENSITY_TIMESTAMP
import org.opendc.trace.conv.CARBON_INTENSITY_VALUE
@@ -41,14 +41,14 @@ public class CarbonTraceLoader {
/**
* The cache of workloads.
*/
- private val cache = ConcurrentHashMap<String, SoftReference<List<CarbonFragmentNew>>>()
+ private val cache = ConcurrentHashMap<String, SoftReference<List<CarbonFragment>>>()
private val builder = CarbonFragmentNewBuilder()
/**
* Read the metadata into a workload.
*/
- private fun parseCarbon(trace: Trace): List<CarbonFragmentNew> {
+ private fun parseCarbon(trace: Trace): List<CarbonFragment> {
val reader = checkNotNull(trace.getTable(TABLE_CARBON_INTENSITIES)).newReader()
val startTimeCol = reader.resolve(CARBON_INTENSITY_TIMESTAMP)
@@ -77,7 +77,7 @@ public class CarbonTraceLoader {
/**
* Load the trace with the specified [name] and [format].
*/
- public fun get(pathToFile: File): List<CarbonFragmentNew> {
+ public fun get(pathToFile: File): List<CarbonFragment> {
val trace = Trace.open(pathToFile, "carbon")
return parseCarbon(trace)
@@ -97,7 +97,7 @@ public class CarbonTraceLoader {
/**
* The total load of the trace.
*/
- public val fragments: MutableList<CarbonFragmentNew> = mutableListOf()
+ public val fragments: MutableList<CarbonFragment> = mutableListOf()
/**
* Add a fragment to the trace.
@@ -110,7 +110,11 @@ public class CarbonTraceLoader {
carbonIntensity: Double,
) {
fragments.add(
- CarbonFragmentNew(startTime.toEpochMilli(), Long.MAX_VALUE, carbonIntensity),
+ CarbonFragment(
+ startTime.toEpochMilli(),
+ Long.MAX_VALUE,
+ carbonIntensity,
+ ),
)
}
diff --git a/opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceReader.kt b/opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceReader.kt
index 0b2b07a1..b308ed21 100644
--- a/opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceReader.kt
+++ b/opendc-compute/opendc-compute-carbon/src/main/kotlin/org/opendc/compute/carbon/CarbonTraceReader.kt
@@ -24,14 +24,14 @@
package org.opendc.compute.carbon
-import org.opendc.simulator.compute.power.CarbonFragmentNew
+import org.opendc.simulator.compute.power.CarbonFragment
import java.io.File
import javax.management.InvalidAttributeValueException
/**
* Construct a workload from a trace.
*/
-public fun getCarbonFragments(pathToFile: String?): List<CarbonFragmentNew>? {
+public fun getCarbonFragments(pathToFile: String?): List<CarbonFragment>? {
if (pathToFile == null) {
return null
}
@@ -42,7 +42,7 @@ public fun getCarbonFragments(pathToFile: String?): List<CarbonFragmentNew>? {
/**
* Construct a workload from a trace.
*/
-public fun getCarbonFragments(file: File): List<CarbonFragmentNew> {
+public fun getCarbonFragments(file: File): List<CarbonFragment> {
if (!file.exists()) {
throw InvalidAttributeValueException("The carbon trace cannot be found")
}
diff --git a/opendc-experiments/opendc-experiments-base/build.gradle.kts b/opendc-experiments/opendc-experiments-base/build.gradle.kts
index d8921ffb..36073418 100644
--- a/opendc-experiments/opendc-experiments-base/build.gradle.kts
+++ b/opendc-experiments/opendc-experiments-base/build.gradle.kts
@@ -52,8 +52,8 @@ dependencies {
val createScenarioApp by tasks.creating(CreateStartScripts::class) {
dependsOn(tasks.jar)
- applicationName = "OpenDCScenarioRunner"
- mainClass.set("org.opendc.experiments.base.runner.ScenarioCli")
+ applicationName = "OpenDCExperimentRunner"
+ mainClass.set("org.opendc.experiments.base.runner.ExperimentCli")
classpath = tasks.jar.get().outputs.files + configurations["runtimeClasspath"]
outputDir = project.buildDir.resolve("scripts")
}
@@ -61,7 +61,7 @@ val createScenarioApp by tasks.creating(CreateStartScripts::class) {
// Create custom Scenario distribution
distributions {
main {
- distributionBaseName.set("OpenDCScenarioRunner")
+ distributionBaseName.set("OpenDCExperimentRunner")
contents {
from("README.md")
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentFactories.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentFactories.kt
index 524d4219..5d158ea3 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentFactories.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentFactories.kt
@@ -20,10 +20,10 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario
+package org.opendc.experiments.base.experiment
-import org.opendc.experiments.base.scenario.specs.ExperimentSpec
-import org.opendc.experiments.base.scenario.specs.ScenarioSpec
+import org.opendc.experiments.base.experiment.specs.ExperimentSpec
+import org.opendc.experiments.base.experiment.specs.ScenarioSpec
import java.io.File
private val experimentReader = ExperimentReader()
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentReader.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentReader.kt
index 8ed60b08..12127644 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentReader.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentReader.kt
@@ -20,13 +20,13 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario
+package org.opendc.experiments.base.experiment
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.decodeFromStream
import org.opendc.compute.simulator.telemetry.parquet.ComputeExportConfig
-import org.opendc.experiments.base.scenario.specs.ExperimentSpec
+import org.opendc.experiments.base.experiment.specs.ExperimentSpec
import java.io.File
import java.io.InputStream
import java.nio.file.Path
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentWriter.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentWriter.kt
index 6afe6031..73331fe2 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ExperimentWriter.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/ExperimentWriter.kt
@@ -20,11 +20,11 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario
+package org.opendc.experiments.base.experiment
import kotlinx.serialization.encodeToString
import kotlinx.serialization.json.Json
-import org.opendc.experiments.base.scenario.specs.ScenarioSpec
+import org.opendc.experiments.base.experiment.specs.ScenarioSpec
import java.io.File
/**
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/Scenario.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/Scenario.kt
index e62669e4..a99bd061 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/Scenario.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/Scenario.kt
@@ -20,15 +20,15 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario
+package org.opendc.experiments.base.experiment
import org.opendc.compute.simulator.telemetry.parquet.ComputeExportConfig
-import org.opendc.experiments.base.scenario.specs.AllocationPolicySpec
-import org.opendc.experiments.base.scenario.specs.CheckpointModelSpec
-import org.opendc.experiments.base.scenario.specs.ExportModelSpec
-import org.opendc.experiments.base.scenario.specs.FailureModelSpec
-import org.opendc.experiments.base.scenario.specs.ScenarioTopologySpec
-import org.opendc.experiments.base.scenario.specs.WorkloadSpec
+import org.opendc.experiments.base.experiment.specs.AllocationPolicySpec
+import org.opendc.experiments.base.experiment.specs.CheckpointModelSpec
+import org.opendc.experiments.base.experiment.specs.ExportModelSpec
+import org.opendc.experiments.base.experiment.specs.FailureModelSpec
+import org.opendc.experiments.base.experiment.specs.ScenarioTopologySpec
+import org.opendc.experiments.base.experiment.specs.WorkloadSpec
/**
* A data class representing a scenario for a set of experiments.
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/AllocationPolicySpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/AllocationPolicySpec.kt
index ddc11a50..0bd3d476 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/AllocationPolicySpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/AllocationPolicySpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
import kotlinx.serialization.Serializable
import org.opendc.compute.simulator.scheduler.ComputeSchedulerEnum
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/CheckpointModelSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/CheckpointModelSpec.kt
index ad0fba1d..47c3eb70 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/CheckpointModelSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/CheckpointModelSpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
import kotlinx.serialization.Serializable
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ExperimentSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ExperimentSpec.kt
index 7805ed2b..6d8c8ebf 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ExperimentSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ExperimentSpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
import kotlinx.serialization.Serializable
import org.opendc.common.logger.infoNewLine
@@ -50,14 +50,14 @@ public data class ExperimentSpec(
val outputFolder: String = "output",
val initialSeed: Int = 0,
val runs: Int = 1,
+ val exportModels: Set<ExportModelSpec> = setOf(ExportModelSpec()),
+ val computeExportConfig: ComputeExportConfig = ComputeExportConfig.ALL_COLUMNS,
+ val maxNumFailures: Set<Int> = setOf(10),
val topologies: Set<ScenarioTopologySpec>,
val workloads: Set<WorkloadSpec>,
val allocationPolicies: Set<AllocationPolicySpec> = setOf(AllocationPolicySpec()),
- val exportModels: Set<ExportModelSpec> = setOf(ExportModelSpec()),
val failureModels: Set<FailureModelSpec?> = setOf(null),
val checkpointModels: Set<CheckpointModelSpec?> = setOf(null),
- val computeExportConfig: ComputeExportConfig = ComputeExportConfig.ALL_COLUMNS,
- val maxNumFailures: Set<Int> = setOf(10),
) {
init {
require(runs > 0) { "The number of runs should always be positive" }
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ExportModelSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ExportModelSpec.kt
index d51de27b..62f1ea4b 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ExportModelSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ExportModelSpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
import kotlinx.serialization.Serializable
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/FailureModelSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/FailureModelSpec.kt
index c20b4467..520d7e3d 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/FailureModelSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/FailureModelSpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
/*
* Copyright (c) 2024 AtLarge Research
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/PowerModelSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/PowerModelSpec.kt
index f9679b26..a508d8c1 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/PowerModelSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/PowerModelSpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
import kotlinx.serialization.Serializable
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ScenarioSpec.kt
index b4f04c1c..b41eb37b 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ScenarioSpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
import kotlinx.serialization.Serializable
import org.opendc.compute.simulator.telemetry.parquet.ComputeExportConfig
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioTopologySpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ScenarioTopologySpec.kt
index feaca0c6..02c40af4 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioTopologySpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/ScenarioTopologySpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
import kotlinx.serialization.Serializable
import java.io.File
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/WorkloadSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/WorkloadSpec.kt
index 956e97f1..7f34c508 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/WorkloadSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/WorkloadSpec.kt
@@ -20,7 +20,7 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.scenario.specs
+package org.opendc.experiments.base.experiment.specs
import kotlinx.serialization.Serializable
import org.opendc.compute.workload.ComputeWorkload
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentCli.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentCli.kt
index 28ebe45c..e067bf45 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentCli.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentCli.kt
@@ -30,7 +30,7 @@ import com.github.ajalt.clikt.parameters.options.defaultLazy
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.file
import com.github.ajalt.clikt.parameters.types.int
-import org.opendc.experiments.base.scenario.getExperiment
+import org.opendc.experiments.base.experiment.getExperiment
import java.io.File
/**
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentRunner.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentRunner.kt
index 076cfb9f..0b45806b 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentRunner.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ExperimentRunner.kt
@@ -22,7 +22,7 @@
package org.opendc.experiments.base.runner
-import org.opendc.experiments.base.scenario.Scenario
+import org.opendc.experiments.base.experiment.Scenario
import java.util.concurrent.ForkJoinPool
/**
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioReplayer.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioReplayer.kt
index c82e2557..a0263e38 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioReplayer.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioReplayer.kt
@@ -35,8 +35,8 @@ import org.opendc.compute.simulator.TaskWatcher
import org.opendc.compute.simulator.service.ComputeService
import org.opendc.compute.simulator.service.ServiceTask
import org.opendc.compute.workload.Task
-import org.opendc.experiments.base.scenario.specs.FailureModelSpec
-import org.opendc.experiments.base.scenario.specs.createFailureModel
+import org.opendc.experiments.base.experiment.specs.FailureModelSpec
+import org.opendc.experiments.base.experiment.specs.createFailureModel
import java.time.InstantSource
import java.util.Random
import kotlin.coroutines.coroutineContext
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
index d525e066..4d6069e4 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
@@ -33,8 +33,8 @@ import org.opendc.compute.simulator.service.ComputeService
import org.opendc.compute.simulator.telemetry.parquet.ParquetComputeMonitor
import org.opendc.compute.topology.clusterTopology
import org.opendc.compute.workload.ComputeWorkloadLoader
-import org.opendc.experiments.base.scenario.Scenario
-import org.opendc.experiments.base.scenario.specs.getWorkloadType
+import org.opendc.experiments.base.experiment.Scenario
+import org.opendc.experiments.base.experiment.specs.getWorkloadType
import org.opendc.simulator.kotlin.runSimulation
import java.io.File
import java.time.Duration
diff --git a/opendc-experiments/opendc-experiments-base/src/test/kotlin/org/opendc/experiments/base/ScenarioIntegrationTest.kt b/opendc-experiments/opendc-experiments-base/src/test/kotlin/org/opendc/experiments/base/ScenarioIntegrationTest.kt
index 9fa1a09a..132ed7b5 100644
--- a/opendc-experiments/opendc-experiments-base/src/test/kotlin/org/opendc/experiments/base/ScenarioIntegrationTest.kt
+++ b/opendc-experiments/opendc-experiments-base/src/test/kotlin/org/opendc/experiments/base/ScenarioIntegrationTest.kt
@@ -45,8 +45,8 @@ import org.opendc.compute.workload.ComputeWorkloadLoader
import org.opendc.compute.workload.Task
import org.opendc.compute.workload.sampleByLoad
import org.opendc.compute.workload.trace
+import org.opendc.experiments.base.experiment.specs.TraceBasedFailureModelSpec
import org.opendc.experiments.base.runner.replay
-import org.opendc.experiments.base.scenario.specs.TraceBasedFailureModelSpec
import org.opendc.simulator.kotlin.runSimulation
import java.io.File
import java.util.Random
diff --git a/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SACli.kt b/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SACli.kt
index 8036e5b7..4fe58d88 100644
--- a/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SACli.kt
+++ b/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SACli.kt
@@ -30,8 +30,8 @@ import com.github.ajalt.clikt.parameters.options.defaultLazy
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.file
import com.github.ajalt.clikt.parameters.types.int
+import org.opendc.experiments.base.experiment.getExperiment
import org.opendc.experiments.base.runner.runExperiment
-import org.opendc.experiments.base.scenario.getExperiment
import org.opendc.experiments.m3sa.m3saAnalyze
import org.opendc.experiments.m3sa.scenario.getOutputFolder
import java.io.File
diff --git a/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SARunner.kt b/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SARunner.kt
index 89daf5f3..0068738a 100644
--- a/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SARunner.kt
+++ b/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/runner/M3SARunner.kt
@@ -24,9 +24,9 @@
package org.opendc.experiments.m3sa.runner
+import org.opendc.experiments.base.experiment.Scenario
import org.opendc.experiments.base.runner.runScenario
import org.opendc.experiments.base.runner.setupOutputFolderStructure
-import org.opendc.experiments.base.scenario.Scenario
import java.util.concurrent.ForkJoinPool
/**
diff --git a/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/scenario/M3SAFactories.kt b/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/scenario/M3SAFactories.kt
index a4df40e1..bb217f0d 100644
--- a/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/scenario/M3SAFactories.kt
+++ b/opendc-experiments/opendc-experiments-m3sa/src/main/kotlin/org/opendc/experiments/m3sa/scenario/M3SAFactories.kt
@@ -22,7 +22,7 @@
package org.opendc.experiments.m3sa.scenario
-import org.opendc.experiments.base.scenario.ExperimentReader
+import org.opendc.experiments.base.experiment.ExperimentReader
import java.io.File
private val experimentReader = ExperimentReader()
diff --git a/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonFragmentNew.java b/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonFragment.java
index 78281a77..2563a61d 100644
--- a/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonFragmentNew.java
+++ b/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonFragment.java
@@ -22,12 +22,16 @@
package org.opendc.simulator.compute.power;
-public class CarbonFragmentNew {
- private long endTime;
+/**
+ * An object holding the carbon intensity during a specific time frame.
+ * Used by {@link CarbonModel}.
+ */
+public class CarbonFragment {
private long startTime;
+ private long endTime;
private double carbonIntensity;
- public CarbonFragmentNew(long startTime, long endTime, double carbonIntensity) {
+ public CarbonFragment(long startTime, long endTime, double carbonIntensity) {
this.setStartTime(startTime);
this.setEndTime(endTime);
this.setCarbonIntensity(carbonIntensity);
diff --git a/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonModel.java b/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonModel.java
index 87ced77a..98ef2b72 100644
--- a/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonModel.java
+++ b/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/CarbonModel.java
@@ -26,26 +26,32 @@ import java.util.List;
import org.opendc.simulator.engine.FlowGraph;
import org.opendc.simulator.engine.FlowNode;
+/**
+ * CarbonModel used to provide the Carbon Intensity of a {@link SimPowerSource}
+ * A CarbonModel is based on a list of {@link CarbonFragment} that define the carbon intensity at specific time frames.
+ */
public class CarbonModel extends FlowNode {
private SimPowerSource powerSource;
private long startTime = 0L; // The absolute timestamp on which the workload started
- private List<CarbonFragmentNew> fragments;
- private CarbonFragmentNew current_fragment;
+ private List<CarbonFragment> fragments;
+ private CarbonFragment current_fragment;
private int fragment_index;
+
/**
- * Construct a new {@link FlowNode} instance.
+ * Construct a CarbonModel
*
- * @param parentGraph The {@link FlowGraph} this stage belongs to.
+ * @param parentGraph The active FlowGraph which should be used to make the new FlowNode
+ * @param powerSource The Power Source which should be updated with the carbon intensity
+ * @param carbonFragments A list of Carbon Fragments defining the carbon intensity at different time frames
+ * @param startTime The start time of the simulation. This is used to go from relative time (used by the clock)
+ * to absolute time (used by carbon fragments).
*/
public CarbonModel(
- FlowGraph parentGraph,
- SimPowerSource powerSource,
- List<CarbonFragmentNew> carbonFragments,
- long startTime) {
+ FlowGraph parentGraph, SimPowerSource powerSource, List<CarbonFragment> carbonFragments, long startTime) {
super(parentGraph);
this.powerSource = powerSource;
@@ -62,27 +68,31 @@ public class CarbonModel extends FlowNode {
}
/**
- * Convert the given time to the absolute time by adding the start of workload
- *
- * @param time
+ * Convert the given relative time to the absolute time by adding the start of workload
*/
private long getAbsoluteTime(long time) {
return time + startTime;
}
+ /**
+ * Convert the given absolute time to the relative time by subtracting the start of workload
+ */
private long getRelativeTime(long time) {
return time - startTime;
}
- private void findCorrectFragment(long absolute_time) {
+ /**
+ * Traverse the fragments to find the fragment that matches the given absoluteTime
+ */
+ private void findCorrectFragment(long absoluteTime) {
// Traverse to the previous fragment, until you reach the correct fragment
- while (absolute_time < this.current_fragment.getStartTime()) {
+ while (absoluteTime < this.current_fragment.getStartTime()) {
this.current_fragment = fragments.get(--this.fragment_index);
}
// Traverse to the next fragment, until you reach the correct fragment
- while (absolute_time >= this.current_fragment.getEndTime()) {
+ while (absoluteTime >= this.current_fragment.getEndTime()) {
this.current_fragment = fragments.get(++this.fragment_index);
}
}
diff --git a/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/SimPowerSource.java b/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/SimPowerSource.java
index 03d54ad3..2c953d06 100644
--- a/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/SimPowerSource.java
+++ b/opendc-simulator/opendc-simulator-compute/src/main/java/org/opendc/simulator/compute/power/SimPowerSource.java
@@ -100,8 +100,7 @@ public final class SimPowerSource extends FlowNode implements FlowSupplier {
// Constructors
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- public SimPowerSource(
- FlowGraph graph, double max_capacity, List<CarbonFragmentNew> carbonFragments, long startTime) {
+ public SimPowerSource(FlowGraph graph, double max_capacity, List<CarbonFragment> carbonFragments, long startTime) {
super(graph);
this.capacity = max_capacity;
diff --git a/site/docs/documentation/Input/Experiment.md b/site/docs/documentation/Input/Experiment.md
new file mode 100644
index 00000000..c8b96d1f
--- /dev/null
+++ b/site/docs/documentation/Input/Experiment.md
@@ -0,0 +1,175 @@
+When using OpenDC, an experiment defines what should be run, and how. An experiment consists of one or more scenarios,
+each defining a different simulation to run. Scenarios can differ in many things, such as the topology that is used,
+the workload that is run, or the policies that are used to name a few. An experiment is defined using a JSON file.
+In this page, we will discuss how to properly define experiments for OpenDC.
+
+:::info Code
+All code related to reading and processing Experiment files can be found [here](https://github.com/atlarge-research/opendc/tree/master/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment)
+
+The code used to run a given experiment can be found [here](https://github.com/atlarge-research/opendc/tree/master/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner)
+:::
+
+## Schema
+
+The schema for the experiment file is provided in [schema](ExperimentSchema)
+In the following section, we describe the different components of the schema.
+Some components of an experiment are not single values, but lists. This is used to run multiple scenarios using
+a single experiment file. OpenDC will execute all permutations of the different values.
+This means that if all list based values have a single value, only one Scenario will be run.
+
+| Variable | Type | Required? | Default | Description |
+|---------------------|----------------------------------------------|-----------|----------|-------------------------------------------------------------------|
+| name | string | no | "" | Name of the scenario, used for identification and referencing. |
+| outputFolder | string | no | "output" | Directory where the simulation outputs will be stored. |
+| initialSeed | integer | no | 0 | Seed used for random number generation to ensure reproducibility. |
+| runs | integer | no | 1 | Number of times the scenario should be run. |
+| exportModels | List[[ExportModel](#exportmodel)] | no | Default | Specifications for exporting data from the simulation. |
+| computeExportConfig | [ComputeExportConfig](#computeexportconfig)  | no        | Default  | The features that should be exported during the simulation        |
+| maxNumFailures | List[integer] | no | [10] | The max number of times a task can fail before being terminated. |
+| topologies | List[[Topology](#topology)] | yes | N/A | List of topologies used in the scenario. |
+| workloads | List[[Workload](#workload)] | yes | N/A | List of workloads to be executed within the scenario. |
+| allocationPolicies | List[[AllocationPolicy](#allocation-policy)] | yes | N/A | Allocation policies used for resource management in the scenario. |
+| failureModels | List[[FailureModel](#failuremodel)] | no | Default | List of failure models to simulate various types of failures. |
+| checkpointModels    | List[[CheckpointModel](#checkpointmodel)]    | no        | null     | The checkpoint models used to create snapshots of running tasks.  |
+| carbonTracePaths | List[string] | no | null | Paths to carbon footprint trace files. |
+
+
+Many of the input fields of the experiment file are complex objects themselves. Next, we will describe the required input
+type of each of these fields.
+
+### ExportModel
+
+| Variable | Type | Required? | Default | Description |
+|----------------|-------|-----------|---------|---------------------------------------------|
+| exportInterval | Int64 | no | 300 | The duration between two exports in seconds |
+
+
+### ComputeExportConfig
+The features that should be exported by OpenDC
+
+| Variable | Type | Required? | Default | Description |
+|--------------------------|--------------|-----------|--------------|-----------------------------------------------------------------------|
+| hostExportColumns | List[String] | no | All features | The features that should be exported to the host output file. |
+| taskExportColumns | List[String] | no | All features | The features that should be exported to the task output file. |
+| powerSourceExportColumns | List[String] | no | All features | The features that should be exported to the power source output file. |
+| serviceExportColumns | List[String] | no | All features | The features that should be exported to the service output file. |
+
+
+### Topology
+Defines the topology on which the workload will be run.
+
+:::info
+For more information about the Topology go [here](Topology)
+:::
+
+| Variable | Type | Required? | Default | Description |
+|-------------|--------|-----------|---------|---------------------------------------------------------------------|
+| pathToFile | string | yes | N/A | Path to the JSON file defining the topology. |
+
+### Workload
+Defines the workload that needs to be executed.
+
+:::info
+For more information about workloads go [here](Workload)
+:::
+
+| Variable | Type | Required? | Default | Description |
+|-------------|--------|-----------|---------|---------------------------------------------------------------------|
+| pathToFile | string | yes | N/A | Path to the file containing the workload trace. |
+| type | string | yes | N/A | Type of the workload (e.g., "ComputeWorkload"). |
+
+### Allocation Policy
+Defines the allocation policy that should be used to decide on which host each task should be executed
+
+:::info Code
+The different allocation policies that can be used can be found [here](https://github.com/atlarge-research/opendc/blob/master/opendc-compute/opendc-compute-simulator/src/main/kotlin/org/opendc/compute/simulator/scheduler/ComputeSchedulers.kt)
+:::
+
+| Variable | Type | Required? | Default | Description |
+|------------|--------|-----------|---------|----------------------------|
+| policyType | string | yes | N/A | Type of allocation policy. |
+
+### FailureModel
+The failure model that should be used during the simulation
+See [FailureModels](FailureModel) for detailed instructions.
+
+### CheckpointModel
+The checkpoint model that should be used to create snapshots.
+
+| Variable | Type | Required? | Default | Description |
+|---------------------------|--------|-----------|---------|---------------------------------------------------------------------------------------------------------------------|
+| checkpointInterval | Int64 | no | 3600000 | The time between checkpoints in ms |
+| checkpointDuration | Int64 | no | 300000 | The time to create a snapshot in ms |
+| checkpointIntervalScaling | Double | no        | 1.0     | The scaling of the checkpointInterval after each successful checkpoint. The default of 1.0 means no scaling happens.  |
+
+
+## Examples
+In the following section, we discuss several examples of experiment files. Any experiment file can be verified using the
+JSON schema defined in [schema](ExperimentSchema).
+
+### Simple
+
+The simplest scenario that can be provided to OpenDC is shown below:
+```json
+{
+ "topologies": [
+ {
+ "pathToFile": "topologies/topology1.json"
+ }
+ ],
+ "workloads": [
+ {
+ "type": "ComputeWorkload",
+ "pathToFile": "traces/bitbrains-small"
+ }
+ ],
+ "allocationPolicies": [
+ {
+ "policyType": "Mem"
+ }
+ ]
+}
+```
+
+This scenario creates a simulation from file topology1, located in the topologies folder, with a workload trace from the
+bitbrains-small file, and an allocation policy of type Mem. The simulation is run once (by default), and the default
+name is "".
+
+### Complex
+Following is an example of a more complex topology:
+```json
+{
+ "topologies": [
+ {
+ "pathToFile": "topologies/topology1.json"
+ },
+ {
+ "pathToFile": "topologies/topology2.json"
+ },
+ {
+ "pathToFile": "topologies/topology3.json"
+ }
+ ],
+ "workloads": [
+ {
+ "pathToFile": "traces/bitbrains-small",
+ "type": "ComputeWorkload"
+ },
+ {
+ "pathToFile": "traces/bitbrains-large",
+ "type": "ComputeWorkload"
+ }
+ ],
+ "allocationPolicies": [
+ {
+ "policyType": "Mem"
+ },
+ {
+ "policyType": "Mem-Inv"
+ }
+ ]
+}
+```
+
+This scenario runs a total of 12 experiments. We have 3 topologies (3 datacenter configurations), each simulated with
+2 distinct workloads, each combined with both allocation policies (Mem and Mem-Inv), giving 3 x 2 x 2 = 12 scenarios.
diff --git a/site/docs/documentation/Input/ScenarioSchema.md b/site/docs/documentation/Input/ExperimentSchema.md
index 78ec55f7..78ec55f7 100644
--- a/site/docs/documentation/Input/ScenarioSchema.md
+++ b/site/docs/documentation/Input/ExperimentSchema.md
diff --git a/site/docs/documentation/Input/FailureModels.md b/site/docs/documentation/Input/FailureModel.md
index d62767f6..ecaf7c03 100644
--- a/site/docs/documentation/Input/FailureModels.md
+++ b/site/docs/documentation/Input/FailureModel.md
@@ -1,14 +1,18 @@
-OpenDC provides three types of failure models: [Trace-based](#trace-based-failure-models), [Sample-based](#sample-based-failure-models),
-and [Prefab](#prefab-failure-models).
+OpenDC provides three types of failure models: [Trace-based](#trace-based-failure-models), [Sample-based](#sample-based-failure-models),
+and [Prefab](#prefab-failure-models).
-All failure models have a similar structure containing three simple steps.
+All failure models have a similar structure containing three simple steps.
1. The _interval_ time determines the time between two failures.
2. The _duration_ time determines how long a single failure takes.
3. The _intensity_ determines how many hosts are effected by a failure.
-# Trace based failure models
-Trace-based failure models are defined by a parquet file. This file defines the interval, duration, and intensity of
+:::info Code
+The code that defines the Failure Models can be found [here](https://github.com/atlarge-research/opendc/blob/master/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/experiment/specs/FailureModelSpec.kt).
+:::
+
+## Trace based failure models
+Trace-based failure models are defined by a parquet file. This file defines the interval, duration, and intensity of
several failures. The failures defined in the file are looped. A valid failure model file follows the format defined below:
| Metric | Datatype | Unit | Summary |
@@ -17,7 +21,11 @@ several failures. The failures defined in the file are looped. A valid failure m
| failure_duration | int64 | milli seconds | The duration of the failure |
| failure_intensity | float64 | ratio | The ratio of hosts effected by the failure |
-## Schema
+:::info Code
+The code implementation of Trace Based Failure Models can be found [here](https://github.com/atlarge-research/opendc/blob/master/opendc-compute/opendc-compute-failure/src/main/kotlin/org/opendc/compute/failure/models/TraceBasedFailureModel.kt)
+:::
+
+### Example
A trace-based failure model is specified by setting "type" to "trace-based".
After, the user can define the path to the failure trace using "pathToFile":
```json
@@ -36,17 +44,21 @@ The "repeat" value can be set to false if the user does not want the failures to
}
```
-# Sample based failure models
-Sample based failure models sample from three distributions to get the _interval_, _duration_, and _intensity_ of
-each failure. Sample-based failure models are effected by randomness and will thus create different results based
-on the provided seed.
+## Sample based failure models
+Sample based failure models sample from three distributions to get the _interval_, _duration_, and _intensity_ of
+each failure. Sample-based failure models are affected by randomness and will thus create different results based
+on the provided seed.
+
+:::info Code
+The code implementation for the Sample based failure models can be found [here](https://github.com/atlarge-research/opendc/blob/master/opendc-compute/opendc-compute-failure/src/main/kotlin/org/opendc/compute/failure/models/SampleBasedFailureModel.kt)
+:::
-## Distributions
+### Distributions
OpenDC supports eight different distributions based on java's [RealDistributions](https://commons.apache.org/proper/commons-math/javadocs/api-3.6.1/org/apache/commons/math3/distribution/RealDistribution.html).
Because the different distributions require different variables, they have to be specified with a specific "type".
+Next, we show an example of a correct specification of all available distributions in OpenDC.
#### [ConstantRealDistribution](https://commons.apache.org/proper/commons-math/javadocs/api-3.6.1/org/apache/commons/math3/distribution/ConstantRealDistribution.html)
-A distribution that always returns the same value.
```json
{
@@ -71,7 +83,7 @@ A distribution that always returns the same value.
"scale": 0.5
}
```
-
+
#### [LogNormalDistribution](https://commons.apache.org/proper/commons-math/javadocs/api-3.6.1/org/apache/commons/math3/distribution/LogNormalDistribution.html)
```json
{
@@ -93,7 +105,7 @@ A distribution that always returns the same value.
#### [ParetoDistribution](https://commons.apache.org/proper/commons-math/javadocs/api-3.6.1/org/apache/commons/math3/distribution/ParetoDistribution.html)
```json
{
- "type": "constant",
+ "type": "pareto",
"scale": 1.0,
"shape": 0.6
}
@@ -117,9 +129,9 @@ A distribution that always returns the same value.
}
```
-## Schema
+### Example
A sample-based failure model is defined using three distributions for _intensity_, _duration_, and _intensity_.
-Distributions can be mixed however the user wants. Note, values for _intensity_ and _duration_ are clamped to be positive.
+Distributions can be mixed however the user wants. Note, values for _interval_ and _duration_ are clamped to be positive.
The _intensity_ is clamped to the range [0.0, 1.0).
To specify a sample-based failure model, the type needs to be set to "custom".
@@ -143,13 +155,13 @@ Example:
}
```
-# Prefab failure models
-The final type of failure models is the prefab models. These are models that are predefined in OpenDC and are based on
-research. Currently, OpenDC has 9 prefab models based on [The Failure Trace Archive: Enabling the comparison of failure measurements and models of distributed systems](https://www-sciencedirect-com.vu-nl.idm.oclc.org/science/article/pii/S0743731513000634)
+## Prefab failure models
+The final type of failure models is the prefab models. These are models that are predefined in OpenDC and are based on
+research. Currently, OpenDC has 9 prefab models based on [The Failure Trace Archive: Enabling the comparison of failure measurements and models of distributed systems](https://www-sciencedirect-com.vu-nl.idm.oclc.org/science/article/pii/S0743731513000634)
The figure below shows the values used to define the failure models.
![img.png](img.png)
-Each failure model is defined four times, on for each of the four distribution.
+Each failure model is defined four times, one for each of the four distributions.
The final list of available prefabs is thus:
G5k06Exp
@@ -189,7 +201,11 @@ The final list of available prefabs is thus:
Websites02LogN
Websites02Gam
-## Schema
+:::info Code
+The different Prefab models can be found [here](https://github.com/atlarge-research/opendc/tree/master/opendc-compute/opendc-compute-failure/src/main/kotlin/org/opendc/compute/failure/prefab)
+:::
+
+### Example
To specify a prefab model, the "type" needs to be set to "prefab".
After, the prefab can be defined with "prefabName":
diff --git a/site/docs/documentation/Input/Scenario.md b/site/docs/documentation/Input/Scenario.md
deleted file mode 100644
index ff7b9ffb..00000000
--- a/site/docs/documentation/Input/Scenario.md
+++ /dev/null
@@ -1,125 +0,0 @@
-The scenario of a simulation is defined using a JSON file. A scenario consists of one or more topologies, one or more
-workloads, one or more allocation policies, a name and a number of times the simulation is being run.
-
-## Schema
-
-The schema for the scenario file is provided in [schema](ScenarioSchema)
-In the following section, we describe the different components of the schema.
-
-### General Structure
-
-| Variable | Type | Required? | Default | Description |
-|----------------------|----------------------------------------------|-----------|-------|--------------------------------------------------------------------------|
-| name | string | no | "" | Name of the scenario, used for identification and referencing. |
-| topologies | List[[Topology](#topology)] | yes | N/A | List of topologies used in the scenario. |
-| workloads | List[[Workload](#workload)] | yes | N/A | List of workloads to be executed within the scenario. |
-| allocationPolicies | List[[AllocationPolicy](#allocation-policy)] | yes | N/A | Allocation policies used for resource management in the scenario. |
-| failureModels | List[[FailureModel](#failuremodel)] | no | empty | List of failure models to simulate various types of failures. |
-| exportModels | List[[ExportModel](#exportmodel)] | no | empty | Specifications for exporting data from the simulation. |
-| carbonTracePaths | List[string] | no | null | Paths to carbon footprint trace files. |
-| outputFolder | string | no | "output" | Directory where the simulation outputs will be stored. |
-| initialSeed | integer | no | 0 | Seed used for random number generation to ensure reproducibility. |
-| runs | integer | no | 1 | Number of times the scenario should be run. |
-
-### Topology
-
-| Variable | Type | Required? | Default | Description |
-|-------------|--------|-----------|---------|---------------------------------------------------------------------|
-| pathToFile | string | yes | N/A | Path to the JSON file defining the topology. |
-
-### Workload
-
-| Variable | Type | Required? | Default | Description |
-|-------------|--------|-----------|---------|---------------------------------------------------------------------|
-| pathToFile | string | yes | N/A | Path to the file containing the workload trace. |
-| type | string | yes | N/A | Type of the workload (e.g., "ComputeWorkload"). |
-
-### Allocation Policy
-
-| Variable | Type | Required? | Default | Description |
-|-------------|--------|-----------|---------|---------------------------------------------------------------------|
-| policyType | string | yes | N/A | Type of allocation policy (e.g., "BestFit", "FirstFit"). |
-
-### FailureModel
-
-| Variable | Type | Required? | Default | Description |
-|-------------|--------|-----------|---------|---------------------------------------------------------------------|
-| modelType | string | yes | N/A | Type of failure model to simulate specific operational failures. |
-
-### ExportModel
-
-| Variable | Type | Required? | Default | Description |
-|-------------|--------|-----------|---------|---------------------------------------------------------------------|
-| exportType | string | yes | N/A | Specifies the type of data export model for simulation results. |
-
-
-## Examples
-In the following section, we discuss several examples of Scenario files. Any scenario file can be verified using the
-JSON schema defined in [schema](TopologySchema).
-
-### Simple
-
-The simplest scneario that can be provided to OpenDC is shown below:
-```json
-{
- "topologies": [
- {
- "pathToFile": "topologies/topology1.json"
- }
- ],
- "workloads": [
- {
- "pathToFile": "traces/bitbrains-small",
- "type": "ComputeWorkload"
- }
- ],
- "allocationPolicies": [
- {
- "policyType": "Mem"
- }
- ]
-}
-```
-
-This scenario creates a simulation from file topology1, located in the topologies folder, with a workload trace from the
-bitbrains-small file, and an allocation policy of type Mem. The simulation is run once (by default), and the default
-name is "".
-
-### Complex
-Following is an example of a more complex topology:
-```json
-{
- "topologies": [
- {
- "pathToFile": "topologies/topology1.json"
- },
- {
- "pathToFile": "topologies/topology2.json"
- },
- {
- "pathToFile": "topologies/topology3.json"
- }
- ],
- "workloads": [
- {
- "pathToFile": "traces/bitbrains-small",
- "type": "ComputeWorkload"
- },
- {
- "pathToFile": "traces/bitbrains-large",
- "type": "ComputeWorkload"
- }
- ],
- "allocationPolicies": [
- {
- "policyType": "Mem"
- },
- {
- "policyType": "Mem-Inv"
- }
- ]
-}
-```
-
-This scenario runs a total of 12 experiments. We have 3 topologies (3 datacenter configurations), each simulated with
-2 distinct workloads, each using a different allocation policy (either Mem or Mem-Inv).
diff --git a/site/docs/documentation/Input/Topology.md b/site/docs/documentation/Input/Topology.md
index cf726616..0d2479bd 100644
--- a/site/docs/documentation/Input/Topology.md
+++ b/site/docs/documentation/Input/Topology.md
@@ -2,6 +2,11 @@ The topology of a datacenter is defined using a JSON file. A topology consist of
Each cluster consist of at least one host on which jobs can be executed. Each host consist of one or more CPUs,
a memory unit and a power model.
+:::info Code
+The code related to reading and processing topology files can be found [here](https://github.com/atlarge-research/opendc/tree/master/opendc-compute/opendc-compute-topology/src/main/kotlin/org/opendc/compute/topology)
+:::
+
+
## Schema
The schema for the topology file is provided in [schema](TopologySchema).
@@ -17,12 +22,12 @@ In the following section, we describe the different components of the schema.
### Host
-| variable | type | required? | default | description |
-|------------|-----------------------|-----------|---------|--------------------------------------------------------------------------------|
-| name | string | no | Host | The name of the host. This is only important for debugging and post-processing |
-| count | integer | no | 1 | The amount of hosts of this type are in the cluster |
-| cpuModel | [CPU](#cpuModel) | yes | N/A | The CPUs in the host |
-| memory | [Memory](#memory) | yes | N/A | The memory used by the host |
+| variable | type | required? | default | description |
+|-------------|-----------------------------|-----------|---------|--------------------------------------------------------------------------------|
+| name | string | no | Host | The name of the host. This is only important for debugging and post-processing |
+| count | integer | no | 1 | The amount of hosts of this type are in the cluster |
+| cpuModel | [CPU](#cpu) | yes | N/A | The CPUs in the host |
+| memory | [Memory](#memory) | yes | N/A | The memory used by the host |
| power model | [Power Model](#power-model) | yes | N/A | The power model used to determine the power draw of the host |
### CPU
@@ -49,12 +54,13 @@ In the following section, we describe the different components of the schema.
### Power Model
-| variable | type | Unit | required? | default | description |
-|-----------|---------|------|-----------|---------|----------------------------------------------------------------------------|
-| modelType | string | N/A | yes | N/A | The type of model used to determine power draw |
-| power | string | Watt | no | 400 | The constant power draw when using the 'constant' power model type in Watt |
-| maxPower | string | Watt | yes | N/A | The power draw of a host when using max capacity in Watt |
-| idlePower | integer | Watt | yes | N/A | The power draw of a host when idle in Watt |
+| variable | type | Unit | required? | default | description |
+|-----------------|--------|------|-----------|----------|-------------------------------------------------------------------------------|
+| vendor | string | N/A | yes | N/A | The type of model used to determine power draw |
+| modelName | string | N/A | yes | N/A | The type of model used to determine power draw |
+| arch | string | N/A | yes | N/A | The type of model used to determine power draw |
+| totalPower | Int64 | Watt | no | max long | The power draw of a host when using max capacity in Watt |
+| carbonTracePath | string | N/A | no | null | Path to a carbon intensity trace. If not given, carbon intensity is always 0. |
## Examples
@@ -71,12 +77,11 @@ The simplest data center that can be provided to OpenDC is shown below:
{
"hosts": [
{
- "cpus": [
- {
- "coreCount": 16,
- "coreSpeed": 1000
- }
- ],
+ "cpu":
+ {
+ "coreCount": 16,
+ "coreSpeed": 1000
+ },
"memory": {
"memorySize": 100000
}
@@ -87,7 +92,7 @@ The simplest data center that can be provided to OpenDC is shown below:
}
```
-This is creates a data center with a single cluster containing a single host. This host consist of a single 16 core CPU
+This creates a data center with a single cluster containing a single host. This host consist of a single 16 core CPU
with a speed of 1 Ghz, and 100 MiB RAM memory.
### Count
@@ -102,14 +107,14 @@ Duplicating clusters, hosts, or CPUs is easy using the "count" keyword:
"hosts": [
{
"count": 5,
- "cpus": [
- {
- "coreCount": 16,
- "coreSpeed": 1000,
- "count": 10
- }
- ],
- "memory": {
+ "cpu":
+ {
+ "coreCount": 16,
+ "coreSpeed": 1000,
+ "count": 10
+ },
+ "memory":
+ {
"memorySize": 100000
}
}
@@ -205,7 +210,7 @@ Aside from using number to indicate values it is also possible to define values
"modelType": "linear",
"power": "400 Watts",
"maxPower": "1 KW",
- "idlePower": "0.4W"
+ "idlePower": "0.4 W"
}
}
]
diff --git a/site/docs/documentation/Input/Traces.md b/site/docs/documentation/Input/Traces.md
deleted file mode 100644
index ec5782cb..00000000
--- a/site/docs/documentation/Input/Traces.md
+++ /dev/null
@@ -1,26 +0,0 @@
-### Traces
-OpenDC works with two types of traces that describe the servers that need to be run. Both traces have to be provided as
-parquet files.
-
-#### Meta
-The meta trace provides an overview of the servers:
-
-| Metric | Datatype | Unit | Summary |
-|--------------|------------|----------|--------------------------------------------------|
-| id | string | | The id of the server |
-| start_time | datetime64 | datetime | The submission time of the server |
-| stop_time | datetime64 | datetime | The finish time of the submission |
-| cpu_count | int32 | count | The number of CPUs required to run this server |
-| cpu_capacity | float64 | MHz | The amount of CPU required to run this server |
-| mem_capacity | int64 | MB | The amount of memory required to run this server |
-
-#### Trace
-The Trace file provides information about the computational demand of each server over time:
-
-| Metric | Datatype | Unit | Summary |
-|-----------|------------|---------------|---------------------------------------------|
-| id | string | | The id of the server |
-| timestamp | datetime64 | datetime | The timestamp of the sample |
-| duration | int64 | milli seconds | The duration since the last sample |
-| cpu_count | int32 | count | The number of cpus required |
-| cpu_usage | float64 | MHz | The amount of computational power required. |
diff --git a/site/docs/documentation/Input/Workload.md b/site/docs/documentation/Input/Workload.md
new file mode 100644
index 00000000..5f2e61ae
--- /dev/null
+++ b/site/docs/documentation/Input/Workload.md
@@ -0,0 +1,24 @@
+OpenDC works with two types of traces that describe the servers that need to be run. Both traces have to be provided as
+parquet files.
+
+#### Task
+The Task file provides an overview of the tasks:
+
+| Metric          | Datatype | Unit     | Summary                                          |
+|-----------------|----------|----------|--------------------------------------------------|
+| id              | string   |          | The id of the task                               |
+| submission_time | int64    | datetime | The submission time of the task                  |
+| duration        | int64    | datetime | The duration of the task                         |
+| cpu_count       | int32    | count    | The number of CPUs required to run this task     |
+| cpu_capacity    | float64  | MHz      | The amount of CPU required to run this task      |
+| mem_capacity    | int64    | MB       | The amount of memory required to run this task   |
+
+#### Fragment
+The Fragment file provides information about the computational demand of each task over time:
+
+| Metric | Datatype | Unit | Summary |
+|-----------|------------|---------------|---------------------------------------------|
+| id | string | | The id of the task |
+| duration | int64 | milli seconds | The duration since the last sample |
+| cpu_count | int32 | count | The number of cpus required |
+| cpu_usage | float64 | MHz | The amount of computational power required. |
diff --git a/site/docs/documentation/Output.md b/site/docs/documentation/Output.md
index dbc2a765..3f9eb3d5 100644
--- a/site/docs/documentation/Output.md
+++ b/site/docs/documentation/Output.md
@@ -3,27 +3,31 @@ Running OpenDC results in three output files. The first file ([Server](#server))
The second file ([Host](#host)) contains all metrics related to the hosts on which jobs can be executed. Finally, the third file ([Service](#service))
contains metrics describing the overall performance. An experiment in OpenDC has
-### Server
-The server output file, contains all metrics of related to the servers run.
+### Task
+The task output file contains all metrics related to the tasks that are being executed.
-| Metric | Datatype | Unit | Summary |
-|--------------------|----------|--------|-------------------------------------------------------------------------------|
-| timestamp | int64 | ms | Timestamp of the sample since the start of the workload |
-| absolute timestamp | int64 | ms | The absolute timestamp based on the given workload |
-| server_id | binary | string | The id of the server determined during runtime |
-| server_name | binary | string | The name of the server provided by the Trace |
-| host_id | binary | string | The id of the host on which the server is hosted or `null` if it has no host. |
-| mem_capacity | int64 | Mb | |
-| cpu_count | int32 | count | |
-| cpu_limit | double | MHz | The capacity of the CPUs of Host on which the server is running. |
-| cpu_time_active | int64 | ms | The duration that a CPU was active in the server. |
-| cpu_time_idle | int64 | ms | The duration that a CPU was idle in the server. |
-| cpu_time_steal | int64 | ms | The duration that a vCPU wanted to run, but no capacity was available. |
-| cpu_time_lost | int64 | ms | The duration of CPU time that was lost due to interference. |
-| uptime | int64 | ms | The uptime of the host since last sample. |
-| downtime | int64 | ms | The downtime of the host since last sample. |
-| provision_time | int64 | ms | The time for which the server was enqueued for the scheduler. |
-| boot_time | int64 | ms | The time the server took booting. |
+| Metric | Datatype | Unit | Summary |
+|--------------------|----------|-----------|-------------------------------------------------------------------------------|
+| timestamp | int64 | ms | Timestamp of the sample since the start of the workload |
+| absolute timestamp | int64 | ms | The absolute timestamp based on the given workload |
+| server_id | binary | string | The id of the server determined during runtime |
+| server_name | binary | string | The name of the server provided by the Trace |
+| host_id | binary | string | The id of the host on which the server is hosted or `null` if it has no host. |
+| mem_capacity | int64 | Mb | |
+| cpu_count | int32 | count | |
+| cpu_limit | double | MHz | The capacity of the CPUs of Host on which the server is running. |
+| cpu_time_active | int64 | ms | The duration that a CPU was active in the server. |
+| cpu_time_idle | int64 | ms | The duration that a CPU was idle in the server. |
+| cpu_time_steal | int64 | ms | The duration that a vCPU wanted to run, but no capacity was available. |
+| cpu_time_lost | int64 | ms | The duration of CPU time that was lost due to interference. |
+| uptime | int64 | ms | The uptime of the host since last sample. |
+| downtime | int64 | ms | The downtime of the host since last sample. |
+| provision_time | int64 | ms | The time for which the server was enqueued for the scheduler. |
+| boot_time | int64 | ms | The time a task got booted. |
+| boot_time_absolute | int64 | ms | The absolute time a task got booted. |
+| creation_time | int64 | ms | The time at which the task was created by the ComputeService |
+| finish_time | int64 | ms | The time at which the task was finished (either completed or terminated) |
+| task_state | String | TaskState | The status of the Task |
### Host
The host output file, contains all metrics of related to the host run.
@@ -33,7 +37,7 @@ The host output file, contains all metrics of related to the host run.
| timestamp | int64 | ms | Timestamp of the sample |
| absolute timestamp | int64 | ms | The absolute timestamp based on the given workload |
| host_id | binary | string | The id of the host given by OpenDC |
-| cpu_count | int32 | count | The number of available cpuModel cores |
+| cpu_count | int32 | count | The number of available cpuModel cores |
| mem_capacity | int64 | Mb | The amount of available memory |
| guests_terminated | int32 | count | The number of guests that are in a terminated state. |
| guests_running | int32 | count | The number of guests that are in a running state. |
@@ -49,11 +53,24 @@ The host output file, contains all metrics of related to the host run.
| cpu_time_lost | int64 | ms | The duration of CPU time that was lost due to interference. |
| power_draw | double | Watt | The current power draw of the host. |
| energy_usage | double | Joule (Ws) | The total energy consumption of the host since last sample. |
-| carbon_intensity | double | gCO2/kW | The amount of carbon that is emitted when using a unit of energy |
-| carbon_emission | double | gram | The amount of carbon emitted since the previous sample |
| uptime | int64 | ms | The uptime of the host since last sample. |
| downtime | int64 | ms | The downtime of the host since last sample. |
-| boot_time | int64 | ms | The time the host took to boot. |
+| boot_time | int64 | ms | The time a host got booted. |
+| boot_time_absolute | int64 | ms | The absolute time a host got booted. |
+
+### Power Source
+The power source output file contains all metrics related to the power sources.
+
+| Metric | DataType | Unit | Summary |
+|--------------------|----------|------------|------------------------------------------------------------------------------------------|
+| timestamp | int64 | ms | Timestamp of the sample |
+| absolute timestamp | int64 | ms | The absolute timestamp based on the given workload |
+| hosts_connected | int | Count | The number of hosts connected to the power Source (WARNING: does not work at the moment) |
+| power_draw         | double   | Watt       | The current power draw of the power source.                                                |
+| energy_usage       | double   | Joule (Ws) | The total energy consumption of the power source since last sample.                        |
+| carbon_intensity | double | gCO2/kW | The amount of carbon that is emitted when using a unit of energy |
+| carbon_emission | double | gram | The amount of carbon emitted since the previous sample |
+
### Service
The service output file, contains metrics providing an overview of the performance.
@@ -64,8 +81,10 @@ The service output file, contains metrics providing an overview of the performan
| absolute timestamp | int64 | ms | The absolute timestamp based on the given workload |
| hosts_up | int32 | count | The number of hosts that are up at this instant. |
| hosts_down | int32 | count | The number of hosts that are down at this instant. |
-| servers_pending | int32 | count | The number of servers that are pending to be scheduled. |
-| servers_active | int32 | count | The number of servers that are currently active. |
+| tasks_total        | int32    | count | The total number of tasks in the system.                               |
+| tasks_pending      | int32    | count | The number of tasks that are pending to be scheduled.                  |
+| tasks_active       | int32    | count | The number of tasks that are currently active.                         |
+| tasks_terminated   | int32    | count | The number of tasks that have been terminated.                         |
+| tasks_completed    | int32    | count | The number of tasks that have been completed.                          |
| attempts_success | int32 | count | The scheduling attempts that were successful. |
| attempts_failure | int32 | count | The scheduling attempts that were unsuccessful due to client error. |
-| attempts_error | int32 | count | The scheduling attempts that were unsuccessful due to scheduler error. |
diff --git a/site/docs/getting-started/0-installation.md b/site/docs/getting-started/0-installation.md
index e9f539a8..281e811f 100644
--- a/site/docs/getting-started/0-installation.md
+++ b/site/docs/getting-started/0-installation.md
@@ -6,7 +6,7 @@ description: How to install OpenDC locally, and start experimenting in no time.
This page describes how to set up and configure a local single-user OpenDC installation so that you can quickly get your
experiments running. You can also use the [hosted version of OpenDC](https://app.opendc.org) to get started even
-quicker.
+quicker (note, however, that the web server is missing some of the more complex features).
## Prerequisites
@@ -14,42 +14,18 @@ quicker.
1. **Supported Platforms**
OpenDC is actively tested on Windows, macOS and GNU/Linux.
2. **Required Software**
- A Java installation of version 17 or higher is required for OpenDC. You may download the
+ A Java installation of version 19 or higher is required for OpenDC. You may download the
[Java distribution from Oracle](https://www.oracle.com/java/technologies/downloads/) or use the distribution provided
by your package manager.
## Download
-To get an OpenDC distribution, download a recent version from our [Releases](https://github.com/atlarge-research/opendc/releases)
-page on GitHub.
+To get an OpenDC distribution, download a recent version from our [Releases](https://github.com/atlarge-research/opendc/releases) page on GitHub.
+For basic usage, the OpenDCExperimentRunner is all that is needed.
## Setup
-Unpack the downloaded OpenDC distribution and try the following command:
-
-```bash
-$ bin/opendc-server
-__ ____ __ _____ ___ __ ____ ______
- --/ __ \/ / / / _ | / _ \/ //_/ / / / __/
- -/ /_/ / /_/ / __ |/ , _/ ,< / /_/ /\ \
---\___\_\____/_/ |_/_/|_/_/|_|\____/___/
-2022-09-12 10:30:22,064 INFO [org.fly.cor.int.dat.bas.BaseDatabaseType] (main) Database: jdbc:h2:file:./data/opendc.db (H2 2.1)
-2022-09-12 10:30:22,089 WARN [org.fly.cor.int.dat.bas.Database] (main) Flyway upgrade recommended: H2 2.1.214 is newer than this version of Flyway and support has not been tested. The latest supported version of H2 is 2.1.210.
-2022-09-12 10:30:22,098 INFO [org.fly.cor.int.com.DbMigrate] (main) Current version of schema "PUBLIC": 1.0.0
-2022-09-12 10:30:22,099 INFO [org.fly.cor.int.com.DbMigrate] (main) Schema "PUBLIC" is up to date. No migration necessary.
-2022-09-12 10:30:22,282 INFO [org.ope.web.run.run.OpenDCRunnerRecorder] (main) Starting OpenDC Runner in background (polling every PT30S)
-2022-09-12 10:30:22,347 INFO [io.quarkus] (main) opendc-web-server 2.1-rc1 on JVM (powered by Quarkus 2.11.1.Final) started in 1.366s. Listening on: http://0.0.0.0:8080
-2022-09-12 10:30:22,348 INFO [io.quarkus] (main) Profile prod activated.
-2022-09-12 10:30:22,348 INFO [io.quarkus] (main) Installed features: [agroal, cdi, flyway, hibernate-orm, hibernate-validator, jdbc-h2, jdbc-postgresql, kotlin, narayana-jta, opendc-runner, opendc-ui, resteasy, resteasy-jackson, security, smallrye-simHyperVisorContext-propagation, smallrye-openapi, swagger-ui, vertx]
-```
-This will launch the built-in single-user OpenDC server on port 8080. Visit
-[http://localhost:8080](http://localhost:8080) to access the bundled web UI.
-
-## Configuration
-
-OpenDC can be configured using the configuration files located in the `conf` directory. By default, all user data is
-stored in the `data` directory using the H2 database engine.
-
-## Multi-tenant deployment
-
-For more information on setting up multi-tenant, non-trivial deployments, see the [Deployment Guide](docs/advanced-guides/deploy.md).
+Unpack the downloaded OpenDC distribution. Opening OpenDCExperimentRunner results in two folders, `bin` and `lib`.
+`lib` contains all `.jar` files needed to run OpenDC. `bin` contains two executable versions of the OpenDCExperimentRunner.
+In the following pages, we discuss how to run an experiment using the executables.
+
diff --git a/site/docs/getting-started/1-first-experiment.md b/site/docs/getting-started/1-first-experiment.md
new file mode 100644
index 00000000..313d757b
--- /dev/null
+++ b/site/docs/getting-started/1-first-experiment.md
@@ -0,0 +1,200 @@
+---
+description: Designing a simple experiment
+---
+
+# First Experiment
+Now that you have downloaded OpenDC, we will start creating a simple experiment.
+In this experiment we will compare the performance of a small, and a big data center on the same workload.
+
+:::info Learning goal
+During this tutorial, we will learn how to create and execute a simple experiment in OpenDC.
+:::
+
+## Designing a Data Center
+
+The first requirement to run an experiment in OpenDC is a **topology**.
+A **topology** defines the hardware on which a **workload** is executed.
+Larger topologies will be capable of running more workloads, and will often complete them quicker.
+
+A **topology** is defined using a JSON file. A **topology** contains one or more _clusters_.
+_clusters_ are groups of _hosts_ on a specific location. Each cluster consists of one or more _hosts_.
+A _host_ is a machine on which one or more tasks can be executed. _hosts_ are composed of a _cpu_ and a _memory_ unit.
+
+### Simple Data Center
+In this experiment, we are comparing two data centers. Below is an example of the small **topology** file:
+
+```json
+{
+ "clusters":
+ [
+ {
+ "name": "C01",
+ "hosts" :
+ [
+ {
+ "name": "H01",
+ "cpu":
+ {
+ "coreCount": 12,
+ "coreSpeed": 3300
+ },
+ "memory": {
+ "memorySize": 140457600000
+ }
+ }
+ ]
+ }
+ ]
+}
+```
+
+This **topology** consists of a single _cluster_, with a single _host_.
+
+:::tip
+To use this **topology** in an experiment, copy the content to a new JSON file, or download it [here](documents/topologies/small.json "download")
+:::
+
+### Big Data Center
+In this experiment, we are comparing two data centers. Below is an example of the bigger **topology** file:
+
+```json
+{
+ "clusters":
+ [
+ {
+ "name": "C01",
+ "hosts" :
+ [
+ {
+ "name": "H01",
+ "cpu":
+ {
+ "coreCount": 32,
+ "coreSpeed": 3200
+ },
+ "memory": {
+ "memorySize": 256000
+ }
+ }
+ ]
+ },
+ {
+ "name": "C02",
+ "hosts" :
+ [
+ {
+ "name": "H02",
+ "count": 6,
+ "cpu":
+ {
+ "coreCount": 8,
+ "coreSpeed": 2930
+ },
+ "memory": {
+ "memorySize": 64000
+ }
+ }
+ ]
+ },
+ {
+ "name": "C03",
+ "hosts" :
+ [
+ {
+ "name": "H03",
+ "count": 2,
+ "cpu":
+ {
+ "coreCount": 16,
+ "coreSpeed": 3200
+ },
+ "memory": {
+ "memorySize": 128000
+ }
+ }
+ ]
+ }
+ ]
+}
+```
+
+Compared to the small topology, the big topology consists of three clusters; clusters C02 and C03 use the `count` field to define multiple identical hosts.
+
+:::tip
+To use this **topology** in an experiment, copy the content to a new JSON file, or download it [here](documents/topologies/big.json "download")
+:::
+
+:::info
+For more in depth information about Topologies, see [Topology](../documentation/Input/Topology)
+:::
+
+## Workloads
+
+Next to the topology, we need a workload to simulate on the data center.
+In OpenDC, workloads are defined as a bag of tasks. Each task is accompanied by one or more fragments.
+These fragments define the computational requirements of the task over time.
+For this experiment, we will use the bitbrains-small workload. This is a small workload of 50 tasks,
+spanning a bit more than a month. You can download the workload [here](documents/workloads/bitbrains-small.zip "download")
+
+:::info
+For more in depth information about Workloads, see [Workload](../documentation/Input/Workload)
+:::
+
+## Executing an experiment
+
+To run an experiment, we need to create an **experiment** file. This is a JSON file, that defines what should be executed
+by OpenDC, and how. Below is an example of a simple **experiment** file:
+
+```json
+{
+ "name": "simple",
+ "topologies": [{
+ "pathToFile": "topologies/small.json"
+ },
+ {
+ "pathToFile": "topologies/big.json"
+ }],
+ "workloads": [{
+ "pathToFile": "traces/bitbrains-small",
+ "type": "ComputeWorkload"
+ }]
+}
+```
+
+In this **experiment**, three things are defined. First is the `name`. This defines what the experiment is called
+in the output folder. Second is the `topologies`. This defines where OpenDC can find the topology files.
+Finally, the `workloads`. This defines which workload OpenDC should run. You can download the experiment file [here](documents/experiments/simple_experiment.json "download")
+
+As you can see, `topologies` defines two topologies. In this case OpenDC will run two simulations, one with the small
+topology, and one with the big topology.
+
+:::info
+For more in depth information about Experiments, see [Experiment](../documentation/Input/Experiment)
+:::
+
+## Running OpenDC
+At this point, we should have all components to run an experiment. To make sure every file can be used by OpenDC,
+please create an experiment folder such as the one shown below:
+```
+── {simulation-folder-name} 📁 🔧
+ ├── topologies 📁 🔒
+ │ └── small.json 📄 🔧
+ │ └── big.json 📄 🔧
+ ├── experiments 📁 🔒
+ │ └── simple_experiment.json 📄 🔧
+ ├── workloads 📁 🔒
+ │ └── bitbrains-small 📁 🔒
+ │ └── fragments.parquet 📄 🔧
+ │ └── tasks.parquet 📄 🔧
+ ├── OpenDCExperimentRunner 📁 🔒
+ │ └── lib 📁 🔒
+ │ └── bin 📁 🔒
+ ├── output 📁 🔒
+```
+
+Executing the experiment can be done directly from the terminal.
+Execute the following code from the terminal in simulation-folder-name
+
+```
+$ ./OpenDCExperimentRunner/bin/OpenDCExperimentRunner.sh --experiment-path "experiments/simple_experiment.json"
+```
diff --git a/site/docs/getting-started/documents/experiments/simple_experiment.json b/site/docs/getting-started/documents/experiments/simple_experiment.json
new file mode 100644
index 00000000..74429fdb
--- /dev/null
+++ b/site/docs/getting-started/documents/experiments/simple_experiment.json
@@ -0,0 +1,13 @@
+{
+ "name": "simple",
+ "topologies": [{
+ "pathToFile": "topologies/small.json"
+ },
+ {
+ "pathToFile": "topologies/big.json"
+ }],
+ "workloads": [{
+ "pathToFile": "traces/bitbrains-small",
+ "type": "ComputeWorkload"
+ }]
+}
diff --git a/site/docs/getting-started/documents/topologies/big.json b/site/docs/getting-started/documents/topologies/big.json
new file mode 100644
index 00000000..c3a060cc
--- /dev/null
+++ b/site/docs/getting-started/documents/topologies/big.json
@@ -0,0 +1,59 @@
+{
+ "clusters":
+ [
+ {
+ "name": "C01",
+ "hosts" :
+ [
+ {
+ "name": "H01",
+ "cpu":
+ {
+ "coreCount": 32,
+ "coreSpeed": 3200
+ },
+ "memory": {
+ "memorySize": 256000
+ }
+ }
+ ]
+ },
+ {
+ "name": "C02",
+ "hosts" :
+ [
+ {
+ "name": "H02",
+ "count": 6,
+ "cpu":
+ {
+ "coreCount": 8,
+ "coreSpeed": 2930
+ },
+ "memory": {
+ "memorySize": 64000
+ }
+ }
+ ]
+ },
+ {
+ "name": "C03",
+ "hosts" :
+ [
+ {
+ "name": "H03",
+ "count": 2,
+ "cpu":
+ {
+ "coreCount": 16,
+ "coreSpeed": 3200
+ },
+ "memory": {
+ "memorySize": 128000
+ }
+ }
+ ]
+ }
+ ]
+}
+
diff --git a/site/docs/getting-started/documents/topologies/small.json b/site/docs/getting-started/documents/topologies/small.json
new file mode 100644
index 00000000..54e3c6fc
--- /dev/null
+++ b/site/docs/getting-started/documents/topologies/small.json
@@ -0,0 +1,22 @@
+{
+ "clusters":
+ [
+ {
+ "name": "C01",
+ "hosts" :
+ [
+ {
+ "name": "H01",
+ "cpu":
+ {
+ "coreCount": 12,
+ "coreSpeed": 3300
+ },
+ "memory": {
+ "memorySize": 140457600000
+ }
+ }
+ ]
+ }
+ ]
+}
diff --git a/site/docs/getting-started/documents/workloads/bitbrains-small.zip b/site/docs/getting-started/documents/workloads/bitbrains-small.zip
new file mode 100644
index 00000000..f128e636
--- /dev/null
+++ b/site/docs/getting-started/documents/workloads/bitbrains-small.zip
Binary files differ
diff --git a/site/old_tutorials/0-installation.md b/site/old_tutorials/0-installation.md
new file mode 100644
index 00000000..281e811f
--- /dev/null
+++ b/site/old_tutorials/0-installation.md
@@ -0,0 +1,31 @@
+---
+description: How to install OpenDC locally, and start experimenting in no time.
+---
+
+# Installation
+
+This page describes how to set up and configure a local single-user OpenDC installation so that you can quickly get your
+experiments running. You can also use the [hosted version of OpenDC](https://app.opendc.org) to get started even
+quicker (note, however, that the web server is missing some of the more complex features).
+
+
+## Prerequisites
+
+1. **Supported Platforms**
+ OpenDC is actively tested on Windows, macOS and GNU/Linux.
+2. **Required Software**
+ A Java installation of version 19 or higher is required for OpenDC. You may download the
+ [Java distribution from Oracle](https://www.oracle.com/java/technologies/downloads/) or use the distribution provided
+ by your package manager.
+
+## Download
+
+To get an OpenDC distribution, download a recent version from our [Releases](https://github.com/atlarge-research/opendc/releases) page on GitHub.
+For basic usage, the OpenDCExperimentRunner is all that is needed.
+
+## Setup
+
+Unpack the downloaded OpenDC distribution. Opening OpenDCExperimentRunner results in two folders, `bin` and `lib`.
+`lib` contains all `.jar` files needed to run OpenDC. `bin` contains two executable versions of the OpenDCExperimentRunner.
+In the following pages, we discuss how to run an experiment using the executables.
+
diff --git a/site/docs/getting-started/1-design.mdx b/site/old_tutorials/1-design.mdx
index e8ab2c58..e8ab2c58 100644
--- a/site/docs/getting-started/1-design.mdx
+++ b/site/old_tutorials/1-design.mdx
diff --git a/site/docs/getting-started/2-experiment.mdx b/site/old_tutorials/2-experiment.mdx
index 14970ea6..14970ea6 100644
--- a/site/docs/getting-started/2-experiment.mdx
+++ b/site/old_tutorials/2-experiment.mdx
diff --git a/site/old_tutorials/3-whats-next.md b/site/old_tutorials/3-whats-next.md
new file mode 100644
index 00000000..7c021119
--- /dev/null
+++ b/site/old_tutorials/3-whats-next.md
@@ -0,0 +1,12 @@
+---
+description: How to supercharge your designs and experiments with OpenDC.
+---
+
+# What's next?
+
+Congratulations! You have just learned how to design and experiment with a (virtual) datacenter in OpenDC. What's next?
+
+- Follow one of the [tutorials](/docs/category/tutorials) using OpenDC.
+- Check the [advanced guides](/docs/category/advanced-guides) for more complex material.
+- Read about [existing work using OpenDC](/community/research).
+- Get involved in the [OpenDC Community](/community/support).
diff --git a/site/old_tutorials/_category_.json b/site/old_tutorials/_category_.json
new file mode 100644
index 00000000..169f7a27
--- /dev/null
+++ b/site/old_tutorials/_category_.json
@@ -0,0 +1,8 @@
+{
+ "label": "Getting Started",
+ "position": 2,
+ "link": {
+ "type": "generated-index",
+ "description": "10 minutes to learn the most important concepts of OpenDC."
+ }
+}