author     Radu Nicolae <rnicolae04@gmail.com>   2024-04-22 13:51:39 +0200
committer  GitHub <noreply@github.com>           2024-04-22 13:51:39 +0200
commit     d4c1d8468a17eb7adf8bf20949c2fdc4b2f93fec (patch)
tree       3ab47cd41633615ae187c2a2923ac09ae48ccbd3 /opendc-experiments/opendc-experiments-base/src/main/kotlin
parent     d652fa2fa76556edd81d3b8087a0c943d462ec49 (diff)
Merged scenario and portfolio (#220)
* sync with the master branch
* rebase
* multimodel - the simulation is currently run once for every model
* factory method - handles models without given params
* removed redundant flags
* modelType
* flags removed
* implemented output into a folder
* multimodel ipynb setup - to be implemented, and to also be run as a Python script when the simulation occurs
* towards a multimodel Python implementation - issue observed - the saved files have the same data?
* JSON parsing now handles lists for topologies, workloads, allocationPolicies, powerModels
* scenario file accepts lists as input, and creates multiple combinations of scenarios
* multi-model prediction repaired, now we predict using multiple models
* commit before removing powerModel from scenario
* commit after removing powerModel from scenario
* commit after removing powerModel from scenario (and actually running)
* power models can now output their name and full name (with min and max)
* now we can select where to output (seed or output folder)
* input files - clearer naming + output naming improved
* minimal changes
* all tests passing + JSON files from tests updated to the new JSON format
* JSON files for topology now accept only one power model (instead of a list)
* multi and single input from tests updated to match the format
* tests passed locally
* spotless applied
* demo folder removed
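To illustrate the new list-based input format, a minimal scenario file might look like the sketch below. The field names follow the ScenarioSpec, TopologySpec, WorkloadSpec, AllocationPolicySpec, FailureModelSpec, and ExportModelSpec classes introduced in this diff; the concrete paths and values are hypothetical.

    {
        "name": "my_scenario",
        "topologies": [ { "pathToFile": "topologies/single.json" }, { "pathToFile": "topologies/multi.json" } ],
        "workloads": [ { "pathToFile": "traces/bitbrains-small", "type": "ComputeWorkload" } ],
        "allocationPolicies": [ { "policyType": "Mem" } ],
        "failureModels": [ { "failureInterval": 0.0 } ],
        "exportModels": [ { "exportInterval": 300 } ],
        "outputFolder": "output",
        "runs": 2,
        "initialSeed": 0
    }

With two topologies and a single entry for everything else, this expands into two scenarios.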
Diffstat (limited to 'opendc-experiments/opendc-experiments-base/src/main/kotlin')
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/Scenario.kt | 19
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioFactories.kt | 130
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioReader.kt | 1
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioSpecs.kt | 167
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/AllocationPolicySpec.kt | 38
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/ExportModelSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioReader.kt) | 35
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/FailureModelSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/Portfolio.kt) | 21
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/PowerModelSpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioSpec.kt) | 9
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/ScenarioSpec.kt | 65
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/TopologySpec.kt (renamed from opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioFactories.kt) | 33
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/WorkloadSpec.kt | 70
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt | 2
-rw-r--r--  opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt | 150
13 files changed, 444 insertions, 296 deletions
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/Scenario.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/Scenario.kt
index f0e5717a..9029691a 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/Scenario.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/Scenario.kt
@@ -22,16 +22,31 @@
package org.opendc.experiments.base.models.scenario
+import AllocationPolicySpec
+import ExportModelSpec
+import WorkloadSpec
import org.opendc.compute.simulator.failure.FailureModel
import org.opendc.compute.topology.specs.HostSpec
+/**
+ * A data class representing a scenario for a set of experiments.
+ *
+ * @property topology The list of HostSpec representing the topology of the scenario.
+ * @property workload The WorkloadSpec representing the workload of the scenario.
+ * @property allocationPolicy The AllocationPolicySpec representing the allocation policy of the scenario.
+ * @property failureModel The FailureModel representing the failure model of the scenario. It can be null.
+ * @property exportModel The ExportModelSpec representing the export model of the scenario. It defaults to an instance of ExportModelSpec.
+ * @property outputFolder The String representing the output folder of the scenario. It defaults to "output".
+ * @property name The String representing the name of the scenario. It defaults to an empty string.
+ * @property runs The Int representing the number of runs of the scenario. It defaults to 1.
+ * @property initialSeed The Int representing the initial seed of the scenario. It defaults to 0.
+ */
public data class Scenario(
val topology: List<HostSpec>,
val workload: WorkloadSpec,
val allocationPolicy: AllocationPolicySpec,
val failureModel: FailureModel?,
- val carbonTracePath: String? = null,
- val exportModel: ExportSpec = ExportSpec(),
+ val exportModel: ExportModelSpec = ExportModelSpec(),
val outputFolder: String = "output",
val name: String = "",
val runs: Int = 1,
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioFactories.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioFactories.kt
index 93b2a2b5..56076f52 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioFactories.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioFactories.kt
@@ -22,37 +22,125 @@
package org.opendc.experiments.base.models.scenario
+import AllocationPolicySpec
+import TopologySpec
+import WorkloadSpec
import org.opendc.compute.simulator.failure.getFailureModel
+import org.opendc.compute.topology.TopologyReader
import org.opendc.compute.topology.clusterTopology
+import org.opendc.compute.topology.specs.TopologyJSONSpec
+import org.opendc.experiments.base.models.scenario.specs.ScenarioSpec
import java.io.File
+import java.util.UUID
private val scenarioReader = ScenarioReader()
-public fun getScenario(filePath: String): Scenario {
+/**
+ * Returns a list of Scenarios from a given file path (input).
+ *
+ * @param filePath The path to the file containing the scenario specifications.
+ * @return A list of Scenarios.
+ */
+public fun getScenario(filePath: String): List<Scenario> {
return getScenario(File(filePath))
}
-public fun getScenario(file: File): Scenario {
+/**
+ * Returns a list of Scenarios from a given file. Reads and decodes the contents of the (JSON) file.
+ *
+ * @param file The file containing the scenario specifications.
+ * @return A list of Scenarios.
+ */
+public fun getScenario(file: File): List<Scenario> {
return getScenario(scenarioReader.read(file))
}
-public fun getScenario(scenarioSpec: ScenarioSpec): Scenario {
- val topology = clusterTopology(File(scenarioSpec.topology.pathToFile))
- val workload = scenarioSpec.workload
- val allocationPolicy = scenarioSpec.allocationPolicy
- val failureModel = getFailureModel(scenarioSpec.failureModel.failureInterval)
- val exportModel = scenarioSpec.exportModel
-
- return Scenario(
- topology,
- workload,
- allocationPolicy,
- failureModel,
- scenarioSpec.carbonTracePath,
- exportModel,
- scenarioSpec.outputFolder,
- scenarioSpec.name,
- scenarioSpec.runs,
- scenarioSpec.initialSeed,
- )
+/**
+ * Returns a list of Scenarios from a given ScenarioSpec.
+ *
+ * @param scenarioSpec The ScenarioSpec containing the scenario specifications.
+ * @return A list of Scenarios.
+ */
+public fun getScenario(scenarioSpec: ScenarioSpec): List<Scenario> {
+ return getScenarioCombinations(scenarioSpec)
+}
+
+/**
+ * Returns a list of Scenarios from a given ScenarioSpec by generating all possible combinations of
+ * topologies, workloads, allocation policies, failure models, and export models.
+ *
+ * @param scenarioSpec The ScenarioSpec containing the scenario specifications.
+ * @return A list of Scenarios.
+ */
+public fun getScenarioCombinations(scenarioSpec: ScenarioSpec): List<Scenario> {
+ val topologies = getTopologies(scenarioSpec.topologies)
+ val topologiesSpec = scenarioSpec.topologies
+ val workloads = scenarioSpec.workloads
+ val allocationPolicies = scenarioSpec.allocationPolicies
+ val failureModels = scenarioSpec.failureModels
+ val exportModels = scenarioSpec.exportModels
+ val scenarios = mutableListOf<Scenario>()
+
+ for (topology in topologiesSpec) {
+ for (workload in workloads) {
+ for (allocationPolicy in allocationPolicies) {
+ for (failureModel in failureModels) {
+ for (exportModel in exportModels) {
+ val scenario =
+ Scenario(
+ topology = clusterTopology(File(topology.pathToFile)),
+ workload = workload,
+ allocationPolicy = allocationPolicy,
+ failureModel = getFailureModel(failureModel.failureInterval),
+ exportModel = exportModel,
+ outputFolder = scenarioSpec.outputFolder,
+ name = getOutputFolderName(scenarioSpec, topology, workload, allocationPolicy),
+ runs = scenarioSpec.runs,
+ initialSeed = scenarioSpec.initialSeed,
+ )
+ scenarios.add(scenario)
+ }
+ }
+ }
+ }
+ }
+
+ return scenarios
+}
+
+/**
+ * Returns a list of TopologyJSONSpec from a given list of TopologySpec.
+ *
+ * @param topologies The list of TopologySpec.
+ * @return A list of TopologyJSONSpec.
+ */
+public fun getTopologies(topologies: List<TopologySpec>): List<TopologyJSONSpec> {
+ val readTopologies = mutableListOf<TopologyJSONSpec>()
+ for (topology in topologies) {
+ readTopologies.add(TopologyReader().read(File(topology.pathToFile)))
+ }
+
+ return readTopologies
+}
+
+/**
+ * Returns a string representing the output folder name for a given ScenarioSpec, TopologySpec, WorkloadSpec, and AllocationPolicySpec.
+ *
+ * @param scenarioSpec The ScenarioSpec.
+ * @param topology The TopologySpec.
+ * @param workload The WorkloadSpec.
+ * @param allocationPolicy The AllocationPolicySpec.
+ * @return A string representing the output folder name.
+ */
+public fun getOutputFolderName(
+ scenarioSpec: ScenarioSpec,
+ topology: TopologySpec,
+ workload: WorkloadSpec,
+ allocationPolicy: AllocationPolicySpec,
+): String {
+ return "scenario=${scenarioSpec.name}" +
+ "-topology=${topology.pathToFile}" +
+ "-workload=${workload.name}}" +
+ "-scheduler=${allocationPolicy.name}" +
+ "-${UUID.randomUUID().toString().substring(0, 8)}"
}
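As a usage sketch (assuming a hypothetical scenario file like the one shown above), the factory expands every combination and assigns each Scenario a uniquely suffixed output folder name:

    import org.opendc.experiments.base.models.scenario.getScenario

    fun main() {
        // Reads the JSON scenario file and expands topologies x workloads x allocationPolicies
        // x failureModels x exportModels into individual Scenario instances.
        val scenarios = getScenario("scenarios/my_scenario.json") // hypothetical path
        scenarios.forEach { println(it.name) }
        // e.g. "scenario=my_scenario-topology=topologies/single.json-workload=bitbrains-small-scheduler=Mem-1a2b3c4d"
        // where the trailing 8 characters come from a random UUID
    }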
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioReader.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioReader.kt
index e7c7b4ae..ffbb3aa3 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioReader.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioReader.kt
@@ -25,6 +25,7 @@ package org.opendc.experiments.base.models.scenario
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.decodeFromStream
+import org.opendc.experiments.base.models.scenario.specs.ScenarioSpec
import java.io.File
import java.io.InputStream
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioSpecs.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioSpecs.kt
deleted file mode 100644
index f39b16dd..00000000
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/ScenarioSpecs.kt
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Copyright (c) 2024 AtLarge Research
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package org.opendc.experiments.base.models.scenario
-
-import kotlinx.serialization.Serializable
-import org.opendc.compute.service.scheduler.ComputeSchedulerEnum
-import org.opendc.compute.workload.ComputeWorkload
-import org.opendc.compute.workload.sampleByLoad
-import org.opendc.compute.workload.trace
-import java.io.File
-
-/**
- * specification describing a scenario
- *
- * @property topology
- * @property workload
- * @property allocationPolicy
- * @property failureModel
- * @property exportModel
- * @property outputFolder
- * @property initialSeed
- * @property runs
- */
-@Serializable
-public data class ScenarioSpec(
- val topology: TopologySpec,
- val workload: WorkloadSpec,
- val allocationPolicy: AllocationPolicySpec,
- val failureModel: FailureModelSpec = FailureModelSpec(),
- val carbonTracePath: String? = null,
- val exportModel: ExportSpec = ExportSpec(),
- val outputFolder: String = "output",
- val initialSeed: Int = 0,
- val runs: Int = 1,
- var name: String = "",
-) {
- init {
- require(runs > 0) { "The number of runs should always be positive" }
- require(carbonTracePath == null || File(carbonTracePath).exists()) { "The provided carbon trace cannot be found: $carbonTracePath" }
-
- // generate name if not provided
- if (name == "") {
- name = "workload=${workload.name}_topology=${topology.name}_allocationPolicy=${allocationPolicy.name}"
- }
- }
-}
-
-/**
- * specification describing a topology
- *
- * @property pathToFile
- */
-@Serializable
-public data class TopologySpec(
- val pathToFile: String,
-) {
- public val name: String = File(pathToFile).nameWithoutExtension
-
- init {
- require(File(pathToFile).exists()) { "The provided path to the topology: $pathToFile does not exist " }
- }
-}
-
-/**
- * specification describing a workload
- *
- * @property pathToFile
- * @property type
- */
-@Serializable
-public data class WorkloadSpec(
- val pathToFile: String,
- val type: WorkloadTypes,
-) {
- public val name: String = File(pathToFile).nameWithoutExtension
-
- init {
- require(File(pathToFile).exists()) { "The provided path to the workload: $pathToFile does not exist " }
- }
-}
-
-/**
- * specification describing a workload type
- *
- * @constructor Create empty Workload types
- */
-public enum class WorkloadTypes {
- /**
- * Compute workload
- *
- * @constructor Create empty Compute workload
- */
- ComputeWorkload,
-}
-
-/**
- *
- *TODO: move to separate file
- * @param type
- */
-public fun getWorkloadType(type: WorkloadTypes): ComputeWorkload {
- return when (type) {
- WorkloadTypes.ComputeWorkload -> trace("trace").sampleByLoad(1.0)
- }
-}
-
-/**
- * specification describing how tasks are allocated
- *
- * @property policyType
- *
- * TODO: expand with more variables such as allowed over-subscription
- */
-@Serializable
-public data class AllocationPolicySpec(
- val policyType: ComputeSchedulerEnum,
-) {
- public val name: String = policyType.toString()
-}
-
-/**
- * specification describing the failure model
- *
- * @property failureInterval The interval between failures in s. Should be 0.0 or higher
- */
-@Serializable
-public data class FailureModelSpec(
- val failureInterval: Double = 0.0,
-) {
- init {
- require(failureInterval >= 0.0) { "failure frequency cannot be lower than 0" }
- }
-}
-
-/**
- * specification describing how the results should be exported
- *
- * @property exportInterval The interval of exporting results in s. Should be higher than 0.0
- */
-@Serializable
-public data class ExportSpec(
- val exportInterval: Long = 5 * 60,
-) {
- init {
- require(exportInterval > 0) { "The Export interval has to be higher than 0" }
- }
-}
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/AllocationPolicySpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/AllocationPolicySpec.kt
new file mode 100644
index 00000000..f7ae7e9f
--- /dev/null
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/AllocationPolicySpec.kt
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2024 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+import kotlinx.serialization.Serializable
+import org.opendc.compute.service.scheduler.ComputeSchedulerEnum
+
+/**
+ * specification describing how tasks are allocated
+ *
+ * @property policyType
+ *
+ * TODO: expand with more variables such as allowed over-subscription
+ */
+@Serializable
+public data class AllocationPolicySpec(
+ val policyType: ComputeSchedulerEnum,
+) {
+ public val name: String = policyType.toString()
+}
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioReader.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/ExportModelSpec.kt
index 767b61bb..9a23ad00 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioReader.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/ExportModelSpec.kt
@@ -20,29 +20,18 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.models.portfolio
+import kotlinx.serialization.Serializable
-import kotlinx.serialization.ExperimentalSerializationApi
-import kotlinx.serialization.json.Json
-import kotlinx.serialization.json.decodeFromStream
-import java.io.File
-import java.io.InputStream
-
-public class PortfolioReader {
- @OptIn(ExperimentalSerializationApi::class)
- public fun read(file: File): PortfolioSpec {
- val input = file.inputStream()
- val obj = Json.decodeFromStream<PortfolioSpec>(input)
-
- return obj
- }
-
- /**
- * Read the specified [input].
- */
- @OptIn(ExperimentalSerializationApi::class)
- public fun read(input: InputStream): PortfolioSpec {
- val obj = Json.decodeFromStream<PortfolioSpec>(input)
- return obj
+/**
+ * specification describing how the results should be exported
+ *
+ * @property exportInterval The interval between exports, in seconds. Must be higher than 0.
+ */
+@Serializable
+public data class ExportModelSpec(
+ val exportInterval: Long = 5 * 60,
+) {
+ init {
+ require(exportInterval > 0) { "The Export interval has to be higher than 0" }
}
}
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/Portfolio.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/FailureModelSpec.kt
index 7b0299c5..99620366 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/Portfolio.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/FailureModelSpec.kt
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2021 AtLarge Research
+ * Copyright (c) 2024 AtLarge Research
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
@@ -20,13 +20,18 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.models.portfolio
-
-import org.opendc.experiments.base.models.scenario.Scenario
+import kotlinx.serialization.Serializable
/**
- * A portfolio represents a collection of scenarios are tested for the work.
+ * specification describing the failure model
+ *
+ * @property failureInterval The interval between failures, in seconds. Must be 0.0 or higher.
*/
-public class Portfolio(
- public val scenarios: Iterable<Scenario>,
-)
+@Serializable
+public data class FailureModelSpec(
+ val failureInterval: Double = 0.0,
+) {
+ init {
+ require(failureInterval >= 0.0) { "failure frequency cannot be lower than 0" }
+ }
+}
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/PowerModelSpec.kt
index 554442b2..fc568925 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/PowerModelSpec.kt
@@ -20,12 +20,11 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.models.portfolio
-
import kotlinx.serialization.Serializable
-import org.opendc.experiments.base.models.scenario.ScenarioSpec
@Serializable
-public data class PortfolioSpec(
- val scenarios: List<ScenarioSpec>,
+public data class PowerModelSpec(
+ val type: String = "constant",
+ val idlePower: Double = 200.0,
+ val maxPower: Double = 350.0,
)
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/ScenarioSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/ScenarioSpec.kt
new file mode 100644
index 00000000..5f9aec4a
--- /dev/null
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/ScenarioSpec.kt
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2024 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.experiments.base.models.scenario.specs
+
+import AllocationPolicySpec
+import ExportModelSpec
+import FailureModelSpec
+import TopologySpec
+import WorkloadSpec
+import kotlinx.serialization.Serializable
+
+/**
+ * specification describing a scenario
+ *
+ * @property topologies
+ * @property workloads
+ * @property allocationPolicies
+ * @property failureModels
+ * @property exportModels
+ * @property outputFolder
+ * @property initialSeed
+ * @property runs
+ */
+@Serializable
+public data class ScenarioSpec(
+ val topologies: List<TopologySpec>,
+ val workloads: List<WorkloadSpec>,
+ val allocationPolicies: List<AllocationPolicySpec>,
+ val failureModels: List<FailureModelSpec> = listOf(FailureModelSpec()),
+ val exportModels: List<ExportModelSpec> = listOf(ExportModelSpec()),
+ val outputFolder: String = "output",
+ val initialSeed: Int = 0,
+ val runs: Int = 1,
+ var name: String = "",
+) {
+ init {
+ require(runs > 0) { "The number of runs should always be positive" }
+
+ // generate name if not provided
+ if (name == "") {
+ name =
+ "workload=${workloads[0].name}_topology=${topologies[0].name}_allocationPolicy=${allocationPolicies[0].name}"
+ }
+ }
+}
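For reference, a sketch of building the same spec programmatically. The paths are hypothetical and must point to existing files because of the require checks in TopologySpec and WorkloadSpec; the comment shows the default name the init block generates when no name is provided.

    import org.opendc.compute.service.scheduler.ComputeSchedulerEnum
    import org.opendc.experiments.base.models.scenario.specs.ScenarioSpec

    // TopologySpec, WorkloadSpec, WorkloadTypes and AllocationPolicySpec sit in the root package in this commit.
    val spec =
        ScenarioSpec(
            topologies = listOf(TopologySpec("topologies/single.json")),
            workloads = listOf(WorkloadSpec("traces/bitbrains-small", WorkloadTypes.ComputeWorkload)),
            allocationPolicies = listOf(AllocationPolicySpec(ComputeSchedulerEnum.Mem)),
        )
    // spec.name == "workload=bitbrains-small_topology=single_allocationPolicy=Mem"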
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioFactories.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/TopologySpec.kt
index aee87814..392b9763 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/portfolio/PortfolioFactories.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/TopologySpec.kt
@@ -20,26 +20,21 @@
* SOFTWARE.
*/
-package org.opendc.experiments.base.models.portfolio
-
-import org.opendc.experiments.base.models.scenario.getScenario
+import kotlinx.serialization.Serializable
import java.io.File
-private val porfolioReader = PortfolioReader()
-
-public fun getPortfolio(filePath: String): Portfolio {
- return getPortfolio(File(filePath))
-}
-
-public fun getPortfolio(file: File): Portfolio {
- return getPortfolio(porfolioReader.read(file))
-}
+/**
+ * specification describing a topology
+ *
+ * @property pathToFile
+ */
+@Serializable
+public data class TopologySpec(
+ val pathToFile: String,
+) {
+ public val name: String = File(pathToFile).nameWithoutExtension
-public fun getPortfolio(portfolioSpec: PortfolioSpec): Portfolio {
- return Portfolio(
- portfolioSpec.scenarios.map {
- scenario ->
- getScenario(scenario)
- },
- )
+ init {
+ require(File(pathToFile).exists()) { "The provided path to the topology: $pathToFile does not exist " }
+ }
}
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/WorkloadSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/WorkloadSpec.kt
new file mode 100644
index 00000000..819f633d
--- /dev/null
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/models/scenario/specs/WorkloadSpec.kt
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2024 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+import kotlinx.serialization.Serializable
+import org.opendc.compute.workload.ComputeWorkload
+import org.opendc.compute.workload.sampleByLoad
+import org.opendc.compute.workload.trace
+import java.io.File
+
+/**
+ * specification describing a workload
+ *
+ * @property pathToFile
+ * @property type
+ */
+@Serializable
+public data class WorkloadSpec(
+ val pathToFile: String,
+ val type: WorkloadTypes,
+) {
+ public val name: String = File(pathToFile).nameWithoutExtension
+
+ init {
+ require(File(pathToFile).exists()) { "The provided path to the workload: $pathToFile does not exist " }
+ }
+}
+
+/**
+ * specification describing a workload type
+ *
+ * @constructor Create empty Workload types
+ */
+public enum class WorkloadTypes {
+ /**
+ * Compute workload
+ *
+ * @constructor Create empty Compute workload
+ */
+ ComputeWorkload,
+}
+
+/**
+ * Returns the ComputeWorkload corresponding to the given workload type.
+ * TODO: move to separate file
+ * @param type The type of the workload.
+ */
+public fun getWorkloadType(type: WorkloadTypes): ComputeWorkload {
+ return when (type) {
+ WorkloadTypes.ComputeWorkload -> trace("trace").sampleByLoad(1.0)
+ }
+}
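As context for how a WorkloadSpec is consumed, a minimal sketch (hypothetical trace path) mirroring the resolution call made in ScenarioRunner.kt:

    import org.opendc.compute.workload.ComputeWorkloadLoader
    import java.io.File
    import java.util.Random

    // Resolve a workload spec into the set of VMs to replay; the trace path is hypothetical.
    val workload = WorkloadSpec(pathToFile = "traces/bitbrains-small", type = WorkloadTypes.ComputeWorkload)
    val workloadLoader = ComputeWorkloadLoader(File(workload.pathToFile))
    val vms = getWorkloadType(workload.type).resolve(workloadLoader, Random(0L))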
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt
index a6a05d78..97914556 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt
@@ -108,7 +108,7 @@ public suspend fun ComputeService.replay(
}
// Make sure the trace entries are ordered by submission time
-// assert(start - simulationOffset >= 0) { "Invalid trace order" }
+ // assert(start - simulationOffset >= 0) { "Invalid trace order" }
// Delay the server based on the startTime given by the trace.
if (!submitImmediately) {
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
index 59c11f34..63853d33 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
@@ -22,9 +22,9 @@
package org.opendc.experiments.base.runner
+import getWorkloadType
import me.tongfei.progressbar.ProgressBarBuilder
import me.tongfei.progressbar.ProgressBarStyle
-import org.opendc.compute.carbon.getCarbonTrace
import org.opendc.compute.service.ComputeService
import org.opendc.compute.service.scheduler.ComputeSchedulerEnum
import org.opendc.compute.service.scheduler.createComputeScheduler
@@ -34,9 +34,7 @@ import org.opendc.compute.simulator.provisioner.setupComputeService
import org.opendc.compute.simulator.provisioner.setupHosts
import org.opendc.compute.telemetry.export.parquet.ParquetComputeMonitor
import org.opendc.compute.workload.ComputeWorkloadLoader
-import org.opendc.experiments.base.models.portfolio.Portfolio
import org.opendc.experiments.base.models.scenario.Scenario
-import org.opendc.experiments.base.models.scenario.getWorkloadType
import org.opendc.simulator.kotlin.runSimulation
import java.io.File
import java.time.Duration
@@ -44,17 +42,6 @@ import java.util.Random
import java.util.concurrent.ForkJoinPool
import java.util.stream.LongStream
-public fun runPortfolio(
- portfolio: Portfolio,
- parallelism: Int,
-) {
- val pool = ForkJoinPool(parallelism)
-
- for (scenario in portfolio.scenarios) {
- runScenario(scenario, pool)
- }
-}
-
/**
* Run scenario when no pool is available for parallel execution
*
@@ -62,11 +49,26 @@ public fun runPortfolio(
* @param parallelism The number of scenarios that can be run in parallel
*/
public fun runScenario(
- scenario: Scenario,
+ scenarios: List<Scenario>,
parallelism: Int,
) {
- val pool = ForkJoinPool(parallelism)
- runScenario(scenario, pool)
+ val ansiReset = "\u001B[0m"
+ val ansiGreen = "\u001B[32m"
+ val ansiBlue = "\u001B[34m"
+ clearOutputFolder()
+
+ for (scenario in scenarios) {
+ val pool = ForkJoinPool(parallelism)
+ println(
+ "\n\n$ansiGreen================================================================================$ansiReset",
+ )
+ println("$ansiBlue Running scenario: ${scenario.name} $ansiReset")
+ println("$ansiGreen================================================================================$ansiReset")
+ runScenario(
+ scenario,
+ pool,
+ )
+ }
}
/**
@@ -81,20 +83,14 @@ public fun runScenario(
pool: ForkJoinPool,
) {
val pb =
- ProgressBarBuilder()
- .setInitialMax(scenario.runs.toLong())
- .setStyle(ProgressBarStyle.ASCII)
- .setTaskName("Simulating...")
- .build()
+ ProgressBarBuilder().setInitialMax(scenario.runs.toLong()).setStyle(ProgressBarStyle.ASCII)
+ .setTaskName("Simulating...").build()
pool.submit {
- LongStream.range(0, scenario.runs.toLong())
- .parallel()
- .forEach {
- runScenario(scenario, scenario.initialSeed + it)
- pb.step()
- }
-
+ LongStream.range(0, scenario.runs.toLong()).parallel().forEach {
+ runScenario(scenario, scenario.initialSeed + it)
+ pb.step()
+ }
pb.close()
}.join()
}
@@ -111,39 +107,93 @@ public fun runScenario(
): Unit =
runSimulation {
val serviceDomain = "compute.opendc.org"
-
Provisioner(dispatcher, seed).use { provisioner ->
-
provisioner.runSteps(
- setupComputeService(serviceDomain, { createComputeScheduler(ComputeSchedulerEnum.Mem, Random(it.seeder.nextLong())) }),
+ setupComputeService(
+ serviceDomain,
+ { createComputeScheduler(ComputeSchedulerEnum.Mem, Random(it.seeder.nextLong())) },
+ ),
setupHosts(serviceDomain, scenario.topology, optimize = true),
)
- val carbonTrace = getCarbonTrace(scenario.carbonTracePath)
-
val partition = scenario.name + "/seed=$seed"
-
val workloadLoader = ComputeWorkloadLoader(File(scenario.workload.pathToFile))
val vms = getWorkloadType(scenario.workload.type).resolve(workloadLoader, Random(seed))
-
val startTime = Duration.ofMillis(vms.minOf { it.startTime }.toEpochMilli())
- provisioner.runStep(
- registerComputeMonitor(
- serviceDomain,
- ParquetComputeMonitor(
- File(scenario.outputFolder),
- partition,
- bufferSize = 4096,
- ),
- Duration.ofSeconds(scenario.exportModel.exportInterval),
- startTime,
- carbonTrace,
- ),
- )
+ // saveInSeedFolder(provisioner, serviceDomain, scenario, seed, partition, startTime)
+ // XOR
+ saveInOutputFolder(provisioner, serviceDomain, scenario, startTime)
val service = provisioner.registry.resolve(serviceDomain, ComputeService::class.java)!!
-
service.replay(timeSource, vms, seed, failureModel = scenario.failureModel)
}
}
+
+/**
+ * Saves the simulation results into a per-seed folder when the simulation is run. This is useful for debugging purposes.
+ * @param provisioner The provisioner used to setup and run the simulation.
+ * @param serviceDomain The domain of the compute service.
+ * @param scenario The scenario being run in the simulation.
+ * @param seed The seed used for randomness in the simulation.
+ * @param partition The partition name for the output data.
+ * @param startTime The start time of the simulation.
+
+ */
+public fun saveInSeedFolder(
+ provisioner: Provisioner,
+ serviceDomain: String,
+ scenario: Scenario,
+ seed: Long,
+ partition: String,
+ startTime: Duration,
+) {
+ provisioner.runStep(
+ registerComputeMonitor(
+ serviceDomain,
+ ParquetComputeMonitor(
+ File(scenario.outputFolder),
+ partition,
+ bufferSize = 4096,
+ ),
+ Duration.ofSeconds(scenario.exportModel.exportInterval),
+ startTime,
+ ),
+ )
+}
+
+/**
+ * Saves the simulation results into a specific output folder received from the input.
+ *
+ * @param provisioner The provisioner used to setup and run the simulation.
+ * @param serviceDomain The domain of the compute service.
+ * @param scenario The scenario being run.
+ * @param startTime The start time of the simulation.
+ */
+public fun saveInOutputFolder(
+ provisioner: Provisioner,
+ serviceDomain: String,
+ scenario: Scenario,
+ startTime: Duration,
+) {
+ provisioner.runStep(
+ registerComputeMonitor(
+ serviceDomain,
+ ParquetComputeMonitor(
+ File("output/simulation-results/"),
+ scenario.name,
+ bufferSize = 4096,
+ ),
+ Duration.ofSeconds(scenario.exportModel.exportInterval),
+ startTime,
+ ),
+ )
+}
+
+/**
+ * Utility function, in case we want to delete the previous simulation results.
+ */
+public fun clearOutputFolder() {
+ val outputFolderPath = "output/simulation-results/"
+ if (File(outputFolderPath).exists()) File(outputFolderPath).deleteRecursively()
+}