Diffstat (limited to 'opendc-experiments/opendc-experiments-base')
9 files changed, 414 insertions, 66 deletions
diff --git a/opendc-experiments/opendc-experiments-base/build.gradle.kts b/opendc-experiments/opendc-experiments-base/build.gradle.kts
index c75af87b..30510785 100644
--- a/opendc-experiments/opendc-experiments-base/build.gradle.kts
+++ b/opendc-experiments/opendc-experiments-base/build.gradle.kts
@@ -37,11 +37,13 @@ dependencies {
    implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.6.0")
    implementation(libs.progressbar)
+    implementation(project(mapOf("path" to ":opendc-simulator:opendc-simulator-core")))
+    implementation(project(mapOf("path" to ":opendc-compute:opendc-compute-workload")))
    implementation(project(mapOf("path" to ":opendc-compute:opendc-compute-telemetry")))
-    implementation(project(mapOf("path" to ":opendc-simulator:opendc-simulator-core")))
    implementation(project(mapOf("path" to ":opendc-compute:opendc-compute-topology")))
    implementation(project(mapOf("path" to ":opendc-compute:opendc-compute-carbon")))
+    implementation(project(mapOf("path" to ":opendc-compute:opendc-compute-failure")))
    runtimeOnly(libs.log4j.core)
    runtimeOnly(libs.log4j.slf4j)

diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt
index e1305b3f..970754b0 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioHelpers.kt
@@ -20,11 +20,11 @@
 * SOFTWARE.
 */

-@file:JvmName("TraceHelpers")
+@file:JvmName("ScenarioHelpers")

package org.opendc.experiments.base.runner

-import FailureModelSpec
+import CheckpointModelSpec
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
@@ -33,9 +33,14 @@ import kotlinx.coroutines.yield
import org.opendc.compute.api.Server
import org.opendc.compute.api.ServerState
import org.opendc.compute.api.ServerWatcher
+import org.opendc.compute.failure.models.FailureModel
import org.opendc.compute.service.ComputeService
import org.opendc.compute.workload.VirtualMachine
+import org.opendc.experiments.base.scenario.specs.FailureModelSpec
+import org.opendc.experiments.base.scenario.specs.createFailureModel
import java.time.InstantSource
+import java.util.Random
+import kotlin.coroutines.coroutineContext
import kotlin.math.max

/**
@@ -45,7 +50,7 @@ import kotlin.math.max
 */
public class RunningServerWatcher : ServerWatcher {
    // TODO: make this changeable
-    private val unlockStates: List<ServerState> = listOf(ServerState.TERMINATED, ServerState.ERROR, ServerState.DELETED)
+    private val unlockStates: List<ServerState> = listOf(ServerState.DELETED, ServerState.TERMINATED)

    private val mutex: Mutex = Mutex()

@@ -80,18 +85,25 @@ public suspend fun ComputeService.replay(
    clock: InstantSource,
    trace: List<VirtualMachine>,
    failureModelSpec: FailureModelSpec? = null,
+    checkpointModelSpec: CheckpointModelSpec? = null,
    seed: Long = 0,
    submitImmediately: Boolean = false,
) {
-    // TODO: add failureModel functionality
    val client = newClient()
+    // Create a failure model based on the failureModelSpec, if not null, otherwise set failureModel to null
+    val failureModel: FailureModel? =
+        failureModelSpec?.let {
+            createFailureModel(coroutineContext, clock, this, Random(seed), it)
+        }
+
    // Create new image for the virtual machine
    val image = client.newImage("vm-image")

    try {
        coroutineScope {
-            // TODO: start failure model when implemented
+            // Start the fault injector
+            failureModel?.start()

            var simulationOffset = Long.MIN_VALUE

@@ -109,7 +121,17 @@ public suspend fun ComputeService.replay(
                    delay(max(0, (start - now - simulationOffset)))
                }

-                val workload = entry.trace.createWorkload(start)
+                val checkpointTime = checkpointModelSpec?.checkpointTime ?: 0L
+                val checkpointWait = checkpointModelSpec?.checkpointWait ?: 0L
+
+//                val workload = SimRuntimeWorkload(
+//                    entry.duration,
+//                    1.0,
+//                    checkpointTime,
+//                    checkpointWait
+//                )
+
+                val workload = entry.trace.createWorkload(start, checkpointTime, checkpointWait)
                val meta = mutableMapOf<String, Any>("workload" to workload)

                launch {
@@ -140,7 +162,7 @@ public suspend fun ComputeService.replay(
        }
        yield()
    } finally {
-        // TODO: close failure model when implemented
+        failureModel?.close()
        client.close()
    }
}
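With these changes a caller opts into fault injection and checkpointing directly at the replay entry point. A minimal sketch of such a call, where service, clock and vms stand in for the ComputeService, InstantSource and List<VirtualMachine> already available in the runner, the trace path is a placeholder that must point to an existing failure trace, and the call is made from inside the simulation's coroutine scope (replay is a suspend function):

    val failureModelSpec: FailureModelSpec = TraceBasedFailureModelSpec("traces/failures.parquet")
    val checkpointModelSpec = CheckpointModelSpec(checkpointWait = 3_600_000L, checkpointTime = 300_000L) // wait 1 h, checkpoint for 5 min

    service.replay(
        clock,
        vms,
        failureModelSpec = failureModelSpec,
        checkpointModelSpec = checkpointModelSpec,
        seed = 42L,
    )

Passing null for either spec keeps the previous behaviour: no fault injector is started and the workload is created without checkpointing.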
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
index d6ee5d72..cb4fdd46 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/runner/ScenarioRunner.kt
@@ -116,7 +116,7 @@ public fun runScenario(
    val serviceDomain = "compute.opendc.org"

    Provisioner(dispatcher, seed).use { provisioner ->
-        val topology = clusterTopology(scenario.topology.pathToFile, Random(seed))
+        val topology = clusterTopology(scenario.topologySpec.pathToFile, Random(seed))
        provisioner.runSteps(
            setupComputeService(
                serviceDomain,
@@ -125,15 +125,15 @@
            setupHosts(serviceDomain, topology, optimize = true),
        )

-        val workloadLoader = ComputeWorkloadLoader(File(scenario.workload.pathToFile))
-        val vms = getWorkloadType(scenario.workload.type).resolve(workloadLoader, Random(seed))
+        val workloadLoader = ComputeWorkloadLoader(File(scenario.workloadSpec.pathToFile))
+        val vms = getWorkloadType(scenario.workloadSpec.type).resolve(workloadLoader, Random(seed))
        val carbonTrace = getCarbonTrace(scenario.carbonTracePath)
        val startTime = Duration.ofMillis(vms.minOf { it.startTime }.toEpochMilli())
        addExportModel(provisioner, serviceDomain, scenario, seed, startTime, carbonTrace, index)

        val service = provisioner.registry.resolve(serviceDomain, ComputeService::class.java)!!
-        service.replay(timeSource, vms, failureModelSpec = scenario.failureModel, seed = seed)
+        service.replay(timeSource, vms, failureModelSpec = scenario.failureModelSpec, seed = seed)
    }
}

@@ -164,7 +164,7 @@ public fun addExportModel(
                "seed=$seed",
                bufferSize = 4096,
            ),
-            Duration.ofSeconds(scenario.exportModel.exportInterval),
+            Duration.ofSeconds(scenario.exportModelSpec.exportInterval),
            startTime,
            carbonTrace,
        ),

@@ -184,10 +184,10 @@ public fun clearOutputFolder(outputFolderPath: String) {
 * @param folderPath The path to the output folder
 */
private fun setupOutputFolderStructure(folderPath: String) {
-    val trackrPath = folderPath + "/trackr.json"
-    val simulationAnalysisPath = folderPath + "/simulation-analysis/"
-    val energyAnalysisPath = simulationAnalysisPath + "/power_draw/"
-    val emissionsAnalysisPath = simulationAnalysisPath + "/carbon_emission/"
+    val trackrPath = "$folderPath/trackr.json"
+    val simulationAnalysisPath = "$folderPath/simulation-analysis/"
+    val energyAnalysisPath = "$simulationAnalysisPath/power_draw/"
+    val emissionsAnalysisPath = "$simulationAnalysisPath/carbon_emission/"

    File(folderPath).mkdir()
    File(trackrPath).createNewFile()
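The setupOutputFolderStructure change above only swaps string concatenation for string templates; the layout created per experiment is unchanged. For orientation, a run whose output folder is output/<experiment-name> (name chosen purely for illustration) ends up with roughly:

    output/<experiment-name>/trackr.json
    output/<experiment-name>/simulation-analysis/power_draw/
    output/<experiment-name>/simulation-analysis/carbon_emission/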
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/Scenario.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/Scenario.kt
index 7f0308fc..02a8234d 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/Scenario.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/Scenario.kt
@@ -23,10 +23,11 @@
package org.opendc.experiments.base.scenario

import AllocationPolicySpec
+import CheckpointModelSpec
import ExportModelSpec
-import FailureModelSpec
import ScenarioTopologySpec
import WorkloadSpec
+import org.opendc.experiments.base.scenario.specs.FailureModelSpec

/**
 * A data class representing a scenario for a set of experiments.
@@ -41,15 +42,15 @@ import WorkloadSpec
 * @property runs The Int representing the number of runs of the scenario. It defaults to 1.
 * @property initialSeed The Int representing the initial seed of the scenario. It defaults to 0.
 */
-
public data class Scenario(
    var id: Int = -1,
-    val topology: ScenarioTopologySpec,
-    val workload: WorkloadSpec,
-    val allocationPolicy: AllocationPolicySpec,
-    val failureModel: FailureModelSpec?,
+    val topologySpec: ScenarioTopologySpec,
+    val workloadSpec: WorkloadSpec,
+    val allocationPolicySpec: AllocationPolicySpec,
+    val failureModelSpec: FailureModelSpec?,
+    val checkpointModelSpec: CheckpointModelSpec?,
    val carbonTracePath: String? = null,
-    val exportModel: ExportModelSpec = ExportModelSpec(),
+    val exportModelSpec: ExportModelSpec = ExportModelSpec(),
    val outputFolder: String = "output",
    val name: String = "",
    val runs: Int = 1,
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ScenarioFactories.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ScenarioFactories.kt
index 19f8ebf0..e7b52c55 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ScenarioFactories.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ScenarioFactories.kt
@@ -22,7 +22,6 @@
package org.opendc.experiments.base.scenario

-import ScenarioTopologySpec
import org.opendc.experiments.base.scenario.specs.ScenarioSpec
import java.io.File

@@ -60,35 +59,36 @@ public fun getScenarios(scenarioSpec: ScenarioSpec): List<Scenario> {
    val outputFolder = scenarioSpec.outputFolder + "/" + scenarioSpec.name
    File(outputFolder).mkdirs()

-    val trackrPath = outputFolder + "/trackr.json"
+    val trackrPath = "$outputFolder/trackr.json"
    File(trackrPath).createNewFile()

    val scenarios = mutableListOf<Scenario>()

-    var scenarioID = 0
-    for (scenarioTopologySpec in scenarioSpec.topologies) {
+    for ((scenarioID, scenarioTopologySpec) in scenarioSpec.topologies.withIndex()) {
        for (workloadSpec in scenarioSpec.workloads) {
            for (allocationPolicySpec in scenarioSpec.allocationPolicies) {
                for (failureModelSpec in scenarioSpec.failureModels) {
-                    for (carbonTracePath in scenarioSpec.carbonTracePaths) {
-                        for (exportModelSpec in scenarioSpec.exportModels) {
-                            val scenario =
-                                Scenario(
-                                    id = scenarioID,
-                                    topology = scenarioTopologySpec,
-                                    workload = workloadSpec,
-                                    allocationPolicy = allocationPolicySpec,
-                                    failureModel = failureModelSpec,
-                                    carbonTracePath = carbonTracePath,
-                                    exportModel = exportModelSpec,
-                                    outputFolder = outputFolder,
-                                    name = scenarioID.toString(),
-                                    runs = scenarioSpec.runs,
-                                    initialSeed = scenarioSpec.initialSeed,
-                                )
-                            trackScenario(scenarioSpec, outputFolder, scenario, scenarioTopologySpec)
-                            scenarios.add(scenario)
-                            scenarioID++
+                    for (checkpointModelSpec in scenarioSpec.checkpointModels) {
+                        for (carbonTracePath in scenarioSpec.carbonTracePaths) {
+                            for (exportModelSpec in scenarioSpec.exportModels) {
+                                val scenario =
+                                    Scenario(
+                                        id = scenarioID,
+                                        topologySpec = scenarioTopologySpec,
+                                        workloadSpec = workloadSpec,
+                                        allocationPolicySpec = allocationPolicySpec,
+                                        failureModelSpec = failureModelSpec,
+                                        checkpointModelSpec = checkpointModelSpec,
+                                        carbonTracePath = carbonTracePath,
+                                        exportModelSpec = exportModelSpec,
+                                        outputFolder = outputFolder,
+                                        name = scenarioID.toString(),
+                                        runs = scenarioSpec.runs,
+                                        initialSeed = scenarioSpec.initialSeed,
+                                    )
+                                trackScenario(scenarioSpec, outputFolder, scenario)
+                                scenarios.add(scenario)
+                            }
                        }
                    }
                }

@@ -112,19 +112,19 @@ public fun trackScenario(
    scenarioSpec: ScenarioSpec,
    outputFolder: String,
    scenario: Scenario,
-    topologySpec: ScenarioTopologySpec,
) {
-    val trackrPath = outputFolder + "/trackr.json"
+    val trackrPath = "$outputFolder/trackr.json"
    scenarioWriter.write(
        ScenarioSpec(
            id = scenario.id,
            name = scenarioSpec.name,
-            topologies = listOf(topologySpec),
-            workloads = listOf(scenario.workload),
-            allocationPolicies = listOf(scenario.allocationPolicy),
-            // when implemented, add failure models here
+            topologies = listOf(scenario.topologySpec),
+            workloads = listOf(scenario.workloadSpec),
+            allocationPolicies = listOf(scenario.allocationPolicySpec),
+            failureModels = listOf(scenario.failureModelSpec),
+            checkpointModels = listOf(scenario.checkpointModelSpec),
            carbonTracePaths = listOf(scenario.carbonTracePath),
-            exportModels = listOf(scenario.exportModel),
+            exportModels = listOf(scenario.exportModelSpec),
            outputFolder = scenario.outputFolder,
            initialSeed = scenario.initialSeed,
            runs = scenario.runs,

diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ScenarioReader.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ScenarioReader.kt
index 19ce5a14..3bbd500b 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ScenarioReader.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/ScenarioReader.kt
@@ -28,14 +28,25 @@ import kotlinx.serialization.json.decodeFromStream
import org.opendc.experiments.base.scenario.specs.ScenarioSpec
import java.io.File
import java.io.InputStream
+import java.nio.file.Path
+import kotlin.io.path.inputStream

public class ScenarioReader {
+//    private val jsonReader = Json { serializersModule = failureModule }
+    private val jsonReader = Json
+
    @OptIn(ExperimentalSerializationApi::class)
    public fun read(file: File): ScenarioSpec {
        val input = file.inputStream()

-        val obj = Json.decodeFromStream<ScenarioSpec>(input)
-        return obj
+        return jsonReader.decodeFromStream<ScenarioSpec>(input)
+    }
+
+    @OptIn(ExperimentalSerializationApi::class)
+    public fun read(path: Path): ScenarioSpec {
+        val input = path.inputStream()
+
+        return jsonReader.decodeFromStream<ScenarioSpec>(input)
    }

    /**
@@ -43,7 +54,6 @@ public class ScenarioReader {
     */
    @OptIn(ExperimentalSerializationApi::class)
    public fun read(input: InputStream): ScenarioSpec {
-        val obj = Json.decodeFromStream<ScenarioSpec>(input)
-        return obj
+        return jsonReader.decodeFromStream<ScenarioSpec>(input)
    }
}

diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/CheckpointModelSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/CheckpointModelSpec.kt
new file mode 100644
index 00000000..9432fc9b
--- /dev/null
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/CheckpointModelSpec.kt
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2024 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+import kotlinx.serialization.Serializable
+
+@Serializable
+public data class CheckpointModelSpec(
+    val checkpointWait: Long = 60 * 60 * 1000,
+    val checkpointTime: Long = 5 * 60 * 1000,
+)
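Read together, the reader and the factory above turn a single scenario file into the full cross product of the configured lists. A minimal sketch of how they might be driven (the file name and the counts in the comment are placeholders for illustration):

    // With e.g. 2 topologies, 1 workload, 1 allocation policy, 2 failure models and
    // 1 checkpoint model, getScenarios yields 2 * 1 * 1 * 2 * 1 = 4 Scenario objects.
    // As a side effect it also creates <outputFolder>/<name>/trackr.json for tracking.
    val scenarioSpec = ScenarioReader().read(File("scenarios/failures-and-checkpoints.json"))
    val scenarios = getScenarios(scenarioSpec)
    println("Expanded into ${scenarios.size} scenarios")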
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/FailureModelSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/FailureModelSpec.kt
index 99620366..a27e77bc 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/FailureModelSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/FailureModelSpec.kt
@@ -20,18 +20,301 @@
 * SOFTWARE.
 */

+package org.opendc.experiments.base.scenario.specs
+
+/*
+ * Copyright (c) 2024 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
+import org.apache.commons.math3.distribution.ConstantRealDistribution
+import org.apache.commons.math3.distribution.ExponentialDistribution
+import org.apache.commons.math3.distribution.GammaDistribution
+import org.apache.commons.math3.distribution.LogNormalDistribution
+import org.apache.commons.math3.distribution.NormalDistribution
+import org.apache.commons.math3.distribution.ParetoDistribution
+import org.apache.commons.math3.distribution.RealDistribution
+import org.apache.commons.math3.distribution.UniformRealDistribution
+import org.apache.commons.math3.distribution.WeibullDistribution
+import org.apache.commons.math3.random.Well19937c
+import org.opendc.compute.failure.models.FailureModel
+import org.opendc.compute.failure.models.SampleBasedFailureModel
+import org.opendc.compute.failure.models.TraceBasedFailureModel
+import org.opendc.compute.failure.prefab.FailurePrefab
+import org.opendc.compute.failure.prefab.createFailureModelPrefab
+import org.opendc.compute.service.ComputeService
+import java.io.File
+import java.time.InstantSource
+import kotlin.coroutines.CoroutineContext

/**
- * specification describing the failure model
+ * Specifications of the different Failure models
+ * There are three types of Specs that can be used by using their SerialName as the type.
 *
- * @property failureInterval The interval between failures in s. Should be 0.0 or higher
+ * @constructor Create empty Failure model spec
 */
+
+@Serializable
+public sealed interface FailureModelSpec {
+    public var name: String
+}
+
+/**
+ * A failure model spec for failure models based on a failure trace.
+ *
+ * @property pathToFile Path to the parquet file that contains the failure trace
+ */
+@Serializable
+@SerialName("trace-based")
+public data class TraceBasedFailureModelSpec(
+    public val pathToFile: String,
+) : FailureModelSpec {
+    override var name: String = File(pathToFile).nameWithoutExtension
+
+    init {
+        require(File(pathToFile).exists()) { "Path to file $pathToFile does not exist" }
+    }
+}
+
+/**
+ * A specification for a failure model that is already present in OpenDC.
+ *
+ * @property prefabName The name of the prefab. It needs to be valid [FailurePrefab]
+ */
+@Serializable
+@SerialName("prefab")
+public data class PrefabFailureModelSpec(
+    public val prefabName: FailurePrefab,
+) : FailureModelSpec {
+    override var name: String = prefabName.toString()
+}
+
+/**
+ * Specification of a custom failure model that is defined by three distributions to sample from.
+ * Distributions are defined using a [DistributionSpec].
+ *
+ * @property iatSampler Sampler for the time between failures defined in hours
+ * @property durationSampler Sampler for the time of a failure defined in hours
+ * @property nohSampler Sampler for ratio of hosts that fail defined as a double between 0.0 and 1.0
+ * @constructor Create empty Custom failure model spec
+ */
+@Serializable
+@SerialName("custom")
+public data class CustomFailureModelSpec(
+    public val iatSampler: DistributionSpec,
+    public val durationSampler: DistributionSpec,
+    public val nohSampler: DistributionSpec,
+) : FailureModelSpec {
+    override var name: String = "custom"
+}
+
+/**
+ * Specifications of the different Distributions that can be used to create a [CustomFailureModelSpec]
+ * All [DistributionSpec]s have a different definition based on the variables they need to function.
+ * Available [DistributionSpec]s are:
+ * - [ConstantDistributionSpec]
+ * - [ExponentialDistributionSpec]
+ * - [GammaDistributionSpec]
+ * - [LogNormalDistributionSpec]
+ * - [NormalDistributionSpec]
+ * - [ParetoDistributionSpec]
+ * - [UniformDistributionSpec]
+ * - [WeibullDistributionSpec]
+ */
+
+@Serializable
+public sealed interface DistributionSpec
+
@Serializable
-public data class FailureModelSpec(
-    val failureInterval: Double = 0.0,
-) {
+@SerialName("constant")
+public data class ConstantDistributionSpec(
+    public val value: Double,
+) : DistributionSpec {
    init {
-        require(failureInterval >= 0.0) { "failure frequency cannot be lower than 0" }
+        require(value > 0.0) { "Value must be greater than 0.0" }
+    }
+}
+
+@Serializable
+@SerialName("exponential")
+public data class ExponentialDistributionSpec(
+    public val mean: Double,
+) : DistributionSpec
+
+@Serializable
+@SerialName("gamma")
+public data class GammaDistributionSpec(
+    public val shape: Double,
+    public val scale: Double,
+) : DistributionSpec
+
+@Serializable
+@SerialName("log-normal")
+public data class LogNormalDistributionSpec(
+    public val scale: Double,
+    public val shape: Double,
+) : DistributionSpec
+
+@Serializable
+@SerialName("normal")
+public data class NormalDistributionSpec(
+    public val mean: Double,
+    public val std: Double,
+) : DistributionSpec
+
+@Serializable
+@SerialName("pareto")
+public data class ParetoDistributionSpec(
+    public val scale: Double,
+    public val shape: Double,
+) : DistributionSpec
+
+@Serializable
+@SerialName("uniform")
+public data class UniformDistributionSpec(
+    public val upper: Double,
+    public val lower: Double,
+) : DistributionSpec {
+    init {
+        require(upper > lower) { "Upper bound must be greater than the lower bound" }
+    }
+}
+
+@Serializable
+@SerialName("weibull")
+public data class WeibullDistributionSpec(
+    public val alpha: Double,
+    public val beta: Double,
+) : DistributionSpec
+
+/**
+ * Create a [FailureModel] based on the provided [FailureModelSpec]
+ *
+ * @param context
+ * @param clock
+ * @param service
+ * @param random
+ * @param failureModelSpec
+ * @return
+ */
+public fun createFailureModel(
+    context: CoroutineContext,
+    clock: InstantSource,
+    service: ComputeService,
+    random: java.util.random.RandomGenerator,
+    failureModelSpec: FailureModelSpec?,
+): FailureModel? {
+    return when (failureModelSpec) {
+        is PrefabFailureModelSpec -> createFailureModel(context, clock, service, random, failureModelSpec)
+        is CustomFailureModelSpec -> createFailureModel(context, clock, service, random, failureModelSpec)
+        is TraceBasedFailureModelSpec -> createFailureModel(context, clock, service, random, failureModelSpec)
+        else -> null
+    }
+}
+
+/**
+ * Create [FailureModel] based on the provided [PrefabFailureModelSpec]
+ *
+ * @param context
+ * @param clock
+ * @param service
+ * @param random
+ * @param failureModel
+ * @return
+ */
+public fun createFailureModel(
+    context: CoroutineContext,
+    clock: InstantSource,
+    service: ComputeService,
+    random: java.util.random.RandomGenerator,
+    failureModel: PrefabFailureModelSpec,
+): FailureModel {
+    return createFailureModelPrefab(context, clock, service, random, failureModel.prefabName)
+}
+
+/**
+ * Create [FailureModel] based on the provided [TraceBasedFailureModelSpec]
+ *
+ * @param context
+ * @param clock
+ * @param service
+ * @param random
+ * @param failureModel
+ * @return
+ */
+public fun createFailureModel(
+    context: CoroutineContext,
+    clock: InstantSource,
+    service: ComputeService,
+    random: java.util.random.RandomGenerator,
+    failureModel: TraceBasedFailureModelSpec,
+): FailureModel {
+    return TraceBasedFailureModel(context, clock, service, random, failureModel.pathToFile)
+}
+
+/**
+ * Create [FailureModel] based on the provided [CustomFailureModelSpec]
+ *
+ * @param context
+ * @param clock
+ * @param service
+ * @param random
+ * @param failureModel
+ * @return
+ */
+public fun createFailureModel(
+    context: CoroutineContext,
+    clock: InstantSource,
+    service: ComputeService,
+    random: java.util.random.RandomGenerator,
+    failureModel: CustomFailureModelSpec,
+): FailureModel {
+    val rng: org.apache.commons.math3.random.RandomGenerator = Well19937c(random.nextLong())
+
+    val iatSampler = createSampler(rng, failureModel.iatSampler)
+    val durationSampler = createSampler(rng, failureModel.durationSampler)
+    val nohSampler = createSampler(rng, failureModel.nohSampler)
+
+    return SampleBasedFailureModel(context, clock, service, random, iatSampler, durationSampler, nohSampler)
+}
+
+/**
+ * Create a [RealDistribution] to sample from based on the provided [DistributionSpec]
+ *
+ * @param rng
+ * @param distributionSpec
+ * @return
+ */
+public fun createSampler(
+    rng: org.apache.commons.math3.random.RandomGenerator,
+    distributionSpec: DistributionSpec,
+): RealDistribution {
+    return when (distributionSpec) {
+        is ConstantDistributionSpec -> ConstantRealDistribution(distributionSpec.value)
+        is ExponentialDistributionSpec -> ExponentialDistribution(rng, distributionSpec.mean)
+        is GammaDistributionSpec -> GammaDistribution(rng, distributionSpec.shape, distributionSpec.scale)
+        is LogNormalDistributionSpec -> LogNormalDistribution(rng, distributionSpec.scale, distributionSpec.shape)
+        is NormalDistributionSpec -> NormalDistribution(rng, distributionSpec.mean, distributionSpec.std)
+        is ParetoDistributionSpec -> ParetoDistribution(rng, distributionSpec.scale, distributionSpec.shape)
+        is UniformDistributionSpec -> UniformRealDistribution(rng, distributionSpec.lower, distributionSpec.upper)
+        is WeibullDistributionSpec -> WeibullDistribution(rng, distributionSpec.alpha, distributionSpec.beta)
    }
}
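The custom-model path is the most involved one: each DistributionSpec is mapped to a commons-math RealDistribution and the three resulting samplers feed a SampleBasedFailureModel. A minimal sketch of that wiring, using only the spec classes and createSampler introduced above (the concrete parameter values are made-up for illustration):

    val custom = CustomFailureModelSpec(
        iatSampler = ExponentialDistributionSpec(mean = 24.0),          // hours between failures
        durationSampler = ConstantDistributionSpec(value = 1.0),        // each failure lasts one hour
        nohSampler = UniformDistributionSpec(upper = 0.5, lower = 0.1), // 10-50% of the hosts fail
    )

    // createSampler turns each DistributionSpec into a commons-math RealDistribution:
    val rng = Well19937c(42)
    println(createSampler(rng, custom.iatSampler).sample())

In the simulation itself this step is performed by the createFailureModel overload for CustomFailureModelSpec, which seeds the Well19937c generator from the experiment's random source.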
diff --git a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioSpec.kt b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioSpec.kt
index 876a62cf..eb3cd04e 100644
--- a/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioSpec.kt
+++ b/opendc-experiments/opendc-experiments-base/src/main/kotlin/org/opendc/experiments/base/scenario/specs/ScenarioSpec.kt
@@ -23,8 +23,8 @@
package org.opendc.experiments.base.scenario.specs

import AllocationPolicySpec
+import CheckpointModelSpec
import ExportModelSpec
-import FailureModelSpec
import ScenarioTopologySpec
import WorkloadSpec
import kotlinx.serialization.Serializable
@@ -49,7 +49,8 @@ public data class ScenarioSpec(
    val topologies: List<ScenarioTopologySpec>,
    val workloads: List<WorkloadSpec>,
    val allocationPolicies: List<AllocationPolicySpec>,
-    val failureModels: List<FailureModelSpec> = listOf(FailureModelSpec()),
+    val failureModels: List<FailureModelSpec?> = listOf(null),
+    val checkpointModels: List<CheckpointModelSpec?> = listOf(null),
    val carbonTracePaths: List<String?> = listOf(null),
    val exportModels: List<ExportModelSpec> = listOf(ExportModelSpec()),
    val outputFolder: String = "output",
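Because FailureModelSpec is now a serializable sealed interface, the concrete model is picked in the scenario file through the polymorphic class discriminator. A minimal sketch of how the two new lists could look in a scenario JSON file (the values are hypothetical, and the "type" discriminator assumes kotlinx.serialization's default polymorphic encoding with the @SerialName values shown above; a "prefab" entry would reference one of the FailurePrefab constants instead):

    "failureModels": [
        { "type": "trace-based", "pathToFile": "traces/failures.parquet" },
        { "type": "custom",
          "iatSampler": { "type": "exponential", "mean": 24.0 },
          "durationSampler": { "type": "constant", "value": 1.0 },
          "nohSampler": { "type": "uniform", "lower": 0.1, "upper": 0.5 } }
    ],
    "checkpointModels": [ { "checkpointWait": 3600000, "checkpointTime": 300000 } ]

Both lists default to listOf(null), so existing scenario files without these keys keep running unchanged, with no failures injected and no checkpointing.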
