| author | Fabian Mastenbroek <mail.fabianm@gmail.com> | 2021-09-17 17:12:20 +0200 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2021-09-17 17:12:20 +0200 |
| commit | c1b9719aad10566c9d17f9eb757236c58a602b89 | |
| tree | 2755ea2d44256116e6dc08a57a64b37a36331249 /opendc-experiments/opendc-experiments-energy21/src/main | |
| parent | 2cd3bd18e548a72d64afe0e7f59487f4747d722f | |
| parent | e2537c59bef0645b948e92553cc5a42a8c0f7256 | |
merge: Standardize simulator metrics
This pull request standardizes the metrics emitted by the simulator based on the OpenTelemetry conventions.
From now on, all metrics exposed by the simulator are exported through OpenTelemetry,
following its recommended practices for naming, collection, and export.
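
As a rough illustration of the convention, the sketch below registers a counter through the OpenTelemetry `Meter` API with a lower-case, dot-separated name and an explicit unit. The instrumentation scope and metric names are hypothetical (not taken from the OpenDC code base), and the builder calls follow the stable OpenTelemetry Java metrics API, which may differ slightly from the 1.6.0 alpha metrics artifact referenced in this change.

```kotlin
import io.opentelemetry.api.metrics.MeterProvider

// Hypothetical example: expose a counter following the OpenTelemetry naming conventions.
// Neither the instrumentation scope nor the metric name below comes from OpenDC itself.
fun recordSubmission(meterProvider: MeterProvider) {
    val meter = meterProvider.get("org.opendc.compute.service")
    val submitted = meter.counterBuilder("scheduler.servers.submitted")
        .setDescription("Number of servers submitted to the scheduler")
        .setUnit("1")
        .build()
    submitted.add(1) // record a single submission
}
```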
**Implementation Notes**
- Improve ParquetDataWriter implementation
- Simplify CoroutineMetricReader
- Create a separate MeterProvider per service/host (see the sketch after this list)
- Standardize compute scheduler metrics
- Standardize SimHost metrics
- Use logical types for Parquet output columns
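
The sketch below illustrates the "separate `MeterProvider` per service/host" item: each simulated host gets its own provider whose `Resource` identifies it, so exported metrics can be attributed to that host. This is a minimal sketch against the stable OpenTelemetry SDK; the builder surface of the 1.6.0 alpha metrics SDK may differ, and the `host.name` attribute is an assumption rather than the exact attribute OpenDC uses.

```kotlin
import io.opentelemetry.api.common.AttributeKey
import io.opentelemetry.api.common.Attributes
import io.opentelemetry.sdk.metrics.SdkMeterProvider
import io.opentelemetry.sdk.resources.Resource

// Minimal sketch: build a dedicated MeterProvider for a single simulated host,
// tagging its Resource so that metrics can be attributed to that host.
fun meterProviderForHost(hostName: String): SdkMeterProvider {
    val hostResource = Resource.getDefault().merge(
        Resource.create(Attributes.of(AttributeKey.stringKey("host.name"), hostName))
    )
    return SdkMeterProvider.builder()
        .setResource(hostResource)
        .build()
}
```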
**External Dependencies**
- Update to OpenTelemetry 1.6.0
**Breaking API Changes**
- Instead of supplying a `Meter` instance, key classes are now responsible for constructing
  a `Meter` instance from the supplied `MeterProvider` (a sketch follows below this list).
- The export format has been changed to match the new set of emitted metrics
- The energy experiments shell has been removed
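
To make the first breaking change concrete, here is a hypothetical before/after of a class constructor; the class and scope names are illustrative only and do not reflect the exact OpenDC signatures.

```kotlin
import io.opentelemetry.api.metrics.Meter
import io.opentelemetry.api.metrics.MeterProvider

// Before: the caller constructed the Meter and passed it in.
class OldStyleHost(private val meter: Meter)

// After: the class receives a MeterProvider and builds its own Meter.
class NewStyleHost(meterProvider: MeterProvider) {
    private val meter: Meter = meterProvider.get("org.opendc.compute.simulator.host")
}
```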
Diffstat (limited to 'opendc-experiments/opendc-experiments-energy21/src/main')
2 files changed, 0 insertions, 222 deletions
```diff
diff --git a/opendc-experiments/opendc-experiments-energy21/src/main/kotlin/org/opendc/experiments/energy21/EnergyExperiment.kt b/opendc-experiments/opendc-experiments-energy21/src/main/kotlin/org/opendc/experiments/energy21/EnergyExperiment.kt
deleted file mode 100644
index d9194969..00000000
--- a/opendc-experiments/opendc-experiments-energy21/src/main/kotlin/org/opendc/experiments/energy21/EnergyExperiment.kt
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Copyright (c) 2021 AtLarge Research
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package org.opendc.experiments.energy21
-
-import com.typesafe.config.ConfigFactory
-import io.opentelemetry.api.metrics.MeterProvider
-import io.opentelemetry.sdk.metrics.export.MetricProducer
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.coroutineScope
-import mu.KotlinLogging
-import org.opendc.compute.service.ComputeService
-import org.opendc.compute.service.scheduler.ComputeScheduler
-import org.opendc.compute.service.scheduler.FilterScheduler
-import org.opendc.compute.service.scheduler.filters.ComputeFilter
-import org.opendc.compute.service.scheduler.filters.RamFilter
-import org.opendc.compute.service.scheduler.filters.VCpuFilter
-import org.opendc.compute.simulator.SimHost
-import org.opendc.experiments.capelin.*
-import org.opendc.experiments.capelin.export.parquet.ParquetExportMonitor
-import org.opendc.experiments.capelin.trace.StreamingParquetTraceReader
-import org.opendc.harness.dsl.Experiment
-import org.opendc.harness.dsl.anyOf
-import org.opendc.simulator.compute.kernel.SimFairShareHypervisorProvider
-import org.opendc.simulator.compute.kernel.cpufreq.PerformanceScalingGovernor
-import org.opendc.simulator.compute.model.MachineModel
-import org.opendc.simulator.compute.model.MemoryUnit
-import org.opendc.simulator.compute.model.ProcessingNode
-import org.opendc.simulator.compute.model.ProcessingUnit
-import org.opendc.simulator.compute.power.*
-import org.opendc.simulator.core.runBlockingSimulation
-import org.opendc.simulator.resources.SimResourceInterpreter
-import org.opendc.telemetry.compute.collectServiceMetrics
-import org.opendc.telemetry.compute.withMonitor
-import java.io.File
-import java.time.Clock
-import java.util.*
-
-/**
- * Experiments for the OpenDC project on Energy modeling.
- */
-public class EnergyExperiment : Experiment("Energy Modeling 2021") {
-    /**
-     * The logger for this portfolio instance.
-     */
-    private val logger = KotlinLogging.logger {}
-
-    /**
-     * The configuration to use.
-     */
-    private val config = ConfigFactory.load().getConfig("opendc.experiments.energy21")
-
-    /**
-     * The traces to test.
-     */
-    private val trace by anyOf("solvinity")
-
-    /**
-     * The power models to test.
-     */
-    private val powerModel by anyOf(PowerModelType.LINEAR, PowerModelType.CUBIC, PowerModelType.INTERPOLATION)
-
-    override fun doRun(repeat: Int): Unit = runBlockingSimulation {
-        val allocationPolicy = FilterScheduler(
-            filters = listOf(ComputeFilter(), VCpuFilter(1.0), RamFilter(1.0)),
-            weighers = listOf(),
-            subsetSize = Int.MAX_VALUE
-        )
-
-        val meterProvider: MeterProvider = createMeterProvider(clock)
-        val monitor = ParquetExportMonitor(File(config.getString("output-path")), "power_model=$powerModel/run_id=$repeat", 4096)
-        val trace = StreamingParquetTraceReader(File(config.getString("trace-path"), trace))
-
-        withComputeService(clock, meterProvider, allocationPolicy) { scheduler ->
-            withMonitor(scheduler, clock, meterProvider as MetricProducer, monitor) {
-                processTrace(
-                    clock,
-                    trace,
-                    scheduler,
-                    monitor
-                )
-            }
-        }
-
-        val monitorResults = collectServiceMetrics(clock.millis(), meterProvider as MetricProducer)
-        logger.debug {
-            "Finish SUBMIT=${monitorResults.instanceCount} " +
-                "FAIL=${monitorResults.failedInstanceCount} " +
-                "QUEUE=${monitorResults.queuedInstanceCount} " +
-                "RUNNING=${monitorResults.runningInstanceCount}"
-        }
-    }
-
-    /**
-     * Construct the environment for a simulated compute service..
-     */
-    public suspend fun withComputeService(
-        clock: Clock,
-        meterProvider: MeterProvider,
-        scheduler: ComputeScheduler,
-        block: suspend CoroutineScope.(ComputeService) -> Unit
-    ): Unit = coroutineScope {
-        val model = createMachineModel()
-        val interpreter = SimResourceInterpreter(coroutineContext, clock)
-        val hosts = List(64) { id ->
-            SimHost(
-                UUID(0, id.toLong()),
-                "node-$id",
-                model,
-                emptyMap(),
-                coroutineContext,
-                interpreter,
-                meterProvider.get("opendc-compute-simulator"),
-                SimFairShareHypervisorProvider(),
-                PerformanceScalingGovernor(),
-                powerModel.driver
-            )
-        }
-
-        val serviceMeter = meterProvider.get("opendc-compute")
-        val service =
-            ComputeService(coroutineContext, clock, serviceMeter, scheduler)
-
-        for (host in hosts) {
-            service.addHost(host)
-        }
-
-        try {
-            block(this, service)
-        } finally {
-            service.close()
-            hosts.forEach(SimHost::close)
-        }
-    }
-
-    /**
-     * The machine model based on: https://www.spec.org/power_ssj2008/results/res2020q1/power_ssj2008-20191125-01012.html
-     */
-    private fun createMachineModel(): MachineModel {
-        val node = ProcessingNode("AMD", "am64", "EPYC 7742", 64)
-        val cpus = List(node.coreCount) { id -> ProcessingUnit(node, id, 3400.0) }
-        val memory = List(8) { MemoryUnit("Samsung", "Unknown", 2933.0, 16_000) }
-
-        return MachineModel(cpus, memory)
-    }
-
-    /**
-     * The power models to test.
-     */
-    public enum class PowerModelType {
-        CUBIC {
-            override val driver: PowerDriver = SimplePowerDriver(CubicPowerModel(206.0, 56.4))
-        },
-
-        LINEAR {
-            override val driver: PowerDriver = SimplePowerDriver(LinearPowerModel(206.0, 56.4))
-        },
-
-        SQRT {
-            override val driver: PowerDriver = SimplePowerDriver(SqrtPowerModel(206.0, 56.4))
-        },
-
-        SQUARE {
-            override val driver: PowerDriver = SimplePowerDriver(SquarePowerModel(206.0, 56.4))
-        },
-
-        INTERPOLATION {
-            override val driver: PowerDriver = SimplePowerDriver(
-                InterpolationPowerModel(
-                    listOf(56.4, 100.0, 107.0, 117.0, 127.0, 138.0, 149.0, 162.0, 177.0, 191.0, 206.0)
-                )
-            )
-        },
-
-        MSE {
-            override val driver: PowerDriver = SimplePowerDriver(MsePowerModel(206.0, 56.4, 1.4))
-        },
-
-        ASYMPTOTIC {
-            override val driver: PowerDriver = SimplePowerDriver(AsymptoticPowerModel(206.0, 56.4, 0.3, false))
-        },
-
-        ASYMPTOTIC_DVFS {
-            override val driver: PowerDriver = SimplePowerDriver(AsymptoticPowerModel(206.0, 56.4, 0.3, true))
-        };
-
-        public abstract val driver: PowerDriver
-    }
-}
diff --git a/opendc-experiments/opendc-experiments-energy21/src/main/resources/application.conf b/opendc-experiments/opendc-experiments-energy21/src/main/resources/application.conf
deleted file mode 100644
index 263da0fe..00000000
--- a/opendc-experiments/opendc-experiments-energy21/src/main/resources/application.conf
+++ /dev/null
@@ -1,14 +0,0 @@
-# Default configuration for the energy experiments
-opendc.experiments.energy21 {
-    # Path to the directory containing the input traces
-    trace-path = input/traces
-
-    # Path to the output directory to write the results to
-    output-path = output
-}
-
-opendc.experiments.capelin {
-    env-path = input/environments/
-    trace-path = input/traces/
-    output-path = output
-}
```
