Diffstat (limited to 'opendc-compute/opendc-compute-telemetry')
16 files changed, 0 insertions, 2040 deletions
diff --git a/opendc-compute/opendc-compute-telemetry/build.gradle.kts b/opendc-compute/opendc-compute-telemetry/build.gradle.kts deleted file mode 100644 index e8692449..00000000 --- a/opendc-compute/opendc-compute-telemetry/build.gradle.kts +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2021 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -description = "OpenDC Compute Service implementation" - -// Build configuration -plugins { - `kotlin-library-conventions` - kotlin("plugin.serialization") version "1.9.22" -} - -dependencies { - api(projects.opendcCompute.opendcComputeApi) - api(projects.opendcTrace.opendcTraceParquet) - implementation(projects.opendcCommon) - implementation(libs.kotlin.logging) - implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.6.0") - implementation(project(mapOf("path" to ":opendc-trace:opendc-trace-parquet"))) - implementation(project(mapOf("path" to ":opendc-compute:opendc-compute-service"))) - implementation(project(mapOf("path" to ":opendc-compute:opendc-compute-carbon"))) - - testImplementation(projects.opendcSimulator.opendcSimulatorCore) - testRuntimeOnly(libs.log4j.core) - testRuntimeOnly(libs.log4j.slf4j) -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt deleted file mode 100644 index 56cda31c..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt +++ /dev/null @@ -1,594 +0,0 @@ -/* - * Copyright (c) 2022 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry - -import kotlinx.coroutines.CoroutineScope -import kotlinx.coroutines.delay -import kotlinx.coroutines.isActive -import kotlinx.coroutines.launch -import mu.KotlinLogging -import org.opendc.common.Dispatcher -import org.opendc.common.asCoroutineDispatcher -import org.opendc.compute.api.Task -import org.opendc.compute.api.TaskState -import org.opendc.compute.carbon.CarbonTrace -import org.opendc.compute.service.ComputeService -import org.opendc.compute.service.driver.Host -import org.opendc.compute.telemetry.table.HostInfo -import org.opendc.compute.telemetry.table.HostTableReader -import org.opendc.compute.telemetry.table.ServiceTableReader -import org.opendc.compute.telemetry.table.TaskInfo -import org.opendc.compute.telemetry.table.TaskTableReader -import java.time.Duration -import java.time.Instant - -/** - * A helper class to collect metrics from a [ComputeService] instance and automatically export the metrics every - * export interval. - * - * @param dispatcher A [Dispatcher] for scheduling the future events. - * @param service The [ComputeService] to monitor. - * @param monitor The monitor to export the metrics to. - * @param exportInterval The export interval. - */ -public class ComputeMetricReader( - dispatcher: Dispatcher, - private val service: ComputeService, - private val monitor: ComputeMonitor, - private val exportInterval: Duration = Duration.ofMinutes(5), - private val startTime: Duration = Duration.ofMillis(0), - private val carbonTrace: CarbonTrace = CarbonTrace(null), -) : AutoCloseable { - private val logger = KotlinLogging.logger {} - private val scope = CoroutineScope(dispatcher.asCoroutineDispatcher()) - private val clock = dispatcher.timeSource - - /** - * Aggregator for service metrics. - */ - private val serviceTableReader = ServiceTableReaderImpl(service, startTime) - - /** - * Mapping from [Host] instances to [HostTableReaderImpl] - */ - private val hostTableReaders = mutableMapOf<Host, HostTableReaderImpl>() - - /** - * Mapping from [Task] instances to [TaskTableReaderImpl] - */ - private val taskTableReaders = mutableMapOf<Task, TaskTableReaderImpl>() - - /** - * The background job that is responsible for collecting the metrics every cycle. 
- */ - private val job = - scope.launch { - val intervalMs = exportInterval.toMillis() - try { - while (isActive) { - delay(intervalMs) - - loggState() - } - } finally { - loggState() - - if (monitor is AutoCloseable) { - monitor.close() - } - } - } - - private fun loggState() { - try { - val now = this.clock.instant() - - for (host in this.service.hosts) { - val reader = this.hostTableReaders.computeIfAbsent(host) { HostTableReaderImpl(it, startTime, carbonTrace) } - reader.record(now) - this.monitor.record(reader.copy()) - reader.reset() - } - - for (task in this.service.tasks) { - val reader = this.taskTableReaders.computeIfAbsent(task) { TaskTableReaderImpl(service, it, startTime) } - reader.record(now) - this.monitor.record(reader.copy()) - reader.reset() - } - - this.serviceTableReader.record(now) - monitor.record(this.serviceTableReader.copy()) - } catch (cause: Throwable) { - this.logger.warn(cause) { "Exporter threw an Exception" } - } - } - - override fun close() { - job.cancel() - } - - /** - * An aggregator for service metrics before they are reported. - */ - private class ServiceTableReaderImpl( - private val service: ComputeService, - private val startTime: Duration = Duration.ofMillis(0), - ) : ServiceTableReader { - override fun copy(): ServiceTableReader { - val newServiceTable = ServiceTableReaderImpl(service) - newServiceTable.setValues(this) - - return newServiceTable - } - - override fun setValues(table: ServiceTableReader) { - _timestamp = table.timestamp - _timestampAbsolute = table.timestampAbsolute - - _hostsUp = table.hostsUp - _hostsDown = table.hostsDown - _tasksTotal = table.tasksTotal - _tasksPending = table.tasksPending - _tasksActive = table.tasksActive - _attemptsSuccess = table.attemptsSuccess - _attemptsFailure = table.attemptsFailure - _attemptsError = table.attemptsError - } - - private var _timestamp: Instant = Instant.MIN - override val timestamp: Instant - get() = _timestamp - - private var _timestampAbsolute: Instant = Instant.MIN - override val timestampAbsolute: Instant - get() = _timestampAbsolute - - override val hostsUp: Int - get() = _hostsUp - private var _hostsUp = 0 - - override val hostsDown: Int - get() = _hostsDown - private var _hostsDown = 0 - - override val tasksTotal: Int - get() = _tasksTotal - private var _tasksTotal = 0 - - override val tasksPending: Int - get() = _tasksPending - private var _tasksPending = 0 - - override val tasksActive: Int - get() = _tasksActive - private var _tasksActive = 0 - - override val attemptsSuccess: Int - get() = _attemptsSuccess - private var _attemptsSuccess = 0 - - override val attemptsFailure: Int - get() = _attemptsFailure - private var _attemptsFailure = 0 - - override val attemptsError: Int - get() = _attemptsError - private var _attemptsError = 0 - - /** - * Record the next cycle. - */ - fun record(now: Instant) { - _timestamp = now - _timestampAbsolute = now + startTime - - val stats = service.getSchedulerStats() - _hostsUp = stats.hostsAvailable - _hostsDown = stats.hostsUnavailable - _tasksTotal = stats.tasksTotal - _tasksPending = stats.tasksPending - _tasksActive = stats.tasksActive - _attemptsSuccess = stats.attemptsSuccess.toInt() - _attemptsFailure = stats.attemptsFailure.toInt() - _attemptsError = stats.attemptsError.toInt() - } - } - - /** - * An aggregator for host metrics before they are reported. 
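A minimal usage sketch of `ComputeMetricReader` (the `dispatcher`, `computeService` and `monitor` values are hypothetical): the reader samples every `exportInterval`, and on `close()` the `finally` block exports one final sample and closes the monitor if it is `AutoCloseable`.

```kotlin
import java.time.Duration

// Sketch only: dispatcher, computeService and monitor are assumed to exist in the simulation setup.
val metricReader =
    ComputeMetricReader(
        dispatcher = dispatcher,                // org.opendc.common.Dispatcher driving the simulation clock
        service = computeService,               // the ComputeService being monitored
        monitor = monitor,                      // any ComputeMonitor implementation
        exportInterval = Duration.ofMinutes(5), // how often loggState() is invoked
    )

// ... run the simulation ...

metricReader.close() // cancels the export job; the final state is still flushed to the monitor
```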
- */ - private class HostTableReaderImpl( - host: Host, - private val startTime: Duration = Duration.ofMillis(0), - private val carbonTrace: CarbonTrace = CarbonTrace(null), - ) : HostTableReader { - override fun copy(): HostTableReader { - val newHostTable = HostTableReaderImpl(_host) - newHostTable.setValues(this) - - return newHostTable - } - - override fun setValues(table: HostTableReader) { - _timestamp = table.timestamp - _timestampAbsolute = table.timestampAbsolute - - _guestsTerminated = table.guestsTerminated - _guestsRunning = table.guestsRunning - _guestsError = table.guestsError - _guestsInvalid = table.guestsInvalid - _cpuLimit = table.cpuLimit - _cpuDemand = table.cpuDemand - _cpuUsage = table.cpuUsage - _cpuUtilization = table.cpuUtilization - _cpuActiveTime = table.cpuActiveTime - _cpuIdleTime = table.cpuIdleTime - _cpuStealTime = table.cpuStealTime - _cpuLostTime = table.cpuLostTime - _powerDraw = table.powerDraw - _energyUsage = table.energyUsage - _carbonIntensity = table.carbonIntensity - _carbonEmission = table.carbonEmission - _uptime = table.uptime - _downtime = table.downtime - _bootTime = table.bootTime - _bootTimeAbsolute = table.bootTimeAbsolute - } - - private val _host = host - - override val host: HostInfo = - HostInfo(host.uid.toString(), host.name, "x86", host.model.coreCount, host.model.cpuCapacity, host.model.memoryCapacity) - - override val timestamp: Instant - get() = _timestamp - private var _timestamp = Instant.MIN - - override val timestampAbsolute: Instant - get() = _timestampAbsolute - private var _timestampAbsolute = Instant.MIN - - override val guestsTerminated: Int - get() = _guestsTerminated - private var _guestsTerminated = 0 - - override val guestsRunning: Int - get() = _guestsRunning - private var _guestsRunning = 0 - - override val guestsError: Int - get() = _guestsError - private var _guestsError = 0 - - override val guestsInvalid: Int - get() = _guestsInvalid - private var _guestsInvalid = 0 - - override val cpuLimit: Double - get() = _cpuLimit - private var _cpuLimit = 0.0 - - override val cpuUsage: Double - get() = _cpuUsage - private var _cpuUsage = 0.0 - - override val cpuDemand: Double - get() = _cpuDemand - private var _cpuDemand = 0.0 - - override val cpuUtilization: Double - get() = _cpuUtilization - private var _cpuUtilization = 0.0 - - override val cpuActiveTime: Long - get() = _cpuActiveTime - previousCpuActiveTime - private var _cpuActiveTime = 0L - private var previousCpuActiveTime = 0L - - override val cpuIdleTime: Long - get() = _cpuIdleTime - previousCpuIdleTime - private var _cpuIdleTime = 0L - private var previousCpuIdleTime = 0L - - override val cpuStealTime: Long - get() = _cpuStealTime - previousCpuStealTime - private var _cpuStealTime = 0L - private var previousCpuStealTime = 0L - - override val cpuLostTime: Long - get() = _cpuLostTime - previousCpuLostTime - private var _cpuLostTime = 0L - private var previousCpuLostTime = 0L - - override val powerDraw: Double - get() = _powerDraw - private var _powerDraw = 0.0 - - override val energyUsage: Double - get() = _energyUsage - previousPowerTotal - private var _energyUsage = 0.0 - private var previousPowerTotal = 0.0 - - override val carbonIntensity: Double - get() = _carbonIntensity - private var _carbonIntensity = 0.0 - - override val carbonEmission: Double - get() = _carbonEmission - private var _carbonEmission = 0.0 - - override val uptime: Long - get() = _uptime - previousUptime - private var _uptime = 0L - private var previousUptime = 0L - - override val downtime: 
Long - get() = _downtime - previousDowntime - private var _downtime = 0L - private var previousDowntime = 0L - - override val bootTime: Instant? - get() = _bootTime - private var _bootTime: Instant? = null - - override val bootTimeAbsolute: Instant? - get() = _bootTimeAbsolute - private var _bootTimeAbsolute: Instant? = null - - /** - * Record the next cycle. - */ - fun record(now: Instant) { - val hostCpuStats = _host.getCpuStats() - val hostSysStats = _host.getSystemStats() - - _timestamp = now - _timestampAbsolute = now + startTime - - _guestsTerminated = hostSysStats.guestsTerminated - _guestsRunning = hostSysStats.guestsRunning - _guestsError = hostSysStats.guestsError - _guestsInvalid = hostSysStats.guestsInvalid - _cpuLimit = hostCpuStats.capacity - _cpuDemand = hostCpuStats.demand - _cpuUsage = hostCpuStats.usage - _cpuUtilization = hostCpuStats.utilization - _cpuActiveTime = hostCpuStats.activeTime - _cpuIdleTime = hostCpuStats.idleTime - _cpuStealTime = hostCpuStats.stealTime - _cpuLostTime = hostCpuStats.lostTime - _powerDraw = hostSysStats.powerDraw - _energyUsage = hostSysStats.energyUsage - _carbonIntensity = carbonTrace.getCarbonIntensity(timestampAbsolute) - - _carbonEmission = carbonIntensity * (energyUsage / 3600000.0) // convert energy usage from J to kWh - _uptime = hostSysStats.uptime.toMillis() - _downtime = hostSysStats.downtime.toMillis() - _bootTime = hostSysStats.bootTime - _bootTime = hostSysStats.bootTime + startTime - } - - /** - * Finish the aggregation for this cycle. - */ - fun reset() { - // Reset intermediate state for next aggregation - previousCpuActiveTime = _cpuActiveTime - previousCpuIdleTime = _cpuIdleTime - previousCpuStealTime = _cpuStealTime - previousCpuLostTime = _cpuLostTime - previousPowerTotal = _energyUsage - previousUptime = _uptime - previousDowntime = _downtime - - _guestsTerminated = 0 - _guestsRunning = 0 - _guestsError = 0 - _guestsInvalid = 0 - - _cpuLimit = 0.0 - _cpuUsage = 0.0 - _cpuDemand = 0.0 - _cpuUtilization = 0.0 - - _powerDraw = 0.0 - _energyUsage = 0.0 - _carbonIntensity = 0.0 - _carbonEmission = 0.0 - } - } - - /** - * An aggregator for task metrics before they are reported. - */ - private class TaskTableReaderImpl( - private val service: ComputeService, - private val task: Task, - private val startTime: Duration = Duration.ofMillis(0), - ) : TaskTableReader { - override fun copy(): TaskTableReader { - val newTaskTable = TaskTableReaderImpl(service, task) - newTaskTable.setValues(this) - - return newTaskTable - } - - override fun setValues(table: TaskTableReader) { - host = table.host - - _timestamp = table.timestamp - _timestampAbsolute = table.timestampAbsolute - - _cpuLimit = table.cpuLimit - _cpuActiveTime = table.cpuActiveTime - _cpuIdleTime = table.cpuIdleTime - _cpuStealTime = table.cpuStealTime - _cpuLostTime = table.cpuLostTime - _uptime = table.uptime - _downtime = table.downtime - _provisionTime = table.provisionTime - _bootTime = table.bootTime - _bootTimeAbsolute = table.bootTimeAbsolute - - _taskState = table.taskState - } - - /** - * The static information about this task. - */ - override val taskInfo = - TaskInfo( - task.uid.toString(), - task.name, - "vm", - "x86", - task.image.uid.toString(), - task.image.name, - task.flavor.coreCount, - task.flavor.memorySize, - ) - - /** - * The [HostInfo] of the host on which the task is hosted. - */ - override var host: HostInfo? = null - private var _host: Host? 
= null - - private var _timestamp = Instant.MIN - override val timestamp: Instant - get() = _timestamp - - private var _timestampAbsolute = Instant.MIN - override val timestampAbsolute: Instant - get() = _timestampAbsolute - - override val uptime: Long - get() = _uptime - previousUptime - private var _uptime: Long = 0 - private var previousUptime = 0L - - override val downtime: Long - get() = _downtime - previousDowntime - private var _downtime: Long = 0 - private var previousDowntime = 0L - - override val provisionTime: Instant? - get() = _provisionTime - private var _provisionTime: Instant? = null - - override val bootTime: Instant? - get() = _bootTime - private var _bootTime: Instant? = null - - override val cpuLimit: Double - get() = _cpuLimit - private var _cpuLimit = 0.0 - - override val cpuActiveTime: Long - get() = _cpuActiveTime - previousCpuActiveTime - private var _cpuActiveTime = 0L - private var previousCpuActiveTime = 0L - - override val cpuIdleTime: Long - get() = _cpuIdleTime - previousCpuIdleTime - private var _cpuIdleTime = 0L - private var previousCpuIdleTime = 0L - - override val cpuStealTime: Long - get() = _cpuStealTime - previousCpuStealTime - private var _cpuStealTime = 0L - private var previousCpuStealTime = 0L - - override val cpuLostTime: Long - get() = _cpuLostTime - previousCpuLostTime - private var _cpuLostTime = 0L - private var previousCpuLostTime = 0L - - override val bootTimeAbsolute: Instant? - get() = _bootTimeAbsolute - private var _bootTimeAbsolute: Instant? = null - - override val taskState: TaskState? - get() = _taskState - private var _taskState: TaskState? = null - - /** - * Record the next cycle. - */ - fun record(now: Instant) { - val newHost = service.lookupHost(task) - if (newHost != null && newHost.uid != _host?.uid) { - _host = newHost - host = - HostInfo( - newHost.uid.toString(), - newHost.name, - "x86", - newHost.model.coreCount, - newHost.model.cpuCapacity, - newHost.model.memoryCapacity, - ) - } - - val cpuStats = _host?.getCpuStats(task) - val sysStats = _host?.getSystemStats(task) - - _timestamp = now - _timestampAbsolute = now + startTime - - _cpuLimit = cpuStats?.capacity ?: 0.0 - _cpuActiveTime = cpuStats?.activeTime ?: 0 - _cpuIdleTime = cpuStats?.idleTime ?: 0 - _cpuStealTime = cpuStats?.stealTime ?: 0 - _cpuLostTime = cpuStats?.lostTime ?: 0 - _uptime = sysStats?.uptime?.toMillis() ?: 0 - _downtime = sysStats?.downtime?.toMillis() ?: 0 - _provisionTime = task.launchedAt - _bootTime = sysStats?.bootTime - - _taskState = task.state - - if (sysStats != null) { - _bootTimeAbsolute = sysStats.bootTime + startTime - } else { - _bootTimeAbsolute = null - } - } - - /** - * Finish the aggregation for this cycle. 
- */ - fun reset() { - previousUptime = _uptime - previousDowntime = _downtime - previousCpuActiveTime = _cpuActiveTime - previousCpuIdleTime = _cpuIdleTime - previousCpuStealTime = _cpuStealTime - previousCpuLostTime = _cpuLostTime - - _host = null - _cpuLimit = 0.0 - } - } -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMonitor.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMonitor.kt deleted file mode 100644 index 1df058fb..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMonitor.kt +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2022 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry - -import org.opendc.compute.telemetry.table.HostTableReader -import org.opendc.compute.telemetry.table.ServiceTableReader -import org.opendc.compute.telemetry.table.TaskTableReader - -/** - * A monitor that tracks the metrics and events of the OpenDC Compute service. - */ -public interface ComputeMonitor { - /** - * Record an entry with the specified [reader]. - */ - public fun record(reader: TaskTableReader) {} - - /** - * Record an entry with the specified [reader]. - */ - public fun record(reader: HostTableReader) {} - - /** - * Record an entry with the specified [reader]. 
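Each `record` overload of `ComputeMonitor` has an empty default body (including the service overload that follows below), so an implementation only needs to override the tables it cares about. A minimal sketch with a hypothetical class name:

```kotlin
// Prints one line per host sample; task and service records fall through to the no-op defaults.
class LoggingComputeMonitor : ComputeMonitor {
    override fun record(reader: HostTableReader) {
        println("${reader.timestamp} host=${reader.host.name} cpuUsage=${reader.cpuUsage} powerDraw=${reader.powerDraw}")
    }
}
```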
- */ - public fun record(reader: ServiceTableReader) {} -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ComputeExportConfig.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ComputeExportConfig.kt deleted file mode 100644 index 161c0936..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ComputeExportConfig.kt +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright (c) 2024 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.export.parquet - -import kotlinx.serialization.KSerializer -import kotlinx.serialization.Serializable -import kotlinx.serialization.builtins.ListSerializer -import kotlinx.serialization.descriptors.SerialDescriptor -import kotlinx.serialization.descriptors.buildClassSerialDescriptor -import kotlinx.serialization.encoding.Decoder -import kotlinx.serialization.encoding.Encoder -import kotlinx.serialization.encoding.encodeStructure -import kotlinx.serialization.json.Json -import kotlinx.serialization.json.JsonDecoder -import kotlinx.serialization.json.JsonElement -import kotlinx.serialization.json.jsonObject -import org.opendc.common.logger.logger -import org.opendc.compute.telemetry.table.HostTableReader -import org.opendc.compute.telemetry.table.ServiceTableReader -import org.opendc.compute.telemetry.table.TaskTableReader -import org.opendc.trace.util.parquet.exporter.ColListSerializer -import org.opendc.trace.util.parquet.exporter.ExportColumn -import org.opendc.trace.util.parquet.exporter.Exportable -import org.opendc.trace.util.parquet.exporter.columnSerializer - -/** - * Aggregates the necessary settings to personalize the output - * parquet files for compute workloads. - * - * @param[hostExportColumns] the columns that will be included in the `host.parquet` raw output file. - * @param[taskExportColumns] the columns that will be included in the `task.parquet` raw output file. - * @param[serviceExportColumns] the columns that will be included in the `service.parquet` raw output file. 
- */ -@Serializable(with = ComputeExportConfig.Companion.ComputeExportConfigSerializer::class) -public data class ComputeExportConfig( - public val hostExportColumns: Set<ExportColumn<HostTableReader>>, - public val taskExportColumns: Set<ExportColumn<TaskTableReader>>, - public val serviceExportColumns: Set<ExportColumn<ServiceTableReader>>, -) { - public constructor( - hostExportColumns: Collection<ExportColumn<HostTableReader>>, - taskExportColumns: Collection<ExportColumn<TaskTableReader>>, - serviceExportColumns: Collection<ExportColumn<ServiceTableReader>>, - ) : this( - hostExportColumns.toSet() + DfltHostExportColumns.BASE_EXPORT_COLUMNS, - taskExportColumns.toSet() + DfltTaskExportColumns.BASE_EXPORT_COLUMNS, - serviceExportColumns.toSet() + DfltServiceExportColumns.BASE_EXPORT_COLUMNS, - ) - - /** - * @return formatted string representing the export config. - */ - public fun fmt(): String = - """ - | === Compute Export Config === - | Host columns : ${hostExportColumns.map { it.name }.toString().trim('[', ']')} - | Task columns : ${taskExportColumns.map { it.name }.toString().trim('[', ']')} - | Service columns : ${serviceExportColumns.map { it.name }.toString().trim('[', ']')} - """.trimIndent() - - public companion object { - internal val LOG by logger() - - /** - * Force the jvm to load the default [ExportColumn]s relevant to compute export, - * so that they are available for deserialization. - */ - public fun loadDfltColumns() { - DfltHostExportColumns - DfltTaskExportColumns - DfltServiceExportColumns - } - - /** - * Config that includes all columns defined in [DfltHostExportColumns], - * [DfltTaskExportColumns], [DfltServiceExportColumns] among all other loaded - * columns for [HostTableReader], [TaskTableReader] and [ServiceTableReader]. - */ - public val ALL_COLUMNS: ComputeExportConfig by lazy { - loadDfltColumns() - ComputeExportConfig( - hostExportColumns = ExportColumn.getAllLoadedColumns(), - taskExportColumns = ExportColumn.getAllLoadedColumns(), - serviceExportColumns = ExportColumn.getAllLoadedColumns(), - ) - } - - /** - * A runtime [KSerializer] is needed for reasons explained in [columnSerializer] docs. - * - * This serializer makes use of reified column serializers for the 2 properties. - */ - internal object ComputeExportConfigSerializer : KSerializer<ComputeExportConfig> { - override val descriptor: SerialDescriptor = - buildClassSerialDescriptor("org.opendc.compute.telemetry.export.parquet.ComputeExportConfig") { - element( - "hostExportColumns", - ListSerializer(columnSerializer<HostTableReader>()).descriptor, - ) - element( - "taskExportColumns", - ListSerializer(columnSerializer<TaskTableReader>()).descriptor, - ) - element( - "serviceExportColumns", - ListSerializer(columnSerializer<ServiceTableReader>()).descriptor, - ) - } - - override fun deserialize(decoder: Decoder): ComputeExportConfig { - val jsonDec = - (decoder as? JsonDecoder) ?: let { - // Basically a recursive call with a JsonDecoder. - return json.decodeFromString(decoder.decodeString().trim('"')) - } - - // Loads the default columns so that they are available for deserialization. 
- loadDfltColumns() - val elem = jsonDec.decodeJsonElement().jsonObject - - val hostFields: List<ExportColumn<HostTableReader>> = elem["hostExportColumns"].toFieldList() - val taskFields: List<ExportColumn<TaskTableReader>> = elem["taskExportColumns"].toFieldList() - val serviceFields: List<ExportColumn<ServiceTableReader>> = elem["serviceExportColumns"].toFieldList() - - return ComputeExportConfig( - hostExportColumns = hostFields, - taskExportColumns = taskFields, - serviceExportColumns = serviceFields, - ) - } - - override fun serialize( - encoder: Encoder, - value: ComputeExportConfig, - ) { - encoder.encodeStructure(descriptor) { - encodeSerializableElement( - descriptor, - 0, - ColListSerializer(columnSerializer<HostTableReader>()), - value.hostExportColumns.toList(), - ) - encodeSerializableElement( - descriptor, - 1, - ColListSerializer(columnSerializer<TaskTableReader>()), - value.taskExportColumns.toList(), - ) - encodeSerializableElement( - descriptor, - 2, - ColListSerializer(columnSerializer<ServiceTableReader>()), - value.serviceExportColumns.toList(), - ) - } - } - } - } -} - -private val json = Json { ignoreUnknownKeys = true } - -private inline fun <reified T : Exportable> JsonElement?.toFieldList(): List<ExportColumn<T>> = - this?.let { - json.decodeFromJsonElement(ColListSerializer(columnSerializer<T>()), it) - }?.ifEmpty { - ComputeExportConfig.LOG.warn( - "deserialized list of export columns for exportable ${T::class.simpleName} " + - "produced empty list, falling back to all loaded columns", - ) - ExportColumn.getAllLoadedColumns<T>() - } ?: ExportColumn.getAllLoadedColumns<T>() diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltHostExportColumns.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltHostExportColumns.kt deleted file mode 100644 index 261c5462..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltHostExportColumns.kt +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2024 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
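A sketch of building a `ComputeExportConfig` programmatically instead of deserializing it from JSON; the column selection below is illustrative. The secondary constructor always adds the base columns (`timestamp`, `timestamp_absolute`) to whatever is passed in, and `fmt()` prints the resulting selection.

```kotlin
val exportConfig =
    ComputeExportConfig(
        hostExportColumns = setOf(DfltHostExportColumns.CPU_USAGE, DfltHostExportColumns.POWER_DRAW),
        taskExportColumns = setOf(DfltTaskExportColumns.TASK_ID, DfltTaskExportColumns.CPU_TIME_ACTIVE),
        serviceExportColumns = setOf(DfltServiceExportColumns.TASKS_ACTIVE),
    )
println(exportConfig.fmt())

// Or export every column currently loaded by the JVM:
val fullConfig = ComputeExportConfig.ALL_COLUMNS
```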
- */ - -package org.opendc.compute.telemetry.export.parquet - -import org.apache.parquet.io.api.Binary -import org.apache.parquet.schema.LogicalTypeAnnotation -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32 -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64 -import org.apache.parquet.schema.Types -import org.opendc.compute.telemetry.table.HostTableReader -import org.opendc.trace.util.parquet.exporter.ExportColumn - -/** - * This object wraps the [ExportColumn]s to solves ambiguity for field - * names that are included in more than 1 exportable. - * - * Additionally, it allows to load all the fields at once by just its symbol, - * so that these columns can be deserialized. Additional fields can be added - * from anywhere, and they are deserializable as long as they are loaded by the jvm. - * - * ```kotlin - * ... - * // Loads the column - * DfltHostExportColumns - * ... - * ``` - */ -public object DfltHostExportColumns { - public val TIMESTAMP: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT64).named("timestamp"), - ) { it.timestamp.toEpochMilli() } - - public val TIMESTAMP_ABS: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT64).named("timestamp_absolute"), - ) { it.timestampAbsolute.toEpochMilli() } - - public val HOST_ID: ExportColumn<HostTableReader> = - ExportColumn( - field = - Types.required(BINARY) - .`as`(LogicalTypeAnnotation.stringType()) - .named("host_id"), - ) { Binary.fromString(it.host.id) } - - public val HOST_NAME: ExportColumn<HostTableReader> = - ExportColumn( - field = - Types.required(BINARY) - .`as`(LogicalTypeAnnotation.stringType()) - .named("host_name"), - ) { Binary.fromString(it.host.name) } - - public val CPU_COUNT: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT32).named("core_count"), - ) { it.host.coreCount } - - public val MEM_CAPACITY: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT64).named("mem_capacity"), - ) { it.host.memCapacity } - - public val GUESTS_TERMINATED: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT32).named("guests_terminated"), - ) { it.guestsTerminated } - - public val GUESTS_RUNNING: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT32).named("guests_running"), - ) { it.guestsRunning } - - public val GUESTS_ERROR: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT32).named("guests_error"), - ) { it.guestsError } - - public val GUESTS_INVALID: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT32).named("guests_invalid"), - ) { it.guestsInvalid } - - public val CPU_LIMIT: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("cpu_limit"), - ) { it.cpuLimit } - - public val CPU_USAGE: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("cpu_usage"), - ) { it.cpuUsage } - - public val CPU_DEMAND: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("cpu_demand"), - ) { it.cpuDemand } - - public val CPU_UTILIZATION: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("cpu_utilization"), - ) { it.cpuUtilization } - - public val CPU_TIME_ACTIVE: ExportColumn<HostTableReader> = - ExportColumn( - field = 
Types.required(INT64).named("cpu_time_active"), - ) { it.cpuActiveTime } - - public val CPU_TIME_IDLE: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT64).named("cpu_time_idle"), - ) { it.cpuIdleTime } - - public val CPU_TIME_STEAL: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT64).named("cpu_time_steal"), - ) { it.cpuStealTime } - - public val CPU_TIME_LOST: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT64).named("cpu_time_lost"), - ) { it.cpuLostTime } - - public val POWER_DRAW: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("power_draw"), - ) { it.powerDraw } - - public val ENERGY_USAGE: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("energy_usage"), - ) { it.energyUsage } - - public val CARBON_INTENSITY: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("carbon_intensity"), - ) { it.carbonIntensity } - - public val CARBON_EMISSION: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("carbon_emission"), - ) { it.carbonEmission } - - public val UP_TIME: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT64).named("uptime"), - ) { it.uptime } - - public val DOWN_TIME: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.required(INT64).named("downtime"), - ) { it.downtime } - - public val BOOT_TIME: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.optional(INT64).named("boot_time"), - ) { it.bootTime?.toEpochMilli() } - - public val BOOT_TIME_ABS: ExportColumn<HostTableReader> = - ExportColumn( - field = Types.optional(INT64).named("boot_time_absolute"), - ) { it.bootTimeAbsolute?.toEpochMilli() } - - /** - * The columns that are always included in the output file. - */ - internal val BASE_EXPORT_COLUMNS = - setOf( - TIMESTAMP_ABS, - TIMESTAMP, - ) -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServiceExportColumns.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServiceExportColumns.kt deleted file mode 100644 index 8038060d..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServiceExportColumns.kt +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2024 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.export.parquet - -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32 -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64 -import org.apache.parquet.schema.Types -import org.opendc.compute.telemetry.table.ServiceTableReader -import org.opendc.trace.util.parquet.exporter.ExportColumn - -/** - * This object wraps the [ExportColumn]s to solve ambiguity for field - * names that are included in more than one exportable. - * - * Additionally, it allows all the fields to be loaded at once by referencing its symbol, - * so that these columns can be deserialized. Additional fields can be added - * from anywhere, and they are deserializable as long as they are loaded by the JVM. - * - * ```kotlin - * ... - * // Loads the column - * DfltServiceExportColumns - * ... - * ``` - */ -public object DfltServiceExportColumns { - public val TIMESTAMP: ExportColumn<ServiceTableReader> = - ExportColumn( - field = Types.required(INT64).named("timestamp"), - ) { it.timestamp.toEpochMilli() } - - public val TIMESTAMP_ABS: ExportColumn<ServiceTableReader> = - ExportColumn( - field = Types.required(INT64).named("timestamp_absolute"), - ) { it.timestampAbsolute.toEpochMilli() } - - public val HOSTS_UP: ExportColumn<ServiceTableReader> = - ExportColumn( - field = Types.required(INT32).named("hosts_up"), - ) { it.hostsUp } - - public val TASKS_PENDING: ExportColumn<ServiceTableReader> = - ExportColumn( - field = Types.required(INT32).named("tasks_pending"), - ) { it.tasksPending } - - public val TASKS_ACTIVE: ExportColumn<ServiceTableReader> = - ExportColumn( - field = Types.required(INT32).named("tasks_active"), - ) { it.tasksActive } - - public val ATTEMPTS_SUCCESS: ExportColumn<ServiceTableReader> = - ExportColumn( - field = Types.required(INT32).named("attempts_success"), - ) { it.attemptsSuccess } - - public val ATTEMPTS_FAILURE: ExportColumn<ServiceTableReader> = - ExportColumn( - field = Types.required(INT32).named("attempts_failure"), - ) { it.attemptsFailure } - - public val ATTEMPTS_ERROR: ExportColumn<ServiceTableReader> = - ExportColumn( - field = Types.required(INT32).named("attempts_error"), - ) { it.attemptsError } - - /** - * The columns that are always included in the output file.
- */ - internal val BASE_EXPORT_COLUMNS = - setOf( - TIMESTAMP_ABS, - TIMESTAMP, - ) -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltTaskExportColumns.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltTaskExportColumns.kt deleted file mode 100644 index 9e86e1a3..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltTaskExportColumns.kt +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2024 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.export.parquet - -import org.apache.parquet.io.api.Binary -import org.apache.parquet.schema.LogicalTypeAnnotation -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32 -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64 -import org.apache.parquet.schema.Types -import org.opendc.compute.telemetry.table.TaskTableReader -import org.opendc.trace.util.parquet.exporter.ExportColumn - -/** - * This object wraps the [ExportColumn]s to solves ambiguity for field - * names that are included in more than 1 exportable - * - * Additionally, it allows to load all the fields at once by just its symbol, - * so that these columns can be deserialized. Additional fields can be added - * from anywhere, and they are deserializable as long as they are loaded by the jvm. - * - * ```kotlin - * ... - * // Loads the column - * DfltTaskExportColumns - * ... 
- * ``` - */ -public object DfltTaskExportColumns { - public val TIMESTAMP: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("timestamp"), - ) { it.timestamp.toEpochMilli() } - - public val TIMESTAMP_ABS: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("timestamp_absolute"), - ) { it.timestampAbsolute.toEpochMilli() } - - public val TASK_ID: ExportColumn<TaskTableReader> = - ExportColumn( - field = - Types.required(BINARY) - .`as`(LogicalTypeAnnotation.stringType()) - .named("task_id"), - ) { Binary.fromString(it.taskInfo.id) } - - public val HOST_ID: ExportColumn<TaskTableReader> = - ExportColumn( - field = - Types.optional(BINARY) - .`as`(LogicalTypeAnnotation.stringType()) - .named("host_id"), - ) { it.host?.id?.let { Binary.fromString(it) } } - - public val TASK_NAME: ExportColumn<TaskTableReader> = - ExportColumn( - field = - Types.required(BINARY) - .`as`(LogicalTypeAnnotation.stringType()) - .named("task_name"), - ) { Binary.fromString(it.taskInfo.name) } - - public val CPU_COUNT: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT32).named("cpu_count"), - ) { it.taskInfo.cpuCount } - - public val MEM_CAPACITY: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("mem_capacity"), - ) { it.taskInfo.memCapacity } - - public val CPU_LIMIT: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(DOUBLE).named("cpu_limit"), - ) { it.cpuLimit } - - public val CPU_TIME_ACTIVE: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("cpu_time_active"), - ) { it.cpuActiveTime } - - public val CPU_TIME_IDLE: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("cpu_time_idle"), - ) { it.cpuIdleTime } - - public val CPU_TIME_STEAL: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("cpu_time_steal"), - ) { it.cpuStealTime } - - public val CPU_TIME_LOST: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("cpu_time_lost"), - ) { it.cpuLostTime } - - public val UP_TIME: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("uptime"), - ) { it.uptime } - - public val DOWN_TIME: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.required(INT64).named("downtime"), - ) { it.downtime } - - public val PROVISION_TIME: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.optional(INT64).named("provision_time"), - ) { it.provisionTime?.toEpochMilli() } - - public val BOOT_TIME: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.optional(INT64).named("boot_time"), - ) { it.bootTime?.toEpochMilli() } - - public val BOOT_TIME_ABS: ExportColumn<TaskTableReader> = - ExportColumn( - field = Types.optional(INT64).named("boot_time_absolute"), - ) { it.bootTimeAbsolute?.toEpochMilli() } - - public val TASK_STATE: ExportColumn<TaskTableReader> = - ExportColumn( - field = - Types.optional(BINARY) - .`as`(LogicalTypeAnnotation.stringType()) - .named("task_state"), - ) { Binary.fromString(it.taskState?.name) } - - /** - * The columns that are always included in the output file. 
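As the object documentation above notes, additional columns can be declared anywhere and become deserializable once the declaring object is loaded by the JVM. A sketch of a custom task column (the object and column name are made up for illustration):

```kotlin
// Derived column combining two readings already exposed by TaskTableReader.
public object CustomTaskExportColumns {
    public val CPU_TIME_TOTAL: ExportColumn<TaskTableReader> =
        ExportColumn(
            field = Types.required(INT64).named("cpu_time_total"),
        ) { it.cpuActiveTime + it.cpuIdleTime }
}
```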
- */ - internal val BASE_EXPORT_COLUMNS = - setOf( - TIMESTAMP_ABS, - TIMESTAMP, - ) -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetComputeMonitor.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetComputeMonitor.kt deleted file mode 100644 index 3b7a7c0c..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetComputeMonitor.kt +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (c) 2022 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.export.parquet - -import org.opendc.compute.telemetry.ComputeMonitor -import org.opendc.compute.telemetry.table.HostTableReader -import org.opendc.compute.telemetry.table.ServiceTableReader -import org.opendc.compute.telemetry.table.TaskTableReader -import org.opendc.trace.util.parquet.exporter.ExportColumn -import org.opendc.trace.util.parquet.exporter.Exportable -import org.opendc.trace.util.parquet.exporter.Exporter -import java.io.File - -/** - * A [ComputeMonitor] that logs the events to a Parquet file. - */ -public class ParquetComputeMonitor( - private val hostExporter: Exporter<HostTableReader>, - private val taskExporter: Exporter<TaskTableReader>, - private val serviceExporter: Exporter<ServiceTableReader>, -) : ComputeMonitor, AutoCloseable { - override fun record(reader: HostTableReader) { - hostExporter.write(reader) - } - - override fun record(reader: TaskTableReader) { - taskExporter.write(reader) - } - - override fun record(reader: ServiceTableReader) { - serviceExporter.write(reader) - } - - override fun close() { - hostExporter.close() - taskExporter.close() - serviceExporter.close() - } - - public companion object { - /** - * Overloaded constructor with [ComputeExportConfig] as parameter. - * - * @param[base] parent pathname for output file. - * @param[partition] child pathname for output file. - * @param[bufferSize] size of the buffer used by the writer thread. 
- */ - public operator fun invoke( - base: File, - partition: String, - bufferSize: Int, - computeExportConfig: ComputeExportConfig, - ): ParquetComputeMonitor = - invoke( - base = base, - partition = partition, - bufferSize = bufferSize, - hostExportColumns = computeExportConfig.hostExportColumns, - taskExportColumns = computeExportConfig.taskExportColumns, - serviceExportColumns = computeExportConfig.serviceExportColumns, - ) - - /** - * Constructor that loads default [ExportColumn]s defined in - * [DfltHostExportColumns], [DfltTaskExportColumns], [DfltServiceExportColumns] - * in case optional parameters are omitted and all fields need to be retrieved. - * - * @param[base] parent pathname for output file. - * @param[partition] child pathname for output file. - * @param[bufferSize] size of the buffer used by the writer thread. - */ - public operator fun invoke( - base: File, - partition: String, - bufferSize: Int, - hostExportColumns: Collection<ExportColumn<HostTableReader>>? = null, - taskExportColumns: Collection<ExportColumn<TaskTableReader>>? = null, - serviceExportColumns: Collection<ExportColumn<ServiceTableReader>>? = null, - ): ParquetComputeMonitor { - // Loads the fields in case they need to be retrieved if optional params are omitted. - ComputeExportConfig.loadDfltColumns() - - return ParquetComputeMonitor( - hostExporter = - Exporter( - outputFile = File(base, "$partition/host.parquet").also { it.parentFile.mkdirs() }, - columns = hostExportColumns ?: Exportable.getAllLoadedColumns(), - bufferSize = bufferSize, - ), - taskExporter = - Exporter( - outputFile = File(base, "$partition/task.parquet").also { it.parentFile.mkdirs() }, - columns = taskExportColumns ?: Exportable.getAllLoadedColumns(), - bufferSize = bufferSize, - ), - serviceExporter = - Exporter( - outputFile = File(base, "$partition/service.parquet").also { it.parentFile.mkdirs() }, - columns = serviceExportColumns ?: Exportable.getAllLoadedColumns(), - bufferSize = bufferSize, - ), - ) - } - } -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/README.md b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/README.md deleted file mode 100644 index aee63fc9..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/README.md +++ /dev/null @@ -1,70 +0,0 @@ -### Summary -Added an output configuration, defined in the scenario `.json` file, that allows selecting which columns are included in the raw output files `host.parquet`, `task.parquet` and `service.parquet`. - -### Columns -The 'default' columns are defined in `DfltHostExportColumns`, `DfltTaskExportColumns` and `DfltServiceExportColumns`. Any number of additional columns can be defined anywhere (`ExportColumn<Exportable>`), and they are deserializable as long as they are loaded by the JVM. - -### Deserialization -Each `ExportColumn` has a `Regex` used for deserialization. If no custom regex is provided, the default one is used. The default regex matches the column name in a case-insensitive manner, with either `_` (as in the name) or a blank space. - -###### E.g.: -***column name*** = "cpu\_count" -***default column regex*** = "\\s*(?:cpu_count|cpu count)\\s*" (case insensitive) -***matches*** = "cpu\_count", "cpu count", "CpU\_cOuNt" etc. - -### JSON Schema -```json -// scenario.json -{ - ...
- "computeExportConfig": { - "type": "object", - "properties": { - "hostExportColumns": { "type": "array" }, - "taskExportColumns": { "type": "array" } , - "serviceExportColumns": { "type": "array" } , - "required": [ /* NONE REQUIRED */ ] - } - }, - ... - "required": [ - ... - // NOT REQUIRED - ] -} -``` - - -###### Bad Formatting Cases -- If a column name (and type) does not match any deserializable column, the entry is ignored and error message is logged. -- If an empty list of columns is provided or those that are provided were not deserializable, then all loaded columns for that `Exportable` are used, and a warning message is logged. -- If no list is provided, then all loaded columns for that `Exportable` are used. - - -### Example - -```json -// scenario.json -{ - ... - "computeExportConfig": { - "hostExportColumns": ["timestamp", "timestamp_absolute", "invalid-entry1", "guests_invalid"], - "taskExportColumns": ["invalid-entry2"], - "serviceExportColumns": ["timestamp", "tasks_active", "tasks_pending"] - }, - ... -``` - -```json -// console output -10:51:56.561 [ERROR] ColListSerializer - no match found for column "invalid-entry1", ignoring... -10:51:56.563 [ERROR] ColListSerializer - no match found for column "invalid-entry2", ignoring... -10:51:56.564 [WARN] ComputeExportConfig - deserialized list of export columns for exportable TaskTableReader produced empty list, falling back to all loaded columns -10:51:56.584 [INFO] ScenariosSpec - -| === Compute Export Config === -| Host columns : timestamp, timestamp_absolute, guests_invalid -| Task columns : timestamp, timestamp_absolute, task_id, task_name, cpu_count, mem_capacity, cpu_limit, cpu_time_active, cpu_time_idle, cpu_time_steal, cpu_time_lost, uptime, downtime, provision_time, boot_time, boot_time_absolute -| Service columns : timestamp, tasks_active, tasks_pending - -``` - diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostInfo.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostInfo.kt deleted file mode 100644 index 62b7ef0d..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostInfo.kt +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2021 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.table - -/** - * Information about a host exposed to the telemetry service. 
- */ -public data class HostInfo( - val id: String, - val name: String, - val arch: String, - val coreCount: Int, - val coreSpeed: Double, - val memCapacity: Long, -) diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostTableReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostTableReader.kt deleted file mode 100644 index a7b8bedb..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostTableReader.kt +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2021 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.table - -import org.opendc.trace.util.parquet.exporter.Exportable -import java.time.Instant - -/** - * An interface that is used to read a row of a host trace entry. - */ -public interface HostTableReader : Exportable { - public fun copy(): HostTableReader - - public fun setValues(table: HostTableReader) - - /** - * The [HostInfo] of the host to which the row belongs to. - */ - public val host: HostInfo - - /** - * The timestamp of the current entry of the reader relative to the start of the workload. - */ - public val timestamp: Instant - - /** - * The timestamp of the current entry of the reader. - */ - public val timestampAbsolute: Instant - - /** - * The number of guests that are in a terminated state. - */ - public val guestsTerminated: Int - - /** - * The number of guests that are in a running state. - */ - public val guestsRunning: Int - - /** - * The number of guests that are in an error state. - */ - public val guestsError: Int - - /** - * The number of guests that are in an unknown state. - */ - public val guestsInvalid: Int - - /** - * The capacity of the CPUs in the host (in MHz). - */ - public val cpuLimit: Double - - /** - * The usage of all CPUs in the host (in MHz). - */ - public val cpuUsage: Double - - /** - * The demand of all vCPUs of the guests (in MHz) - */ - public val cpuDemand: Double - - /** - * The CPU utilization of the host. - */ - public val cpuUtilization: Double - - /** - * The duration (in ms) that a CPU was active in the host. - */ - public val cpuActiveTime: Long - - /** - * The duration (in ms) that a CPU was idle in the host. - */ - public val cpuIdleTime: Long - - /** - * The duration (in ms) that a vCPU wanted to run, but no capacity was available. 
- */ - public val cpuStealTime: Long - - /** - * The duration (in ms) of CPU time that was lost due to interference. - */ - public val cpuLostTime: Long - - /** - * The current power draw of the host in W. - */ - public val powerDraw: Double - - /** - * The total energy consumption of the host since last sample in J. - */ - public val energyUsage: Double - - /** - * The current carbon intensity of the host in gCO2 / kW. - */ - public val carbonIntensity: Double - - /** - * The current carbon emission since the last deadline in g. - */ - public val carbonEmission: Double - - /** - * The uptime of the host since last time in ms. - */ - public val uptime: Long - - /** - * The downtime of the host since last time in ms. - */ - public val downtime: Long - - /** - * The [Instant] at which the host booted relative to the start of the workload. - */ - public val bootTime: Instant? - - /** - * The [Instant] at which the host booted. - */ - public val bootTimeAbsolute: Instant? -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceData.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceData.kt deleted file mode 100644 index 7a8ba6a7..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceData.kt +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2021 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.table - -import java.time.Instant - -/** - * A trace entry for the compute service. - */ -public data class ServiceData( - val timestamp: Instant, - val hostsUp: Int, - val hostsDown: Int, - val tasksTotal: Int, - val tasksPending: Int, - val tasksActive: Int, - val attemptsSuccess: Int, - val attemptsFailure: Int, - val attemptsError: Int, -) - -/** - * Convert a [ServiceTableReader] into a persistent object. 
- */ -public fun ServiceTableReader.toServiceData(): ServiceData { - return ServiceData( - timestamp, - hostsUp, - hostsDown, - tasksTotal, - tasksPending, - tasksActive, - attemptsSuccess, - attemptsFailure, - attemptsError, - ) -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt deleted file mode 100644 index 23630fb4..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2021 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.table - -import org.opendc.trace.util.parquet.exporter.Exportable -import java.time.Instant - -/** - * An interface that is used to read a row of a service trace entry. - */ -public interface ServiceTableReader : Exportable { - public fun copy(): ServiceTableReader - - public fun setValues(table: ServiceTableReader) - - /** - * The timestamp of the current entry of the reader. - */ - public val timestamp: Instant - - /** - * The timestamp of the current entry of the reader. - */ - public val timestampAbsolute: Instant - - /** - * The number of hosts that are up at this instant. - */ - public val hostsUp: Int - - /** - * The number of hosts that are down at this instant. - */ - public val hostsDown: Int - - /** - * The number of tasks that are registered with the compute service. - */ - public val tasksTotal: Int - - /** - * The number of tasks that are pending to be scheduled. - */ - public val tasksPending: Int - - /** - * The number of tasks that are currently active. - */ - public val tasksActive: Int - - /** - * The scheduling attempts that were successful. - */ - public val attemptsSuccess: Int - - /** - * The scheduling attempts that were unsuccessful due to client error. - */ - public val attemptsFailure: Int - - /** - * The scheduling attempts that were unsuccessful due to scheduler error. 
- */ - public val attemptsError: Int -} diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskInfo.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskInfo.kt deleted file mode 100644 index 2d1ae91a..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskInfo.kt +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2021 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package org.opendc.compute.telemetry.table - -/** - * Static information about a task exposed to the telemetry service. - */ -public data class TaskInfo( - val id: String, - val name: String, - val type: String, - val arch: String, - val imageId: String, - val imageName: String, - val cpuCount: Int, - val memCapacity: Long, -) diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskTableReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskTableReader.kt deleted file mode 100644 index ae7f7a49..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskTableReader.kt +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2021 AtLarge Research - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -package org.opendc.compute.telemetry.table - -import org.opendc.compute.api.TaskState -import org.opendc.compute.telemetry.export.parquet.DfltTaskExportColumns -import org.opendc.trace.util.parquet.exporter.Exportable -import java.time.Instant - -/** - * An interface that is used to read a row of a task trace entry. - */ -public interface TaskTableReader : Exportable { - public fun copy(): TaskTableReader - - public fun setValues(table: TaskTableReader) - - /** - * The timestamp of the current entry of the reader relative to the start of the workload. - */ - public val timestamp: Instant - - /** - * The timestamp of the current entry of the reader. - */ - public val timestampAbsolute: Instant - - /** - * The [TaskInfo] of the task to which the row belongs to. - */ - public val taskInfo: TaskInfo - - /** - * The [HostInfo] of the host on which the task is hosted or `null` if it has no host. - */ - public val host: HostInfo? - - /** - * The uptime of the host since last time in ms. - */ - public val uptime: Long - - /** - * The downtime of the host since last time in ms. - */ - public val downtime: Long - - /** - * The [Instant] at which the task was enqueued for the scheduler. - */ - public val provisionTime: Instant? - - /** - * The [Instant] at which the task booted relative to the start of the workload. - */ - public val bootTime: Instant? - - /** - * The [Instant] at which the task booted. - */ - public val bootTimeAbsolute: Instant? - - /** - * The capacity of the CPUs of Host on which the task is running (in MHz). - */ - public val cpuLimit: Double - - /** - * The duration (in seconds) that a CPU was active in the task. - */ - public val cpuActiveTime: Long - - /** - * The duration (in seconds) that a CPU was idle in the task. - */ - public val cpuIdleTime: Long - - /** - * The duration (in seconds) that a vCPU wanted to run, but no capacity was available. - */ - public val cpuStealTime: Long - - /** - * The duration (in seconds) of CPU time that was lost due to interference. - */ - public val cpuLostTime: Long - - /** - * The state of the task - */ - public val taskState: TaskState? -} - -// Loads the default export fields for deserialization whenever this file is loaded. -private val _ignore = DfltTaskExportColumns diff --git a/opendc-compute/opendc-compute-telemetry/src/test/resources/log4j2.xml b/opendc-compute/opendc-compute-telemetry/src/test/resources/log4j2.xml deleted file mode 100644 index 0dfb75f2..00000000 --- a/opendc-compute/opendc-compute-telemetry/src/test/resources/log4j2.xml +++ /dev/null @@ -1,38 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!-- - ~ Copyright (c) 2021 AtLarge Research - ~ - ~ Permission is hereby granted, free of charge, to any person obtaining a copy - ~ of this software and associated documentation files (the "Software"), to deal - ~ in the Software without restriction, including without limitation the rights - ~ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - ~ copies of the Software, and to permit persons to whom the Software is - ~ furnished to do so, subject to the following conditions: - ~ - ~ The above copyright notice and this permission notice shall be included in all - ~ copies or substantial portions of the Software. - ~ - ~ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - ~ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - ~ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
-  ~ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-  ~ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-  ~ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-  ~ SOFTWARE.
-  -->
-
-<Configuration status="WARN" packages="org.apache.logging.log4j.core">
-    <Appenders>
-        <Console name="Console" target="SYSTEM_OUT">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} [%highlight{%-5level}] %logger{36} - %msg%n" disableAnsi="false"/>
-        </Console>
-    </Appenders>
-    <Loggers>
-        <Logger name="org.opendc" level="trace" additivity="false">
-            <AppenderRef ref="Console"/>
-        </Logger>
-        <Root level="info">
-            <AppenderRef ref="Console"/>
-        </Root>
-    </Loggers>
-</Configuration>
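
As a usage illustration of the removed `ParquetComputeMonitor` companion `invoke` overloads shown near the top of this diff: the sketch below is not code from the repository; the output directory, partition label and buffer size are invented for the example.

```kotlin
import org.opendc.compute.telemetry.export.parquet.ParquetComputeMonitor
import java.io.File

fun main() {
    // Writes output/seed=0/host.parquet, task.parquet and service.parquet.
    // The directory name, partition label and buffer size are made up for this sketch.
    val monitor =
        ParquetComputeMonitor(
            base = File("output"),
            partition = "seed=0",
            bufferSize = 4096,
        )
    // Leaving hostExportColumns/taskExportColumns/serviceExportColumns unset falls back
    // to all loaded columns, per the second `invoke` overload above. Passing a
    // ComputeExportConfig instead restricts the output to the configured columns,
    // as described in the removed README.
}
```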
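The removed `HostTableReader` documents per-sample host metrics (CPU capacity, usage and demand in MHz, power draw in W, energy in J). A minimal consumer sketch follows; the 30-second export interval and the printed format are assumptions made for the example, not part of the interface.

```kotlin
import org.opendc.compute.telemetry.table.HostTableReader

/**
 * Illustrative only: derives a few quantities from the fields documented in the removed
 * HostTableReader. The default export interval below is an assumption of this sketch.
 */
fun summarizeHostSample(reader: HostTableReader, exportIntervalMs: Long = 30_000) {
    // cpuUtilization is already exposed by the reader; recomputing it here only shows
    // how it relates to cpuUsage and cpuLimit (both in MHz).
    val utilization = if (reader.cpuLimit > 0.0) reader.cpuUsage / reader.cpuLimit else 0.0
    // Demand the guests asked for but the host could not supply (MHz).
    val unmetDemand = (reader.cpuDemand - reader.cpuUsage).coerceAtLeast(0.0)
    // powerDraw is in W, so W * s gives a rough energy estimate in J to compare
    // against the reported energyUsage (also in J).
    val energyEstimate = reader.powerDraw * (exportIntervalMs / 1000.0)

    println(
        "host=${reader.host.name} util=$utilization unmetDemand=${unmetDemand}MHz " +
            "energy~${energyEstimate}J reported=${reader.energyUsage}J",
    )
}
```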
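Since `ServiceTableReader` exposes `copy()`/`setValues()` and the KDoc describes `ServiceData` as a persistent object, a consumer would typically snapshot the reader via the removed `toServiceData()` helper. A small sketch, with an invented print format:

```kotlin
import org.opendc.compute.telemetry.table.ServiceTableReader
import org.opendc.compute.telemetry.table.toServiceData

// Illustrative only: copies the reusable reader into the immutable ServiceData value
// defined above, so the sample can be kept after the reader is updated again.
fun logServiceSample(reader: ServiceTableReader) {
    val data = reader.toServiceData()
    println(
        "t=${data.timestamp} hostsUp=${data.hostsUp}/${data.hostsUp + data.hostsDown} " +
            "tasks pending=${data.tasksPending} active=${data.tasksActive} " +
            "failed attempts=${data.attemptsFailure}",
    )
}
```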
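Similarly for tasks: the removed `TaskTableReader` exposes the static `TaskInfo`, the optional `HostInfo` of the host running the task, and CPU time counters in seconds. A hypothetical one-line summary helper (name and output format invented for this sketch):

```kotlin
import org.opendc.compute.telemetry.table.TaskTableReader

// Illustrative only: builds a short summary from fields documented in the removed
// TaskTableReader. host is null when the task is not placed on any host.
fun describeTaskSample(reader: TaskTableReader): String {
    val hostName = reader.host?.name ?: "<not scheduled>"
    return "task=${reader.taskInfo.name} state=${reader.taskState} host=$hostName " +
        "cpuActive=${reader.cpuActiveTime}s cpuSteal=${reader.cpuStealTime}s"
}
```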
