diff options
| author | Dante Niewenhuis <d.niewenhuis@hotmail.com> | 2024-05-31 16:10:51 +0200 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2024-05-31 16:10:51 +0200 |
| commit | 23caa622972708bcf626f7747e509022f70d31fc (patch) | |
| tree | cc83d5bc75cb73341dbb2357a57983bd42f6b46d /opendc-compute/opendc-compute-telemetry | |
| parent | ad20465a5df47b49561bb0afbdda5cd65c5da4b8 (diff) | |
Added host_name and boot_time_absolute to the output files (#233)
Added host_name to the host output file.
Added boot_time_absolute to the server output file.
Renamed absolute_timestamp to timestamp_absolute in all output files
Diffstat (limited to 'opendc-compute/opendc-compute-telemetry')
7 files changed, 132 insertions, 79 deletions
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt index 46759ed1..0b11b57d 100644 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt +++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt @@ -143,7 +143,7 @@ public class ComputeMetricReader( override fun setValues(table: ServiceTableReader) { _timestamp = table.timestamp - _absoluteTimestamp = table.absoluteTimestamp + _timestampAbsolute = table.timestampAbsolute _hostsUp = table.hostsUp _hostsDown = table.hostsDown @@ -159,9 +159,9 @@ public class ComputeMetricReader( override val timestamp: Instant get() = _timestamp - private var _absoluteTimestamp: Instant = Instant.MIN - override val absoluteTimestamp: Instant - get() = _absoluteTimestamp + private var _timestampAbsolute: Instant = Instant.MIN + override val timestampAbsolute: Instant + get() = _timestampAbsolute override val hostsUp: Int get() = _hostsUp @@ -200,7 +200,7 @@ public class ComputeMetricReader( */ fun record(now: Instant) { _timestamp = now - _absoluteTimestamp = now + startTime + _timestampAbsolute = now + startTime val stats = service.getSchedulerStats() _hostsUp = stats.hostsAvailable @@ -231,7 +231,7 @@ public class ComputeMetricReader( override fun setValues(table: HostTableReader) { _timestamp = table.timestamp - _absoluteTimestamp = table.absoluteTimestamp + _timestampAbsolute = table.timestampAbsolute _guestsTerminated = table.guestsTerminated _guestsRunning = table.guestsRunning @@ -252,6 +252,7 @@ public class ComputeMetricReader( _uptime = table.uptime _downtime = table.downtime _bootTime = table.bootTime + _bootTimeAbsolute = table.bootTimeAbsolute } private val _host = host @@ -262,9 +263,9 @@ public class ComputeMetricReader( get() = 
_timestamp private var _timestamp = Instant.MIN - override val absoluteTimestamp: Instant - get() = _absoluteTimestamp - private var _absoluteTimestamp = Instant.MIN + override val timestampAbsolute: Instant + get() = _timestampAbsolute + private var _timestampAbsolute = Instant.MIN override val guestsTerminated: Int get() = _guestsTerminated @@ -349,6 +350,10 @@ public class ComputeMetricReader( get() = _bootTime private var _bootTime: Instant? = null + override val bootTimeAbsolute: Instant? + get() = _bootTimeAbsolute + private var _bootTimeAbsolute: Instant? = null + /** * Record the next cycle. */ @@ -357,7 +362,7 @@ public class ComputeMetricReader( val hostSysStats = _host.getSystemStats() _timestamp = now - _absoluteTimestamp = now + startTime + _timestampAbsolute = now + startTime _guestsTerminated = hostSysStats.guestsTerminated _guestsRunning = hostSysStats.guestsRunning @@ -373,12 +378,13 @@ public class ComputeMetricReader( _cpuLostTime = hostCpuStats.lostTime _powerDraw = hostSysStats.powerDraw _energyUsage = hostSysStats.energyUsage - _carbonIntensity = carbonTrace.getCarbonIntensity(absoluteTimestamp) + _carbonIntensity = carbonTrace.getCarbonIntensity(timestampAbsolute) _carbonEmission = carbonIntensity * (energyUsage / 3600000.0) // convert energy usage from J to kWh _uptime = hostSysStats.uptime.toMillis() _downtime = hostSysStats.downtime.toMillis() _bootTime = hostSysStats.bootTime + _bootTime = hostSysStats.bootTime + startTime } /** @@ -430,7 +436,7 @@ public class ComputeMetricReader( host = table.host _timestamp = table.timestamp - _absoluteTimestamp = table.absoluteTimestamp + _timestampAbsolute = table.timestampAbsolute _cpuLimit = table.cpuLimit _cpuActiveTime = table.cpuActiveTime @@ -441,6 +447,7 @@ public class ComputeMetricReader( _downtime = table.downtime _provisionTime = table.provisionTime _bootTime = table.bootTime + _bootTimeAbsolute = table.bootTimeAbsolute } private val _server = server @@ -470,9 +477,9 @@ public class 
ComputeMetricReader( override val timestamp: Instant get() = _timestamp - private var _absoluteTimestamp = Instant.MIN - override val absoluteTimestamp: Instant - get() = _absoluteTimestamp + private var _timestampAbsolute = Instant.MIN + override val timestampAbsolute: Instant + get() = _timestampAbsolute override val uptime: Long get() = _uptime - previousUptime @@ -516,6 +523,10 @@ public class ComputeMetricReader( private var _cpuLostTime = 0L private var previousCpuLostTime = 0L + override val bootTimeAbsolute: Instant? + get() = _bootTimeAbsolute + private var _bootTimeAbsolute: Instant? = null + /** * Record the next cycle. */ @@ -530,7 +541,7 @@ public class ComputeMetricReader( val sysStats = _host?.getSystemStats(_server) _timestamp = now - _absoluteTimestamp = now + startTime + _timestampAbsolute = now + startTime _cpuLimit = cpuStats?.capacity ?: 0.0 _cpuActiveTime = cpuStats?.activeTime ?: 0 @@ -541,6 +552,12 @@ public class ComputeMetricReader( _downtime = sysStats?.downtime?.toMillis() ?: 0 _provisionTime = _server.launchedAt _bootTime = sysStats?.bootTime + + if (sysStats != null) { + _bootTimeAbsolute = sysStats.bootTime + startTime + } else { + _bootTimeAbsolute = null + } } /** diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetHostDataWriter.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetHostDataWriter.kt index dc2d39c2..020e67f2 100644 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetHostDataWriter.kt +++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetHostDataWriter.kt @@ -76,99 +76,110 @@ public class ParquetHostDataWriter(path: File, bufferSize: Int) : consumer.addLong(data.timestamp.toEpochMilli()) consumer.endField("timestamp", 0) - consumer.startField("absolute_timestamp", 1) - 
consumer.addLong(data.absoluteTimestamp.toEpochMilli()) - consumer.endField("absolute_timestamp", 1) + consumer.startField("timestamp_absolute", 1) + consumer.addLong(data.timestampAbsolute.toEpochMilli()) + consumer.endField("timestamp_absolute", 1) consumer.startField("host_id", 2) consumer.addBinary(Binary.fromString(data.host.id)) consumer.endField("host_id", 2) - consumer.startField("cpu_count", 3) + consumer.startField("host_name", 3) + consumer.addBinary(Binary.fromString(data.host.name)) + consumer.endField("host_name", 3) + + consumer.startField("cpu_count", 4) consumer.addInteger(data.host.cpuCount) - consumer.endField("cpu_count", 3) + consumer.endField("cpu_count", 4) - consumer.startField("mem_capacity", 4) + consumer.startField("mem_capacity", 5) consumer.addLong(data.host.memCapacity) - consumer.endField("mem_capacity", 4) + consumer.endField("mem_capacity", 5) - consumer.startField("guests_terminated", 5) + consumer.startField("guests_terminated", 6) consumer.addInteger(data.guestsTerminated) - consumer.endField("guests_terminated", 5) + consumer.endField("guests_terminated", 6) - consumer.startField("guests_running", 6) + consumer.startField("guests_running", 7) consumer.addInteger(data.guestsRunning) - consumer.endField("guests_running", 6) + consumer.endField("guests_running", 7) - consumer.startField("guests_error", 7) + consumer.startField("guests_error", 8) consumer.addInteger(data.guestsError) - consumer.endField("guests_error", 7) + consumer.endField("guests_error", 8) - consumer.startField("guests_invalid", 8) + consumer.startField("guests_invalid", 9) consumer.addInteger(data.guestsInvalid) - consumer.endField("guests_invalid", 8) + consumer.endField("guests_invalid", 9) - consumer.startField("cpu_limit", 9) + consumer.startField("cpu_limit", 10) consumer.addDouble(data.cpuLimit) - consumer.endField("cpu_limit", 9) + consumer.endField("cpu_limit", 10) - consumer.startField("cpu_usage", 10) + consumer.startField("cpu_usage", 11) 
consumer.addDouble(data.cpuUsage) - consumer.endField("cpu_usage", 10) + consumer.endField("cpu_usage", 11) - consumer.startField("cpu_demand", 11) + consumer.startField("cpu_demand", 12) consumer.addDouble(data.cpuUsage) - consumer.endField("cpu_demand", 11) + consumer.endField("cpu_demand", 12) - consumer.startField("cpu_utilization", 12) + consumer.startField("cpu_utilization", 13) consumer.addDouble(data.cpuUtilization) - consumer.endField("cpu_utilization", 12) + consumer.endField("cpu_utilization", 13) - consumer.startField("cpu_time_active", 13) + consumer.startField("cpu_time_active", 14) consumer.addLong(data.cpuActiveTime) - consumer.endField("cpu_time_active", 13) + consumer.endField("cpu_time_active", 14) - consumer.startField("cpu_time_idle", 14) + consumer.startField("cpu_time_idle", 15) consumer.addLong(data.cpuIdleTime) - consumer.endField("cpu_time_idle", 14) + consumer.endField("cpu_time_idle", 15) - consumer.startField("cpu_time_steal", 15) + consumer.startField("cpu_time_steal", 16) consumer.addLong(data.cpuStealTime) - consumer.endField("cpu_time_steal", 15) + consumer.endField("cpu_time_steal", 16) - consumer.startField("cpu_time_lost", 16) + consumer.startField("cpu_time_lost", 17) consumer.addLong(data.cpuLostTime) - consumer.endField("cpu_time_lost", 16) + consumer.endField("cpu_time_lost", 17) - consumer.startField("power_draw", 17) + consumer.startField("power_draw", 18) consumer.addDouble(data.powerDraw) - consumer.endField("power_draw", 17) + consumer.endField("power_draw", 18) - consumer.startField("energy_usage", 18) + consumer.startField("energy_usage", 19) consumer.addDouble(data.energyUsage) - consumer.endField("energy_usage", 18) + consumer.endField("energy_usage", 19) - consumer.startField("carbon_intensity", 19) + consumer.startField("carbon_intensity", 20) consumer.addDouble(data.carbonIntensity) - consumer.endField("carbon_intensity", 19) + consumer.endField("carbon_intensity", 20) - consumer.startField("carbon_emission", 20) 
+ consumer.startField("carbon_emission", 21) consumer.addDouble(data.carbonEmission) - consumer.endField("carbon_emission", 20) + consumer.endField("carbon_emission", 21) - consumer.startField("uptime", 21) + consumer.startField("uptime", 22) consumer.addLong(data.uptime) - consumer.endField("uptime", 21) + consumer.endField("uptime", 22) - consumer.startField("downtime", 22) + consumer.startField("downtime", 23) consumer.addLong(data.downtime) - consumer.endField("downtime", 22) + consumer.endField("downtime", 23) val bootTime = data.bootTime if (bootTime != null) { - consumer.startField("boot_time", 23) + consumer.startField("boot_time", 24) consumer.addLong(bootTime.toEpochMilli()) - consumer.endField("boot_time", 23) + consumer.endField("boot_time", 24) + } + + val bootTimeAbsolute = data.bootTimeAbsolute + if (bootTimeAbsolute != null) { + consumer.startField("boot_time_absolute", 25) + consumer.addLong(bootTimeAbsolute.toEpochMilli()) + consumer.endField("boot_time_absolute", 25) } consumer.endMessage() @@ -188,12 +199,16 @@ public class ParquetHostDataWriter(path: File, bufferSize: Int) : .named("timestamp"), Types .required(PrimitiveType.PrimitiveTypeName.INT64) - .named("absolute_timestamp"), + .named("timestamp_absolute"), Types .required(PrimitiveType.PrimitiveTypeName.BINARY) .`as`(LogicalTypeAnnotation.stringType()) .named("host_id"), Types + .required(PrimitiveType.PrimitiveTypeName.BINARY) + .`as`(LogicalTypeAnnotation.stringType()) + .named("host_name"), + Types .required(PrimitiveType.PrimitiveTypeName.INT32) .named("cpu_count"), Types @@ -256,6 +271,9 @@ public class ParquetHostDataWriter(path: File, bufferSize: Int) : Types .optional(PrimitiveType.PrimitiveTypeName.INT64) .named("boot_time"), + Types + .optional(PrimitiveType.PrimitiveTypeName.INT64) + .named("boot_time_absolute"), ) .named("host") } diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetServerDataWriter.kt 
b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetServerDataWriter.kt index 4045d070..e1b489ac 100644 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetServerDataWriter.kt +++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetServerDataWriter.kt @@ -77,9 +77,9 @@ public class ParquetServerDataWriter(path: File, bufferSize: Int) : consumer.addLong(data.timestamp.toEpochMilli()) consumer.endField("timestamp", 0) - consumer.startField("absolute_timestamp", 1) - consumer.addLong(data.absoluteTimestamp.toEpochMilli()) - consumer.endField("absolute_timestamp", 1) + consumer.startField("timestamp_absolute", 1) + consumer.addLong(data.timestampAbsolute.toEpochMilli()) + consumer.endField("timestamp_absolute", 1) consumer.startField("server_id", 2) consumer.addBinary(Binary.fromString(data.server.id)) @@ -146,6 +146,13 @@ public class ParquetServerDataWriter(path: File, bufferSize: Int) : consumer.endField("boot_time", 15) } + val bootTimeAbsolute = data.bootTimeAbsolute + if (bootTimeAbsolute != null) { + consumer.startField("boot_time_absolute", 16) + consumer.addLong(bootTimeAbsolute.toEpochMilli()) + consumer.endField("boot_time_absolute", 16) + } + consumer.endMessage() } } @@ -162,7 +169,7 @@ public class ParquetServerDataWriter(path: File, bufferSize: Int) : .named("timestamp"), Types .required(PrimitiveType.PrimitiveTypeName.INT64) - .named("absolute_timestamp"), + .named("timestamp_absolute"), Types .required(PrimitiveType.PrimitiveTypeName.BINARY) .`as`(LogicalTypeAnnotation.stringType()) @@ -204,12 +211,13 @@ public class ParquetServerDataWriter(path: File, bufferSize: Int) : .named("downtime"), Types .optional(PrimitiveType.PrimitiveTypeName.INT64) -// .`as`(LogicalTypeAnnotation.timestampType(true, LogicalTypeAnnotation.TimeUnit.MILLIS)) .named("provision_time"), Types 
.optional(PrimitiveType.PrimitiveTypeName.INT64) -// .`as`(LogicalTypeAnnotation.timestampType(true, LogicalTypeAnnotation.TimeUnit.MILLIS)) .named("boot_time"), + Types + .optional(PrimitiveType.PrimitiveTypeName.INT64) + .named("boot_time_absolute"), ) .named("server") } diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetServiceDataWriter.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetServiceDataWriter.kt index 068f82ba..eba8fc4f 100644 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetServiceDataWriter.kt +++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetServiceDataWriter.kt @@ -66,9 +66,9 @@ public class ParquetServiceDataWriter(path: File, bufferSize: Int) : consumer.addLong(data.timestamp.toEpochMilli()) consumer.endField("timestamp", 0) - consumer.startField("absolute_timestamp", 1) - consumer.addLong(data.absoluteTimestamp.toEpochMilli()) - consumer.endField("absolute_timestamp", 1) + consumer.startField("timestamp_absolute", 1) + consumer.addLong(data.timestampAbsolute.toEpochMilli()) + consumer.endField("timestamp_absolute", 1) consumer.startField("hosts_up", 2) consumer.addInteger(data.hostsUp) @@ -111,7 +111,7 @@ public class ParquetServiceDataWriter(path: File, bufferSize: Int) : .named("timestamp"), Types .required(PrimitiveType.PrimitiveTypeName.INT64) - .named("absolute_timestamp"), + .named("timestamp_absolute"), Types .required(PrimitiveType.PrimitiveTypeName.INT32) .named("hosts_up"), diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostTableReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostTableReader.kt index e6b19c11..d41c6dc0 100644 --- 
a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostTableReader.kt +++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/HostTableReader.kt @@ -38,14 +38,14 @@ public interface HostTableReader { public val host: HostInfo /** - * The timestamp of the current entry of the reader. + * The timestamp of the current entry of the reader relative to the start of the workload. */ public val timestamp: Instant /** * The timestamp of the current entry of the reader. */ - public val absoluteTimestamp: Instant + public val timestampAbsolute: Instant /** * The number of guests that are in a terminated state. @@ -138,7 +138,12 @@ public interface HostTableReader { public val downtime: Long /** - * The [Instant] at which the host booted. + * The [Instant] at which the host booted relative to the start of the workload. */ public val bootTime: Instant? + + /** + * The [Instant] at which the host booted. + */ + public val bootTimeAbsolute: Instant? } diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerTableReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerTableReader.kt index baac1142..51113025 100644 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerTableReader.kt +++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerTableReader.kt @@ -33,14 +33,14 @@ public interface ServerTableReader { public fun setValues(table: ServerTableReader) /** - * The timestamp of the current entry of the reader. + * The timestamp of the current entry of the reader relative to the start of the workload. */ public val timestamp: Instant /** * The timestamp of the current entry of the reader. 
*/ - public val absoluteTimestamp: Instant + public val timestampAbsolute: Instant /** * The [ServerInfo] of the server to which the row belongs to. @@ -68,11 +68,16 @@ public interface ServerTableReader { public val provisionTime: Instant? /** - * The [Instant] at which the server booted. + * The [Instant] at which the server booted relative to the start of the workload. */ public val bootTime: Instant? /** + * The [Instant] at which the server booted. + */ + public val bootTimeAbsolute: Instant? + + /** * The capacity of the CPUs of Host on which the server is running (in MHz). */ public val cpuLimit: Double diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt index 3b184913..e6c2a1ae 100644 --- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt +++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt @@ -40,7 +40,7 @@ public interface ServiceTableReader { /** * The timestamp of the current entry of the reader. */ - public val absoluteTimestamp: Instant + public val timestampAbsolute: Instant /** * The number of hosts that are up at this instant. |
