author    Fabian Mastenbroek <mail.fabianm@gmail.com>    2021-09-28 11:23:13 +0200
committer GitHub <noreply@github.com>    2021-09-28 11:23:13 +0200
commit    6196895bfd0334052afa4fb91b00adb259a661b6 (patch)
tree      8a14988b30f6f5758b1f9f982d0086296eb5d416 /opendc-compute/opendc-compute-workload
parent    993c65d9c287d8db2db9ff1f95abb414803a502c (diff)
parent    94d8ee69e52dcd375a662a08c198aa29670362fb (diff)
merge: Simplify usage of ComputeMetricExporter
This pull request addresses some issues with the current implementation of the `ComputeMetricExporter` class. In particular, constructing a `ComputeMetricExporter` no longer requires a `Clock`.

- Ensure shutdown of exporter is called
- Do not require clock for ComputeMetricExporter
- Do not recover guests in non-error state
- Write null values explicitly in Parquet exporter
- Report cause of compute exporter failure

**Breaking API Changes**

- `ComputeMetricExporter` is now an abstract class that can be extended to collect metrics
- `ParquetComputeMonitor` has been renamed to `ParquetComputeMetricExporter` and extends `ComputeMetricExporter`
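To illustrate the breaking change, here is a minimal sketch of what extending the now-abstract `ComputeMetricExporter` might look like. Only the no-argument superclass constructor and the `shutdown(): CompletableResultCode` override are confirmed by the diff below; the `record(ServerData)` callback and the class name `CountingComputeMetricExporter` are assumptions for illustration.

```kotlin
import io.opentelemetry.sdk.common.CompletableResultCode
import org.opendc.telemetry.compute.ComputeMetricExporter
import org.opendc.telemetry.compute.table.ServerData

// Hypothetical exporter that only counts server records in memory.
// record(ServerData) is assumed from the ComputeMonitor-style callbacks;
// shutdown() returning CompletableResultCode matches the diff below.
class CountingComputeMetricExporter : ComputeMetricExporter() {
    var serverRecords: Long = 0
        private set

    override fun record(data: ServerData) {
        serverRecords++
    }

    override fun shutdown(): CompletableResultCode {
        // Nothing to flush, so report success immediately.
        return CompletableResultCode.ofSuccess()
    }
}
```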
Diffstat (limited to 'opendc-compute/opendc-compute-workload')
-rw-r--r-- opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetComputeMetricExporter.kt (renamed from opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetExportMonitor.kt) | 8
-rw-r--r-- opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetHostDataWriter.kt | 4
-rw-r--r-- opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetServerDataWriter.kt | 4
3 files changed, 8 insertions, 8 deletions
diff --git a/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetExportMonitor.kt b/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetComputeMetricExporter.kt
index f41a2241..ad182d67 100644
--- a/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetExportMonitor.kt
+++ b/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetComputeMetricExporter.kt
@@ -22,6 +22,8 @@
package org.opendc.compute.workload.export.parquet
+import io.opentelemetry.sdk.common.CompletableResultCode
+import org.opendc.telemetry.compute.ComputeMetricExporter
import org.opendc.telemetry.compute.ComputeMonitor
import org.opendc.telemetry.compute.table.HostData
import org.opendc.telemetry.compute.table.ServerData
@@ -31,7 +33,7 @@ import java.io.File
/**
* A [ComputeMonitor] that logs the events to a Parquet file.
*/
-public class ParquetExportMonitor(base: File, partition: String, bufferSize: Int) : ComputeMonitor, AutoCloseable {
+public class ParquetComputeMetricExporter(base: File, partition: String, bufferSize: Int) : ComputeMetricExporter() {
private val serverWriter = ParquetServerDataWriter(
File(base, "server/$partition/data.parquet").also { it.parentFile.mkdirs() },
bufferSize
@@ -59,9 +61,11 @@ public class ParquetExportMonitor(base: File, partition: String, bufferSize: Int
serviceWriter.write(data)
}
- override fun close() {
+ override fun shutdown(): CompletableResultCode {
hostWriter.close()
serviceWriter.close()
serverWriter.close()
+
+ return CompletableResultCode.ofSuccess()
}
}
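A hedged usage sketch of the renamed exporter, assuming a caller that wires it into a simulation run: the constructor parameters (`base`, `partition`, `bufferSize`) and the `shutdown()` signature come from the diff above, while the output directory, partition name, buffer size, and the timeout passed to `CompletableResultCode.join` are placeholder choices.

```kotlin
import java.io.File
import java.util.concurrent.TimeUnit
import org.opendc.compute.workload.export.parquet.ParquetComputeMetricExporter

fun main() {
    // Constructor parameters match the class above; the directory, partition
    // name and buffer size are placeholder values for illustration.
    val exporter = ParquetComputeMetricExporter(
        base = File("output"),
        partition = "experiment-0",
        bufferSize = 4096
    )

    // ... run the simulation; the metric pipeline pushes host/server/service
    //     data into the exporter while it runs ...

    // The exporter must now be shut down explicitly rather than closed:
    // shutdown() closes the underlying Parquet writers and signals completion.
    exporter.shutdown().join(30, TimeUnit.SECONDS)
}
```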
diff --git a/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetHostDataWriter.kt b/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetHostDataWriter.kt
index 37066a0d..98a0739e 100644
--- a/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetHostDataWriter.kt
+++ b/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetHostDataWriter.kt
@@ -54,9 +54,7 @@ public class ParquetHostDataWriter(path: File, bufferSize: Int) :
builder["uptime"] = data.uptime
builder["downtime"] = data.downtime
val bootTime = data.bootTime
- if (bootTime != null) {
- builder["boot_time"] = bootTime.toEpochMilli()
- }
+ builder["boot_time"] = bootTime?.toEpochMilli()
builder["cpu_count"] = data.host.cpuCount
builder["cpu_limit"] = data.cpuLimit
diff --git a/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetServerDataWriter.kt b/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetServerDataWriter.kt
index bea23d32..0d11ec23 100644
--- a/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetServerDataWriter.kt
+++ b/opendc-compute/opendc-compute-workload/src/main/kotlin/org/opendc/compute/workload/export/parquet/ParquetServerDataWriter.kt
@@ -56,9 +56,7 @@ public class ParquetServerDataWriter(path: File, bufferSize: Int) :
builder["uptime"] = data.uptime
builder["downtime"] = data.downtime
val bootTime = data.bootTime
- if (bootTime != null) {
- builder["boot_time"] = bootTime.toEpochMilli()
- }
+ builder["boot_time"] = bootTime?.toEpochMilli()
builder["scheduling_latency"] = data.schedulingLatency
builder["cpu_count"] = data.server.cpuCount