author    Dante Niewenhuis <d.niewenhuis@hotmail.com>    2024-08-27 13:48:46 +0200
committer GitHub <noreply@github.com>    2024-08-27 13:48:46 +0200
commit 3363df4c72a064e590ca98f8e01832cfa4e15a3f (patch)
tree   9a938700fe08ce344ff5d0d475d0b64d7233d1fc /opendc-compute/opendc-compute-telemetry
parent c21708013f2746807f5bdb3fc47c2b47ed15b7c8 (diff)
Renamed input files and changed server to task internally (#246)
* Updated SimTrace to use a single ArrayDeque instead of three separate lists for deadline, cpuUsage, and coreCount
* Renamed input files to tasks.parquet and fragments.parquet; renamed server to task. OpenDC now exports tasks.parquet instead of server.parquet
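The SimTrace refactor itself lives outside this module, so here is a minimal Kotlin sketch of the consolidation the message describes, with illustrative names (Fragment, FragmentQueue) rather than the actual OpenDC types:

```kotlin
// Sketch of replacing three parallel lists with one ArrayDeque of fragments.
// Fragment and FragmentQueue are illustrative, not the real SimTrace types.
data class Fragment(val deadline: Long, val cpuUsage: Double, val coreCount: Int)

class FragmentQueue {
    private val fragments = ArrayDeque<Fragment>()

    // One element now carries what previously required the same index into
    // three separate lists (deadline, cpuUsage, coreCount).
    fun add(deadline: Long, cpuUsage: Double, coreCount: Int) =
        fragments.addLast(Fragment(deadline, cpuUsage, coreCount))

    // Consume the next fragment, or null when the trace is exhausted.
    fun next(): Fragment? = fragments.removeFirstOrNull()
}
```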
Diffstat (limited to 'opendc-compute/opendc-compute-telemetry')
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt | 92
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMonitor.kt | 4
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ComputeExportConfig.kt | 32
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServiceExportColumns.kt | 12
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltTaskExportColumns.kt (renamed from opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServerExportColumns.kt) | 52
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetComputeMonitor.kt | 22
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/README.md | 16
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceData.kt | 12
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt | 12
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskInfo.kt (renamed from opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerInfo.kt) | 4
-rw-r--r--  opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskTableReader.kt (renamed from opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerTableReader.kt) | 30
11 files changed, 144 insertions, 144 deletions
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt
index 0b11b57d..5bd237fd 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMetricReader.kt
@@ -29,15 +29,15 @@ import kotlinx.coroutines.launch
import mu.KotlinLogging
import org.opendc.common.Dispatcher
import org.opendc.common.asCoroutineDispatcher
-import org.opendc.compute.api.Server
+import org.opendc.compute.api.Task
import org.opendc.compute.carbon.CarbonTrace
import org.opendc.compute.service.ComputeService
import org.opendc.compute.service.driver.Host
import org.opendc.compute.telemetry.table.HostInfo
import org.opendc.compute.telemetry.table.HostTableReader
-import org.opendc.compute.telemetry.table.ServerInfo
-import org.opendc.compute.telemetry.table.ServerTableReader
import org.opendc.compute.telemetry.table.ServiceTableReader
+import org.opendc.compute.telemetry.table.TaskInfo
+import org.opendc.compute.telemetry.table.TaskTableReader
import java.time.Duration
import java.time.Instant
@@ -73,9 +73,9 @@ public class ComputeMetricReader(
private val hostTableReaders = mutableMapOf<Host, HostTableReaderImpl>()
/**
- * Mapping from [Server] instances to [ServerTableReaderImpl]
+ * Mapping from [Task] instances to [TaskTableReaderImpl]
*/
- private val serverTableReaders = mutableMapOf<Server, ServerTableReaderImpl>()
+ private val taskTableReaders = mutableMapOf<Task, TaskTableReaderImpl>()
/**
* The background job that is responsible for collecting the metrics every cycle.
@@ -109,8 +109,8 @@ public class ComputeMetricReader(
reader.reset()
}
- for (server in this.service.servers) {
- val reader = this.serverTableReaders.computeIfAbsent(server) { ServerTableReaderImpl(service, it, startTime) }
+ for (task in this.service.tasks) {
+ val reader = this.taskTableReaders.computeIfAbsent(task) { TaskTableReaderImpl(service, it, startTime) }
reader.record(now)
this.monitor.record(reader.copy())
reader.reset()
@@ -147,9 +147,9 @@ public class ComputeMetricReader(
_hostsUp = table.hostsUp
_hostsDown = table.hostsDown
- _serversTotal = table.serversTotal
- _serversPending = table.serversPending
- _serversActive = table.serversActive
+ _tasksTotal = table.tasksTotal
+ _tasksPending = table.tasksPending
+ _tasksActive = table.tasksActive
_attemptsSuccess = table.attemptsSuccess
_attemptsFailure = table.attemptsFailure
_attemptsError = table.attemptsError
@@ -171,17 +171,17 @@ public class ComputeMetricReader(
get() = _hostsDown
private var _hostsDown = 0
- override val serversTotal: Int
- get() = _serversTotal
- private var _serversTotal = 0
+ override val tasksTotal: Int
+ get() = _tasksTotal
+ private var _tasksTotal = 0
- override val serversPending: Int
- get() = _serversPending
- private var _serversPending = 0
+ override val tasksPending: Int
+ get() = _tasksPending
+ private var _tasksPending = 0
- override val serversActive: Int
- get() = _serversActive
- private var _serversActive = 0
+ override val tasksActive: Int
+ get() = _tasksActive
+ private var _tasksActive = 0
override val attemptsSuccess: Int
get() = _attemptsSuccess
@@ -205,9 +205,9 @@ public class ComputeMetricReader(
val stats = service.getSchedulerStats()
_hostsUp = stats.hostsAvailable
_hostsDown = stats.hostsUnavailable
- _serversTotal = stats.serversTotal
- _serversPending = stats.serversPending
- _serversActive = stats.serversActive
+ _tasksTotal = stats.tasksTotal
+ _tasksPending = stats.tasksPending
+ _tasksActive = stats.tasksActive
_attemptsSuccess = stats.attemptsSuccess.toInt()
_attemptsFailure = stats.attemptsFailure.toInt()
_attemptsError = stats.attemptsError.toInt()
@@ -418,21 +418,21 @@ public class ComputeMetricReader(
}
/**
- * An aggregator for server metrics before they are reported.
+ * An aggregator for task metrics before they are reported.
*/
- private class ServerTableReaderImpl(
+ private class TaskTableReaderImpl(
private val service: ComputeService,
- server: Server,
+ task: Task,
private val startTime: Duration = Duration.ofMillis(0),
- ) : ServerTableReader {
- override fun copy(): ServerTableReader {
- val newServerTable = ServerTableReaderImpl(service, _server)
- newServerTable.setValues(this)
+ ) : TaskTableReader {
+ override fun copy(): TaskTableReader {
+ val newTaskTable = TaskTableReaderImpl(service, _task)
+ newTaskTable.setValues(this)
- return newServerTable
+ return newTaskTable
}
- override fun setValues(table: ServerTableReader) {
+ override fun setValues(table: TaskTableReader) {
host = table.host
_timestamp = table.timestamp
@@ -450,25 +450,25 @@ public class ComputeMetricReader(
_bootTimeAbsolute = table.bootTimeAbsolute
}
- private val _server = server
+ private val _task = task
/**
- * The static information about this server.
+ * The static information about this task.
*/
- override val server =
- ServerInfo(
- server.uid.toString(),
- server.name,
+ override val task =
+ TaskInfo(
+ task.uid.toString(),
+ task.name,
"vm",
"x86",
- server.image.uid.toString(),
- server.image.name,
- server.flavor.coreCount,
- server.flavor.memorySize,
+ task.image.uid.toString(),
+ task.image.name,
+ task.flavor.coreCount,
+ task.flavor.memorySize,
)
/**
- * The [HostInfo] of the host on which the server is hosted.
+ * The [HostInfo] of the host on which the task is hosted.
*/
override var host: HostInfo? = null
private var _host: Host? = null
@@ -531,14 +531,14 @@ public class ComputeMetricReader(
* Record the next cycle.
*/
fun record(now: Instant) {
- val newHost = service.lookupHost(_server)
+ val newHost = service.lookupHost(_task)
if (newHost != null && newHost.uid != _host?.uid) {
_host = newHost
host = HostInfo(newHost.uid.toString(), newHost.name, "x86", newHost.model.cpuCount, newHost.model.memoryCapacity)
}
- val cpuStats = _host?.getCpuStats(_server)
- val sysStats = _host?.getSystemStats(_server)
+ val cpuStats = _host?.getCpuStats(_task)
+ val sysStats = _host?.getSystemStats(_task)
_timestamp = now
_timestampAbsolute = now + startTime
@@ -550,7 +550,7 @@ public class ComputeMetricReader(
_cpuLostTime = cpuStats?.lostTime ?: 0
_uptime = sysStats?.uptime?.toMillis() ?: 0
_downtime = sysStats?.downtime?.toMillis() ?: 0
- _provisionTime = _server.launchedAt
+ _provisionTime = _task.launchedAt
_bootTime = sysStats?.bootTime
if (sysStats != null) {
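The collection loop in the first hunk above lazily creates one aggregator per task and reuses it across cycles. A minimal sketch of that caching pattern, with simplified stand-ins for the OpenDC types:

```kotlin
import java.time.Instant

// TaskReader stands in for TaskTableReaderImpl; the real reader also
// copies itself to the monitor before resetting.
class TaskReader(val taskId: String) {
    fun record(now: Instant) { /* aggregate this cycle's metrics */ }
    fun reset() { /* clear per-cycle state */ }
}

val readers = mutableMapOf<String, TaskReader>()

fun collect(tasks: List<String>, now: Instant) {
    for (task in tasks) {
        // Reuse the task's reader across cycles; create it on first sight.
        val reader = readers.computeIfAbsent(task) { TaskReader(it) }
        reader.record(now)
        reader.reset()
    }
}
```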
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMonitor.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMonitor.kt
index b236a7df..1df058fb 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMonitor.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/ComputeMonitor.kt
@@ -23,8 +23,8 @@
package org.opendc.compute.telemetry
import org.opendc.compute.telemetry.table.HostTableReader
-import org.opendc.compute.telemetry.table.ServerTableReader
import org.opendc.compute.telemetry.table.ServiceTableReader
+import org.opendc.compute.telemetry.table.TaskTableReader
/**
* A monitor that tracks the metrics and events of the OpenDC Compute service.
@@ -33,7 +33,7 @@ public interface ComputeMonitor {
/**
* Record an entry with the specified [reader].
*/
- public fun record(reader: ServerTableReader) {}
+ public fun record(reader: TaskTableReader) {}
/**
* Record an entry with the specified [reader].
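Because record has an empty default body, a ComputeMonitor implementation overrides only the hooks it needs. A minimal sketch targeting the renamed task hook:

```kotlin
import org.opendc.compute.telemetry.ComputeMonitor
import org.opendc.compute.telemetry.table.TaskTableReader

// Counts task rows; the host and service hooks keep their empty defaults.
class TaskCountingMonitor : ComputeMonitor {
    var taskRows: Int = 0
        private set

    override fun record(reader: TaskTableReader) {
        taskRows++
    }
}
```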
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ComputeExportConfig.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ComputeExportConfig.kt
index 02e3e0bb..161c0936 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ComputeExportConfig.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ComputeExportConfig.kt
@@ -36,8 +36,8 @@ import kotlinx.serialization.json.JsonElement
import kotlinx.serialization.json.jsonObject
import org.opendc.common.logger.logger
import org.opendc.compute.telemetry.table.HostTableReader
-import org.opendc.compute.telemetry.table.ServerTableReader
import org.opendc.compute.telemetry.table.ServiceTableReader
+import org.opendc.compute.telemetry.table.TaskTableReader
import org.opendc.trace.util.parquet.exporter.ColListSerializer
import org.opendc.trace.util.parquet.exporter.ExportColumn
import org.opendc.trace.util.parquet.exporter.Exportable
@@ -48,22 +48,22 @@ import org.opendc.trace.util.parquet.exporter.columnSerializer
* parquet files for compute workloads.
*
* @param[hostExportColumns] the columns that will be included in the `host.parquet` raw output file.
- * @param[serverExportColumns] the columns that will be included in the `server.parquet` raw output file.
+ * @param[taskExportColumns] the columns that will be included in the `task.parquet` raw output file.
* @param[serviceExportColumns] the columns that will be included in the `service.parquet` raw output file.
*/
@Serializable(with = ComputeExportConfig.Companion.ComputeExportConfigSerializer::class)
public data class ComputeExportConfig(
public val hostExportColumns: Set<ExportColumn<HostTableReader>>,
- public val serverExportColumns: Set<ExportColumn<ServerTableReader>>,
+ public val taskExportColumns: Set<ExportColumn<TaskTableReader>>,
public val serviceExportColumns: Set<ExportColumn<ServiceTableReader>>,
) {
public constructor(
hostExportColumns: Collection<ExportColumn<HostTableReader>>,
- serverExportColumns: Collection<ExportColumn<ServerTableReader>>,
+ taskExportColumns: Collection<ExportColumn<TaskTableReader>>,
serviceExportColumns: Collection<ExportColumn<ServiceTableReader>>,
) : this(
hostExportColumns.toSet() + DfltHostExportColumns.BASE_EXPORT_COLUMNS,
- serverExportColumns.toSet() + DfltServerExportColumns.BASE_EXPORT_COLUMNS,
+ taskExportColumns.toSet() + DfltTaskExportColumns.BASE_EXPORT_COLUMNS,
serviceExportColumns.toSet() + DfltServiceExportColumns.BASE_EXPORT_COLUMNS,
)
@@ -74,7 +74,7 @@ public data class ComputeExportConfig(
"""
| === Compute Export Config ===
| Host columns : ${hostExportColumns.map { it.name }.toString().trim('[', ']')}
- | Server columns : ${serverExportColumns.map { it.name }.toString().trim('[', ']')}
+ | Task columns : ${taskExportColumns.map { it.name }.toString().trim('[', ']')}
| Service columns : ${serviceExportColumns.map { it.name }.toString().trim('[', ']')}
""".trimIndent()
@@ -87,20 +87,20 @@ public data class ComputeExportConfig(
*/
public fun loadDfltColumns() {
DfltHostExportColumns
- DfltServerExportColumns
+ DfltTaskExportColumns
DfltServiceExportColumns
}
/**
* Config that includes all columns defined in [DfltHostExportColumns],
- * [DfltServerExportColumns], [DfltServiceExportColumns] among all other loaded
- * columns for [HostTableReader], [ServerTableReader] and [ServiceTableReader].
+ * [DfltTaskExportColumns], [DfltServiceExportColumns] among all other loaded
+ * columns for [HostTableReader], [TaskTableReader] and [ServiceTableReader].
*/
public val ALL_COLUMNS: ComputeExportConfig by lazy {
loadDfltColumns()
ComputeExportConfig(
hostExportColumns = ExportColumn.getAllLoadedColumns(),
- serverExportColumns = ExportColumn.getAllLoadedColumns(),
+ taskExportColumns = ExportColumn.getAllLoadedColumns(),
serviceExportColumns = ExportColumn.getAllLoadedColumns(),
)
}
@@ -118,8 +118,8 @@ public data class ComputeExportConfig(
ListSerializer(columnSerializer<HostTableReader>()).descriptor,
)
element(
- "serverExportColumns",
- ListSerializer(columnSerializer<ServerTableReader>()).descriptor,
+ "taskExportColumns",
+ ListSerializer(columnSerializer<TaskTableReader>()).descriptor,
)
element(
"serviceExportColumns",
@@ -139,12 +139,12 @@ public data class ComputeExportConfig(
val elem = jsonDec.decodeJsonElement().jsonObject
val hostFields: List<ExportColumn<HostTableReader>> = elem["hostExportColumns"].toFieldList()
- val serverFields: List<ExportColumn<ServerTableReader>> = elem["serverExportColumns"].toFieldList()
+ val taskFields: List<ExportColumn<TaskTableReader>> = elem["taskExportColumns"].toFieldList()
val serviceFields: List<ExportColumn<ServiceTableReader>> = elem["serviceExportColumns"].toFieldList()
return ComputeExportConfig(
hostExportColumns = hostFields,
- serverExportColumns = serverFields,
+ taskExportColumns = taskFields,
serviceExportColumns = serviceFields,
)
}
@@ -163,8 +163,8 @@ public data class ComputeExportConfig(
encodeSerializableElement(
descriptor,
1,
- ColListSerializer(columnSerializer<ServerTableReader>()),
- value.serverExportColumns.toList(),
+ ColListSerializer(columnSerializer<TaskTableReader>()),
+ value.taskExportColumns.toList(),
)
encodeSerializableElement(
descriptor,
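Besides JSON deserialization, a config can be assembled in code. A sketch using the secondary constructor above, which unions the given columns with each table's BASE_EXPORT_COLUMNS; the column selection is illustrative:

```kotlin
import org.opendc.compute.telemetry.export.parquet.ComputeExportConfig
import org.opendc.compute.telemetry.export.parquet.DfltServiceExportColumns
import org.opendc.compute.telemetry.export.parquet.DfltTaskExportColumns

// The base columns are added automatically, so an empty host list still
// yields a valid host.parquet schema.
val config =
    ComputeExportConfig(
        hostExportColumns = emptyList(),
        taskExportColumns = listOf(DfltTaskExportColumns.TASK_ID, DfltTaskExportColumns.CPU_TIME_ACTIVE),
        serviceExportColumns = listOf(DfltServiceExportColumns.TASKS_ACTIVE, DfltServiceExportColumns.TASKS_PENDING),
    )
```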
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServiceExportColumns.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServiceExportColumns.kt
index 89396545..8038060d 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServiceExportColumns.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServiceExportColumns.kt
@@ -59,15 +59,15 @@ public object DfltServiceExportColumns {
field = Types.required(INT32).named("hosts_up"),
) { it.hostsUp }
- public val SERVERS_PENDING: ExportColumn<ServiceTableReader> =
+ public val TASKS_PENDING: ExportColumn<ServiceTableReader> =
ExportColumn(
- field = Types.required(INT32).named("servers_pending"),
- ) { it.serversPending }
+ field = Types.required(INT32).named("tasks_pending"),
+ ) { it.tasksPending }
- public val SERVERS_ACTIVE: ExportColumn<ServiceTableReader> =
+ public val TASKS_ACTIVE: ExportColumn<ServiceTableReader> =
ExportColumn(
- field = Types.required(INT32).named("servers_active"),
- ) { it.serversActive }
+ field = Types.required(INT32).named("tasks_active"),
+ ) { it.tasksActive }
public val ATTEMPTS_SUCCESS: ExportColumn<ServiceTableReader> =
ExportColumn(
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServerExportColumns.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltTaskExportColumns.kt
index 91d6c9bf..5bb7dd1f 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltServerExportColumns.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/DfltTaskExportColumns.kt
@@ -29,7 +29,7 @@ import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64
import org.apache.parquet.schema.Types
-import org.opendc.compute.telemetry.table.ServerTableReader
+import org.opendc.compute.telemetry.table.TaskTableReader
import org.opendc.trace.util.parquet.exporter.ExportColumn
/**
@@ -43,30 +43,30 @@ import org.opendc.trace.util.parquet.exporter.ExportColumn
* ```kotlin
* ...
* // Loads the column
- * DfltServerExportColumns
+ * DfltTaskExportColumns
* ...
* ```
*/
-public object DfltServerExportColumns {
- public val TIMESTAMP: ExportColumn<ServerTableReader> =
+public object DfltTaskExportColumns {
+ public val TIMESTAMP: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("timestamp"),
) { it.timestamp.toEpochMilli() }
- public val TIMESTAMP_ABS: ExportColumn<ServerTableReader> =
+ public val TIMESTAMP_ABS: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("timestamp_absolute"),
) { it.timestampAbsolute.toEpochMilli() }
- public val SERVER_ID: ExportColumn<ServerTableReader> =
+ public val TASK_ID: ExportColumn<TaskTableReader> =
ExportColumn(
field =
Types.required(BINARY)
.`as`(LogicalTypeAnnotation.stringType())
- .named("server_id"),
- ) { Binary.fromString(it.server.id) }
+ .named("task_id"),
+ ) { Binary.fromString(it.task.id) }
- public val HOST_ID: ExportColumn<ServerTableReader> =
+ public val HOST_ID: ExportColumn<TaskTableReader> =
ExportColumn(
field =
Types.optional(BINARY)
@@ -74,70 +74,70 @@ public object DfltServerExportColumns {
.named("host_id"),
) { it.host?.id?.let { Binary.fromString(it) } }
- public val SERVER_NAME: ExportColumn<ServerTableReader> =
+ public val TASK_NAME: ExportColumn<TaskTableReader> =
ExportColumn(
field =
Types.required(BINARY)
.`as`(LogicalTypeAnnotation.stringType())
- .named("server_name"),
- ) { Binary.fromString(it.server.name) }
+ .named("task_name"),
+ ) { Binary.fromString(it.task.name) }
- public val CPU_COUNT: ExportColumn<ServerTableReader> =
+ public val CPU_COUNT: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT32).named("cpu_count"),
- ) { it.server.cpuCount }
+ ) { it.task.cpuCount }
- public val MEM_CAPACITY: ExportColumn<ServerTableReader> =
+ public val MEM_CAPACITY: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("mem_capacity"),
- ) { it.server.memCapacity }
+ ) { it.task.memCapacity }
- public val CPU_LIMIT: ExportColumn<ServerTableReader> =
+ public val CPU_LIMIT: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(DOUBLE).named("cpu_limit"),
) { it.cpuLimit }
- public val CPU_TIME_ACTIVE: ExportColumn<ServerTableReader> =
+ public val CPU_TIME_ACTIVE: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("cpu_time_active"),
) { it.cpuActiveTime }
- public val CPU_TIME_IDLE: ExportColumn<ServerTableReader> =
+ public val CPU_TIME_IDLE: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("cpu_time_idle"),
) { it.cpuIdleTime }
- public val CPU_TIME_STEAL: ExportColumn<ServerTableReader> =
+ public val CPU_TIME_STEAL: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("cpu_time_steal"),
) { it.cpuStealTime }
- public val CPU_TIME_LOST: ExportColumn<ServerTableReader> =
+ public val CPU_TIME_LOST: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("cpu_time_lost"),
) { it.cpuLostTime }
- public val UP_TIME: ExportColumn<ServerTableReader> =
+ public val UP_TIME: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("uptime"),
) { it.uptime }
- public val DOWN_TIME: ExportColumn<ServerTableReader> =
+ public val DOWN_TIME: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.required(INT64).named("downtime"),
) { it.downtime }
- public val PROVISION_TIME: ExportColumn<ServerTableReader> =
+ public val PROVISION_TIME: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.optional(INT64).named("provision_time"),
) { it.provisionTime?.toEpochMilli() }
- public val BOOT_TIME: ExportColumn<ServerTableReader> =
+ public val BOOT_TIME: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.optional(INT64).named("boot_time"),
) { it.bootTime?.toEpochMilli() }
- public val BOOT_TIME_ABS: ExportColumn<ServerTableReader> =
+ public val BOOT_TIME_ABS: ExportColumn<TaskTableReader> =
ExportColumn(
field = Types.optional(INT64).named("boot_time_absolute"),
) { it.bootTimeAbsolute?.toEpochMilli() }
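Additional columns follow the same shape as the defaults above and, per the README below, become deserializable once the declaring class is loaded by the JVM. A sketch of a hypothetical derived column:

```kotlin
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64
import org.apache.parquet.schema.Types
import org.opendc.compute.telemetry.table.TaskTableReader
import org.opendc.trace.util.parquet.exporter.ExportColumn

object CustomTaskExportColumns {
    // Sum of active and idle CPU time, exported as a single INT64 column.
    val CPU_TIME_TOTAL: ExportColumn<TaskTableReader> =
        ExportColumn(
            field = Types.required(INT64).named("cpu_time_total"),
        ) { it.cpuActiveTime + it.cpuIdleTime }
}
```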
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetComputeMonitor.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetComputeMonitor.kt
index 6bea4cc2..3b7a7c0c 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetComputeMonitor.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/ParquetComputeMonitor.kt
@@ -24,8 +24,8 @@ package org.opendc.compute.telemetry.export.parquet
import org.opendc.compute.telemetry.ComputeMonitor
import org.opendc.compute.telemetry.table.HostTableReader
-import org.opendc.compute.telemetry.table.ServerTableReader
import org.opendc.compute.telemetry.table.ServiceTableReader
+import org.opendc.compute.telemetry.table.TaskTableReader
import org.opendc.trace.util.parquet.exporter.ExportColumn
import org.opendc.trace.util.parquet.exporter.Exportable
import org.opendc.trace.util.parquet.exporter.Exporter
@@ -36,15 +36,15 @@ import java.io.File
*/
public class ParquetComputeMonitor(
private val hostExporter: Exporter<HostTableReader>,
- private val serverExporter: Exporter<ServerTableReader>,
+ private val taskExporter: Exporter<TaskTableReader>,
private val serviceExporter: Exporter<ServiceTableReader>,
) : ComputeMonitor, AutoCloseable {
override fun record(reader: HostTableReader) {
hostExporter.write(reader)
}
- override fun record(reader: ServerTableReader) {
- serverExporter.write(reader)
+ override fun record(reader: TaskTableReader) {
+ taskExporter.write(reader)
}
override fun record(reader: ServiceTableReader) {
@@ -53,7 +53,7 @@ public class ParquetComputeMonitor(
override fun close() {
hostExporter.close()
- serverExporter.close()
+ taskExporter.close()
serviceExporter.close()
}
@@ -76,13 +76,13 @@ public class ParquetComputeMonitor(
partition = partition,
bufferSize = bufferSize,
hostExportColumns = computeExportConfig.hostExportColumns,
- serverExportColumns = computeExportConfig.serverExportColumns,
+ taskExportColumns = computeExportConfig.taskExportColumns,
serviceExportColumns = computeExportConfig.serviceExportColumns,
)
/**
* Constructor that loads default [ExportColumn]s defined in
- * [DfltHostExportColumns], [DfltServerExportColumns], [DfltServiceExportColumns]
+ * [DfltHostExportColumns], [DfltTaskExportColumns], [DfltServiceExportColumns]
* in case optional parameters are omitted and all fields need to be retrieved.
*
* @param[base] parent pathname for output file.
@@ -94,7 +94,7 @@ public class ParquetComputeMonitor(
partition: String,
bufferSize: Int,
hostExportColumns: Collection<ExportColumn<HostTableReader>>? = null,
- serverExportColumns: Collection<ExportColumn<ServerTableReader>>? = null,
+ taskExportColumns: Collection<ExportColumn<TaskTableReader>>? = null,
serviceExportColumns: Collection<ExportColumn<ServiceTableReader>>? = null,
): ParquetComputeMonitor {
// Loads the fields in case they need to be retrieved if optional params are omitted.
@@ -107,10 +107,10 @@ public class ParquetComputeMonitor(
columns = hostExportColumns ?: Exportable.getAllLoadedColumns(),
bufferSize = bufferSize,
),
- serverExporter =
+ taskExporter =
Exporter(
- outputFile = File(base, "$partition/server.parquet").also { it.parentFile.mkdirs() },
- columns = serverExportColumns ?: Exportable.getAllLoadedColumns(),
+ outputFile = File(base, "$partition/task.parquet").also { it.parentFile.mkdirs() },
+ columns = taskExportColumns ?: Exportable.getAllLoadedColumns(),
bufferSize = bufferSize,
),
serviceExporter =
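Putting it together, a sketch of constructing the monitor; it assumes the factory overload shown above takes a base directory, partition name, buffer size, and a ComputeExportConfig:

```kotlin
import org.opendc.compute.telemetry.export.parquet.ComputeExportConfig
import org.opendc.compute.telemetry.export.parquet.ParquetComputeMonitor
import java.io.File

// Paths, partition name and buffer size are illustrative.
val monitor =
    ParquetComputeMonitor(
        base = File("output"),
        partition = "seed=0",
        bufferSize = 4096,
        computeExportConfig = ComputeExportConfig.ALL_COLUMNS,
    )
// ... run the simulation, then flush the exporters.
monitor.close()
```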
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/README.md b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/README.md
index f48bc229..aee63fc9 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/README.md
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/export/parquet/README.md
@@ -1,8 +1,8 @@
### Summary
-Added output configuration, that can be defined in the scenario `.json` file, that allows to select which columns are to be included in the raw oputput files `host.parquet`, `server.parquet` and `service.parquet`.
+Added an output configuration, definable in the scenario `.json` file, that allows selecting which columns are included in the raw output files `host.parquet`, `task.parquet` and `service.parquet`.
### Columns
-The 'default' columns are defined in `DfltHostExportcolumns`, `DfltServerExportColumns` and `DfltServiceExportColumns`. Any number of additional columns can be definied anywhere (`ExportColumn<Exportable>`) and it is going to be deserializable as long as it is loaded by the jvm.
+The 'default' columns are defined in `DfltHostExportColumns`, `DfltTaskExportColumns` and `DfltServiceExportColumns`. Any number of additional columns can be defined anywhere (`ExportColumn<Exportable>`) and will be deserializable as long as the declaring class is loaded by the JVM.
### Deserialization
Each `ExportColumn` has a `Regex` used for deserialization. If no custom regex is provided, the default one is used; it matches the column name case-insensitively, written either with `_` (as in the name) or with a blank space.
@@ -21,7 +21,7 @@ Each `ExportColumn` has a `Regex`, used for deserialization. If no custom regex
"type": "object",
"properties": {
"hostExportColumns": { "type": "array" },
- "serverExportColumns": { "type": "array" } ,
+ "taskExportColumns": { "type": "array" } ,
"serviceExportColumns": { "type": "array" } ,
"required": [ /* NONE REQUIRED */ ]
}
@@ -49,8 +49,8 @@ Each `ExportColumn` has a `Regex`, used for deserialization. If no custom regex
...
"computeExportConfig": {
"hostExportColumns": ["timestamp", "timestamp_absolute", "invalid-entry1", "guests_invalid"],
- "serverExportColumns": ["invalid-entry2"],
- "serviceExportColumns": ["timestamp", "servers_active", "servers_pending"]
+ "taskExportColumns": ["invalid-entry2"],
+ "serviceExportColumns": ["timestamp", "tasks_active", "tasks_pending"]
},
...
```
@@ -59,12 +59,12 @@ Each `ExportColumn` has a `Regex`, used for deserialization. If no custom regex
// console output
10:51:56.561 [ERROR] ColListSerializer - no match found for column "invalid-entry1", ignoring...
10:51:56.563 [ERROR] ColListSerializer - no match found for column "invalid-entry2", ignoring...
-10:51:56.564 [WARN] ComputeExportConfig - deserialized list of export columns for exportable ServerTableReader produced empty list, falling back to all loaded columns
+10:51:56.564 [WARN] ComputeExportConfig - deserialized list of export columns for exportable TaskTableReader produced empty list, falling back to all loaded columns
10:51:56.584 [INFO] ScenariosSpec -
| === Compute Export Config ===
| Host columns : timestamp, timestamp_absolute, guests_invalid
-| Server columns : timestamp, timestamp_absolute, server_id, server_name, cpu_count, mem_capacity, cpu_limit, cpu_time_active, cpu_time_idle, cpu_time_steal, cpu_time_lost, uptime, downtime, provision_time, boot_time, boot_time_absolute
-| Service columns : timestamp, servers_active, servers_pending
+| Task columns : timestamp, timestamp_absolute, task_id, task_name, cpu_count, mem_capacity, cpu_limit, cpu_time_active, cpu_time_idle, cpu_time_steal, cpu_time_lost, uptime, downtime, provision_time, boot_time, boot_time_absolute
+| Service columns : timestamp, tasks_active, tasks_pending
```
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceData.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceData.kt
index ad4b3d49..7a8ba6a7 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceData.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceData.kt
@@ -31,9 +31,9 @@ public data class ServiceData(
val timestamp: Instant,
val hostsUp: Int,
val hostsDown: Int,
- val serversTotal: Int,
- val serversPending: Int,
- val serversActive: Int,
+ val tasksTotal: Int,
+ val tasksPending: Int,
+ val tasksActive: Int,
val attemptsSuccess: Int,
val attemptsFailure: Int,
val attemptsError: Int,
@@ -47,9 +47,9 @@ public fun ServiceTableReader.toServiceData(): ServiceData {
timestamp,
hostsUp,
hostsDown,
- serversTotal,
- serversPending,
- serversActive,
+ tasksTotal,
+ tasksPending,
+ tasksActive,
attemptsSuccess,
attemptsFailure,
attemptsError,
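The toServiceData() extension above snapshots a mutable reader into the immutable record. A sketch of using it from a monitor hook:

```kotlin
import org.opendc.compute.telemetry.ComputeMonitor
import org.opendc.compute.telemetry.table.ServiceTableReader
import org.opendc.compute.telemetry.table.toServiceData

class ServiceSnapshotMonitor : ComputeMonitor {
    override fun record(reader: ServiceTableReader) {
        // Copy the mutable reader into an immutable ServiceData row.
        val data = reader.toServiceData()
        println("tasks active: ${data.tasksActive}, pending: ${data.tasksPending}")
    }
}
```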
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt
index c3a92fc7..23630fb4 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServiceTableReader.kt
@@ -54,19 +54,19 @@ public interface ServiceTableReader : Exportable {
public val hostsDown: Int
/**
- * The number of servers that are registered with the compute service.
+ * The number of tasks that are registered with the compute service.
*/
- public val serversTotal: Int
+ public val tasksTotal: Int
/**
- * The number of servers that are pending to be scheduled.
+ * The number of tasks that are pending to be scheduled.
*/
- public val serversPending: Int
+ public val tasksPending: Int
/**
- * The number of servers that are currently active.
+ * The number of tasks that are currently active.
*/
- public val serversActive: Int
+ public val tasksActive: Int
/**
* The scheduling attempts that were successful.
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerInfo.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskInfo.kt
index fb83bf06..2d1ae91a 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerInfo.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskInfo.kt
@@ -23,9 +23,9 @@
package org.opendc.compute.telemetry.table
/**
- * Static information about a server exposed to the telemetry service.
+ * Static information about a task exposed to the telemetry service.
*/
-public data class ServerInfo(
+public data class TaskInfo(
val id: String,
val name: String,
val type: String,
diff --git a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerTableReader.kt b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskTableReader.kt
index a1aed778..1e38d5eb 100644
--- a/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/ServerTableReader.kt
+++ b/opendc-compute/opendc-compute-telemetry/src/main/kotlin/org/opendc/compute/telemetry/table/TaskTableReader.kt
@@ -22,17 +22,17 @@
package org.opendc.compute.telemetry.table
-import org.opendc.compute.telemetry.export.parquet.DfltServerExportColumns
+import org.opendc.compute.telemetry.export.parquet.DfltTaskExportColumns
import org.opendc.trace.util.parquet.exporter.Exportable
import java.time.Instant
/**
- * An interface that is used to read a row of a server trace entry.
+ * An interface that is used to read a row of a task trace entry.
*/
-public interface ServerTableReader : Exportable {
- public fun copy(): ServerTableReader
+public interface TaskTableReader : Exportable {
+ public fun copy(): TaskTableReader
- public fun setValues(table: ServerTableReader)
+ public fun setValues(table: TaskTableReader)
/**
* The timestamp of the current entry of the reader relative to the start of the workload.
@@ -45,12 +45,12 @@ public interface ServerTableReader : Exportable {
public val timestampAbsolute: Instant
/**
- * The [ServerInfo] of the server to which the row belongs to.
+ * The [TaskInfo] of the task to which the row belongs.
*/
- public val server: ServerInfo
+ public val task: TaskInfo
/**
- * The [HostInfo] of the host on which the server is hosted or `null` if it has no host.
+ * The [HostInfo] of the host on which the task is hosted or `null` if it has no host.
*/
public val host: HostInfo?
@@ -65,32 +65,32 @@ public interface ServerTableReader : Exportable {
public val downtime: Long
/**
- * The [Instant] at which the server was enqueued for the scheduler.
+ * The [Instant] at which the task was enqueued for the scheduler.
*/
public val provisionTime: Instant?
/**
- * The [Instant] at which the server booted relative to the start of the workload.
+ * The [Instant] at which the task booted relative to the start of the workload.
*/
public val bootTime: Instant?
/**
- * The [Instant] at which the server booted.
+ * The [Instant] at which the task booted.
*/
public val bootTimeAbsolute: Instant?
/**
- * The capacity of the CPUs of Host on which the server is running (in MHz).
+ * The capacity of the CPUs of the host on which the task is running (in MHz).
*/
public val cpuLimit: Double
/**
- * The duration (in seconds) that a CPU was active in the server.
+ * The duration (in seconds) that a CPU was active in the task.
*/
public val cpuActiveTime: Long
/**
- * The duration (in seconds) that a CPU was idle in the server.
+ * The duration (in seconds) that a CPU was idle in the task.
*/
public val cpuIdleTime: Long
@@ -106,4 +106,4 @@ public interface ServerTableReader : Exportable {
}
// Loads the default export fields for deserialization whenever this file is loaded.
-private val _ignore = DfltServerExportColumns
+private val _ignore = DfltTaskExportColumns