| author | Dante Niewenhuis <d.niewenhuis@hotmail.com> | 2024-03-05 13:23:57 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2024-03-05 13:23:57 +0100 |
| commit | 5864cbcbfe2eb8c36ca05c3a39c7e5916aeecaec | |
| tree | 5b2773b8dc21c2e1b526fb70f829c376dd80532a | /opendc-trace/opendc-trace-bitbrains/src |
| parent | d28002a3c151d198298574312f32f1cb43f3a660 | |
Updated package versions, updated web server tests. (#207)
* Updated all package versions, including Kotlin. Updated all web-server tests to run.
* Changed the Java version used by the tests; OpenDC now only supports Java 19 (a build-script sketch follows this list).
* small update
* test update
* new update
* Updated the Docker version to 19.
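The exact build configuration is not part of this diff, so the snippet below is only a minimal sketch of how a "Java 19 only" requirement is typically pinned in a Gradle Kotlin DSL build script; the plugin choice and toolchain block are assumptions, not OpenDC's actual build files.

```kotlin
// build.gradle.kts — illustrative only, not taken from the OpenDC repository.
plugins {
    java
}

java {
    toolchain {
        // Pin the toolchain so the project builds and tests against Java 19,
        // matching the "OpenDC now only supports Java 19" note above.
        languageVersion.set(JavaLanguageVersion.of(19))
    }
}
```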
Diffstat (limited to 'opendc-trace/opendc-trace-bitbrains/src')
7 files changed, 316 insertions, 251 deletions
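The bulk of the change in this module is mechanical: the column-name constants imported from `org.opendc.trace.conv` switch from SCREAMING_SNAKE_CASE to camelCase, and the readers are reformatted in ktlint style (multi-line signatures, trailing commas). The `conv` declarations themselves are outside this diffstat, so the sketch below only illustrates the rename; the declaration form and the string value are assumptions.

```kotlin
// Sketch of the naming change the Bitbrains readers pick up from org.opendc.trace.conv.
// Before: public const val RESOURCE_STATE_CPU_USAGE: String = "resource-state:cpu-usage"
// After (camelCase identifier, same role):
const val resourceStateCpuUsage: String = "resource-state:cpu-usage" // placeholder value

fun main() {
    // The readers in the diff map this name to a column index via resolve(name).
    println(resourceStateCpuUsage)
}
```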
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExResourceStateTableReader.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExResourceStateTableReader.kt
index 511f02db..8387d1ed 100644
--- a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExResourceStateTableReader.kt
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExResourceStateTableReader.kt
@@ -23,18 +23,18 @@ package org.opendc.trace.bitbrains
 import org.opendc.trace.TableReader
-import org.opendc.trace.conv.RESOURCE_CLUSTER_ID
-import org.opendc.trace.conv.RESOURCE_CPU_CAPACITY
-import org.opendc.trace.conv.RESOURCE_CPU_COUNT
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_MEM_CAPACITY
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_DEMAND
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_READY_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_DISK_READ
-import org.opendc.trace.conv.RESOURCE_STATE_DISK_WRITE
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
+import org.opendc.trace.conv.resourceClusterID
+import org.opendc.trace.conv.resourceCpuCapacity
+import org.opendc.trace.conv.resourceCpuCount
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceMemCapacity
+import org.opendc.trace.conv.resourceStateCpuDemand
+import org.opendc.trace.conv.resourceStateCpuReadyPct
+import org.opendc.trace.conv.resourceStateCpuUsage
+import org.opendc.trace.conv.resourceStateCpuUsagePct
+import org.opendc.trace.conv.resourceStateDiskRead
+import org.opendc.trace.conv.resourceStateDiskWrite
+import org.opendc.trace.conv.resourceStateTimestamp
 import java.io.BufferedReader
 import java.time.Duration
 import java.time.Instant
@@ -99,18 +99,18 @@ internal class BitbrainsExResourceStateTableReader(private val reader: BufferedR
             val field = line.subSequence(start, end) as String
 
             when (col++) {
-                COL_TIMESTAMP -> timestamp = Instant.ofEpochSecond(field.toLong(10))
-                COL_CPU_USAGE -> cpuUsage = field.toDouble()
-                COL_CPU_DEMAND -> cpuDemand = field.toDouble()
-                COL_DISK_READ -> diskRead = field.toDouble()
-                COL_DISK_WRITE -> diskWrite = field.toDouble()
-                COL_CLUSTER_ID -> cluster = field.trim()
-                COL_NCPUS -> cpuCores = field.toInt(10)
-                COL_CPU_READY_PCT -> cpuReadyPct = field.toDouble()
-                COL_POWERED_ON -> poweredOn = field.toInt(10) == 1
-                COL_CPU_CAPACITY -> cpuCapacity = field.toDouble()
-                COL_ID -> id = field.trim()
-                COL_MEM_CAPACITY -> memCapacity = field.toDouble() * 1000 // Convert from MB to KB
+                colTimestamp -> timestamp = Instant.ofEpochSecond(field.toLong(10))
+                colCpuUsage -> cpuUsage = field.toDouble()
+                colCpuDemand -> cpuDemand = field.toDouble()
+                colDiskRead -> diskRead = field.toDouble()
+                colDiskWrite -> diskWrite = field.toDouble()
+                colClusterID -> cluster = field.trim()
+                colNcpus -> cpuCores = field.toInt(10)
+                colCpuReadyPct -> cpuReadyPct = field.toDouble()
+                colPoweredOn -> poweredOn = field.toInt(10) == 1
+                colCpuCapacity -> cpuCapacity = field.toDouble()
+                colID -> id = field.trim()
+                colMemCapacity -> memCapacity = field.toDouble() * 1000 // Convert from MB to KB
             }
         }
 
@@ -119,31 +119,31 @@ internal class BitbrainsExResourceStateTableReader(private val reader: BufferedR
     override fun resolve(name: String): Int {
         return when (name) {
-            RESOURCE_ID -> COL_ID
-            RESOURCE_CLUSTER_ID -> COL_CLUSTER_ID
-            RESOURCE_STATE_TIMESTAMP -> COL_TIMESTAMP
-            RESOURCE_CPU_COUNT -> COL_NCPUS
-            RESOURCE_CPU_CAPACITY -> COL_CPU_CAPACITY
-            RESOURCE_STATE_CPU_USAGE -> COL_CPU_USAGE
-            RESOURCE_STATE_CPU_USAGE_PCT -> COL_CPU_USAGE_PCT
-            RESOURCE_STATE_CPU_DEMAND -> COL_CPU_DEMAND
-            RESOURCE_STATE_CPU_READY_PCT -> COL_CPU_READY_PCT
-            RESOURCE_MEM_CAPACITY -> COL_MEM_CAPACITY
-            RESOURCE_STATE_DISK_READ -> COL_DISK_READ
-            RESOURCE_STATE_DISK_WRITE -> COL_DISK_WRITE
+            resourceID -> colID
+            resourceClusterID -> colClusterID
+            resourceStateTimestamp -> colTimestamp
+            resourceCpuCount -> colNcpus
+            resourceCpuCapacity -> colCpuCapacity
+            resourceStateCpuUsage -> colCpuUsage
+            resourceStateCpuUsagePct -> colCpuUsagePct
+            resourceStateCpuDemand -> colCpuDemand
+            resourceStateCpuReadyPct -> colCpuReadyPct
+            resourceMemCapacity -> colMemCapacity
+            resourceStateDiskRead -> colDiskRead
+            resourceStateDiskWrite -> colDiskWrite
             else -> -1
         }
     }
 
     override fun isNull(index: Int): Boolean {
-        require(index in 0 until COL_MAX) { "Invalid column index" }
+        require(index in 0 until colMax) { "Invalid column index" }
         return false
     }
 
     override fun getBoolean(index: Int): Boolean {
         check(state == State.Active) { "No active row" }
         return when (index) {
-            COL_POWERED_ON -> poweredOn
+            colPoweredOn -> poweredOn
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -151,7 +151,7 @@ internal class BitbrainsExResourceStateTableReader(private val reader: BufferedR
     override fun getInt(index: Int): Int {
         check(state == State.Active) { "No active row" }
         return when (index) {
-            COL_NCPUS -> cpuCores
+            colNcpus -> cpuCores
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -167,14 +167,14 @@ internal class BitbrainsExResourceStateTableReader(private val reader: BufferedR
     override fun getDouble(index: Int): Double {
         check(state == State.Active) { "No active row" }
         return when (index) {
-            COL_CPU_CAPACITY -> cpuCapacity
-            COL_CPU_USAGE -> cpuUsage
-            COL_CPU_USAGE_PCT -> cpuUsage / cpuCapacity
-            COL_CPU_READY_PCT -> cpuReadyPct
-            COL_CPU_DEMAND -> cpuDemand
-            COL_MEM_CAPACITY -> memCapacity
-            COL_DISK_READ -> diskRead
-            COL_DISK_WRITE -> diskWrite
+            colCpuCapacity -> cpuCapacity
+            colCpuUsage -> cpuUsage
+            colCpuUsagePct -> cpuUsage / cpuCapacity
+            colCpuReadyPct -> cpuReadyPct
+            colCpuDemand -> cpuDemand
+            colMemCapacity -> memCapacity
+            colDiskRead -> diskRead
+            colDiskWrite -> diskWrite
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -182,8 +182,8 @@ internal class BitbrainsExResourceStateTableReader(private val reader: BufferedR
     override fun getString(index: Int): String? {
         check(state == State.Active) { "No active row" }
         return when (index) {
-            COL_ID -> id
-            COL_CLUSTER_ID -> cluster
+            colID -> id
+            colClusterID -> cluster
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -195,7 +195,7 @@ internal class BitbrainsExResourceStateTableReader(private val reader: BufferedR
     override fun getInstant(index: Int): Instant? {
         check(state == State.Active) { "No active row" }
         return when (index) {
-            COL_TIMESTAMP -> timestamp
+            colTimestamp -> timestamp
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -204,15 +204,25 @@ internal class BitbrainsExResourceStateTableReader(private val reader: BufferedR
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <T> getList(index: Int, elementType: Class<T>): List<T>? {
+    override fun <T> getList(
+        index: Int,
+        elementType: Class<T>,
+    ): List<T>? {
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <T> getSet(index: Int, elementType: Class<T>): Set<T>? {
+    override fun <T> getSet(
+        index: Int,
+        elementType: Class<T>,
+    ): Set<T>? {
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <K, V> getMap(index: Int, keyType: Class<K>, valueType: Class<V>): Map<K, V>? {
+    override fun <K, V> getMap(
+        index: Int,
+        keyType: Class<K>,
+        valueType: Class<V>,
+    ): Map<K, V>? {
         throw IllegalArgumentException("Invalid column")
     }
 
@@ -259,22 +269,24 @@ internal class BitbrainsExResourceStateTableReader(private val reader: BufferedR
     /**
      * Default column indices for the extended Bitbrains format.
      */
-    private val COL_TIMESTAMP = 0
-    private val COL_CPU_USAGE = 1
-    private val COL_CPU_DEMAND = 2
-    private val COL_DISK_READ = 4
-    private val COL_DISK_WRITE = 6
-    private val COL_CLUSTER_ID = 10
-    private val COL_NCPUS = 12
-    private val COL_CPU_READY_PCT = 13
-    private val COL_POWERED_ON = 14
-    private val COL_CPU_CAPACITY = 18
-    private val COL_ID = 19
-    private val COL_MEM_CAPACITY = 20
-    private val COL_CPU_USAGE_PCT = 21
-    private val COL_MAX = COL_CPU_USAGE_PCT + 1
+    private val colTimestamp = 0
+    private val colCpuUsage = 1
+    private val colCpuDemand = 2
+    private val colDiskRead = 4
+    private val colDiskWrite = 6
+    private val colClusterID = 10
+    private val colNcpus = 12
+    private val colCpuReadyPct = 13
+    private val colPoweredOn = 14
+    private val colCpuCapacity = 18
+    private val colID = 19
+    private val colMemCapacity = 20
+    private val colCpuUsagePct = 21
+    private val colMax = colCpuUsagePct + 1
 
     private enum class State {
-        Pending, Active, Closed
+        Pending,
+        Active,
+        Closed,
     }
 }
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormat.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormat.kt
index d364694c..6115953f 100644
--- a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormat.kt
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormat.kt
@@ -26,19 +26,19 @@ import org.opendc.trace.TableColumn
 import org.opendc.trace.TableColumnType
 import org.opendc.trace.TableReader
 import org.opendc.trace.TableWriter
-import org.opendc.trace.conv.RESOURCE_CLUSTER_ID
-import org.opendc.trace.conv.RESOURCE_CPU_CAPACITY
-import org.opendc.trace.conv.RESOURCE_CPU_COUNT
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_MEM_CAPACITY
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_DEMAND
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_READY_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_DISK_READ
-import org.opendc.trace.conv.RESOURCE_STATE_DISK_WRITE
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
 import org.opendc.trace.conv.TABLE_RESOURCE_STATES
+import org.opendc.trace.conv.resourceClusterID
+import org.opendc.trace.conv.resourceCpuCapacity
+import org.opendc.trace.conv.resourceCpuCount
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceMemCapacity
+import org.opendc.trace.conv.resourceStateCpuDemand
+import org.opendc.trace.conv.resourceStateCpuReadyPct
+import org.opendc.trace.conv.resourceStateCpuUsage
+import org.opendc.trace.conv.resourceStateCpuUsagePct
+import org.opendc.trace.conv.resourceStateDiskRead
+import org.opendc.trace.conv.resourceStateDiskWrite
+import org.opendc.trace.conv.resourceStateTimestamp
 import org.opendc.trace.spi.TableDetails
 import org.opendc.trace.spi.TraceFormat
 import org.opendc.trace.util.CompositeTableReader
@@ -64,36 +64,47 @@ public class BitbrainsExTraceFormat : TraceFormat {
     override fun getTables(path: Path): List<String> = listOf(TABLE_RESOURCE_STATES)
 
-    override fun getDetails(path: Path, table: String): TableDetails {
+    override fun getDetails(
+        path: Path,
+        table: String,
+    ): TableDetails {
         return when (table) {
-            TABLE_RESOURCE_STATES -> TableDetails(
-                listOf(
-                    TableColumn(RESOURCE_ID, TableColumnType.String),
-                    TableColumn(RESOURCE_CLUSTER_ID, TableColumnType.String),
-                    TableColumn(RESOURCE_STATE_TIMESTAMP, TableColumnType.Instant),
-                    TableColumn(RESOURCE_CPU_COUNT, TableColumnType.Int),
-                    TableColumn(RESOURCE_CPU_CAPACITY, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_CPU_USAGE, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_CPU_USAGE_PCT, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_CPU_DEMAND, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_CPU_READY_PCT, TableColumnType.Double),
-                    TableColumn(RESOURCE_MEM_CAPACITY, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_DISK_READ, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_DISK_WRITE, TableColumnType.Double)
+            TABLE_RESOURCE_STATES ->
+                TableDetails(
+                    listOf(
+                        TableColumn(resourceID, TableColumnType.String),
+                        TableColumn(resourceClusterID, TableColumnType.String),
+                        TableColumn(resourceStateTimestamp, TableColumnType.Instant),
+                        TableColumn(resourceCpuCount, TableColumnType.Int),
+                        TableColumn(resourceCpuCapacity, TableColumnType.Double),
+                        TableColumn(resourceStateCpuUsage, TableColumnType.Double),
+                        TableColumn(resourceStateCpuUsagePct, TableColumnType.Double),
+                        TableColumn(resourceStateCpuDemand, TableColumnType.Double),
+                        TableColumn(resourceStateCpuReadyPct, TableColumnType.Double),
+                        TableColumn(resourceMemCapacity, TableColumnType.Double),
+                        TableColumn(resourceStateDiskRead, TableColumnType.Double),
+                        TableColumn(resourceStateDiskWrite, TableColumnType.Double),
+                    ),
+                )
-            )
             else -> throw IllegalArgumentException("Table $table not supported")
         }
     }
 
-    override fun newReader(path: Path, table: String, projection: List<String>?): TableReader {
+    override fun newReader(
+        path: Path,
+        table: String,
+        projection: List<String>?,
+    ): TableReader {
         return when (table) {
             TABLE_RESOURCE_STATES -> newResourceStateReader(path)
             else -> throw IllegalArgumentException("Table $table not supported")
         }
     }
 
-    override fun newWriter(path: Path, table: String): TableWriter {
+    override fun newWriter(
+        path: Path,
+        table: String,
+    ): TableWriter {
         throw UnsupportedOperationException("Writing not supported for this format")
     }
 
@@ -101,10 +112,11 @@ public class BitbrainsExTraceFormat : TraceFormat {
      * Construct a [TableReader] for reading over all resource state partitions.
      */
     private fun newResourceStateReader(path: Path): TableReader {
-        val partitions = Files.walk(path, 1)
-            .filter { !Files.isDirectory(it) && it.extension == "txt" }
-            .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
-            .toSortedMap()
+        val partitions =
+            Files.walk(path, 1)
+                .filter { !Files.isDirectory(it) && it.extension == "txt" }
+                .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
+                .toSortedMap()
         val it = partitions.iterator()
 
         return object : CompositeTableReader() {
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceStateTableReader.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceStateTableReader.kt
index 65ca8a9c..e264fccb 100644
--- a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceStateTableReader.kt
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceStateTableReader.kt
@@ -27,18 +27,18 @@ import com.fasterxml.jackson.core.JsonToken
 import com.fasterxml.jackson.dataformat.csv.CsvParser
 import com.fasterxml.jackson.dataformat.csv.CsvSchema
 import org.opendc.trace.TableReader
-import org.opendc.trace.conv.RESOURCE_CPU_CAPACITY
-import org.opendc.trace.conv.RESOURCE_CPU_COUNT
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_MEM_CAPACITY
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_DISK_READ
-import org.opendc.trace.conv.RESOURCE_STATE_DISK_WRITE
-import org.opendc.trace.conv.RESOURCE_STATE_MEM_USAGE
-import org.opendc.trace.conv.RESOURCE_STATE_NET_RX
-import org.opendc.trace.conv.RESOURCE_STATE_NET_TX
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
+import org.opendc.trace.conv.resourceCpuCapacity
+import org.opendc.trace.conv.resourceCpuCount
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceMemCapacity
+import org.opendc.trace.conv.resourceStateCpuUsage
+import org.opendc.trace.conv.resourceStateCpuUsagePct
+import org.opendc.trace.conv.resourceStateDiskRead
+import org.opendc.trace.conv.resourceStateDiskWrite
+import org.opendc.trace.conv.resourceStateMemUsage
+import org.opendc.trace.conv.resourceStateNetRx
+import org.opendc.trace.conv.resourceStateNetTx
+import org.opendc.trace.conv.resourceStateTimestamp
 import java.text.NumberFormat
 import java.time.Duration
 import java.time.Instant
@@ -103,20 +103,21 @@ internal class BitbrainsResourceStateTableReader(private val partition: String,
             when (parser.currentName) {
                 "Timestamp [ms]" -> {
-                    timestamp = when (timestampType) {
-                        TimestampType.UNDECIDED -> {
-                            try {
-                                val res = LocalDateTime.parse(parser.text, formatter).toInstant(ZoneOffset.UTC)
-                                timestampType = TimestampType.DATE_TIME
-                                res
-                            } catch (e: DateTimeParseException) {
-                                timestampType = TimestampType.EPOCH_MILLIS
-                                Instant.ofEpochSecond(parser.longValue)
+                    timestamp =
+                        when (timestampType) {
+                            TimestampType.UNDECIDED -> {
+                                try {
+                                    val res = LocalDateTime.parse(parser.text, formatter).toInstant(ZoneOffset.UTC)
+                                    timestampType = TimestampType.DATE_TIME
+                                    res
+                                } catch (e: DateTimeParseException) {
+                                    timestampType = TimestampType.EPOCH_MILLIS
+                                    Instant.ofEpochSecond(parser.longValue)
+                                }
                             }
+                            TimestampType.DATE_TIME -> LocalDateTime.parse(parser.text, formatter).toInstant(ZoneOffset.UTC)
+                            TimestampType.EPOCH_MILLIS -> Instant.ofEpochSecond(parser.longValue)
                         }
-                        TimestampType.DATE_TIME -> LocalDateTime.parse(parser.text, formatter).toInstant(ZoneOffset.UTC)
-                        TimestampType.EPOCH_MILLIS -> Instant.ofEpochSecond(parser.longValue)
-                    }
                 }
                 "CPU cores" -> cpuCores = parser.intValue
                 "CPU capacity provisioned [MHZ]" -> cpuCapacity = parseSafeDouble()
@@ -134,39 +135,39 @@ internal class BitbrainsResourceStateTableReader(private val partition: String,
         return true
     }
 
-    private val COL_TIMESTAMP = 0
-    private val COL_CPU_COUNT = 1
-    private val COL_CPU_CAPACITY = 2
-    private val COL_CPU_USAGE = 3
-    private val COL_CPU_USAGE_PCT = 4
-    private val COL_MEM_CAPACITY = 5
-    private val COL_MEM_USAGE = 6
-    private val COL_DISK_READ = 7
-    private val COL_DISK_WRITE = 8
-    private val COL_NET_RX = 9
-    private val COL_NET_TX = 10
-    private val COL_ID = 11
+    private val colTimestamp = 0
+    private val colCpuCount = 1
+    private val colCpuCapacity = 2
+    private val colCpuUsage = 3
+    private val colCpuUsagePct = 4
+    private val colMemCapacity = 5
+    private val colMemUsage = 6
+    private val colDiskRead = 7
+    private val colDiskWrite = 8
+    private val colNetRx = 9
+    private val colNetTx = 10
+    private val colID = 11
 
     override fun resolve(name: String): Int {
         return when (name) {
-            RESOURCE_ID -> COL_ID
-            RESOURCE_STATE_TIMESTAMP -> COL_TIMESTAMP
-            RESOURCE_CPU_COUNT -> COL_CPU_COUNT
-            RESOURCE_CPU_CAPACITY -> COL_CPU_CAPACITY
-            RESOURCE_STATE_CPU_USAGE -> COL_CPU_USAGE
-            RESOURCE_STATE_CPU_USAGE_PCT -> COL_CPU_USAGE_PCT
-            RESOURCE_MEM_CAPACITY -> COL_MEM_CAPACITY
-            RESOURCE_STATE_MEM_USAGE -> COL_MEM_USAGE
-            RESOURCE_STATE_DISK_READ -> COL_DISK_READ
-            RESOURCE_STATE_DISK_WRITE -> COL_DISK_WRITE
-            RESOURCE_STATE_NET_RX -> COL_NET_RX
-            RESOURCE_STATE_NET_TX -> COL_NET_TX
+            resourceID -> colID
+            resourceStateTimestamp -> colTimestamp
+            resourceCpuCount -> colCpuCount
+            resourceCpuCapacity -> colCpuCapacity
+            resourceStateCpuUsage -> colCpuUsage
+            resourceStateCpuUsagePct -> colCpuUsagePct
+            resourceMemCapacity -> colMemCapacity
+            resourceStateMemUsage -> colMemUsage
+            resourceStateDiskRead -> colDiskRead
+            resourceStateDiskWrite -> colDiskWrite
+            resourceStateNetRx -> colNetRx
+            resourceStateNetTx -> colNetTx
             else -> -1
         }
     }
 
     override fun isNull(index: Int): Boolean {
-        require(index in 0..COL_ID) { "Invalid column index" }
+        require(index in 0..colID) { "Invalid column index" }
         return false
     }
 
@@ -177,7 +178,7 @@ internal class BitbrainsResourceStateTableReader(private val partition: String,
     override fun getInt(index: Int): Int {
         checkActive()
         return when (index) {
-            COL_CPU_COUNT -> cpuCores
+            colCpuCount -> cpuCores
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -193,15 +194,15 @@ internal class BitbrainsResourceStateTableReader(private val partition: String,
     override fun getDouble(index: Int): Double {
         checkActive()
         return when (index) {
-            COL_CPU_CAPACITY -> cpuCapacity
-            COL_CPU_USAGE -> cpuUsage
-            COL_CPU_USAGE_PCT -> cpuUsagePct
-            COL_MEM_CAPACITY -> memCapacity
-            COL_MEM_USAGE -> memUsage
-            COL_DISK_READ -> diskRead
-            COL_DISK_WRITE -> diskWrite
-            COL_NET_RX -> netReceived
-            COL_NET_TX -> netTransmitted
+            colCpuCapacity -> cpuCapacity
+            colCpuUsage -> cpuUsage
+            colCpuUsagePct -> cpuUsagePct
+            colMemCapacity -> memCapacity
+            colMemUsage -> memUsage
+            colDiskRead -> diskRead
+            colDiskWrite -> diskWrite
+            colNetRx -> netReceived
+            colNetTx -> netTransmitted
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -209,7 +210,7 @@ internal class BitbrainsResourceStateTableReader(private val partition: String,
     override fun getString(index: Int): String {
         checkActive()
         return when (index) {
-            COL_ID -> partition
+            colID -> partition
            else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -221,7 +222,7 @@ internal class BitbrainsResourceStateTableReader(private val partition: String,
     override fun getInstant(index: Int): Instant? {
         checkActive()
         return when (index) {
-            COL_TIMESTAMP -> timestamp
+            colTimestamp -> timestamp
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -230,15 +231,25 @@ internal class BitbrainsResourceStateTableReader(private val partition: String,
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <T> getList(index: Int, elementType: Class<T>): List<T>? {
+    override fun <T> getList(
+        index: Int,
+        elementType: Class<T>,
+    ): List<T>? {
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <T> getSet(index: Int, elementType: Class<T>): Set<T>? {
+    override fun <T> getSet(
+        index: Int,
+        elementType: Class<T>,
+    ): Set<T>? {
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <K, V> getMap(index: Int, keyType: Class<K>, valueType: Class<V>): Map<K, V>? {
+    override fun <K, V> getMap(
+        index: Int,
+        keyType: Class<K>,
+        valueType: Class<V>,
+    ): Map<K, V>? {
         throw IllegalArgumentException("Invalid column")
     }
 
@@ -322,30 +333,33 @@ internal class BitbrainsResourceStateTableReader(private val partition: String,
     * The type of the timestamp in the trace.
     */
    private enum class TimestampType {
-        UNDECIDED, DATE_TIME, EPOCH_MILLIS
+        UNDECIDED,
+        DATE_TIME,
+        EPOCH_MILLIS,
    }
 
    companion object {
        /**
         * The [CsvSchema] that is used to parse the trace.
         */
-        private val schema = CsvSchema.builder()
-            .addColumn("Timestamp [ms]", CsvSchema.ColumnType.NUMBER_OR_STRING)
-            .addColumn("CPU cores", CsvSchema.ColumnType.NUMBER)
-            .addColumn("CPU capacity provisioned [MHZ]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("CPU usage [MHZ]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("CPU usage [%]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("Memory capacity provisioned [KB]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("Memory usage [KB]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("Memory usage [%]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("Disk read throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("Disk write throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("Disk size [GB]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("Network received throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
-            .addColumn("Network transmitted throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
-            .setAllowComments(true)
-            .setUseHeader(true)
-            .setColumnSeparator(';')
-            .build()
+        private val schema =
+            CsvSchema.builder()
+                .addColumn("Timestamp [ms]", CsvSchema.ColumnType.NUMBER_OR_STRING)
+                .addColumn("CPU cores", CsvSchema.ColumnType.NUMBER)
+                .addColumn("CPU capacity provisioned [MHZ]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("CPU usage [MHZ]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("CPU usage [%]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("Memory capacity provisioned [KB]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("Memory usage [KB]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("Memory usage [%]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("Disk read throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("Disk write throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("Disk size [GB]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("Network received throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
+                .addColumn("Network transmitted throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
+                .setAllowComments(true)
+                .setUseHeader(true)
+                .setColumnSeparator(';')
+                .build()
    }
 }
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceTableReader.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceTableReader.kt
index 776a8f86..a12785f0 100644
--- a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceTableReader.kt
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceTableReader.kt
@@ -24,7 +24,7 @@ package org.opendc.trace.bitbrains
 import com.fasterxml.jackson.dataformat.csv.CsvFactory
 import org.opendc.trace.TableReader
-import org.opendc.trace.conv.RESOURCE_ID
+import org.opendc.trace.conv.resourceID
 import java.nio.file.Path
 import java.time.Duration
 import java.time.Instant
@@ -56,7 +56,7 @@ internal class BitbrainsResourceTableReader(private val factory: CsvFactory, vms
         val parser = factory.createParser(path.toFile())
         val reader = BitbrainsResourceStateTableReader(name, parser)
-        val idCol = reader.resolve(RESOURCE_ID)
+        val idCol = reader.resolve(resourceID)
 
         try {
             if (!reader.nextRow()) {
@@ -74,17 +74,17 @@ internal class BitbrainsResourceTableReader(private val factory: CsvFactory, vms
         return false
     }
 
-    private val COL_ID = 0
+    private val colID = 0
 
     override fun resolve(name: String): Int {
         return when (name) {
-            RESOURCE_ID -> COL_ID
+            resourceID -> colID
             else -> -1
         }
     }
 
     override fun isNull(index: Int): Boolean {
-        require(index in 0..COL_ID) { "Invalid column index" }
+        require(index in 0..colID) { "Invalid column index" }
         return false
     }
 
@@ -111,7 +111,7 @@ internal class BitbrainsResourceTableReader(private val factory: CsvFactory, vms
     override fun getString(index: Int): String? {
         check(state == State.Active) { "No active row" }
         return when (index) {
-            COL_ID -> id
+            colID -> id
             else -> throw IllegalArgumentException("Invalid column")
         }
     }
@@ -128,15 +128,25 @@ internal class BitbrainsResourceTableReader(private val factory: CsvFactory, vms
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <T> getList(index: Int, elementType: Class<T>): List<T>? {
+    override fun <T> getList(
+        index: Int,
+        elementType: Class<T>,
+    ): List<T>? {
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <T> getSet(index: Int, elementType: Class<T>): Set<T>? {
+    override fun <T> getSet(
+        index: Int,
+        elementType: Class<T>,
+    ): Set<T>? {
         throw IllegalArgumentException("Invalid column")
     }
 
-    override fun <K, V> getMap(index: Int, keyType: Class<K>, valueType: Class<V>): Map<K, V>? {
+    override fun <K, V> getMap(
+        index: Int,
+        keyType: Class<K>,
+        valueType: Class<V>,
+    ): Map<K, V>? {
         throw IllegalArgumentException("Invalid column")
     }
 
@@ -158,6 +168,8 @@ internal class BitbrainsResourceTableReader(private val factory: CsvFactory, vms
     }
 
     private enum class State {
-        Pending, Active, Closed
+        Pending,
+        Active,
+        Closed,
     }
 }
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormat.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormat.kt
index b0809735..23853077 100644
--- a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormat.kt
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormat.kt
@@ -28,20 +28,20 @@ import org.opendc.trace.TableColumn
 import org.opendc.trace.TableColumnType
 import org.opendc.trace.TableReader
 import org.opendc.trace.TableWriter
-import org.opendc.trace.conv.RESOURCE_CPU_CAPACITY
-import org.opendc.trace.conv.RESOURCE_CPU_COUNT
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_MEM_CAPACITY
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_DISK_READ
-import org.opendc.trace.conv.RESOURCE_STATE_DISK_WRITE
-import org.opendc.trace.conv.RESOURCE_STATE_MEM_USAGE
-import org.opendc.trace.conv.RESOURCE_STATE_NET_RX
-import org.opendc.trace.conv.RESOURCE_STATE_NET_TX
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
 import org.opendc.trace.conv.TABLE_RESOURCES
 import org.opendc.trace.conv.TABLE_RESOURCE_STATES
+import org.opendc.trace.conv.resourceCpuCapacity
+import org.opendc.trace.conv.resourceCpuCount
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceMemCapacity
+import org.opendc.trace.conv.resourceStateCpuUsage
+import org.opendc.trace.conv.resourceStateCpuUsagePct
+import org.opendc.trace.conv.resourceStateDiskRead
+import org.opendc.trace.conv.resourceStateDiskWrite
+import org.opendc.trace.conv.resourceStateMemUsage
+import org.opendc.trace.conv.resourceStateNetRx
+import org.opendc.trace.conv.resourceStateNetTx
+import org.opendc.trace.conv.resourceStateTimestamp
 import org.opendc.trace.spi.TableDetails
 import org.opendc.trace.spi.TraceFormat
 import org.opendc.trace.util.CompositeTableReader
@@ -63,9 +63,10 @@ public class BitbrainsTraceFormat : TraceFormat {
     /**
      * The [CsvFactory] used to create the parser.
      */
-    private val factory = CsvFactory()
-        .enable(CsvParser.Feature.ALLOW_COMMENTS)
-        .enable(CsvParser.Feature.TRIM_SPACES)
+    private val factory =
+        CsvFactory()
+            .enable(CsvParser.Feature.ALLOW_COMMENTS)
+            .enable(CsvParser.Feature.TRIM_SPACES)
 
     override fun create(path: Path) {
         throw UnsupportedOperationException("Writing not supported for this format")
@@ -73,40 +74,50 @@ public class BitbrainsTraceFormat : TraceFormat {
     override fun getTables(path: Path): List<String> = listOf(TABLE_RESOURCES, TABLE_RESOURCE_STATES)
 
-    override fun getDetails(path: Path, table: String): TableDetails {
+    override fun getDetails(
+        path: Path,
+        table: String,
+    ): TableDetails {
         return when (table) {
-            TABLE_RESOURCES -> TableDetails(
-                listOf(
-                    TableColumn(RESOURCE_ID, TableColumnType.String)
+            TABLE_RESOURCES ->
+                TableDetails(
+                    listOf(
+                        TableColumn(resourceID, TableColumnType.String),
+                    ),
+                )
-            )
-            TABLE_RESOURCE_STATES -> TableDetails(
-                listOf(
-                    TableColumn(RESOURCE_ID, TableColumnType.String),
-                    TableColumn(RESOURCE_STATE_TIMESTAMP, TableColumnType.Instant),
-                    TableColumn(RESOURCE_CPU_COUNT, TableColumnType.Int),
-                    TableColumn(RESOURCE_CPU_CAPACITY, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_CPU_USAGE, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_CPU_USAGE_PCT, TableColumnType.Double),
-                    TableColumn(RESOURCE_MEM_CAPACITY, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_MEM_USAGE, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_DISK_READ, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_DISK_WRITE, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_NET_RX, TableColumnType.Double),
-                    TableColumn(RESOURCE_STATE_NET_TX, TableColumnType.Double)
+            TABLE_RESOURCE_STATES ->
+                TableDetails(
+                    listOf(
+                        TableColumn(resourceID, TableColumnType.String),
+                        TableColumn(resourceStateTimestamp, TableColumnType.Instant),
+                        TableColumn(resourceCpuCount, TableColumnType.Int),
+                        TableColumn(resourceCpuCapacity, TableColumnType.Double),
+                        TableColumn(resourceStateCpuUsage, TableColumnType.Double),
+                        TableColumn(resourceStateCpuUsagePct, TableColumnType.Double),
+                        TableColumn(resourceMemCapacity, TableColumnType.Double),
+                        TableColumn(resourceStateMemUsage, TableColumnType.Double),
+                        TableColumn(resourceStateDiskRead, TableColumnType.Double),
+                        TableColumn(resourceStateDiskWrite, TableColumnType.Double),
+                        TableColumn(resourceStateNetRx, TableColumnType.Double),
+                        TableColumn(resourceStateNetTx, TableColumnType.Double),
+                    ),
                 )
-            )
             else -> throw IllegalArgumentException("Table $table not supported")
         }
     }
 
-    override fun newReader(path: Path, table: String, projection: List<String>?): TableReader {
+    override fun newReader(
+        path: Path,
+        table: String,
+        projection: List<String>?,
+    ): TableReader {
         return when (table) {
             TABLE_RESOURCES -> {
-                val vms = Files.walk(path, 1)
-                    .filter { !Files.isDirectory(it) && it.extension == "csv" }
-                    .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
-                    .toSortedMap()
+                val vms =
+                    Files.walk(path, 1)
+                        .filter { !Files.isDirectory(it) && it.extension == "csv" }
+                        .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
+                        .toSortedMap()
                 BitbrainsResourceTableReader(factory, vms)
             }
             TABLE_RESOURCE_STATES -> newResourceStateReader(path)
@@ -114,7 +125,10 @@ public class BitbrainsTraceFormat : TraceFormat {
         }
     }
 
-    override fun newWriter(path: Path, table: String): TableWriter {
+    override fun newWriter(
+        path: Path,
+        table: String,
+    ): TableWriter {
         throw UnsupportedOperationException("Writing not supported for this format")
     }
 
@@ -122,10 +136,11 @@ public class BitbrainsTraceFormat : TraceFormat {
     * Construct a [TableReader] for reading over all resource state partitions.
     */
    private fun newResourceStateReader(path: Path): TableReader {
-        val partitions = Files.walk(path, 1)
-            .filter { !Files.isDirectory(it) && it.extension == "csv" }
-            .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
-            .toSortedMap()
+        val partitions =
+            Files.walk(path, 1)
+                .filter { !Files.isDirectory(it) && it.extension == "csv" }
+                .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
+                .toSortedMap()
        val it = partitions.iterator()
 
        return object : CompositeTableReader() {
diff --git a/opendc-trace/opendc-trace-bitbrains/src/test/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormatTest.kt b/opendc-trace/opendc-trace-bitbrains/src/test/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormatTest.kt
index e8c7094b..18c59fb8 100644
--- a/opendc-trace/opendc-trace-bitbrains/src/test/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormatTest.kt
+++ b/opendc-trace/opendc-trace-bitbrains/src/test/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormatTest.kt
@@ -33,9 +33,9 @@ import org.junit.jupiter.api.Test
 import org.junit.jupiter.api.assertThrows
 import org.opendc.trace.TableColumn
 import org.opendc.trace.TableReader
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
 import org.opendc.trace.conv.TABLE_RESOURCE_STATES
+import org.opendc.trace.conv.resourceStateCpuUsage
+import org.opendc.trace.conv.resourceStateTimestamp
 import org.opendc.trace.testkit.TableReaderTestKit
 import java.nio.file.Paths
@@ -72,8 +72,8 @@ internal class BitbrainsExTraceFormatTest {
         assertAll(
             { assertTrue(reader.nextRow()) },
-            { assertEquals(1631911500, reader.getInstant(RESOURCE_STATE_TIMESTAMP)?.epochSecond) },
-            { assertEquals(21.2, reader.getDouble(RESOURCE_STATE_CPU_USAGE), 0.01) }
+            { assertEquals(1631911500, reader.getInstant(resourceStateTimestamp)?.epochSecond) },
+            { assertEquals(21.2, reader.getDouble(resourceStateCpuUsage), 0.01) },
         )
 
         reader.close()
diff --git a/opendc-trace/opendc-trace-bitbrains/src/test/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormatTest.kt b/opendc-trace/opendc-trace-bitbrains/src/test/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormatTest.kt
index edab8747..8ff13852 100644
--- a/opendc-trace/opendc-trace-bitbrains/src/test/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormatTest.kt
+++ b/opendc-trace/opendc-trace-bitbrains/src/test/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormatTest.kt
@@ -34,11 +34,11 @@ import org.junit.jupiter.api.Test
 import org.junit.jupiter.api.assertThrows
 import org.opendc.trace.TableColumn
 import org.opendc.trace.TableReader
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
 import org.opendc.trace.conv.TABLE_RESOURCES
 import org.opendc.trace.conv.TABLE_RESOURCE_STATES
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceStateCpuUsage
+import org.opendc.trace.conv.resourceStateTimestamp
 import org.opendc.trace.testkit.TableReaderTestKit
 import java.nio.file.Paths
@@ -75,8 +75,8 @@ class BitbrainsTraceFormatTest {
         assertAll(
             { assertTrue(reader.nextRow()) },
-            { assertEquals("bitbrains", reader.getString(RESOURCE_ID)) },
-            { assertFalse(reader.nextRow()) }
+            { assertEquals("bitbrains", reader.getString(resourceID)) },
+            { assertFalse(reader.nextRow()) },
         )
 
         reader.close()
@@ -89,8 +89,8 @@ class BitbrainsTraceFormatTest {
         assertAll(
             { assertTrue(reader.nextRow()) },
-            { assertEquals(1376314846, reader.getInstant(RESOURCE_STATE_TIMESTAMP)?.epochSecond) },
-            { assertEquals(19.066, reader.getDouble(RESOURCE_STATE_CPU_USAGE), 0.01) }
+            { assertEquals(1376314846, reader.getInstant(resourceStateTimestamp)?.epochSecond) },
+            { assertEquals(19.066, reader.getDouble(resourceStateCpuUsage), 0.01) },
        )
 
        reader.close()
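For context on how the renamed constants are consumed, the sketch below reads the resource-state table through the API that appears in this diff (`BitbrainsTraceFormat.newReader`, `TableReader.resolve`, `nextRow`, and the typed getters). It is not part of the commit; the trace directory path is a hypothetical placeholder.

```kotlin
import org.opendc.trace.bitbrains.BitbrainsTraceFormat
import org.opendc.trace.conv.TABLE_RESOURCE_STATES
import org.opendc.trace.conv.resourceStateCpuUsage
import org.opendc.trace.conv.resourceStateTimestamp
import java.nio.file.Paths

fun main() {
    // Hypothetical directory containing Bitbrains .csv partitions.
    val tracePath = Paths.get("traces/bitbrains")

    val format = BitbrainsTraceFormat()
    val reader = format.newReader(tracePath, TABLE_RESOURCE_STATES, null)

    // Resolve the camelCase column names to indices once, then iterate the rows.
    val timestampCol = reader.resolve(resourceStateTimestamp)
    val cpuUsageCol = reader.resolve(resourceStateCpuUsage)

    while (reader.nextRow()) {
        val timestamp = reader.getInstant(timestampCol)
        val cpuUsageMhz = reader.getDouble(cpuUsageCol)
        println("$timestamp: $cpuUsageMhz MHz")
    }

    reader.close()
}
```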
