Diffstat (limited to 'opendc-trace/opendc-trace-bitbrains/src/main')
-rw-r--r--  opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExResourceStateTableReader.kt  212
-rw-r--r--  opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormat.kt  108
-rw-r--r--  opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceStateTableReader.kt  288
-rw-r--r--  opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceTableReader.kt  109
-rw-r--r--  opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormat.kt  124
-rw-r--r--  opendc-trace/opendc-trace-bitbrains/src/main/resources/META-INF/services/org.opendc.trace.spi.TraceFormat  2
6 files changed, 843 insertions(+), 0 deletions(-)
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExResourceStateTableReader.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExResourceStateTableReader.kt
new file mode 100644
index 00000000..c1b6f5ba
--- /dev/null
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExResourceStateTableReader.kt
@@ -0,0 +1,212 @@
+/*
+ * Copyright (c) 2021 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.trace.bitbrains
+
+import org.opendc.trace.*
+import java.io.BufferedReader
+import java.time.Instant
+
+/**
+ * A [TableReader] for the extended Bitbrains resource state table.
+ */
+internal class BitbrainsExResourceStateTableReader(private val reader: BufferedReader) : TableReader {
+ override fun nextRow(): Boolean {
+ reset()
+
+ var line: String
+ var num = 0
+
+ while (true) {
+ line = reader.readLine() ?: return false
+ num++
+
+ if (line.isBlank() || line[0] == '#') {
+ // Ignore empty lines or comments (check isBlank first to avoid indexing an empty line)
+ continue
+ }
+
+ break
+ }
+
+ line = line.trim()
+
+ val length = line.length
+ var col = 0
+ var start: Int
+ var end = 0
+
+ while (end < length) {
+ // Trim all whitespace before the field
+ start = end
+ while (start < length && line[start].isWhitespace()) {
+ start++
+ }
+
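+ // The field ends at the next space; the final field runs to the end of the line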
+ end = line.indexOf(' ', start)
+
+ if (end < 0) {
+ end = length
+ }
+
+ val field = line.subSequence(start, end) as String
+ when (col++) {
+ COL_TIMESTAMP -> timestamp = Instant.ofEpochSecond(field.toLong(10))
+ COL_CPU_USAGE -> cpuUsage = field.toDouble()
+ COL_CPU_DEMAND -> cpuDemand = field.toDouble()
+ COL_DISK_READ -> diskRead = field.toDouble()
+ COL_DISK_WRITE -> diskWrite = field.toDouble()
+ COL_CLUSTER_ID -> cluster = field.trim()
+ COL_NCPUS -> cpuCores = field.toInt(10)
+ COL_CPU_READY_PCT -> cpuReadyPct = field.toDouble()
+ COL_POWERED_ON -> poweredOn = field.toInt(10) == 1
+ COL_CPU_CAPACITY -> cpuCapacity = field.toDouble()
+ COL_ID -> id = field.trim()
+ COL_MEM_CAPACITY -> memCapacity = field.toDouble() * 1000 // Convert from MB to KB
+ }
+ }
+
+ return true
+ }
+
+ override fun resolve(column: TableColumn<*>): Int = columns[column] ?: -1
+
+ override fun isNull(index: Int): Boolean {
+ require(index in 0 until COL_MAX) { "Invalid column index" }
+ return false
+ }
+
+ override fun get(index: Int): Any? {
+ return when (index) {
+ COL_ID -> id
+ COL_CLUSTER_ID -> cluster
+ COL_TIMESTAMP -> timestamp
+ COL_NCPUS -> getInt(index)
+ COL_POWERED_ON -> getBoolean(index)
+ COL_CPU_CAPACITY, COL_CPU_USAGE, COL_CPU_USAGE_PCT, COL_CPU_READY_PCT, COL_CPU_DEMAND, COL_MEM_CAPACITY, COL_DISK_READ, COL_DISK_WRITE -> getDouble(index)
+ else -> throw IllegalArgumentException("Invalid column")
+ }
+ }
+
+ override fun getBoolean(index: Int): Boolean {
+ return when (index) {
+ COL_POWERED_ON -> poweredOn
+ else -> throw IllegalArgumentException("Invalid column")
+ }
+ }
+
+ override fun getInt(index: Int): Int {
+ return when (index) {
+ COL_NCPUS -> cpuCores
+ else -> throw IllegalArgumentException("Invalid column")
+ }
+ }
+
+ override fun getLong(index: Int): Long {
+ throw IllegalArgumentException("Invalid column")
+ }
+
+ override fun getDouble(index: Int): Double {
+ return when (index) {
+ COL_CPU_CAPACITY -> cpuCapacity
+ COL_CPU_USAGE -> cpuUsage
+ COL_CPU_USAGE_PCT -> cpuUsage / cpuCapacity
+ COL_CPU_READY_PCT -> cpuReadyPct
+ COL_CPU_DEMAND -> cpuDemand
+ COL_MEM_CAPACITY -> memCapacity
+ COL_DISK_READ -> diskRead
+ COL_DISK_WRITE -> diskWrite
+ else -> throw IllegalArgumentException("Invalid column")
+ }
+ }
+
+ override fun close() {
+ reader.close()
+ }
+
+ /**
+ * State fields of the reader.
+ */
+ private var id: String? = null
+ private var cluster: String? = null
+ private var timestamp: Instant? = null
+ private var cpuCores = -1
+ private var cpuCapacity = Double.NaN
+ private var cpuUsage = Double.NaN
+ private var cpuDemand = Double.NaN
+ private var cpuReadyPct = Double.NaN
+ private var memCapacity = Double.NaN
+ private var diskRead = Double.NaN
+ private var diskWrite = Double.NaN
+ private var poweredOn: Boolean = false
+
+ /**
+ * Reset the state of the reader.
+ */
+ private fun reset() {
+ id = null
+ timestamp = null
+ cluster = null
+ cpuCores = -1
+ cpuCapacity = Double.NaN
+ cpuUsage = Double.NaN
+ cpuDemand = Double.NaN
+ cpuReadyPct = Double.NaN
+ memCapacity = Double.NaN
+ diskRead = Double.NaN
+ diskWrite = Double.NaN
+ poweredOn = false
+ }
+
+ /**
+ * Default column indices for the extended Bitbrains format.
+ */
+ private val COL_TIMESTAMP = 0
+ private val COL_CPU_USAGE = 1
+ private val COL_CPU_DEMAND = 2
+ private val COL_DISK_READ = 4
+ private val COL_DISK_WRITE = 6
+ private val COL_CLUSTER_ID = 10
+ private val COL_NCPUS = 12
+ private val COL_CPU_READY_PCT = 13
+ private val COL_POWERED_ON = 14
+ private val COL_CPU_CAPACITY = 18
+ private val COL_ID = 19
+ private val COL_MEM_CAPACITY = 20
+ private val COL_CPU_USAGE_PCT = 21
+ private val COL_MAX = COL_CPU_USAGE_PCT + 1
+
+ private val columns = mapOf(
+ RESOURCE_ID to COL_ID,
+ RESOURCE_CLUSTER_ID to COL_CLUSTER_ID,
+ RESOURCE_STATE_TIMESTAMP to COL_TIMESTAMP,
+ RESOURCE_CPU_COUNT to COL_NCPUS,
+ RESOURCE_CPU_CAPACITY to COL_CPU_CAPACITY,
+ RESOURCE_STATE_CPU_USAGE to COL_CPU_USAGE,
+ RESOURCE_STATE_CPU_USAGE_PCT to COL_CPU_USAGE_PCT,
+ RESOURCE_STATE_CPU_DEMAND to COL_CPU_DEMAND,
+ RESOURCE_STATE_CPU_READY_PCT to COL_CPU_READY_PCT,
+ RESOURCE_MEM_CAPACITY to COL_MEM_CAPACITY,
+ RESOURCE_STATE_DISK_READ to COL_DISK_READ,
+ RESOURCE_STATE_DISK_WRITE to COL_DISK_WRITE
+ )
+}
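For orientation, a minimal sketch of driving this reader directly from within the same module (the class is internal). The partition file name is hypothetical; in practice the reader is constructed by BitbrainsExTraceFormat below.

    import org.opendc.trace.RESOURCE_STATE_CPU_USAGE
    import org.opendc.trace.RESOURCE_STATE_TIMESTAMP
    import org.opendc.trace.bitbrains.BitbrainsExResourceStateTableReader
    import java.nio.file.Paths
    import kotlin.io.path.bufferedReader

    fun main() {
        // Hypothetical partition file in the whitespace-delimited extended layout
        val reader = BitbrainsExResourceStateTableReader(Paths.get("vm-001.txt").bufferedReader())
        val tsCol = reader.resolve(RESOURCE_STATE_TIMESTAMP)
        val usageCol = reader.resolve(RESOURCE_STATE_CPU_USAGE)
        try {
            while (reader.nextRow()) {
                println("${reader.get(tsCol)} cpuUsage=${reader.getDouble(usageCol)}")
            }
        } finally {
            reader.close()
        }
    }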
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormat.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormat.kt
new file mode 100644
index 00000000..20222c8a
--- /dev/null
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsExTraceFormat.kt
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2021 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.trace.bitbrains
+
+import org.opendc.trace.*
+import org.opendc.trace.spi.TableDetails
+import org.opendc.trace.spi.TraceFormat
+import org.opendc.trace.util.CompositeTableReader
+import java.nio.file.Files
+import java.nio.file.Path
+import java.util.stream.Collectors
+import kotlin.io.path.bufferedReader
+import kotlin.io.path.extension
+import kotlin.io.path.nameWithoutExtension
+
+/**
+ * A format implementation for the extended Bitbrains trace format.
+ */
+public class BitbrainsExTraceFormat : TraceFormat {
+ /**
+ * The name of this trace format.
+ */
+ override val name: String = "bitbrains-ex"
+
+ override fun create(path: Path) {
+ throw UnsupportedOperationException("Writing not supported for this format")
+ }
+
+ override fun getTables(path: Path): List<String> = listOf(TABLE_RESOURCE_STATES)
+
+ override fun getDetails(path: Path, table: String): TableDetails {
+ return when (table) {
+ TABLE_RESOURCE_STATES -> TableDetails(
+ listOf(
+ RESOURCE_ID,
+ RESOURCE_CLUSTER_ID,
+ RESOURCE_STATE_TIMESTAMP,
+ RESOURCE_CPU_COUNT,
+ RESOURCE_CPU_CAPACITY,
+ RESOURCE_STATE_CPU_USAGE,
+ RESOURCE_STATE_CPU_USAGE_PCT,
+ RESOURCE_STATE_CPU_DEMAND,
+ RESOURCE_STATE_CPU_READY_PCT,
+ RESOURCE_MEM_CAPACITY,
+ RESOURCE_STATE_DISK_READ,
+ RESOURCE_STATE_DISK_WRITE
+ ),
+ listOf(RESOURCE_ID, RESOURCE_STATE_TIMESTAMP)
+ )
+ else -> throw IllegalArgumentException("Table $table not supported")
+ }
+ }
+
+ override fun newReader(path: Path, table: String): TableReader {
+ return when (table) {
+ TABLE_RESOURCE_STATES -> newResourceStateReader(path)
+ else -> throw IllegalArgumentException("Table $table not supported")
+ }
+ }
+
+ override fun newWriter(path: Path, table: String): TableWriter {
+ throw UnsupportedOperationException("Writing not supported for this format")
+ }
+
+ /**
+ * Construct a [TableReader] for reading over all resource state partitions.
+ */
+ private fun newResourceStateReader(path: Path): TableReader {
+ val partitions = Files.walk(path, 1)
+ .filter { !Files.isDirectory(it) && it.extension == "txt" }
+ .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
+ .toSortedMap()
+ val it = partitions.iterator()
+
+ return object : CompositeTableReader() {
+ override fun nextReader(): TableReader? {
+ return if (it.hasNext()) {
+ val (_, partPath) = it.next()
+ BitbrainsExResourceStateTableReader(partPath.bufferedReader())
+ } else {
+ null
+ }
+ }
+
+ override fun toString(): String = "BitbrainsExCompositeTableReader"
+ }
+ }
+}
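A sketch of reading the resource-state table through the format itself. The directory path is an assumption; it is expected to hold one .txt partition per VM, which is what newResourceStateReader scans for.

    import org.opendc.trace.TABLE_RESOURCE_STATES
    import org.opendc.trace.bitbrains.BitbrainsExTraceFormat
    import java.nio.file.Paths

    fun main() {
        val format = BitbrainsExTraceFormat()
        // Hypothetical directory containing the per-VM .txt partitions
        val reader = format.newReader(Paths.get("/traces/bitbrains-ex"), TABLE_RESOURCE_STATES)
        var rows = 0L
        while (reader.nextRow()) {
            rows++
        }
        reader.close()
        println("Read $rows resource states")
    }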
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceStateTableReader.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceStateTableReader.kt
new file mode 100644
index 00000000..3a8839b4
--- /dev/null
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceStateTableReader.kt
@@ -0,0 +1,288 @@
+/*
+ * Copyright (c) 2021 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.trace.bitbrains
+
+import com.fasterxml.jackson.core.JsonParseException
+import com.fasterxml.jackson.core.JsonToken
+import com.fasterxml.jackson.dataformat.csv.CsvParser
+import com.fasterxml.jackson.dataformat.csv.CsvSchema
+import org.opendc.trace.*
+import java.text.NumberFormat
+import java.time.Instant
+import java.time.LocalDateTime
+import java.time.ZoneOffset
+import java.time.format.DateTimeFormatter
+import java.time.format.DateTimeParseException
+import java.util.*
+
+/**
+ * A [TableReader] for the Bitbrains resource state table.
+ */
+internal class BitbrainsResourceStateTableReader(private val partition: String, private val parser: CsvParser) : TableReader {
+ /**
+ * The [DateTimeFormatter] used to parse the timestamps of Materna-style traces.
+ */
+ private val formatter = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm:ss")
+
+ /**
+ * The type of timestamps in the trace.
+ */
+ private var timestampType: TimestampType = TimestampType.UNDECIDED
+
+ /**
+ * The [NumberFormat] used to parse doubles containing a comma.
+ */
+ private val nf = NumberFormat.getInstance(Locale.GERMAN)
+
+ /**
+ * A flag to indicate that the trace contains decimals with a comma separator.
+ */
+ private var usesCommaDecimalSeparator = false
+
+ init {
+ parser.schema = schema
+ }
+
+ override fun nextRow(): Boolean {
+ // Reset the row state
+ reset()
+
+ if (!nextStart()) {
+ return false
+ }
+
+ while (true) {
+ val token = parser.nextValue()
+
+ if (token == null || token == JsonToken.END_OBJECT) {
+ break
+ }
+
+ when (parser.currentName) {
+ "Timestamp [ms]" -> {
+ timestamp = when (timestampType) {
+ TimestampType.UNDECIDED -> {
+ try {
+ val res = LocalDateTime.parse(parser.text, formatter).toInstant(ZoneOffset.UTC)
+ timestampType = TimestampType.DATE_TIME
+ res
+ } catch (e: DateTimeParseException) {
+ timestampType = TimestampType.EPOCH_MILLIS
+ Instant.ofEpochSecond(parser.longValue)
+ }
+ }
+ TimestampType.DATE_TIME -> LocalDateTime.parse(parser.text, formatter).toInstant(ZoneOffset.UTC)
+ TimestampType.EPOCH_MILLIS -> Instant.ofEpochSecond(parser.longValue)
+ }
+ }
+ "CPU cores" -> cpuCores = parser.intValue
+ "CPU capacity provisioned [MHZ]" -> cpuCapacity = parseSafeDouble()
+ "CPU usage [MHZ]" -> cpuUsage = parseSafeDouble()
+ "CPU usage [%]" -> cpuUsagePct = parseSafeDouble() / 100.0 // Convert to range [0, 1]
+ "Memory capacity provisioned [KB]" -> memCapacity = parseSafeDouble()
+ "Memory usage [KB]" -> memUsage = parseSafeDouble()
+ "Disk read throughput [KB/s]" -> diskRead = parseSafeDouble()
+ "Disk write throughput [KB/s]" -> diskWrite = parseSafeDouble()
+ "Network received throughput [KB/s]" -> netReceived = parseSafeDouble()
+ "Network transmitted throughput [KB/s]" -> netTransmitted = parseSafeDouble()
+ }
+ }
+
+ return true
+ }
+
+ override fun resolve(column: TableColumn<*>): Int = columns[column] ?: -1
+
+ override fun isNull(index: Int): Boolean {
+ check(index in 0 until columns.size) { "Invalid column index" }
+ return false
+ }
+
+ override fun get(index: Int): Any? {
+ return when (index) {
+ COL_ID -> partition
+ COL_TIMESTAMP -> timestamp
+ COL_CPU_COUNT -> getInt(index)
+ COL_CPU_CAPACITY, COL_CPU_USAGE, COL_CPU_USAGE_PCT, COL_MEM_CAPACITY, COL_MEM_USAGE, COL_DISK_READ, COL_DISK_WRITE, COL_NET_RX, COL_NET_TX -> getDouble(index)
+ else -> throw IllegalArgumentException("Invalid column")
+ }
+ }
+
+ override fun getBoolean(index: Int): Boolean {
+ throw IllegalArgumentException("Invalid column")
+ }
+
+ override fun getInt(index: Int): Int {
+ return when (index) {
+ COL_CPU_COUNT -> cpuCores
+ else -> throw IllegalArgumentException("Invalid column")
+ }
+ }
+
+ override fun getLong(index: Int): Long {
+ throw IllegalArgumentException("Invalid column")
+ }
+
+ override fun getDouble(index: Int): Double {
+ return when (index) {
+ COL_CPU_CAPACITY -> cpuCapacity
+ COL_CPU_USAGE -> cpuUsage
+ COL_CPU_USAGE_PCT -> cpuUsagePct
+ COL_MEM_CAPACITY -> memCapacity
+ COL_MEM_USAGE -> memUsage
+ COL_DISK_READ -> diskRead
+ COL_DISK_WRITE -> diskWrite
+ COL_NET_RX -> netReceived
+ COL_NET_TX -> netTransmitted
+ else -> throw IllegalArgumentException("Invalid column")
+ }
+ }
+
+ override fun close() {
+ parser.close()
+ }
+
+ /**
+ * Advance the parser until the next object start.
+ */
+ private fun nextStart(): Boolean {
+ var token = parser.nextValue()
+
+ while (token != null && token != JsonToken.START_OBJECT) {
+ token = parser.nextValue()
+ }
+
+ return token != null
+ }
+
+ /**
+ * Try to parse the current value safely as double.
+ */
+ private fun parseSafeDouble(): Double {
+ if (!usesCommaDecimalSeparator) {
+ try {
+ return parser.doubleValue
+ } catch (e: JsonParseException) {
+ usesCommaDecimalSeparator = true
+ }
+ }
+
+ val text = parser.text
+ if (text.isBlank()) {
+ return 0.0
+ }
+
+ return nf.parse(text).toDouble()
+ }
+
+ /**
+ * State fields of the reader.
+ */
+ private var timestamp: Instant? = null
+ private var cpuCores = -1
+ private var cpuCapacity = Double.NaN
+ private var cpuUsage = Double.NaN
+ private var cpuUsagePct = Double.NaN
+ private var memCapacity = Double.NaN
+ private var memUsage = Double.NaN
+ private var diskRead = Double.NaN
+ private var diskWrite = Double.NaN
+ private var netReceived = Double.NaN
+ private var netTransmitted = Double.NaN
+
+ /**
+ * Reset the state.
+ */
+ private fun reset() {
+ timestamp = null
+ cpuCores = -1
+ cpuCapacity = Double.NaN
+ cpuUsage = Double.NaN
+ cpuUsagePct = Double.NaN
+ memCapacity = Double.NaN
+ memUsage = Double.NaN
+ diskRead = Double.NaN
+ diskWrite = Double.NaN
+ netReceived = Double.NaN
+ netTransmitted = Double.NaN
+ }
+
+ private val COL_TIMESTAMP = 0
+ private val COL_CPU_COUNT = 1
+ private val COL_CPU_CAPACITY = 2
+ private val COL_CPU_USAGE = 3
+ private val COL_CPU_USAGE_PCT = 4
+ private val COL_MEM_CAPACITY = 5
+ private val COL_MEM_USAGE = 6
+ private val COL_DISK_READ = 7
+ private val COL_DISK_WRITE = 8
+ private val COL_NET_RX = 9
+ private val COL_NET_TX = 10
+ private val COL_ID = 11
+
+ private val columns = mapOf(
+ RESOURCE_ID to COL_ID,
+ RESOURCE_STATE_TIMESTAMP to COL_TIMESTAMP,
+ RESOURCE_CPU_COUNT to COL_CPU_COUNT,
+ RESOURCE_CPU_CAPACITY to COL_CPU_CAPACITY,
+ RESOURCE_STATE_CPU_USAGE to COL_CPU_USAGE,
+ RESOURCE_STATE_CPU_USAGE_PCT to COL_CPU_USAGE_PCT,
+ RESOURCE_MEM_CAPACITY to COL_MEM_CAPACITY,
+ RESOURCE_STATE_MEM_USAGE to COL_MEM_USAGE,
+ RESOURCE_STATE_DISK_READ to COL_DISK_READ,
+ RESOURCE_STATE_DISK_WRITE to COL_DISK_WRITE,
+ RESOURCE_STATE_NET_RX to COL_NET_RX,
+ RESOURCE_STATE_NET_TX to COL_NET_TX
+ )
+
+ /**
+ * The type of the timestamp in the trace.
+ */
+ private enum class TimestampType {
+ UNDECIDED, DATE_TIME, EPOCH_MILLIS
+ }
+
+ companion object {
+ /**
+ * The [CsvSchema] that is used to parse the trace.
+ */
+ private val schema = CsvSchema.builder()
+ .addColumn("Timestamp [ms]", CsvSchema.ColumnType.NUMBER_OR_STRING)
+ .addColumn("CPU cores", CsvSchema.ColumnType.NUMBER)
+ .addColumn("CPU capacity provisioned [MHZ]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("CPU usage [MHZ]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("CPU usage [%]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("Memory capacity provisioned [KB]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("Memory usage [KB]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("Memory usage [%]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("Disk read throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("Disk write throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("Disk size [GB]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("Network received throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
+ .addColumn("Network transmitted throughput [KB/s]", CsvSchema.ColumnType.NUMBER)
+ .setAllowComments(true)
+ .setUseHeader(true)
+ .setColumnSeparator(';')
+ .build()
+ }
+}
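The comma-decimal fallback in parseSafeDouble relies on java.text.NumberFormat with the German locale; a small, self-contained illustration of the conversion it performs (the sample values are made up):

    import java.text.NumberFormat
    import java.util.Locale

    fun main() {
        val nf = NumberFormat.getInstance(Locale.GERMAN)
        // German-style numbers use ',' as the decimal separator and '.' for grouping
        println(nf.parse("1.234,56").toDouble()) // prints 1234.56
        println(nf.parse("0,75").toDouble())     // prints 0.75
    }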
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceTableReader.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceTableReader.kt
new file mode 100644
index 00000000..3701994a
--- /dev/null
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsResourceTableReader.kt
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2021 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.trace.bitbrains
+
+import com.fasterxml.jackson.dataformat.csv.CsvFactory
+import org.opendc.trace.*
+import java.nio.file.Path
+
+/**
+ * A [TableReader] for the Bitbrains resource table.
+ */
+internal class BitbrainsResourceTableReader(private val factory: CsvFactory, vms: Map<String, Path>) : TableReader {
+ /**
+ * An iterator to iterate over the resource entries.
+ */
+ private val it = vms.iterator()
+
+ override fun nextRow(): Boolean {
+ reset()
+
+ while (it.hasNext()) {
+ val (name, path) = it.next()
+
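+ // Each VM corresponds to one CSV file; its file name is passed as the partition and
+ // becomes the resource id exposed by the state reader below.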
+ val parser = factory.createParser(path.toFile())
+ val reader = BitbrainsResourceStateTableReader(name, parser)
+ val idCol = reader.resolve(RESOURCE_ID)
+
+ try {
+ if (!reader.nextRow()) {
+ continue
+ }
+
+ id = reader.get(idCol) as String
+ return true
+ } finally {
+ reader.close()
+ }
+ }
+
+ return false
+ }
+
+ override fun resolve(column: TableColumn<*>): Int = columns[column] ?: -1
+
+ override fun isNull(index: Int): Boolean {
+ check(index in 0 until columns.size) { "Invalid column index" }
+ return false
+ }
+
+ override fun get(index: Int): Any? {
+ return when (index) {
+ COL_ID -> id
+ else -> throw IllegalArgumentException("Invalid column")
+ }
+ }
+
+ override fun getBoolean(index: Int): Boolean {
+ throw IllegalArgumentException("Invalid column")
+ }
+
+ override fun getInt(index: Int): Int {
+ throw IllegalArgumentException("Invalid column")
+ }
+
+ override fun getLong(index: Int): Long {
+ throw IllegalArgumentException("Invalid column")
+ }
+
+ override fun getDouble(index: Int): Double {
+ throw IllegalArgumentException("Invalid column")
+ }
+
+ override fun close() {}
+
+ /**
+ * State fields of the reader.
+ */
+ private var id: String? = null
+
+ /**
+ * Reset the state of the reader.
+ */
+ private fun reset() {
+ id = null
+ }
+
+ private val COL_ID = 0
+ private val columns = mapOf(RESOURCE_ID to COL_ID)
+}
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormat.kt b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormat.kt
new file mode 100644
index 00000000..3885c931
--- /dev/null
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/kotlin/org/opendc/trace/bitbrains/BitbrainsTraceFormat.kt
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2021 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.trace.bitbrains
+
+import com.fasterxml.jackson.dataformat.csv.CsvFactory
+import com.fasterxml.jackson.dataformat.csv.CsvParser
+import org.opendc.trace.*
+import org.opendc.trace.spi.TableDetails
+import org.opendc.trace.spi.TraceFormat
+import org.opendc.trace.util.CompositeTableReader
+import java.nio.file.Files
+import java.nio.file.Path
+import java.util.stream.Collectors
+import kotlin.io.path.extension
+import kotlin.io.path.nameWithoutExtension
+
+/**
+ * A format implementation for the Bitbrains trace format.
+ */
+public class BitbrainsTraceFormat : TraceFormat {
+ /**
+ * The name of this trace format.
+ */
+ override val name: String = "bitbrains"
+
+ /**
+ * The [CsvFactory] used to create the parser.
+ */
+ private val factory = CsvFactory()
+ .enable(CsvParser.Feature.ALLOW_COMMENTS)
+ .enable(CsvParser.Feature.TRIM_SPACES)
+
+ override fun create(path: Path) {
+ throw UnsupportedOperationException("Writing not supported for this format")
+ }
+
+ override fun getTables(path: Path): List<String> = listOf(TABLE_RESOURCES, TABLE_RESOURCE_STATES)
+
+ override fun getDetails(path: Path, table: String): TableDetails {
+ return when (table) {
+ TABLE_RESOURCES -> TableDetails(listOf(RESOURCE_ID))
+ TABLE_RESOURCE_STATES -> TableDetails(
+ listOf(
+ RESOURCE_ID,
+ RESOURCE_STATE_TIMESTAMP,
+ RESOURCE_CPU_COUNT,
+ RESOURCE_CPU_CAPACITY,
+ RESOURCE_STATE_CPU_USAGE,
+ RESOURCE_STATE_CPU_USAGE_PCT,
+ RESOURCE_MEM_CAPACITY,
+ RESOURCE_STATE_MEM_USAGE,
+ RESOURCE_STATE_DISK_READ,
+ RESOURCE_STATE_DISK_WRITE,
+ RESOURCE_STATE_NET_RX,
+ RESOURCE_STATE_NET_TX,
+ ),
+ listOf(RESOURCE_ID, RESOURCE_STATE_TIMESTAMP)
+ )
+ else -> throw IllegalArgumentException("Table $table not supported")
+ }
+ }
+
+ override fun newReader(path: Path, table: String): TableReader {
+ return when (table) {
+ TABLE_RESOURCES -> {
+ val vms = Files.walk(path, 1)
+ .filter { !Files.isDirectory(it) && it.extension == "csv" }
+ .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
+ .toSortedMap()
+ BitbrainsResourceTableReader(factory, vms)
+ }
+ TABLE_RESOURCE_STATES -> newResourceStateReader(path)
+ else -> throw IllegalArgumentException("Table $table not supported")
+ }
+ }
+
+ override fun newWriter(path: Path, table: String): TableWriter {
+ throw UnsupportedOperationException("Writing not supported for this format")
+ }
+
+ /**
+ * Construct a [TableReader] for reading over all resource state partitions.
+ */
+ private fun newResourceStateReader(path: Path): TableReader {
+ val partitions = Files.walk(path, 1)
+ .filter { !Files.isDirectory(it) && it.extension == "csv" }
+ .collect(Collectors.toMap({ it.nameWithoutExtension }, { it }))
+ .toSortedMap()
+ val it = partitions.iterator()
+
+ return object : CompositeTableReader() {
+ override fun nextReader(): TableReader? {
+ return if (it.hasNext()) {
+ val (partition, partPath) = it.next()
+ BitbrainsResourceStateTableReader(partition, factory.createParser(partPath.toFile()))
+ } else {
+ null
+ }
+ }
+
+ override fun toString(): String = "BitbrainsCompositeTableReader"
+ }
+ }
+}
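A sketch of listing the VMs in a regular Bitbrains trace through this format. The directory is hypothetical and should contain one .csv file per VM.

    import org.opendc.trace.RESOURCE_ID
    import org.opendc.trace.TABLE_RESOURCES
    import org.opendc.trace.bitbrains.BitbrainsTraceFormat
    import java.nio.file.Paths

    fun main() {
        val format = BitbrainsTraceFormat()
        // Hypothetical directory with one CSV file per VM
        val reader = format.newReader(Paths.get("/traces/bitbrains/fastStorage"), TABLE_RESOURCES)
        val idCol = reader.resolve(RESOURCE_ID)
        while (reader.nextRow()) {
            println(reader.get(idCol))
        }
        reader.close()
    }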
diff --git a/opendc-trace/opendc-trace-bitbrains/src/main/resources/META-INF/services/org.opendc.trace.spi.TraceFormat b/opendc-trace/opendc-trace-bitbrains/src/main/resources/META-INF/services/org.opendc.trace.spi.TraceFormat
new file mode 100644
index 00000000..fd6a2180
--- /dev/null
+++ b/opendc-trace/opendc-trace-bitbrains/src/main/resources/META-INF/services/org.opendc.trace.spi.TraceFormat
@@ -0,0 +1,2 @@
+org.opendc.trace.bitbrains.BitbrainsTraceFormat
+org.opendc.trace.bitbrains.BitbrainsExTraceFormat
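These entries register both formats for discovery without referencing the classes directly. A sketch of resolving one at runtime, assuming TraceFormat is looked up through the standard java.util.ServiceLoader mechanism that META-INF/services files target (OpenDC may wrap this in its own helper).

    import org.opendc.trace.spi.TraceFormat
    import java.nio.file.Paths
    import java.util.ServiceLoader

    fun main() {
        val format = ServiceLoader.load(TraceFormat::class.java)
            .firstOrNull { it.name == "bitbrains-ex" }
            ?: error("bitbrains-ex format not found on the classpath")
        // Hypothetical trace directory; getTables does not touch the file system here
        println(format.getTables(Paths.get("/traces/bitbrains-ex")))
    }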