Diffstat (limited to 'opendc-trace/opendc-trace-azure/src')
-rw-r--r--  opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt                  |  82
-rw-r--r--  opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt        | 219
-rw-r--r--  opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt             | 246
-rw-r--r--  opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt                     | 147
-rw-r--r--  opendc-trace/opendc-trace-azure/src/main/resources/META-INF/services/org.opendc.trace.spi.TraceFormat          |   1
-rw-r--r--  opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt                 | 131
-rw-r--r--  opendc-trace/opendc-trace-azure/src/test/resources/trace/vm_cpu_readings/vm_cpu_readings-file-1-of-125.csv.gz  | bin 6905 -> 0 bytes
-rw-r--r--  opendc-trace/opendc-trace-azure/src/test/resources/trace/vmtable/vmtable.csv.gz                                | bin 1423 -> 0 bytes
8 files changed, 0 insertions, 826 deletions
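
For reference, the deleted module registered itself through the META-INF/services entry removed below. The following sketch (illustrative only, not part of this change) shows how a consumer could have discovered the "azure" format and walked the resources table, mirroring the deleted AzureTraceBenchmarks.kt; the direct ServiceLoader lookup is an assumption, as OpenDC may provide its own helper for resolving formats.

    // Illustrative sketch: locate the removed "azure" format via java.util.ServiceLoader
    // (backed by the META-INF/services/org.opendc.trace.spi.TraceFormat entry deleted below)
    // and iterate the resources table the same way the deleted AzureTraceBenchmarks.kt did.
    import org.opendc.trace.conv.TABLE_RESOURCES
    import org.opendc.trace.conv.resourceID
    import org.opendc.trace.spi.TraceFormat
    import java.nio.file.Path
    import java.util.ServiceLoader

    fun main() {
        // Assumption: plain ServiceLoader lookup by format name.
        val format: TraceFormat =
            ServiceLoader.load(TraceFormat::class.java).first { it.name == "azure" }

        // Directory layout expected by the deleted AzureTraceFormat:
        // <trace>/vmtable/vmtable.csv.gz and <trace>/vm_cpu_readings/*.csv.gz
        val reader = format.newReader(Path.of("src/test/resources/trace"), TABLE_RESOURCES, null)
        try {
            val idColumn = reader.resolve(resourceID)
            while (reader.nextRow()) {
                println(reader.getString(idColumn))
            }
        } finally {
            reader.close()
        }
    }
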
diff --git a/opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt b/opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt
deleted file mode 100644
index bb3c2450..00000000
--- a/opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2022 AtLarge Research
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package org.opendc.trace.azure
-
-import org.opendc.trace.conv.TABLE_RESOURCES
-import org.opendc.trace.conv.TABLE_RESOURCE_STATES
-import org.opendc.trace.conv.resourceID
-import org.opendc.trace.spi.TraceFormat
-import org.openjdk.jmh.annotations.Benchmark
-import org.openjdk.jmh.annotations.Fork
-import org.openjdk.jmh.annotations.Measurement
-import org.openjdk.jmh.annotations.Scope
-import org.openjdk.jmh.annotations.Setup
-import org.openjdk.jmh.annotations.State
-import org.openjdk.jmh.annotations.Warmup
-import org.openjdk.jmh.infra.Blackhole
-import java.nio.file.Path
-import java.util.concurrent.TimeUnit
-
-/**
- * Benchmarks for parsing traces in the Azure VM format.
- */
-@State(Scope.Thread)
-@Fork(1)
-@Warmup(iterations = 2, time = 1, timeUnit = TimeUnit.SECONDS)
-@Measurement(iterations = 5, time = 3, timeUnit = TimeUnit.SECONDS)
-class AzureTraceBenchmarks {
- private lateinit var path: Path
- private lateinit var format: TraceFormat
-
- @Setup
- fun setUp() {
- path = Path.of("src/test/resources/trace")
- format = AzureTraceFormat()
- }
-
- @Benchmark
- fun benchmarkResourcesReader(bh: Blackhole) {
- val reader = format.newReader(path, TABLE_RESOURCES, null)
- try {
- val idColumn = reader.resolve(resourceID)
- while (reader.nextRow()) {
- bh.consume(reader.getString(idColumn))
- }
- } finally {
- reader.close()
- }
- }
-
- @Benchmark
- fun benchmarkResourceStatesReader(bh: Blackhole) {
- val reader = format.newReader(path, TABLE_RESOURCE_STATES, null)
- try {
- val idColumn = reader.resolve(resourceID)
- while (reader.nextRow()) {
- bh.consume(reader.getString(idColumn))
- }
- } finally {
- reader.close()
- }
- }
-}
diff --git a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt
deleted file mode 100644
index bcf6ff52..00000000
--- a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt
+++ /dev/null
@@ -1,219 +0,0 @@
-/*
- * Copyright (c) 2021 AtLarge Research
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package org.opendc.trace.azure
-
-import com.fasterxml.jackson.core.JsonToken
-import com.fasterxml.jackson.dataformat.csv.CsvParser
-import com.fasterxml.jackson.dataformat.csv.CsvSchema
-import org.opendc.trace.TableReader
-import org.opendc.trace.conv.resourceID
-import org.opendc.trace.conv.resourceStateCpuUsagePct
-import org.opendc.trace.conv.resourceStateTimestamp
-import java.time.Duration
-import java.time.Instant
-import java.util.UUID
-
-/**
- * A [TableReader] for the Azure v1 VM resource state table.
- */
-internal class AzureResourceStateTableReader(private val parser: CsvParser) : TableReader {
- /**
- * A flag to indicate whether a single row has been read already.
- */
- private var isStarted = false
-
- init {
- parser.schema = schema
- }
-
- override fun nextRow(): Boolean {
- if (!isStarted) {
- isStarted = true
- }
-
- reset()
-
- if (!nextStart()) {
- return false
- }
-
- while (true) {
- val token = parser.nextValue()
-
- if (token == null || token == JsonToken.END_OBJECT) {
- break
- }
-
- when (parser.currentName) {
- "timestamp" -> timestamp = Instant.ofEpochSecond(parser.longValue)
- "vm id" -> id = parser.text
- "CPU avg cpu" -> cpuUsagePct = (parser.doubleValue / 100.0) // Convert from % to [0, 1]
- }
- }
-
- return true
- }
-
- private val colID = 0
- private val colTimestamp = 1
- private val colCpuUsagePct = 2
-
- override fun resolve(name: String): Int {
- return when (name) {
- resourceID -> colID
- resourceStateTimestamp -> colTimestamp
- resourceStateCpuUsagePct -> colCpuUsagePct
- else -> -1
- }
- }
-
- override fun isNull(index: Int): Boolean {
- require(index in 0..colCpuUsagePct) { "Invalid column index" }
- return false
- }
-
- override fun getBoolean(index: Int): Boolean {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun getInt(index: Int): Int {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun getLong(index: Int): Long {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun getFloat(index: Int): Float {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun getDouble(index: Int): Double {
- checkActive()
- return when (index) {
- colCpuUsagePct -> cpuUsagePct
- else -> throw IllegalArgumentException("Invalid column")
- }
- }
-
- override fun getString(index: Int): String? {
- checkActive()
- return when (index) {
- colID -> id
- else -> throw IllegalArgumentException("Invalid column")
- }
- }
-
- override fun getUUID(index: Int): UUID? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun getInstant(index: Int): Instant? {
- checkActive()
- return when (index) {
- colTimestamp -> timestamp
- else -> throw IllegalArgumentException("Invalid column")
- }
- }
-
- override fun getDuration(index: Int): Duration? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun <T> getList(
- index: Int,
- elementType: Class<T>,
- ): List<T>? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun <K, V> getMap(
- index: Int,
- keyType: Class<K>,
- valueType: Class<V>,
- ): Map<K, V>? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun <T> getSet(
- index: Int,
- elementType: Class<T>,
- ): Set<T>? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun close() {
- parser.close()
- }
-
- /**
- * Helper method to check if the reader is active.
- */
- private fun checkActive() {
- check(isStarted && !parser.isClosed) { "No active row. Did you call nextRow()?" }
- }
-
- /**
- * Advance the parser until the next object start.
- */
- private fun nextStart(): Boolean {
- var token = parser.nextValue()
-
- while (token != null && token != JsonToken.START_OBJECT) {
- token = parser.nextValue()
- }
-
- return token != null
- }
-
- /**
- * State fields of the reader.
- */
- private var id: String? = null
- private var timestamp: Instant? = null
- private var cpuUsagePct = Double.NaN
-
- /**
- * Reset the state.
- */
- private fun reset() {
- id = null
- timestamp = null
- cpuUsagePct = Double.NaN
- }
-
- companion object {
- /**
- * The [CsvSchema] that is used to parse the trace.
- */
- private val schema =
- CsvSchema.builder()
- .addColumn("timestamp", CsvSchema.ColumnType.NUMBER)
- .addColumn("vm id", CsvSchema.ColumnType.STRING)
- .addColumn("CPU min cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("CPU max cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("CPU avg cpu", CsvSchema.ColumnType.NUMBER)
- .setAllowComments(true)
- .build()
- }
-}
diff --git a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt
deleted file mode 100644
index d86a0466..00000000
--- a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt
+++ /dev/null
@@ -1,246 +0,0 @@
-/*
- * Copyright (c) 2021 AtLarge Research
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package org.opendc.trace.azure
-
-import com.fasterxml.jackson.core.JsonToken
-import com.fasterxml.jackson.dataformat.csv.CsvParser
-import com.fasterxml.jackson.dataformat.csv.CsvSchema
-import org.opendc.trace.TableReader
-import org.opendc.trace.conv.resourceCpuCount
-import org.opendc.trace.conv.resourceID
-import org.opendc.trace.conv.resourceMemCapacity
-import org.opendc.trace.conv.resourceStartTime
-import org.opendc.trace.conv.resourceStopTime
-import java.time.Duration
-import java.time.Instant
-import java.util.UUID
-
-/**
- * A [TableReader] for the Azure v1 VM resources table.
- */
-internal class AzureResourceTableReader(private val parser: CsvParser) : TableReader {
- /**
- * A flag to indicate whether a single row has been read already.
- */
- private var isStarted = false
-
- init {
- parser.schema = schema
- }
-
- override fun nextRow(): Boolean {
- if (!isStarted) {
- isStarted = true
- }
-
- reset()
-
- if (!nextStart()) {
- return false
- }
-
- while (true) {
- val token = parser.nextValue()
-
- if (token == null || token == JsonToken.END_OBJECT) {
- break
- }
-
- when (parser.currentName) {
- "vm id" -> id = parser.text
- "timestamp vm created" -> startTime = Instant.ofEpochSecond(parser.longValue)
- "timestamp vm deleted" -> stopTime = Instant.ofEpochSecond(parser.longValue)
- "vm virtual core count" -> cpuCores = parser.intValue
- "vm memory" -> memCapacity = parser.doubleValue * 1e6 // GB to KB
- }
- }
-
- return true
- }
-
- private val colID = 0
- private val colStartTime = 1
- private val colStopTime = 2
- private val colCpuCount = 3
- private val colMemCapacity = 4
-
- override fun resolve(name: String): Int {
- return when (name) {
- resourceID -> colID
- resourceStartTime -> colStartTime
- resourceStopTime -> colStopTime
- resourceCpuCount -> colCpuCount
- resourceMemCapacity -> colMemCapacity
- else -> -1
- }
- }
-
- override fun isNull(index: Int): Boolean {
- require(index in 0..colMemCapacity) { "Invalid column index" }
- return false
- }
-
- override fun getBoolean(index: Int): Boolean {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun getInt(index: Int): Int {
- checkActive()
- return when (index) {
- colCpuCount -> cpuCores
- else -> throw IllegalArgumentException("Invalid column")
- }
- }
-
- override fun getLong(index: Int): Long {
- checkActive()
- return when (index) {
- colCpuCount -> cpuCores.toLong()
- else -> throw IllegalArgumentException("Invalid column")
- }
- }
-
- override fun getFloat(index: Int): Float {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun getDouble(index: Int): Double {
- checkActive()
- return when (index) {
- colMemCapacity -> memCapacity
- else -> throw IllegalArgumentException("Invalid column")
- }
- }
-
- override fun getString(index: Int): String? {
- checkActive()
- return when (index) {
- colID -> id
- else -> throw IllegalArgumentException("Invalid column")
- }
- }
-
- override fun getUUID(index: Int): UUID? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun getInstant(index: Int): Instant? {
- checkActive()
- return when (index) {
- colStartTime -> startTime
- colStopTime -> stopTime
- else -> throw IllegalArgumentException("Invalid column")
- }
- }
-
- override fun getDuration(index: Int): Duration? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun <T> getList(
- index: Int,
- elementType: Class<T>,
- ): List<T>? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun <T> getSet(
- index: Int,
- elementType: Class<T>,
- ): Set<T>? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun <K, V> getMap(
- index: Int,
- keyType: Class<K>,
- valueType: Class<V>,
- ): Map<K, V>? {
- throw IllegalArgumentException("Invalid column")
- }
-
- override fun close() {
- parser.close()
- }
-
- /**
- * Helper method to check if the reader is active.
- */
- private fun checkActive() {
- check(isStarted && !parser.isClosed) { "No active row. Did you call nextRow()?" }
- }
-
- /**
- * Advance the parser until the next object start.
- */
- private fun nextStart(): Boolean {
- var token = parser.nextValue()
-
- while (token != null && token != JsonToken.START_OBJECT) {
- token = parser.nextValue()
- }
-
- return token != null
- }
-
- /**
- * State fields of the reader.
- */
- private var id: String? = null
- private var startTime: Instant? = null
- private var stopTime: Instant? = null
- private var cpuCores = -1
- private var memCapacity = Double.NaN
-
- /**
- * Reset the state.
- */
- private fun reset() {
- id = null
- startTime = null
- stopTime = null
- cpuCores = -1
- memCapacity = Double.NaN
- }
-
- companion object {
- /**
- * The [CsvSchema] that is used to parse the trace.
- */
- private val schema =
- CsvSchema.builder()
- .addColumn("vm id", CsvSchema.ColumnType.NUMBER)
- .addColumn("subscription id", CsvSchema.ColumnType.STRING)
- .addColumn("deployment id", CsvSchema.ColumnType.NUMBER)
- .addColumn("timestamp vm created", CsvSchema.ColumnType.NUMBER)
- .addColumn("timestamp vm deleted", CsvSchema.ColumnType.NUMBER)
- .addColumn("max cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("avg cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("p95 cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("vm category", CsvSchema.ColumnType.NUMBER)
- .addColumn("vm virtual core count", CsvSchema.ColumnType.NUMBER)
- .addColumn("vm memory", CsvSchema.ColumnType.NUMBER)
- .setAllowComments(true)
- .build()
- }
-}
diff --git a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt
deleted file mode 100644
index a75da9d9..00000000
--- a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright (c) 2021 AtLarge Research
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package org.opendc.trace.azure
-
-import com.fasterxml.jackson.dataformat.csv.CsvFactory
-import com.fasterxml.jackson.dataformat.csv.CsvParser
-import org.opendc.trace.TableColumn
-import org.opendc.trace.TableColumnType
-import org.opendc.trace.TableReader
-import org.opendc.trace.TableWriter
-import org.opendc.trace.conv.TABLE_RESOURCES
-import org.opendc.trace.conv.TABLE_RESOURCE_STATES
-import org.opendc.trace.conv.resourceCpuCount
-import org.opendc.trace.conv.resourceID
-import org.opendc.trace.conv.resourceMemCapacity
-import org.opendc.trace.conv.resourceStartTime
-import org.opendc.trace.conv.resourceStateCpuUsagePct
-import org.opendc.trace.conv.resourceStateTimestamp
-import org.opendc.trace.conv.resourceStopTime
-import org.opendc.trace.spi.TableDetails
-import org.opendc.trace.spi.TraceFormat
-import org.opendc.trace.util.CompositeTableReader
-import java.nio.file.Files
-import java.nio.file.Path
-import java.util.stream.Collectors
-import java.util.zip.GZIPInputStream
-import kotlin.io.path.inputStream
-import kotlin.io.path.name
-
-/**
- * A format implementation for the Azure v1 format.
- */
-public class AzureTraceFormat : TraceFormat {
- /**
- * The name of this trace format.
- */
- override val name: String = "azure"
-
- /**
- * The [CsvFactory] used to create the parser.
- */
- private val factory =
- CsvFactory()
- .enable(CsvParser.Feature.ALLOW_COMMENTS)
- .enable(CsvParser.Feature.TRIM_SPACES)
-
- override fun create(path: Path) {
- throw UnsupportedOperationException("Writing not supported for this format")
- }
-
- override fun getTables(path: Path): List<String> = listOf(TABLE_RESOURCES, TABLE_RESOURCE_STATES)
-
- override fun getDetails(
- path: Path,
- table: String,
- ): TableDetails {
- return when (table) {
- TABLE_RESOURCES ->
- TableDetails(
- listOf(
- TableColumn(resourceID, TableColumnType.String),
- TableColumn(resourceStartTime, TableColumnType.Instant),
- TableColumn(resourceStopTime, TableColumnType.Instant),
- TableColumn(resourceCpuCount, TableColumnType.Int),
- TableColumn(resourceMemCapacity, TableColumnType.Double),
- ),
- )
- TABLE_RESOURCE_STATES ->
- TableDetails(
- listOf(
- TableColumn(resourceID, TableColumnType.String),
- TableColumn(resourceStateTimestamp, TableColumnType.Instant),
- TableColumn(resourceStateCpuUsagePct, TableColumnType.Double),
- ),
- )
- else -> throw IllegalArgumentException("Table $table not supported")
- }
- }
-
- override fun newReader(
- path: Path,
- table: String,
- projection: List<String>?,
- ): TableReader {
- return when (table) {
- TABLE_RESOURCES -> {
- val stream = GZIPInputStream(path.resolve("vmtable/vmtable.csv.gz").inputStream())
- AzureResourceTableReader(factory.createParser(stream))
- }
- TABLE_RESOURCE_STATES -> newResourceStateReader(path)
- else -> throw IllegalArgumentException("Table $table not supported")
- }
- }
-
- override fun newWriter(
- path: Path,
- table: String,
- ): TableWriter {
- throw UnsupportedOperationException("Writing not supported for this format")
- }
-
- /**
- * Construct a [TableReader] for reading over all VM CPU readings.
- */
- private fun newResourceStateReader(path: Path): TableReader {
- val partitions =
- Files.walk(path.resolve("vm_cpu_readings"), 1)
- .filter { !Files.isDirectory(it) && it.name.endsWith(".csv.gz") }
- .collect(Collectors.toMap({ it.name.removeSuffix(".csv.gz") }, { it }))
- .toSortedMap()
- val it = partitions.iterator()
-
- return object : CompositeTableReader() {
- override fun nextReader(): TableReader? {
- return if (it.hasNext()) {
- val (_, partPath) = it.next()
- val stream = GZIPInputStream(partPath.inputStream())
- return AzureResourceStateTableReader(factory.createParser(stream))
- } else {
- null
- }
- }
-
- override fun toString(): String = "AzureCompositeTableReader"
- }
- }
-}
diff --git a/opendc-trace/opendc-trace-azure/src/main/resources/META-INF/services/org.opendc.trace.spi.TraceFormat b/opendc-trace/opendc-trace-azure/src/main/resources/META-INF/services/org.opendc.trace.spi.TraceFormat
deleted file mode 100644
index 08e75529..00000000
--- a/opendc-trace/opendc-trace-azure/src/main/resources/META-INF/services/org.opendc.trace.spi.TraceFormat
+++ /dev/null
@@ -1 +0,0 @@
-org.opendc.trace.azure.AzureTraceFormat
diff --git a/opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt b/opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt
deleted file mode 100644
index 4fe96a8e..00000000
--- a/opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright (c) 2021 AtLarge Research
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package org.opendc.trace.azure
-
-import org.junit.jupiter.api.Assertions.assertAll
-import org.junit.jupiter.api.Assertions.assertDoesNotThrow
-import org.junit.jupiter.api.Assertions.assertEquals
-import org.junit.jupiter.api.Assertions.assertTrue
-import org.junit.jupiter.api.BeforeEach
-import org.junit.jupiter.api.DisplayName
-import org.junit.jupiter.api.Nested
-import org.junit.jupiter.api.Test
-import org.junit.jupiter.api.assertThrows
-import org.opendc.trace.TableColumn
-import org.opendc.trace.TableReader
-import org.opendc.trace.conv.TABLE_RESOURCES
-import org.opendc.trace.conv.TABLE_RESOURCE_STATES
-import org.opendc.trace.conv.resourceCpuCount
-import org.opendc.trace.conv.resourceID
-import org.opendc.trace.conv.resourceMemCapacity
-import org.opendc.trace.conv.resourceStateCpuUsagePct
-import org.opendc.trace.conv.resourceStateTimestamp
-import org.opendc.trace.testkit.TableReaderTestKit
-import java.nio.file.Paths
-
-/**
- * Test suite for the [AzureTraceFormat] class.
- */
-@DisplayName("Azure VM TraceFormat")
-class AzureTraceFormatTest {
- private val format = AzureTraceFormat()
-
- @Test
- fun testTables() {
- val path = Paths.get("src/test/resources/trace")
-
- assertEquals(listOf(TABLE_RESOURCES, TABLE_RESOURCE_STATES), format.getTables(path))
- }
-
- @Test
- fun testTableExists() {
- val path = Paths.get("src/test/resources/trace")
-
- assertDoesNotThrow { format.getDetails(path, TABLE_RESOURCE_STATES) }
- }
-
- @Test
- fun testTableDoesNotExist() {
- val path = Paths.get("src/test/resources/trace")
- assertThrows<IllegalArgumentException> { format.getDetails(path, "test") }
- }
-
- @Test
- fun testResources() {
- val path = Paths.get("src/test/resources/trace")
- val reader = format.newReader(path, TABLE_RESOURCES, null)
- assertAll(
- { assertTrue(reader.nextRow()) },
- { assertEquals("x/XsOfHO4ocsV99i4NluqKDuxctW2MMVmwqOPAlg4wp8mqbBOe3wxBlQo0+Qx+uf", reader.getString(resourceID)) },
- { assertEquals(1, reader.getInt(resourceCpuCount)) },
- { assertEquals(1750000.0, reader.getDouble(resourceMemCapacity)) },
- )
-
- reader.close()
- }
-
- @Test
- fun testSmoke() {
- val path = Paths.get("src/test/resources/trace")
- val reader = format.newReader(path, TABLE_RESOURCE_STATES, null)
-
- assertAll(
- { assertTrue(reader.nextRow()) },
- { assertEquals("+ZcrOp5/c/fJ6mVgP5qMZlOAGDwyjaaDNM0WoWOt2IDb47gT0UwK9lFwkPQv3C7Q", reader.getString(resourceID)) },
- { assertEquals(0, reader.getInstant(resourceStateTimestamp)?.epochSecond) },
- { assertEquals(0.0286979, reader.getDouble(resourceStateCpuUsagePct), 0.01) },
- )
-
- reader.close()
- }
-
- @DisplayName("TableReader for Resources")
- @Nested
- inner class ResourcesTableReaderTest : TableReaderTestKit() {
- override lateinit var reader: TableReader
- override lateinit var columns: List<TableColumn>
-
- @BeforeEach
- fun setUp() {
- val path = Paths.get("src/test/resources/trace")
-
- columns = format.getDetails(path, TABLE_RESOURCES).columns
- reader = format.newReader(path, TABLE_RESOURCES, null)
- }
- }
-
- @DisplayName("TableReader for Resource States")
- @Nested
- inner class ResourceStatesTableReaderTest : TableReaderTestKit() {
- override lateinit var reader: TableReader
- override lateinit var columns: List<TableColumn>
-
- @BeforeEach
- fun setUp() {
- val path = Paths.get("src/test/resources/trace")
-
- columns = format.getDetails(path, TABLE_RESOURCE_STATES).columns
- reader = format.newReader(path, TABLE_RESOURCE_STATES, null)
- }
- }
-}
diff --git a/opendc-trace/opendc-trace-azure/src/test/resources/trace/vm_cpu_readings/vm_cpu_readings-file-1-of-125.csv.gz b/opendc-trace/opendc-trace-azure/src/test/resources/trace/vm_cpu_readings/vm_cpu_readings-file-1-of-125.csv.gz
deleted file mode 100644
index 592c7316..00000000
--- a/opendc-trace/opendc-trace-azure/src/test/resources/trace/vm_cpu_readings/vm_cpu_readings-file-1-of-125.csv.gz
+++ /dev/null
Binary files differ
diff --git a/opendc-trace/opendc-trace-azure/src/test/resources/trace/vmtable/vmtable.csv.gz b/opendc-trace/opendc-trace-azure/src/test/resources/trace/vmtable/vmtable.csv.gz
deleted file mode 100644
index 0adc6b7e..00000000
--- a/opendc-trace/opendc-trace-azure/src/test/resources/trace/vmtable/vmtable.csv.gz
+++ /dev/null
Binary files differ