author     Dante Niewenhuis <d.niewenhuis@hotmail.com>    2024-03-05 13:23:57 +0100
committer  GitHub <noreply@github.com>                    2024-03-05 13:23:57 +0100
commit     5864cbcbfe2eb8c36ca05c3a39c7e5916aeecaec (patch)
tree       5b2773b8dc21c2e1b526fb70f829c376dd80532a /opendc-trace/opendc-trace-azure
parent     d28002a3c151d198298574312f32f1cb43f3a660 (diff)
Updated package versions, updated web server tests. (#207)
* Updated all package versions, including Kotlin. Updated all web-server tests to run.
* Changed the Java version of the tests. OpenDC now only supports Java 19.
* small update
* test update
* new update
* updated Docker version to 19
Diffstat (limited to 'opendc-trace/opendc-trace-azure')
-rw-r--r--  opendc-trace/opendc-trace-azure/build.gradle.kts                                                          2
-rw-r--r--  opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt             6
-rw-r--r--  opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt  59
-rw-r--r--  opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt       89
-rw-r--r--  opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt               76
-rw-r--r--  opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt           22
6 files changed, 145 insertions, 109 deletions
diff --git a/opendc-trace/opendc-trace-azure/build.gradle.kts b/opendc-trace/opendc-trace-azure/build.gradle.kts
index ee53c583..21b8b439 100644
--- a/opendc-trace/opendc-trace-azure/build.gradle.kts
+++ b/opendc-trace/opendc-trace-azure/build.gradle.kts
@@ -22,7 +22,7 @@
description = "Support for Azure VM traces in OpenDC"
-/* Build configuration */
+// Build configuration
plugins {
`kotlin-library-conventions`
`benchmark-conventions`
diff --git a/opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt b/opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt
index 6759f38a..bb3c2450 100644
--- a/opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt
+++ b/opendc-trace/opendc-trace-azure/src/jmh/kotlin/org/opendc/trace/azure/AzureTraceBenchmarks.kt
@@ -22,9 +22,9 @@
package org.opendc.trace.azure
-import org.opendc.trace.conv.RESOURCE_ID
import org.opendc.trace.conv.TABLE_RESOURCES
import org.opendc.trace.conv.TABLE_RESOURCE_STATES
+import org.opendc.trace.conv.resourceID
import org.opendc.trace.spi.TraceFormat
import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.Fork
@@ -58,7 +58,7 @@ class AzureTraceBenchmarks {
fun benchmarkResourcesReader(bh: Blackhole) {
val reader = format.newReader(path, TABLE_RESOURCES, null)
try {
- val idColumn = reader.resolve(RESOURCE_ID)
+ val idColumn = reader.resolve(resourceID)
while (reader.nextRow()) {
bh.consume(reader.getString(idColumn))
}
@@ -71,7 +71,7 @@ class AzureTraceBenchmarks {
fun benchmarkResourceStatesReader(bh: Blackhole) {
val reader = format.newReader(path, TABLE_RESOURCE_STATES, null)
try {
- val idColumn = reader.resolve(RESOURCE_ID)
+ val idColumn = reader.resolve(resourceID)
while (reader.nextRow()) {
bh.consume(reader.getString(idColumn))
}
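
The change in this file is purely a naming one: the column constants in org.opendc.trace.conv move from SCREAMING_SNAKE_CASE (RESOURCE_ID) to lowerCamelCase (resourceID). The access pattern is unchanged: resolve the column name to an index once, then read it on every row. A minimal sketch of that pattern against the resources table; the helper function and the trace path are illustrative assumptions, not part of this commit:

import org.opendc.trace.azure.AzureTraceFormat
import org.opendc.trace.conv.TABLE_RESOURCES
import org.opendc.trace.conv.resourceID
import java.nio.file.Paths

fun printVmIds() {
    // Assumed location of an extracted Azure VM trace; adjust to your setup.
    val reader = AzureTraceFormat().newReader(Paths.get("traces/azure"), TABLE_RESOURCES, null)
    try {
        // Resolve the renamed column constant to an index once, outside the loop,
        // exactly as the benchmark above does.
        val idColumn = reader.resolve(resourceID)
        while (reader.nextRow()) {
            println(reader.getString(idColumn))
        }
    } finally {
        reader.close()
    }
}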
diff --git a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt
index 0c60c75d..bcf6ff52 100644
--- a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt
+++ b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceStateTableReader.kt
@@ -26,9 +26,9 @@ import com.fasterxml.jackson.core.JsonToken
import com.fasterxml.jackson.dataformat.csv.CsvParser
import com.fasterxml.jackson.dataformat.csv.CsvSchema
import org.opendc.trace.TableReader
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceStateCpuUsagePct
+import org.opendc.trace.conv.resourceStateTimestamp
import java.time.Duration
import java.time.Instant
import java.util.UUID
@@ -74,21 +74,21 @@ internal class AzureResourceStateTableReader(private val parser: CsvParser) : Ta
return true
}
- private val COL_ID = 0
- private val COL_TIMESTAMP = 1
- private val COL_CPU_USAGE_PCT = 2
+ private val colID = 0
+ private val colTimestamp = 1
+ private val colCpuUsagePct = 2
override fun resolve(name: String): Int {
return when (name) {
- RESOURCE_ID -> COL_ID
- RESOURCE_STATE_TIMESTAMP -> COL_TIMESTAMP
- RESOURCE_STATE_CPU_USAGE_PCT -> COL_CPU_USAGE_PCT
+ resourceID -> colID
+ resourceStateTimestamp -> colTimestamp
+ resourceStateCpuUsagePct -> colCpuUsagePct
else -> -1
}
}
override fun isNull(index: Int): Boolean {
- require(index in 0..COL_CPU_USAGE_PCT) { "Invalid column index" }
+ require(index in 0..colCpuUsagePct) { "Invalid column index" }
return false
}
@@ -111,7 +111,7 @@ internal class AzureResourceStateTableReader(private val parser: CsvParser) : Ta
override fun getDouble(index: Int): Double {
checkActive()
return when (index) {
- COL_CPU_USAGE_PCT -> cpuUsagePct
+ colCpuUsagePct -> cpuUsagePct
else -> throw IllegalArgumentException("Invalid column")
}
}
@@ -119,7 +119,7 @@ internal class AzureResourceStateTableReader(private val parser: CsvParser) : Ta
override fun getString(index: Int): String? {
checkActive()
return when (index) {
- COL_ID -> id
+ colID -> id
else -> throw IllegalArgumentException("Invalid column")
}
}
@@ -131,7 +131,7 @@ internal class AzureResourceStateTableReader(private val parser: CsvParser) : Ta
override fun getInstant(index: Int): Instant? {
checkActive()
return when (index) {
- COL_TIMESTAMP -> timestamp
+ colTimestamp -> timestamp
else -> throw IllegalArgumentException("Invalid column")
}
}
@@ -140,15 +140,25 @@ internal class AzureResourceStateTableReader(private val parser: CsvParser) : Ta
throw IllegalArgumentException("Invalid column")
}
- override fun <T> getList(index: Int, elementType: Class<T>): List<T>? {
+ override fun <T> getList(
+ index: Int,
+ elementType: Class<T>,
+ ): List<T>? {
throw IllegalArgumentException("Invalid column")
}
- override fun <K, V> getMap(index: Int, keyType: Class<K>, valueType: Class<V>): Map<K, V>? {
+ override fun <K, V> getMap(
+ index: Int,
+ keyType: Class<K>,
+ valueType: Class<V>,
+ ): Map<K, V>? {
throw IllegalArgumentException("Invalid column")
}
- override fun <T> getSet(index: Int, elementType: Class<T>): Set<T>? {
+ override fun <T> getSet(
+ index: Int,
+ elementType: Class<T>,
+ ): Set<T>? {
throw IllegalArgumentException("Invalid column")
}
@@ -196,13 +206,14 @@ internal class AzureResourceStateTableReader(private val parser: CsvParser) : Ta
/**
* The [CsvSchema] that is used to parse the trace.
*/
- private val schema = CsvSchema.builder()
- .addColumn("timestamp", CsvSchema.ColumnType.NUMBER)
- .addColumn("vm id", CsvSchema.ColumnType.STRING)
- .addColumn("CPU min cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("CPU max cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("CPU avg cpu", CsvSchema.ColumnType.NUMBER)
- .setAllowComments(true)
- .build()
+ private val schema =
+ CsvSchema.builder()
+ .addColumn("timestamp", CsvSchema.ColumnType.NUMBER)
+ .addColumn("vm id", CsvSchema.ColumnType.STRING)
+ .addColumn("CPU min cpu", CsvSchema.ColumnType.NUMBER)
+ .addColumn("CPU max cpu", CsvSchema.ColumnType.NUMBER)
+ .addColumn("CPU avg cpu", CsvSchema.ColumnType.NUMBER)
+ .setAllowComments(true)
+ .build()
}
}
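
AzureResourceStateTableReader maps the trace's vm_cpu_readings CSV columns onto three logical columns (id, timestamp, CPU usage percentage); its typed getters throw IllegalArgumentException for any other index. A sketch of reading the resource-state table through that interface, reusing the same hypothetical trace path as above:

import org.opendc.trace.azure.AzureTraceFormat
import org.opendc.trace.conv.TABLE_RESOURCE_STATES
import org.opendc.trace.conv.resourceID
import org.opendc.trace.conv.resourceStateCpuUsagePct
import org.opendc.trace.conv.resourceStateTimestamp
import java.nio.file.Paths

fun dumpCpuReadings() {
    val reader = AzureTraceFormat().newReader(Paths.get("traces/azure"), TABLE_RESOURCE_STATES, null)
    try {
        val idCol = reader.resolve(resourceID)
        val tsCol = reader.resolve(resourceStateTimestamp)
        val cpuCol = reader.resolve(resourceStateCpuUsagePct)
        while (reader.nextRow()) {
            // Each column has one matching typed getter: String for the id,
            // Instant for the timestamp, Double for the CPU usage percentage.
            println("${reader.getString(idCol)} ${reader.getInstant(tsCol)} ${reader.getDouble(cpuCol)}")
        }
    } finally {
        reader.close()
    }
}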
diff --git a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt
index c0acb67a..d86a0466 100644
--- a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt
+++ b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureResourceTableReader.kt
@@ -26,11 +26,11 @@ import com.fasterxml.jackson.core.JsonToken
import com.fasterxml.jackson.dataformat.csv.CsvParser
import com.fasterxml.jackson.dataformat.csv.CsvSchema
import org.opendc.trace.TableReader
-import org.opendc.trace.conv.RESOURCE_CPU_COUNT
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_MEM_CAPACITY
-import org.opendc.trace.conv.RESOURCE_START_TIME
-import org.opendc.trace.conv.RESOURCE_STOP_TIME
+import org.opendc.trace.conv.resourceCpuCount
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceMemCapacity
+import org.opendc.trace.conv.resourceStartTime
+import org.opendc.trace.conv.resourceStopTime
import java.time.Duration
import java.time.Instant
import java.util.UUID
@@ -78,25 +78,25 @@ internal class AzureResourceTableReader(private val parser: CsvParser) : TableRe
return true
}
- private val COL_ID = 0
- private val COL_START_TIME = 1
- private val COL_STOP_TIME = 2
- private val COL_CPU_COUNT = 3
- private val COL_MEM_CAPACITY = 4
+ private val colID = 0
+ private val colStartTime = 1
+ private val colStopTime = 2
+ private val colCpuCount = 3
+ private val colMemCapacity = 4
override fun resolve(name: String): Int {
return when (name) {
- RESOURCE_ID -> COL_ID
- RESOURCE_START_TIME -> COL_START_TIME
- RESOURCE_STOP_TIME -> COL_STOP_TIME
- RESOURCE_CPU_COUNT -> COL_CPU_COUNT
- RESOURCE_MEM_CAPACITY -> COL_MEM_CAPACITY
+ resourceID -> colID
+ resourceStartTime -> colStartTime
+ resourceStopTime -> colStopTime
+ resourceCpuCount -> colCpuCount
+ resourceMemCapacity -> colMemCapacity
else -> -1
}
}
override fun isNull(index: Int): Boolean {
- require(index in 0..COL_MEM_CAPACITY) { "Invalid column index" }
+ require(index in 0..colMemCapacity) { "Invalid column index" }
return false
}
@@ -107,7 +107,7 @@ internal class AzureResourceTableReader(private val parser: CsvParser) : TableRe
override fun getInt(index: Int): Int {
checkActive()
return when (index) {
- COL_CPU_COUNT -> cpuCores
+ colCpuCount -> cpuCores
else -> throw IllegalArgumentException("Invalid column")
}
}
@@ -115,7 +115,7 @@ internal class AzureResourceTableReader(private val parser: CsvParser) : TableRe
override fun getLong(index: Int): Long {
checkActive()
return when (index) {
- COL_CPU_COUNT -> cpuCores.toLong()
+ colCpuCount -> cpuCores.toLong()
else -> throw IllegalArgumentException("Invalid column")
}
}
@@ -127,7 +127,7 @@ internal class AzureResourceTableReader(private val parser: CsvParser) : TableRe
override fun getDouble(index: Int): Double {
checkActive()
return when (index) {
- COL_MEM_CAPACITY -> memCapacity
+ colMemCapacity -> memCapacity
else -> throw IllegalArgumentException("Invalid column")
}
}
@@ -135,7 +135,7 @@ internal class AzureResourceTableReader(private val parser: CsvParser) : TableRe
override fun getString(index: Int): String? {
checkActive()
return when (index) {
- COL_ID -> id
+ colID -> id
else -> throw IllegalArgumentException("Invalid column")
}
}
@@ -147,8 +147,8 @@ internal class AzureResourceTableReader(private val parser: CsvParser) : TableRe
override fun getInstant(index: Int): Instant? {
checkActive()
return when (index) {
- COL_START_TIME -> startTime
- COL_STOP_TIME -> stopTime
+ colStartTime -> startTime
+ colStopTime -> stopTime
else -> throw IllegalArgumentException("Invalid column")
}
}
@@ -157,15 +157,25 @@ internal class AzureResourceTableReader(private val parser: CsvParser) : TableRe
throw IllegalArgumentException("Invalid column")
}
- override fun <T> getList(index: Int, elementType: Class<T>): List<T>? {
+ override fun <T> getList(
+ index: Int,
+ elementType: Class<T>,
+ ): List<T>? {
throw IllegalArgumentException("Invalid column")
}
- override fun <T> getSet(index: Int, elementType: Class<T>): Set<T>? {
+ override fun <T> getSet(
+ index: Int,
+ elementType: Class<T>,
+ ): Set<T>? {
throw IllegalArgumentException("Invalid column")
}
- override fun <K, V> getMap(index: Int, keyType: Class<K>, valueType: Class<V>): Map<K, V>? {
+ override fun <K, V> getMap(
+ index: Int,
+ keyType: Class<K>,
+ valueType: Class<V>,
+ ): Map<K, V>? {
throw IllegalArgumentException("Invalid column")
}
@@ -217,19 +227,20 @@ internal class AzureResourceTableReader(private val parser: CsvParser) : TableRe
/**
* The [CsvSchema] that is used to parse the trace.
*/
- private val schema = CsvSchema.builder()
- .addColumn("vm id", CsvSchema.ColumnType.NUMBER)
- .addColumn("subscription id", CsvSchema.ColumnType.STRING)
- .addColumn("deployment id", CsvSchema.ColumnType.NUMBER)
- .addColumn("timestamp vm created", CsvSchema.ColumnType.NUMBER)
- .addColumn("timestamp vm deleted", CsvSchema.ColumnType.NUMBER)
- .addColumn("max cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("avg cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("p95 cpu", CsvSchema.ColumnType.NUMBER)
- .addColumn("vm category", CsvSchema.ColumnType.NUMBER)
- .addColumn("vm virtual core count", CsvSchema.ColumnType.NUMBER)
- .addColumn("vm memory", CsvSchema.ColumnType.NUMBER)
- .setAllowComments(true)
- .build()
+ private val schema =
+ CsvSchema.builder()
+ .addColumn("vm id", CsvSchema.ColumnType.NUMBER)
+ .addColumn("subscription id", CsvSchema.ColumnType.STRING)
+ .addColumn("deployment id", CsvSchema.ColumnType.NUMBER)
+ .addColumn("timestamp vm created", CsvSchema.ColumnType.NUMBER)
+ .addColumn("timestamp vm deleted", CsvSchema.ColumnType.NUMBER)
+ .addColumn("max cpu", CsvSchema.ColumnType.NUMBER)
+ .addColumn("avg cpu", CsvSchema.ColumnType.NUMBER)
+ .addColumn("p95 cpu", CsvSchema.ColumnType.NUMBER)
+ .addColumn("vm category", CsvSchema.ColumnType.NUMBER)
+ .addColumn("vm virtual core count", CsvSchema.ColumnType.NUMBER)
+ .addColumn("vm memory", CsvSchema.ColumnType.NUMBER)
+ .setAllowComments(true)
+ .build()
}
}
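
The resources table exposes per-VM metadata: start and stop time as Instants, the core count as an Int (or Long, per the two getters above), and memory capacity as a Double. A sketch that derives each VM's lifetime from those columns, under the same assumed trace path; the helper name is hypothetical:

import org.opendc.trace.azure.AzureTraceFormat
import org.opendc.trace.conv.TABLE_RESOURCES
import org.opendc.trace.conv.resourceCpuCount
import org.opendc.trace.conv.resourceMemCapacity
import org.opendc.trace.conv.resourceStartTime
import org.opendc.trace.conv.resourceStopTime
import java.nio.file.Paths
import java.time.Duration

fun summarizeVms() {
    val reader = AzureTraceFormat().newReader(Paths.get("traces/azure"), TABLE_RESOURCES, null)
    try {
        val cpuCol = reader.resolve(resourceCpuCount)
        val memCol = reader.resolve(resourceMemCapacity)
        val startCol = reader.resolve(resourceStartTime)
        val stopCol = reader.resolve(resourceStopTime)
        while (reader.nextRow()) {
            val start = reader.getInstant(startCol)
            val stop = reader.getInstant(stopCol)
            // The getters are nullable, so guard before computing the lifetime.
            val lifetime = if (start != null && stop != null) Duration.between(start, stop) else null
            // The memory unit is not stated in this diff; the test below expects
            // 1750000.0, which suggests kilobytes.
            println("${reader.getInt(cpuCol)} cores, ${reader.getDouble(memCol)} memory, alive $lifetime")
        }
    } finally {
        reader.close()
    }
}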
diff --git a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt
index 3f64c640..a75da9d9 100644
--- a/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt
+++ b/opendc-trace/opendc-trace-azure/src/main/kotlin/org/opendc/trace/azure/AzureTraceFormat.kt
@@ -28,15 +28,15 @@ import org.opendc.trace.TableColumn
import org.opendc.trace.TableColumnType
import org.opendc.trace.TableReader
import org.opendc.trace.TableWriter
-import org.opendc.trace.conv.RESOURCE_CPU_COUNT
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_MEM_CAPACITY
-import org.opendc.trace.conv.RESOURCE_START_TIME
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
-import org.opendc.trace.conv.RESOURCE_STOP_TIME
import org.opendc.trace.conv.TABLE_RESOURCES
import org.opendc.trace.conv.TABLE_RESOURCE_STATES
+import org.opendc.trace.conv.resourceCpuCount
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceMemCapacity
+import org.opendc.trace.conv.resourceStartTime
+import org.opendc.trace.conv.resourceStateCpuUsagePct
+import org.opendc.trace.conv.resourceStateTimestamp
+import org.opendc.trace.conv.resourceStopTime
import org.opendc.trace.spi.TableDetails
import org.opendc.trace.spi.TraceFormat
import org.opendc.trace.util.CompositeTableReader
@@ -59,9 +59,10 @@ public class AzureTraceFormat : TraceFormat {
/**
* The [CsvFactory] used to create the parser.
*/
- private val factory = CsvFactory()
- .enable(CsvParser.Feature.ALLOW_COMMENTS)
- .enable(CsvParser.Feature.TRIM_SPACES)
+ private val factory =
+ CsvFactory()
+ .enable(CsvParser.Feature.ALLOW_COMMENTS)
+ .enable(CsvParser.Feature.TRIM_SPACES)
override fun create(path: Path) {
throw UnsupportedOperationException("Writing not supported for this format")
@@ -69,29 +70,38 @@ public class AzureTraceFormat : TraceFormat {
override fun getTables(path: Path): List<String> = listOf(TABLE_RESOURCES, TABLE_RESOURCE_STATES)
- override fun getDetails(path: Path, table: String): TableDetails {
+ override fun getDetails(
+ path: Path,
+ table: String,
+ ): TableDetails {
return when (table) {
- TABLE_RESOURCES -> TableDetails(
- listOf(
- TableColumn(RESOURCE_ID, TableColumnType.String),
- TableColumn(RESOURCE_START_TIME, TableColumnType.Instant),
- TableColumn(RESOURCE_STOP_TIME, TableColumnType.Instant),
- TableColumn(RESOURCE_CPU_COUNT, TableColumnType.Int),
- TableColumn(RESOURCE_MEM_CAPACITY, TableColumnType.Double)
+ TABLE_RESOURCES ->
+ TableDetails(
+ listOf(
+ TableColumn(resourceID, TableColumnType.String),
+ TableColumn(resourceStartTime, TableColumnType.Instant),
+ TableColumn(resourceStopTime, TableColumnType.Instant),
+ TableColumn(resourceCpuCount, TableColumnType.Int),
+ TableColumn(resourceMemCapacity, TableColumnType.Double),
+ ),
)
- )
- TABLE_RESOURCE_STATES -> TableDetails(
- listOf(
- TableColumn(RESOURCE_ID, TableColumnType.String),
- TableColumn(RESOURCE_STATE_TIMESTAMP, TableColumnType.Instant),
- TableColumn(RESOURCE_STATE_CPU_USAGE_PCT, TableColumnType.Double)
+ TABLE_RESOURCE_STATES ->
+ TableDetails(
+ listOf(
+ TableColumn(resourceID, TableColumnType.String),
+ TableColumn(resourceStateTimestamp, TableColumnType.Instant),
+ TableColumn(resourceStateCpuUsagePct, TableColumnType.Double),
+ ),
)
- )
else -> throw IllegalArgumentException("Table $table not supported")
}
}
- override fun newReader(path: Path, table: String, projection: List<String>?): TableReader {
+ override fun newReader(
+ path: Path,
+ table: String,
+ projection: List<String>?,
+ ): TableReader {
return when (table) {
TABLE_RESOURCES -> {
val stream = GZIPInputStream(path.resolve("vmtable/vmtable.csv.gz").inputStream())
@@ -102,7 +112,10 @@ public class AzureTraceFormat : TraceFormat {
}
}
- override fun newWriter(path: Path, table: String): TableWriter {
+ override fun newWriter(
+ path: Path,
+ table: String,
+ ): TableWriter {
throw UnsupportedOperationException("Writing not supported for this format")
}
@@ -110,10 +123,11 @@ public class AzureTraceFormat : TraceFormat {
* Construct a [TableReader] for reading over all VM CPU readings.
*/
private fun newResourceStateReader(path: Path): TableReader {
- val partitions = Files.walk(path.resolve("vm_cpu_readings"), 1)
- .filter { !Files.isDirectory(it) && it.name.endsWith(".csv.gz") }
- .collect(Collectors.toMap({ it.name.removeSuffix(".csv.gz") }, { it }))
- .toSortedMap()
+ val partitions =
+ Files.walk(path.resolve("vm_cpu_readings"), 1)
+ .filter { !Files.isDirectory(it) && it.name.endsWith(".csv.gz") }
+ .collect(Collectors.toMap({ it.name.removeSuffix(".csv.gz") }, { it }))
+ .toSortedMap()
val it = partitions.iterator()
return object : CompositeTableReader() {
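
AzureTraceFormat stitches the per-partition vm_cpu_readings files into a single CompositeTableReader and advertises its schema through getTables and getDetails. A sketch that introspects both tables; note that the columns, name, and type properties are assumptions inferred from the TableDetails(listOf(TableColumn(name, type))) constructor calls shown above, not confirmed API:

import org.opendc.trace.azure.AzureTraceFormat
import java.nio.file.Paths

fun describeTrace() {
    val format = AzureTraceFormat()
    val path = Paths.get("traces/azure") // assumed trace location
    for (table in format.getTables(path)) { // TABLE_RESOURCES, TABLE_RESOURCE_STATES
        println(table)
        // Property names below are assumed from the constructors in the diff above.
        for (column in format.getDetails(path, table).columns) {
            println("  ${column.name}: ${column.type}")
        }
    }
}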
diff --git a/opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt b/opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt
index 00cdc174..4fe96a8e 100644
--- a/opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt
+++ b/opendc-trace/opendc-trace-azure/src/test/kotlin/org/opendc/trace/azure/AzureTraceFormatTest.kt
@@ -33,13 +33,13 @@ import org.junit.jupiter.api.Test
import org.junit.jupiter.api.assertThrows
import org.opendc.trace.TableColumn
import org.opendc.trace.TableReader
-import org.opendc.trace.conv.RESOURCE_CPU_COUNT
-import org.opendc.trace.conv.RESOURCE_ID
-import org.opendc.trace.conv.RESOURCE_MEM_CAPACITY
-import org.opendc.trace.conv.RESOURCE_STATE_CPU_USAGE_PCT
-import org.opendc.trace.conv.RESOURCE_STATE_TIMESTAMP
import org.opendc.trace.conv.TABLE_RESOURCES
import org.opendc.trace.conv.TABLE_RESOURCE_STATES
+import org.opendc.trace.conv.resourceCpuCount
+import org.opendc.trace.conv.resourceID
+import org.opendc.trace.conv.resourceMemCapacity
+import org.opendc.trace.conv.resourceStateCpuUsagePct
+import org.opendc.trace.conv.resourceStateTimestamp
import org.opendc.trace.testkit.TableReaderTestKit
import java.nio.file.Paths
@@ -76,9 +76,9 @@ class AzureTraceFormatTest {
val reader = format.newReader(path, TABLE_RESOURCES, null)
assertAll(
{ assertTrue(reader.nextRow()) },
- { assertEquals("x/XsOfHO4ocsV99i4NluqKDuxctW2MMVmwqOPAlg4wp8mqbBOe3wxBlQo0+Qx+uf", reader.getString(RESOURCE_ID)) },
- { assertEquals(1, reader.getInt(RESOURCE_CPU_COUNT)) },
- { assertEquals(1750000.0, reader.getDouble(RESOURCE_MEM_CAPACITY)) }
+ { assertEquals("x/XsOfHO4ocsV99i4NluqKDuxctW2MMVmwqOPAlg4wp8mqbBOe3wxBlQo0+Qx+uf", reader.getString(resourceID)) },
+ { assertEquals(1, reader.getInt(resourceCpuCount)) },
+ { assertEquals(1750000.0, reader.getDouble(resourceMemCapacity)) },
)
reader.close()
@@ -91,9 +91,9 @@ class AzureTraceFormatTest {
assertAll(
{ assertTrue(reader.nextRow()) },
- { assertEquals("+ZcrOp5/c/fJ6mVgP5qMZlOAGDwyjaaDNM0WoWOt2IDb47gT0UwK9lFwkPQv3C7Q", reader.getString(RESOURCE_ID)) },
- { assertEquals(0, reader.getInstant(RESOURCE_STATE_TIMESTAMP)?.epochSecond) },
- { assertEquals(0.0286979, reader.getDouble(RESOURCE_STATE_CPU_USAGE_PCT), 0.01) }
+ { assertEquals("+ZcrOp5/c/fJ6mVgP5qMZlOAGDwyjaaDNM0WoWOt2IDb47gT0UwK9lFwkPQv3C7Q", reader.getString(resourceID)) },
+ { assertEquals(0, reader.getInstant(resourceStateTimestamp)?.epochSecond) },
+ { assertEquals(0.0286979, reader.getDouble(resourceStateCpuUsagePct), 0.01) },
)
reader.close()
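
Note the two access styles visible in this commit: the tests read columns by name (reader.getString(resourceID)), while the benchmarks resolve a name to an index once and reuse it on every row. Both rely on the renamed lowerCamelCase constants. A minimal sketch of the two styles side by side, using only calls that appear in this diff; the helper functions are illustrative:

import org.opendc.trace.TableReader
import org.opendc.trace.conv.resourceID

// Name-based access, as in the tests: convenient for one-off reads.
fun firstId(reader: TableReader): String? {
    return if (reader.nextRow()) reader.getString(resourceID) else null
}

// Index-based access, as in the benchmarks: resolve once, reuse per row.
fun allIds(reader: TableReader): List<String?> {
    val idColumn = reader.resolve(resourceID)
    val ids = mutableListOf<String?>()
    while (reader.nextRow()) {
        ids += reader.getString(idColumn)
    }
    return ids
}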