summaryrefslogtreecommitdiff
path: root/opendc-compute/opendc-compute-workload/src/test
diff options
context:
space:
mode:
authorFabian Mastenbroek <mail.fabianm@gmail.com>2022-05-02 16:06:44 +0200
committerGitHub <noreply@github.com>2022-05-02 16:06:44 +0200
commitc78285f6346236053979aa98113ba9e6d7efb21e (patch)
tree44221b3a39516a235a0b41adf525a79a60abb998 /opendc-compute/opendc-compute-workload/src/test
parent44ddd27a745f2dfe4b6ffef1b7657d156dd61489 (diff)
parente4d3a8add5388182cf7a12b1099678a0b769b106 (diff)
merge: Add support for SQL via Apache Calcite (#78)
This pull request integrates initial support for SQL queries via Apache Calcite into the OpenDC codebase. Our vision is that users of OpenDC should be able to use SQL queries to access and process most of the experiment data generated by simulations. This pull request moves towards this goal by adding the ability to query workload traces supported by OpenDC using SQL. We also provide a CLI for querying the data in workload traces via `opendc-trace-tools`: ```bash opendc-trace-tools query -i data/bitbrains-small -f opendc-vm "SELECT MAX(cpu_count) FROM resource_states" ``` ## Implementation Notes :hammer_and_pick: * Add Calcite (SQL) integration * Add support for writing via SQL * Support custom Parquet ReadSupport implementations * Read records using low-level Parquet API * Do not use Avro when exporting experiment data * Do not use Avro when reading WTF trace * Drop dependency on Avro * Add support for projections ## External Dependencies :four_leaf_clover: * Apache Calcite ## Breaking API Changes :warning: * The existing code for reading Parquet traces using Apache Avro has been removed. * `TraceFormat.newReader` now accepts a nullable `projection` parameter
Diffstat (limited to 'opendc-compute/opendc-compute-workload/src/test')
-rw-r--r--opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/HostDataWriterTest.kt79
-rw-r--r--opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/ServerDataWriterTest.kt73
-rw-r--r--opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/ServiceDataWriterTest.kt67
3 files changed, 219 insertions, 0 deletions
diff --git a/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/HostDataWriterTest.kt b/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/HostDataWriterTest.kt
new file mode 100644
index 00000000..dae03513
--- /dev/null
+++ b/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/HostDataWriterTest.kt
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2022 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.compute.workload.export.parquet
+
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.assertDoesNotThrow
+import org.opendc.telemetry.compute.table.HostInfo
+import org.opendc.telemetry.compute.table.HostTableReader
+import java.nio.file.Files
+import java.time.Instant
+
+/**
+ * Test suite for [ParquetHostDataWriter]
+ */
+class HostDataWriterTest {
+ /**
+ * The path to write the data file to.
+ */
+ private val path = Files.createTempFile("opendc", "parquet")
+
+ /**
+ * The writer used to write the data.
+ */
+ private val writer = ParquetHostDataWriter(path.toFile(), bufferSize = 4096)
+
+ @AfterEach
+ fun tearDown() {
+ writer.close()
+ Files.deleteIfExists(path)
+ }
+
+ @Test
+ fun testSmoke() {
+ assertDoesNotThrow {
+ writer.write(object : HostTableReader {
+ override val timestamp: Instant = Instant.now()
+ override val host: HostInfo = HostInfo("id", "test", "x86", 4, 4096)
+ override val guestsTerminated: Int = 0
+ override val guestsRunning: Int = 0
+ override val guestsError: Int = 0
+ override val guestsInvalid: Int = 0
+ override val cpuLimit: Double = 4096.0
+ override val cpuUsage: Double = 1.0
+ override val cpuDemand: Double = 1.0
+ override val cpuUtilization: Double = 0.0
+ override val cpuActiveTime: Long = 1
+ override val cpuIdleTime: Long = 1
+ override val cpuStealTime: Long = 1
+ override val cpuLostTime: Long = 1
+ override val powerUsage: Double = 1.0
+ override val powerTotal: Double = 1.0
+ override val uptime: Long = 1
+ override val downtime: Long = 1
+ override val bootTime: Instant? = null
+ })
+ }
+ }
+}
diff --git a/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/ServerDataWriterTest.kt b/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/ServerDataWriterTest.kt
new file mode 100644
index 00000000..280f5ef8
--- /dev/null
+++ b/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/ServerDataWriterTest.kt
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2022 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.compute.workload.export.parquet
+
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.assertDoesNotThrow
+import org.opendc.telemetry.compute.table.HostInfo
+import org.opendc.telemetry.compute.table.ServerInfo
+import org.opendc.telemetry.compute.table.ServerTableReader
+import java.nio.file.Files
+import java.time.Instant
+
+/**
+ * Test suite for [ParquetServerDataWriter]
+ */
+class ServerDataWriterTest {
+ /**
+ * The path to write the data file to.
+ */
+ private val path = Files.createTempFile("opendc", "parquet")
+
+ /**
+ * The writer used to write the data.
+ */
+ private val writer = ParquetServerDataWriter(path.toFile(), bufferSize = 4096)
+
+ @AfterEach
+ fun tearDown() {
+ writer.close()
+ Files.deleteIfExists(path)
+ }
+
+ @Test
+ fun testSmoke() {
+ assertDoesNotThrow {
+ writer.write(object : ServerTableReader {
+ override val timestamp: Instant = Instant.now()
+ override val server: ServerInfo = ServerInfo("id", "test", "vm", "x86", "test", "test", 2, 4096)
+ override val host: HostInfo = HostInfo("id", "test", "x86", 4, 4096)
+ override val cpuLimit: Double = 4096.0
+ override val cpuActiveTime: Long = 1
+ override val cpuIdleTime: Long = 1
+ override val cpuStealTime: Long = 1
+ override val cpuLostTime: Long = 1
+ override val uptime: Long = 1
+ override val downtime: Long = 1
+ override val provisionTime: Instant = timestamp
+ override val bootTime: Instant? = null
+ })
+ }
+ }
+}
diff --git a/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/ServiceDataWriterTest.kt b/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/ServiceDataWriterTest.kt
new file mode 100644
index 00000000..7ffa7186
--- /dev/null
+++ b/opendc-compute/opendc-compute-workload/src/test/kotlin/org/opendc/compute/workload/export/parquet/ServiceDataWriterTest.kt
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2022 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.compute.workload.export.parquet
+
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.assertDoesNotThrow
+import org.opendc.telemetry.compute.table.ServiceTableReader
+import java.nio.file.Files
+import java.time.Instant
+
+/**
+ * Test suite for [ParquetServiceDataWriter]
+ */
+class ServiceDataWriterTest {
+ /**
+ * The path to write the data file to.
+ */
+ private val path = Files.createTempFile("opendc", "parquet")
+
+ /**
+ * The writer used to write the data.
+ */
+ private val writer = ParquetServiceDataWriter(path.toFile(), bufferSize = 4096)
+
+ @AfterEach
+ fun tearDown() {
+ writer.close()
+ Files.deleteIfExists(path)
+ }
+
+ @Test
+ fun testSmoke() {
+ assertDoesNotThrow {
+ writer.write(object : ServiceTableReader {
+ override val timestamp: Instant = Instant.now()
+ override val hostsUp: Int = 1
+ override val hostsDown: Int = 0
+ override val serversPending: Int = 1
+ override val serversActive: Int = 1
+ override val attemptsSuccess: Int = 1
+ override val attemptsFailure: Int = 0
+ override val attemptsError: Int = 0
+ })
+ }
+ }
+}