From d6190d20d0de9b5e8258dfe626fbe0d6bf48ba15 Mon Sep 17 00:00:00 2001
From: Fabian Mastenbroek
Date: Tue, 8 Jun 2021 22:52:32 +0200
Subject: test: Fix logging warning for tests

This change fixes the SLF4J logging warnings that occur during the
tests.
---
 opendc-format/build.gradle.kts | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

(limited to 'opendc-format')

diff --git a/opendc-format/build.gradle.kts b/opendc-format/build.gradle.kts
index e95cb666..6c87dd99 100644
--- a/opendc-format/build.gradle.kts
+++ b/opendc-format/build.gradle.kts
@@ -39,10 +39,11 @@ dependencies {
         exclude(group = "org.jetbrains.kotlin", module = "kotlin-reflect")
     }
     implementation(kotlin("reflect"))
-
     implementation(libs.parquet)
     implementation(libs.hadoop.client) {
         exclude(group = "org.slf4j", module = "slf4j-log4j12")
         exclude(group = "log4j")
     }
+
+    testRuntimeOnly(libs.slf4j.simple)
 }
--
cgit v1.2.3
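For context: SLF4J prints a "Failed to load class org.slf4j.impl.StaticLoggerBinder" warning when no logger binding is present on the classpath, which is what this patch silences by adding a binding to the test runtime only. `libs.slf4j.simple` is a version-catalog alias; a minimal sketch of the equivalent declaration without a catalog follows, where the exact version is an assumption rather than the one the catalog actually pins:

```kotlin
dependencies {
    // Assumed coordinates/version for illustration only; the real version
    // is resolved from the project's version catalog (libs.versions.toml).
    testRuntimeOnly("org.slf4j:slf4j-simple:1.7.30")
}
```

Using `testRuntimeOnly` rather than `implementation` keeps the binding off the published compile and runtime classpaths, so downstream consumers remain free to choose their own SLF4J backend.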
From 9097811e0ac6872de3e4ff5f521d8859870b1000 Mon Sep 17 00:00:00 2001
From: Fabian Mastenbroek
Date: Tue, 8 Jun 2021 23:42:48 +0200
Subject: format: Add implementation of local Parquet InputFile

This change adds an implementation of Parquet's InputFile interface for
local files, in order to eliminate the dependency on the entire Hadoop
system. The implementation allows users to read Parquet files locally
without needing a Hadoop filesystem implementation.
---
 opendc-format/build.gradle.kts                   |   2 +-
 .../org/opendc/format/util/LocalInputFile.kt     | 101 +++++++++++++++++++
 .../org/opendc/format/util/LocalParquetReader.kt | 112 +++++++++++++++++++++
 3 files changed, 214 insertions(+), 1 deletion(-)
 create mode 100644 opendc-format/src/main/kotlin/org/opendc/format/util/LocalInputFile.kt
 create mode 100644 opendc-format/src/main/kotlin/org/opendc/format/util/LocalParquetReader.kt

(limited to 'opendc-format')

diff --git a/opendc-format/build.gradle.kts b/opendc-format/build.gradle.kts
index 6c87dd99..c0ffeb3e 100644
--- a/opendc-format/build.gradle.kts
+++ b/opendc-format/build.gradle.kts
@@ -44,6 +44,6 @@ dependencies {
         exclude(group = "org.slf4j", module = "slf4j-log4j12")
         exclude(group = "log4j")
     }
-
+
     testRuntimeOnly(libs.slf4j.simple)
 }
diff --git a/opendc-format/src/main/kotlin/org/opendc/format/util/LocalInputFile.kt b/opendc-format/src/main/kotlin/org/opendc/format/util/LocalInputFile.kt
new file mode 100644
index 00000000..d8c25a62
--- /dev/null
+++ b/opendc-format/src/main/kotlin/org/opendc/format/util/LocalInputFile.kt
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2021 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.format.util
+
+import org.apache.parquet.io.InputFile
+import org.apache.parquet.io.SeekableInputStream
+import java.io.EOFException
+import java.nio.ByteBuffer
+import java.nio.channels.FileChannel
+import java.nio.file.Path
+import java.nio.file.StandardOpenOption
+
+/**
+ * An [InputFile] on the local filesystem.
+ */
+public class LocalInputFile(private val path: Path) : InputFile {
+    /**
+     * The [FileChannel] used for accessing the input path.
+     */
+    private val channel = FileChannel.open(path, StandardOpenOption.READ)
+
+    override fun getLength(): Long = channel.size()
+
+    override fun newStream(): SeekableInputStream = object : SeekableInputStream() {
+        override fun read(buf: ByteBuffer): Int {
+            return channel.read(buf)
+        }
+
+        override fun read(): Int {
+            val single = ByteBuffer.allocate(1)
+            var read: Int
+
+            // ReadableByteChannel#read might read zero bytes so continue until we read at least one byte
+            do {
+                read = channel.read(single)
+            } while (read == 0)
+
+            return if (read == -1) {
+                read
+            } else {
+                single.get(0).toInt() and 0xff
+            }
+        }
+
+        override fun getPos(): Long {
+            return channel.position()
+        }
+
+        override fun seek(newPos: Long) {
+            channel.position(newPos)
+        }
+
+        override fun readFully(bytes: ByteArray) {
+            readFully(ByteBuffer.wrap(bytes))
+        }
+
+        override fun readFully(bytes: ByteArray, start: Int, len: Int) {
+            readFully(ByteBuffer.wrap(bytes, start, len))
+        }
+
+        override fun readFully(buf: ByteBuffer) {
+            var remainder = buf.remaining()
+            while (remainder > 0) {
+                val read = channel.read(buf)
+                remainder -= read
+
+                if (read == -1 && remainder > 0) {
+                    throw EOFException()
+                }
+            }
+        }
+
+        override fun close() {
+            channel.close()
+        }
+
+        override fun toString(): String = "NioSeekableInputStream"
+    }
+
+    override fun toString(): String = "LocalInputFile[path=$path]"
+}
diff --git a/opendc-format/src/main/kotlin/org/opendc/format/util/LocalParquetReader.kt b/opendc-format/src/main/kotlin/org/opendc/format/util/LocalParquetReader.kt
new file mode 100644
index 00000000..5083f3e1
--- /dev/null
+++ b/opendc-format/src/main/kotlin/org/opendc/format/util/LocalParquetReader.kt
@@ -0,0 +1,112 @@
+/*
+ * Copyright (c) 2021 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.format.util
+
+import org.apache.parquet.avro.AvroParquetReader
+import org.apache.parquet.hadoop.ParquetReader
+import org.apache.parquet.io.InputFile
+import java.io.File
+import java.io.IOException
+import java.nio.file.Files
+import java.nio.file.Path
+import kotlin.io.path.isDirectory
+
+/**
+ * A helper class to read Parquet files.
+ *
+ * @param path The path to the Parquet file or directory to read.
+ */
+public class LocalParquetReader<T>(path: Path) : AutoCloseable {
+    /**
+     * The input files to process.
+     */
+    private val filesIterator = if (path.isDirectory())
+        Files.list(path)
+            .filter { !it.isDirectory() }
+            .sorted()
+            .map { LocalInputFile(it) }
+            .iterator()
+    else
+        listOf(LocalInputFile(path)).iterator()
+
+    /**
+     * The Parquet reader to use.
+     */
+    private var reader: ParquetReader<T>? = null
+
+    /**
+     * Construct a [LocalParquetReader] for the specified [file].
+     */
+    public constructor(file: File) : this(file.toPath())
+
+    /**
+     * Read a single entry in the Parquet file.
+     */
+    public fun read(): T? {
+        return try {
+            val next = reader?.read()
+            if (next != null) {
+                next
+            } else {
+                initReader()
+
+                if (reader == null)
+                    null
+                else
+                    read()
+            }
+        } catch (e: InterruptedException) {
+            throw IOException(e)
+        }
+    }
+
+    /**
+     * Close the Parquet reader.
+     */
+    override fun close() {
+        reader?.close()
+    }
+
+    /**
+     * Initialize the next reader.
+     */
+    private fun initReader() {
+        reader?.close()
+
+        this.reader = if (filesIterator.hasNext()) {
+            createReader(filesIterator.next())
+        } else {
+            null
+        }
+    }
+
+    /**
+     * Create a Parquet reader for the specified file.
+     */
+    private fun createReader(input: InputFile): ParquetReader<T> {
+        return AvroParquetReader
+            .builder<T>(input)
+            .disableCompatibility()
+            .build()
+    }
+}
--
cgit v1.2.3
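To illustrate the new utilities, here is a minimal usage sketch (the file name is hypothetical) that reads Avro records from a local Parquet file or directory without any Hadoop filesystem on the classpath. It relies only on the classes added above, and mirrors how the WtfTraceReader uses the class in the next patch:

```kotlin
import org.apache.avro.generic.GenericRecord
import org.opendc.format.util.LocalParquetReader
import java.nio.file.Paths

fun main() {
    // LocalParquetReader accepts a single file or a directory of Parquet files;
    // read() returns null once all input files are exhausted.
    LocalParquetReader<GenericRecord>(Paths.get("trace.parquet")).use { reader ->
        while (true) {
            val record = reader.read() ?: break
            println(record)
        }
    }
}
```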
From 1b52a443e508bc4130071e67a1a8e17a6714c6b8 Mon Sep 17 00:00:00 2001
From: Fabian Mastenbroek
Date: Tue, 8 Jun 2021 23:46:07 +0200
Subject: format: Use LocalInputFile for Parquet reader

This change updates the format implementations that use Parquet by
switching to our InputFile implementation for local files, which
eliminates the need for Hadoop's filesystem support.
---
 opendc-format/build.gradle.kts                    | 11 +++-
 .../org/opendc/format/trace/wtf/WtfTraceReader.kt | 76 ++++++++++++----------
 .../opendc/format/trace/wtf/WtfTraceReaderTest.kt |  3 +-
 3 files changed, 53 insertions(+), 37 deletions(-)

(limited to 'opendc-format')

diff --git a/opendc-format/build.gradle.kts b/opendc-format/build.gradle.kts
index c0ffeb3e..d3c1a59a 100644
--- a/opendc-format/build.gradle.kts
+++ b/opendc-format/build.gradle.kts
@@ -39,8 +39,15 @@ dependencies {
         exclude(group = "org.jetbrains.kotlin", module = "kotlin-reflect")
     }
     implementation(kotlin("reflect"))
-    implementation(libs.parquet)
-    implementation(libs.hadoop.client) {
+
+    implementation(libs.parquet) {
+        exclude(group = "org.apache.hadoop")
+    }
+    implementation(libs.hadoop.common) {
+        exclude(group = "org.slf4j", module = "slf4j-log4j12")
+        exclude(group = "log4j")
+    }
+    implementation(libs.hadoop.mapreduce.client.core) {
         exclude(group = "org.slf4j", module = "slf4j-log4j12")
         exclude(group = "log4j")
     }
diff --git a/opendc-format/src/main/kotlin/org/opendc/format/trace/wtf/WtfTraceReader.kt b/opendc-format/src/main/kotlin/org/opendc/format/trace/wtf/WtfTraceReader.kt
index feadf61f..dde1b340 100644
--- a/opendc-format/src/main/kotlin/org/opendc/format/trace/wtf/WtfTraceReader.kt
+++ b/opendc-format/src/main/kotlin/org/opendc/format/trace/wtf/WtfTraceReader.kt
@@ -23,15 +23,16 @@
 package org.opendc.format.trace.wtf
 
 import org.apache.avro.generic.GenericRecord
-import org.apache.hadoop.fs.Path
-import org.apache.parquet.avro.AvroParquetReader
 import org.opendc.format.trace.TraceEntry
 import org.opendc.format.trace.TraceReader
+import org.opendc.format.util.LocalParquetReader
 import org.opendc.simulator.compute.workload.SimFlopsWorkload
 import org.opendc.workflow.api.Job
 import org.opendc.workflow.api.Task
 import org.opendc.workflow.api.WORKFLOW_TASK_CORES
 import org.opendc.workflow.api.WORKFLOW_TASK_DEADLINE
+import java.io.File
+import java.nio.file.Path
 import java.util.UUID
 import kotlin.math.min
@@ -41,12 +42,19 @@ import kotlin.math.min
  *
  * @param path The path to the trace.
  */
-public class WtfTraceReader(path: String) : TraceReader<Job> {
+public class WtfTraceReader(path: Path) : TraceReader<Job> {
     /**
      * The internal iterator to use for this reader.
      */
     private val iterator: Iterator<TraceEntry<Job>>
 
+    /**
+     * Construct a [TraceReader] from the specified [path].
+     *
+     * @param path The path to the trace.
+     */
+    public constructor(path: File) : this(path.toPath())
+
     /**
      * Initialize the reader.
      */
@@ -56,43 +64,43 @@ public class WtfTraceReader(path: String) : TraceReader<Job> {
         val tasks = mutableMapOf<Long, Task>()
         val taskDependencies = mutableMapOf<Task, List<Long>>()
 
-        @Suppress("DEPRECATION")
-        val reader = AvroParquetReader.builder<GenericRecord>(Path(path, "tasks/schema-1.0")).build()
+        LocalParquetReader<GenericRecord>(path.resolve("tasks/schema-1.0")).use { reader ->
+            while (true) {
+                val nextRecord = reader.read() ?: break
 
-        while (true) {
-            val nextRecord = reader.read() ?: break
+                val workflowId = nextRecord.get("workflow_id") as Long
+                val taskId = nextRecord.get("id") as Long
+                val submitTime = nextRecord.get("ts_submit") as Long
+                val runtime = nextRecord.get("runtime") as Long
+                val cores = (nextRecord.get("resource_amount_requested") as Double).toInt()
 
-            val workflowId = nextRecord.get("workflow_id") as Long
-            val taskId = nextRecord.get("id") as Long
-            val submitTime = nextRecord.get("ts_submit") as Long
-            val runtime = nextRecord.get("runtime") as Long
-            val cores = (nextRecord.get("resource_amount_requested") as Double).toInt()
-            @Suppress("UNCHECKED_CAST")
-            val dependencies = (nextRecord.get("parents") as ArrayList<GenericRecord>).map {
-                it.get("item") as Long
-            }
+                @Suppress("UNCHECKED_CAST")
+                val dependencies = (nextRecord.get("parents") as ArrayList<GenericRecord>).map {
+                    it.get("item") as Long
+                }
 
-            val flops: Long = 4100 * (runtime / 1000) * cores
+                val flops: Long = 4100 * (runtime / 1000) * cores
 
-            val workflow = workflows.getOrPut(workflowId) {
-                Job(UUID(0L, workflowId), "", HashSet())
-            }
-            val workload = SimFlopsWorkload(flops)
-            val task = Task(
-                UUID(0L, taskId),
-                "",
-                HashSet(),
-                mapOf(
-                    "workload" to workload,
-                    WORKFLOW_TASK_CORES to cores,
-                    WORKFLOW_TASK_DEADLINE to runtime
+                val workflow = workflows.getOrPut(workflowId) {
+                    Job(UUID(0L, workflowId), "", HashSet())
+                }
+                val workload = SimFlopsWorkload(flops)
+                val task = Task(
+                    UUID(0L, taskId),
+                    "",
+                    HashSet(),
+                    mapOf(
+                        "workload" to workload,
+                        WORKFLOW_TASK_CORES to cores,
+                        WORKFLOW_TASK_DEADLINE to runtime
+                    )
                 )
-            )
 
-            starts.merge(workflowId, submitTime, ::min)
-            (workflow.tasks as MutableSet<Task>).add(task)
-            tasks[taskId] = task
-            taskDependencies[task] = dependencies
+                starts.merge(workflowId, submitTime, ::min)
+                (workflow.tasks as MutableSet<Task>).add(task)
+                tasks[taskId] = task
+                taskDependencies[task] = dependencies
+            }
         }
 
         // Fix dependencies and dependents for all tasks
diff --git a/opendc-format/src/test/kotlin/org/opendc/format/trace/wtf/WtfTraceReaderTest.kt b/opendc-format/src/test/kotlin/org/opendc/format/trace/wtf/WtfTraceReaderTest.kt
index bcfa7553..31ae03e0 100644
--- a/opendc-format/src/test/kotlin/org/opendc/format/trace/wtf/WtfTraceReaderTest.kt
+++ b/opendc-format/src/test/kotlin/org/opendc/format/trace/wtf/WtfTraceReaderTest.kt
@@ -24,6 +24,7 @@ package org.opendc.format.trace.wtf
 
 import org.junit.jupiter.api.Assertions.*
 import org.junit.jupiter.api.Test
+import java.io.File
 
 /**
  * Test suite for the [WtfTraceReader] class.
@@ -34,7 +35,7 @@ class WtfTraceReaderTest {
      */
     @Test
     fun testParseWtf() {
-        val reader = WtfTraceReader("src/test/resources/wtf-trace")
+        val reader = WtfTraceReader(File("src/test/resources/wtf-trace"))
         var entry = reader.next()
         assertEquals(0, entry.start)
         assertEquals(23, entry.workload.tasks.size)
--
cgit v1.2.3
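The visible API change in this patch is the constructor signature: the reader now takes a `java.nio.file.Path` or a `java.io.File` instead of a plain `String`. A short sketch of both call forms, with an illustrative trace location; the iteration shown is limited to `next()`, which the test above exercises directly:

```kotlin
import org.opendc.format.trace.wtf.WtfTraceReader
import java.io.File
import java.nio.file.Paths

fun main() {
    // Both forms resolve to the same Path-based constructor.
    val fromFile = WtfTraceReader(File("src/test/resources/wtf-trace"))
    val fromPath = WtfTraceReader(Paths.get("src/test/resources/wtf-trace"))

    // Read the first workflow entry, as in the test suite.
    val entry = fromFile.next()
    println(entry.start)
}
```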
From f0f59a0b98fe474da4411c0d5048ccdf4a2d7c43 Mon Sep 17 00:00:00 2001
From: Fabian Mastenbroek
Date: Wed, 9 Jun 2021 09:48:07 +0200
Subject: exp: Use LocalInputFile for Parquet readers

This change updates the Parquet readers used in the Capelin experiments
to use our InputFile implementation for local files, to reduce our
dependency on Apache Hadoop.
---
 .../src/main/kotlin/org/opendc/format/util/LocalInputFile.kt | 6 ++++++
 1 file changed, 6 insertions(+)

(limited to 'opendc-format')

diff --git a/opendc-format/src/main/kotlin/org/opendc/format/util/LocalInputFile.kt b/opendc-format/src/main/kotlin/org/opendc/format/util/LocalInputFile.kt
index d8c25a62..92319ace 100644
--- a/opendc-format/src/main/kotlin/org/opendc/format/util/LocalInputFile.kt
+++ b/opendc-format/src/main/kotlin/org/opendc/format/util/LocalInputFile.kt
@@ -25,6 +25,7 @@
 import org.apache.parquet.io.InputFile
 import org.apache.parquet.io.SeekableInputStream
 import java.io.EOFException
+import java.io.File
 import java.nio.ByteBuffer
 import java.nio.channels.FileChannel
 import java.nio.file.Path
 import java.nio.file.StandardOpenOption
@@ -39,6 +40,11 @@ public class LocalInputFile(private val path: Path) : InputFile {
      */
     private val channel = FileChannel.open(path, StandardOpenOption.READ)
 
+    /**
+     * Construct a [LocalInputFile] for the specified [file].
+     */
+    public constructor(file: File) : this(file.toPath())
+
     override fun getLength(): Long = channel.size()
--
cgit v1.2.3
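The convenience constructor added here mirrors the one on WtfTraceReader, so call sites that still work with `java.io.File` need no conversion boilerplate. A small sketch (the paths are illustrative); note that LocalInputFile opens a FileChannel eagerly in its initializer, so construction fails fast when the file does not exist:

```kotlin
import org.opendc.format.util.LocalInputFile
import java.io.File
import java.nio.file.Paths

// Equivalent ways to wrap a local Parquet file for Parquet's reader API.
val byPath = LocalInputFile(Paths.get("data/trace.parquet"))
val byFile = LocalInputFile(File("data/trace.parquet"))
```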
From 2837ee439aef908b3fe281b9707dbb961e700f1c Mon Sep 17 00:00:00 2001
From: Fabian Mastenbroek
Date: Wed, 9 Jun 2021 12:35:44 +0200
Subject: format: Add implementation of local Parquet OutputFile

This change adds an implementation of Parquet's OutputFile interface
for local files, in order to eliminate the dependency on the entire
Hadoop system. The implementation allows users to write Parquet files
locally without needing a Hadoop filesystem implementation.
---
 .../org/opendc/format/util/LocalOutputFile.kt    |  95 ++++++++++++++++
 .../kotlin/org/opendc/format/util/ParquetTest.kt | 125 +++++++++++++++++++++
 2 files changed, 220 insertions(+)
 create mode 100644 opendc-format/src/main/kotlin/org/opendc/format/util/LocalOutputFile.kt
 create mode 100644 opendc-format/src/test/kotlin/org/opendc/format/util/ParquetTest.kt

(limited to 'opendc-format')

diff --git a/opendc-format/src/main/kotlin/org/opendc/format/util/LocalOutputFile.kt b/opendc-format/src/main/kotlin/org/opendc/format/util/LocalOutputFile.kt
new file mode 100644
index 00000000..657bca5a
--- /dev/null
+++ b/opendc-format/src/main/kotlin/org/opendc/format/util/LocalOutputFile.kt
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2021 AtLarge Research
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.opendc.format.util
+
+import org.apache.parquet.io.OutputFile
+import org.apache.parquet.io.PositionOutputStream
+import java.io.File
+import java.io.OutputStream
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.StandardOpenOption
+
+/**
+ * An [OutputFile] on the local filesystem.
+ */
+public class LocalOutputFile(private val path: Path) : OutputFile {
+    /**
+     * Construct a [LocalOutputFile] from the specified [file].
+     */
+    public constructor(file: File) : this(file.toPath())
+
+    override fun create(blockSizeHint: Long): PositionOutputStream {
+        val output = Files.newOutputStream(path, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)
+        return NioPositionOutputStream(output)
+    }
+
+    override fun createOrOverwrite(blockSizeHint: Long): PositionOutputStream {
+        val output = Files.newOutputStream(path, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)
+        return NioPositionOutputStream(output)
+    }
+
+    override fun supportsBlockSize(): Boolean = false
+
+    override fun defaultBlockSize(): Long =
+        throw UnsupportedOperationException("Local filesystem does not have default block size")
+
+    override fun getPath(): String = path.toString()
+
+    /**
+     * Implementation of [PositionOutputStream] for an [OutputStream].
+     */
+    private class NioPositionOutputStream(private val output: OutputStream) : PositionOutputStream() {
+        /**
+         * The current position in the file.
+         */
+ */ + private var _pos = 0L + + override fun getPos(): Long = _pos + + override fun write(b: Int) { + output.write(b) + _pos++ + } + + override fun write(b: ByteArray) { + output.write(b) + _pos += b.size + } + + override fun write(b: ByteArray, off: Int, len: Int) { + output.write(b, off, len) + _pos += len + } + + override fun flush() { + output.flush() + } + + override fun close() { + output.close() + } + + override fun toString(): String = "NioPositionOutputStream[output=$output]" + } +} diff --git a/opendc-format/src/test/kotlin/org/opendc/format/util/ParquetTest.kt b/opendc-format/src/test/kotlin/org/opendc/format/util/ParquetTest.kt new file mode 100644 index 00000000..e496dd96 --- /dev/null +++ b/opendc-format/src/test/kotlin/org/opendc/format/util/ParquetTest.kt @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2021 AtLarge Research + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.opendc.format.util + +import org.apache.avro.SchemaBuilder +import org.apache.avro.generic.GenericData +import org.apache.parquet.avro.AvroParquetReader +import org.apache.parquet.avro.AvroParquetWriter +import org.apache.parquet.hadoop.ParquetFileWriter +import org.junit.jupiter.api.* +import org.junit.jupiter.api.Assertions.assertEquals +import java.io.File +import java.nio.file.FileAlreadyExistsException +import java.nio.file.NoSuchFileException + +/** + * Test suite for the Parquet helper classes. + */ +internal class ParquetTest { + private val schema = SchemaBuilder + .record("test") + .namespace("org.opendc.format.util") + .fields() + .name("field").type().intType().noDefault() + .endRecord() + + private lateinit var file: File + + /** + * Setup the test + */ + @BeforeEach + fun setUp() { + file = File.createTempFile("opendc", "parquet") + } + + /** + * Tear down the test. + */ + @AfterEach + fun tearDown() { + file.delete() + } + + /** + * Initial test to verify whether the Parquet writer works. 
+ */ + @Test + fun testSmoke() { + val n = 4 + val writer = AvroParquetWriter.builder(LocalOutputFile(file)) + .withSchema(schema) + .withWriteMode(ParquetFileWriter.Mode.OVERWRITE) + .build() + + try { + repeat(n) { i -> + val record = GenericData.Record(schema) + record.put("field", i) + writer.write(record) + } + } finally { + writer.close() + } + + val reader = AvroParquetReader.builder(LocalInputFile(file)) + .build() + + var counter = 0 + try { + while (true) { + val record = reader.read() ?: break + assertEquals(counter++, record.get("field")) + } + } finally { + reader.close() + } + + assertEquals(n, counter) + } + + /** + * Test if overwriting fails if not specified. + */ + @Test + fun testOverwrite() { + assertThrows { + AvroParquetWriter.builder(LocalOutputFile(file)) + .withSchema(schema) + .build() + } + } + + /** + * Test non-existent file. + */ + @Test + fun testNonExistent() { + file.delete() + assertThrows { + AvroParquetReader.builder(LocalInputFile(file)) + .build() + } + } +} -- cgit v1.2.3 From 0eb4fa604efe4e0b84d69749f688a79c2249c8b3 Mon Sep 17 00:00:00 2001 From: Fabian Mastenbroek Date: Wed, 9 Jun 2021 13:38:56 +0200 Subject: build: Eliminate most Hadoop dependencies This change eliminates all Hadoop dependencies that are not necessary for Parquet to work correctly. As a result, the number of dependencies should now be greatly reduced, which in turn leads to less artifacts that need to be retrieved at build time. --- opendc-format/build.gradle.kts | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) (limited to 'opendc-format') diff --git a/opendc-format/build.gradle.kts b/opendc-format/build.gradle.kts index d3c1a59a..e19e0ec8 100644 --- a/opendc-format/build.gradle.kts +++ b/opendc-format/build.gradle.kts @@ -40,16 +40,28 @@ dependencies { } implementation(kotlin("reflect")) + /* This configuration is necessary for a slim dependency on Apache Parquet */ implementation(libs.parquet) { exclude(group = "org.apache.hadoop") } - implementation(libs.hadoop.common) { + runtimeOnly(libs.hadoop.common) { exclude(group = "org.slf4j", module = "slf4j-log4j12") exclude(group = "log4j") + exclude(group = "org.apache.hadoop") + exclude(group = "org.apache.curator") + exclude(group = "org.apache.zookeeper") + exclude(group = "org.apache.kerby") + exclude(group = "org.apache.httpcomponents") + exclude(group = "org.apache.htrace") + exclude(group = "commons-cli") + exclude(group = "javax.servlet") + exclude(group = "org.eclipse.jetty") + exclude(group = "com.sun.jersey") + exclude(group = "com.jcraft") + exclude(group = "dnsjava") } - implementation(libs.hadoop.mapreduce.client.core) { - exclude(group = "org.slf4j", module = "slf4j-log4j12") - exclude(group = "log4j") + runtimeOnly(libs.hadoop.mapreduce.client.core) { + isTransitive = false } testRuntimeOnly(libs.slf4j.simple) -- cgit v1.2.3