Diffstat (limited to 'opendc-common/src/main/kotlin/org')
 opendc-common/src/main/kotlin/org/opendc/common/utils/ConfigParser.kt | 97 +
 opendc-common/src/main/kotlin/org/opendc/common/utils/Kafka.kt        | 41 +
 opendc-common/src/main/kotlin/org/opendc/common/utils/PostgresqlDB.kt | 71 +
 3 files changed, 209 insertions(+), 0 deletions(-)
diff --git a/opendc-common/src/main/kotlin/org/opendc/common/utils/ConfigParser.kt b/opendc-common/src/main/kotlin/org/opendc/common/utils/ConfigParser.kt
new file mode 100644
index 00000000..e6f18da5
--- /dev/null
+++ b/opendc-common/src/main/kotlin/org/opendc/common/utils/ConfigParser.kt
@@ -0,0 +1,97 @@
+package org.opendc.common.utils
+
+import kotlinx.serialization.ExperimentalSerializationApi
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.decodeFromStream
+
+import java.io.File
+import java.io.IOException
+import java.io.InputStream
+import java.io.OutputStream
+import java.net.Socket
+import java.sql.Connection
+
+/**
+ * @author Mateusz
+ * @property name
+ * @property backlog the number of connections to accept
+ * @property address IPv4 address
+ * @property port
+ * @property postgresql PostgreSQL port
+ * @property username PostgreSQL user
+ * @property password PostgreSQL password
+ * @property database PostgreSQL database
+ * @property topic Kafka topic
+ * @property kafka Kafka port
+ */
+@Serializable
+public data class Config(
+    val name: String = "",
+    var backlog: Int = 0,
+    val address: String = "",
+    val port: Int = 0,
+    val postgresql: Int = 0,
+    val username: String = "",
+    val password: String = "",
+    val database: String = "",
+    val topic: String = "",
+    val kafka: Int = 0,
+) {
+
+    public companion object {
+        public var input: InputStream? = null
+        public var output: OutputStream? = null
+        public var connection: Connection? = null
+        public var kafka: Kafka? = null
+        public var database: PostgresqlDB? = null
+
+        public var socket: Socket? = null
+
+        public fun setConfigSocket(socket: Socket?) {
+            this.socket = socket
+            try {
+                input = socket?.getInputStream()
+                output = socket?.getOutputStream()
+            } catch (e: IOException) {
+                println(e.message)
+            }
+        }
+
+        public fun getConfigReader(): InputStream? {
+            return input
+        }
+
+        public fun getConfigWriter(): OutputStream? {
+            return output
+        }
+
+        public fun setKafkaInstance(kafka: Kafka) {
+            this.kafka = kafka
+        }
+
+        public fun getKafkaInstance(): Kafka? {
+            return this.kafka
+        }
+
+        public fun setDB(db: PostgresqlDB) {
+            this.database = db
+        }
+
+        public fun getDB(): PostgresqlDB? {
+            return this.database
+        }
+    }
+}
+/**
+ * Reads `config.json` into the [Config] data class.
+ * @author Mateusz
+ */
+public class ConfigReader {
+    private val jsonReader = Json
+
+    public fun read(file: File): Config = read(file.inputStream())
+
+    @OptIn(ExperimentalSerializationApi::class)
+    public fun read(input: InputStream): Config = jsonReader.decodeFromStream<Config>(input)
+}
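
A minimal usage sketch of the reader, assuming a config.json whose keys mirror the Config properties above; the concrete values are placeholders:

    import java.io.ByteArrayInputStream

    fun loadConfigExample(): Config {
        // Reading from disk: ConfigReader().read(File("config.json"))
        // Reading from any stream; the JSON keys mirror the Config properties.
        val json = """
            {"name": "exporter", "backlog": 10, "address": "127.0.0.1", "port": 8080,
             "postgresql": 5432, "username": "opendc", "password": "opendc",
             "database": "opendc", "topic": "metrics", "kafka": 9092}
        """.trimIndent()
        return ConfigReader().read(ByteArrayInputStream(json.toByteArray()))
    }
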
diff --git a/opendc-common/src/main/kotlin/org/opendc/common/utils/Kafka.kt b/opendc-common/src/main/kotlin/org/opendc/common/utils/Kafka.kt
new file mode 100644
index 00000000..48590d9f
--- /dev/null
+++ b/opendc-common/src/main/kotlin/org/opendc/common/utils/Kafka.kt
@@ -0,0 +1,41 @@
+package org.opendc.common.utils
+
+import org.apache.kafka.clients.producer.KafkaProducer
+import org.apache.kafka.clients.producer.Producer
+import org.apache.kafka.clients.producer.ProducerRecord
+import org.opendc.common.ProtobufMetrics
+import java.util.Properties
+
+public class Kafka(
+    private val topic: String,
+    address: String,
+    port: Int,
+) {
+    private val servers: String = "$address:$port"
+    private var properties: Properties? = null
+    private var producer: Producer<String, ProtobufMetrics.ProtoExport>? = null
+
+    init {
+        properties = Properties()
+        // String keys, Protobuf values via Confluent's serializer; the schema registry is expected at localhost:8081.
+        properties?.put("bootstrap.servers", servers)
+        properties?.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
+        properties?.put("value.serializer", "io.confluent.kafka.serializers.protobuf.KafkaProtobufSerializer")
+        properties?.put("schema.registry.url", "http://localhost:8081")
+        properties?.put("auto.register.schemas", "true")
+        try {
+            producer = KafkaProducer(properties)
+        } catch (e: Exception) {
+            println(e.message)
+        }
+    }
+
+    public fun getProducer(): Producer<String, ProtobufMetrics.ProtoExport>? {
+        return this.producer
+    }
+
+    public fun send(value: ProtobufMetrics.ProtoExport) {
+        producer?.send(ProducerRecord(this.topic, value))
+    }
+
+}
\ No newline at end of file
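
A sketch of wiring the producer from a Config instance. ProtobufMetrics.ProtoExport is the generated protobuf type referenced above; its fields are not shown in this diff, so the newBuilder() call below (the standard generated accessor) only illustrates the shape of the call, and the topic/port wiring follows the Config KDoc:

    fun publishExample(config: Config) {
        // config.kafka is the broker port, config.topic the destination topic (see Config above).
        val kafka = Kafka(config.topic, config.address, config.kafka)
        Config.setKafkaInstance(kafka)

        // ProtoExport fields are defined elsewhere in opendc-common; an empty message is built here.
        val metric = ProtobufMetrics.ProtoExport.newBuilder().build()
        Config.getKafkaInstance()?.send(metric)
    }
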
diff --git a/opendc-common/src/main/kotlin/org/opendc/common/utils/PostgresqlDB.kt b/opendc-common/src/main/kotlin/org/opendc/common/utils/PostgresqlDB.kt
new file mode 100644
index 00000000..69314ef3
--- /dev/null
+++ b/opendc-common/src/main/kotlin/org/opendc/common/utils/PostgresqlDB.kt
@@ -0,0 +1,71 @@
+package org.opendc.common.utils
+
+import java.sql.Connection
+import java.sql.DriverManager
+import java.sql.SQLException
+
+/**
+ * Represents the PostgreSQL database used for exported metrics.
+ * On construction it drops the existing schema and recreates an empty `metrics` table.
+ *
+ * @author Mateusz
+ *
+ * @param address IPv4 address
+ * @param port PostgreSQL port
+ * @param dbName database name
+ * @param user database user
+ * @param password database password
+ */
+public class PostgresqlDB(
+    address: String,
+    port: Int,
+    dbName: String,
+    private var user: String,
+    private var password: String,
+) {
+    private var connection: Connection? = null
+    private var dbUrl: String = ""
+
+    init {
+        dbUrl = "jdbc:postgresql://$address:$port/$dbName"
+        println(dbUrl)
+        try {
+            connection = DriverManager.getConnection(dbUrl, user, password)
+            clear()
+            setup()
+        } catch (e: SQLException) {
+            println(e.message)
+        }
+    }
+    public fun setup() {
+        val createTable = """
+            CREATE TABLE metrics (
+                id SERIAL PRIMARY KEY,
+                timestamp bigint,
+                tasksActive integer,
+                clusterName varchar(10));
+        """.trimIndent()
+        try {
+            // DDL returns no result set, so use executeUpdate rather than executeQuery.
+            DriverManager.getConnection(dbUrl, user, password).use { conn ->
+                conn.createStatement().use { st -> st.executeUpdate(createTable) }
+            }
+        } catch (e: SQLException) {
+            println(e.message)
+        }
+    }
+
+    public fun clear() {
+        val dropAllTables = """
+            DROP SCHEMA public CASCADE;
+            CREATE SCHEMA public;
+        """.trimIndent()
+        try {
+            DriverManager.getConnection(dbUrl, user, password).use { conn ->
+                conn.createStatement().use { st -> st.execute(dropAllTables) }
+            }
+        } catch (e: SQLException) {
+            println(e.message)
+        }
+    }
+}
\ No newline at end of file
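
For completeness, a sketch of constructing the database helper from a Config and inserting one row into the metrics table it creates. The INSERT is illustrative only (the helper exposes no insert method), and the sample values are placeholders:

    import java.sql.DriverManager

    fun databaseExample(config: Config) {
        // Constructing the helper drops the public schema and recreates the metrics table.
        val db = PostgresqlDB(config.address, config.postgresql, config.database, config.username, config.password)
        Config.setDB(db)

        // Plain JDBC insert; unquoted identifiers fold to lower case in PostgreSQL, matching the DDL above.
        val url = "jdbc:postgresql://${config.address}:${config.postgresql}/${config.database}"
        DriverManager.getConnection(url, config.username, config.password).use { conn ->
            conn.prepareStatement("INSERT INTO metrics (timestamp, tasksActive, clusterName) VALUES (?, ?, ?)").use { st ->
                st.setLong(1, System.currentTimeMillis())
                st.setInt(2, 4)
                st.setString(3, "cluster0")
                st.executeUpdate()
            }
        }
    }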