author     Fabian Mastenbroek <mail.fabianm@gmail.com>  2020-02-14 12:43:29 +0100
committer  Fabian Mastenbroek <mail.fabianm@gmail.com>  2020-02-14 12:50:35 +0100
commit     92e858e398bf69380dbacebc042dde2bfa8cfe9c (patch)
tree       a7431f3d74dd449d5c6053e77b4cb60cdb36d924  /opendc/opendc-format
parent     5095d42c0a1fe0a593c84bccfdd594712e12ca1a (diff)
refactor: Integrate opendc-compute in existing model
This change refactors the existing model to use the new interfaces from the opendc-compute module.
Diffstat (limited to 'opendc/opendc-format')
-rw-r--r--  opendc/opendc-format/build.gradle.kts  1
-rw-r--r--  opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt  47
-rw-r--r--  opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/gwf/GwfTraceReader.kt  4
3 files changed, 29 insertions, 23 deletions
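
Note: the Sc18EnvironmentReader changed below deserializes a JSON setup file with Jackson. The setup model itself is not part of this diff; the sketch below is a hypothetical reconstruction whose class and field names are inferred only from the accessors the reader uses (setup.name, setup.rooms, room.objects, RoomObject.Rack, roomObject.machines, machine.cpus), and it omits any Jackson annotations the real classes may carry.

    // Hypothetical shape of the SC18 setup model, inferred from the reader code in this diff.
    data class Setup(val name: String, val rooms: List<Room>)
    data class Room(val objects: List<RoomObject>)
    sealed class RoomObject {
        data class Rack(val machines: List<Machine>) : RoomObject()
    }
    data class Machine(val cpus: List<Int>) // cpu ids 1 and 2 are the only values the reader accepts
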
diff --git a/opendc/opendc-format/build.gradle.kts b/opendc/opendc-format/build.gradle.kts
index 5f9ac1ec..21b0dc57 100644
--- a/opendc/opendc-format/build.gradle.kts
+++ b/opendc/opendc-format/build.gradle.kts
@@ -31,6 +31,7 @@ plugins {
 
 dependencies {
     api(project(":opendc:opendc-core"))
+    api(project(":opendc:opendc-compute"))
     api(project(":opendc:opendc-workflows"))
     api("com.fasterxml.jackson.module:jackson-module-kotlin:2.9.8")
     implementation(kotlin("stdlib"))
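
Note: the new module is wired in with the api configuration rather than implementation, so opendc-compute types that now appear in opendc-format's public signatures are visible on the compile classpath of downstream modules. A minimal sketch of a hypothetical consumer's build.gradle.kts (the consumer module itself is made up):

    // Hypothetical consumer of opendc-format; opendc-compute is reachable transitively
    // because opendc-format declares it with the api configuration.
    dependencies {
        implementation(project(":opendc:opendc-format"))
        // No explicit dependency on :opendc:opendc-compute is needed to compile against
        // types such as ProcessingUnit or Flavor exposed through opendc-format's API.
    }
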
diff --git a/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt b/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt
index ad111e74..7436778f 100644
--- a/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt
+++ b/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt
@@ -24,20 +24,22 @@
 
 package com.atlarge.opendc.format.environment.sc18
 
-import com.atlarge.opendc.core.Cluster
+import com.atlarge.opendc.compute.core.Flavor
+import com.atlarge.opendc.compute.core.ProcessingUnit
+import com.atlarge.opendc.compute.metal.driver.FakeBareMetalDriver
+import com.atlarge.opendc.compute.metal.service.ProvisioningService
+import com.atlarge.opendc.compute.metal.service.SimpleProvisioningService
 import com.atlarge.opendc.core.Environment
 import com.atlarge.opendc.core.Platform
 import com.atlarge.opendc.core.Zone
-import com.atlarge.opendc.core.resources.compute.ProcessingElement
-import com.atlarge.opendc.core.resources.compute.ProcessingUnit
-import com.atlarge.opendc.core.resources.compute.host.Host
-import com.atlarge.opendc.core.resources.compute.scheduling.SpaceSharedMachineScheduler
+import com.atlarge.opendc.core.services.ServiceRegistryImpl
 import com.atlarge.opendc.format.environment.EnvironmentReader
 import com.fasterxml.jackson.databind.ObjectMapper
 import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
 import com.fasterxml.jackson.module.kotlin.readValue
 import java.io.InputStream
 import java.util.UUID
+import kotlinx.coroutines.runBlocking
 
 /**
  * A parser for the JSON experiment setup files used for the SC18 paper: "A Reference Architecture for Datacenter
@@ -54,29 +56,39 @@ class Sc18EnvironmentReader(input: InputStream, mapper: ObjectMapper = jacksonOb
     init {
         val setup = mapper.readValue<Setup>(input)
 
-        val clusters = setup.rooms.mapIndexed { i, room ->
-            var counter = 0
-            val hosts = room.objects.flatMap { roomObject ->
+        var counter = 0
+        val nodes = setup.rooms.flatMap { room ->
+            room.objects.flatMap { roomObject ->
                 when (roomObject) {
                     is RoomObject.Rack -> {
                         roomObject.machines.map { machine ->
-                            val cores = machine.cpus.flatMap { id ->
+                            val cores = machine.cpus.map { id ->
                                 when (id) {
-                                    1 -> List(4) { ProcessingElement(it, CPUS[0]) }
-                                    2 -> List(2) { ProcessingElement(it, CPUS[1]) }
+                                    1 -> ProcessingUnit("Intel", "Core(TM) i7-6920HQ", "amd64", 4100.0, 4)
+                                    2 -> ProcessingUnit("Intel", "Core(TM) I7-6920HQ", "amd64", 3500.0, 2)
                                     else -> throw IllegalArgumentException("The cpu id $id is not recognized")
                                 }
                             }
-                            Host(UUID.randomUUID(), "node-${counter++}", SpaceSharedMachineScheduler, cores)
+                            val flavor = Flavor(cores)
+                            FakeBareMetalDriver(UUID.randomUUID(), "node-${counter++}", flavor)
                         }
                     }
                 }
             }
-            Cluster(UUID.randomUUID(), "cluster-$i", hosts)
         }
 
+        val provisioningService = SimpleProvisioningService()
+        runBlocking {
+            for (node in nodes) {
+                provisioningService.create(node)
+            }
+        }
+
+        val serviceRegistry = ServiceRegistryImpl()
+        serviceRegistry[ProvisioningService.Key] = provisioningService
+
         val platform = Platform(UUID.randomUUID(), "sc18-platform", listOf(
-            Zone(UUID.randomUUID(), "zone", emptySet(), clusters)
+            Zone(UUID.randomUUID(), "zone", serviceRegistry)
         ))
 
         environment = Environment(setup.name, null, listOf(platform))
@@ -85,11 +97,4 @@ class Sc18EnvironmentReader(input: InputStream, mapper: ObjectMapper = jacksonOb
     override fun read(): Environment = environment
 
     override fun close() {}
-
-    companion object {
-        val CPUS = arrayOf(
-            ProcessingUnit("Intel", 6, 6920, "Intel(R) Core(TM) i7-6920HQ CPU @ 4.10GHz", 4100.0, 1),
-            ProcessingUnit("Intel", 6, 6930, "Intel(R) Core(TM) i7-6920HQ CPU @ 3.50GHz", 3500.0, 1)
-        )
-    }
 }
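
Note: a minimal standalone sketch of the node-provisioning flow this file now uses, assembled only from constructors, calls and import paths visible in the hunks above (ProcessingUnit, Flavor, FakeBareMetalDriver, SimpleProvisioningService, ProvisioningService.Key, ServiceRegistryImpl, Zone); argument values are illustrative and exact signatures elsewhere in opendc-compute may differ.

    import com.atlarge.opendc.compute.core.Flavor
    import com.atlarge.opendc.compute.core.ProcessingUnit
    import com.atlarge.opendc.compute.metal.driver.FakeBareMetalDriver
    import com.atlarge.opendc.compute.metal.service.ProvisioningService
    import com.atlarge.opendc.compute.metal.service.SimpleProvisioningService
    import com.atlarge.opendc.core.Zone
    import com.atlarge.opendc.core.services.ServiceRegistryImpl
    import java.util.UUID
    import kotlinx.coroutines.runBlocking

    fun main() {
        // Describe the hardware of one node and wrap it in a fake bare-metal driver.
        val cpu = ProcessingUnit("Intel", "Core(TM) i7-6920HQ", "amd64", 4100.0, 4)
        val node = FakeBareMetalDriver(UUID.randomUUID(), "node-0", Flavor(listOf(cpu)))

        // Register the node with a provisioning service; the reader above wraps create()
        // in runBlocking, so it is treated as a suspending call here as well.
        val provisioningService = SimpleProvisioningService()
        runBlocking {
            provisioningService.create(node)
        }

        // Expose the provisioner through the zone's service registry, replacing the old
        // cluster/host hierarchy.
        val serviceRegistry = ServiceRegistryImpl()
        serviceRegistry[ProvisioningService.Key] = provisioningService
        val zone = Zone(UUID.randomUUID(), "zone", serviceRegistry)
    }
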
diff --git a/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/gwf/GwfTraceReader.kt b/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/gwf/GwfTraceReader.kt
index 407a5f4e..33db78c9 100644
--- a/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/gwf/GwfTraceReader.kt
+++ b/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/gwf/GwfTraceReader.kt
@@ -24,8 +24,8 @@
 
 package com.atlarge.opendc.format.trace.gwf
 
+import com.atlarge.opendc.compute.core.image.FlopsApplicationImage
 import com.atlarge.opendc.core.User
-import com.atlarge.opendc.core.workload.application.FlopsApplication
 import com.atlarge.opendc.format.trace.TraceEntry
 import com.atlarge.opendc.format.trace.TraceReader
 import com.atlarge.opendc.workflows.workload.Job
@@ -120,7 +120,7 @@ class GwfTraceReader(reader: BufferedReader) : TraceReader<Job> {
             val workflow = entry.workload
             val task = Task(
                 UUID(0L, taskId), "<unnamed>",
-                FlopsApplication(UUID(0L, taskId), "<unnamed>", workflow.owner, cores, flops),
+                FlopsApplicationImage(flops, cores),
                 HashSet()
             )
             entry.submissionTime = min(entry.submissionTime, submitTime)
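
Note: a minimal sketch of the changed task construction, assuming the same imports as the file above (the package of Task is not shown in this hunk); taskId, flops and cores stand in for values parsed from a GWF record, and their types are guesses.

    // Hypothetical values standing in for fields parsed from one GWF trace line.
    val taskId = 1L
    val flops = 4_000_000L
    val cores = 2

    // The workload is now a generic FlopsApplicationImage rather than a FlopsApplication
    // tied to the workflow's owner.
    val task = Task(
        UUID(0L, taskId), "<unnamed>",
        FlopsApplicationImage(flops, cores),
        HashSet()
    )
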