| author | jc0b <j@jc0b.computer> | 2020-07-21 21:22:44 +0200 |
|---|---|---|
| committer | Fabian Mastenbroek <mail.fabianm@gmail.com> | 2020-08-24 19:48:14 +0200 |
| commit | 67b6ec800df8e023efadb60ae5f7919030b19789 | |
| tree | 9aa496408a4097857b6a032b84dd0a396321e1d3 | |
| parent | 5b4ab37ac7be2b2c34e2fad928b0cd7f3a837263 | |
| parent | 04686bf5cef4aea51fd613a158aa8b155763d0e7 | |
Merge branch 'master' into local working copy

Preserve the working copy while updating.
166 files changed, 2242 insertions(+), 1463 deletions(-)
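To review this merge locally, the summary above can be reproduced with plain git; a minimal sketch, assuming a clone of the OpenDC repository whose remote carries this commit:

```bash
# Show the merge commit itself (metadata only)
git show --no-patch 67b6ec800df8e023efadb60ae5f7919030b19789

# For a merge, diff against the first parent to reproduce the diffstat above
git diff --stat 5b4ab37ac7be2b2c34e2fad928b0cd7f3a837263 67b6ec800df8e023efadb60ae5f7919030b19789
```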
diff --git a/.editorconfig b/.editorconfig
index 508cb765..b0ec2ebd 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -14,3 +14,7 @@ insert_final_newline = true
 
 [*.md]
 trim_trailing_whitespace = false
+
+# Ensure YAML formatting is according to standard
+[*.{yml,yaml}]
+indent_size = 2
diff --git a/.gitattributes b/.gitattributes
index 526c8a38..9d6ef431 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1 +1,5 @@
-*.sh text eol=lf
\ No newline at end of file
+# https://help.github.com/articles/dealing-with-line-endings/
+#
+# These are explicitly windows files and should use crlf
+*.bat text eol=crlf
+*.sh text eol=lf
diff --git a/.github/workflows/api.yml b/.github/workflows/api.yml
new file mode 100644
index 00000000..ae67b753
--- /dev/null
+++ b/.github/workflows/api.yml
@@ -0,0 +1,34 @@
+name: REST API
+
+on:
+  push:
+    paths:
+      - 'api/**'
+
+defaults:
+  run:
+    working-directory: api
+
+jobs:
+  build:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        python: [3.8]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+      - name: Lint with pylint
+        run: |
+          ./check.sh
+      - name: Test with pytest
+        run: |
+          pytest opendc
diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml
index ec4a7e71..da6f1031 100644
--- a/.github/workflows/frontend.yml
+++ b/.github/workflows/frontend.yml
@@ -1,29 +1,29 @@
 name: Frontend
 
 on:
-    push:
-        paths:
-            - 'frontend/*'
+  push:
+    paths:
+      - 'frontend/**'
 
 defaults:
-    run:
-        working-directory: frontend
+  run:
+    working-directory: frontend
 
 jobs:
-    build:
-        runs-on: ${{ matrix.os }}
-        strategy:
-            matrix:
-                os: [ubuntu-latest]
-                node: [12.x]
-        steps:
-            - uses: actions/checkout@v2
-            - name: Set up Node.js
-              uses: actions/setup-node@v1
-              with:
-                  node-version: ${{ matrix.node }}
-            - run: npm install
-            - run: npm run build --if-present
-            - run: npm test
-              env:
-                  CI: true
+  build:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        node: [12.x]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Node.js
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node }}
+      - run: npm install
+      - run: npm run build --if-present
+      - run: npm test
+        env:
+          CI: true
diff --git a/.github/workflows/simulator.yml b/.github/workflows/simulator.yml
index 887d4af6..8174ae3a 100644
--- a/.github/workflows/simulator.yml
+++ b/.github/workflows/simulator.yml
@@ -1,37 +1,37 @@
 name: Simulator
 
 on:
-    push:
-        paths:
-            - 'simulator/*'
+  push:
+    paths:
+      - 'simulator/**'
 
 defaults:
-    run:
-        working-directory: simulator
+  run:
+    working-directory: simulator
 
 jobs:
-    build:
-        runs-on: ${{ matrix.os }}
-        strategy:
-            matrix:
-                os: [ubuntu-latest]
-                java: [14]
-        steps:
-            - name: Checkout repository
-              uses: actions/checkout@v2
-            - name: Set up JDK
-              uses: actions/setup-java@v1
-              with:
-                  java-version: ${{ matrix.java }}
-            - name: Grant execute permission for gradlew
-              run: chmod +x gradlew
-            - uses: actions/cache@v1
-              with:
-                  path: ~/.gradle/caches
-                  key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
-                  restore-keys: |
-                      ${{ runner.os }}-gradle-
-            - name: Build with Gradle
-              run: ./gradlew assemble
-            - name: Check with Gradle
-              run: ./gradlew check --info
+  build:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        java: [14]
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+      - name: Set up JDK
+        uses: actions/setup-java@v1
+        with:
+          java-version: ${{ matrix.java }}
+      - name: Grant execute permission for gradlew
+        run: chmod +x gradlew
+      - uses: actions/cache@v1
+        with:
+          path: ~/.gradle/caches
+          key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
+          restore-keys: |
+            ${{ runner.os }}-gradle-
+      - name: Build with Gradle
+        run: ./gradlew assemble
+      - name: Check with Gradle
+        run: ./gradlew check --info
diff --git a/.github/workflows/web-server.yml b/.github/workflows/web-server.yml
deleted file mode 100644
index 6f14f97b..00000000
--- a/.github/workflows/web-server.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-name: Web server
-
-on:
-    push:
-        paths:
-            - 'web-server/*'
-
-defaults:
-    run:
-        working-directory: web-server
-
-jobs:
-    build:
-        runs-on: ${{ matrix.os }}
-        strategy:
-            matrix:
-                os: [ubuntu-latest]
-                python: [3.8]
-        steps:
-            - uses: actions/checkout@v2
-            - name: Set up Python
-              uses: actions/setup-python@v2
-              with:
-                  python-version: ${{ matrix.python }}
-            - name: Install dependencies
-              run: |
-                  python -m pip install --upgrade pip
-                  pip install -r requirements.txt
-            - name: Lint with pylint
-              run: |
-                  ./check.sh
-            - name: Test with pytest
-              run: |
-                  pytest opendc
diff --git a/.gitignore b/.gitignore
--- a/.gitignore
+++ b/.gitignore
@@ -13,3 +13,6 @@ database/opendc_testing/*
 
 # Old credential setup file
 keys.json
+
+# Traces
+traces/
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 50af30b1..00000000
--- a/Dockerfile
+++ /dev/null
@@ -1,25 +0,0 @@
-FROM nikolaik/python-nodejs:python3.8-nodejs14
-MAINTAINER OpenDC Maintainers <opendc@atlarge-research.com>
-
-## Dockerfile for the frontend/server part of the deployment
-
-# Installing packages
-RUN apt-get update \
-    && apt-get install -y yarn git sed
-
-# Copy OpenDC directory
-COPY ./ /opendc
-
-# Fetch web server dependencies
-RUN pip install -r /opendc/web-server/requirements.txt
-
-# Build frontend
-RUN cd /opendc/frontend \
-    && rm -rf ./build \
-    && yarn \
-    && yarn build
-
-# Set working directory
-WORKDIR /opendc
-
-CMD ["sh", "-c", "python web-server/main.py"]
diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -23,7 +23,7 @@ OpenDC is a project by the [@Large Research Group](http://atlarge-research.com).
 
 ## Architecture
 
-OpenDC consists of four components: a Kotlin [simulator](/simulator), a MongoDB database, a Python Flask [web server](/web-server), and a React.js [frontend](/frontend), each in their own subdirectories.
+OpenDC consists of four components: a Kotlin [simulator](/simulator), a MongoDB database, a Python Flask [API](/api), and a React.js [frontend](/frontend), each in their own subdirectories.
 
 <p align="center">
   <img src="misc/artwork/opendc-component-diagram.png" alt="OpenDC Component Diagram">
@@ -84,6 +84,9 @@ OPENDC_ROOT_DIR=/your/path/to/opendc
 OPENDC_SERVER_BASE_URL=http://localhost:8081
 ```
 
+Afterwards, you should also create a `traces/` directory in which you place the VM and workflow traces you want to
+experiment with.
+
 If you plan to publicly deploy, please also tweak the other settings. In that case, also check the
 `docker-compose.yml` for further instructions.
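The deleted `web-server.yml` workflow is replaced one-for-one by the new `api.yml` above, and the same checks can be run outside CI. A minimal sketch using the exact commands from the new workflow (assuming Python 3.8 and the new `api/` layout introduced in this merge):

```bash
cd api

# Same steps as .github/workflows/api.yml
python -m pip install --upgrade pip
pip install -r requirements.txt

# Lint with pylint
./check.sh

# Test with pytest
pytest opendc
```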
 Now, start the server:
diff --git a/web-server/.gitignore b/api/.gitignore
index fef0da65..fef0da65 100644
--- a/web-server/.gitignore
+++ b/api/.gitignore
diff --git a/web-server/.gitlab-ci.yml b/api/.gitlab-ci.yml
index d80ba836..d80ba836 100644
--- a/web-server/.gitlab-ci.yml
+++ b/api/.gitlab-ci.yml
diff --git a/web-server/.pylintrc b/api/.pylintrc
index f25e4fc2..f25e4fc2 100644
--- a/web-server/.pylintrc
+++ b/api/.pylintrc
diff --git a/web-server/.style.yapf b/api/.style.yapf
index f5c26c57..f5c26c57 100644
--- a/web-server/.style.yapf
+++ b/api/.style.yapf
diff --git a/api/Dockerfile b/api/Dockerfile
new file mode 100644
index 00000000..49702c90
--- /dev/null
+++ b/api/Dockerfile
@@ -0,0 +1,17 @@
+FROM python:3.8
+MAINTAINER OpenDC Maintainers <opendc@atlarge-research.com>
+
+# Ensure the STDOUT is not buffered by Python so that our logs become visible
+# See https://stackoverflow.com/q/29663459/10213073
+ENV PYTHONUNBUFFERED 1
+
+# Copy OpenDC directory
+COPY ./ /opendc
+
+# Fetch web server dependencies
+RUN pip install -r /opendc/requirements.txt
+
+# Set working directory
+WORKDIR /opendc
+
+CMD ["python3", "main.py"]
diff --git a/web-server/README.md b/api/README.md
index 84fd09cc..4e8110d0 100644
--- a/web-server/README.md
+++ b/api/README.md
@@ -66,7 +66,7 @@ Clone OpenDC and follow the [instructions in the main repository](../) to set up
 
 **Important:** Be sure to set up environment variables according to those instructions, in a `.env` file.
 
-In `opendc-web-server/static/index.html`, add your own `OAUTH_CLIENT_ID` in `content=` on line `2`.
+In `api/static/index.html`, add your own `OAUTH_CLIENT_ID` in `content=` on line `2`.
 
 #### Set up the database
 
@@ -84,7 +84,7 @@ This will set you up with a running MongoDB instance and a visual inspection too
 
 Run the server.
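Since the new `api/Dockerfile` above is self-contained, the API image can also be built and run on its own; a rough sketch (the 8082 host port mirrors the `docker-compose.yml` mapping later in this diff, and the `.env` file holding the `OPENDC_*` variables is an assumption based on the README instructions):

```bash
# Build the image from the new api/ directory
docker build -t api ./api

# The container listens on 8081; expose it on 8082 as docker-compose does
docker run --rm -p 8082:8081 --env-file .env api
```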
```bash -cd web-server +cd api python main.py ``` diff --git a/web-server/check.sh b/api/check.sh index abe2c596..abe2c596 100755 --- a/web-server/check.sh +++ b/api/check.sh diff --git a/web-server/conftest.py b/api/conftest.py index 1f4831b8..1f4831b8 100644 --- a/web-server/conftest.py +++ b/api/conftest.py diff --git a/web-server/format.sh b/api/format.sh index 18cba452..18cba452 100755 --- a/web-server/format.sh +++ b/api/format.sh diff --git a/web-server/main.py b/api/main.py index c466c0f2..7544333a 100644..100755 --- a/web-server/main.py +++ b/api/main.py @@ -1,14 +1,16 @@ -import flask_socketio +#!/usr/bin/env python3 import json import os import sys import traceback import urllib.request -from flask import Flask, request, send_from_directory, jsonify + +import flask_socketio +from dotenv import load_dotenv +from flask import Flask, request, jsonify from flask_compress import Compress -from oauth2client import client, crypt from flask_cors import CORS -from dotenv import load_dotenv +from oauth2client import client, crypt from opendc.models.user import User from opendc.util import rest, path_parser, database @@ -18,12 +20,6 @@ load_dotenv() TEST_MODE = "OPENDC_FLASK_TESTING" in os.environ -# Specify the directory of static assets -if TEST_MODE: - STATIC_ROOT = os.curdir -else: - STATIC_ROOT = os.path.join(os.environ['OPENDC_ROOT_DIR'], 'frontend', 'build') - # Set up database if not testing if not TEST_MODE: database.DB.initialize_database( @@ -33,7 +29,7 @@ if not TEST_MODE: host=os.environ['OPENDC_DB_HOST'] if 'OPENDC_DB_HOST' in os.environ else 'localhost') # Set up the core app -FLASK_CORE_APP = Flask(__name__, static_url_path='', static_folder=STATIC_ROOT) +FLASK_CORE_APP = Flask(__name__) FLASK_CORE_APP.config['SECRET_KEY'] = os.environ['OPENDC_FLASK_SECRET'] # Set up CORS support for local setups @@ -49,11 +45,6 @@ else: SOCKET_IO_CORE = flask_socketio.SocketIO(FLASK_CORE_APP) -@FLASK_CORE_APP.errorhandler(404) -def page_not_found(e): - return send_from_directory(STATIC_ROOT, 'index.html') - - @FLASK_CORE_APP.route('/tokensignin', methods=['POST']) def sign_in(): """Authenticate a user with Google sign in""" @@ -131,20 +122,6 @@ def api_call(version, endpoint_path): return flask_response -@FLASK_CORE_APP.route('/my-auth-token') -def serve_web_server_test(): - """Serve the web server test.""" - return send_from_directory(STATIC_ROOT, 'index.html') - - -@FLASK_CORE_APP.route('/') -@FLASK_CORE_APP.route('/projects') -@FLASK_CORE_APP.route('/projects/<path:project_id>') -@FLASK_CORE_APP.route('/profile') -def serve_index(project_id=None): - return send_from_directory(STATIC_ROOT, 'index.html') - - @SOCKET_IO_CORE.on('request') def receive_message(message): """"Receive a SocketIO request""" diff --git a/web-server/misc/artwork/opendc-web-server-component-diagram.png b/api/misc/artwork/opendc-web-server-component-diagram.png Binary files differindex 91b26006..91b26006 100644 --- a/web-server/misc/artwork/opendc-web-server-component-diagram.png +++ b/api/misc/artwork/opendc-web-server-component-diagram.png diff --git a/web-server/opendc/__init__.py b/api/opendc/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/__init__.py +++ b/api/opendc/__init__.py diff --git a/web-server/opendc/api/__init__.py b/api/opendc/api/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/__init__.py +++ b/api/opendc/api/__init__.py diff --git a/web-server/opendc/api/v2/__init__.py b/api/opendc/api/v2/__init__.py index e69de29b..e69de29b 100644 --- 
a/web-server/opendc/api/v2/__init__.py +++ b/api/opendc/api/v2/__init__.py diff --git a/web-server/opendc/api/v2/paths.json b/api/opendc/api/v2/paths.json index 90d5a2e6..90d5a2e6 100644 --- a/web-server/opendc/api/v2/paths.json +++ b/api/opendc/api/v2/paths.json diff --git a/web-server/opendc/api/v2/portfolios/__init__.py b/api/opendc/api/v2/portfolios/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/portfolios/__init__.py +++ b/api/opendc/api/v2/portfolios/__init__.py diff --git a/web-server/opendc/api/v2/portfolios/portfolioId/__init__.py b/api/opendc/api/v2/portfolios/portfolioId/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/portfolios/portfolioId/__init__.py +++ b/api/opendc/api/v2/portfolios/portfolioId/__init__.py diff --git a/web-server/opendc/api/v2/portfolios/portfolioId/endpoint.py b/api/opendc/api/v2/portfolios/portfolioId/endpoint.py index c0ca64e0..c0ca64e0 100644 --- a/web-server/opendc/api/v2/portfolios/portfolioId/endpoint.py +++ b/api/opendc/api/v2/portfolios/portfolioId/endpoint.py diff --git a/web-server/opendc/api/v2/portfolios/portfolioId/scenarios/__init__.py b/api/opendc/api/v2/portfolios/portfolioId/scenarios/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/portfolios/portfolioId/scenarios/__init__.py +++ b/api/opendc/api/v2/portfolios/portfolioId/scenarios/__init__.py diff --git a/web-server/opendc/api/v2/portfolios/portfolioId/scenarios/endpoint.py b/api/opendc/api/v2/portfolios/portfolioId/scenarios/endpoint.py index 1c5e0ab6..ca1db36a 100644 --- a/web-server/opendc/api/v2/portfolios/portfolioId/scenarios/endpoint.py +++ b/api/opendc/api/v2/portfolios/portfolioId/scenarios/endpoint.py @@ -33,7 +33,7 @@ def POST(request): scenario = Scenario(request.params_body['scenario']) scenario.set_property('portfolioId', request.params_path['portfolioId']) - scenario.set_property('simulationState', 'QUEUED') + scenario.set_property('simulation', {'state': 'QUEUED'}) scenario.insert() diff --git a/web-server/opendc/api/v2/portfolios/portfolioId/scenarios/test_endpoint.py b/api/opendc/api/v2/portfolios/portfolioId/scenarios/test_endpoint.py index 8b55bab0..329e68e8 100644 --- a/web-server/opendc/api/v2/portfolios/portfolioId/scenarios/test_endpoint.py +++ b/api/opendc/api/v2/portfolios/portfolioId/scenarios/test_endpoint.py @@ -72,7 +72,9 @@ def test_add_scenario(client, mocker): 'projectId': '1', 'authorizationLevel': 'EDIT' }], - 'simulationState': 'QUEUED', + 'simulation': { + 'state': 'QUEUED', + }, }) mocker.patch.object(DB, 'insert', @@ -92,7 +94,9 @@ def test_add_scenario(client, mocker): 'schedulerName': 'DEFAULT', }, 'portfolioId': '1', - 'simulationState': 'QUEUED', + 'simulationState': { + 'state': 'QUEUED', + }, }) mocker.patch.object(DB, 'update', return_value=None) res = client.post( @@ -115,5 +119,5 @@ def test_add_scenario(client, mocker): } }) assert 'portfolioId' in res.json['content'] - assert 'simulationState' in res.json['content'] + assert 'simulation' in res.json['content'] assert '200' in res.status diff --git a/web-server/opendc/api/v2/portfolios/portfolioId/test_endpoint.py b/api/opendc/api/v2/portfolios/portfolioId/test_endpoint.py index 7ac346d4..7ac346d4 100644 --- a/web-server/opendc/api/v2/portfolios/portfolioId/test_endpoint.py +++ b/api/opendc/api/v2/portfolios/portfolioId/test_endpoint.py diff --git a/web-server/opendc/api/v2/prefabs/__init__.py b/api/opendc/api/v2/prefabs/__init__.py index e69de29b..e69de29b 100644 --- 
a/web-server/opendc/api/v2/prefabs/__init__.py +++ b/api/opendc/api/v2/prefabs/__init__.py diff --git a/web-server/opendc/api/v2/prefabs/endpoint.py b/api/opendc/api/v2/prefabs/endpoint.py index 723a2f0d..723a2f0d 100644 --- a/web-server/opendc/api/v2/prefabs/endpoint.py +++ b/api/opendc/api/v2/prefabs/endpoint.py diff --git a/web-server/opendc/api/v2/prefabs/prefabId/__init__.py b/api/opendc/api/v2/prefabs/prefabId/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/prefabs/prefabId/__init__.py +++ b/api/opendc/api/v2/prefabs/prefabId/__init__.py diff --git a/web-server/opendc/api/v2/projects/__init__.py b/api/opendc/api/v2/prefabs/prefabId/endpoint.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/projects/__init__.py +++ b/api/opendc/api/v2/prefabs/prefabId/endpoint.py diff --git a/web-server/opendc/api/v2/projects/projectId/__init__.py b/api/opendc/api/v2/prefabs/prefabId/test_endpoint.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/projects/projectId/__init__.py +++ b/api/opendc/api/v2/prefabs/prefabId/test_endpoint.py diff --git a/web-server/opendc/api/v2/prefabs/test_endpoint.py b/api/opendc/api/v2/prefabs/test_endpoint.py index 47029579..47029579 100644 --- a/web-server/opendc/api/v2/prefabs/test_endpoint.py +++ b/api/opendc/api/v2/prefabs/test_endpoint.py diff --git a/web-server/opendc/api/v2/projects/projectId/authorizations/__init__.py b/api/opendc/api/v2/projects/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/projects/projectId/authorizations/__init__.py +++ b/api/opendc/api/v2/projects/__init__.py diff --git a/web-server/opendc/api/v2/projects/endpoint.py b/api/opendc/api/v2/projects/endpoint.py index bf031382..bf031382 100644 --- a/web-server/opendc/api/v2/projects/endpoint.py +++ b/api/opendc/api/v2/projects/endpoint.py diff --git a/web-server/opendc/api/v2/projects/projectId/portfolios/__init__.py b/api/opendc/api/v2/projects/projectId/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/projects/projectId/portfolios/__init__.py +++ b/api/opendc/api/v2/projects/projectId/__init__.py diff --git a/web-server/opendc/api/v2/projects/projectId/topologies/__init__.py b/api/opendc/api/v2/projects/projectId/authorizations/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/projects/projectId/topologies/__init__.py +++ b/api/opendc/api/v2/projects/projectId/authorizations/__init__.py diff --git a/web-server/opendc/api/v2/projects/projectId/authorizations/endpoint.py b/api/opendc/api/v2/projects/projectId/authorizations/endpoint.py index 9f6a60ec..9f6a60ec 100644 --- a/web-server/opendc/api/v2/projects/projectId/authorizations/endpoint.py +++ b/api/opendc/api/v2/projects/projectId/authorizations/endpoint.py diff --git a/web-server/opendc/api/v2/projects/projectId/authorizations/test_endpoint.py b/api/opendc/api/v2/projects/projectId/authorizations/test_endpoint.py index c3bbc093..c3bbc093 100644 --- a/web-server/opendc/api/v2/projects/projectId/authorizations/test_endpoint.py +++ b/api/opendc/api/v2/projects/projectId/authorizations/test_endpoint.py diff --git a/web-server/opendc/api/v2/projects/projectId/endpoint.py b/api/opendc/api/v2/projects/projectId/endpoint.py index 77b66d75..77b66d75 100644 --- a/web-server/opendc/api/v2/projects/projectId/endpoint.py +++ b/api/opendc/api/v2/projects/projectId/endpoint.py diff --git a/web-server/opendc/api/v2/scenarios/__init__.py b/api/opendc/api/v2/projects/projectId/portfolios/__init__.py index 
e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/scenarios/__init__.py +++ b/api/opendc/api/v2/projects/projectId/portfolios/__init__.py diff --git a/web-server/opendc/api/v2/projects/projectId/portfolios/endpoint.py b/api/opendc/api/v2/projects/projectId/portfolios/endpoint.py index 0bc65565..0bc65565 100644 --- a/web-server/opendc/api/v2/projects/projectId/portfolios/endpoint.py +++ b/api/opendc/api/v2/projects/projectId/portfolios/endpoint.py diff --git a/web-server/opendc/api/v2/projects/projectId/portfolios/test_endpoint.py b/api/opendc/api/v2/projects/projectId/portfolios/test_endpoint.py index 24416cc3..24416cc3 100644 --- a/web-server/opendc/api/v2/projects/projectId/portfolios/test_endpoint.py +++ b/api/opendc/api/v2/projects/projectId/portfolios/test_endpoint.py diff --git a/web-server/opendc/api/v2/projects/projectId/test_endpoint.py b/api/opendc/api/v2/projects/projectId/test_endpoint.py index 7a862e8d..7a862e8d 100644 --- a/web-server/opendc/api/v2/projects/projectId/test_endpoint.py +++ b/api/opendc/api/v2/projects/projectId/test_endpoint.py diff --git a/web-server/opendc/api/v2/scenarios/scenarioId/__init__.py b/api/opendc/api/v2/projects/projectId/topologies/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/scenarios/scenarioId/__init__.py +++ b/api/opendc/api/v2/projects/projectId/topologies/__init__.py diff --git a/web-server/opendc/api/v2/projects/projectId/topologies/endpoint.py b/api/opendc/api/v2/projects/projectId/topologies/endpoint.py index 211dc15d..211dc15d 100644 --- a/web-server/opendc/api/v2/projects/projectId/topologies/endpoint.py +++ b/api/opendc/api/v2/projects/projectId/topologies/endpoint.py diff --git a/web-server/opendc/api/v2/projects/projectId/topologies/test_endpoint.py b/api/opendc/api/v2/projects/projectId/topologies/test_endpoint.py index ca123a73..ca123a73 100644 --- a/web-server/opendc/api/v2/projects/projectId/topologies/test_endpoint.py +++ b/api/opendc/api/v2/projects/projectId/topologies/test_endpoint.py diff --git a/web-server/opendc/api/v2/projects/test_endpoint.py b/api/opendc/api/v2/projects/test_endpoint.py index a50735b0..a50735b0 100644 --- a/web-server/opendc/api/v2/projects/test_endpoint.py +++ b/api/opendc/api/v2/projects/test_endpoint.py diff --git a/web-server/opendc/api/v2/schedulers/__init__.py b/api/opendc/api/v2/scenarios/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/schedulers/__init__.py +++ b/api/opendc/api/v2/scenarios/__init__.py diff --git a/web-server/opendc/api/v2/topologies/__init__.py b/api/opendc/api/v2/scenarios/scenarioId/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/topologies/__init__.py +++ b/api/opendc/api/v2/scenarios/scenarioId/__init__.py diff --git a/web-server/opendc/api/v2/scenarios/scenarioId/endpoint.py b/api/opendc/api/v2/scenarios/scenarioId/endpoint.py index 02d39063..02d39063 100644 --- a/web-server/opendc/api/v2/scenarios/scenarioId/endpoint.py +++ b/api/opendc/api/v2/scenarios/scenarioId/endpoint.py diff --git a/web-server/opendc/api/v2/scenarios/scenarioId/test_endpoint.py b/api/opendc/api/v2/scenarios/scenarioId/test_endpoint.py index 09b7d0c0..c3be0215 100644 --- a/web-server/opendc/api/v2/scenarios/scenarioId/test_endpoint.py +++ b/api/opendc/api/v2/scenarios/scenarioId/test_endpoint.py @@ -19,6 +19,7 @@ def test_get_scenario_not_authorized(client, mocker): mocker.patch.object(DB, 'fetch_one', return_value={ + 'projectId': '1', 'portfolioId': '1', '_id': '1', 'authorizations': [{ @@ -34,6 +35,7 @@ def 
test_get_scenario(client, mocker): mocker.patch.object(DB, 'fetch_one', return_value={ + 'projectId': '1', 'portfolioId': '1', '_id': '1', 'authorizations': [{ @@ -63,6 +65,7 @@ def test_update_scenario_not_authorized(client, mocker): 'fetch_one', return_value={ '_id': '1', + 'projectId': '1', 'portfolioId': '1', 'authorizations': [{ 'projectId': '1', @@ -82,6 +85,7 @@ def test_update_scenario(client, mocker): 'fetch_one', return_value={ '_id': '1', + 'projectId': '1', 'portfolioId': '1', 'authorizations': [{ 'projectId': '1', @@ -110,6 +114,7 @@ def test_delete_project_different_user(client, mocker): 'fetch_one', return_value={ '_id': '1', + 'projectId': '1', 'portfolioId': '1', 'googleId': 'other_test', 'authorizations': [{ @@ -126,6 +131,7 @@ def test_delete_project(client, mocker): 'fetch_one', return_value={ '_id': '1', + 'projectId': '1', 'portfolioId': '1', 'googleId': 'test', 'scenarioIds': ['1'], diff --git a/web-server/opendc/api/v2/topologies/topologyId/__init__.py b/api/opendc/api/v2/schedulers/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/topologies/topologyId/__init__.py +++ b/api/opendc/api/v2/schedulers/__init__.py diff --git a/web-server/opendc/api/v2/schedulers/endpoint.py b/api/opendc/api/v2/schedulers/endpoint.py index a96fdd88..127b5f1a 100644 --- a/web-server/opendc/api/v2/schedulers/endpoint.py +++ b/api/opendc/api/v2/schedulers/endpoint.py @@ -1,6 +1,6 @@ from opendc.util.rest import Response -SCHEDULERS = ['DEFAULT'] +SCHEDULERS = ['core-mem'] def GET(_): diff --git a/web-server/opendc/api/v2/schedulers/test_endpoint.py b/api/opendc/api/v2/schedulers/test_endpoint.py index a0bd8758..a0bd8758 100644 --- a/web-server/opendc/api/v2/schedulers/test_endpoint.py +++ b/api/opendc/api/v2/schedulers/test_endpoint.py diff --git a/web-server/opendc/api/v2/traces/__init__.py b/api/opendc/api/v2/topologies/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/traces/__init__.py +++ b/api/opendc/api/v2/topologies/__init__.py diff --git a/web-server/opendc/api/v2/traces/traceId/__init__.py b/api/opendc/api/v2/topologies/topologyId/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/traces/traceId/__init__.py +++ b/api/opendc/api/v2/topologies/topologyId/__init__.py diff --git a/web-server/opendc/api/v2/topologies/topologyId/endpoint.py b/api/opendc/api/v2/topologies/topologyId/endpoint.py index 512b050a..512b050a 100644 --- a/web-server/opendc/api/v2/topologies/topologyId/endpoint.py +++ b/api/opendc/api/v2/topologies/topologyId/endpoint.py diff --git a/web-server/opendc/api/v2/topologies/topologyId/test_endpoint.py b/api/opendc/api/v2/topologies/topologyId/test_endpoint.py index b25cb798..b25cb798 100644 --- a/web-server/opendc/api/v2/topologies/topologyId/test_endpoint.py +++ b/api/opendc/api/v2/topologies/topologyId/test_endpoint.py diff --git a/web-server/opendc/api/v2/users/__init__.py b/api/opendc/api/v2/traces/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/users/__init__.py +++ b/api/opendc/api/v2/traces/__init__.py diff --git a/web-server/opendc/api/v2/traces/endpoint.py b/api/opendc/api/v2/traces/endpoint.py index ee699e02..ee699e02 100644 --- a/web-server/opendc/api/v2/traces/endpoint.py +++ b/api/opendc/api/v2/traces/endpoint.py diff --git a/web-server/opendc/api/v2/traces/test_endpoint.py b/api/opendc/api/v2/traces/test_endpoint.py index 9f806085..9f806085 100644 --- a/web-server/opendc/api/v2/traces/test_endpoint.py +++ b/api/opendc/api/v2/traces/test_endpoint.py diff 
--git a/web-server/opendc/api/v2/users/userId/__init__.py b/api/opendc/api/v2/traces/traceId/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/api/v2/users/userId/__init__.py +++ b/api/opendc/api/v2/traces/traceId/__init__.py diff --git a/web-server/opendc/api/v2/traces/traceId/endpoint.py b/api/opendc/api/v2/traces/traceId/endpoint.py index 670f88d1..670f88d1 100644 --- a/web-server/opendc/api/v2/traces/traceId/endpoint.py +++ b/api/opendc/api/v2/traces/traceId/endpoint.py diff --git a/web-server/opendc/api/v2/traces/traceId/test_endpoint.py b/api/opendc/api/v2/traces/traceId/test_endpoint.py index 56792ca9..56792ca9 100644 --- a/web-server/opendc/api/v2/traces/traceId/test_endpoint.py +++ b/api/opendc/api/v2/traces/traceId/test_endpoint.py diff --git a/web-server/opendc/models/__init__.py b/api/opendc/api/v2/users/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/models/__init__.py +++ b/api/opendc/api/v2/users/__init__.py diff --git a/web-server/opendc/api/v2/users/endpoint.py b/api/opendc/api/v2/users/endpoint.py index 0dcf2463..0dcf2463 100644 --- a/web-server/opendc/api/v2/users/endpoint.py +++ b/api/opendc/api/v2/users/endpoint.py diff --git a/web-server/opendc/api/v2/users/test_endpoint.py b/api/opendc/api/v2/users/test_endpoint.py index d60429b3..d60429b3 100644 --- a/web-server/opendc/api/v2/users/test_endpoint.py +++ b/api/opendc/api/v2/users/test_endpoint.py diff --git a/web-server/opendc/util/__init__.py b/api/opendc/api/v2/users/userId/__init__.py index e69de29b..e69de29b 100644 --- a/web-server/opendc/util/__init__.py +++ b/api/opendc/api/v2/users/userId/__init__.py diff --git a/web-server/opendc/api/v2/users/userId/endpoint.py b/api/opendc/api/v2/users/userId/endpoint.py index be3462c0..be3462c0 100644 --- a/web-server/opendc/api/v2/users/userId/endpoint.py +++ b/api/opendc/api/v2/users/userId/endpoint.py diff --git a/web-server/opendc/api/v2/users/userId/test_endpoint.py b/api/opendc/api/v2/users/userId/test_endpoint.py index cdff2229..cdff2229 100644 --- a/web-server/opendc/api/v2/users/userId/test_endpoint.py +++ b/api/opendc/api/v2/users/userId/test_endpoint.py diff --git a/api/opendc/models/__init__.py b/api/opendc/models/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/api/opendc/models/__init__.py diff --git a/web-server/opendc/models/model.py b/api/opendc/models/model.py index bcb833ae..bcb833ae 100644 --- a/web-server/opendc/models/model.py +++ b/api/opendc/models/model.py diff --git a/web-server/opendc/models/portfolio.py b/api/opendc/models/portfolio.py index 32961b63..32961b63 100644 --- a/web-server/opendc/models/portfolio.py +++ b/api/opendc/models/portfolio.py diff --git a/web-server/opendc/models/prefab.py b/api/opendc/models/prefab.py index 70910c4a..70910c4a 100644 --- a/web-server/opendc/models/prefab.py +++ b/api/opendc/models/prefab.py diff --git a/web-server/opendc/models/project.py b/api/opendc/models/project.py index b57e9f77..b57e9f77 100644 --- a/web-server/opendc/models/project.py +++ b/api/opendc/models/project.py diff --git a/web-server/opendc/models/scenario.py b/api/opendc/models/scenario.py index 8d53e408..8d53e408 100644 --- a/web-server/opendc/models/scenario.py +++ b/api/opendc/models/scenario.py diff --git a/web-server/opendc/models/topology.py b/api/opendc/models/topology.py index cb4c4bab..cb4c4bab 100644 --- a/web-server/opendc/models/topology.py +++ b/api/opendc/models/topology.py diff --git a/web-server/opendc/models/trace.py b/api/opendc/models/trace.py index 
2f6e4926..2f6e4926 100644 --- a/web-server/opendc/models/trace.py +++ b/api/opendc/models/trace.py
diff --git a/web-server/opendc/models/user.py b/api/opendc/models/user.py index 8e8ff945..8e8ff945 100644 --- a/web-server/opendc/models/user.py +++ b/api/opendc/models/user.py
diff --git a/api/opendc/util/__init__.py b/api/opendc/util/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/api/opendc/util/__init__.py
diff --git a/web-server/opendc/util/database.py b/api/opendc/util/database.py index 80cdcbab..80cdcbab 100644 --- a/web-server/opendc/util/database.py +++ b/api/opendc/util/database.py
diff --git a/web-server/opendc/util/exceptions.py b/api/opendc/util/exceptions.py index 7724a407..7724a407 100644 --- a/web-server/opendc/util/exceptions.py +++ b/api/opendc/util/exceptions.py
diff --git a/web-server/opendc/util/parameter_checker.py b/api/opendc/util/parameter_checker.py index 14dd1dc0..14dd1dc0 100644 --- a/web-server/opendc/util/parameter_checker.py +++ b/api/opendc/util/parameter_checker.py
diff --git a/web-server/opendc/util/path_parser.py b/api/opendc/util/path_parser.py index a8bbdeba..a8bbdeba 100644 --- a/web-server/opendc/util/path_parser.py +++ b/api/opendc/util/path_parser.py
diff --git a/web-server/opendc/util/rest.py b/api/opendc/util/rest.py index abd2f3de..abd2f3de 100644 --- a/web-server/opendc/util/rest.py +++ b/api/opendc/util/rest.py
diff --git a/web-server/pytest.ini b/api/pytest.ini index 775a8ff4..775a8ff4 100644 --- a/web-server/pytest.ini +++ b/api/pytest.ini
diff --git a/web-server/requirements.txt b/api/requirements.txt index 140a046f..140a046f 100644 --- a/web-server/requirements.txt +++ b/api/requirements.txt
diff --git a/web-server/static/index.html b/api/static/index.html index ac78cbfb..ac78cbfb 100644 --- a/web-server/static/index.html +++ b/api/static/index.html
diff --git a/database/mongo-init-opendc-db.sh b/database/mongo-init-opendc-db.sh index 44fa75a3..3a4c4e9b 100644 --- a/database/mongo-init-opendc-db.sh +++ b/database/mongo-init-opendc-db.sh
@@ -21,6 +21,20 @@ $MONGO_CMD --eval 'db.createCollection("scenarios");' $MONGO_CMD --eval 'db.createCollection("traces");' $MONGO_CMD --eval 'db.createCollection("prefabs");' +echo 'Loading default traces' + +$MONGO_CMD --eval 'db.traces.update( + {"_id": "bitbrains-small"}, + { + "$set": { + "_id": "bitbrains-small", + "name": "bitbrains-small", + "type": "VM", + } + }, + {"upsert": true} +);' + echo 'Loading test data' $MONGO_CMD --eval 'db.prefabs.insertOne(
diff --git a/docker-compose.yml b/docker-compose.yml index 6338e3d0..c3e62317 100644 --- a/docker-compose.yml +++ b/docker-compose.yml
@@ -1,13 +1,28 @@ -version: "3" +version: "3.8" services: frontend: - build: ./ + build: + context: ./frontend + args: + - REACT_APP_OAUTH_CLIENT_ID=${OPENDC_OAUTH_CLIENT_ID} image: frontend restart: on-failure + networks: + - backend + depends_on: + - api ports: - - "8081:8081" - links: - - mongo + - "8081:80" + + api: + build: ./api + image: api + restart: on-failure + # Comment out these 2 lines for deployment + ports: + - "8082:8081" + networks: + - backend depends_on: - mongo environment:
@@ -20,34 +35,34 @@ services: - OPENDC_DB_HOST=mongo - OPENDC_FLASK_SECRET - OPENDC_OAUTH_CLIENT_ID - - REACT_APP_OAUTH_CLIENT_ID=${OPENDC_OAUTH_CLIENT_ID} - OPENDC_ROOT_DIR - OPENDC_SERVER_BASE_URL -# TODO: Implement new database interaction on the simulator side -# simulator: -# build: -# context: ./opendc-simulator -# dockerfile: opendc-model-odc/setup/Dockerfile -# image: simulator -# restart: on-failure -# links: -# - mongo -# depends_on: -# - mongo -# environment: -# - PERSISTENCE_URL=jdbc:mysql://mariadb:3306/opendc -# - PERSISTENCE_USER=opendc -# - PERSISTENCE_PASSWORD=opendcpassword -# - COLLECT_MACHINE_STATES=ON -# - COLLECT_TASK_STATES=ON -# - COLLECT_STAGE_MEASUREMENTS=OFF -# - COLLECT_TASK_METRICS=OFF -# - COLLECT_JOB_METRICS=OFF + simulator: + build: ./simulator + image: simulator + restart: on-failure + networks: + - backend + depends_on: + - mongo + volumes: + - type: bind + source: ./traces + target: /app/traces + - type: volume + source: results-volume + target: /results + environment: + - OPENDC_DB + - OPENDC_DB_USERNAME + - OPENDC_DB_PASSWORD + - OPENDC_DB_HOST=mongo + - OPENDC_OUTPUT=/results mongo: build: - context: database + context: database restart: on-failure environment: - MONGO_INITDB_ROOT_USERNAME
@@ -56,26 +71,30 @@ - OPENDC_DB - OPENDC_DB_USERNAME - OPENDC_DB_PASSWORD + networks: + - backend # Comment out for public deployment ports: - - 27017:27017 - # Uncomment for persistent deployment -  #volumes: - # - mongo-volume:/data/db + - "27017:27017" + volumes: + - mongo-volume:/data/db mongo-express: image: mongo-express restart: on-failure - links: - - mongo + networks: + - backend depends_on: - - mongo + - mongo ports: - - 8082:8081 + - "8083:8081" environment: ME_CONFIG_MONGODB_ADMINUSERNAME: "${MONGO_INITDB_ROOT_USERNAME}" ME_CONFIG_MONGODB_ADMINPASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}" volumes: - mongo-volume: - external: false + mongo-volume: + results-volume: + +networks: + backend: {}
diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 00000000..36e3c20b --- /dev/null +++ b/frontend/Dockerfile
@@ -0,0 +1,19 @@ +FROM node:14 +MAINTAINER OpenDC Maintainers <opendc@atlarge-research.com> + +ARG REACT_APP_OAUTH_CLIENT_ID + +# Copy OpenDC directory +COPY ./ /opendc + +# Build frontend +RUN cd /opendc/ \ + && rm -rf ./build \ + && yarn \ + && export REACT_APP_OAUTH_CLIENT_ID=$REACT_APP_OAUTH_CLIENT_ID \ + && yarn build + +# Setup nginx to serve the frontend +FROM nginx:1.19 +COPY --from=0 /opendc/build /usr/share/nginx/html +COPY nginx.conf /etc/nginx/conf.d/default.conf
diff --git a/frontend/nginx.conf b/frontend/nginx.conf new file mode 100644 index 00000000..ed7e5cfe --- /dev/null +++ b/frontend/nginx.conf
@@ -0,0 +1,32 @@ +server { + listen 80; + server_name opendc.org; + + location / { + root /usr/share/nginx/html; + index index.html index.htm; + try_files $uri $uri/ /index.html; + } + + location /socket.io { + proxy_http_version 1.1; + + proxy_buffering off; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; + proxy_pass http://api:8081/socket.io; + } + + location /tokensignin { + proxy_pass http://api:8081/tokensignin; + } + + location /api { + proxy_pass http://api:8081/api; + } + + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + } +}
diff --git a/frontend/package.json b/frontend/package.json index f5ade772..174b2f39 100644 --- a/frontend/package.json +++ b/frontend/package.json
@@ -16,7 +16,7 @@ "author": "Georgios Andreadis <g.andreadis@atlarge-research.com> (https://gandreadis.com/)", "license": "MIT", "private": true, - "proxy": "http://localhost:8081", + "proxy": "http://localhost:8082", "dependencies": { "approximate-number": "~2.0.0", "classnames": "~2.2.5",
diff --git a/frontend/src/api/socket.js b/frontend/src/api/socket.js index 93ce8fa8..759c119e 100644 --- a/frontend/src/api/socket.js +++ b/frontend/src/api/socket.js
@@ -6,11 +6,9 @@ let requestIdCounter = 0 const callbacks = {} export function setupSocketConnection(onConnect) { - let port = window.location.port - if (process.env.NODE_ENV !== 'production') { - port = 8081 - } - socket = io.connect(window.location.protocol + '//' + window.location.hostname + ':' + port) + const apiUrl = process.env.REACT_APP_API_URL || window.location.hostname + ':' + window.location.port; + + socket = io.connect(window.location.protocol + '//' + apiUrl); socket.on('connect', onConnect) socket.on('response', onSocketResponse) } diff --git a/frontend/src/shapes/index.js b/frontend/src/shapes/index.js index 32914f25..8296055a 100644 --- a/frontend/src/shapes/index.js +++ b/frontend/src/shapes/index.js @@ -111,7 +111,9 @@ Shapes.Scenario = PropTypes.shape({ _id: PropTypes.string.isRequired, portfolioId: PropTypes.string.isRequired, name: PropTypes.string.isRequired, - simulationState: PropTypes.string.isRequired, + simulation: PropTypes.shape({ + state: PropTypes.string.isRequired, + }).isRequired, trace: PropTypes.shape({ traceId: PropTypes.string.isRequired, trace: Shapes.Trace, diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 00c6e441..2859e4e0 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -11527,7 +11527,7 @@ uuid@^3.0.1, uuid@^3.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuidv4@^6.1.1: +uuidv4@~6.1.1: version "6.1.1" resolved "https://registry.yarnpkg.com/uuidv4/-/uuidv4-6.1.1.tgz#6565b4f2be7d6f841c14106f420fdb701eae5c81" integrity sha512-ZplGb1SHFMVH3l7PUQl2Uwo+FpJQV6IPOoU+MjjbqrNYQolqbGwv+/sn9F+AGMsMOgGz3r9JN3ztGUi0VzMxmw== diff --git a/opendc-api-spec.yml b/opendc-api-spec.yml index 39cb4f1c..3009ab03 100644 --- a/opendc-api-spec.yml +++ b/opendc-api-spec.yml @@ -1,918 +1,921 @@ swagger: '2.0' info: - version: 1.0.0 - title: OpenDC API - description: 'OpenDC is an open-source datacenter simulator for education, featuring real-time online collaboration, diverse simulation models, and detailed performance feedback statistics.' + version: 1.0.0 + title: OpenDC API + description: 'OpenDC is an open-source datacenter simulator for education, featuring real-time online collaboration, diverse simulation models, and detailed performance feedback statistics.' host: opendc.org basePath: /v2 schemes: - - https + - https paths: - '/users': - get: - tags: - - users - description: Search for a User using their email address. - parameters: - - name: email - in: query - description: User's email address. - required: true - type: string - responses: - '200': - description: Successfully searched Users. - schema: - $ref: '#/definitions/User' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '404': - description: User not found. - post: - tags: - - users - description: Add a new User. - parameters: - - name: user - in: body - description: The new User. - required: true - schema: - $ref: '#/definitions/User' - responses: - '200': - description: Successfully added User. - schema: - $ref: '#/definitions/User' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '409': - description: User already exists. - '/users/{userId}': - get: - tags: - - users - description: Get this User. - parameters: - - name: userId - in: path - description: User's ID. 
- required: true - type: string - responses: - '200': - description: Successfully retrieved User. - schema: - $ref: '#/definitions/User' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '404': - description: User not found. - put: - tags: - - users - description: Update this User's given name and/ or family name. - parameters: - - name: userId - in: path - description: User's ID. - required: true - type: string - - name: user - in: body - description: User's new properties. - required: true - schema: - properties: - givenName: - type: string - familyName: - type: string - responses: - '200': - description: Successfully updated User. - schema: - $ref: '#/definitions/User' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from updating User. - '404': - description: User not found. - delete: - tags: - - users - description: Delete this User. - parameters: - - name: userId - in: path - description: User's ID. - required: true - type: string - responses: - '200': - description: Successfully deleted User. - schema: - $ref: '#/definitions/User' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from deleting User. - '404': - description: User not found. - '/projects': - post: - tags: - - projects - description: Add a Project. - parameters: - - name: project - in: body - description: The new Project. - required: true - schema: - properties: - name: - type: string - responses: - '200': - description: Successfully added Project. - schema: - $ref: '#/definitions/Project' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '/projects/{projectId}': - get: - tags: - - projects - description: Get this Project. - parameters: - - name: projectId - in: path - description: Project's ID. - required: true - type: string - responses: - '200': - description: Successfully retrieved Project. - schema: - $ref: '#/definitions/Project' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from retrieving Project. - '404': - description: Project not found - put: - tags: - - projects - description: Update this Project. - parameters: - - name: projectId - in: path - description: Project's ID. - required: true - type: string - - name: project - in: body - description: Project's new properties. - required: true - schema: - properties: - project: - $ref: '#/definitions/Project' - responses: - '200': - description: Successfully updated Project. - schema: - $ref: '#/definitions/Project' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from updating Project. - '404': - description: Project not found. - delete: - tags: - - projects - description: Delete this project. - parameters: - - name: projectId - in: path - description: Project's ID. - required: true - type: string - responses: - '200': - description: Successfully deleted Project. - schema: - $ref: '#/definitions/Project' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from deleting Project. - '404': - description: Project not found. 
- '/projects/{projectId}/authorizations': - get: - tags: - - projects - description: Get this Project's Authorizations. - parameters: - - name: projectId - in: path - description: Project's ID. - required: true - type: string - responses: - '200': - description: Successfully retrieved Project's Authorizations. - schema: - type: array - items: - type: object - properties: - userId: - type: string - projectId: - type: string - authorizationLevel: - type: string - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from retrieving this Project's Authorizations. - '404': - description: Project not found. - '/projects/{projectId}/topologies': - post: - tags: - - projects - description: Add a Topology. - parameters: - - name: projectId - in: path - description: Project's ID. - required: true - type: string - - name: topology - in: body - description: The new Topology. - required: true - schema: - properties: - topology: - $ref: '#/definitions/Topology' - responses: - '200': - description: Successfully added Topology. - schema: - $ref: '#/definitions/Topology' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '/projects/{projectId}/portfolios': - post: - tags: - - portfolios - description: Add a Portfolio. - parameters: - - name: projectId - in: path - description: Project's ID. - required: true - type: string - - name: portfolio - in: body - description: The new Portfolio. - required: true - schema: - properties: - topology: - $ref: '#/definitions/Portfolio' - responses: - '200': - description: Successfully added Portfolio. - schema: - $ref: '#/definitions/Portfolio' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '/topologies/{topologyId}': - get: - tags: - - topologies - description: Get this Topology. - parameters: - - name: topologyId - in: path - description: Topology's ID. - required: true - type: string - responses: - '200': - description: Successfully retrieved Topology. - schema: - $ref: '#/definitions/Topology' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from retrieving Topology. - '404': - description: Topology not found. - put: - tags: - - topologies - description: Update this Topology's name. - parameters: - - name: topologyId - in: path - description: Topology's ID. - required: true - type: string - - name: topology - in: body - description: Topology's new properties. - required: true - schema: - properties: - topology: - $ref: '#/definitions/Topology' - responses: - '200': - description: Successfully updated Topology. - schema: - $ref: '#/definitions/Topology' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from updating Topology. - '404': - description: Topology not found. - delete: - tags: - - topologies - description: Delete this Topology. - parameters: - - name: topologyId - in: path - description: Topology's ID. - required: true - type: string - responses: - '200': - description: Successfully deleted Topology. - schema: - $ref: '#/definitions/Topology' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from deleting Topology. - '404': - description: Topology not found. 
- '/portfolios/{portfolioId}': - get: - tags: - - portfolios - description: Get this Portfolio. - parameters: - - name: portfolioId - in: path - description: Portfolio's ID. - required: true - type: string - responses: - '200': - description: Successfully retrieved Portfolio. - schema: - $ref: '#/definitions/Portfolio' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from retrieving Portfolio. - '404': - description: Portfolio not found. - put: - tags: - - portfolios - description: "Update this Portfolio." - parameters: - - name: portfolioId - in: path - description: Portfolio's ID. - required: true - type: string - - name: portfolio - in: body - description: Portfolio's new properties. - required: true - schema: - $ref: '#/definitions/Portfolio' - responses: - '200': - description: Successfully updated Portfolio. - schema: - $ref: '#/definitions/Portfolio' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from updating Portfolio. - '404': - description: 'Portfolio not found.' - delete: - tags: - - portfolios - description: Delete this Portfolio. - parameters: - - name: portfolioId - in: path - description: Portfolio's ID. - required: true - type: string - responses: - '200': - description: Successfully deleted Portfolio. - schema: - $ref: '#/definitions/Portfolio' - '401': - description: Unauthorized. - '403': - description: Forbidden from deleting Portfolio. - '404': - description: Portfolio not found. - '/scenarios/{scenarioId}': - get: - tags: - - scenarios - description: Get this Scenario. - parameters: - - name: scenarioId - in: path - description: Scenario's ID. - required: true - type: string - responses: - '200': - description: Successfully retrieved Scenario. - schema: - $ref: '#/definitions/Scenario' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from retrieving Scenario. - '404': - description: Scenario not found. - put: - tags: - - scenarios - description: "Update this Scenario's name (other properties are read-only)." - parameters: - - name: scenarioId - in: path - description: Scenario's ID. - required: true - type: string - - name: scenario - in: body - description: Scenario with new name. - required: true - schema: - $ref: '#/definitions/Scenario' - responses: - '200': - description: Successfully updated Scenario. - schema: - $ref: '#/definitions/Scenario' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from updating Scenario. - '404': - description: 'Scenario not found.' - delete: - tags: - - scenarios - description: Delete this Scenario. - parameters: - - name: scenarioId - in: path - description: Scenario's ID. - required: true - type: string - responses: - '200': - description: Successfully deleted Scenario. - schema: - $ref: '#/definitions/Scenario' - '401': - description: Unauthorized. - '403': - description: Forbidden from deleting Scenario. - '404': - description: Scenario not found. - /schedulers: - get: - tags: - - simulation - description: Get all available Schedulers - responses: - '200': - description: Successfully retrieved Schedulers. - schema: - type: array - items: - $ref: '#/definitions/Scheduler' - '401': - description: Unauthorized. 
- /traces: - get: - tags: - - simulation - description: Get all available Traces (non-populated). - responses: - '200': - description: Successfully retrieved Traces (non-populated). - schema: - type: array - items: - type: object - properties: - _id: - type: string - name: - type: string - '401': - description: Unauthorized. - '/traces/{traceId}': - get: - tags: - - simulation - description: Get this Trace. - parameters: - - name: traceId - in: path - description: Trace's ID. - required: true - type: string - responses: - '200': - description: Successfully retrieved Trace. - schema: - $ref: '#/definitions/Trace' - '401': - description: Unauthorized. - '404': - description: Trace not found. - /prefabs: - post: - tags: - - prefabs - description: Add a Prefab. - parameters: - - name: prefab - in: body - description: The new Prefab. - required: true - schema: - properties: - name: - type: string - responses: - '200': - description: Successfully added Prefab. - schema: - $ref: '#/definitions/Prefab' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '/prefabs/{prefabId}': - get: - tags: - - prefabs - description: Get this Prefab. - parameters: - - name: prefabId - in: path - description: Prefab's ID. - required: true - type: string - responses: - '200': - description: Successfully retrieved Prefab. - schema: - $ref: '#/definitions/Prefab' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from retrieving Prefab. - '404': - description: Prefab not found - put: - tags: - - prefabs - description: Update this Prefab. - parameters: - - name: prefabId - in: path - description: Prefab's ID. - required: true - type: string - - name: prefab - in: body - description: Prefab's new properties. - required: true - schema: - properties: - project: - $ref: '#/definitions/Prefab' - responses: - '200': - description: Successfully updated Prefab. - schema: - $ref: '#/definitions/Prefab' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from updating Prefab. - '404': - description: Prefab not found. - delete: - tags: - - prefabs - description: Delete this prefab. - parameters: - - name: prefabId - in: path - description: Prefab's ID. - required: true - type: string - responses: - '200': - description: Successfully deleted Prefab. - schema: - $ref: '#/definitions/Prefab' - '400': - description: Missing or incorrectly typed parameter. - '401': - description: Unauthorized. - '403': - description: Forbidden from deleting Prefab. - '404': - description: Prefab not found. - -definitions: - Prefab: - type: object - properties: - _id: - type: string - name: + '/users': + get: + tags: + - users + description: Search for a User using their email address. + parameters: + - name: email + in: query + description: User's email address. + required: true + type: string + responses: + '200': + description: Successfully searched Users. + schema: + $ref: '#/definitions/User' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '404': + description: User not found. + post: + tags: + - users + description: Add a new User. + parameters: + - name: user + in: body + description: The new User. + required: true + schema: + $ref: '#/definitions/User' + responses: + '200': + description: Successfully added User. 
+ schema: + $ref: '#/definitions/User' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '409': + description: User already exists. + '/users/{userId}': + get: + tags: + - users + description: Get this User. + parameters: + - name: userId + in: path + description: User's ID. + required: true + type: string + responses: + '200': + description: Successfully retrieved User. + schema: + $ref: '#/definitions/User' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '404': + description: User not found. + put: + tags: + - users + description: Update this User's given name and/ or family name. + parameters: + - name: userId + in: path + description: User's ID. + required: true + type: string + - name: user + in: body + description: User's new properties. + required: true + schema: + properties: + givenName: type: string - datetimeCreated: + familyName: type: string - format: dateTime - datetimeLastEdited: + responses: + '200': + description: Successfully updated User. + schema: + $ref: '#/definitions/User' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from updating User. + '404': + description: User not found. + delete: + tags: + - users + description: Delete this User. + parameters: + - name: userId + in: path + description: User's ID. + required: true + type: string + responses: + '200': + description: Successfully deleted User. + schema: + $ref: '#/definitions/User' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from deleting User. + '404': + description: User not found. + '/projects': + post: + tags: + - projects + description: Add a Project. + parameters: + - name: project + in: body + description: The new Project. + required: true + schema: + properties: + name: type: string - format: dateTime - Scheduler: - type: object - properties: - name: + responses: + '200': + description: Successfully added Project. + schema: + $ref: '#/definitions/Project' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '/projects/{projectId}': + get: + tags: + - projects + description: Get this Project. + parameters: + - name: projectId + in: path + description: Project's ID. + required: true + type: string + responses: + '200': + description: Successfully retrieved Project. + schema: + $ref: '#/definitions/Project' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from retrieving Project. + '404': + description: Project not found + put: + tags: + - projects + description: Update this Project. + parameters: + - name: projectId + in: path + description: Project's ID. + required: true + type: string + - name: project + in: body + description: Project's new properties. + required: true + schema: + properties: + project: + $ref: '#/definitions/Project' + responses: + '200': + description: Successfully updated Project. + schema: + $ref: '#/definitions/Project' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from updating Project. + '404': + description: Project not found. + delete: + tags: + - projects + description: Delete this project. + parameters: + - name: projectId + in: path + description: Project's ID. 
+          required: true
+          type: string
+      responses:
+        '200':
+          description: Successfully deleted Project.
+          schema:
+            $ref: '#/definitions/Project'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+        '403':
+          description: Forbidden from deleting Project.
+        '404':
+          description: Project not found.
+  '/projects/{projectId}/authorizations':
+    get:
+      tags:
+        - projects
+      description: Get this Project's Authorizations.
+      parameters:
+        - name: projectId
+          in: path
+          description: Project's ID.
+          required: true
+          type: string
+      responses:
+        '200':
+          description: Successfully retrieved Project's Authorizations.
+          schema:
+            type: array
+            items:
+              type: object
+              properties:
+                userId:
+                  type: string
+                projectId:
+                  type: string
+                authorizationLevel:
+                  type: string
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+        '403':
+          description: Forbidden from retrieving this Project's Authorizations.
+        '404':
+          description: Project not found.
+  '/projects/{projectId}/topologies':
+    post:
+      tags:
+        - projects
+      description: Add a Topology.
+      parameters:
+        - name: projectId
+          in: path
+          description: Project's ID.
+          required: true
+          type: string
+        - name: topology
+          in: body
+          description: The new Topology.
+          required: true
+          schema:
+            properties:
+              topology:
+                $ref: '#/definitions/Topology'
+      responses:
+        '200':
+          description: Successfully added Topology.
+          schema:
+            $ref: '#/definitions/Topology'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+  '/projects/{projectId}/portfolios':
+    post:
+      tags:
+        - portfolios
+      description: Add a Portfolio.
+      parameters:
+        - name: projectId
+          in: path
+          description: Project's ID.
+          required: true
+          type: string
+        - name: portfolio
+          in: body
+          description: The new Portfolio.
+          required: true
+          schema:
+            properties:
+              portfolio:
+                $ref: '#/definitions/Portfolio'
+      responses:
+        '200':
+          description: Successfully added Portfolio.
+          schema:
+            $ref: '#/definitions/Portfolio'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+  '/topologies/{topologyId}':
+    get:
+      tags:
+        - topologies
+      description: Get this Topology.
+      parameters:
+        - name: topologyId
+          in: path
+          description: Topology's ID.
+          required: true
+          type: string
+      responses:
+        '200':
+          description: Successfully retrieved Topology.
+          schema:
+            $ref: '#/definitions/Topology'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+        '403':
+          description: Forbidden from retrieving Topology.
+        '404':
+          description: Topology not found.
+    put:
+      tags:
+        - topologies
+      description: Update this Topology's name.
+      parameters:
+        - name: topologyId
+          in: path
+          description: Topology's ID.
+          required: true
+          type: string
+        - name: topology
+          in: body
+          description: Topology's new properties.
+          required: true
+          schema:
+            properties:
+              topology:
+                $ref: '#/definitions/Topology'
+      responses:
+        '200':
+          description: Successfully updated Topology.
+          schema:
+            $ref: '#/definitions/Topology'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+        '403':
+          description: Forbidden from updating Topology.
+        '404':
+          description: Topology not found.
+    delete:
+      tags:
+        - topologies
+      description: Delete this Topology.
+      parameters:
+        - name: topologyId
+          in: path
+          description: Topology's ID.
+ required: true + type: string + responses: + '200': + description: Successfully deleted Topology. + schema: + $ref: '#/definitions/Topology' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from deleting Topology. + '404': + description: Topology not found. + '/portfolios/{portfolioId}': + get: + tags: + - portfolios + description: Get this Portfolio. + parameters: + - name: portfolioId + in: path + description: Portfolio's ID. + required: true + type: string + responses: + '200': + description: Successfully retrieved Portfolio. + schema: + $ref: '#/definitions/Portfolio' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from retrieving Portfolio. + '404': + description: Portfolio not found. + put: + tags: + - portfolios + description: "Update this Portfolio." + parameters: + - name: portfolioId + in: path + description: Portfolio's ID. + required: true + type: string + - name: portfolio + in: body + description: Portfolio's new properties. + required: true + schema: + $ref: '#/definitions/Portfolio' + responses: + '200': + description: Successfully updated Portfolio. + schema: + $ref: '#/definitions/Portfolio' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from updating Portfolio. + '404': + description: 'Portfolio not found.' + delete: + tags: + - portfolios + description: Delete this Portfolio. + parameters: + - name: portfolioId + in: path + description: Portfolio's ID. + required: true + type: string + responses: + '200': + description: Successfully deleted Portfolio. + schema: + $ref: '#/definitions/Portfolio' + '401': + description: Unauthorized. + '403': + description: Forbidden from deleting Portfolio. + '404': + description: Portfolio not found. + '/scenarios/{scenarioId}': + get: + tags: + - scenarios + description: Get this Scenario. + parameters: + - name: scenarioId + in: path + description: Scenario's ID. + required: true + type: string + responses: + '200': + description: Successfully retrieved Scenario. + schema: + $ref: '#/definitions/Scenario' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from retrieving Scenario. + '404': + description: Scenario not found. + put: + tags: + - scenarios + description: "Update this Scenario's name (other properties are read-only)." + parameters: + - name: scenarioId + in: path + description: Scenario's ID. + required: true + type: string + - name: scenario + in: body + description: Scenario with new name. + required: true + schema: + $ref: '#/definitions/Scenario' + responses: + '200': + description: Successfully updated Scenario. + schema: + $ref: '#/definitions/Scenario' + '400': + description: Missing or incorrectly typed parameter. + '401': + description: Unauthorized. + '403': + description: Forbidden from updating Scenario. + '404': + description: 'Scenario not found.' + delete: + tags: + - scenarios + description: Delete this Scenario. + parameters: + - name: scenarioId + in: path + description: Scenario's ID. + required: true + type: string + responses: + '200': + description: Successfully deleted Scenario. + schema: + $ref: '#/definitions/Scenario' + '401': + description: Unauthorized. + '403': + description: Forbidden from deleting Scenario. 
+        '404':
+          description: Scenario not found.
+  /schedulers:
+    get:
+      tags:
+        - simulation
+      description: Get all available Schedulers.
+      responses:
+        '200':
+          description: Successfully retrieved Schedulers.
+          schema:
+            type: array
+            items:
+              $ref: '#/definitions/Scheduler'
+        '401':
+          description: Unauthorized.
+  /traces:
+    get:
+      tags:
+        - simulation
+      description: Get all available Traces (non-populated).
+      responses:
+        '200':
+          description: Successfully retrieved Traces (non-populated).
+          schema:
+            type: array
+            items:
+              type: object
+              properties:
+                _id:
+                  type: string
+                name:
+                  type: string
+        '401':
+          description: Unauthorized.
+  '/traces/{traceId}':
+    get:
+      tags:
+        - simulation
+      description: Get this Trace.
+      parameters:
+        - name: traceId
+          in: path
+          description: Trace's ID.
+          required: true
+          type: string
+      responses:
+        '200':
+          description: Successfully retrieved Trace.
+          schema:
+            $ref: '#/definitions/Trace'
+        '401':
+          description: Unauthorized.
+        '404':
+          description: Trace not found.
+  /prefabs:
+    post:
+      tags:
+        - prefabs
+      description: Add a Prefab.
+      parameters:
+        - name: prefab
+          in: body
+          description: The new Prefab.
+          required: true
+          schema:
+            properties:
+              name:
                type: string
-      Project:
-        type: object
-        properties:
+      responses:
+        '200':
+          description: Successfully added Prefab.
+          schema:
+            $ref: '#/definitions/Prefab'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+  '/prefabs/{prefabId}':
+    get:
+      tags:
+        - prefabs
+      description: Get this Prefab.
+      parameters:
+        - name: prefabId
+          in: path
+          description: Prefab's ID.
+          required: true
+          type: string
+      responses:
+        '200':
+          description: Successfully retrieved Prefab.
+          schema:
+            $ref: '#/definitions/Prefab'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+        '403':
+          description: Forbidden from retrieving Prefab.
+        '404':
+          description: Prefab not found.
+    put:
+      tags:
+        - prefabs
+      description: Update this Prefab.
+      parameters:
+        - name: prefabId
+          in: path
+          description: Prefab's ID.
+          required: true
+          type: string
+        - name: prefab
+          in: body
+          description: Prefab's new properties.
+          required: true
+          schema:
+            properties:
+              prefab:
+                $ref: '#/definitions/Prefab'
+      responses:
+        '200':
+          description: Successfully updated Prefab.
+          schema:
+            $ref: '#/definitions/Prefab'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+        '403':
+          description: Forbidden from updating Prefab.
+        '404':
+          description: Prefab not found.
+    delete:
+      tags:
+        - prefabs
+      description: Delete this Prefab.
+      parameters:
+        - name: prefabId
+          in: path
+          description: Prefab's ID.
+          required: true
+          type: string
+      responses:
+        '200':
+          description: Successfully deleted Prefab.
+          schema:
+            $ref: '#/definitions/Prefab'
+        '400':
+          description: Missing or incorrectly typed parameter.
+        '401':
+          description: Unauthorized.
+        '403':
+          description: Forbidden from deleting Prefab.
+        '404':
+          description: Prefab not found.
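The paths above form a conventional CRUD surface over users, projects, topologies, portfolios, scenarios, schedulers, traces, and prefabs, with a uniform 200/400/401/403/404 response vocabulary. A minimal Kotlin sketch of how a client might call one of these endpoints; the host, path prefix, and bearer-token header are assumptions for illustration, since this excerpt does not fix a server address or security scheme:

    import java.net.URI
    import java.net.http.HttpClient
    import java.net.http.HttpRequest
    import java.net.http.HttpResponse

    // Fetch a single Prefab by ID, mirroring GET /prefabs/{prefabId} above.
    // Hypothetical host and auth header; adjust to the deployed API.
    fun fetchPrefab(prefabId: String, token: String): String {
        val client = HttpClient.newHttpClient()
        val request = HttpRequest.newBuilder()
            .uri(URI.create("https://opendc.example.com/v2/prefabs/$prefabId"))
            .header("Authorization", "Bearer $token")
            .GET()
            .build()
        val response = client.send(request, HttpResponse.BodyHandlers.ofString())
        // 200 carries the Prefab document; 401/403/404 match the responses listed above.
        check(response.statusCode() == 200) { "request failed with ${response.statusCode()}" }
        return response.body()
    }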
+
+definitions:
+  Prefab:
+    type: object
+    properties:
+      _id:
+        type: string
+      name:
+        type: string
+      datetimeCreated:
+        type: string
+        format: dateTime
+      datetimeLastEdited:
+        type: string
+        format: dateTime
+  Scheduler:
+    type: object
+    properties:
+      name:
+        type: string
+  Project:
+    type: object
+    properties:
+      _id:
+        type: string
+      name:
+        type: string
+      datetimeCreated:
+        type: string
+        format: dateTime
+      datetimeLastEdited:
+        type: string
+        format: dateTime
+      topologyIds:
+        type: array
+        items:
+          type: string
+      portfolioIds:
+        type: array
+        items:
+          type: string
+  Topology:
+    type: object
+    properties:
+      _id:
+        type: string
+      projectId:
+        type: string
+      name:
+        type: string
+      rooms:
+        type: array
+        items:
+          type: object
+          properties:
            _id:
-              type: string
+              type: string
            name:
-              type: string
+              type: string
            tiles:
-              type: array
-              items:
+              type: array
+              items:
+                type: object
+                properties:
+                  _id:
                    type: string
-                  positionX:
-                    type: integer
-                  positionY:
-                    type: integer
-                  object:
+                  positionX:
+                    type: integer
+                  positionY:
+                    type: integer
+                  object:
                    type: object
                    properties:
-                      capacity:
-                        type: integer
-                      powerCapacityW:
-                        type: integer
-                      machines:
-                        type: array
-                        items:
+                      capacity:
+                        type: integer
+                      powerCapacityW:
+                        type: integer
+                      machines:
+                        type: array
+                        items:
+                          type: object
+                          properties:
+                            position:
+                              type: integer
+                            cpuItems:
+                              type: array
+                              items:
+                                type: object
+                                properties:
+                                  name:
+                                    type: string
+                                  clockRateMhz:
+                                    type: integer
+                                  numberOfCores:
+                                    type: integer
+                            gpuItems:
+                              type: array
+                              items:
+                                type: object
+                                properties:
+                                  name:
+                                    type: string
+                                  clockRateMhz:
+                                    type: integer
+                                  numberOfCores:
+                                    type: integer
+                            memoryItems:
+                              type: array
+                              items:
                                type: object
                                properties:
-                          _id:
-                            type: string
-                          positionX:
-                            type: integer
-                          positionY:
-                            type: integer
-                          object:
-                            type: object
-                            properties:
-                              capacity:
-                                type: integer
-                              powerCapacityW:
-                                type: integer
-                              machines:
-                                type: array
-                                items:
-                                  type: object
-                                  properties:
-                                    position:
-                                      type: integer
-                                    cpuItems:
-                                      type: array
-                                      items:
-                                        type: object
-                                        properties:
-                                          name:
-                                            type: string
-                                          clockRateMhz:
-                                            type: integer
-                                          numberOfCores:
-                                            type: integer
-                                    gpuItems:
-                                      type: array
-                                      items:
-                                        type: object
-                                        properties:
-                                          name:
-                                            type: string
-                                          clockRateMhz:
-                                            type: integer
-                                          numberOfCores:
-                                            type: integer
-                                    memoryItems:
-                                      type: array
-                                      items:
-                                        type: object
-                                        properties:
-                                          name:
+                                  name:
+                                    type: string
+                                  speedMbPerS:
+                                    type: integer
+                                  sizeMb:
+                                    type: integer
+                            storageItems:
+                              type: array
+                              items:
+                                type: object
+                                properties:
+                                  name:
+                                    type: string
+                                  speedMbPerS:
+                                    type: integer
+                                  sizeMb:
+                                    type: integer
-                                          speedMbPerS:
-                                            type: integer
-                                          sizeMb:
-                                            type: integer
-                                    storageItems:
-                                      type: array
-                                      items:
-                                        type: integer
-                                        properties:
-                                          name:
-                                            type: string
-                                          speedMbPerS:
-                                            type: integer
-                                          sizeMb:
-                                            type: integer
-  Portfolio:
+  Portfolio:
+    type: object
+    properties:
+      _id:
+        type: string
+      projectId:
+        type: string
+      name:
+        type: string
+      scenarioIds:
+        type: array
+        items:
+          type: string
+      targets:
        type: object
        properties:
-      _id:
-        type: string
-      projectId:
-        type: string
-      name:
-        type: string
-      scenarioIds:
-        type: array
-        items:
-          type: string
-      targets:
-        type: object
-        properties:
-          enabledMetrics:
-            type: array
-            items:
-              type: string
-          repeatsPerScenario:
-            type: integer
-  Scenario:
+          enabledMetrics:
+
type: array + items: + type: string + repeatsPerScenario: + type: integer + Scenario: + type: object + properties: + _id: + type: string + portfolioId: + type: string + name: + type: string + simulation: type: object properties: - _id: - type: string - portfolioId: - type: string - name: - type: string - simulationState: - type: string - trace: - type: object - properties: - traceId: - type: string - loadSamplingFraction: - type: number - topology: - type: object - properties: - topologyId: - type: string - operational: - type: object - properties: - failuresEnabled: - type: boolean - performanceInterferenceEnabled: - type: boolean - schedulerName: - type: string - Trace: + state: + type: string + trace: type: object properties: - _id: - type: string - name: - type: string - path: - type: string - type: - type: string - User: + traceId: + type: string + loadSamplingFraction: + type: number + topology: type: object properties: - _id: - type: string - googleId: - type: integer - email: - type: string - givenName: - type: string - familyName: - type: string - authorizations: - type: array - items: - type: object - properties: - projectId: - type: string - authorizationLevel: - type: string + topologyId: + type: string + operational: + type: object + properties: + failuresEnabled: + type: boolean + performanceInterferenceEnabled: + type: boolean + schedulerName: + type: string + Trace: + type: object + properties: + _id: + type: string + name: + type: string + path: + type: string + type: + type: string + User: + type: object + properties: + _id: + type: string + googleId: + type: integer + email: + type: string + givenName: + type: string + familyName: + type: string + authorizations: + type: array + items: + type: object + properties: + projectId: + type: string + authorizationLevel: + type: string diff --git a/simulator/.dockerignore b/simulator/.dockerignore new file mode 100644 index 00000000..816d338c --- /dev/null +++ b/simulator/.dockerignore @@ -0,0 +1,10 @@ +.git + +.idea/ +**/out +*.iml +.idea_modules/ + +.gradle +**/build/ + diff --git a/simulator/.editorconfig b/simulator/.editorconfig index a17544c9..a5584e95 100644 --- a/simulator/.editorconfig +++ b/simulator/.editorconfig @@ -4,4 +4,4 @@ # ktlint [*.{kt, kts}] -disabled_rules = import-ordering +disabled_rules = no-wildcard-imports diff --git a/simulator/.gitattributes b/simulator/.gitattributes deleted file mode 100644 index 12924725..00000000 --- a/simulator/.gitattributes +++ /dev/null @@ -1,7 +0,0 @@ -# https://help.github.com/articles/dealing-with-line-endings/ -# -# These are explicitly windows files and should use crlf -*.bat text eol=crlf - -# See https://github.com/gradle/gradle/issues/12248 -buildSrc/src/main/**/*.gradle.kts text eol=lf diff --git a/simulator/.gitignore b/simulator/.gitignore index 4ec6f778..917f2e6a 100644 --- a/simulator/.gitignore +++ b/simulator/.gitignore @@ -38,7 +38,7 @@ data/ # IntelliJ /out/ .idea/ -*/out +**/out *.iml # mpeltonen/sbt-idea plugin diff --git a/simulator/.gitlab-ci.yml b/simulator/.gitlab-ci.yml deleted file mode 100644 index a095f7e7..00000000 --- a/simulator/.gitlab-ci.yml +++ /dev/null @@ -1,34 +0,0 @@ -image: gradle:6.1-jdk13 - -variables: - GRADLE_OPTS: "-Dorg.gradle.daemon=false" - -before_script: - - export GRADLE_USER_HOME=`pwd`/.gradle - -stages: - - build - - test - -build: - stage: build - script: - - gradle --build-cache assemble - allow_failure: false - cache: - key: "$CI_COMMIT_REF_NAME" - policy: push - paths: - - build - - .gradle - -test: - stage: test - 
script: - - gradle check - cache: - key: "$CI_COMMIT_REF_NAME" - policy: pull - paths: - - build - - .gradle diff --git a/simulator/.travis.yml b/simulator/.travis.yml deleted file mode 100644 index dff5f3a5..00000000 --- a/simulator/.travis.yml +++ /dev/null @@ -1 +0,0 @@ -language: java diff --git a/simulator/Dockerfile b/simulator/Dockerfile new file mode 100644 index 00000000..e42c7f14 --- /dev/null +++ b/simulator/Dockerfile @@ -0,0 +1,31 @@ +FROM openjdk:14-slim AS staging +MAINTAINER OpenDC Maintainers <opendc@atlarge-research.com> + +# Build staging artifacts for dependency caching +COPY ./ /app +WORKDIR /app +RUN mkdir /staging \ + && cp -r buildSrc/ /staging \ + && cp gradle.properties /staging 2>/dev/null | true \ + && find -name "*.gradle.kts" | xargs cp --parents -t /staging + +FROM openjdk:14-slim AS builder + +# Obtain (cache) Gradle wrapper +COPY gradlew /app/ +COPY gradle /app/gradle +WORKDIR /app +RUN ./gradlew --version + +# Install (cache) project dependencies only +COPY --from=staging /staging/ /app/ +RUN ./gradlew clean build --no-daemon > /dev/null 2>&1 || true + +# Build project +COPY ./ /app/ +RUN ./gradlew --no-daemon :opendc:opendc-runner-web:installDist + +FROM openjdk:14-slim +COPY --from=builder /app/opendc/opendc-runner-web/build/install /app +WORKDIR /app +CMD opendc-runner-web/bin/opendc-runner-web diff --git a/simulator/build.gradle.kts b/simulator/build.gradle.kts index 90f43749..4775369b 100644 --- a/simulator/build.gradle.kts +++ b/simulator/build.gradle.kts @@ -30,7 +30,3 @@ allprojects { group = "com.atlarge.opendc" version = "2.0.0" } - -tasks.wrapper { - gradleVersion = "6.0" -} diff --git a/simulator/gradle/wrapper/gradle-wrapper.jar b/simulator/gradle/wrapper/gradle-wrapper.jar Binary files differindex 87b738cb..62d4c053 100644 --- a/simulator/gradle/wrapper/gradle-wrapper.jar +++ b/simulator/gradle/wrapper/gradle-wrapper.jar diff --git a/simulator/gradle/wrapper/gradle-wrapper.properties b/simulator/gradle/wrapper/gradle-wrapper.properties index a4b44297..bb8b2fc2 100644 --- a/simulator/gradle/wrapper/gradle-wrapper.properties +++ b/simulator/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.3-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.5.1-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/simulator/gradlew b/simulator/gradlew index af6708ff..fbd7c515 100755 --- a/simulator/gradlew +++ b/simulator/gradlew @@ -1,5 +1,21 @@ #!/usr/bin/env sh +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + ############################################################################## ## ## Gradle start up script for UN*X @@ -28,7 +44,7 @@ APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"` # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
-DEFAULT_JVM_OPTS='"-Xmx64m"' +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD="maximum" @@ -66,6 +82,7 @@ esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then @@ -109,10 +126,11 @@ if $darwin; then GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" fi -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` # We build the pattern for arguments to be converted via cygpath @@ -138,19 +156,19 @@ if $cygwin ; then else eval `echo args$i`="\"$arg\"" fi - i=$((i+1)) + i=`expr $i + 1` done case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; esac fi @@ -159,14 +177,9 @@ save () { for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done echo " " } -APP_ARGS=$(save "$@") +APP_ARGS=`save "$@"` # Collect all arguments for the java command, following the shell quoting and substitution rules eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi - exec "$JAVACMD" "$@" diff --git a/simulator/gradlew.bat b/simulator/gradlew.bat index 6d57edc7..5093609d 100644 --- a/simulator/gradlew.bat +++ b/simulator/gradlew.bat @@ -1,3 +1,19 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @@ -13,8 +29,11 @@ if "%DIRNAME%" == "" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome @@ -65,6 +84,7 @@ set CMD_LINE_ARGS=%* set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% diff --git a/simulator/odcsim/odcsim-api/src/main/kotlin/com/atlarge/odcsim/flow/EventFlow.kt b/simulator/odcsim/odcsim-api/src/main/kotlin/com/atlarge/odcsim/flow/EventFlow.kt index 5d9af9ec..0e18f82f 100644 --- a/simulator/odcsim/odcsim-api/src/main/kotlin/com/atlarge/odcsim/flow/EventFlow.kt +++ b/simulator/odcsim/odcsim-api/src/main/kotlin/com/atlarge/odcsim/flow/EventFlow.kt @@ -24,6 +24,7 @@ package com.atlarge.odcsim.flow +import java.util.WeakHashMap import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.FlowPreview import kotlinx.coroutines.InternalCoroutinesApi @@ -32,7 +33,6 @@ import kotlinx.coroutines.channels.SendChannel import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.FlowCollector import kotlinx.coroutines.flow.consumeAsFlow -import java.util.WeakHashMap /** * A [Flow] that can be used to emit events. diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/core/Server.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/core/Server.kt index 01968cd8..fd0fc836 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/core/Server.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/core/Server.kt @@ -28,8 +28,8 @@ import com.atlarge.opendc.compute.core.image.Image import com.atlarge.opendc.core.resource.Resource import com.atlarge.opendc.core.resource.TagContainer import com.atlarge.opendc.core.services.ServiceRegistry -import kotlinx.coroutines.flow.Flow import java.util.UUID +import kotlinx.coroutines.flow.Flow /** * A server instance that is running on some physical or virtual machine. diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/Node.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/Node.kt index 7cb4c0c5..cb637aea 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/Node.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/Node.kt @@ -27,8 +27,8 @@ package com.atlarge.opendc.compute.metal import com.atlarge.opendc.compute.core.Server import com.atlarge.opendc.compute.core.image.Image import com.atlarge.opendc.core.Identity -import kotlinx.coroutines.flow.Flow import java.util.UUID +import kotlinx.coroutines.flow.Flow /** * A bare-metal compute node. 
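The import reshuffles in this and the surrounding Kotlin hunks all serve one change: simulator/.editorconfig now disables ktlint's no-wildcard-imports rule instead of import-ordering, so the ordering rule is enforced and every import list becomes a single ASCII-sorted sequence, which is why java.* and kotlin.* entries move ahead of kotlinx.*. A small illustration of the resulting order (dummy declarations so the snippet compiles):

    import java.util.UUID
    import kotlin.math.max
    import kotlinx.coroutines.flow.Flow
    import kotlinx.coroutines.flow.flowOf

    // ASCII ordering puts java.* before kotlin.*, and kotlin.* before kotlinx.*.
    fun freshId(): UUID = UUID.randomUUID()
    fun larger(a: Int, b: Int): Int = max(a, b)
    fun ids(): Flow<UUID> = flowOf(freshId())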
diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/driver/BareMetalDriver.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/driver/BareMetalDriver.kt index 41cec291..17d8ee53 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/driver/BareMetalDriver.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/driver/BareMetalDriver.kt @@ -30,8 +30,8 @@ import com.atlarge.opendc.compute.metal.Node import com.atlarge.opendc.core.failure.FailureDomain import com.atlarge.opendc.core.power.Powerable import com.atlarge.opendc.core.services.AbstractServiceKey -import kotlinx.coroutines.flow.Flow import java.util.UUID +import kotlinx.coroutines.flow.Flow /** * A driver interface for the management interface of a bare-metal compute node. diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/driver/SimpleBareMetalDriver.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/driver/SimpleBareMetalDriver.kt index 6a77415c..a453e459 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/driver/SimpleBareMetalDriver.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/metal/driver/SimpleBareMetalDriver.kt @@ -28,10 +28,10 @@ import com.atlarge.odcsim.Domain import com.atlarge.odcsim.SimulationContext import com.atlarge.odcsim.flow.EventFlow import com.atlarge.odcsim.flow.StateFlow -import com.atlarge.opendc.compute.core.ProcessingUnit -import com.atlarge.opendc.compute.core.Server import com.atlarge.opendc.compute.core.Flavor import com.atlarge.opendc.compute.core.MemoryUnit +import com.atlarge.opendc.compute.core.ProcessingUnit +import com.atlarge.opendc.compute.core.Server import com.atlarge.opendc.compute.core.ServerEvent import com.atlarge.opendc.compute.core.ServerState import com.atlarge.opendc.compute.core.execution.ServerContext @@ -46,6 +46,14 @@ import com.atlarge.opendc.compute.metal.power.ConstantPowerModel import com.atlarge.opendc.core.power.PowerModel import com.atlarge.opendc.core.services.ServiceKey import com.atlarge.opendc.core.services.ServiceRegistry +import java.lang.Exception +import java.time.Clock +import java.util.UUID +import kotlin.coroutines.ContinuationInterceptor +import kotlin.math.ceil +import kotlin.math.max +import kotlin.math.min +import kotlin.random.Random import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.Delay import kotlinx.coroutines.DisposableHandle @@ -59,15 +67,7 @@ import kotlinx.coroutines.intrinsics.startCoroutineCancellable import kotlinx.coroutines.launch import kotlinx.coroutines.selects.SelectClause0 import kotlinx.coroutines.selects.SelectInstance -import java.util.UUID -import kotlin.math.ceil -import kotlin.math.max -import kotlin.math.min import kotlinx.coroutines.withContext -import java.lang.Exception -import java.time.Clock -import kotlin.coroutines.ContinuationInterceptor -import kotlin.random.Random /** * A basic implementation of the [BareMetalDriver] that simulates an [Image] running on a bare-metal machine. 
diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/Hypervisor.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/Hypervisor.kt index 69b0124d..1e7e351f 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/Hypervisor.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/Hypervisor.kt @@ -25,8 +25,8 @@ package com.atlarge.opendc.compute.virt import com.atlarge.opendc.core.Identity -import kotlinx.coroutines.flow.Flow import java.util.UUID +import kotlinx.coroutines.flow.Flow /** * A hypervisor (or virtual machine monitor) is software or firmware that virtualizes the host compute environment diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/HypervisorImage.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/HypervisorImage.kt index bd395f0d..607759a8 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/HypervisorImage.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/HypervisorImage.kt @@ -29,9 +29,9 @@ import com.atlarge.opendc.compute.core.image.Image import com.atlarge.opendc.compute.virt.driver.SimpleVirtDriver import com.atlarge.opendc.compute.virt.driver.VirtDriver import com.atlarge.opendc.core.resource.TagContainer +import java.util.UUID import kotlinx.coroutines.coroutineScope import kotlinx.coroutines.suspendCancellableCoroutine -import java.util.UUID /** * A hypervisor managing the VMs of a node. diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/driver/SimpleVirtDriver.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/driver/SimpleVirtDriver.kt index 3c41f52e..192db413 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/driver/SimpleVirtDriver.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/driver/SimpleVirtDriver.kt @@ -35,11 +35,15 @@ import com.atlarge.opendc.compute.core.execution.ServerContext import com.atlarge.opendc.compute.core.execution.ServerManagementContext import com.atlarge.opendc.compute.core.execution.ShutdownException import com.atlarge.opendc.compute.core.image.Image +import com.atlarge.opendc.compute.core.workload.IMAGE_PERF_INTERFERENCE_MODEL +import com.atlarge.opendc.compute.core.workload.PerformanceInterferenceModel import com.atlarge.opendc.compute.virt.HypervisorEvent import com.atlarge.opendc.core.services.ServiceKey import com.atlarge.opendc.core.services.ServiceRegistry -import com.atlarge.opendc.compute.core.workload.IMAGE_PERF_INTERFERENCE_MODEL -import com.atlarge.opendc.compute.core.workload.PerformanceInterferenceModel +import java.util.UUID +import kotlin.math.ceil +import kotlin.math.max +import kotlin.math.min import kotlinx.coroutines.CancellationException import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.DisposableHandle @@ -55,10 +59,6 @@ import kotlinx.coroutines.launch import kotlinx.coroutines.selects.SelectClause0 import kotlinx.coroutines.selects.SelectInstance import kotlinx.coroutines.selects.select -import java.util.UUID -import kotlin.math.ceil -import kotlin.math.max -import kotlin.math.min /** * A [VirtDriver] that is backed by a simple hypervisor implementation. 
diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/driver/VirtDriver.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/driver/VirtDriver.kt index 1002d382..b1844f67 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/driver/VirtDriver.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/driver/VirtDriver.kt @@ -29,8 +29,8 @@ import com.atlarge.opendc.compute.core.Server import com.atlarge.opendc.compute.core.image.Image import com.atlarge.opendc.compute.virt.HypervisorEvent import com.atlarge.opendc.core.services.AbstractServiceKey -import kotlinx.coroutines.flow.Flow import java.util.UUID +import kotlinx.coroutines.flow.Flow /** * A driver interface for a hypervisor running on some host server and communicating with the central compute service to diff --git a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/service/SimpleVirtProvisioningService.kt b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/service/SimpleVirtProvisioningService.kt index ff4aa3d7..79388bc3 100644 --- a/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/service/SimpleVirtProvisioningService.kt +++ b/simulator/opendc/opendc-compute/src/main/kotlin/com/atlarge/opendc/compute/virt/service/SimpleVirtProvisioningService.kt @@ -11,11 +11,14 @@ import com.atlarge.opendc.compute.core.image.Image import com.atlarge.opendc.compute.core.image.VmImage import com.atlarge.opendc.compute.metal.service.ProvisioningService import com.atlarge.opendc.compute.virt.HypervisorEvent -import com.atlarge.opendc.compute.virt.driver.VirtDriver import com.atlarge.opendc.compute.virt.HypervisorImage import com.atlarge.opendc.compute.virt.driver.InsufficientMemoryOnServerException +import com.atlarge.opendc.compute.virt.driver.VirtDriver import com.atlarge.opendc.compute.virt.service.allocation.AllocationPolicy import com.atlarge.opendc.core.services.ServiceKey +import kotlin.coroutines.Continuation +import kotlin.coroutines.resume +import kotlin.math.max import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.Job @@ -27,9 +30,6 @@ import kotlinx.coroutines.launch import kotlinx.coroutines.suspendCancellableCoroutine import kotlinx.coroutines.withContext import mu.KotlinLogging -import kotlin.coroutines.Continuation -import kotlin.coroutines.resume -import kotlin.math.max private val logger = KotlinLogging.logger {} diff --git a/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/core/image/FlopsApplicationImageTest.kt b/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/core/image/FlopsApplicationImageTest.kt index 417db77d..1c7b751c 100644 --- a/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/core/image/FlopsApplicationImageTest.kt +++ b/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/core/image/FlopsApplicationImageTest.kt @@ -24,10 +24,10 @@ package com.atlarge.opendc.compute.core.image +import java.util.UUID import org.junit.jupiter.api.DisplayName import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertThrows -import java.util.UUID /** * Test suite for [FlopsApplicationImage] diff --git a/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/metal/driver/SimpleBareMetalDriverTest.kt 
b/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/metal/driver/SimpleBareMetalDriverTest.kt index 071c0626..af9d3421 100644 --- a/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/metal/driver/SimpleBareMetalDriverTest.kt +++ b/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/metal/driver/SimpleBareMetalDriverTest.kt @@ -31,6 +31,8 @@ import com.atlarge.opendc.compute.core.ProcessingUnit import com.atlarge.opendc.compute.core.ServerEvent import com.atlarge.opendc.compute.core.ServerState import com.atlarge.opendc.compute.core.image.FlopsApplicationImage +import java.util.ServiceLoader +import java.util.UUID import kotlinx.coroutines.flow.collect import kotlinx.coroutines.flow.launchIn import kotlinx.coroutines.flow.onEach @@ -39,8 +41,6 @@ import kotlinx.coroutines.runBlocking import kotlinx.coroutines.withContext import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test -import java.util.ServiceLoader -import java.util.UUID internal class SimpleBareMetalDriverTest { /** diff --git a/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/metal/service/SimpleProvisioningServiceTest.kt b/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/metal/service/SimpleProvisioningServiceTest.kt index f8bd786e..ed2256c0 100644 --- a/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/metal/service/SimpleProvisioningServiceTest.kt +++ b/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/metal/service/SimpleProvisioningServiceTest.kt @@ -29,13 +29,13 @@ import com.atlarge.opendc.compute.core.ProcessingNode import com.atlarge.opendc.compute.core.ProcessingUnit import com.atlarge.opendc.compute.core.image.FlopsApplicationImage import com.atlarge.opendc.compute.metal.driver.SimpleBareMetalDriver +import java.util.ServiceLoader +import java.util.UUID import kotlinx.coroutines.delay import kotlinx.coroutines.flow.collect import kotlinx.coroutines.launch import kotlinx.coroutines.runBlocking import org.junit.jupiter.api.Test -import java.util.ServiceLoader -import java.util.UUID /** * Test suite for the [SimpleProvisioningService]. 
diff --git a/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/virt/HypervisorTest.kt b/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/virt/HypervisorTest.kt index ca00fc94..622b185e 100644 --- a/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/virt/HypervisorTest.kt +++ b/simulator/opendc/opendc-compute/src/test/kotlin/com/atlarge/opendc/compute/virt/HypervisorTest.kt @@ -25,14 +25,16 @@ package com.atlarge.opendc.compute.virt import com.atlarge.odcsim.SimulationEngineProvider -import com.atlarge.opendc.compute.core.ProcessingUnit import com.atlarge.opendc.compute.core.Flavor import com.atlarge.opendc.compute.core.ProcessingNode +import com.atlarge.opendc.compute.core.ProcessingUnit import com.atlarge.opendc.compute.core.image.FlopsApplicationImage import com.atlarge.opendc.compute.core.image.FlopsHistoryFragment import com.atlarge.opendc.compute.core.image.VmImage import com.atlarge.opendc.compute.metal.driver.SimpleBareMetalDriver import com.atlarge.opendc.compute.virt.driver.VirtDriver +import java.util.ServiceLoader +import java.util.UUID import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.delay import kotlinx.coroutines.flow.launchIn @@ -43,8 +45,6 @@ import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Disabled import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertAll -import java.util.ServiceLoader -import java.util.UUID /** * Basic test-suite for the hypervisor. diff --git a/simulator/opendc/opendc-core/src/main/kotlin/com/atlarge/opendc/core/failure/CorrelatedFaultInjector.kt b/simulator/opendc/opendc-core/src/main/kotlin/com/atlarge/opendc/core/failure/CorrelatedFaultInjector.kt index 50261db5..f77a581e 100644 --- a/simulator/opendc/opendc-core/src/main/kotlin/com/atlarge/opendc/core/failure/CorrelatedFaultInjector.kt +++ b/simulator/opendc/opendc-core/src/main/kotlin/com/atlarge/opendc/core/failure/CorrelatedFaultInjector.kt @@ -26,14 +26,14 @@ package com.atlarge.opendc.core.failure import com.atlarge.odcsim.Domain import com.atlarge.odcsim.simulationContext -import kotlinx.coroutines.Job -import kotlinx.coroutines.delay -import kotlinx.coroutines.ensureActive -import kotlinx.coroutines.launch import kotlin.math.exp import kotlin.math.max import kotlin.random.Random import kotlin.random.asJavaRandom +import kotlinx.coroutines.Job +import kotlinx.coroutines.delay +import kotlinx.coroutines.ensureActive +import kotlinx.coroutines.launch /** * A [FaultInjector] that injects fault in the system which are correlated to each other. 
Failures do not occur in diff --git a/simulator/opendc/opendc-core/src/main/kotlin/com/atlarge/opendc/core/failure/UncorrelatedFaultInjector.kt b/simulator/opendc/opendc-core/src/main/kotlin/com/atlarge/opendc/core/failure/UncorrelatedFaultInjector.kt index 1b896858..0f62667f 100644 --- a/simulator/opendc/opendc-core/src/main/kotlin/com/atlarge/opendc/core/failure/UncorrelatedFaultInjector.kt +++ b/simulator/opendc/opendc-core/src/main/kotlin/com/atlarge/opendc/core/failure/UncorrelatedFaultInjector.kt @@ -25,11 +25,11 @@ package com.atlarge.opendc.core.failure import com.atlarge.odcsim.simulationContext -import kotlinx.coroutines.delay -import kotlinx.coroutines.launch import kotlin.math.ln1p import kotlin.math.pow import kotlin.random.Random +import kotlinx.coroutines.delay +import kotlinx.coroutines.launch /** * A [FaultInjector] that injects uncorrelated faults into the system, meaning that failures of the subsystems are diff --git a/simulator/opendc/opendc-experiments-sc18/src/main/kotlin/com/atlarge/opendc/experiments/sc18/TestExperiment.kt b/simulator/opendc/opendc-experiments-sc18/src/main/kotlin/com/atlarge/opendc/experiments/sc18/TestExperiment.kt index b0182ab3..7659b18e 100644 --- a/simulator/opendc/opendc-experiments-sc18/src/main/kotlin/com/atlarge/opendc/experiments/sc18/TestExperiment.kt +++ b/simulator/opendc/opendc-experiments-sc18/src/main/kotlin/com/atlarge/opendc/experiments/sc18/TestExperiment.kt @@ -38,6 +38,8 @@ import com.atlarge.opendc.workflows.service.stage.resource.FirstFitResourceSelec import com.atlarge.opendc.workflows.service.stage.resource.FunctionalResourceFilterPolicy import com.atlarge.opendc.workflows.service.stage.task.NullTaskEligibilityPolicy import com.atlarge.opendc.workflows.service.stage.task.SubmissionTimeTaskOrderPolicy +import java.io.File +import java.util.ServiceLoader import kotlin.math.max import kotlinx.coroutines.async import kotlinx.coroutines.channels.Channel @@ -46,8 +48,6 @@ import kotlinx.coroutines.flow.collect import kotlinx.coroutines.flow.onEach import kotlinx.coroutines.launch import kotlinx.coroutines.runBlocking -import java.io.File -import java.util.ServiceLoader /** * Main entry point of the experiment. diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/Main.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/Main.kt index 677af381..faa68e34 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/Main.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/Main.kt @@ -48,9 +48,9 @@ import com.github.ajalt.clikt.parameters.options.required import com.github.ajalt.clikt.parameters.types.choice import com.github.ajalt.clikt.parameters.types.file import com.github.ajalt.clikt.parameters.types.int -import mu.KotlinLogging import java.io.File import java.io.InputStream +import mu.KotlinLogging /** * The logger for this experiment. 
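The ExperimentHelpers.kt hunk just below replaces a conflated-Channel handshake with a wait on the scheduler's event flow: the submission loop no longer signals per-VM completion through a Channel; instead the caller collects the provisioner's MetricsAvailable events with takeWhile until every submitted VM is either inactive or failed. A self-contained sketch of that completion-wait pattern, with simplified stand-in names rather than the project's real types:

    import kotlinx.coroutines.flow.MutableSharedFlow
    import kotlinx.coroutines.flow.collect
    import kotlinx.coroutines.flow.takeWhile
    import kotlinx.coroutines.launch
    import kotlinx.coroutines.runBlocking

    // Stand-in for VirtProvisioningEvent.MetricsAvailable.
    data class MetricsAvailable(val inactiveVmCount: Int, val failedVmCount: Int)

    fun main() = runBlocking {
        val events = MutableSharedFlow<MetricsAvailable>()
        val submitted = 3
        launch {
            // Pretend workloads finish one by one, each emitting fresh metrics.
            repeat(submitted) { n -> events.emit(MetricsAvailable(n + 1, 0)) }
        }
        // collect() returns once takeWhile sees the terminal count and cancels upstream.
        events
            .takeWhile { it.inactiveVmCount + it.failedVmCount != submitted }
            .collect()
        println("all $submitted workloads accounted for")
    }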
diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/ExperimentHelpers.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/ExperimentHelpers.kt index a70297d2..b09c0dbb 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/ExperimentHelpers.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/ExperimentHelpers.kt @@ -45,19 +45,20 @@ import com.atlarge.opendc.experiments.sc20.experiment.monitor.ExperimentMonitor import com.atlarge.opendc.experiments.sc20.trace.Sc20StreamingParquetTraceReader import com.atlarge.opendc.format.environment.EnvironmentReader import com.atlarge.opendc.format.trace.TraceReader +import java.io.File +import kotlin.math.ln +import kotlin.math.max +import kotlin.random.Random import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.delay import kotlinx.coroutines.flow.collect import kotlinx.coroutines.flow.launchIn import kotlinx.coroutines.flow.onEach +import kotlinx.coroutines.flow.takeWhile import kotlinx.coroutines.launch import kotlinx.coroutines.withContext import mu.KotlinLogging -import java.io.File -import kotlin.math.ln -import kotlin.math.max -import kotlin.random.Random /** * The logger for this experiment. @@ -209,7 +210,6 @@ suspend fun processTrace(reader: TraceReader<VmWorkload>, scheduler: SimpleVirtP try { var submitted = 0 - val finished = Channel<Unit>(Channel.CONFLATED) while (reader.hasNext()) { val (time, workload) = reader.next() @@ -228,17 +228,20 @@ suspend fun processTrace(reader: TraceReader<VmWorkload>, scheduler: SimpleVirtP if (it is ServerEvent.StateChanged) { monitor.reportVmStateChange(simulationContext.clock.millis(), it.server) } - - delay(1) - finished.send(Unit) } .collect() } } - while (scheduler.finishedVms + scheduler.unscheduledVms != submitted) { - finished.receive() - } + scheduler.events + .takeWhile { + when (it) { + is VirtProvisioningEvent.MetricsAvailable -> + it.inactiveVmCount + it.failedVmCount != submitted + } + } + .collect() + delay(1) } finally { reader.close() } diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/Run.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/Run.kt index 5d1c29e2..1580e4dd 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/Run.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/Run.kt @@ -38,13 +38,13 @@ import com.atlarge.opendc.experiments.sc20.runner.execution.ExperimentExecutionC import com.atlarge.opendc.experiments.sc20.trace.Sc20ParquetTraceReader import com.atlarge.opendc.experiments.sc20.trace.Sc20RawParquetTraceReader import com.atlarge.opendc.format.environment.sc20.Sc20ClusterEnvironmentReader +import java.io.File +import java.util.ServiceLoader +import kotlin.random.Random import kotlinx.coroutines.cancel import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.launch import mu.KotlinLogging -import java.io.File -import java.util.ServiceLoader -import kotlin.random.Random /** * The logger for the experiment scenario. 
@@ -106,7 +106,11 @@ public data class Run(override val parent: Scenario, val id: Int, val seed: Int) ?.construct(seeder) ?: emptyMap() val trace = Sc20ParquetTraceReader(rawReaders, performanceInterferenceModel, parent.workload, seed) - val monitor = ParquetExperimentMonitor(this) + val monitor = ParquetExperimentMonitor( + parent.parent.parent.output, + "portfolio_id=${parent.parent.id}/scenario_id=${parent.id}/run_id=$id", + parent.parent.parent.bufferSize + ) root.launch { val (bareMetalProvisioner, scheduler) = createProvisioner( diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/monitor/ParquetExperimentMonitor.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/monitor/ParquetExperimentMonitor.kt index be60e5b7..b931fef9 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/monitor/ParquetExperimentMonitor.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/experiment/monitor/ParquetExperimentMonitor.kt @@ -27,13 +27,12 @@ package com.atlarge.opendc.experiments.sc20.experiment.monitor import com.atlarge.opendc.compute.core.Server import com.atlarge.opendc.compute.virt.driver.VirtDriver import com.atlarge.opendc.compute.virt.service.VirtProvisioningEvent -import com.atlarge.opendc.experiments.sc20.experiment.Run import com.atlarge.opendc.experiments.sc20.telemetry.HostEvent import com.atlarge.opendc.experiments.sc20.telemetry.ProvisionerEvent import com.atlarge.opendc.experiments.sc20.telemetry.parquet.ParquetHostEventWriter import com.atlarge.opendc.experiments.sc20.telemetry.parquet.ParquetProvisionerEventWriter -import mu.KotlinLogging import java.io.File +import mu.KotlinLogging /** * The logger instance to use. @@ -43,15 +42,14 @@ private val logger = KotlinLogging.logger {} /** * An [ExperimentMonitor] that logs the events to a Parquet file. 
*/ -class ParquetExperimentMonitor(val run: Run) : ExperimentMonitor { - private val partition = "portfolio_id=${run.parent.parent.id}/scenario_id=${run.parent.id}/run_id=${run.id}" +class ParquetExperimentMonitor(base: File, partition: String, bufferSize: Int) : ExperimentMonitor { private val hostWriter = ParquetHostEventWriter( - File(run.parent.parent.parent.output, "host-metrics/$partition/data.parquet"), - run.parent.parent.parent.bufferSize + File(base, "host-metrics/$partition/data.parquet"), + bufferSize ) private val provisionerWriter = ParquetProvisionerEventWriter( - File(run.parent.parent.parent.output, "provisioner-metrics/$partition/data.parquet"), - run.parent.parent.parent.bufferSize + File(base, "provisioner-metrics/$partition/data.parquet"), + bufferSize ) private val currentHostEvent = mutableMapOf<Server, HostEvent>() private var startTime = -1L diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/reporter/ConsoleExperimentReporter.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/reporter/ConsoleExperimentReporter.kt index f59402d5..b446abc8 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/reporter/ConsoleExperimentReporter.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/reporter/ConsoleExperimentReporter.kt @@ -30,6 +30,7 @@ import com.atlarge.opendc.experiments.sc20.runner.execution.ExperimentExecutionL import com.atlarge.opendc.experiments.sc20.runner.execution.ExperimentExecutionResult import me.tongfei.progressbar.ProgressBar import me.tongfei.progressbar.ProgressBarBuilder +import mu.KotlinLogging /** * A reporter that reports the experiment progress to the console. @@ -46,6 +47,11 @@ public class ConsoleExperimentReporter : ExperimentExecutionListener { private var total = 0 /** + * The logger for this reporter. + */ + private val logger = KotlinLogging.logger {} + + /** * The progress bar to keep track of the progress. */ private val pb: ProgressBar = ProgressBarBuilder() @@ -69,6 +75,10 @@ public class ConsoleExperimentReporter : ExperimentExecutionListener { pb.close() } } + + if (result is ExperimentExecutionResult.Failed) { + logger.warn(result.throwable) { "Descriptor $descriptor failed" } + } } override fun executionStarted(descriptor: ExperimentDescriptor) {} diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/execution/ExperimentScheduler.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/execution/ExperimentScheduler.kt index 0346a7f8..96678abf 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/execution/ExperimentScheduler.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/execution/ExperimentScheduler.kt @@ -49,11 +49,10 @@ interface ExperimentScheduler : Closeable { * * @param descriptor The descriptor to execute. * @param context The context to execute the descriptor in. - * @return The results of the experiment trial. 
*/ suspend operator fun invoke( descriptor: ExperimentDescriptor, context: ExperimentExecutionContext - ): ExperimentExecutionResult + ) } } diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/execution/ThreadPoolExperimentScheduler.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/execution/ThreadPoolExperimentScheduler.kt index 31632b8c..a8ee59a8 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/execution/ThreadPoolExperimentScheduler.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/execution/ThreadPoolExperimentScheduler.kt @@ -25,12 +25,12 @@ package com.atlarge.opendc.experiments.sc20.runner.execution import com.atlarge.opendc.experiments.sc20.runner.ExperimentDescriptor +import java.util.concurrent.Executors import kotlinx.coroutines.asCoroutineDispatcher import kotlinx.coroutines.launch import kotlinx.coroutines.supervisorScope import kotlinx.coroutines.sync.Semaphore import kotlinx.coroutines.withContext -import java.util.concurrent.Executors /** * An [ExperimentScheduler] that runs experiments using a local thread pool. @@ -47,7 +47,7 @@ class ThreadPoolExperimentScheduler(parallelism: Int = Runtime.getRuntime().avai override suspend fun invoke( descriptor: ExperimentDescriptor, context: ExperimentExecutionContext - ): ExperimentExecutionResult = supervisorScope { + ) = supervisorScope { val listener = object : ExperimentExecutionListener { override fun descriptorRegistered(descriptor: ExperimentDescriptor) { @@ -70,10 +70,7 @@ class ThreadPoolExperimentScheduler(parallelism: Int = Runtime.getRuntime().avai try { withContext(dispatcher) { descriptor(newContext) - ExperimentExecutionResult.Success } - } catch (e: Throwable) { - ExperimentExecutionResult.Failed(e) } finally { tickets.release() } diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/internal/DefaultExperimentRunner.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/internal/DefaultExperimentRunner.kt index 3b80276f..28a19172 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/internal/DefaultExperimentRunner.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/runner/internal/DefaultExperimentRunner.kt @@ -30,8 +30,8 @@ import com.atlarge.opendc.experiments.sc20.runner.execution.ExperimentExecutionC import com.atlarge.opendc.experiments.sc20.runner.execution.ExperimentExecutionListener import com.atlarge.opendc.experiments.sc20.runner.execution.ExperimentExecutionResult import com.atlarge.opendc.experiments.sc20.runner.execution.ExperimentScheduler -import kotlinx.coroutines.runBlocking import java.util.concurrent.ConcurrentHashMap +import kotlinx.coroutines.runBlocking /** * The default implementation of the [ExperimentRunner] interface. 
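The two scheduler hunks above drop the ExperimentExecutionResult return value: invoke() now simply runs the descriptor, and a failure surfaces through the listener callbacks instead of a wrapped result (which is also why ConsoleExperimentReporter gains a logger and warns on ExperimentExecutionResult.Failed). The concurrency cap itself is unchanged in spirit: a counting Semaphore guards a fixed-size dispatcher. A compact sketch of that throttling pattern, with stand-in names rather than the project's real API:

    import java.util.concurrent.Executors
    import kotlinx.coroutines.asCoroutineDispatcher
    import kotlinx.coroutines.launch
    import kotlinx.coroutines.runBlocking
    import kotlinx.coroutines.supervisorScope
    import kotlinx.coroutines.sync.Semaphore
    import kotlinx.coroutines.withContext

    class ThrottledRunner(parallelism: Int) {
        private val dispatcher = Executors.newFixedThreadPool(parallelism).asCoroutineDispatcher()
        private val tickets = Semaphore(parallelism)

        // Runs the task on the pool, admitting at most `parallelism` tasks at once.
        suspend fun run(task: suspend () -> Unit) {
            tickets.acquire()
            try {
                withContext(dispatcher) { task() }
            } finally {
                tickets.release()
            }
        }

        fun close() = dispatcher.close()
    }

    fun main() = runBlocking {
        val runner = ThrottledRunner(parallelism = 2)
        // supervisorScope keeps one failing task from cancelling its siblings.
        supervisorScope {
            repeat(4) { i ->
                launch { runner.run { println("task $i on ${Thread.currentThread().name}") } }
            }
        }
        runner.close()
    }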
diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetEventWriter.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetEventWriter.kt index a69bd4b2..e42ac654 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetEventWriter.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetEventWriter.kt @@ -25,17 +25,17 @@ package com.atlarge.opendc.experiments.sc20.telemetry.parquet import com.atlarge.opendc.experiments.sc20.telemetry.Event +import java.io.Closeable +import java.io.File +import java.util.concurrent.ArrayBlockingQueue +import java.util.concurrent.BlockingQueue +import kotlin.concurrent.thread import mu.KotlinLogging import org.apache.avro.Schema import org.apache.avro.generic.GenericData import org.apache.hadoop.fs.Path import org.apache.parquet.avro.AvroParquetWriter import org.apache.parquet.hadoop.metadata.CompressionCodecName -import java.io.Closeable -import java.io.File -import java.util.concurrent.ArrayBlockingQueue -import java.util.concurrent.BlockingQueue -import kotlin.concurrent.thread /** * The logging instance to use. diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetHostEventWriter.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetHostEventWriter.kt index 3bc09435..9fa4e0fb 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetHostEventWriter.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetHostEventWriter.kt @@ -25,10 +25,10 @@ package com.atlarge.opendc.experiments.sc20.telemetry.parquet import com.atlarge.opendc.experiments.sc20.telemetry.HostEvent +import java.io.File import org.apache.avro.Schema import org.apache.avro.SchemaBuilder import org.apache.avro.generic.GenericData -import java.io.File /** * A Parquet event writer for [HostEvent]s. diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetProvisionerEventWriter.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetProvisionerEventWriter.kt index 1f3b0472..3d28860c 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetProvisionerEventWriter.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetProvisionerEventWriter.kt @@ -25,10 +25,10 @@ package com.atlarge.opendc.experiments.sc20.telemetry.parquet import com.atlarge.opendc.experiments.sc20.telemetry.ProvisionerEvent +import java.io.File import org.apache.avro.Schema import org.apache.avro.SchemaBuilder import org.apache.avro.generic.GenericData -import java.io.File /** * A Parquet event writer for [ProvisionerEvent]s. 
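The writer hunks in this stretch are pure import reordering, but they accompany the one behavioral change at the top of this section: ParquetExperimentMonitor no longer reaches through run.parent.parent.parent for its output directory and buffer size, and instead takes base, partition and bufferSize as plain constructor parameters. That decoupling is what lets the web runner added later in this patch create a monitor without an experiment tree. A sketch of the new call site, mirroring Main.kt in opendc-runner-web (id and repeat stand in for the scenario id and repeat index being simulated):

    // Decoupled construction: the caller now owns the layout decisions
    // that the Run hierarchy previously encoded.
    val monitor = ParquetExperimentMonitor(
        outputPath,                        // base directory for all results
        "scenario_id=$id/run_id=$repeat",  // partition under host-metrics/ etc.
        4096                               // Parquet writer buffer size
    )
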
diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetRunEventWriter.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetRunEventWriter.kt index 1549b8d2..c1724369 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetRunEventWriter.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/telemetry/parquet/ParquetRunEventWriter.kt @@ -25,10 +25,10 @@ package com.atlarge.opendc.experiments.sc20.telemetry.parquet import com.atlarge.opendc.experiments.sc20.telemetry.RunEvent +import java.io.File import org.apache.avro.Schema import org.apache.avro.SchemaBuilder import org.apache.avro.generic.GenericData -import java.io.File /** * A Parquet event writer for [RunEvent]s. diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20RawParquetTraceReader.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20RawParquetTraceReader.kt index 652f7746..f9709b9f 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20RawParquetTraceReader.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20RawParquetTraceReader.kt @@ -30,12 +30,12 @@ import com.atlarge.opendc.compute.core.workload.VmWorkload import com.atlarge.opendc.core.User import com.atlarge.opendc.format.trace.TraceEntry import com.atlarge.opendc.format.trace.TraceReader +import java.io.File +import java.util.UUID import mu.KotlinLogging import org.apache.avro.generic.GenericData import org.apache.hadoop.fs.Path import org.apache.parquet.avro.AvroParquetReader -import java.io.File -import java.util.UUID private val logger = KotlinLogging.logger {} diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20StreamingParquetTraceReader.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20StreamingParquetTraceReader.kt index f6d6e6fd..8b7b222f 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20StreamingParquetTraceReader.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20StreamingParquetTraceReader.kt @@ -32,6 +32,14 @@ import com.atlarge.opendc.compute.core.workload.VmWorkload import com.atlarge.opendc.core.User import com.atlarge.opendc.format.trace.TraceEntry import com.atlarge.opendc.format.trace.TraceReader +import java.io.File +import java.io.Serializable +import java.util.SortedSet +import java.util.TreeSet +import java.util.UUID +import java.util.concurrent.ArrayBlockingQueue +import kotlin.concurrent.thread +import kotlin.random.Random import mu.KotlinLogging import org.apache.avro.generic.GenericData import org.apache.hadoop.fs.Path @@ -41,14 +49,6 @@ import org.apache.parquet.filter2.predicate.FilterApi import org.apache.parquet.filter2.predicate.Statistics import org.apache.parquet.filter2.predicate.UserDefinedPredicate import org.apache.parquet.io.api.Binary -import java.io.File -import java.io.Serializable -import java.util.SortedSet -import java.util.TreeSet -import java.util.UUID -import 
java.util.concurrent.ArrayBlockingQueue -import kotlin.concurrent.thread -import kotlin.random.Random private val logger = KotlinLogging.logger {} diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20TraceConverter.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20TraceConverter.kt index 0877ad52..d6726910 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20TraceConverter.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/Sc20TraceConverter.kt @@ -25,6 +25,12 @@ package com.atlarge.opendc.experiments.sc20.trace import com.atlarge.opendc.format.trace.sc20.Sc20VmPlacementReader +import java.io.BufferedReader +import java.io.File +import java.io.FileReader +import java.util.Random +import kotlin.math.max +import kotlin.math.min import me.tongfei.progressbar.ProgressBar import org.apache.avro.Schema import org.apache.avro.SchemaBuilder @@ -33,12 +39,6 @@ import org.apache.hadoop.fs.Path import org.apache.parquet.avro.AvroParquetWriter import org.apache.parquet.hadoop.ParquetWriter import org.apache.parquet.hadoop.metadata.CompressionCodecName -import java.io.BufferedReader -import java.io.File -import java.io.FileReader -import java.util.Random -import kotlin.math.max -import kotlin.math.min /** * A script to convert a trace in text format into a Parquet trace. diff --git a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/WorkloadSampler.kt b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/WorkloadSampler.kt index dd70d4f1..f2a0e627 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/WorkloadSampler.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/main/kotlin/com/atlarge/opendc/experiments/sc20/trace/WorkloadSampler.kt @@ -28,8 +28,8 @@ import com.atlarge.opendc.compute.core.workload.VmWorkload import com.atlarge.opendc.experiments.sc20.experiment.model.CompositeWorkload import com.atlarge.opendc.experiments.sc20.experiment.model.Workload import com.atlarge.opendc.format.trace.TraceEntry -import mu.KotlinLogging import kotlin.random.Random +import mu.KotlinLogging private val logger = KotlinLogging.logger {} diff --git a/simulator/opendc/opendc-experiments-sc20/src/test/kotlin/com/atlarge/opendc/experiments/sc20/Sc20IntegrationTest.kt b/simulator/opendc/opendc-experiments-sc20/src/test/kotlin/com/atlarge/opendc/experiments/sc20/Sc20IntegrationTest.kt index 5ecf7605..a79e9a5a 100644 --- a/simulator/opendc/opendc-experiments-sc20/src/test/kotlin/com/atlarge/opendc/experiments/sc20/Sc20IntegrationTest.kt +++ b/simulator/opendc/opendc-experiments-sc20/src/test/kotlin/com/atlarge/opendc/experiments/sc20/Sc20IntegrationTest.kt @@ -42,6 +42,8 @@ import com.atlarge.opendc.experiments.sc20.trace.Sc20RawParquetTraceReader import com.atlarge.opendc.format.environment.EnvironmentReader import com.atlarge.opendc.format.environment.sc20.Sc20ClusterEnvironmentReader import com.atlarge.opendc.format.trace.TraceReader +import java.io.File +import java.util.ServiceLoader import kotlinx.coroutines.cancel import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.launch @@ -52,8 +54,6 @@ import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.BeforeEach import 
org.junit.jupiter.api.Test import org.junit.jupiter.api.assertAll -import java.io.File -import java.util.ServiceLoader /** * An integration test suite for the SC20 experiments. diff --git a/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt b/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt index 5f220ad0..a9aa3337 100644 --- a/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt +++ b/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/environment/sc18/Sc18EnvironmentReader.kt @@ -25,9 +25,9 @@ package com.atlarge.opendc.format.environment.sc18 import com.atlarge.odcsim.Domain -import com.atlarge.opendc.compute.core.ProcessingUnit import com.atlarge.opendc.compute.core.MemoryUnit import com.atlarge.opendc.compute.core.ProcessingNode +import com.atlarge.opendc.compute.core.ProcessingUnit import com.atlarge.opendc.compute.metal.driver.SimpleBareMetalDriver import com.atlarge.opendc.compute.metal.service.ProvisioningService import com.atlarge.opendc.compute.metal.service.SimpleProvisioningService diff --git a/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/bitbrains/BitbrainsTraceReader.kt b/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/bitbrains/BitbrainsTraceReader.kt index 2a8fefeb..1cabc8bc 100644 --- a/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/bitbrains/BitbrainsTraceReader.kt +++ b/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/bitbrains/BitbrainsTraceReader.kt @@ -26,10 +26,10 @@ package com.atlarge.opendc.format.trace.bitbrains import com.atlarge.opendc.compute.core.image.FlopsHistoryFragment import com.atlarge.opendc.compute.core.image.VmImage -import com.atlarge.opendc.compute.core.workload.VmWorkload -import com.atlarge.opendc.core.User import com.atlarge.opendc.compute.core.workload.IMAGE_PERF_INTERFERENCE_MODEL import com.atlarge.opendc.compute.core.workload.PerformanceInterferenceModel +import com.atlarge.opendc.compute.core.workload.VmWorkload +import com.atlarge.opendc.core.User import com.atlarge.opendc.format.trace.TraceEntry import com.atlarge.opendc.format.trace.TraceReader import java.io.BufferedReader diff --git a/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/sc20/Sc20TraceReader.kt b/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/sc20/Sc20TraceReader.kt index 076274d5..8e34505a 100644 --- a/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/sc20/Sc20TraceReader.kt +++ b/simulator/opendc/opendc-format/src/main/kotlin/com/atlarge/opendc/format/trace/sc20/Sc20TraceReader.kt @@ -26,10 +26,10 @@ package com.atlarge.opendc.format.trace.sc20 import com.atlarge.opendc.compute.core.image.FlopsHistoryFragment import com.atlarge.opendc.compute.core.image.VmImage -import com.atlarge.opendc.compute.core.workload.VmWorkload -import com.atlarge.opendc.core.User import com.atlarge.opendc.compute.core.workload.IMAGE_PERF_INTERFERENCE_MODEL import com.atlarge.opendc.compute.core.workload.PerformanceInterferenceModel +import com.atlarge.opendc.compute.core.workload.VmWorkload +import com.atlarge.opendc.core.User import com.atlarge.opendc.format.trace.TraceEntry import com.atlarge.opendc.format.trace.TraceReader import java.io.BufferedReader diff --git 
a/simulator/opendc/opendc-format/src/test/kotlin/com/atlarge/opendc/format/trace/swf/SwfTraceReaderTest.kt b/simulator/opendc/opendc-format/src/test/kotlin/com/atlarge/opendc/format/trace/swf/SwfTraceReaderTest.kt index 41ad8aba..94e4b0fc 100644 --- a/simulator/opendc/opendc-format/src/test/kotlin/com/atlarge/opendc/format/trace/swf/SwfTraceReaderTest.kt +++ b/simulator/opendc/opendc-format/src/test/kotlin/com/atlarge/opendc/format/trace/swf/SwfTraceReaderTest.kt @@ -1,8 +1,8 @@ package com.atlarge.opendc.format.trace.swf +import java.io.File import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test -import java.io.File class SwfTraceReaderTest { @Test diff --git a/simulator/opendc/opendc-runner-web/build.gradle.kts b/simulator/opendc/opendc-runner-web/build.gradle.kts new file mode 100644 index 00000000..6f725de1 --- /dev/null +++ b/simulator/opendc/opendc-runner-web/build.gradle.kts @@ -0,0 +1,55 @@ +/* + * MIT License + * + * Copyright (c) 2020 atlarge-research + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +description = "Experiment runner for OpenDC" + +/* Build configuration */ +plugins { + `kotlin-library-convention` + application +} + +application { + mainClassName = "com.atlarge.opendc.runner.web.MainKt" +} + +dependencies { + api(project(":opendc:opendc-core")) + implementation(project(":opendc:opendc-compute")) + implementation(project(":opendc:opendc-format")) + implementation(project(":opendc:opendc-experiments-sc20")) + + implementation("com.github.ajalt:clikt:2.8.0") + implementation("io.github.microutils:kotlin-logging:1.7.10") + + implementation("org.mongodb:mongodb-driver-sync:4.0.5") + implementation("org.apache.spark:spark-sql_2.12:3.0.0") { + exclude(group = "org.slf4j", module = "slf4j-log4j12") + exclude(group = "log4j") + } + + runtimeOnly(project(":odcsim:odcsim-engine-omega")) + runtimeOnly("org.apache.logging.log4j:log4j-slf4j-impl:2.13.1") + runtimeOnly("org.apache.logging.log4j:log4j-1.2-api:2.13.1") +} diff --git a/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/Main.kt b/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/Main.kt new file mode 100644 index 00000000..0ff9b870 --- /dev/null +++ b/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/Main.kt @@ -0,0 +1,338 @@ +package com.atlarge.opendc.runner.web + +import com.atlarge.odcsim.SimulationEngineProvider +import com.atlarge.opendc.compute.virt.service.allocation.* +import com.atlarge.opendc.experiments.sc20.experiment.attachMonitor +import com.atlarge.opendc.experiments.sc20.experiment.createFailureDomain +import com.atlarge.opendc.experiments.sc20.experiment.createProvisioner +import com.atlarge.opendc.experiments.sc20.experiment.model.Workload +import com.atlarge.opendc.experiments.sc20.experiment.monitor.ParquetExperimentMonitor +import com.atlarge.opendc.experiments.sc20.experiment.processTrace +import com.atlarge.opendc.experiments.sc20.trace.Sc20ParquetTraceReader +import com.atlarge.opendc.experiments.sc20.trace.Sc20RawParquetTraceReader +import com.atlarge.opendc.format.trace.sc20.Sc20PerformanceInterferenceReader +import com.github.ajalt.clikt.core.CliktCommand +import com.github.ajalt.clikt.parameters.options.* +import com.github.ajalt.clikt.parameters.types.file +import com.github.ajalt.clikt.parameters.types.int +import com.mongodb.MongoClientSettings +import com.mongodb.MongoCredential +import com.mongodb.ServerAddress +import com.mongodb.client.MongoClients +import com.mongodb.client.MongoCollection +import com.mongodb.client.MongoDatabase +import com.mongodb.client.model.Filters +import java.io.File +import java.util.* +import kotlin.random.Random +import kotlinx.coroutines.* +import kotlinx.coroutines.channels.Channel +import mu.KotlinLogging +import org.bson.Document + +private val logger = KotlinLogging.logger {} + +/** + * The provider for the simulation engine to use. + */ +private val provider = ServiceLoader.load(SimulationEngineProvider::class.java).first() + +/** + * Represents the CLI command for starting the OpenDC web runner. + */ +class RunnerCli : CliktCommand(name = "runner") { + /** + * The name of the database to use. + */ + private val mongoDb by option( + "--mongo-db", + help = "name of the database to use", + envvar = "OPENDC_DB" + ) + .default("opendc") + + /** + * The database host to connect to. 
+ */ + private val mongoHost by option( + "--mongo-host", + help = "database host to connect to", + envvar = "OPENDC_DB_HOST" + ) + .default("localhost") + + /** + * The database port to connect to. + */ + private val mongoPort by option( + "--mongo-port", + help = "database port to connect to", + envvar = "OPENDC_DB_PORT" + ) + .int() + .default(27017) + + /** + * The database user to connect with. + */ + private val mongoUser by option( + "--mongo-user", + help = "database user to connect with", + envvar = "OPENDC_DB_USER" + ) + .default("opendc") + + /** + * The database password to connect with. + */ + private val mongoPassword by option( + "--mongo-password", + help = "database password to connect with", + envvar = "OPENDC_DB_PASSWORD" + ) + .convert { it.toCharArray() } + .required() + + /** + * The path to the traces directory. + */ + private val tracePath by option( + "--traces", + help = "path to the directory containing the traces", + envvar = "OPENDC_TRACES" + ) + .file(canBeFile = false) + .defaultLazy { File("traces/") } + + /** + * The path to the output directory. + */ + private val outputPath by option( + "--output", + help = "path to the results directory", + envvar = "OPENDC_OUTPUT" + ) + .file(canBeFile = false) + .defaultLazy { File("results/") } + + /** + * The Spark master to connect to. + */ + private val spark by option( + "--spark", + help = "Spark master to connect to", + envvar = "OPENDC_SPARK" + ) + .default("local[*]") + + /** + * Connect to the user-specified database. + */ + private fun createDatabase(): MongoDatabase { + val credential = MongoCredential.createScramSha1Credential( + mongoUser, + mongoDb, + mongoPassword + ) + + val settings = MongoClientSettings.builder() + .credential(credential) + .applyToClusterSettings { it.hosts(listOf(ServerAddress(mongoHost, mongoPort))) } + .build() + val client = MongoClients.create(settings) + return client.getDatabase(mongoDb) + } + + /** + * Run a single scenario. + */ + private suspend fun runScenario(portfolio: Document, scenario: Document, topologies: MongoCollection<Document>) { + val id = scenario.getString("_id") + + logger.info { "Constructing performance interference model" } + + val traceDir = File( + tracePath, + scenario.getEmbedded(listOf("trace", "traceId"), String::class.java) + ) + val traceReader = Sc20RawParquetTraceReader(traceDir) + val performanceInterferenceReader = let { + val path = File(traceDir, "performance-interference-model.json") + val operational = scenario.get("operational", Document::class.java) + val enabled = operational.getBoolean("performanceInterferenceEnabled") + + if (!enabled || !path.exists()) { + return@let null + } + + path.inputStream().use { Sc20PerformanceInterferenceReader(it) } + } + + val targets = portfolio.get("targets", Document::class.java) + + repeat(targets.getInteger("repeatsPerScenario")) { + logger.info { "Starting repeat $it" } + runRepeat(scenario, it, topologies, traceReader, performanceInterferenceReader) + } + + logger.info { "Finished simulation for scenario $id" } + } + + /** + * Run a single repeat. + */ + private suspend fun runRepeat( + scenario: Document, + repeat: Int, + topologies: MongoCollection<Document>, + traceReader: Sc20RawParquetTraceReader, + performanceInterferenceReader: Sc20PerformanceInterferenceReader? 
+ ) { + val id = scenario.getString("_id") + val seed = repeat + val traceDocument = scenario.get("trace", Document::class.java) + val workloadName = traceDocument.getString("traceId") + val workloadFraction = traceDocument.get("loadSamplingFraction", Number::class.java).toDouble() + + val seeder = Random(seed) + val system = provider("experiment-$id") + val root = system.newDomain("root") + + val chan = Channel<Unit>(Channel.CONFLATED) + + val operational = scenario.get("operational", Document::class.java) + val allocationPolicy = + when (val policyName = operational.getString("schedulerName")) { + "mem" -> AvailableMemoryAllocationPolicy() + "mem-inv" -> AvailableMemoryAllocationPolicy(true) + "core-mem" -> AvailableCoreMemoryAllocationPolicy() + "core-mem-inv" -> AvailableCoreMemoryAllocationPolicy(true) + "active-servers" -> NumberOfActiveServersAllocationPolicy() + "active-servers-inv" -> NumberOfActiveServersAllocationPolicy(true) + "provisioned-cores" -> ProvisionedCoresAllocationPolicy() + "provisioned-cores-inv" -> ProvisionedCoresAllocationPolicy(true) + "random" -> RandomAllocationPolicy(Random(seeder.nextInt())) + else -> throw IllegalArgumentException("Unknown policy $policyName") + } + + val performanceInterferenceModel = performanceInterferenceReader?.construct(seeder) ?: emptyMap() + val trace = Sc20ParquetTraceReader( + listOf(traceReader), + performanceInterferenceModel, + Workload(workloadName, workloadFraction), + seed + ) + val topologyId = scenario.getEmbedded(listOf("topology", "topologyId"), String::class.java) + val environment = TopologyParser(topologies, topologyId) + val monitor = ParquetExperimentMonitor( + outputPath, + "scenario_id=$id/run_id=$repeat", + 4096 + ) + + root.launch { + val (bareMetalProvisioner, scheduler) = createProvisioner( + root, + environment, + allocationPolicy + ) + + val failureDomain = if (operational.getBoolean("failuresEnabled")) { + logger.debug("ENABLING failures") + createFailureDomain( + seeder.nextInt(), + operational.get("failureFrequency", Number::class.java)?.toDouble() ?: 24.0 * 7, + bareMetalProvisioner, + chan + ) + } else { + null + } + + attachMonitor(scheduler, monitor) + processTrace( + trace, + scheduler, + chan, + monitor, + emptyMap() + ) + + logger.debug("SUBMIT=${scheduler.submittedVms}") + logger.debug("FAIL=${scheduler.unscheduledVms}") + logger.debug("QUEUED=${scheduler.queuedVms}") + logger.debug("RUNNING=${scheduler.runningVms}") + logger.debug("FINISHED=${scheduler.finishedVms}") + + failureDomain?.cancel() + scheduler.terminate() + } + + try { + system.run() + } finally { + system.terminate() + monitor.close() + } + } + + override fun run() = runBlocking(Dispatchers.Default) { + logger.info { "Starting OpenDC web runner" } + logger.info { "Connecting to MongoDB instance" } + val database = createDatabase() + val manager = ScenarioManager(database.getCollection("scenarios")) + val portfolios = database.getCollection("portfolios") + val topologies = database.getCollection("topologies") + + logger.info { "Launching Spark" } + val resultProcessor = ResultProcessor(spark, outputPath) + + logger.info { "Watching for queued scenarios" } + + while (true) { + val scenario = manager.findNext() + + if (scenario == null) { + delay(5000) + continue + } + + val id = scenario.getString("_id") + + logger.info { "Found queued scenario $id: attempting to claim" } + + if (!manager.claim(id)) { + logger.info { "Failed to claim scenario" } + continue + } + + coroutineScope { + // Launch heartbeat process + launch { + 
delay(60000) + manager.heartbeat(id) + } + + try { + val portfolio = portfolios.find(Filters.eq("_id", scenario.getString("portfolioId"))).first()!! + runScenario(portfolio, scenario, topologies) + + logger.info { "Starting result processing" } + + val result = resultProcessor.process(id) + manager.finish(id, result) + + logger.info { "Successfully finished scenario $id" } + } catch (e: Exception) { + logger.warn(e) { "Scenario failed to finish" } + manager.fail(id) + } + } + } + } +} + +/** + * Main entry point of the runner. + */ +fun main(args: Array<String>) = RunnerCli().main(args) diff --git a/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/ResultProcessor.kt b/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/ResultProcessor.kt new file mode 100644 index 00000000..39092653 --- /dev/null +++ b/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/ResultProcessor.kt @@ -0,0 +1,187 @@ +package com.atlarge.opendc.runner.web + +import java.io.File +import org.apache.spark.sql.Column +import org.apache.spark.sql.Dataset +import org.apache.spark.sql.Row +import org.apache.spark.sql.SparkSession +import org.apache.spark.sql.functions.* + +/** + * A helper class for processing the experiment results using Apache Spark. + */ +class ResultProcessor(private val master: String, private val outputPath: File) { + /** + * Process the results of the scenario with the given [id]. + */ + fun process(id: String): Result { + val spark = SparkSession.builder() + .master(master) + .appName("opendc-simulator-$id") + .config("spark.driver.bindAddress", "0.0.0.0") // Needed to allow the worker to connect to driver + .orCreate + + try { + val hostMetrics = spark.read().parquet(File(outputPath, "host-metrics/scenario_id=$id").path) + val provisionerMetrics = spark.read().parquet(File(outputPath, "provisioner-metrics/scenario_id=$id").path) + val res = aggregate(hostMetrics, provisionerMetrics).first() + + return Result( + res.getList<Long>(1), + res.getList<Long>(2), + res.getList<Long>(3), + res.getList<Long>(4), + res.getList<Double>(5), + res.getList<Double>(6), + res.getList<Double>(7), + res.getList<Int>(8), + res.getList<Long>(9), + res.getList<Long>(10), + res.getList<Long>(11), + res.getList<Int>(12), + res.getList<Int>(13), + res.getList<Int>(14), + res.getList<Int>(15) + ) + } finally { + spark.close() + } + } + + data class Result( + val totalRequestedBurst: List<Long>, + val totalGrantedBurst: List<Long>, + val totalOvercommittedBurst: List<Long>, + val totalInterferedBurst: List<Long>, + val meanCpuUsage: List<Double>, + val meanCpuDemand: List<Double>, + val meanNumDeployedImages: List<Double>, + val maxNumDeployedImages: List<Int>, + val totalPowerDraw: List<Long>, + val totalFailureSlices: List<Long>, + val totalFailureVmSlices: List<Long>, + val totalVmsSubmitted: List<Int>, + val totalVmsQueued: List<Int>, + val totalVmsFinished: List<Int>, + val totalVmsFailed: List<Int> + ) + + /** + * Perform aggregation of the experiment results. 
+ */ + private fun aggregate(hostMetrics: Dataset<Row>, provisionerMetrics: Dataset<Row>): Dataset<Row> { + // Extrapolate the duration of the entries to span the entire trace + val hostMetricsExtra = hostMetrics + .withColumn("slice_counts", floor(col("duration") / lit(sliceLength))) + .withColumn("power_draw", col("power_draw") * col("slice_counts")) + .withColumn("state_int", states[col("state")]) + .withColumn("state_opposite_int", oppositeStates[col("state")]) + .withColumn("cpu_usage", col("cpu_usage") * col("slice_counts") * col("state_opposite_int")) + .withColumn("cpu_demand", col("cpu_demand") * col("slice_counts")) + .withColumn("failure_slice_count", col("slice_counts") * col("state_int")) + .withColumn("failure_vm_slice_count", col("slice_counts") * col("state_int") * col("vm_count")) + + // Process all data in a single run + val hostMetricsGrouped = hostMetricsExtra.groupBy("run_id") + + // Aggregate the summed total metrics + val systemMetrics = hostMetricsGrouped.agg( + sum("requested_burst").alias("total_requested_burst"), + sum("granted_burst").alias("total_granted_burst"), + sum("overcommissioned_burst").alias("total_overcommitted_burst"), + sum("interfered_burst").alias("total_interfered_burst"), + sum("power_draw").alias("total_power_draw"), + sum("failure_slice_count").alias("total_failure_slices"), + sum("failure_vm_slice_count").alias("total_failure_vm_slices") + ) + + // Aggregate metrics per host + val hvMetrics = hostMetrics + .groupBy("run_id", "host_id") + .agg( + sum("cpu_usage").alias("mean_cpu_usage"), + sum("cpu_demand").alias("mean_cpu_demand"), + avg("vm_count").alias("mean_num_deployed_images"), + count(lit(1)).alias("num_rows") + ) + .withColumn("mean_cpu_usage", col("mean_cpu_usage") / col("num_rows")) + .withColumn("mean_cpu_demand", col("mean_cpu_demand") / col("num_rows")) + .groupBy("run_id") + .agg( + avg("mean_cpu_usage").alias("mean_cpu_usage"), + avg("mean_cpu_demand").alias("mean_cpu_demand"), + avg("mean_num_deployed_images").alias("mean_num_deployed_images"), + max("mean_num_deployed_images").alias("max_num_deployed_images") + ) + + // Group the provisioner metrics per run + val provisionerMetricsGrouped = provisionerMetrics.groupBy("run_id") + + // Aggregate the provisioner metrics + val provisionerMetricsAggregated = provisionerMetricsGrouped.agg( + max("vm_total_count").alias("total_vms_submitted"), + max("vm_waiting_count").alias("total_vms_queued"), + max("vm_active_count").alias("total_vms_running"), + max("vm_inactive_count").alias("total_vms_finished"), + max("vm_failed_count").alias("total_vms_failed") + ) + + // Join the results into a single data frame + return systemMetrics + .join(hvMetrics, "run_id") + .join(provisionerMetricsAggregated, "run_id") + .select( + col("total_requested_burst"), + col("total_granted_burst"), + col("total_overcommitted_burst"), + col("total_interfered_burst"), + col("mean_cpu_usage"), + col("mean_cpu_demand"), + col("mean_num_deployed_images"), + col("max_num_deployed_images"), + col("total_power_draw"), + col("total_failure_slices"), + col("total_failure_vm_slices"), + col("total_vms_submitted"), + col("total_vms_queued"), + col("total_vms_finished"), + col("total_vms_failed") + ) + .groupBy(lit(1)) + .agg( + // TODO Check if order of values is correct + collect_list(col("total_requested_burst")).alias("total_requested_burst"), + collect_list(col("total_granted_burst")).alias("total_granted_burst"), + collect_list(col("total_overcommitted_burst")).alias("total_overcommitted_burst"), + 
collect_list(col("total_interfered_burst")).alias("total_interfered_burst"), + collect_list(col("mean_cpu_usage")).alias("mean_cpu_usage"), + collect_list(col("mean_cpu_demand")).alias("mean_cpu_demand"), + collect_list(col("mean_num_deployed_images")).alias("mean_num_deployed_images"), + collect_list(col("max_num_deployed_images")).alias("max_num_deployed_images"), + collect_list(col("total_power_draw")).alias("total_power_draw"), + collect_list(col("total_failure_slices")).alias("total_failure_slices"), + collect_list(col("total_failure_vm_slices")).alias("total_failure_vm_slices"), + collect_list(col("total_vms_submitted")).alias("total_vms_submitted"), + collect_list(col("total_vms_queued")).alias("total_vms_queued"), + collect_list(col("total_vms_finished")).alias("total_vms_finished"), + collect_list(col("total_vms_failed")).alias("total_vms_failed") + ) + } + + // Spark helper functions + operator fun Column.times(other: Column): Column = `$times`(other) + operator fun Column.div(other: Column): Column = `$div`(other) + operator fun Column.get(other: Column): Column = this.apply(other) + + val sliceLength = 5 * 60 * 1000 + val states = map( + lit("ERROR"), lit(1), + lit("ACTIVE"), lit(0), + lit("SHUTOFF"), lit(0) + ) + val oppositeStates = map( + lit("ERROR"), lit(0), + lit("ACTIVE"), lit(1), + lit("SHUTOFF"), lit(1) + ) +} diff --git a/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/ScenarioManager.kt b/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/ScenarioManager.kt new file mode 100644 index 00000000..40ffd282 --- /dev/null +++ b/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/ScenarioManager.kt @@ -0,0 +1,93 @@ +package com.atlarge.opendc.runner.web + +import com.mongodb.client.MongoCollection +import com.mongodb.client.model.Filters +import com.mongodb.client.model.Updates +import java.time.Instant +import org.bson.Document + +/** + * Manages the queue of scenarios that need to be processed. + */ +class ScenarioManager(private val collection: MongoCollection<Document>) { + /** + * Find the next scenario that the simulator needs to process. + */ + fun findNext(): Document? { + return collection + .find(Filters.eq("simulation.state", "QUEUED")) + .first() + } + + /** + * Claim the scenario in the database with the specified id. + */ + fun claim(id: String): Boolean { + val res = collection.findOneAndUpdate( + Filters.and( + Filters.eq("_id", id), + Filters.eq("simulation.state", "QUEUED") + ), + Updates.combine( + Updates.set("simulation.state", "RUNNING"), + Updates.set("simulation.heartbeat", Instant.now()) + ) + ) + return res != null + } + + /** + * Update the heartbeat of the specified scenario. + */ + fun heartbeat(id: String) { + collection.findOneAndUpdate( + Filters.and( + Filters.eq("_id", id), + Filters.eq("simulation.state", "RUNNING") + ), + Updates.set("simulation.heartbeat", Instant.now()) + ) + } + + /** + * Mark the scenario as failed. + */ + fun fail(id: String) { + collection.findOneAndUpdate( + Filters.eq("_id", id), + Updates.combine( + Updates.set("simulation.state", "FAILED"), + Updates.set("simulation.heartbeat", Instant.now()) + ) + ) + } + + /** + * Persist the specified results. 
+ */
+ fun finish(id: String, result: ResultProcessor.Result) {
+ collection.findOneAndUpdate(
+ Filters.eq("_id", id),
+ Updates.combine(
+ Updates.set("simulation.state", "FINISHED"),
+ Updates.unset("simulation.time"),
+ Updates.set("results.total_requested_burst", result.totalRequestedBurst),
+ Updates.set("results.total_granted_burst", result.totalGrantedBurst),
+ Updates.set("results.total_overcommitted_burst", result.totalOvercommittedBurst),
+ Updates.set("results.total_interfered_burst", result.totalInterferedBurst),
+ Updates.set("results.mean_cpu_usage", result.meanCpuUsage),
+ Updates.set("results.mean_cpu_demand", result.meanCpuDemand),
+ Updates.set("results.mean_num_deployed_images", result.meanNumDeployedImages),
+ Updates.set("results.max_num_deployed_images", result.maxNumDeployedImages),
+ Updates.set("results.total_power_draw", result.totalPowerDraw),
+ Updates.set("results.total_failure_slices", result.totalFailureSlices),
+ Updates.set("results.total_failure_vm_slices", result.totalFailureVmSlices),
+ Updates.set("results.total_vms_submitted", result.totalVmsSubmitted),
+ Updates.set("results.total_vms_queued", result.totalVmsQueued),
+ Updates.set("results.total_vms_finished", result.totalVmsFinished),
+ Updates.set("results.total_vms_failed", result.totalVmsFailed)
+ )
+ )
+ }
+}
diff --git a/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/TopologyParser.kt b/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/TopologyParser.kt
new file mode 100644
index 00000000..499585ec
--- /dev/null
+++ b/simulator/opendc/opendc-runner-web/src/main/kotlin/com/atlarge/opendc/runner/web/TopologyParser.kt
@@ -0,0 +1,127 @@
+package com.atlarge.opendc.runner.web
+
+import com.atlarge.odcsim.Domain
+import com.atlarge.opendc.compute.core.MemoryUnit
+import com.atlarge.opendc.compute.core.ProcessingNode
+import com.atlarge.opendc.compute.core.ProcessingUnit
+import com.atlarge.opendc.compute.metal.NODE_CLUSTER
+import com.atlarge.opendc.compute.metal.driver.SimpleBareMetalDriver
+import com.atlarge.opendc.compute.metal.power.LinearLoadPowerModel
+import com.atlarge.opendc.compute.metal.service.ProvisioningService
+import com.atlarge.opendc.compute.metal.service.SimpleProvisioningService
+import com.atlarge.opendc.core.Environment
+import com.atlarge.opendc.core.Platform
+import com.atlarge.opendc.core.Zone
+import com.atlarge.opendc.core.services.ServiceRegistry
+import com.atlarge.opendc.format.environment.EnvironmentReader
+import com.mongodb.client.AggregateIterable
+import com.mongodb.client.MongoCollection
+import com.mongodb.client.model.Aggregates
+import com.mongodb.client.model.Field
+import com.mongodb.client.model.Filters
+import com.mongodb.client.model.Projections
+import java.util.*
+import kotlinx.coroutines.launch
+import org.bson.Document
+
+/**
+ * A helper class that converts the MongoDB topology into an OpenDC environment.
+ */
+class TopologyParser(private val collection: MongoCollection<Document>, private val id: String) : EnvironmentReader {
+ /**
+ * Parse the topology with the specified [id].
+ */ + override suspend fun construct(dom: Domain): Environment { + val nodes = mutableListOf<SimpleBareMetalDriver>() + val random = Random(0) + + for (machine in fetchMachines(id)) { + val machineId = machine.getString("_id") + val clusterId = machine.getString("rack_id") + val position = machine.getInteger("position") + + val processors = machine.getList("cpus", Document::class.java).flatMap { cpu -> + val cores = cpu.getInteger("numberOfCores") + val speed = cpu.get("clockRateMhz", Number::class.java).toDouble() + // TODO Remove hardcoding of vendor + val node = ProcessingNode("Intel", "amd64", cpu.getString("name"), cores) + List(cores) { coreId -> + ProcessingUnit(node, coreId, speed) + } + } + val memoryUnits = machine.getList("memories", Document::class.java).map { memory -> + MemoryUnit( + "Samsung", + memory.getString("name"), + memory.get("speedMbPerS", Number::class.java).toDouble(), + memory.get("sizeMb", Number::class.java).toLong() + ) + } + nodes.add( + SimpleBareMetalDriver( + dom.newDomain(machineId), + UUID(random.nextLong(), random.nextLong()), + "node-$clusterId-$position", + mapOf(NODE_CLUSTER to clusterId), + processors, + memoryUnits, + // For now we assume a simple linear load model with an idle draw of ~200W and a maximum + // power draw of 350W. + // Source: https://stackoverflow.com/questions/6128960 + LinearLoadPowerModel(200.0, 350.0) + ) + ) + } + + val provisioningService = SimpleProvisioningService(dom.newDomain("provisioner")) + dom.launch { + for (node in nodes) { + provisioningService.create(node) + } + } + + val serviceRegistry = ServiceRegistry().put(ProvisioningService, provisioningService) + + val platform = Platform( + UUID.randomUUID(), "opendc-platform", listOf( + Zone(UUID.randomUUID(), "zone", serviceRegistry) + ) + ) + + return Environment(fetchName(id), null, listOf(platform)) + } + + override fun close() {} + + /** + * Fetch the metadata of the topology. + */ + private fun fetchName(id: String): String { + return collection.aggregate( + listOf( + Aggregates.match(Filters.eq("_id", id)), + Aggregates.project(Projections.include("name")) + ) + ) + .first()!! + .getString("name") + } + + /** + * Fetch a topology from the database with the specified [id]. 
+ */ + private fun fetchMachines(id: String): AggregateIterable<Document> { + return collection.aggregate( + listOf( + Aggregates.match(Filters.eq("_id", id)), + Aggregates.project(Projections.fields(Document("racks", "\$rooms.tiles.rack"))), + Aggregates.unwind("\$racks"), + Aggregates.unwind("\$racks"), + Aggregates.replaceRoot("\$racks"), + Aggregates.addFields(Field("machines.rack_id", "\$_id")), + Aggregates.unwind("\$machines"), + Aggregates.replaceRoot("\$machines") + ) + ) + } +} diff --git a/simulator/opendc/opendc-runner-web/src/main/resources/log4j2.xml b/simulator/opendc/opendc-runner-web/src/main/resources/log4j2.xml new file mode 100644 index 00000000..1d873554 --- /dev/null +++ b/simulator/opendc/opendc-runner-web/src/main/resources/log4j2.xml @@ -0,0 +1,52 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + ~ MIT License + ~ + ~ Copyright (c) 2020 atlarge-research + ~ + ~ Permission is hereby granted, free of charge, to any person obtaining a copy + ~ of this software and associated documentation files (the "Software"), to deal + ~ in the Software without restriction, including without limitation the rights + ~ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + ~ copies of the Software, and to permit persons to whom the Software is + ~ furnished to do so, subject to the following conditions: + ~ + ~ The above copyright notice and this permission notice shall be included in all + ~ copies or substantial portions of the Software. + ~ + ~ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + ~ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + ~ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + ~ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + ~ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + ~ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + ~ SOFTWARE. 
+ --> + +<Configuration status="WARN"> + <Appenders> + <Console name="Console" target="SYSTEM_OUT"> + <PatternLayout pattern="%d{HH:mm:ss.SSS} [%highlight{%-5level}] %logger{36} - %msg%n" disableAnsi="false" /> + </Console> + </Appenders> + <Loggers> + <Logger name="com.atlarge.odcsim" level="info" additivity="false"> + <AppenderRef ref="Console"/> + </Logger> + <Logger name="com.atlarge.opendc" level="warn" additivity="false"> + <AppenderRef ref="Console"/> + </Logger> + <Logger name="com.atlarge.opendc.runner" level="info" additivity="false"> + <AppenderRef ref="Console"/> + </Logger> + <Logger name="org.apache.hadoop" level="warn" additivity="false"> + <AppenderRef ref="Console"/> + </Logger> + <Logger name="org.apache.spark" level="info" additivity="false"> + <AppenderRef ref="Console"/> + </Logger> + <Root level="error"> + <AppenderRef ref="Console"/> + </Root> + </Loggers> +</Configuration> diff --git a/simulator/opendc/opendc-workflows/src/main/kotlin/com/atlarge/opendc/workflows/service/StageWorkflowService.kt b/simulator/opendc/opendc-workflows/src/main/kotlin/com/atlarge/opendc/workflows/service/StageWorkflowService.kt index 7c7990e2..1193f7b2 100644 --- a/simulator/opendc/opendc-workflows/src/main/kotlin/com/atlarge/opendc/workflows/service/StageWorkflowService.kt +++ b/simulator/opendc/opendc-workflows/src/main/kotlin/com/atlarge/opendc/workflows/service/StageWorkflowService.kt @@ -39,13 +39,13 @@ import com.atlarge.opendc.workflows.service.stage.resource.ResourceSelectionPoli import com.atlarge.opendc.workflows.service.stage.task.TaskEligibilityPolicy import com.atlarge.opendc.workflows.service.stage.task.TaskOrderPolicy import com.atlarge.opendc.workflows.workload.Job +import java.util.PriorityQueue +import java.util.Queue import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.launchIn import kotlinx.coroutines.flow.onEach -import java.util.PriorityQueue -import java.util.Queue import kotlinx.coroutines.launch import kotlinx.coroutines.withContext diff --git a/simulator/opendc/opendc-workflows/src/main/kotlin/com/atlarge/opendc/workflows/service/WorkflowService.kt b/simulator/opendc/opendc-workflows/src/main/kotlin/com/atlarge/opendc/workflows/service/WorkflowService.kt index ad818dde..a60ba0e2 100644 --- a/simulator/opendc/opendc-workflows/src/main/kotlin/com/atlarge/opendc/workflows/service/WorkflowService.kt +++ b/simulator/opendc/opendc-workflows/src/main/kotlin/com/atlarge/opendc/workflows/service/WorkflowService.kt @@ -26,8 +26,8 @@ package com.atlarge.opendc.workflows.service import com.atlarge.opendc.core.services.AbstractServiceKey import com.atlarge.opendc.workflows.workload.Job -import kotlinx.coroutines.flow.Flow import java.util.UUID +import kotlinx.coroutines.flow.Flow /** * A service for cloud workflow management. 
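Taken together, ScenarioManager and Main.kt above implement a small work queue over MongoDB: findNext polls for a QUEUED scenario, claim flips QUEUED to RUNNING through a filtered findOneAndUpdate so two runners cannot take the same document, heartbeat refreshes a liveness timestamp, and finish/fail persist the outcome. As committed, the runner sends a single heartbeat 60 seconds after claiming; a long-running simulation would more plausibly refresh the timestamp on an interval, along the lines of the hedged sketch below (the loop and its cancellation are assumptions, not part of the patch):

    // Periodic-heartbeat variant (sketch only); the names used here are
    // those already in scope in Main.kt's run() loop.
    coroutineScope {
        val heartbeat = launch {
            while (isActive) {
                delay(60000)
                manager.heartbeat(id)
            }
        }
        try {
            runScenario(portfolio, scenario, topologies)
            manager.finish(id, resultProcessor.process(id))
        } catch (e: Exception) {
            logger.warn(e) { "Scenario failed to finish" }
            manager.fail(id)
        } finally {
            heartbeat.cancel() // stop the liveness loop once the run is done
        }
    }
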
diff --git a/simulator/opendc/opendc-workflows/src/test/kotlin/com/atlarge/opendc/workflows/service/StageWorkflowSchedulerIntegrationTest.kt b/simulator/opendc/opendc-workflows/src/test/kotlin/com/atlarge/opendc/workflows/service/StageWorkflowSchedulerIntegrationTest.kt
index 5ee6d5e6..5c129e37 100644
--- a/simulator/opendc/opendc-workflows/src/test/kotlin/com/atlarge/opendc/workflows/service/StageWorkflowSchedulerIntegrationTest.kt
+++ b/simulator/opendc/opendc-workflows/src/test/kotlin/com/atlarge/opendc/workflows/service/StageWorkflowSchedulerIntegrationTest.kt
@@ -35,6 +35,8 @@ import com.atlarge.opendc.workflows.service.stage.resource.FirstFitResourceSelec
import com.atlarge.opendc.workflows.service.stage.resource.FunctionalResourceFilterPolicy
import com.atlarge.opendc.workflows.service.stage.task.NullTaskEligibilityPolicy
import com.atlarge.opendc.workflows.service.stage.task.SubmissionTimeTaskOrderPolicy
+import java.util.ServiceLoader
+import kotlin.math.max
import kotlinx.coroutines.async
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.collect
@@ -44,8 +46,6 @@ import kotlinx.coroutines.runBlocking
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.DisplayName
import org.junit.jupiter.api.Test
-import java.util.ServiceLoader
-import kotlin.math.max

/**
 * Integration test suite for the [StageWorkflowService].
diff --git a/simulator/settings.gradle.kts b/simulator/settings.gradle.kts
index 677a9817..9411d882 100644
--- a/simulator/settings.gradle.kts
+++ b/simulator/settings.gradle.kts
@@ -31,3 +31,4 @@ include(":opendc:opendc-format")
include(":opendc:opendc-workflows")
include(":opendc:opendc-experiments-sc18")
include(":opendc:opendc-experiments-sc20")
+include(":opendc:opendc-runner-web")
diff --git a/traces/bitbrains-small/meta.parquet b/traces/bitbrains-small/meta.parquet
Binary files differ
new file mode 100644
index 00000000..ce7a812c
--- /dev/null
+++ b/traces/bitbrains-small/meta.parquet
diff --git a/traces/bitbrains-small/trace.parquet b/traces/bitbrains-small/trace.parquet
Binary files differ
new file mode 100644
index 00000000..1d7ce882
--- /dev/null
+++ b/traces/bitbrains-small/trace.parquet
diff --git a/web-server/opendc/api/v2/prefabs/prefabId/endpoint.py b/web-server/opendc/api/v2/prefabs/prefabId/endpoint.py
deleted file mode 100644
index e8508ee0..00000000
--- a/web-server/opendc/api/v2/prefabs/prefabId/endpoint.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from datetime import datetime
-
-from opendc.models.prefab import Prefab
-from opendc.util.database import Database
-from opendc.util.rest import Response
-
-
-def GET(request):
- """Get this Prefab."""
-
- request.check_required_parameters(path={'prefabId': 'string'})
-
- prefab = Prefab.from_id(request.params_path['prefabId'])
- print(prefab.obj)
- prefab.check_exists()
- print("before cua")
- prefab.check_user_access(request.google_id)
- print("after cua")
-
- return Response(200, 'Successfully retrieved prefab', prefab.obj)
-
-
-def PUT(request):
- """Update a prefab's name and/or contents."""
-
- request.check_required_parameters(body={'prefab': {'name': 'name'}}, path={'prefabId': 'string'})
-
- prefab = Prefab.from_id(request.params_path['prefabId'])
-
- prefab.check_exists()
- prefab.check_user_access(request.google_id)
-
- prefab.set_property('name', request.params_body['prefab']['name'])
- prefab.set_property('rack', request.params_body['prefab']['rack'])
- prefab.set_property('datetime_last_edited', Database.datetime_to_string(datetime.now()))
- prefab.update()
-
- 
return Response(200, 'Successfully updated prefab.', prefab.obj) - - -def DELETE(request): - """Delete this Prefab.""" - - request.check_required_parameters(path={'prefabId': 'string'}) - - prefab = Prefab.from_id(request.params_path['prefabId']) - - prefab.check_exists() - prefab.check_user_access(request.google_id) - - old_object = prefab.delete() - - return Response(200, 'Successfully deleted prefab.', old_object) diff --git a/web-server/opendc/api/v2/prefabs/prefabId/test_endpoint.py b/web-server/opendc/api/v2/prefabs/prefabId/test_endpoint.py deleted file mode 100644 index b25c881d..00000000 --- a/web-server/opendc/api/v2/prefabs/prefabId/test_endpoint.py +++ /dev/null @@ -1,140 +0,0 @@ -from opendc.util.database import DB -from unittest.mock import Mock - - -def test_get_prefab_non_existing(client, mocker): - mocker.patch.object(DB, 'fetch_one', return_value=None) - assert '404' in client.get('/api/v2/prefabs/1').status - -def test_get_private_prefab_not_authorized(client, mocker): - DB.fetch_one = Mock() - DB.fetch_one.side_effect = [{ - '_id': '1', - 'name': 'test prefab', - 'authorId': '2', - 'visibility': 'private', - 'rack': {} - }, - { - '_id': '1' - } - ] - res = client.get('/api/v2/prefabs/1') - assert '403' in res.status - - -def test_get_private_prefab(client, mocker): - DB.fetch_one = Mock() - DB.fetch_one.side_effect = [{ - '_id': '1', - 'name': 'test prefab', - 'authorId': '1', - 'visibility': 'private', - 'rack': {} - }, - { - '_id': '1' - } - ] - res = client.get('/api/v2/prefabs/1') - assert '200' in res.status - -def test_get_public_prefab(client, mocker): - DB.fetch_one = Mock() - DB.fetch_one.side_effect = [{ - '_id': '1', - 'name': 'test prefab', - 'authorId': '2', - 'visibility': 'public', - 'rack': {} - }, - { - '_id': '1' - } - ] - res = client.get('/api/v2/prefabs/1') - assert '200' in res.status - - -def test_update_prefab_missing_parameter(client): - assert '400' in client.put('/api/v2/prefabs/1').status - - -def test_update_prefab_non_existing(client, mocker): - mocker.patch.object(DB, 'fetch_one', return_value=None) - assert '404' in client.put('/api/v2/prefabs/1', json={'prefab': {'name': 'S'}}).status - - -def test_update_prefab_not_authorized(client, mocker): - DB.fetch_one = Mock() - DB.fetch_one.side_effect = [{ - '_id': '1', - 'name': 'test prefab', - 'authorId': '2', - 'visibility': 'private', - 'rack': {} - }, - { - '_id': '1' - } - ] - mocker.patch.object(DB, 'update', return_value={}) - assert '403' in client.put('/api/v2/prefabs/1', json={'prefab': {'name': 'test prefab', 'rack' : {}}}).status - - -def test_update_prefab(client, mocker): - DB.fetch_one = Mock() - DB.fetch_one.side_effect = [{ - '_id': '1', - 'name': 'test prefab', - 'authorId': '1', - 'visibility': 'private', - 'rack': {} - }, - { - '_id': '1' - } - ] - mocker.patch.object(DB, 'update', return_value={}) - res = client.put('/api/v2/prefabs/1', json={'prefab': {'name': 'test prefab', 'rack' : {}}}) - assert '200' in res.status - - -def test_delete_prefab_non_existing(client, mocker): - mocker.patch.object(DB, 'fetch_one', return_value=None) - assert '404' in client.delete('/api/v2/prefabs/1').status - - -def test_delete_prefab_different_user(client, mocker): - DB.fetch_one = Mock() - DB.fetch_one.side_effect = [{ - '_id': '1', - 'name': 'test prefab', - 'authorId': '2', - 'visibility': 'private', - 'rack': {} - }, - { - '_id': '1' - } - ] - mocker.patch.object(DB, 'delete_one', return_value=None) - assert '403' in client.delete('/api/v2/prefabs/1').status - - -def 
test_delete_prefab(client, mocker):
- DB.fetch_one = Mock()
- DB.fetch_one.side_effect = [{
- '_id': '1',
- 'name': 'test prefab',
- 'authorId': '1',
- 'visibility': 'private',
- 'rack': {}
- },
- {
- '_id': '1'
- }
- ]
- mocker.patch.object(DB, 'delete_one', return_value={'prefab': {'name': 'name'}})
- res = client.delete('/api/v2/prefabs/1')
- assert '200' in res.status
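A closing note on the many single-line import moves across the simulator tree in this merge: they normalize every touched Kotlin file to one flat import list sorted by full package path in ASCII order, so java.* and kotlin.* land between the project's com.atlarge.* packages and third-party groups such as kotlinx, me, mu and org. This matches ktlint's default lexicographic import ordering, though the patch itself does not name the tool. Schematically (a composite, not any single file):

    import com.atlarge.opendc.format.trace.TraceReader
    import java.io.File
    import java.util.UUID
    import kotlin.random.Random
    import kotlinx.coroutines.launch
    import me.tongfei.progressbar.ProgressBar
    import mu.KotlinLogging
    import org.apache.avro.Schema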
