Commit aaa639a

Migrate to pyenergyplus (#541)
* update to openstudio 3.8, e+ 24.1 and pyfmi 2.11
* first pass at migration to pyenergyplus
* first version of pyenergyplus migration passing integration tests
* fix formatting, bump min python to 3.11
* fix exception import stack
* fix api tests, import changes, point id changes
* add all the new stuff that didn't get committed earlier
* remove goaws from simulation ci test
* fix simulation test
* fix arguments for timescale test
* start fixing mock step job
* implement initialize_simulation
* correct increment of time in mock job
* remove deprecated models from the scaling tests
* reduce scale test models to current set
* fix simulation model testing
* fix influxdb reporting
* add sleep to allow mock step run to fall behind
* fix step duration
* remove more redundant variables in modelica step_run.py
* bump bake-action to v5
* remove files which are no longer needed
* clean up and document StepRunBase
* clean up StepRunProcess and begin cleaning openstudio StepRun
* refactor handling of points for openstudio
* fix unit tests
1 parent ecb0fa2 commit aaa639a
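
The headline change swaps the EnergyPlus-as-FMU workflow (pyfmi) for EnergyPlus's bundled Python API. The sketch below is not this commit's StepRun code, just a minimal illustration of the pyenergyplus runtime-callback pattern the migration builds on; the weather file, IDF, output directory, and zone name are placeholders.

```python
from pyenergyplus.api import EnergyPlusAPI

api = EnergyPlusAPI()
state = api.state_manager.new_state()

# Variables must be requested before the run when not declared in the IDF.
api.exchange.request_variable(state, "Zone Mean Air Temperature", "CORE_ZN")

def on_timestep(s):
    # Exchange data only once E+ has finished warmup and setup.
    if not api.exchange.api_data_fully_ready(s):
        return
    handle = api.exchange.get_variable_handle(s, "Zone Mean Air Temperature", "CORE_ZN")
    print(api.exchange.get_variable_value(s, handle))
    # A step-run job would block here until an external "advance" request.

api.runtime.callback_begin_zone_timestep_after_init_heat_balance(state, on_timestep)

# Placeholder paths for the weather file, output directory, and model.
api.runtime.run_energyplus(state, ["-w", "weather.epw", "-d", "out", "model.idf"])
```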

128 files changed: +12,274 −569,639 lines


.github/workflows/ci.yml

Lines changed: 7 additions & 7 deletions
```diff
@@ -21,7 +21,7 @@ jobs:
       - name: Install Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.8"
+          python-version: "3.11"

       - name: Run pre-commit
         uses: pre-commit/[email protected]
@@ -39,7 +39,7 @@ jobs:
       - name: Install Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.8"
+          python-version: "3.11"

       - name: Install poetry
         uses: abatilo/actions-poetry@v3
@@ -76,7 +76,7 @@ jobs:
         uses: docker/setup-buildx-action@v3

       - name: Build
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
           files: docker-compose.yml, docker-compose.dev.yml
           set: |
@@ -92,7 +92,7 @@ jobs:
           GIT_COMMIT: ${{ github.sha }}
         run: |
           printenv
-          docker compose -f docker-compose.yml -f docker-compose.dev.yml up -d worker mongo redis minio mc goaws
+          docker compose -f docker-compose.yml -f docker-compose.dev.yml up -d worker mongo redis minio mc

       - name: Dump docker logs before tests
         uses: jwalton/gh-docker-logs@v2
@@ -117,7 +117,7 @@ jobs:
       - name: Install Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.8"
+          python-version: "3.11"

       - name: Install poetry
         uses: abatilo/actions-poetry@v3
@@ -128,7 +128,7 @@ jobs:
         uses: docker/setup-buildx-action@v3

       - name: Build
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
           files: docker-compose.yml
           set: |
@@ -196,7 +196,7 @@ jobs:
         uses: docker/setup-buildx-action@v3

       - name: Build
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
           load: true
           files: docker-compose.yml, docker-compose.dev.yml, docker-compose.historian.yml
```

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions
```diff
@@ -26,8 +26,8 @@ repos:
       - id: requirements-txt-fixer
       - id: mixed-line-ending
         args: ["--fix=auto"]
-  - repo: https://github.com/pre-commit/mirrors-autopep8
-    rev: v2.0.1
+  - repo: https://github.com/hhatto/autopep8
+    rev: v2.3.1
     hooks:
       - id: autopep8
         args:
```

alfalfa_web/server/api-v2.js

Lines changed: 8 additions & 1 deletion
```diff
@@ -106,7 +106,7 @@ router.param("pointId", (req, res, next, id) => {
   const error = validate(
     { id },
     {
-      id: "required|uuid"
+      id: "required|string"
     }
   );
   if (error) return res.status(400).json({ message: error });
@@ -164,6 +164,13 @@ router.get("/runs/:runId/time", async (req, res, next) => {
     .catch(next);
 });

+router.get("/runs/:runId/log", async (req, res, next) => {
+  api
+    .getRunLog(req.run)
+    .then((log) => res.json({ payload: { log } }))
+    .catch(next);
+});
+
 router.get("/runs/:runId/points", (req, res, next) => {
   api
     .getPointsByRun(req.run)
```
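
The new route exposes a run's recent worker log over HTTP. A minimal client sketch, assuming an Alfalfa deployment reachable at http://localhost; the base URL and run id below are placeholders:

```python
import requests

BASE = "http://localhost/api/v2"  # placeholder base URL
run_id = "00000000-0000-0000-0000-000000000000"  # placeholder run ref_id

resp = requests.get(f"{BASE}/runs/{run_id}/log")
resp.raise_for_status()

# The route returns the joined log lines as {"payload": {"log": "..."}}.
print(resp.json()["payload"]["log"])
```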

alfalfa_web/server/api.js

Lines changed: 11 additions & 3 deletions
```diff
@@ -92,6 +92,11 @@ class AlfalfaAPI {
     return await getHashValue(this.redis, run.ref_id, "sim_time");
   };

+  getRunLog = async (run) => {
+    const log_lines = await this.redis.lRange(`run:${run.ref_id}:log`, -100, -1);
+    return log_lines.join("\n");
+  };
+
   getPointsByRun = async (run) => {
     const pointsCursor = this.points.find({ run: run._id });
     return Promise.resolve(pointsCursor.toArray());
@@ -126,7 +131,8 @@ class AlfalfaAPI {
     const pointDict = {
       id: point.ref_id,
       name: point.name,
-      type: point.point_type
+      type: point.point_type,
+      units: point.units
     };
     return pointDict;
   };
@@ -197,7 +203,7 @@ class AlfalfaAPI {

     const { startDatetime, endDatetime, timescale, realtime, externalClock } = data;

-    const job = `alfalfa_worker.jobs.${sim_type === "MODELICA" ? "modelica" : "openstudio"}.StepRun`;
+    const job = `alfalfa_worker.jobs.${sim_type === "MODELICA" ? "modelica" : "openstudio"}.step_run.StepRun`;
     const params = {
       run_id: run.ref_id,
       start_datetime: startDatetime,
@@ -297,7 +303,9 @@ class AlfalfaAPI {

   createRunFromModel = async (model) => {
     const runId = uuidv1();
-    const job = `alfalfa_worker.jobs.${model.model_name.endsWith(".fmu") ? "modelica" : "openstudio"}.CreateRun`;
+    const job = `alfalfa_worker.jobs.${
+      model.model_name.endsWith(".fmu") ? "modelica" : "openstudio"
+    }.create_run.CreateRun`;
     const params = {
       model_id: model.ref_id,
       run_id: runId
```
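
getRunLog tails a Redis list keyed run:<ref_id>:log, returning at most the last 100 entries. Here is the same read in Python with redis-py, plus the producer side as an assumption about how the worker appends log lines (RPUSH with an LTRIM cap is a guess, not code from this commit):

```python
import redis

r = redis.Redis(decode_responses=True)
key = "run:00000000-0000-0000-0000-000000000000:log"  # placeholder ref_id

# Assumed producer side: append a line, keep only the newest 100 entries.
r.rpush(key, "2024-06-01 12:00:00 - alfalfa_worker - INFO: step complete")
r.ltrim(key, -100, -1)

# Consumer side, mirroring getRunLog: last 100 entries, oldest first.
print("\n".join(r.lrange(key, -100, -1)))
```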

alfalfa_worker/Dockerfile

Lines changed: 2 additions & 5 deletions
```diff
@@ -1,4 +1,4 @@
-FROM ghcr.io/nrel/alfalfa-dependencies:3.1.0 AS base
+FROM ghcr.io/nrel/alfalfa-dependencies:prepare_080 AS base

 ENV HOME=/alfalfa

@@ -21,10 +21,7 @@ ENV PYTHONPATH="${HOME}:${PYTHONPATH}"

 COPY ./alfalfa_worker ${HOME}/alfalfa_worker

-RUN pip3.8 install virtualenv \
-    && pip3.8 install \
-    scipy \
-    symfit
+COPY ./alfalfa_worker /alfalfa/alfalfa_worker

 COPY ./deploy /alfalfa/deploy
 COPY ./deploy/wait-for-it.sh /usr/local/wait-for-it.sh
```

alfalfa_worker/__main__.py

Lines changed: 8 additions & 0 deletions
```diff
@@ -3,14 +3,22 @@
 import os
 import sys
 import traceback
+from logging import StreamHandler, basicConfig
 from pathlib import Path

 # Determine which worker to load based on the QUEUE.
 # This may be temporary for now, not sure on how else
 # to determine which worker gets launched
 from alfalfa_worker.dispatcher import Dispatcher
+from alfalfa_worker.lib.constants import DATETIME_FORMAT

 if __name__ == '__main__':
+
+    basicConfig(level=os.environ.get("LOGLEVEL", "INFO"),
+                handlers=[StreamHandler(sys.stdout)],
+                format='%(asctime)s - %(name)s - %(levelname)s: %(message)s',
+                datefmt=DATETIME_FORMAT)
+
     try:
         workdir = Path(os.environ.get('RUN_DIR', '/runs'))
         dispatcher = Dispatcher(workdir)
```
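
With the root logger configured once in __main__, every module logger in the worker inherits the stdout handler and format. A small sketch of the resulting behavior, with DATETIME_FORMAT assumed to be '%Y-%m-%d %H:%M:%S' for illustration (the real value lives in alfalfa_worker.lib.constants):

```python
import logging
import sys

# Same call as __main__; the datefmt is an assumed stand-in for DATETIME_FORMAT.
logging.basicConfig(level="INFO",
                    handlers=[logging.StreamHandler(sys.stdout)],
                    format='%(asctime)s - %(name)s - %(levelname)s: %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')

logging.getLogger("alfalfa_worker.dispatcher").info("Dispatcher ready")
# prints: 2024-06-01 12:00:00 - alfalfa_worker.dispatcher - INFO: Dispatcher ready
```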

alfalfa_worker/dispatcher.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -53,7 +53,7 @@ def process_message(self, message):
         """
         try:
             message_body = json.loads(message)
-            self.logger.info(f"Processing message of {message_body}")
+            self.logger.debug(f"Processing message of {message_body}")
             job = message_body.get('job')
             if job:
                 params = message_body.get('params', {})
```
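
The dispatcher consumes JSON messages carrying a job import path and a params dict; the dotted paths now include the module segment (step_run, create_run) added in api.js above. A sketch of a message the updated createRunFromModel would enqueue (the ids are placeholders):

```python
import json

# Shape taken from the api.js diff above; ids are placeholders.
message = json.dumps({
    "job": "alfalfa_worker.jobs.openstudio.create_run.CreateRun",
    "params": {
        "model_id": "00000000-0000-0000-0000-000000000000",
        "run_id": "11111111-1111-1111-1111-111111111111"
    }
})

# What Dispatcher.process_message does with it:
message_body = json.loads(message)
job = message_body.get("job")            # dotted path to the job class
params = message_body.get("params", {})  # keyword arguments for the job
```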
alfalfa_worker/jobs/modelica/create_run.py

Lines changed: 0 additions & 94 deletions
```diff
@@ -1,9 +1,4 @@
-import json
-import os
 from pathlib import Path
-from uuid import uuid4
-
-from pyfmi import load_fmu

 from alfalfa_worker.lib.enums import RunStatus, SimType
 from alfalfa_worker.lib.job import Job
@@ -19,7 +14,6 @@ def __init__(self, model_id, run_id=None):
         # Define FMU specific attributes
         self.upload_fmu: Path = self.dir / model_name
         self.fmu_path = self.dir / 'model.fmu'
-        self.fmu_json = self.dir / 'tags.json'
         self.model_name = model_name

         # Needs to be set after files are uploaded / parsed.
@@ -34,99 +28,11 @@ def exec(self):
         """
         self.logger.info("add_fmu for {}".format(self.run.ref_id))

-        # Create the FMU tags (no longer external now that python2 is deprecated)
-        self.create_tags()
-        # insert tags into db
-        self.insert_fmu_tags()
         self.upload_fmu.rename(self.fmu_path)

     def validate(self) -> None:
         assert (self.dir / 'model.fmu').exists(), "model file not created"
-        assert (self.dir / 'tags.json').exists(), "tags file not created"

     def cleanup(self) -> None:
         super().cleanup()
         self.set_run_status(RunStatus.READY)
-
-    def get_site_ref(self, haystack_json):
-        """
-        Find the site given the haystack JSON file. Remove 'r:' from string.
-        :param haystack_json: json serialized Haystack document
-        :return: site_ref: id of site
-        """
-        site_ref = ''
-        with open(haystack_json) as json_file:
-            data = json.load(json_file)
-            for entity in data:
-                if 'site' in entity:
-                    if entity['site'] == 'm:':
-                        site_ref = entity['id'].replace('r:', '')
-                        break
-        return site_ref
-
-    def insert_fmu_tags(self):
-        with open(self.fmu_json, 'r') as f:
-            data = f.read()
-        points_json = json.loads(data)
-
-        self.run_manager.add_site_to_mongo(points_json, self.run)
-
-    def create_tags(self):
-        # 1.0 setup the inputs
-        fmu = load_fmu(self.upload_fmu)
-
-        # 2.0 get input/output variables from the FMU
-        # causality = 1 is parameter, 2 is input, 3 is output
-        input_names = fmu.get_model_variables(causality=2).keys()
-        output_names = fmu.get_model_variables(causality=3).keys()
-
-        # 3.0 add site tagging
-        tags = []
-
-        fmu_upload_name = os.path.basename(self.model_name)  # without directories
-        fmu_upload_name = os.path.splitext(fmu_upload_name)[0]  # without extension
-
-        # TODO: Figure out how to find geo_city
-        sitetag = {
-            "dis": "s:%s" % fmu_upload_name,
-            "id": "r:%s" % self.run.ref_id,
-            "site": "m:",
-            "datetime": "s:",
-            "simStatus": "s:Stopped",
-            "simType": "s:fmu",
-            "siteRef": "r:%s" % self.run.ref_id
-        }
-        tags.append(sitetag)
-
-        # 4.0 add input tagging
-        for var_input in input_names:
-            if not var_input.endswith("_activate"):
-                tag_input = {
-                    "id": "r:%s" % uuid4(),
-                    "dis": "s:%s" % var_input,
-                    "siteRef": "r:%s" % self.run.ref_id,
-                    "point": "m:",
-                    "writable": "m:",
-                    "writeStatus": "s:disabled",
-                    "kind": "s:Number",
-                }
-                tags.append(tag_input)
-                tag_input = {}
-
-        # 5.0 add output tagging
-        for var_output in output_names:
-            tag_output = {
-                "id": "r:%s" % uuid4(),
-                "dis": "s:%s" % var_output,
-                "siteRef": "r:%s" % self.run.ref_id,
-                "point": "m:",
-                "cur": "m:",
-                "curVal": "n:",
-                "curStatus": "s:disabled",
-                "kind": "s:Number",
-            }
-            tags.append(tag_output)
-
-        # 6.0 write tags to the json file
-        with open(self.fmu_json, 'w') as outfile:
-            json.dump(tags, outfile)
```
