Merge pull request NREL#424 from rewiringamerica/refactor_fix
Two small fixes
nmerket authored Jan 5, 2024
2 parents a047cf7 + 1b60710 commit 3cbb673
Showing 3 changed files with 17 additions and 2 deletions.
2 changes: 1 addition & 1 deletion buildstockbatch/aws/aws.py
@@ -1706,7 +1706,7 @@ def run_job(cls, job_id, bucket, prefix, job_name, region):
                     logger.debug("Extracting {}".format(epw_filename))
                     f_out.write(gzip.decompress(f_gz.getvalue()))
 
-        cls.run_simulations(cfg, jobs_d, job_id, sim_dir, S3FileSystem(), bucket, prefix)
+        cls.run_simulations(cfg, jobs_d, job_id, sim_dir, S3FileSystem(), f"{bucket}/{prefix}")
 
 
 @log_error_details()
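Why this change matters: run_simulations (defined in docker_base.py, next file) takes a single output_path argument rather than separate bucket and prefix parameters, so the old call supplied one positional argument too many. A minimal sketch of the mismatch, using hypothetical placeholder values rather than the real batch inputs:

class Example:
    @classmethod
    def run_simulations(cls, cfg, job_id, jobs_d, sim_dir, fs, output_path):
        # docker_base.py derives result locations from the single output_path string,
        # e.g. f"{output_path}/results/simulation_output/timeseries".
        return f"{output_path}/results"

bucket, prefix = "my-bucket", "my-prefix"                 # hypothetical values
cfg, jobs_d, job_id, sim_dir, fs = {}, {}, 0, ".", None   # stand-ins for the real inputs

# Old call shape: seven positional arguments for a six-parameter method -> TypeError
# Example.run_simulations(cfg, jobs_d, job_id, sim_dir, fs, bucket, prefix)

# Fixed call shape: bucket and prefix joined into one path string
print(Example.run_simulations(cfg, jobs_d, job_id, sim_dir, fs, f"{bucket}/{prefix}"))
# -> my-bucket/my-prefix/results
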
2 changes: 1 addition & 1 deletion buildstockbatch/cloud/docker_base.py
@@ -380,7 +380,7 @@ def run_simulations(cls, cfg, job_id, jobs_d, sim_dir, fs, output_path):
         dpouts = []
         simulation_output_tar_filename = sim_dir.parent / "simulation_outputs.tar.gz"
         asset_dirs = os.listdir(sim_dir)
-        ts_output_dir = (f"{output_path}/results/simulation_output/timeseries",)
+        ts_output_dir = f"{output_path}/results/simulation_output/timeseries"
 
         with tarfile.open(str(simulation_output_tar_filename), "w:gz") as simout_tar:
             for building_id, upgrade_idx in jobs_d["batch"]:
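The docker_base.py change removes a stray trailing comma that made ts_output_dir a one-element tuple instead of a string. A quick standalone illustration of the pitfall, with a placeholder output_path:

output_path = "my-bucket/my-prefix"  # placeholder value

# With the trailing comma, the parentheses plus comma build a tuple...
ts_output_dir = (f"{output_path}/results/simulation_output/timeseries",)
print(type(ts_output_dir).__name__)  # tuple

# ...so later string/path operations on it would fail, for example:
# ts_output_dir + "/anything"  ->  TypeError: can only concatenate tuple (not "str") to tuple

# Without the comma it is the intended string:
ts_output_dir = f"{output_path}/results/simulation_output/timeseries"
print(type(ts_output_dir).__name__)  # str
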
15 changes: 15 additions & 0 deletions buildstockbatch/test/test_docker_base.py
@@ -1,5 +1,6 @@
 """Tests for the DockerBatchBase class."""
 from fsspec.implementations.local import LocalFileSystem
+import gzip
 import json
 import os
 import pathlib
@@ -102,6 +103,12 @@ def test_get_epws_to_download():
 
 
 def test_run_simulations(basic_residential_project_file):
+    """
+    Test running a single batch of simulations.
+
+    This doesn't provide all the necessary inputs for the simulations to succeed, but it confirms that they are
+    attempted, that the output files are produced, and that intermediate files are cleaned up.
+    """
     jobs_d = {
         "job_num": 0,
         "n_datapoints": 10,
@@ -129,6 +136,14 @@ def test_run_simulations(basic_residential_project_file):
 
     output_dir = bucket / "test_prefix" / "results" / "simulation_output"
     assert sorted(os.listdir(output_dir)) == ["results_job0.json.gz", "simulations_job0.tar.gz"]
+
+    # Check that buildings 1 and 5 (specified in jobs_d) are in the results
+    with gzip.open(output_dir / "results_job0.json.gz", "r") as f:
+        results = json.load(f)
+    assert len(results) == 2
+    for building in results:
+        assert building["building_id"] in (1, 5)
+
     # Check that files were cleaned up correctly
     assert not os.listdir(sim_dir)
     os.chdir(old_cwd)
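For reference, the new assertions read the gzipped JSON results file that the batch writes. A standalone sketch of the same read pattern (the filename matches the one checked in the test above; the surrounding test fixture is not reproduced here):

import gzip
import json

# Each entry in the results list is a dict with (at least) a "building_id" key,
# per the assertions in the diff above.
with gzip.open("results_job0.json.gz", "r") as f:
    results = json.load(f)

building_ids = sorted(r["building_id"] for r in results)
print(building_ids)  # e.g. [1, 5] for the two buildings in the test's jobs_d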
