diff --git a/docker-compose.yml b/docker-compose.yml index 5d06512..eee23db 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -15,6 +15,6 @@ services: build: context: . environment: - FILESERVICE_URL: http://fileservice:8080/ + FILESERVICE_URL: fileservice ports: - 80:80 diff --git a/src/pgm_service/cim_files/.gitkeep b/src/pgm_service/cim_files/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/src/pgm_service/pgm_powerflow/models.py b/src/pgm_service/pgm_powerflow/models.py index 631993f..e1892f9 100644 --- a/src/pgm_service/pgm_powerflow/models.py +++ b/src/pgm_service/pgm_powerflow/models.py @@ -12,6 +12,10 @@ class StrEnum(str, Enum): pass +class InputData(BaseModel): + sv: str + eq: str + tp: str class PGM_PowerflowCalculationArgs(BaseModel): symmetric: bool = True diff --git a/src/pgm_service/pgm_powerflow/router.py b/src/pgm_service/pgm_powerflow/router.py index 284f93a..8608507 100644 --- a/src/pgm_service/pgm_powerflow/router.py +++ b/src/pgm_service/pgm_powerflow/router.py @@ -1,4 +1,5 @@ from datetime import datetime +import traceback from typing import Dict from uuid import uuid4 from fastapi import APIRouter, BackgroundTasks @@ -6,6 +7,7 @@ from pgm_service.pgm_powerflow.aux_models import JobComplete, Status from pgm_service.pgm_powerflow.models import PGM_Powerflow from pgm_service.power_grid.power_grid_model import calculate_powerflow +from pgm_service.utils import get_py_obj, store_py_obj, update_py_obj router = APIRouter(prefix="/pgm_powerflow", tags=["Powerflow"]) @@ -21,6 +23,7 @@ async def get_all_powerflow_calculation() -> list[str]: async def _calculate(job: JobComplete): job.status = Status.RUNNING + update_py_obj(job, job.id) try: grid = job.input.model @@ -32,7 +35,9 @@ async def _calculate(job: JobComplete): job.status = Status.SUCCESS except Exception as e: job.status = Status.FAILED - job.details = repr(e) + job.details = f"{repr(e)}\n\n{traceback.format_exc()}" + + update_py_obj(job, job.id) @router.post("/") 
@@ -40,9 +45,13 @@ async def new_powerflow_calculation( resource: PGM_Powerflow, background_tasks: BackgroundTasks, ) -> JobComplete: # TODO should be wrapped in jonb - _id = str(uuid4()) - JOBS[_id] = JobComplete(id=_id, input=resource) - job = JOBS[_id] + job = JobComplete(id="tmp", input=resource) + fileID = store_py_obj(job).json()["data"]["fileID"] + job.id = fileID + update_py_obj(job, id=job.id) + # _id = str(uuid4()) + # JOBS[_id] = JobComplete(id=_id, input=resource) + # job = JOBS[_id] background_tasks.add_task(_calculate, job=job) @@ -51,7 +60,7 @@ async def new_powerflow_calculation( @router.get("/{id}") async def get_powerflow_calculation(id: str) -> JobComplete: - return JOBS[id] + return get_py_obj(id) # @router.put("/{id}") diff --git a/src/pgm_service/power_grid/models.py b/src/pgm_service/power_grid/models.py index beea3b9..145ed80 100644 --- a/src/pgm_service/power_grid/models.py +++ b/src/pgm_service/power_grid/models.py @@ -8,7 +8,13 @@ class StrEnum(str, Enum): pass +class InputData(BaseModel): + sv: str + eq: str + tp: str + + # Basic input data class Grid(BaseModel): - input_data: str + input_data: InputData system_frequency: float = 50.0 diff --git a/src/pgm_service/power_grid/power_grid_model.py b/src/pgm_service/power_grid/power_grid_model.py index ae14660..76a4d93 100644 --- a/src/pgm_service/power_grid/power_grid_model.py +++ b/src/pgm_service/power_grid/power_grid_model.py @@ -1,7 +1,9 @@ import glob import os from pathlib import Path +import tempfile from typing import Dict, Any +from pgm_service.utils import download_grid_data import requests import cimpy from power_grid_model import PowerGridModel @@ -10,40 +12,13 @@ from pgm_service.power_grid.cgmes_pgm_converter import System as CGMESToPGMConverter -def download_data(url): - def download_grid_data(name, url): - with open(name, 'wb') as out_file: - content = requests.get(url, stream=True).content - out_file.write(content) - - url = 
'https://raw.githubusercontent.com/dpsim-simulator/cim-grid-data/master/BasicGrids/NEPLAN/Slack_Load_Line_Sample/' - filename = 'Rootnet_FULL_NE_19J18h' - - download_grid_data(filename+'_EQ.xml', url + filename + '_EQ.xml') - download_grid_data(filename+'_TP.xml', url + filename + '_TP.xml') - download_grid_data(filename+'_SV.xml', url + filename + '_SV.xml') - - files = glob.glob(filename+'_*.xml') - - print('CGMES files downloaded:') - print(files) - - this_file_folder = Path(__file__).parents[3] - p = str(this_file_folder) - xml_path = Path(p) - xml_files = [os.path.join(xml_path, filename+'_EQ.xml'), - os.path.join(xml_path, filename+'_TP.xml'), - os.path.join(xml_path, filename+'_SV.xml')] - - print(xml_files) - return xml_files - - def create_model(grid: Grid): # TODO make async - xml_files = download_data(url=grid.input_data) - cgmes_data = cimpy.cim_import(xml_files, "cgmes_v2_4_15") - converter = CGMESToPGMConverter() - converter.load_cim_data(cgmes_data) + # xml_files = download_data(url=grid.input_data) + with tempfile.TemporaryDirectory() as tmpdir: + xml_files = download_grid_data(grid.input_data, tmp_dir=tmpdir) + cgmes_data = cimpy.cim_import(xml_files, "cgmes_v2_4_15") + converter = CGMESToPGMConverter() + converter.load_cim_data(cgmes_data) pgm_data = converter.create_pgm_input() return PowerGridModel(input_data=pgm_data, system_frequency=grid.system_frequency) diff --git a/src/pgm_service/utils.py b/src/pgm_service/utils.py new file mode 100644 index 0000000..9be36c9 --- /dev/null +++ b/src/pgm_service/utils.py @@ -0,0 +1,68 @@ +import os +import pickle +import tempfile +from typing import Any +from pgm_service.pgm_powerflow.models import InputData +import requests + + +def store_py_obj( + data: Any, + url: str = os.getenv("FILESERVICE_URL", "127.0.0.1"), + port: int = int(os.getenv("FILESERVICE_PORT", 8080)), +) -> requests.Response: + ret = post_to_fileservice(pickle.dumps(data), url, port) + print(ret.json()) + return ret + + +def 
update_py_obj( + data: Any, + id: str, + url: str = os.getenv("FILESERVICE_URL", "127.0.0.1"), + port: int = int(os.getenv("FILESERVICE_PORT", 8080)), +): + return requests.put(f"http://{url}:{port}/files/{id}", data=pickle.dumps(data)) + + +def get_py_obj( + id: str, url: str = os.getenv("FILESERVICE_URL", "127.0.0.1"), port: int = int(os.getenv("FILESERVICE_PORT", 8080)) +) -> Any: + return pickle.loads(get_from_fileservice(id, url, port).content) + + +def post_to_fileservice( + data, url: str = os.getenv("FILESERVICE_URL", "127.0.0.1"), port: int = int(os.getenv("FILESERVICE_PORT", 8080)) +) -> requests.Response: + return requests.post(f"http://{url}:{port}/files", data=data) + + +def get_from_fileservice( + id: str, url: str = os.getenv("FILESERVICE_URL", "127.0.0.1"), port: int = int(os.getenv("FILESERVICE_PORT", 8080)) +): # TODO ->CIMFILE + return requests.get(f"http://{url}:{port}/data/{id}") + + +def download_grid_data(input_data: InputData, tmp_dir): + eq = get_from_fileservice(input_data.eq).content + sv = get_from_fileservice(input_data.sv).content + tp = get_from_fileservice(input_data.tp).content + + eq_path = os.path.join(tmp_dir, f"{input_data.eq}_EQ.xml") + sv_path = os.path.join(tmp_dir, f"{input_data.sv}_SV.xml") + tp_path = os.path.join(tmp_dir, f"{input_data.tp}_TP.xml") + with open(eq_path, "bw") as outfile: + outfile.write(eq) + with open(sv_path, "bw") as outfile: + outfile.write(sv) + with open(tp_path, "bw") as outfile: + outfile.write(tp) + return [eq_path, sv_path, tp_path] + + +if __name__ == "__main__": + testob = {"id": 3, "testdata": "test"} + post_response = store_py_obj(testob).json() + print(f"{post_response = }") + get_resp = get_py_obj(id=post_response["data"]["fileID"]) + print(get_resp)