
auto path to prescient functional
Kyle Skolfield committed Oct 21, 2024
1 parent 9b25745 commit 721fab1
Showing 2 changed files with 39 additions and 11 deletions.
13 changes: 7 additions & 6 deletions gtep/tests/unit/test_validation.py
@@ -6,9 +6,10 @@
 from pyomo.contrib.appsi.solvers.highs import Highs
 import logging

-from gtep.validation import populate_generators
+from gtep.validation import clone_timeseries, filter_pointers, populate_generators, populate_transmission

 input_data_source = "./gtep/data/5bus"
+output_data_source = "./gtep/tests/data/5bus_out"

 def test_solution():
     data_object = ExpansionPlanningData()
@@ -40,14 +41,14 @@ def test_solution():

 class TestValidation(unittest.TestCase):
     def test_populate_generators(self):
-        populate_generators(input_data_source, solution, "")
-        pass
+        populate_generators(input_data_source, solution, output_data_source)


     def test_populate_transmission(self):
-        pass
+        populate_transmission(input_data_source, solution, output_data_source)

     def test_filter_pointers(self):
-        pass
+        filter_pointers(input_data_source, output_data_source)

     def test_clone_timeseries(self):
-        pass
+        clone_timeseries(input_data_source, output_data_source)
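
The four tests above drive the full validation pipeline against a shared output directory, and they are order-dependent: filter_pointers reads the gen.csv that populate_generators writes (see the NOTE in validation.py below). A minimal driver sketch (run_validation_pipeline is a hypothetical helper, assuming the signatures introduced in this commit):

    from gtep.validation import (
        clone_timeseries,
        filter_pointers,
        populate_generators,
        populate_transmission,
    )

    def run_validation_pipeline(input_path, solution, output_path):
        # order matters: filter_pointers needs the gen.csv written here
        populate_generators(input_path, solution, output_path)    # writes gen.csv
        populate_transmission(input_path, solution, output_path)  # writes branch.csv
        filter_pointers(input_path, output_path)                  # writes timeseries_pointers.csv
        clone_timeseries(input_path, output_path)                 # copies the remaining input files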
37 changes: 32 additions & 5 deletions gtep/validation.py
@@ -2,6 +2,8 @@
 from gtep.gtep_model import ExpansionPlanningModel
 from gtep.gtep_solution import ExpansionPlanningSolution
 import re
+import os
+import shutil
 import logging

 import pandas as pd
@@ -40,9 +42,10 @@ def renewable_name_filter(gen_name):
     end_investment_gens += [k for k in end_investment_renewable_gens.keys()]
     # populate output dataframe
     output_df = input_df[input_df['GEN UID'].isin(end_investment_gens)]
-    ## FIXME: (@jkskolf) this only handles thermals and discards renewables

     # TODO: (@jkskolf) should we update prices here? I think no, but ...
+    if not os.path.exists(data_output_path):
+        os.makedirs(data_output_path)
     output_df.to_csv(data_output_path + '/gen.csv', index=False)

 def populate_transmission(data_input_path, sol_object, data_output_path):
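
The exists/makedirs pair added above recurs before each CSV write in this file. If Python 3.2+ can be assumed, a single call is equivalent and avoids the check-then-create race:

    import os

    os.makedirs(data_output_path, exist_ok=True)  # no-op when the directory already exists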
@@ -59,10 +62,34 @@ def branch_name_filter(gen_name):
     end_investment_branches = [re.search(r'\[.*\]', k).group(0)[1:-1] for k in end_investment_solution_dict.keys()]
     output_df = input_df[input_df['UID'].isin(end_investment_branches)]

+    if not os.path.exists(data_output_path):
+        os.makedirs(data_output_path)
+    output_df.to_csv(data_output_path + '/branch.csv', index=False)

-def filter_pointers(data_input_path, sol_object, data_output_path):
-    pass
+def filter_pointers(data_input_path, data_output_path):
+    # load initial timeseries pointers
+    input_pointers_df = pd.read_csv(data_input_path + "/timeseries_pointers.csv")

-def clone_timeseries(data_input_path, sol_object, data_output_path):
-    pass
+    # load final generators
+    # NOTE: must be run _after_ populate_generators and with the same data_output_path
+    #       to pull resulting generator objects
+    output_generators_df = pd.read_csv(data_output_path + "/gen.csv")

+    # keep generators that exist at the final investment stage and remove the rest
+    # keep all non-generator timeseries pointers
+    matching_gen_list = [gen for gen in output_generators_df['GEN UID']]
+    output_df = input_pointers_df[input_pointers_df['Object'].isin(matching_gen_list) | (input_pointers_df['Category'] != 'Generator')]

+    if not os.path.exists(data_output_path):
+        os.makedirs(data_output_path)
+    output_df.to_csv(data_output_path + '/timeseries_pointers.csv', index=False)

+def clone_timeseries(data_input_path, data_output_path):
+    file_list = os.listdir(data_input_path)
+    file_list.remove('timeseries_pointers.csv')
+    file_list.remove('gen.csv')
+    file_list.remove('branch.csv')

+    # @jkskolf, I don't think I like this ...
+    for fname in file_list:
+        shutil.copy(data_input_path + "/" + fname, data_output_path + "/" + fname)
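
The selection in filter_pointers combines two boolean masks: keep a pointer whose Object is a surviving generator, or whose Category is not 'Generator' at all. Because | binds tighter than != in Python, the comparison must be parenthesized, as above. A small self-contained illustration with made-up data:

    import pandas as pd

    pointers = pd.DataFrame({
        'Object':   ['gen1', 'gen2', 'bus1'],
        'Category': ['Generator', 'Generator', 'Load'],
    })
    surviving = ['gen1']  # e.g., the GEN UID column read back from gen.csv

    mask = pointers['Object'].isin(surviving) | (pointers['Category'] != 'Generator')
    print(pointers[mask])  # keeps gen1 (surviving) and bus1 (non-generator pointer)

Note that clone_timeseries copies into data_output_path without creating it, so it relies on an earlier step in the pipeline having made the directory.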
