Skip to content

Commit

Permalink
All bespoke tests passing. Exclusion file in `test_bespoke_5min_sample` is fixed so we access with 'latitude'.
Browse files Browse the repository at this point in the history
  • Loading branch information
bnb32 committed May 29, 2024
1 parent 49da781 commit a8ab4c8
Show file tree
Hide file tree
Showing 3 changed files with 65 additions and 40 deletions.
8 changes: 6 additions & 2 deletions reV/bespoke/bespoke.py
Original file line number Diff line number Diff line change
Expand Up @@ -584,13 +584,17 @@ def _parse_gid_map(gid_map):

if isinstance(gid_map, str):
if gid_map.endswith(".csv"):
gid_map = pd.read_csv(gid_map).to_dict()
gid_map = (
pd.read_csv(gid_map)
.rename(SupplyCurveField.map_to(ResourceMetaField), axis=1)
.to_dict()
)
err_msg = f"Need {ResourceMetaField.GID} in gid_map column"
assert ResourceMetaField.GID in gid_map, err_msg
assert "gid_map" in gid_map, 'Need "gid_map" in gid_map column'
gid_map = {
gid_map[ResourceMetaField.GID][i]: gid_map["gid_map"][i]
for i in gid_map[ResourceMetaField.GID].keys()
for i in gid_map[ResourceMetaField.GID]
}

elif gid_map.endswith(".json"):
Expand Down
93 changes: 58 additions & 35 deletions reV/generation/generation.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
TroughPhysicalHeat,
WindPower,
)
from reV.utilities import ModuleName, ResourceMetaField
from reV.utilities import ModuleName, ResourceMetaField, SupplyCurveField
from reV.utilities.exceptions import (
ConfigError,
InputError,
Expand All @@ -41,16 +41,16 @@


ATTR_DIR = os.path.dirname(os.path.realpath(__file__))
ATTR_DIR = os.path.join(ATTR_DIR, 'output_attributes')
with open(os.path.join(ATTR_DIR, 'other.json')) as f:
ATTR_DIR = os.path.join(ATTR_DIR, "output_attributes")
with open(os.path.join(ATTR_DIR, "other.json")) as f:
OTHER_ATTRS = json.load(f)
with open(os.path.join(ATTR_DIR, 'generation.json')) as f:
with open(os.path.join(ATTR_DIR, "generation.json")) as f:
GEN_ATTRS = json.load(f)
with open(os.path.join(ATTR_DIR, 'linear_fresnel.json')) as f:
with open(os.path.join(ATTR_DIR, "linear_fresnel.json")) as f:
LIN_ATTRS = json.load(f)
with open(os.path.join(ATTR_DIR, 'solar_water_heat.json')) as f:
with open(os.path.join(ATTR_DIR, "solar_water_heat.json")) as f:
SWH_ATTRS = json.load(f)
with open(os.path.join(ATTR_DIR, 'trough_heat.json')) as f:
with open(os.path.join(ATTR_DIR, "trough_heat.json")) as f:
TPPH_ATTRS = json.load(f)


Expand Down Expand Up @@ -83,13 +83,24 @@ class Gen(BaseGen):
OUT_ATTRS.update(TPPH_ATTRS)
OUT_ATTRS.update(BaseGen.ECON_ATTRS)

def __init__(self, technology, project_points, sam_files, resource_file,
low_res_resource_file=None,
output_request=('cf_mean',),
site_data=None, curtailment=None, gid_map=None,
drop_leap=False, sites_per_worker=None,
memory_utilization_limit=0.4, scale_outputs=True,
write_mapped_gids=False, bias_correct=None):
def __init__(
self,
technology,
project_points,
sam_files,
resource_file,
low_res_resource_file=None,
output_request=("cf_mean",),
site_data=None,
curtailment=None,
gid_map=None,
drop_leap=False,
sites_per_worker=None,
memory_utilization_limit=0.4,
scale_outputs=True,
write_mapped_gids=False,
bias_correct=None,
):
"""ReV generation analysis class.
``reV`` generation analysis runs SAM simulations by piping in
Expand Down Expand Up @@ -479,7 +490,7 @@ def meta(self):
self._meta.loc[:, ResourceMetaField.GID] = sites
self._meta.index = self.project_points.sites
self._meta.index.name = ResourceMetaField.GID
self._meta.loc[:, 'reV_tech'] = self.project_points.tech
self._meta.loc[:, "reV_tech"] = self.project_points.tech

return self._meta

Expand Down Expand Up @@ -569,8 +580,7 @@ def handle_lifetime_index(self, ti):
array_vars = [
var for var, attrs in GEN_ATTRS.items() if attrs["type"] == "array"
]
valid_vars = ['gen_profile', 'cf_profile',
'cf_profile_ac']
valid_vars = ["gen_profile", "cf_profile", "cf_profile_ac"]
invalid_vars = set(array_vars) - set(valid_vars)
invalid_requests = [
var for var in self.output_request if var in invalid_vars
Expand Down Expand Up @@ -746,14 +756,15 @@ def _parse_gid_map(self, gid_map):
if isinstance(gid_map, str):
if gid_map.endswith(".csv"):
gid_map = pd.read_csv(gid_map).to_dict()
msg = f'Need {ResourceMetaField.GID} in gid_map column'
msg = f"Need {ResourceMetaField.GID} in gid_map column"
assert ResourceMetaField.GID in gid_map, msg
assert 'gid_map' in gid_map, 'Need "gid_map" in gid_map column'
assert "gid_map" in gid_map, 'Need "gid_map" in gid_map column'
gid_map = {
gid_map[ResourceMetaField.GID][i]: gid_map['gid_map'][i]
for i in gid_map[ResourceMetaField.GID].keys()}
gid_map[ResourceMetaField.GID][i]: gid_map["gid_map"][i]
for i in gid_map[ResourceMetaField.GID].keys()
}

elif gid_map.endswith('.json'):
elif gid_map.endswith(".json"):
with open(gid_map) as f:
gid_map = json.load(f)

Expand Down Expand Up @@ -816,14 +827,20 @@ def _parse_nn_map(self):
if "*" in self.res_file or "*" in self.lr_res_file:
handler_class = MultiFileResource

with handler_class(self.res_file) as hr_res, \
handler_class(self.lr_res_file) as lr_res:
logger.info('Making nearest neighbor map for multi '
'resolution resource data...')
nn_d, nn_map = MultiResolutionResource.make_nn_map(hr_res,
lr_res)
logger.info('Done making nearest neighbor map for multi '
'resolution resource data!')
with handler_class(self.res_file) as hr_res, handler_class(
self.lr_res_file
) as lr_res:
logger.info(
"Making nearest neighbor map for multi "
"resolution resource data..."
)
nn_d, nn_map = MultiResolutionResource.make_nn_map(
hr_res, lr_res
)
logger.info(
"Done making nearest neighbor map for multi "
"resolution resource data!"
)

logger.info(
"Made nearest neighbor mapping between nominal-"
Expand Down Expand Up @@ -886,18 +903,24 @@ def _parse_bc(bias_correct):
return bias_correct

if isinstance(bias_correct, str):
bias_correct = pd.read_csv(bias_correct)
bias_correct = pd.read_csv(bias_correct).rename(
SupplyCurveField.map_to(ResourceMetaField), axis=1
)

msg = (
"Bias correction data must be a filepath to csv or a dataframe "
"but received: {}".format(type(bias_correct))
)
assert isinstance(bias_correct, pd.DataFrame), msg

msg = ('Bias correction table must have {!r} column but only found: '
'{}'.format(ResourceMetaField.GID, list(bias_correct.columns)))
assert (ResourceMetaField.GID in bias_correct
or bias_correct.index.name == ResourceMetaField.GID), msg
msg = (
"Bias correction table must have {!r} column but only found: "
"{}".format(ResourceMetaField.GID, list(bias_correct.columns))
)
assert (
ResourceMetaField.GID in bias_correct
or bias_correct.index.name == ResourceMetaField.GID
), msg

if bias_correct.index.name != ResourceMetaField.GID:
bias_correct = bias_correct.set_index(ResourceMetaField.GID)
Expand Down
4 changes: 1 addition & 3 deletions tests/test_bespoke.py
Original file line number Diff line number Diff line change
Expand Up @@ -1690,9 +1690,7 @@ def test_bespoke_5min_sample():

# hack techmap because 5min data only has 10 wind resource pixels
with h5py.File(excl_fp, "a") as excl_file:
arr = np.random.choice(
10, size=excl_file[SupplyCurveField.LATITUDE].shape
)
arr = np.random.choice(10, size=excl_file["latitude"].shape)
excl_file.create_dataset(name=tm_dset, data=arr)

bsp = BespokeWindPlants(
Expand Down

0 comments on commit a8ab4c8

Please sign in to comment.