Commit
Add time binning
dzalkind committed Jan 17, 2025
1 parent 0de0bc8 commit 48a5ddd
Showing 4 changed files with 124 additions and 52 deletions.
3 changes: 3 additions & 0 deletions examples/19_controller_test_bench/controller_testbench.py
@@ -69,6 +69,9 @@ def main():
OFmgmt['OF_run_dir'] = os.path.join(os.path.dirname(modopt_file), testbench_options['Testbench_Options']['output_directory'])
testbench_options['Level3']['openfast_dir'] = os.path.join(os.path.dirname(modopt_file),testbench_options['Level3']['openfast_dir'])

# Postprocessing options (map to OFMgmt)
OFmgmt['postprocessing'] = testbench_options['PostProcessing']


if MPI:
opt_options = {}
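The hunk above simply forwards the new PostProcessing block of the testbench YAML into the OpenFAST management options. Below is a minimal sketch of that flow, assuming a standard PyYAML load; the file path and the bare OFmgmt dict are placeholders for the setup that main() builds around them, not part of the commit.

import yaml

modopt_file = "testbench_options.yaml"     # hypothetical path, for illustration only
with open(modopt_file) as f:
    testbench_options = yaml.safe_load(f)

OFmgmt = {}                                # stands in for the management dict built in main()
OFmgmt['postprocessing'] = testbench_options['PostProcessing']

# wind_speed_binning() in openmdao_openfast.py later looks this value up
# through the modeling options passed to the OpenFAST component
binning_time = OFmgmt['postprocessing']['binning_time']   # 20 s in the example YAMLs
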
56 changes: 31 additions & 25 deletions examples/19_controller_test_bench/testbench_options.yaml
@@ -23,6 +23,9 @@ Turbine_Info: # Can we pull this information from OpenFAST input?
blade: False # use to do local blade loading analysis
tower: False # same

PostProcessing:
binning_time: 20


Level3: # Will become OpenFAST once WEIS supports OpenFAST 4.0
flag: True
@@ -46,31 +49,34 @@ DLC_driver:
wave_period50: 13.6
DLCs:
- DLC: "1.1"
n_seeds: 6
- DLC: "1.3"
n_seeds: 6
- DLC: "1.4"
- DLC: "1.5"
- DLC: "1.6"
- DLC: "5.1"
- DLC: "AEP"
TI_factor: 0.5
wind_speed: [12]
- DLC: "Ramp" # Up
wind_speed: [5]
analysis_time: 1000.
ramp_speeddelta: 20
ramp_duration: 1000.
- DLC: "Ramp" # Down
wind_speed: [25]
analysis_time: 1000.
ramp_speeddelta: -20
ramp_duration: 1000.
- DLC: "Step" # Down
wind_speed: [18]
analysis_time: 100.
step_speeddelta: 2.0
step_time: 50.0
n_seeds: 2
wind_speed: [6,12]
transient_time: 0
analysis_time: 400
# - DLC: "1.3"
# n_seeds: 6
# - DLC: "1.4"
# - DLC: "1.5"
# - DLC: "1.6"
# - DLC: "5.1"
# - DLC: "AEP"
# TI_factor: 0.5
# wind_speed: [12]
# - DLC: "Ramp" # Up
# wind_speed: [5]
# analysis_time: 1000.
# ramp_speeddelta: 20
# ramp_duration: 1000.
# - DLC: "Ramp" # Down
# wind_speed: [25]
# analysis_time: 1000.
# ramp_speeddelta: -20
# ramp_duration: 1000.
# - DLC: "Step" # Down
# wind_speed: [18]
# analysis_time: 100.
# step_speeddelta: 2.0
# step_time: 50.0
# - DLC: "Steady"
# wind_speed: [8]
# analysis_time: 10.
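The new PostProcessing: binning_time: 20 entry above sets how the post-processing step chops each output time series into fixed-length windows. The actual splitting is delegated to the AeroelasticOutput.time_binning helper called in openmdao_openfast.py (last file in this commit), whose internals are not shown here. The sketch below is only an assumption of the idea, averaging every channel over consecutive binning_time windows; simple_time_binning, the Time column, and the Wind1VelX channel are illustrative names.

import numpy as np
import pandas as pd

def simple_time_binning(df, binning_time=20.0, time_channel="Time"):
    """Average every channel over consecutive windows of binning_time seconds."""
    bin_index = np.floor(df[time_channel] / binning_time).astype(int)
    return df.groupby(bin_index).mean().reset_index(drop=True)

# Example: a 400 s series (the analysis_time of DLC 1.1 above) sampled at
# 0.1 s collapses to 20 rows for binning_time = 20
t = np.arange(0.0, 400.0, 0.1)
df = pd.DataFrame({"Time": t, "Wind1VelX": 6.0 + 0.5 * np.sin(0.1 * t)})
print(simple_time_binning(df).shape)   # (20, 2)
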
73 changes: 49 additions & 24 deletions examples/19_controller_test_bench/testbench_options_lite.yaml
@@ -1,9 +1,19 @@
Testbench_Options:
output_directory: outputs/7_setup_all_cases_lite # relative to this file
output_directory: outputs/11_local_runs_for_pp # relative to this file
output_filebase: testbench
n_cores: 8 # not yet set up
use_mpi: False

# TODO: simplify this, maybe move to Testbench_Options
# General:
# openfast_configuration:
# FAST_exe: /projects/weis/dzalkind/openfast-versions/openfast-main/build/glue-codes/openfast/openfast
# path2dll: /home/dzalkind/Tools/ROSCO-1/rosco/lib/libdiscon.so
# turbsim_exe: /projects/weis/dzalkind/openfast-versions/openfast-main/build/modules/turbsim/turbsim

PostProcessing:
binning_time: 20

Turbine_Info: # Can we pull this information from OpenFAST input?
# used to generate wind inputs
wind_speed_cut_in: 5
@@ -46,28 +56,43 @@ DLC_driver:
wave_period50: 13.6
DLCs:
- DLC: "1.1"
n_seeds: 6
analysis_time: 1.
transient_time: 0.0
- DLC: "1.3"
n_seeds: 6
analysis_time: 1.
transient_time: 0.0
- DLC: "1.4"
analysis_time: 1.
transient_time: 0.0
- DLC: "1.5"
analysis_time: 1.
transient_time: 0.0
- DLC: "1.6"
analysis_time: 1.
transient_time: 0.0
- DLC: "5.1"
analysis_time: 1.
transient_time: 0.0
shutdown_time: 0.5
- DLC: "AEP"
analysis_time: 1.
n_seeds: 3
wind_speed: [8,15]
analysis_time: 300.
transient_time: 0.0
TI_factor: 0.5
# - DLC: "1.3"
# n_seeds: 2
# analysis_time: 1.
# transient_time: 0.0
# - DLC: "1.4"
# analysis_time: 1.
# transient_time: 0.0
# - DLC: "1.5"
# analysis_time: 1.
# transient_time: 0.0
# - DLC: "1.6"
# analysis_time: 1.
# transient_time: 0.0
# - DLC: "5.1"
# analysis_time: 1.
# transient_time: 0.0
# shutdown_time: 0.5
# - DLC: "AEP"
# analysis_time: 1.
# transient_time: 0.0
# TI_factor: 0.5
- DLC: "Ramp" # Up
wind_speed: [10]
n_seeds: 1
analysis_time: 10.
transient_time: 5.0
ramp_speeddelta: 2
ramp_duration: 8
- DLC: "Ramp" # Down
wind_speed: [10]
n_seeds: 1
analysis_time: 10.
transient_time: 5.0
ramp_speeddelta: -2
ramp_duration: 8
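The two Ramp cases above describe short up- and down-ramps: a 10 m/s base wind speed that changes by ramp_speeddelta over ramp_duration seconds. The actual wind profile is generated by the DLC driver and is not part of this diff; the sketch below is only a guess at its shape, assuming the ramp starts once the transient period ends. The function name and timing details are hypothetical.

import numpy as np

def ramp_wind_speed(t, base_speed=10.0, transient_time=5.0,
                    ramp_speeddelta=2.0, ramp_duration=8.0):
    """Hypothetical ramp profile: constant until transient_time, then linear."""
    frac = np.clip((t - transient_time) / ramp_duration, 0.0, 1.0)
    return base_speed + ramp_speeddelta * frac

t = np.linspace(0.0, 15.0, 16)
print(ramp_wind_speed(t))   # holds 10 m/s until t = 5 s, then ramps toward 12 m/s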

44 changes: 41 additions & 3 deletions weis/aeroelasticse/openmdao_openfast.py
@@ -2232,9 +2232,9 @@ def post_process(self, summary_stats, extreme_table, DELs, damage, case_list, ca
if any(summary_stats['openfast_failed']['mean'] > 0):
outputs['openfast_failed'] = 2

# # Did any OpenFAST runs fail?
# if any(summary_stats['openfast_failed']['mean'] > 0):
# outputs['openfast_failed'] = 2
# Wind speed binning
if 'binning_time' in modopt['PostProcessing']: # TODO: figure out a better flag for this
self.wind_speed_binning(chan_time)

# Save Data
if modopt['General']['openfast_configuration']['save_timeseries']:
@@ -2769,6 +2769,44 @@ def get_charateristic_loads(self,sum_stats,inputs,outputs):
os.makedirs(save_dir, exist_ok=True)
write_yaml(char_loads,os.path.join(save_dir,'charateristic_loads.yaml'))

def wind_speed_binning(self,chan_time):

# First bin all the data for each timeseries
binned_data_ts = [None] * len(chan_time)

for i_case, output in enumerate(chan_time):
po = AeroelasticOutput(output)
df_binned = po.time_binning(self.options['modeling_options']['General']['openfast_configuration']['postprocessing']['binning_time'])
binned_data_ts[i_case] = df_binned

# Make save directories
save_dir = os.path.join(self.FAST_runDirectory,'iteration_'+str(self.of_inumber))
os.makedirs(save_dir, exist_ok=True)

# Get channel list from last timeseries
channels = po.channels.tolist()

# Combine all the data into one binned dataset
binned_data_all = np.empty((0,len(channels)))

# Also combine into dlc-specific datasets
dlcs = self.case_df['DLC'].unique()
for dlc in dlcs:
dlc_ind = np.where(self.case_df['DLC'] == dlc)[0].tolist()
dlc_binned_data = [binned_data_ts[i] for i in dlc_ind]

binned_data_dlc = np.empty((0,len(channels)))
for df_binned in dlc_binned_data:

binned_data_df = df_binned[channels]
binned_data_dlc = np.concatenate((binned_data_dlc,binned_data_df.to_numpy()))
binned_data_all = np.concatenate((binned_data_all,binned_data_df.to_numpy()))

df_binned_dlc = pd.DataFrame(data=binned_data_dlc, columns=channels)
df_binned_dlc.to_pickle(os.path.join(save_dir,f'binned_dlc{dlc}.p'))

df_binned_all = pd.DataFrame(data=binned_data_all, columns=channels)
df_binned_all.to_pickle(os.path.join(save_dir,f'binned_all.p'))

def get_OL2CL_error(self,chan_time,outputs):
ol_case_names = [os.path.join(
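For reference, the pickles written by the new wind_speed_binning method (binned_dlc<DLC>.p and binned_all.p inside iteration_<n> of the run directory) are plain pandas DataFrames, one row per binning window and one column per OpenFAST channel. A hedged usage sketch follows; the run directory, DLC label, and GenPwr channel name are assumptions for illustration, not values fixed by the commit.

import os
import pandas as pd

save_dir = "outputs/11_local_runs_for_pp/iteration_0"   # hypothetical run directory
dlc = "1.1"                                             # hypothetical DLC label

df_dlc = pd.read_pickle(os.path.join(save_dir, f"binned_dlc{dlc}.p"))
df_all = pd.read_pickle(os.path.join(save_dir, "binned_all.p"))

# Each row is one binning_time window of one time series; columns are the
# OpenFAST output channels, so ordinary DataFrame statistics apply.
print(df_dlc["GenPwr"].describe())   # assumes a GenPwr channel is recorded
print(df_all.shape)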
