Skip to content

Commit

Permalink
Moved load_profile db to data
Browse files Browse the repository at this point in the history
removed comments
fixed bug in visualize.py (crash when extra data is empty)
removed dead `cp` shell command
  • Loading branch information
drbacke committed Oct 5, 2024
1 parent ec1dc87 commit caf907a
Show file tree
Hide file tree
Showing 5 changed files with 32 additions and 104 deletions.
69 changes: 5 additions & 64 deletions flask_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,8 @@ def flask_gesamtlast():
measured_data["time"] = measured_data["time"].dt.tz_localize(None)

# Instantiate LoadForecast and generate forecast data
lf = LoadForecast(filepath=r"load_profiles.npz", year_energy=year_energy)
file_path = os.path.join("data", "load_profiles.npz")
lf = LoadForecast(filepath=file_path, year_energy=year_energy)
forecast_list = []

# Generate daily forecasts for the date range based on measured data
Expand Down Expand Up @@ -153,68 +154,6 @@ def flask_gesamtlast():
return jsonify(last.tolist())


# @app.route('/gesamtlast', methods=['GET'])
# def flask_gesamtlast():
# if request.method == 'GET':
# year_energy = float(request.args.get("year_energy")) # Get annual energy value from query parameters
# prediction_hours = int(request.args.get("hours", 48)) # Default to 48 hours if not specified
# date_now = datetime.now() # Get the current date and time
# end_date = (date_now + timedelta(hours=prediction_hours)).strftime('%Y-%m-%d %H:%M:%S') # Calculate end date based on prediction hours

# ###############
# # Load Forecast
# ###############
# # Instantiate LastEstimator to retrieve measured data
# estimator = LastEstimator()
# start_date = (date_now - timedelta(days=60)).strftime('%Y-%m-%d') # Start date: last 60 days
# end_date = date_now.strftime('%Y-%m-%d') # Current date

# last_df = estimator.get_last(start_date, end_date) # Get last load data

# selected_columns = last_df[['timestamp', 'Last']] # Select relevant columns
# selected_columns['time'] = pd.to_datetime(selected_columns['timestamp']).dt.floor('H') # Floor timestamps to the nearest hour
# selected_columns['Last'] = pd.to_numeric(selected_columns['Last'], errors='coerce') # Convert 'Last' to numeric, coerce errors
# cleaned_data = selected_columns.dropna() # Clean data by dropping NaN values

# # Instantiate LoadForecast
# lf = LoadForecast(filepath=r'load_profiles.npz', year_energy=year_energy)

# # Generate forecast data
# forecast_list = [] # List to hold daily forecasts
# for single_date in pd.date_range(cleaned_data['time'].min().date(), cleaned_data['time'].max().date()): # Iterate over date range
# date_str = single_date.strftime('%Y-%m-%d') # Format date
# daily_forecast = lf.get_daily_stats(date_str) # Get daily stats from LoadForecast
# mean_values = daily_forecast[0] # Extract mean values
# hours = [single_date + pd.Timedelta(hours=i) for i in range(24)] # Generate hours for the day
# daily_forecast_df = pd.DataFrame({'time': hours, 'Last Pred': mean_values}) # Create DataFrame for daily forecast
# forecast_list.append(daily_forecast_df) # Append to the list

# forecast_df = pd.concat(forecast_list, ignore_index=True) # Concatenate all daily forecasts

# # Create LoadPredictionAdjuster instance
# adjuster = LoadPredictionAdjuster(cleaned_data, forecast_df, lf)
# adjuster.calculate_weighted_mean() # Calculate weighted mean for adjustments
# adjuster.adjust_predictions() # Adjust predictions based on measured data

# # Predict the next hours
# future_predictions = adjuster.predict_next_hours(prediction_hours) # Predict future load

# leistung_haushalt = future_predictions['Adjusted Pred'].values # Extract household power predictions

# gesamtlast = Gesamtlast(prediction_hours=prediction_hours) # Create Gesamtlast instance
# gesamtlast.hinzufuegen("Haushalt", leistung_haushalt) # Add household load to total load calculation

# # ###############
# # # WP (Heat Pump)
# # ##############
# # leistung_wp = wp.simulate_24h(temperature_forecast) # Simulate heat pump load for 24 hours
# # gesamtlast.hinzufuegen("Heatpump", leistung_wp) # Add heat pump load to total load calculation

# last = gesamtlast.gesamtlast_berechnen() # Calculate total load
# print(last) # Output total load
# return jsonify(last.tolist()) # Return total load as JSON


@app.route("/gesamtlast_simple", methods=["GET"])
def flask_gesamtlast_simple():
if request.method == "GET":
Expand All @@ -228,8 +167,10 @@ def flask_gesamtlast_simple():
###############
# Load Forecast
###############
file_path = os.path.join("data", "load_profiles.npz")

lf = LoadForecast(
filepath=r"load_profiles.npz", year_energy=year_energy
filepath=file_path, year_energy=year_energy
) # Instantiate LoadForecast with specified parameters
leistung_haushalt = lf.get_stats_for_date_range(date_now, date)[
0
Expand Down
2 changes: 1 addition & 1 deletion modules/class_load.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ def _convert_to_datetime(self, date_str):

# Example usage of the class
if __name__ == "__main__":
filepath = r"..\load_profiles.npz" # Adjust the path to the .npz file
filepath = r"..\data\load_profiles.npz" # Adjust the path to the .npz file
lf = LoadForecast(filepath=filepath, year_energy=2000)
specific_date_prices = lf.get_daily_stats("2024-02-16") # Adjust date as needed
specific_hour_stats = lf.get_hourly_stats(
Expand Down
11 changes: 0 additions & 11 deletions modules/class_load_corrector.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,6 @@
import pandas as pd
from sklearn.metrics import mean_squared_error, r2_score

# from sklearn.model_selection import train_test_split, GridSearchCV
# from sklearn.ensemble import GradientBoostingRegressor
# from xgboost import XGBRegressor
# from statsmodels.tsa.statespace.sarimax import SARIMAX
# from tensorflow.keras.models import Sequential
# from tensorflow.keras.layers import Dense, LSTM
# from tensorflow.keras.optimizers import Adam
# from sklearn.preprocessing import MinMaxScaler
# from sqlalchemy import create_engine


class LoadPredictionAdjuster:
def __init__(self, measured_data, predicted_data, load_forecast):
self.measured_data = measured_data
Expand Down
2 changes: 0 additions & 2 deletions modules/class_optimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,8 +338,6 @@ def optimierung_ems(
extra_data=extra_data,
)

os.system("cp visualisierungsergebnisse.pdf ~/")

# Return final results as a dictionary
return {
"discharge_hours_bin": discharge_hours_bin,
Expand Down
52 changes: 26 additions & 26 deletions modules/visualize.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def visualisiere_ergebnisse(

plt.figure(figsize=(14, 10))

if ist_dst_wechsel(datetime.now()):
if ist_dst_wechsel(datetime.datetime.now()):
hours = np.arange(start_hour, prediction_hours - 1)
else:
hours = np.arange(start_hour, prediction_hours)
Expand Down Expand Up @@ -262,31 +262,31 @@ def visualisiere_ergebnisse(
if n < 0.01
]
)

best_loss = min(filtered_losses)
worst_loss = max(filtered_losses)
best_balance = min(filtered_balance)
worst_balance = max(filtered_balance)

data = [filtered_losses, filtered_balance]
labels = ["Losses", "Balance"]
# Create plots
fig, axs = plt.subplots(
1, 2, figsize=(10, 6), sharey=False
) # Two subplots, separate y-axes

# First violin plot for losses
axs[0].violinplot(data[0], showmeans=True, showmedians=True)
axs[0].set_title("Losses")
axs[0].set_xticklabels(["Losses"])

# Second violin plot for balance
axs[1].violinplot(data[1], showmeans=True, showmedians=True)
axs[1].set_title("Balance")
axs[1].set_xticklabels(["Balance"])

# Fine-tuning
plt.tight_layout()
if filtered_losses.size != 0:
best_loss = min(filtered_losses)
worst_loss = max(filtered_losses)
best_balance = min(filtered_balance)
worst_balance = max(filtered_balance)

data = [filtered_losses, filtered_balance]
labels = ["Losses", "Balance"]
# Create plots
fig, axs = plt.subplots(
1, 2, figsize=(10, 6), sharey=False
) # Two subplots, separate y-axes

# First violin plot for losses
axs[0].violinplot(data[0], showmeans=True, showmedians=True)
axs[0].set_title("Losses")
axs[0].set_xticklabels(["Losses"])

# Second violin plot for balance
axs[1].violinplot(data[1], showmeans=True, showmedians=True)
axs[1].set_title("Balance")
axs[1].set_xticklabels(["Balance"])

# Fine-tuning
plt.tight_layout()

pdf.savefig() # Save the current figure state to the PDF
plt.close() # Close the figure

0 comments on commit caf907a

Please sign in to comment.