mimo.py
###############################################################################
# IMPORTS
###############################################################################
# urbs
import urbs
from pyomo.opt.base import SolverFactory
# oemof
import oemofm
import oemof.solph as solph
import oemof.outputlib as outputlib
from oemof.graph import create_nx_graph
# comparison
import comparison as comp
# connection
import connection_oep as conn
# misc.
import os
import time
###############################################################################
# Comparison & Benchmarking
###############################################################################
def benchmarking(input_data):
"""
Function for benchmarking urbs & oemof
Args:
input_data: input data
Returns:
bench: a dictionary containing benchmarking values
"""
# init
bench = {}
    # benchmark at time-step counts 1, 10, 20, ..., 100, 200, 300, ..., 1000
    for i in range(1, 1001):
        # simulation timesteps
        (offset, length) = (0, i)  # time step selection
        timesteps = range(offset, offset + length + 1)
        if i == 1 or (i <= 100 and i % 10 == 0) or (i > 100 and i % 100 == 0):
            urbs_model, urbs_time = create_um(input_data, timesteps)
            oemof_model, oemof_time = create_om(input_data, timesteps)
            bench[i] = comparison(urbs_model, oemof_model,
                                  threshold=0.1, benchmark=True)
            # setting build time for urbs
            bench[i][0]['build'] = urbs_time
            # setting build time for oemof
            bench[i][1]['build'] = oemof_time
# process benchmark
comp.process_benchmark(bench)
return bench
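

# A minimal usage sketch (assumption: `input_data` was produced by
# conn.read_data()/conn.write_data(), as in the __main__ block below).
# benchmarking() returns a dict keyed by the sampled time-step count; each
# value is the (urbs, oemof) pair of result dictionaries from comparison():
#
#     bench = benchmarking(input_data)
#     urbs_stats, oemof_stats = bench[10]
#     print(urbs_stats['build'], oemof_stats['build'])  # model build times
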
def comparison(u_model, o_model, threshold=0.1, benchmark=False):
"""
Function for comparing urbs & oemof
Args:
        u_model: urbs model instance (use create_um() to generate)
        o_model: oemof model instance (use create_om() to generate)
        threshold: threshold value above which differences are reported
        benchmark: flag to activate/deactivate benchmarking mode
    Returns:
        urbs: a dictionary of the compared urbs values (objective, cpu, memory, constraints)
        oemof: a dictionary of the compared oemof values (objective, cpu, memory, constraints)
"""
# init
urbs = {}
oemof = {}
# compare objective
urbs['obj'] = u_model.obj()
oemof['obj'] = o_model.objective()
# terminal output
print('----------------------------------------------------')
if u_model.obj() != o_model.objective():
print('OBJECTIVE')
print('urbs\t', u_model.obj())
print('oemof\t', o_model.objective())
print('Diff\t', u_model.obj() - o_model.objective())
print('----------------------------------------------------')
    # restore the oemof energy system dumped by create_om() and rebind
    # o_model to it, so the comparison functions below can access its results
    o_model = solph.EnergySystem()
    o_model.restore(dpath=None, filename=None)
# compare cpu and memory
urbs['cpu'], urbs['memory'], oemof['cpu'], oemof['memory'] = \
comp.compare_cpu_and_memory()
# compare lp files
urbs['const'], oemof['const'] = comp.compare_lp_files()
# compare model variables
if len(u_model.tm) >= 2 and not benchmark:
sto = comp.compare_storages(u_model, o_model, threshold)
tra = comp.compare_transmission(u_model, o_model, threshold)
pro = comp.compare_process(u_model, o_model, threshold)
    return urbs, oemof
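

# Standalone comparison sketch (assumes both models are built from the same
# input_data and timesteps, e.g. as in the __main__ block below):
#
#     urbs_model, _ = create_um(input_data, timesteps)
#     oemof_model, _ = create_om(input_data, timesteps)
#     urbs_res, oemof_res = comparison(urbs_model, oemof_model, threshold=0.1)
#     print(urbs_res['obj'], oemof_res['obj'])  # objective values
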
###############################################################################
# urbs Model
###############################################################################
# create urbs model
def create_um(input_data, timesteps):
"""
    Creates and solves an urbs model for the given input data and time steps
    Args:
        input_data: input data
        timesteps: simulation timesteps
    Returns:
        (model, build_time): the solved model instance and its build time in seconds
"""
# create model
print('CREATING urbs MODEL')
start = time.perf_counter()
model = urbs.create_model(input_data, 1, timesteps)
end = time.perf_counter()
# solve model and read results
optim = SolverFactory('glpk')
result = optim.solve(model, logfile='urbs_log.txt', tee=False)
# write LP file
filename = os.path.join(os.path.dirname(__file__), 'mimo_urbs.lp')
model.write(filename, io_options={'symbolic_solver_labels': True})
return model, end - start
###############################################################################
# oemof Model
###############################################################################
# create oemof model
def create_om(input_data, timesteps):
"""
    Creates and solves an oemof model for the given input data and time steps
    Args:
        input_data: input data
        timesteps: simulation timesteps
    Returns:
        (model, build_time): the solved model instance and its build time in seconds
"""
# create oemof energy system
print('CREATING oemof MODEL')
start = time.perf_counter()
es, model = oemofm.create_model(input_data, timesteps)
end = time.perf_counter()
# solve model and read results
model.solve(solver='glpk',
solve_kwargs={'logfile': 'oemof_log.txt', 'tee': False})
# write LP file
filename = os.path.join(os.path.dirname(__file__), 'mimo_oemof.lp')
model.write(filename, io_options={'symbolic_solver_labels': True})
# draw graph
graph = False
if graph:
graph = create_nx_graph(es, model)
oemofm.draw_graph(graph, plot=True, layout='neato', node_size=3000,
node_color={'b_0': '#cd3333',
'b_1': '#7EC0EE',
'b_2': '#eeac7e'})
# get results
es.results['main'] = outputlib.processing.results(model)
es.dump(dpath=None, filename=None)
return model, end - start
if __name__ == '__main__':
# connection to OEP
connection = False
# benchmarking
benchmark = False
# input file
input_file = 'mimo.xlsx'
# simulation timesteps
(offset, length) = (0, 10) # time step selection
timesteps = range(offset, offset + length + 1)
# load data
data = conn.read_data(input_file)
# establish connection to OEP
if connection:
        # config for OEP (config.ini: first line username, second line token)
        with open("config.ini", "r") as f:
            username, token = [line.strip() for line in f]
# create engine
engine, metadata = conn.connect_oep(username, token)
print('OEP Connection established')
# create table
table = {}
input_data = {}
for key in data:
# normalize data for database
data[key] = conn.normalize(data[key], key)
# setup table
table['mimo_'+key] = conn.setup_table('mimo_'+key,
schema_name='sandbox',
metadata=metadata)
"""
# upload to OEP
table['mimo_'+key] = conn.upload_to_oep(data[key],
table['mimo_'+key],
engine, metadata)
"""
# download from OEP
input_data[key] = conn.get_df(engine, table['mimo_'+key])
# denormalize data for models
input_data[key] = conn.denormalize(input_data[key], key)
# write data
input_data = conn.write_data(input_data)
else:
# write data
input_data = data
input_data = conn.write_data(input_data)
# benchmarking
if benchmark:
print('BENCHMARKING----------------------------------------')
benchmarking(input_data)
print('BENCHMARKING-COMPLETED------------------------------')
# comparing
else:
print('COMPARING-------------------------------------------')
urbs_model, urbs_time = create_um(input_data, timesteps)
oemof_model, oemof_time = create_om(input_data, timesteps)
comparison(urbs_model, oemof_model, threshold=0.1)
print('COMPARING-COMPLETED---------------------------------')