Skip to content

Commit

Permalink
Graphs for methods vs error and time
Browse files Browse the repository at this point in the history
  • Loading branch information
anitacruz committed Apr 26, 2022
1 parent 24309fb commit 9134aef
Show file tree
Hide file tree
Showing 3 changed files with 87 additions and 30 deletions.
28 changes: 28 additions & 0 deletions ej_optim_no_lineal/graphs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from turtle import color
from matplotlib import pyplot as plt
import numpy as np

def method_vs_error(gd_errors, cg_errors, adam_errors):
    """Plot a bar chart of the average error per optimization method.

    Each bar shows the mean of the given error samples; the error bar on
    top of it is the sample standard deviation. Figure 1 is used so the
    chart coexists with the time chart (figure 2).
    """
    plt.figure(1)
    # Three shades of red, one per method.
    bar_colors = plt.cm.Reds(np.linspace(0.5, 1, 3))
    labels = ["Gradiente descendente", "Gradiente conjugado", "Método Adam"]
    series = (gd_errors, cg_errors, adam_errors)
    means = [np.mean(s) for s in series]
    deviations = [np.std(s) for s in series]
    # Bar chart of average error vs. method, with standard deviation.
    plt.bar(labels, means, yerr=deviations, color=bar_colors)
    plt.xlabel("Método")
    plt.ylabel("Error")



def method_vs_time(gd_times, cg_times, adam_times):
    """Plot a bar chart of the average running time per optimization method.

    Each bar shows the mean of the given time samples; the error bar is
    the sample standard deviation. Calls ``plt.show()`` at the end, which
    also displays any previously prepared figures (e.g. the error chart).
    """
    plt.figure(2)
    # Three shades of red, one per method.
    bar_colors = plt.cm.Reds(np.linspace(0.5, 1, 3))
    labels = ["Gradiente descendente", "Gradiente conjugado", "Método Adam"]
    series = (gd_times, cg_times, adam_times)
    means = [np.mean(s) for s in series]
    deviations = [np.std(s) for s in series]
    # Bar chart of average time vs. method, with standard deviation.
    plt.bar(labels, means, yerr=deviations, color=bar_colors)
    plt.xlabel("Método")
    plt.ylabel("Tiempo")
    plt.show()



85 changes: 57 additions & 28 deletions ej_optim_no_lineal/main.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import time
import graphs as g
from numpy import random
from functions import error


from individual import Individual
from optimization_methods import minimize_adam, minimize_cg, minimize_gd

Expand All @@ -14,34 +16,61 @@ def main():
]
expected_output = [0, 1, 1]
values =[]
print("============START=============")
for n in range(11):
values.append(random.uniform(-10,10))

ind:Individual = Individual(values)
print(str(ind))
init_time= time.time()
res = minimize_gd(ind, expected_output, reagents) #res.x me da el mejor individuo y res.fun el valor del error para el x
end_time = time.time()
print("Gradiente descendente")
print(f"Tiempo de ejecución :{end_time-init_time}")
print(f"Individuo: {res.x}")
print(f"Valor de error: {res.fun}")
init_time= time.time()
res = minimize_cg(ind, expected_output, reagents) #res.x me da el mejor individuo y res.fun el valor del error para el x
end_time = time.time()
print("Gradiente conjugado")
print(f"Tiempo de ejecución :{end_time-init_time}")
print(f"Individuo: {res.x}")
print(f"Valor de error: {res.fun}")
init_time= time.time()
res = minimize_adam(ind, expected_output, reagents) #res.x me da el mejor individuo y res.fun el valor del error para el x
end_time = time.time()
print("Gradiente conjugado")
print(f"Tiempo de ejecución :{end_time-init_time}")
print(f"Individuo: {res}")
print(f"Valor de error: {error(res, expected_output, reagents)}")
print("============END=============")
initial_errors = []
gd_errors = []
cg_errors = []
adam_errors = []
gd_times = []
cg_times = []
adam_times = []

random.seed(1)

for j in range(0,20):
print("============START=============")
values = []
for n in range(11):
values.append(random.uniform(-10,10))
ind:Individual = Individual(values)
initial_errors.append(error(ind.genotype, expected_output, reagents))

#print(str(ind))
init_time= time.time()
res_gd = minimize_gd(ind, expected_output, reagents) #res.x me da el mejor individuo y res.fun el valor del error para el x
end_time = time.time()
print("Gradiente descendente")
print(f"Tiempo de ejecución :{end_time-init_time}")
print(f"Individuo: {res_gd.x}")
print(f"Valor de error: {res_gd.fun}")
dif_time_1 = end_time - init_time
gd_times.append(dif_time_1)
gd_errors.append(res_gd.fun)

init_time= time.time()
res_cg = minimize_cg(ind, expected_output, reagents) #res.x me da el mejor individuo y res.fun el valor del error para el x
end_time = time.time()
print("Gradiente conjugado")
print(f"Tiempo de ejecución :{end_time-init_time}")
print(f"Individuo: {res_cg.x}")
print(f"Valor de error: {res_cg.fun}")
dif_time_2 = end_time - init_time
cg_times.append(dif_time_2)
cg_errors.append(res_cg.fun)

init_time= time.time()
res_adam = minimize_adam(ind, expected_output, reagents, 10) #res.x me da el mejor individuo y res.fun el valor del error para el x
end_time = time.time()
print("Método Adam")
print(f"Tiempo de ejecución :{end_time-init_time}")
print(f"Individuo: {res_adam}")
print(f"Valor de error: {error(res_adam, expected_output, reagents)}")
dif_time_3 = end_time - init_time
adam_times.append(dif_time_3)
adam_errors.append(error(res_adam, expected_output, reagents))

print("============END=============")
#g.initial_error_vs_errors(initial_errors, gd_errors, cg_errors, adam_errors)
g.method_vs_error(gd_errors, cg_errors, adam_errors)
g.method_vs_time(gd_times, cg_times, adam_times)

main()
4 changes: 2 additions & 2 deletions ej_optim_no_lineal/optimization_methods.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,6 @@ def minimize_gd(ind: Individual, expected_output, reagents):
def minimize_cg(ind: Individual, expected_output, reagents):
    """Minimize the error function with SciPy's conjugate-gradient method.

    Starts from the individual's genotype; returns the SciPy result object
    (``res.x`` is the best genotype, ``res.fun`` its error value).
    """
    extra_args = (expected_output, reagents)
    return minimize(error, ind.genotype, method='CG', args=extra_args)

def minimize_adam(ind: Individual, expected_output, reagents, step=0.80085):
    """Minimize the error function with the Adam optimizer.

    Wraps the error in an ``ErrorWrapper`` so it becomes a function of the
    genotype alone, takes its numerical gradient, and runs ``adam`` starting
    from the individual's genotype. Returns the optimized genotype vector.

    ``step`` is the Adam step size; it defaults to the previously
    hard-coded value (0.80085) so existing three-argument callers keep
    their behavior.
    """
    wrapper = ErrorWrapper(reagents, expected_output)
    # nd.Gradient builds a numerical gradient of the wrapped error function.
    return adam(nd.Gradient(wrapper.apply_function), ind.genotype, step_size=step)

0 comments on commit 9134aef

Please sign in to comment.