Skip to content

Commit

Permalink
exercise for non linear optimization
Browse files Browse the repository at this point in the history
  • Loading branch information
camilaborinsky committed Apr 25, 2022
1 parent e67a4f1 commit 24309fb
Show file tree
Hide file tree
Showing 7 changed files with 279 additions and 0 deletions.
138 changes: 138 additions & 0 deletions ej_optim_no_lineal/NonLinearOptim.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"inputs = [\n",
" [4.4793, -4.0765, -4.0765],\n",
" [-4.1793, -4.9218, 1.7664],\n",
" [-3.9429, -0.7689, 4.883]\n",
" ]\n",
"expected_output = [0, 1, 1]"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import math\n",
"def g(x):\n",
" try:\n",
" return math.exp(x) / (1 + math.exp(x))\n",
" except:\n",
" return 1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def f(W, omega, omega_zero, sigma):\n",
" outside_sum : float = 0\n",
" for j in range(0,2):\n",
" inside_sum = 0\n",
" for k in range(0,3):\n",
" inside_sum += float(omega[j*3+k]) * float(sigma[k])\n",
" inside_sum -= omega_zero[j] \n",
" inside_g = g(inside_sum)\n",
" outside_sum += inside_g * W[j+1] \n",
" outside_g = g(outside_sum- W[0])\n",
" return outside_g"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def error(W, omega, omega_zero, exact_values,sigma_list): \n",
" error: float = 0\n",
" for i in range(0,3):\n",
"        error += pow(exact_values[i]-f(W, omega, omega_zero,sigma_list[i]),2)\n",
" return error"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"optimization_methods = {'GD', 'GC', 'ADAM'}"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"class Individual:\n",
" def __init__(self, genotype):\n",
" self.genotype = genotype\n",
" \n",
" def get_genotype_elements(self):\n",
" W = self.genotype[0:3]\n",
" omega = self.genotype[3:9]\n",
" omega_zero = self.genotype[9:11]\n",
" \n",
" return W, omega, omega_zero"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"from scipy.optimize import minimize\n",
"def minimize_gd(ind: Individual):\n",
" return minimize(error, ind.genotype, args=ind.get_genotype_elements(), method='BFGS')\n",
"def minimize_cg(ind:Individual):\n",
" return minimize(error, ind.genotype, args=ind.get_genotype_elements(), method='CG')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
37 changes: 37 additions & 0 deletions ej_optim_no_lineal/functions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import math


def g(x):
    """Logistic sigmoid: exp(x) / (1 + exp(x)).

    For large positive x, math.exp overflows; the sigmoid's limit there
    is 1, so that case is handled explicitly instead of raising.
    """
    try:
        return math.exp(x) / (1 + math.exp(x))
    except OverflowError:
        # exp(x) overflowed => x is very large => sigmoid(x) ~= 1.
        # (Was a bare `except:`, which also hid unrelated errors such as
        # TypeError on non-numeric input.)
        return 1

def f(W, omega, omega_zero, sigma):
    """Forward pass of a small 3-input, 2-hidden-unit, 1-output network.

    Hidden unit j (j in {0, 1}) computes
        g(omega[3j : 3j+3] . sigma - omega_zero[j])
    and the output is g(sum_j W[j+1] * hidden_j - W[0]).
    """
    weighted_hidden = 0.0
    for unit in range(2):
        # Dot product of this unit's weight row with the input vector.
        activation = 0.0
        for comp in range(3):
            activation += float(omega[unit * 3 + comp]) * float(sigma[comp])
        activation -= omega_zero[unit]
        weighted_hidden += g(activation) * W[unit + 1]
    # Output neuron: same sigmoid applied to the biased hidden combination.
    return g(weighted_hidden - W[0])

def error(genotype, exact_values, sigma_list):
    """Sum of squared errors of the network over the three training samples.

    `genotype` packs all network parameters as a flat 11-vector:
    W = genotype[0:3], omega = genotype[3:9], omega_zero = genotype[9:11].
    """
    W = genotype[0:3]
    omega = genotype[3:9]
    omega_zero = genotype[9:11]
    # sum-of-squares via a generator instead of manual accumulation; also
    # avoids the original local variable shadowing the function's own name.
    return sum(
        (exact_values[i] - f(W, omega, omega_zero, sigma_list[i])) ** 2
        for i in range(3)
    )

class ErrorWrapper:
    """Adapts error() to the (params, step) callable shape that autograd's
    adam optimizer expects, closing over the fixed training data."""

    def __init__(self, reagents, expected_output):
        # Target outputs and their corresponding inputs, fixed for the run.
        self.expected_output = expected_output
        self.reagents = reagents

    def apply_function(self, genotypes, step=None):
        # `step` is accepted (and ignored) because adam passes the
        # iteration index as a second argument.
        return error(genotypes, self.expected_output, self.reagents)
10 changes: 10 additions & 0 deletions ej_optim_no_lineal/individual.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
class Individual:
    """Wraps a flat 11-element parameter vector (the genotype) and splits
    it into the network's parameter groups."""

    def __init__(self, genotype):
        # Flat layout: [W (3), omega (6), omega_zero (2)].
        self.genotype = genotype

    def get_genotype_elements(self):
        """Return the (W, omega, omega_zero) slices of the genotype."""
        geno = self.genotype
        return geno[:3], geno[3:9], geno[9:11]
47 changes: 47 additions & 0 deletions ej_optim_no_lineal/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import time
from numpy import random
from functions import error

from individual import Individual
from optimization_methods import minimize_adam, minimize_cg, minimize_gd


def main():
    """Run GD (BFGS), conjugate gradient and Adam on a random initial
    genotype and print each optimizer's timing, best individual and error."""
    reagents = [
        [4.4793, -4.0765, -4.0765],
        [-4.1793, -4.9218, 1.7664],
        [-3.9429, -0.7689, 4.883],
    ]
    expected_output = [0, 1, 1]
    # Random initial genotype: W (3) + omega (6) + omega_zero (2) = 11 values.
    values = [random.uniform(-10, 10) for _ in range(11)]
    ind: Individual = Individual(values)

    def _timed(optimizer):
        # Run one optimizer on the shared starting point; return (result, seconds).
        start = time.time()
        result = optimizer(ind, expected_output, reagents)
        return result, time.time() - start

    def _report(title, elapsed, best, err):
        # Uniform per-optimizer report (was copy-pasted three times).
        print(title)
        print(f"Tiempo de ejecución :{elapsed}")
        print(f"Individuo: {best}")
        print(f"Valor de error: {err}")

    print("============START=============")
    # Was `print(str(ind))`, which only showed the default object repr.
    print(f"Individuo inicial: {values}")
    # scipy's minimize returns the best individual in res.x and its error in res.fun.
    res, elapsed = _timed(minimize_gd)
    _report("Gradiente descendente", elapsed, res.x, res.fun)
    res, elapsed = _timed(minimize_cg)
    _report("Gradiente conjugado", elapsed, res.x, res.fun)
    # autograd's adam returns the raw parameter vector, so recompute its error.
    best, elapsed = _timed(minimize_adam)
    # BUG FIX: this section was mislabeled "Gradiente conjugado".
    _report("Adam", elapsed, best, error(best, expected_output, reagents))
    print("============END=============")


if __name__ == "__main__":
    # Guard so importing this module does not trigger a full optimization run.
    main()
18 changes: 18 additions & 0 deletions ej_optim_no_lineal/optimization_methods.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Names of the supported optimization methods; not referenced anywhere in
# this file — presumably documentation/reference only (TODO confirm).
optimization_methods = {'GD', 'GC', 'ADAM'}

from scipy.optimize import minimize
from functions import ErrorWrapper, error
from autograd.misc.optimizers import adam
import numdifftools as nd

from individual import Individual
def minimize_gd(ind: Individual, expected_output, reagents):
    """Minimize the network error from ind's genotype via scipy's BFGS.

    NOTE(review): this is labeled 'GD' but BFGS is a quasi-Newton method,
    not plain gradient descent — confirm the intended method.
    """
    fixed_args = (expected_output, reagents)
    return minimize(error, ind.genotype, args=fixed_args, method='BFGS')

def minimize_cg(ind: Individual, expected_output, reagents):
    """Minimize the network error from ind's genotype with scipy's
    conjugate-gradient method."""
    fixed_args = (expected_output, reagents)
    return minimize(error, ind.genotype, args=fixed_args, method='CG')

def minimize_adam(ind: Individual, expected_output, reagents):
    """Minimize the network error with autograd's Adam, using a numerical
    gradient of the wrapped error function.

    Returns the final parameter vector (not a scipy OptimizeResult).
    NOTE(review): step_size=0.80085 is far above Adam's customary defaults
    (~1e-3) — confirm it was chosen deliberately.
    """
    objective = ErrorWrapper(reagents, expected_output)
    gradient = nd.Gradient(objective.apply_function)
    return adam(gradient, ind.genotype, step_size=0.80085)
26 changes: 26 additions & 0 deletions ej_optim_no_lineal/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Sistemas de Inteligencia Artificial

## Ejercicio Obligatorio 1: Métodos de Optimización no lineal

### Integrantes:

- Ana Cruz
- Agustin Spitzner
- Camila Borinsky

---

## Ejecución

### Requerimientos

- Python v3.8 o superior
- pip

### Dependencias

`pip install -r requirements.txt`

### Ejecución

`python main.py`
3 changes: 3 additions & 0 deletions ej_optim_no_lineal/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
scipy>=1.8.0
autograd>=1.4
numdifftools>=0.9.40

0 comments on commit 24309fb

Please sign in to comment.