From f6526fd75db021989699d4188aa7d6e19472d27c Mon Sep 17 00:00:00 2001
From: Morten Hjorth-Jensen
Date: Thu, 22 Feb 2024 09:49:30 +0100
Subject: [PATCH] update

---
 doc/pub/week6/html/._week6-bs000.html      |   66 +-
 doc/pub/week6/html/._week6-bs001.html      |   66 +-
 doc/pub/week6/html/._week6-bs002.html      |   66 +-
 doc/pub/week6/html/._week6-bs003.html      |   64 +-
 doc/pub/week6/html/._week6-bs004.html      |   64 +-
 doc/pub/week6/html/._week6-bs005.html      |   64 +-
 doc/pub/week6/html/._week6-bs006.html      |   64 +-
 doc/pub/week6/html/._week6-bs007.html      |   64 +-
 doc/pub/week6/html/._week6-bs008.html      |   64 +-
 doc/pub/week6/html/._week6-bs009.html      |   64 +-
 doc/pub/week6/html/._week6-bs010.html      |   64 +-
 doc/pub/week6/html/._week6-bs011.html      |   64 +-
 doc/pub/week6/html/._week6-bs012.html      |   64 +-
 doc/pub/week6/html/._week6-bs013.html      |   64 +-
 doc/pub/week6/html/._week6-bs014.html      |   64 +-
 doc/pub/week6/html/._week6-bs015.html      |   64 +-
 doc/pub/week6/html/._week6-bs016.html      |   64 +-
 doc/pub/week6/html/._week6-bs017.html      |   64 +-
 doc/pub/week6/html/._week6-bs018.html      |   64 +-
 doc/pub/week6/html/._week6-bs019.html      |   64 +-
 doc/pub/week6/html/._week6-bs020.html      |   91 +-
 doc/pub/week6/html/._week6-bs021.html      |  134 +-
 doc/pub/week6/html/._week6-bs022.html      |  110 +-
 doc/pub/week6/html/._week6-bs023.html      |  217 +-
 doc/pub/week6/html/._week6-bs024.html      |  236 +-
 doc/pub/week6/html/._week6-bs025.html      |   85 +-
 doc/pub/week6/html/._week6-bs026.html      |   81 +-
 doc/pub/week6/html/._week6-bs027.html      |   85 +-
 doc/pub/week6/html/._week6-bs028.html      |   92 +-
 doc/pub/week6/html/._week6-bs029.html      |   94 +-
 doc/pub/week6/html/._week6-bs030.html      |   86 +-
 doc/pub/week6/html/._week6-bs031.html      |   98 +-
 doc/pub/week6/html/._week6-bs032.html      |  135 +-
 doc/pub/week6/html/._week6-bs033.html      |  128 +-
 doc/pub/week6/html/._week6-bs034.html      |   95 +-
 doc/pub/week6/html/._week6-bs035.html      |   85 +-
 doc/pub/week6/html/._week6-bs036.html      |   92 +-
 doc/pub/week6/html/._week6-bs037.html      |   91 +-
 doc/pub/week6/html/._week6-bs038.html      |  117 +-
 doc/pub/week6/html/._week6-bs039.html      |  118 +-
 doc/pub/week6/html/._week6-bs040.html      |  134 +-
 doc/pub/week6/html/._week6-bs041.html      |  157 +-
 doc/pub/week6/html/._week6-bs042.html      |  162 +-
 doc/pub/week6/html/._week6-bs043.html      |   84 +-
 doc/pub/week6/html/._week6-bs044.html      |  114 +-
 doc/pub/week6/html/._week6-bs045.html      |  108 +-
 doc/pub/week6/html/._week6-bs046.html      |  232 +-
 doc/pub/week6/html/week6-bs.html           |   66 +-
 doc/pub/week6/html/week6-reveal.html       |   88 +-
 doc/pub/week6/html/week6-solarized.html    |   77 +-
 doc/pub/week6/html/week6.html              |   77 +-
 doc/pub/week6/ipynb/ipynb-week6-src.tar.gz |  Bin 191 -> 191 bytes
 doc/pub/week6/ipynb/week6.ipynb            |  518 ++--
 doc/pub/week6/pdf/week6.pdf                |  Bin 396000 -> 405835 bytes
 .../.ipynb_checkpoints/week6-checkpoint.ipynb | 2166 +++++++++++++++++
 doc/src/week6/week6.do.txt                 |   79 +-
 56 files changed, 5137 insertions(+), 2481 deletions(-)
 create mode 100644 doc/src/week6/.ipynb_checkpoints/week6-checkpoint.ipynb

diff --git a/doc/pub/week6/html/._week6-bs000.html b/doc/pub/week6/html/._week6-bs000.html
index 66314f1e..816dfa3a 100644
--- a/doc/pub/week6/html/._week6-bs000.html
+++ b/doc/pub/week6/html/._week6-bs000.html
@@ -36,7 +36,7 @@
-  • Overview of week 8
+  • Overview
   • Brief reminder on Newton-Raphson's method
   • The equations
   • Simple geometric interpretation
@@ -211,32 +212,33 @@
   • Steepest descent method
   • Steepest descent method
   • Final expressions
-  • Code examples for steepest descent
-  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
-  • The routine for the steepest descent method
-  • Steepest descent example
-  • Conjugate gradient method
-  • Conjugate gradient method
-  • Conjugate gradient method
-  • Conjugate gradient method
-  • Conjugate gradient method and iterations
-  • Conjugate gradient method
-  • Conjugate gradient method
-  • Conjugate gradient method
-  • Simple implementation of the Conjugate gradient algorithm
-  • Broyden–Fletcher–Goldfarb–Shanno algorithm
-  • Stochastic Gradient Descent
-  • Computation of gradients
-  • SGD example
-  • The gradient step
-  • Simple example code
-  • When do we stop?
-  • Slightly different approach
-  • Program for stochastic gradient
-  • Using gradient descent methods, limitations
-  • Codes from numerical recipes
-  • Finding the minimum of the harmonic oscillator model in one dimension
-  • Functions to observe
+  • Our simple \( 2\times 2 \) example
+  • Derivatives and more
+  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
+  • The routine for the steepest descent method
+  • Steepest descent example
+  • Conjugate gradient method
+  • Conjugate gradient method
+  • Conjugate gradient method
+  • Conjugate gradient method
+  • Conjugate gradient method and iterations
+  • Conjugate gradient method
+  • Conjugate gradient method
+  • Conjugate gradient method
+  • Simple implementation of the Conjugate gradient algorithm
+  • Broyden–Fletcher–Goldfarb–Shanno algorithm
+  • Stochastic Gradient Descent
+  • Computation of gradients
+  • SGD example
+  • The gradient step
+  • Simple example code
+  • When do we stop?
+  • Slightly different approach
+  • Program for stochastic gradient
+  • Using gradient descent methods, limitations
+  • Codes from numerical recipes
+  • Finding the minimum of the harmonic oscillator model in one dimension
+  • Functions to observe

    Week 8 February 19-23: Gradient Methods


    -

    Jan 1, 2024

    +

    February 23, 2024


    @@ -291,7 +293,7 @@

    Jan 1, 2024

  • 9
  • 10
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs001.html b/doc/pub/week6/html/._week6-bs001.html index 22641eb5..117d691f 100644 --- a/doc/pub/week6/html/._week6-bs001.html +++ b/doc/pub/week6/html/._week6-bs001.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

     

     

     

    -

    Overview of week 8

    +

    Overview

    @@ -303,7 +305,7 @@

    Overview of week 8

  • 10
  • 11
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs002.html b/doc/pub/week6/html/._week6-bs002.html index 0e6f1c02..e0a5aaa6 100644 --- a/doc/pub/week6/html/._week6-bs002.html +++ b/doc/pub/week6/html/._week6-bs002.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Brief reminder on Newton-Raphson's method

    -

    Let us quickly remind ourselves how we derive the above method.

    +

    Let us quickly remind ourselves on how we derive the above method.

    Perhaps the most celebrated of all one-dimensional root-finding routines is Newton's method, also called the Newton-Raphson @@ -278,7 +280,7 @@
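For reference, a minimal Python sketch of the Newton-Raphson iteration that the slide above describes (illustrative only; the test function f, its derivative df, the tolerance and the starting point are assumptions made for this sketch, not part of the patch):

    # Newton-Raphson iteration: x_{n+1} = x_n - f(x_n)/f'(x_n)
    def newton_raphson(f, df, x0, tol=1e-12, max_iter=100):
        x = x0
        for _ in range(max_iter):
            fx = f(x)
            if abs(fx) < tol:      # stop when the residual is small enough
                return x
            x = x - fx / df(x)     # Newton-Raphson update
        return x

    # Hypothetical usage: root of f(x) = x^2 - 2, i.e. sqrt(2)
    f  = lambda x: x**2 - 2.0
    df = lambda x: 2.0 * x
    print(newton_raphson(f, df, x0=1.0))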

    Brief reminder
  • 11
  • 12
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs003.html b/doc/pub/week6/html/._week6-bs003.html index 10c75050..8b17fcad 100644 --- a/doc/pub/week6/html/._week6-bs003.html +++ b/doc/pub/week6/html/._week6-bs003.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    The equations

  • 12
  • 13
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs004.html b/doc/pub/week6/html/._week6-bs004.html index 47eafb0f..fb1c3294 100644 --- a/doc/pub/week6/html/._week6-bs004.html +++ b/doc/pub/week6/html/._week6-bs004.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Simple geometric interpretation
  • 13
  • 14
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs005.html b/doc/pub/week6/html/._week6-bs005.html index 580ac8aa..ef8d2e9f 100644 --- a/doc/pub/week6/html/._week6-bs005.html +++ b/doc/pub/week6/html/._week6-bs005.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Extending to more th
  • 14
  • 15
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs006.html b/doc/pub/week6/html/._week6-bs006.html index 0ab76ae8..c3474135 100644 --- a/doc/pub/week6/html/._week6-bs006.html +++ b/doc/pub/week6/html/._week6-bs006.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Steepest descent

  • 15
  • 16
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs007.html b/doc/pub/week6/html/._week6-bs007.html index 1f747d23..a460e13f 100644 --- a/doc/pub/week6/html/._week6-bs007.html +++ b/doc/pub/week6/html/._week6-bs007.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    More on Steepest descent

  • 16
  • 17
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs008.html b/doc/pub/week6/html/._week6-bs008.html index 0acae2f2..3be1347e 100644 --- a/doc/pub/week6/html/._week6-bs008.html +++ b/doc/pub/week6/html/._week6-bs008.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    The ideal

  • 17
  • 18
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs009.html b/doc/pub/week6/html/._week6-bs009.html index 56a2a533..e603d8c3 100644 --- a/doc/pub/week6/html/._week6-bs009.html +++ b/doc/pub/week6/html/._week6-bs009.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    The sensitiven
  • 18
  • 19
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs010.html b/doc/pub/week6/html/._week6-bs010.html index 1279e421..2ce2eba7 100644 --- a/doc/pub/week6/html/._week6-bs010.html +++ b/doc/pub/week6/html/._week6-bs010.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Convex functions

  • 19
  • 20
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs011.html b/doc/pub/week6/html/._week6-bs011.html index 31c196cb..c4b3b616 100644 --- a/doc/pub/week6/html/._week6-bs011.html +++ b/doc/pub/week6/html/._week6-bs011.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Convex function

  • 20
  • 21
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs012.html b/doc/pub/week6/html/._week6-bs012.html index 157fd995..0a84d781 100644 --- a/doc/pub/week6/html/._week6-bs012.html +++ b/doc/pub/week6/html/._week6-bs012.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Conditions on convex func
  • 21
  • 22
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs013.html b/doc/pub/week6/html/._week6-bs013.html index ff173f0f..1c3ebfeb 100644 --- a/doc/pub/week6/html/._week6-bs013.html +++ b/doc/pub/week6/html/._week6-bs013.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    More on convex functions

  • 22
  • 23
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs014.html b/doc/pub/week6/html/._week6-bs014.html index c795bf17..20b62f3c 100644 --- a/doc/pub/week6/html/._week6-bs014.html +++ b/doc/pub/week6/html/._week6-bs014.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Some simple problems

  • 23
  • 24
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs015.html b/doc/pub/week6/html/._week6-bs015.html index e27f81c7..210ab020 100644 --- a/doc/pub/week6/html/._week6-bs015.html +++ b/doc/pub/week6/html/._week6-bs015.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Standard steepest descent

    24
  • 25
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs016.html b/doc/pub/week6/html/._week6-bs016.html index 800e315a..1dd4ce41 100644 --- a/doc/pub/week6/html/._week6-bs016.html +++ b/doc/pub/week6/html/._week6-bs016.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Gradient method

  • 25
  • 26
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs017.html b/doc/pub/week6/html/._week6-bs017.html index 24e8713e..783c7d0d 100644 --- a/doc/pub/week6/html/._week6-bs017.html +++ b/doc/pub/week6/html/._week6-bs017.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Steepest descent method

  • 26
  • 27
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs018.html b/doc/pub/week6/html/._week6-bs018.html index a467d40f..f9a854f5 100644 --- a/doc/pub/week6/html/._week6-bs018.html +++ b/doc/pub/week6/html/._week6-bs018.html @@ -36,7 +36,7 @@
   [sidebar TOC hunk: same change as shown for ._week6-bs000.html above]

    Steepest descent method

  • 27
  • 28
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs019.html b/doc/pub/week6/html/._week6-bs019.html index 3bc3bb7a..cadb56be 100644 --- a/doc/pub/week6/html/._week6-bs019.html +++ b/doc/pub/week6/html/._week6-bs019.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -314,7 +316,7 @@

    Final expressions

  • 28
  • 29
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs020.html b/doc/pub/week6/html/._week6-bs020.html index 6b7889c1..fbf7acd1 100644 --- a/doc/pub/week6/html/._week6-bs020.html +++ b/doc/pub/week6/html/._week6-bs020.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,7 +250,32 @@

     

     

     

    -

    Code examples for steepest descent

    +

    Our simple \( 2\times 2 \) example

    + +

    Last week we introduced the simple two-dimensional function

    +$$ +f(x_1,x_2)=x_1^2+x_1x_2+10x_2^2-5x_1-3x_2, +$$ + +

    which is of the form (in terms of vectors and matrices)

    +$$ +f(\boldsymbol{x})=\frac{1}{2}\boldsymbol{x}^T\boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}^T\boldsymbol{x}, +$$ + +

    where we have

    +$$ +\boldsymbol{x}=\begin{bmatrix} x_1 \\ x_2\end{bmatrix}, +$$ + +$$ +\boldsymbol{b}=\begin{bmatrix} 5 \\ 3\end{bmatrix}, +$$ + +

    and

    +$$ +\boldsymbol{A}=\begin{bmatrix} 2 & 1\\ 1& 20\end{bmatrix}. +$$ +
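As a quick sanity check, a minimal NumPy sketch (the variable names here are ours, not part of the lecture code) confirms that the matrix-vector form above reproduces the explicit polynomial:

import numpy as np

A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])

def f_poly(x):
    # explicit form: x1^2 + x1*x2 + 10*x2^2 - 5*x1 - 3*x2
    return x[0]**2 + x[0]*x[1] + 10.0*x[1]**2 - 5.0*x[0] - 3.0*x[1]

def f_quad(x):
    # matrix-vector form: 0.5*x^T A x - b^T x
    return 0.5*x @ A @ x - b @ x

x = np.array([0.7, -1.3])    # any test point will do
print(f_poly(x), f_quad(x))  # the two numbers agree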

    @@ -275,7 +302,7 @@

    Code examples for ste
  • 29
  • 30
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs021.html b/doc/pub/week6/html/._week6-bs021.html index 80d8332c..bb03e53a 100644 --- a/doc/pub/week6/html/._week6-bs021.html +++ b/doc/pub/week6/html/._week6-bs021.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,59 +250,29 @@

     

     

     

    -

    Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come

    -
    -
    - +

    Derivatives and more

    - -
    -
    -
    -
    -
    -
    #include <cmath>
    -#include <iostream>
    -#include <fstream>
    -#include <iomanip>
    -#include "vectormatrixclass.h"
    -using namespace  std;
    -//   Main function begins here
    -int main(int  argc, char * argv[]){
    -  int dim = 2;
    -  Vector x(dim),xsd(dim), b(dim),x0(dim);
    -  Matrix A(dim,dim);
    +

Minimizing the above function, that is, setting the gradient to zero,

    +$$ +\nabla f = 0 = \boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}, +$$ - // Set our initial guess - x0(0) = x0(1) = 0; - // Set the matrix - A(0,0) = 3; A(1,0) = 2; A(0,1) = 2; A(1,1) = 6; - b(0) = 2; b(1) = -8; - cout << "The Matrix A that we are using: " << endl; - A.Print(); - cout << endl; - xsd = SteepestDescent(A,b,x0); - cout << "The approximate solution using Steepest Descent is: " << endl; - xsd.Print(); - cout << endl; -} -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    +

    which leads to a simple matrix-inversion problem

    +$$ +\boldsymbol{x}=\boldsymbol{A}^{-1}\boldsymbol{b}. +$$ + +

    This problem is easy to solve since we can calculate the inverse. Alternatively, we can solve the two coupled equations with two unknowns

    +$$ +\frac{\partial f}{\partial x_1}=2x_1+x_2-5=0, +$$ + +

    and

    +$$ +\frac{\partial f}{\partial x_2}=x_1+20x_2-3=0, +$$ +

    with solutions \( x_1=97/39 \) and \( x_2=1/39 \).
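The same numbers follow from solving the linear system directly; a small sketch, assuming NumPy:

import numpy as np

A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])

x = np.linalg.solve(A, b)   # solve A x = b without forming the inverse
print(x)                    # approximately [2.48717949, 0.02564103]
print(97/39, 1/39)          # the exact values quoted above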

    @@ -327,7 +299,7 @@

    30
  • 31
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs022.html b/doc/pub/week6/html/._week6-bs022.html index 1e63bd27..bb393173 100644 --- a/doc/pub/week6/html/._week6-bs022.html +++ b/doc/pub/week6/html/._week6-bs022.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,7 +250,7 @@

     

     

     

    -

    The routine for the steepest descent method

    +

    Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come

    @@ -259,26 +261,30 @@

    The routine
    -
    Vector SteepestDescent(Matrix A, Vector b, Vector x0){
    -  int IterMax, i;
    -  int dim = x0.Dimension();
    -  const double tolerance = 1.0e-14;
    -  Vector x(dim),f(dim),z(dim);
    -  double c,alpha,d;
    -  IterMax = 30;
    -  x = x0;
    -  r = A*x-b;
    -  i = 0;
    -  while (i <= IterMax){
    -    z = A*r;
    -    c = dot(r,r);
    -    alpha = c/dot(r,z);
    -    x = x - alpha*r;
    -    r =  A*x-b;
    -    if(sqrt(dot(r,r)) < tolerance) break;
    -    i++;
    -  }
    -  return x;
    +  
    #include <cmath>
    +#include <iostream>
    +#include <fstream>
    +#include <iomanip>
    +#include "vectormatrixclass.h"
    +using namespace  std;
    +//   Main function begins here
    +int main(int  argc, char * argv[]){
    +  int dim = 2;
    +  Vector x(dim),xsd(dim), b(dim),x0(dim);
    +  Matrix A(dim,dim);
    +
    +  // Set our initial guess
    +  x0(0) = x0(1) = 0;
    +  // Set the matrix
    +  A(0,0) =  2;    A(1,0) =  1;   A(0,1) =  1;   A(1,1) =  20;
    +  b(0) = 5; b(1) = 3;
    +  cout << "The Matrix A that we are using: " << endl;
    +  A.Print();
    +  cout << endl;
    +  xsd = SteepestDescent(A,b,x0);
    +  cout << "The approximate solution using Steepest Descent is: " << endl;
    +  xsd.Print();
    +  cout << endl;
     }
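The heading above promises Python code as well; a minimal NumPy sketch of the same calculation (the function name steepest_descent, the tolerance and the iteration cap are choices made for this illustration, not part of the vectormatrixclass library) could read:

import numpy as np

def steepest_descent(A, b, x0, tolerance=1.0e-14, itermax=100):
    # minimize 0.5*x^T A x - b^T x; the gradient is r = A x - b
    x = x0.copy()
    r = A @ x - b
    for i in range(itermax):
        if np.sqrt(r @ r) < tolerance:
            break
        z = A @ r
        alpha = (r @ r) / (r @ z)   # exact line search for a quadratic form
        x = x - alpha*r
        r = A @ x - b
    return x

A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])
x0 = np.zeros(2)
print(steepest_descent(A, b, x0))   # close to [97/39, 1/39] = [2.4872, 0.0256]

With this matrix and right-hand side the iterates approach \( x_1=97/39 \) and \( x_2=1/39 \), the same solution quoted earlier.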
     
    @@ -323,7 +329,7 @@

    The routine
  • 31
  • 32
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs023.html b/doc/pub/week6/html/._week6-bs023.html index 637f1326..e65e946e 100644 --- a/doc/pub/week6/html/._week6-bs023.html +++ b/doc/pub/week6/html/._week6-bs023.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,102 +250,10 @@

     

     

     

    -

    Steepest descent example

    - - - -
    -
    -
    -
    -
    -
    import numpy as np
    -import numpy.linalg as la
    -
    -import scipy.optimize as sopt
    -
    -import matplotlib.pyplot as pt
    -from mpl_toolkits.mplot3d import axes3d
    -
    -def f(x):
    -    return 0.5*x[0]**2 + 2.5*x[1]**2
    -
    -def df(x):
    -    return np.array([x[0], 5*x[1]])
    -
    -fig = pt.figure()
    -ax = fig.gca(projection="3d")
    -
    -xmesh, ymesh = np.mgrid[-2:2:50j,-2:2:50j]
    -fmesh = f(np.array([xmesh, ymesh]))
    -ax.plot_surface(xmesh, ymesh, fmesh)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -

    And then as countor plot

    - - -
    -
    -
    -
    -
    -
    pt.axis("equal")
    -pt.contour(xmesh, ymesh, fmesh)
    -guesses = [np.array([2, 2./5])]
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -

    Find guesses

    - - -
    -
    -
    -
    -
    -
    x = guesses[-1]
    -s = -df(x)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -

    Run it!

    +

    The routine for the steepest descent method

    +
    +
    +
    @@ -351,13 +261,27 @@

    Steepest descent example

    -
    def f1d(alpha):
    -    return f(x + alpha*s)
    -
    -alpha_opt = sopt.golden(f1d)
    -next_guess = x + alpha_opt * s
    -guesses.append(next_guess)
    -print(next_guess)
    +  
    Vector SteepestDescent(Matrix A, Vector b, Vector x0){
    +  int IterMax, i;
    +  int dim = x0.Dimension();
    +  const double tolerance = 1.0e-14;
+  Vector x(dim),r(dim),z(dim);
    +  double c,alpha,d;
    +  IterMax = 30;
    +  x = x0;
    +  r = A*x-b;
    +  i = 0;
    +  while (i <= IterMax){
    +    z = A*r;
    +    c = dot(r,r);
    +    alpha = c/dot(r,z);
    +    x = x - alpha*r;
    +    r =  A*x-b;
    +    if(sqrt(dot(r,r)) < tolerance) break;
    +    i++;
    +  }
    +  return x;
    +}
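As a small worked step (a sketch assuming NumPy, with the matrix and right-hand side from the example above): starting from \( \hat{x}_0=0 \) the residual is \( \hat{r}_0=-\hat{b} \), so the exact line-search step is \( \alpha_0 = \hat{b}^T\hat{b}/\hat{b}^T\hat{A}\hat{b} \).

import numpy as np

A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])

# first step from x0 = 0: r0 = A x0 - b = -b, so
# alpha0 = (r0.r0)/(r0.A r0) = (b.b)/(b.A b) and x1 = x0 - alpha0*r0 = alpha0*b
alpha0 = (b @ b) / (b @ A @ b)
x1 = alpha0*b
print(alpha0, x1)    # about 0.131 and [0.654, 0.392]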
     
    @@ -372,32 +296,7 @@

    Steepest descent example

    - -

    What happened?

    - - -
    -
    -
    -
    -
    -
    pt.axis("equal")
    -pt.contour(xmesh, ymesh, fmesh, 50)
    -it_array = np.array(guesses)
    -pt.plot(it_array.T[0], it_array.T[1], "x-")
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    @@ -426,7 +325,7 @@

    Steepest descent example

  • 32
  • 33
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs024.html b/doc/pub/week6/html/._week6-bs024.html index ceff792b..07ca9834 100644 --- a/doc/pub/week6/html/._week6-bs024.html +++ b/doc/pub/week6/html/._week6-bs024.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,34 +250,156 @@

     

     

     

    -

    Conjugate gradient method

    -
    -
    - -

    In the CG method we define so-called conjugate directions and two vectors -\( \hat{s} \) and \( \hat{t} \) -are said to be -conjugate if -

    -$$ -\begin{equation*} -\hat{s}^T\hat{A}\hat{t}= 0. -\end{equation*} -$$ +

    Steepest descent example

    -

    The philosophy of the CG method is to perform searches in various conjugate directions -of our vectors \( \hat{x}_i \) obeying the above criterion, namely -

    -$$ -\begin{equation*} -\hat{x}_i^T\hat{A}\hat{x}_j= 0. -\end{equation*} -$$ -

    Two vectors are conjugate if they are orthogonal with respect to -this inner product. Being conjugate is a symmetric relation: if \( \hat{s} \) is conjugate to \( \hat{t} \), then \( \hat{t} \) is conjugate to \( \hat{s} \). -

    + +
    +
    +
    +
    +
    +
    import numpy as np
    +import numpy.linalg as la
    +
    +import scipy.optimize as sopt
    +
    +import matplotlib.pyplot as pt
    +from mpl_toolkits.mplot3d import axes3d
    +
    +def f(x):
+    return x[0]**2 + 10.0*x[1]**2+x[0]*x[1]-5.0*x[0]-3.0*x[1]
    +
    +def df(x):
+    return np.array([2.0*x[0]+x[1]-5.0, x[0]+20.0*x[1]-3.0])
    +
    +fig = pt.figure()
+ax = fig.add_subplot(projection="3d")
    +
+xmesh, ymesh = np.mgrid[-2:3:50j,-2:3:50j]
    +fmesh = f(np.array([xmesh, ymesh]))
    +ax.plot_surface(xmesh, ymesh, fmesh)
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +

And then as contour plot

    + + +
    +
    +
    +
    +
    +
    pt.axis("equal")
    +pt.contour(xmesh, ymesh, fmesh)
    +guesses = [np.array([3.0, 0.05])]
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +

    Find guesses

    + + +
    +
    +
    +
    +
    +
    x = guesses[-1]
    +s = -df(x)
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +

    Run it!

    + + +
    +
    +
    +
    +
    +
    def f1d(alpha):
    +    return f(x + alpha*s)
    +
    +alpha_opt = sopt.golden(f1d)
    +next_guess = x + alpha_opt * s
    +guesses.append(next_guess)
    +print(next_guess)
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +

    What happened?

    + + +
    +
    +
    +
    +
    +
    pt.axis("equal")
    +pt.contour(xmesh, ymesh, fmesh, 50)
    +it_array = np.array(guesses)
    +pt.plot(it_array.T[0], it_array.T[1], "x-")
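To see how the method zig-zags towards the minimum, one can simply repeat the line-search step; a minimal sketch (with the same \( f \) and gradient as in the corrected code above, and an arbitrary cap of 20 iterations):

import numpy as np
import scipy.optimize as sopt

def f(x):
    return x[0]**2 + 10.0*x[1]**2 + x[0]*x[1] - 5.0*x[0] - 3.0*x[1]

def df(x):
    return np.array([2.0*x[0] + x[1] - 5.0, x[0] + 20.0*x[1] - 3.0])

x = np.array([3.0, 0.05])          # same starting guess as above
trajectory = [x]
for i in range(20):                # 20 steps is an arbitrary choice
    s = -df(x)                     # steepest-descent direction
    alpha = sopt.golden(lambda a: f(x + a*s))
    x = x + alpha*s
    trajectory.append(x)
print(x)                           # close to the minimum at (97/39, 1/39)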
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    @@ -304,7 +428,7 @@

    Conjugate gradient method

    33
  • 34
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs025.html b/doc/pub/week6/html/._week6-bs025.html index d60fdf3e..845a0424 100644 --- a/doc/pub/week6/html/._week6-bs025.html +++ b/doc/pub/week6/html/._week6-bs025.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -252,14 +254,29 @@

    Conjugate gradient method

    -

    An example is given by the eigenvectors of the matrix

    +

    In the CG method we define so-called conjugate directions and two vectors +\( \hat{s} \) and \( \hat{t} \) +are said to be +conjugate if +

    $$ \begin{equation*} -\hat{v}_i^T\hat{A}\hat{v}_j= \lambda\hat{v}_i^T\hat{v}_j, +\hat{s}^T\hat{A}\hat{t}= 0. \end{equation*} $$ -

    which is zero unless \( i=j \).

    +

    The philosophy of the CG method is to perform searches in various conjugate directions +of our vectors \( \hat{x}_i \) obeying the above criterion, namely +

    +$$ +\begin{equation*} +\hat{x}_i^T\hat{A}\hat{x}_j= 0. +\end{equation*} +$$ + +

    Two vectors are conjugate if they are orthogonal with respect to +this inner product. Being conjugate is a symmetric relation: if \( \hat{s} \) is conjugate to \( \hat{t} \), then \( \hat{t} \) is conjugate to \( \hat{s} \). +

    @@ -289,7 +306,7 @@

    Conjugate gradient method

    34
  • 35
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs026.html b/doc/pub/week6/html/._week6-bs026.html index 8848a82f..5ce5fcc3 100644 --- a/doc/pub/week6/html/._week6-bs026.html +++ b/doc/pub/week6/html/._week6-bs026.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -252,25 +254,14 @@

    Conjugate gradient method

    -

    Assume now that we have a symmetric positive-definite matrix \( \hat{A} \) of size -\( n\times n \). At each iteration \( i+1 \) we obtain the conjugate direction of a vector -

    +

    An example is given by the eigenvectors of the matrix

    $$ \begin{equation*} -\hat{x}_{i+1}=\hat{x}_{i}+\alpha_i\hat{p}_{i}. +\hat{v}_i^T\hat{A}\hat{v}_j= \lambda\hat{v}_i^T\hat{v}_j, \end{equation*} $$ -

    We assume that \( \hat{p}_{i} \) is a sequence of \( n \) mutually conjugate directions. -Then the \( \hat{p}_{i} \) form a basis of \( R^n \) and we can expand the solution -$ \hat{A}\hat{x} = \hat{b}$ in this basis, namely -

    - -$$ -\begin{equation*} - \hat{x} = \sum^{n}_{i=1} \alpha_i \hat{p}_i. -\end{equation*} -$$ +

    which is zero unless \( i=j \).

    @@ -300,7 +291,7 @@

    Conjugate gradient method

    35
  • 36
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs027.html b/doc/pub/week6/html/._week6-bs027.html index ef0091a8..33c64bf4 100644 --- a/doc/pub/week6/html/._week6-bs027.html +++ b/doc/pub/week6/html/._week6-bs027.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -252,26 +254,23 @@

    Conjugate gradient method

    -

    The coefficients are given by

    +

    Assume now that we have a symmetric positive-definite matrix \( \hat{A} \) of size +\( n\times n \). At each iteration \( i+1 \) we obtain the conjugate direction of a vector +

    $$ \begin{equation*} - \mathbf{A}\mathbf{x} = \sum^{n}_{i=1} \alpha_i \mathbf{A} \mathbf{p}_i = \mathbf{b}. +\hat{x}_{i+1}=\hat{x}_{i}+\alpha_i\hat{p}_{i}. \end{equation*} $$ -

    Multiplying with \( \hat{p}_k^T \) from the left gives

    +

    We assume that \( \hat{p}_{i} \) is a sequence of \( n \) mutually conjugate directions. +Then the \( \hat{p}_{i} \) form a basis of \( R^n \) and we can expand the solution +$ \hat{A}\hat{x} = \hat{b}$ in this basis, namely +

    $$ \begin{equation*} - \hat{p}_k^T \hat{A}\hat{x} = \sum^{n}_{i=1} \alpha_i\hat{p}_k^T \hat{A}\hat{p}_i= \hat{p}_k^T \hat{b}, -\end{equation*} -$$ - -

    and we can define the coefficients \( \alpha_k \) as

    - -$$ -\begin{equation*} - \alpha_k = \frac{\hat{p}_k^T \hat{b}}{\hat{p}_k^T \hat{A} \hat{p}_k} + \hat{x} = \sum^{n}_{i=1} \alpha_i \hat{p}_i. \end{equation*} $$
    @@ -303,7 +302,7 @@

    Conjugate gradient method

    36
  • 37
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs028.html b/doc/pub/week6/html/._week6-bs028.html index acc46bcf..11a83841 100644 --- a/doc/pub/week6/html/._week6-bs028.html +++ b/doc/pub/week6/html/._week6-bs028.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,36 +250,32 @@

     

     

     

    -

    Conjugate gradient method and iterations

    +

    Conjugate gradient method

    +

    The coefficients are given by

    +$$ +\begin{equation*} + \mathbf{A}\mathbf{x} = \sum^{n}_{i=1} \alpha_i \mathbf{A} \mathbf{p}_i = \mathbf{b}. +\end{equation*} +$$ -

    If we choose the conjugate vectors \( \hat{p}_k \) carefully, -then we may not need all of them to obtain a good approximation to the solution -\( \hat{x} \). -We want to regard the conjugate gradient method as an iterative method. -This will us to solve systems where \( n \) is so large that the direct -method would take too much time. -

    +

    Multiplying with \( \hat{p}_k^T \) from the left gives

    -

    We denote the initial guess for \( \hat{x} \) as \( \hat{x}_0 \). -We can assume without loss of generality that -

    $$ \begin{equation*} -\hat{x}_0=0, + \hat{p}_k^T \hat{A}\hat{x} = \sum^{n}_{i=1} \alpha_i\hat{p}_k^T \hat{A}\hat{p}_i= \hat{p}_k^T \hat{b}, \end{equation*} $$ -

    or consider the system

    +

    and we can define the coefficients \( \alpha_k \) as

    + $$ \begin{equation*} -\hat{A}\hat{z} = \hat{b}-\hat{A}\hat{x}_0, + \alpha_k = \frac{\hat{p}_k^T \hat{b}}{\hat{p}_k^T \hat{A} \hat{p}_k} \end{equation*} $$ - -

    instead.

    @@ -307,7 +305,7 @@

    Conjugate gradi
  • 37
  • 38
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs029.html b/doc/pub/week6/html/._week6-bs029.html index fad7ee08..d7d7c03f 100644 --- a/doc/pub/week6/html/._week6-bs029.html +++ b/doc/pub/week6/html/._week6-bs029.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,32 +250,36 @@

     

     

     

    -

    Conjugate gradient method

    +

    Conjugate gradient method and iterations

    -

    One can show that the solution \( \hat{x} \) is also the unique minimizer of the quadratic form

    + +

    If we choose the conjugate vectors \( \hat{p}_k \) carefully, +then we may not need all of them to obtain a good approximation to the solution +\( \hat{x} \). +We want to regard the conjugate gradient method as an iterative method. +This will us to solve systems where \( n \) is so large that the direct +method would take too much time. +

    + +

    We denote the initial guess for \( \hat{x} \) as \( \hat{x}_0 \). +We can assume without loss of generality that +

    $$ \begin{equation*} - f(\hat{x}) = \frac{1}{2}\hat{x}^T\hat{A}\hat{x} - \hat{x}^T \hat{x} , \quad \hat{x}\in\mathbf{R}^n. +\hat{x}_0=0, \end{equation*} $$ -

    This suggests taking the first basis vector \( \hat{p}_1 \) -to be the gradient of \( f \) at \( \hat{x}=\hat{x}_0 \), -which equals -

    +

    or consider the system

    $$ \begin{equation*} -\hat{A}\hat{x}_0-\hat{b}, +\hat{A}\hat{z} = \hat{b}-\hat{A}\hat{x}_0, \end{equation*} $$ -

    and -\( \hat{x}_0=0 \) it is equal \( -\hat{b} \). -The other vectors in the basis will be conjugate to the gradient, -hence the name conjugate gradient method. -

    +

    instead.

    @@ -303,7 +309,7 @@

    Conjugate gradient method

    38
  • 39
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs030.html b/doc/pub/week6/html/._week6-bs030.html index dfe4a4b0..eef4f1a3 100644 --- a/doc/pub/week6/html/._week6-bs030.html +++ b/doc/pub/week6/html/._week6-bs030.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -252,26 +254,28 @@

    Conjugate gradient method

    -

    Let \( \hat{r}_k \) be the residual at the \( k \)-th step:

    +

    One can show that the solution \( \hat{x} \) is also the unique minimizer of the quadratic form

    $$ \begin{equation*} -\hat{r}_k=\hat{b}-\hat{A}\hat{x}_k. + f(\hat{x}) = \frac{1}{2}\hat{x}^T\hat{A}\hat{x} - \hat{x}^T \hat{x} , \quad \hat{x}\in\mathbf{R}^n. \end{equation*} $$ -

    Note that \( \hat{r}_k \) is the negative gradient of \( f \) at -\( \hat{x}=\hat{x}_k \), -so the gradient descent method would be to move in the direction \( \hat{r}_k \). -Here, we insist that the directions \( \hat{p}_k \) are conjugate to each other, -so we take the direction closest to the gradient \( \hat{r}_k \) -under the conjugacy constraint. -This gives the following expression +

    This suggests taking the first basis vector \( \hat{p}_1 \) +to be the gradient of \( f \) at \( \hat{x}=\hat{x}_0 \), +which equals

    $$ \begin{equation*} -\hat{p}_{k+1}=\hat{r}_k-\frac{\hat{p}_k^T \hat{A}\hat{r}_k}{\hat{p}_k^T\hat{A}\hat{p}_k} \hat{p}_k. +\hat{A}\hat{x}_0-\hat{b}, \end{equation*} $$ + +

    and +\( \hat{x}_0=0 \) it is equal \( -\hat{b} \). +The other vectors in the basis will be conjugate to the gradient, +hence the name conjugate gradient method. +

    @@ -301,7 +305,7 @@

    Conjugate gradient method

    39
  • 40
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs031.html b/doc/pub/week6/html/._week6-bs031.html index f420846c..a4ca86b1 100644 --- a/doc/pub/week6/html/._week6-bs031.html +++ b/doc/pub/week6/html/._week6-bs031.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -252,33 +254,25 @@

    Conjugate gradient method

    -

    We can also compute the residual iteratively as

    +

    Let \( \hat{r}_k \) be the residual at the \( k \)-th step:

    $$ \begin{equation*} -\hat{r}_{k+1}=\hat{b}-\hat{A}\hat{x}_{k+1}, - \end{equation*} +\hat{r}_k=\hat{b}-\hat{A}\hat{x}_k. +\end{equation*} $$ -

    which equals

    +

    Note that \( \hat{r}_k \) is the negative gradient of \( f \) at +\( \hat{x}=\hat{x}_k \), +so the gradient descent method would be to move in the direction \( \hat{r}_k \). +Here, we insist that the directions \( \hat{p}_k \) are conjugate to each other, +so we take the direction closest to the gradient \( \hat{r}_k \) +under the conjugacy constraint. +This gives the following expression +

    $$ \begin{equation*} -\hat{b}-\hat{A}(\hat{x}_k+\alpha_k\hat{p}_k), - \end{equation*} -$$ - -

    or

    -$$ -\begin{equation*} -(\hat{b}-\hat{A}\hat{x}_k)-\alpha_k\hat{A}\hat{p}_k, - \end{equation*} -$$ - -

    which gives

    - -$$ -\begin{equation*} -\hat{r}_{k+1}=\hat{r}_k-\hat{A}\hat{p}_{k}, - \end{equation*} +\hat{p}_{k+1}=\hat{r}_k-\frac{\hat{p}_k^T \hat{A}\hat{r}_k}{\hat{p}_k^T\hat{A}\hat{p}_k} \hat{p}_k. +\end{equation*} $$
    @@ -309,7 +303,7 @@

    Conjugate gradient method

    40
  • 41
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs032.html b/doc/pub/week6/html/._week6-bs032.html index 3dd80c0c..b9b82a27 100644 --- a/doc/pub/week6/html/._week6-bs032.html +++ b/doc/pub/week6/html/._week6-bs032.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,55 +250,38 @@

     

     

     

    -

    Simple implementation of the Conjugate gradient algorithm

    +

    Conjugate gradient method

    +

    We can also compute the residual iteratively as

    +$$ +\begin{equation*} +\hat{r}_{k+1}=\hat{b}-\hat{A}\hat{x}_{k+1}, + \end{equation*} +$$ - -
    -
    -
    -
    -
    -
      Vector ConjugateGradient(Matrix A, Vector b, Vector x0){
    -  int dim = x0.Dimension();
    -  const double tolerance = 1.0e-14;
    -  Vector x(dim),r(dim),v(dim),z(dim);
    -  double c,t,d;
    +

    which equals

    +$$ +\begin{equation*} +\hat{b}-\hat{A}(\hat{x}_k+\alpha_k\hat{p}_k), + \end{equation*} +$$ - x = x0; - r = b - A*x; - v = r; - c = dot(r,r); - int i = 0; IterMax = dim; - while(i <= IterMax){ - z = A*v; - t = c/dot(v,z); - x = x + t*v; - r = r - t*z; - d = dot(r,r); - if(sqrt(d) < tolerance) - break; - v = r + (d/c)*v; - c = d; i++; - } - return x; -} -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    +

    or

    +$$ +\begin{equation*} +(\hat{b}-\hat{A}\hat{x}_k)-\alpha_k\hat{A}\hat{p}_k, + \end{equation*} +$$ + +

    which gives

    +
    +$$
    +\begin{equation*}
    +\hat{r}_{k+1}=\hat{r}_k-\alpha_k\hat{A}\hat{p}_{k},
    + \end{equation*}
    +$$
    
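    +

    As a quick numerical sanity check of this recursion (an editorial sketch, not part of the original notes), one can verify in NumPy that computing \( \hat{r}_{k+1} \) directly from its definition \( \hat{b}-\hat{A}\hat{x}_{k+1} \) agrees with the update \( \hat{r}_k-\alpha_k\hat{A}\hat{p}_k \) for an arbitrary step length \( \alpha_k \), since the identity is purely algebraic.

    import numpy as np

    rng = np.random.default_rng(2)
    n = 4
    B = rng.normal(size=(n, n))
    A = B @ B.T + n * np.eye(n)      # symmetric positive-definite test matrix
    b = rng.normal(size=n)

    x_k = rng.normal(size=n)         # arbitrary iterate
    p_k = rng.normal(size=n)         # arbitrary search direction
    alpha_k = 0.3                    # any step length works for this identity

    r_k = b - A @ x_k
    x_next = x_k + alpha_k * p_k
    r_direct = b - A @ x_next                 # residual from its definition
    r_update = r_k - alpha_k * (A @ p_k)      # residual from the recursion above
    print(np.allclose(r_direct, r_update))    # True
    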
    @@ -326,7 +311,7 @@

    41
  • 42
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs033.html b/doc/pub/week6/html/._week6-bs033.html index 854b3ee2..d732257b 100644 --- a/doc/pub/week6/html/._week6-bs033.html +++ b/doc/pub/week6/html/._week6-bs033.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,31 +250,55 @@

     

     

     

    -

    Broyden–Fletcher–Goldfarb–Shanno algorithm

    +

    Simple implementation of the Conjugate gradient algorithm

    -

    The optimization problem is to minimize \( f(\mathbf {x} ) \) where \( \mathbf {x} \) is a vector in \( R^{n} \), and \( f \) is a differentiable scalar function. There are no constraints on the values that \( \mathbf {x} \) can take.

    -

    The algorithm begins at an initial estimate for the optimal value \( \mathbf {x}_{0} \) and proceeds iteratively to get a better estimate at each stage.

    + +
    +
    +
    +
    +
    +
      Vector ConjugateGradient(Matrix A, Vector b, Vector x0){
    +  int dim = x0.Dimension();
    +  const double tolerance = 1.0e-14;
    +  Vector x(dim),r(dim),v(dim),z(dim);
    +  double c,t,d;
     
    -

    The search direction \( p_k \) at stage \( k \) is given by the solution of the analogue of the Newton equation

    -$$ -B_{k}\mathbf {p} _{k}=-\nabla f(\mathbf {x}_{k}), -$$ - -

    where \( B_{k} \) is an approximation to the Hessian matrix, which is -updated iteratively at each stage, and \( \nabla f(\mathbf {x} _{k}) \) -is the gradient of the function -evaluated at \( x_k \). -A line search in the direction \( p_k \) is then used to -find the next point \( x_{k+1} \) by minimising -

    -$$ -f(\mathbf {x}_{k}+\alpha \mathbf {p}_{k}), -$$ - -

    over the scalar \( \alpha > 0 \).

    + x = x0; + r = b - A*x; + v = r; + c = dot(r,r); + int i = 0; IterMax = dim; + while(i <= IterMax){ + z = A*v; + t = c/dot(v,z); + x = x + t*v; + r = r - t*z; + d = dot(r,r); + if(sqrt(d) < tolerance) + break; + v = r + (d/c)*v; + c = d; i++; + } + return x; +} +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
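    +

    The section title promises a Python version; the following NumPy sketch of the same loop is an editorial illustration (assuming a symmetric positive-definite matrix, with plain NumPy arrays in place of the Vector and Matrix classes used in the C++ routine above).

    import numpy as np

    def conjugate_gradient(A, b, x0, tolerance=1.0e-14):
        # Solve A x = b for symmetric positive-definite A, mirroring the C++ routine above
        x = x0.copy()
        r = b - A @ x            # initial residual
        v = r.copy()             # initial search direction
        c = r @ r
        iter_max = len(x0)       # at most dim iterations in exact arithmetic
        for _ in range(iter_max + 1):
            z = A @ v
            t = c / (v @ z)      # step length along the current direction
            x = x + t * v
            r = r - t * z        # updated residual
            d = r @ r
            if np.sqrt(d) < tolerance:
                break
            v = r + (d / c) * v  # next conjugate direction
            c = d
        return x

    A = np.array([[3.0, 2.0], [2.0, 6.0]])   # small SPD test system
    b = np.array([2.0, -8.0])
    x = conjugate_gradient(A, b, np.zeros(2))
    print(x, A @ x - b)                      # solution [2, -2], residual ~ 0
    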
    @@ -302,7 +328,7 @@

    Broyden–
  • 42
  • 43
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs034.html b/doc/pub/week6/html/._week6-bs034.html index c30ada46..42a372b7 100644 --- a/doc/pub/week6/html/._week6-bs034.html +++ b/doc/pub/week6/html/._week6-bs034.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,21 +250,34 @@

     

     

     

    -

    Stochastic Gradient Descent

    +

    Broyden–Fletcher–Goldfarb–Shanno algorithm

    +
    +
    + +

    The optimization problem is to minimize \( f(\mathbf {x} ) \) where \( \mathbf {x} \) is a vector in \( R^{n} \), and \( f \) is a differentiable scalar function. There are no constraints on the values that \( \mathbf {x} \) can take.

    -

    Stochastic gradient descent (SGD) and variants thereof address some of -the shortcomings of the Gradient descent method discussed above. -

    +

    The algorithm begins at an initial estimate for the optimal value \( \mathbf {x}_{0} \) and proceeds iteratively to get a better estimate at each stage.

    + +

    The search direction \( p_k \) at stage \( k \) is given by the solution of the analogue of the Newton equation

    +$$ +B_{k}\mathbf {p} _{k}=-\nabla f(\mathbf {x}_{k}), +$$ -

    The underlying idea of SGD comes from the observation that a given -function, which we want to minimize, can almost always be written as a -sum over \( n \) data points \( \{\mathbf{x}_i\}_{i=1}^n \), +

    where \( B_{k} \) is an approximation to the Hessian matrix, which is +updated iteratively at each stage, and \( \nabla f(\mathbf {x} _{k}) \) +is the gradient of the function +evaluated at \( x_k \). +A line search in the direction \( p_k \) is then used to +find the next point \( x_{k+1} \) by minimising

    $$ -C(\mathbf{\beta}) = \sum_{i=1}^n c_i(\mathbf{x}_i, -\mathbf{\beta}). +f(\mathbf {x}_{k}+\alpha \mathbf {p}_{k}), $$ +

    over the scalar \( \alpha > 0 \).
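    As a hedged, practical aside (not part of the original notes): SciPy's optimize module implements this scheme directly, so a simple differentiable test function can be minimized as follows.

    import numpy as np
    from scipy.optimize import minimize

    def f(x):                                      # simple differentiable test function
        return (x[0] - 1.0)**2 + 10.0*(x[1] - 2.0)**2

    def grad_f(x):                                 # its analytical gradient
        return np.array([2.0*(x[0] - 1.0), 20.0*(x[1] - 2.0)])

    x0 = np.array([0.0, 0.0])                      # initial estimate x_0
    res = minimize(f, x0, jac=grad_f, method='BFGS')
    print(res.x)                                   # close to [1, 2]
    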

    +
    +
    +

    @@ -289,7 +304,7 @@

    Stochastic Gradient Descent
  • 43
  • 44
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs035.html b/doc/pub/week6/html/._week6-bs035.html index 36b9815c..44eef00c 100644 --- a/doc/pub/week6/html/._week6-bs035.html +++ b/doc/pub/week6/html/._week6-bs035.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,22 +250,21 @@

     

     

     

    -

    Computation of gradients

    +

    Stochastic Gradient Descent

    -

    This in turn means that the gradient can be -computed as a sum over \( i \)-gradients +

    Stochastic gradient descent (SGD) and variants thereof address some of +the shortcomings of the Gradient descent method discussed above. +

    + +

    The underlying idea of SGD comes from the observation that a given +function, which we want to minimize, can almost always be written as a +sum over \( n \) data points \( \{\mathbf{x}_i\}_{i=1}^n \),

    $$ -\nabla_\beta C(\mathbf{\beta}) = \sum_i^n \nabla_\beta c_i(\mathbf{x}_i, -\mathbf{\beta}). +C(\mathbf{\beta}) = \sum_{i=1}^n c_i(\mathbf{x}_i, +\mathbf{\beta}). $$ -
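    For the least-squares type cost function used in the stochastic gradient program later in these notes this structure is explicit; the short (editorial) NumPy check below evaluates the cost both as a single vectorized expression and as an explicit sum of per-data-point terms \( c_i \).

    import numpy as np

    rng = np.random.default_rng(0)
    n = 100
    x = 2*rng.random(n)
    y = 4 + 3*x + rng.normal(size=n)
    X = np.c_[np.ones(n), x]                 # design matrix with an intercept column
    beta = np.array([4.0, 3.0])

    C_total = np.sum((y - X @ beta)**2)                      # cost in one expression
    C_sum = sum((y[i] - X[i] @ beta)**2 for i in range(n))   # explicit sum of c_i terms
    print(np.isclose(C_total, C_sum))                        # True
    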

    Stochasticity/randomness is introduced by only taking the -gradient on a subset of the data called minibatches. If there are \( n \) -data points and the size of each minibatch is \( M \), there will be \( n/M \) -minibatches. We denote these minibatches by \( B_k \) where -\( k=1,\cdots,n/M \). -

    @@ -290,7 +291,7 @@

    Computation of gradients

  • 44
  • 45
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs036.html b/doc/pub/week6/html/._week6-bs036.html index 4d9dce72..e5e5376e 100644 --- a/doc/pub/week6/html/._week6-bs036.html +++ b/doc/pub/week6/html/._week6-bs036.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,28 +250,22 @@

     

     

     

    -

    SGD example

    -

    As an example, suppose we have \( 10 \) data points \( (\mathbf{x}_1,\cdots, \mathbf{x}_{10}) \) -and we choose to have \( M=5 \) minibathces, -then each minibatch contains two data points. In particular we have -\( B_1 = (\mathbf{x}_1,\mathbf{x}_2), \cdots, B_5 = -(\mathbf{x}_9,\mathbf{x}_{10}) \). Note that if you choose \( M=1 \) you -have only a single batch with all data points and on the other extreme, -you may choose \( M=n \) resulting in a minibatch for each datapoint, i.e -\( B_k = \mathbf{x}_k \). -

    +

    Computation of gradients

    -

    The idea is now to approximate the gradient by replacing the sum over -all data points with a sum over the data points in one the minibatches -picked at random in each gradient descent step +

    This in turn means that the gradient can be +computed as a sum over \( i \)-gradients

    $$ -\nabla_{\beta} -C(\mathbf{\beta}) = \sum_{i=1}^n \nabla_\beta c_i(\mathbf{x}_i, -\mathbf{\beta}) \rightarrow \sum_{i \in B_k}^n \nabla_\beta -c_i(\mathbf{x}_i, \mathbf{\beta}). +\nabla_\beta C(\mathbf{\beta}) = \sum_i^n \nabla_\beta c_i(\mathbf{x}_i, +\mathbf{\beta}). $$ +

    Stochasticity/randomness is introduced by only taking the +gradient on a subset of the data called minibatches. If there are \( n \) +data points and the size of each minibatch is \( M \), there will be \( n/M \) +minibatches. We denote these minibatches by \( B_k \) where +\( k=1,\cdots,n/M \). +
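    A small (editorial) NumPy illustration for the least-squares cost: the full gradient is literally the sum of the per-data-point gradients, and summing only over a randomly chosen minibatch of size \( M \) gives a cheap, stochastic estimate of it.

    import numpy as np

    rng = np.random.default_rng(0)
    n, M = 100, 5
    x = 2*rng.random(n)
    y = 4 + 3*x + rng.normal(size=n)
    X = np.c_[np.ones(n), x]
    beta = np.zeros(2)

    # gradient of each c_i = (y_i - x_i^T beta)^2 with respect to beta
    grads = np.array([-2.0*(y[i] - X[i] @ beta)*X[i] for i in range(n)])
    full_grad = grads.sum(axis=0)                  # gradient of the full cost

    idx = rng.choice(n, size=M, replace=False)     # one random minibatch B_k
    minibatch_grad = grads[idx].sum(axis=0)
    print(full_grad)
    print((n/M)*minibatch_grad)                    # scaled minibatch estimate of the full gradient
    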

    @@ -295,6 +291,8 @@

    SGD example

  • 44
  • 45
  • 46
  • +
  • ...
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs037.html b/doc/pub/week6/html/._week6-bs037.html index 78d5c6d9..4384ec88 100644 --- a/doc/pub/week6/html/._week6-bs037.html +++ b/doc/pub/week6/html/._week6-bs037.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,20 +250,28 @@

     

     

     

    -

    The gradient step

    +

    SGD example

    +

    As an example, suppose we have \( 10 \) data points \( (\mathbf{x}_1,\cdots, \mathbf{x}_{10}) \)
    +and we choose minibatches of size \( M=2 \).
    +Then there are \( n/M=5 \) minibatches, each containing two data points. In particular we have
    +\( B_1 = (\mathbf{x}_1,\mathbf{x}_2), \cdots, B_5 =
    +(\mathbf{x}_9,\mathbf{x}_{10}) \). Note that if you choose \( M=n \) you
    +have only a single batch with all data points and on the other extreme,
    +you may choose \( M=1 \), resulting in a minibatch for each datapoint, i.e.,
    +\( B_k = \mathbf{x}_k \).
    +
    

    -

    Thus a gradient descent step now looks like

    +

    The idea is now to approximate the gradient by replacing the sum over
    +all data points with a sum over the data points in one of the minibatches,
    +picked at random in each gradient descent step
    +
    

    $$ -\beta_{j+1} = \beta_j - \gamma_j \sum_{i \in B_k}^n \nabla_\beta c_i(\mathbf{x}_i, -\mathbf{\beta}) +\nabla_{\beta} +C(\mathbf{\beta}) = \sum_{i=1}^n \nabla_\beta c_i(\mathbf{x}_i, +\mathbf{\beta}) \rightarrow \sum_{i \in B_k}^n \nabla_\beta +c_i(\mathbf{x}_i, \mathbf{\beta}). $$ -

    where \( k \) is picked at random with equal -probability from \( [1,n/M] \). An iteration over the number of -minibathces (n/M) is commonly referred to as an epoch. Thus it is -typical to choose a number of epochs and for each epoch iterate over -the number of minibatches, as exemplified in the code below. -
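    A short (editorial) NumPy sketch of this splitting, with integer indices standing in for the data points; shuffling first makes each epoch use a different random partition.

    import numpy as np

    n, M = 10, 2                               # 10 data points, minibatches of size M = 2
    indices = np.arange(n)                     # stand-ins for x_1, ..., x_10
    np.random.shuffle(indices)                 # optional: new random partition each epoch
    minibatches = np.split(indices, n // M)    # n/M = 5 minibatches with M points each
    print(minibatches)
    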

    @@ -286,6 +296,7 @@

    The gradient step

  • 44
  • 45
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs038.html b/doc/pub/week6/html/._week6-bs038.html index c449298f..fc2a7d66 100644 --- a/doc/pub/week6/html/._week6-bs038.html +++ b/doc/pub/week6/html/._week6-bs038.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,51 +250,19 @@

     

     

     

    -

    Simple example code

    +

    The gradient step

    +

    Thus a gradient descent step now looks like

    +$$ +\beta_{j+1} = \beta_j - \gamma_j \sum_{i \in B_k}^n \nabla_\beta c_i(\mathbf{x}_i, +\mathbf{\beta}) +$$ - -
    -
    -
    -
    -
    -
    import numpy as np 
    -
    -n = 100 #100 datapoints 
    -M = 5   #size of each minibatch
    -m = int(n/M) #number of minibatches
    -n_epochs = 10 #number of epochs
    -
    -j = 0
    -for epoch in range(1,n_epochs+1):
    -    for i in range(m):
    -        k = np.random.randint(m) #Pick the k-th minibatch at random
    -        #Compute the gradient using the data in minibatch Bk
    -        #Compute new suggestion for 
    -        j += 1
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -

    Taking the gradient only on a subset of the data has two important -benefits. First, it introduces randomness which decreases the chance -that our opmization scheme gets stuck in a local minima. Second, if -the size of the minibatches are small relative to the number of -datapoints (\( M < n \)), the computation of the gradient is much -cheaper since we sum over the datapoints in the \( k-th \) minibatch and not -all \( n \) datapoints. +

    where \( k \) is picked at random with equal
    +probability from \( [1,n/M] \). An iteration over the number of
    +minibatches (\( n/M \)) is commonly referred to as an epoch. Thus it is
    +typical to choose a number of epochs and for each epoch iterate over
    +the minibatches, as exemplified in the code below.
    +
    

    @@ -317,6 +287,7 @@

    Simple example code

  • 44
  • 45
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs039.html b/doc/pub/week6/html/._week6-bs039.html index 216815ef..cc0ef023 100644 --- a/doc/pub/week6/html/._week6-bs039.html +++ b/doc/pub/week6/html/._week6-bs039.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,18 +250,51 @@

     

     

     

    -

    When do we stop?

    +

    Simple example code

    -

    A natural question is when do we stop the search for a new minimum? -One possibility is to compute the full gradient after a given number -of epochs and check if the norm of the gradient is smaller than some -threshold and stop if true. However, the condition that the gradient -is zero is valid also for local minima, so this would only tell us -that we are close to a local/global minimum. However, we could also -evaluate the cost function at this point, store the result and -continue the search. If the test kicks in at a later stage we can -compare the values of the cost function and keep the \( \beta \) that -gave the lowest value. + + +

    +
    +
    +
    +
    +
    import numpy as np 
    +
    +n = 100 #100 datapoints 
    +M = 5   #size of each minibatch
    +m = int(n/M) #number of minibatches
    +n_epochs = 10 #number of epochs
    +
    +j = 0
    +for epoch in range(1,n_epochs+1):
    +    for i in range(m):
    +        k = np.random.randint(m) #Pick the k-th minibatch at random
    +        #Compute the gradient using the data in minibatch Bk
    +        #Compute new suggestion for beta
    
    +        j += 1
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +

    Taking the gradient only on a subset of the data has two important
    +benefits. First, it introduces randomness, which decreases the chance
    +that our optimization scheme gets stuck in a local minimum. Second, if
    +the size of the minibatches is small relative to the number of
    +datapoints (\( M < n \)), the computation of the gradient is much
    +cheaper since we sum over the datapoints in the \( k \)-th minibatch and not
    +all \( n \) datapoints.
    +
    

    @@ -283,6 +318,7 @@

    When do we stop?

  • 44
  • 45
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs040.html b/doc/pub/week6/html/._week6-bs040.html index 131bffff..61dc8c38 100644 --- a/doc/pub/week6/html/._week6-bs040.html +++ b/doc/pub/week6/html/._week6-bs040.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,69 +250,20 @@

     

     

     

    -

    Slightly different approach

    +

    When do we stop?

    -

    Another approach is to let the step length \( \gamma_j \) depend on the -number of epochs in such a way that it becomes very small after a -reasonable time such that we do not move at all. +

    A natural question is when do we stop the search for a new minimum?
    +One possibility is to compute the full gradient after a given number
    +of epochs and check if its norm is smaller than some
    +threshold, stopping if it is. However, a vanishing gradient also
    +characterizes local minima, so this would only tell us
    +that we are close to a local (or the global) minimum. Alternatively, we could
    +evaluate the cost function at this point, store the result and
    +continue the search. If the test kicks in at a later stage we can
    +compare the values of the cost function and keep the \( \beta \) that
    +gave the lowest value.
    +
    
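    A hedged sketch of such a stopping test for the least-squares problem used in the program further below (an editorial illustration; the learning rate, tolerance and minibatch size are arbitrary choices): after each epoch we evaluate the full gradient and the full cost, stop early if the gradient norm falls below a threshold, and keep the best \( \beta \) seen so far.

    import numpy as np

    rng = np.random.default_rng(0)
    n, M = 100, 5
    m = n // M                                   # number of minibatches
    x = 2*rng.random((n, 1))
    y = 4 + 3*x + rng.normal(size=(n, 1))
    X = np.c_[np.ones((n, 1)), x]

    beta = rng.normal(size=(2, 1))
    best_beta, best_cost = beta.copy(), np.mean((y - X @ beta)**2)
    eta, tol, n_epochs = 0.05, 1.0e-2, 100

    for epoch in range(n_epochs):
        for i in range(m):
            idx = rng.integers(0, n, size=M)               # random minibatch B_k
            Xi, yi = X[idx], y[idx]
            beta -= eta*(2.0/M)*Xi.T @ (Xi @ beta - yi)    # minibatch gradient step
        full_grad = (2.0/n)*X.T @ (X @ beta - y)           # full gradient, checked once per epoch
        cost = np.mean((y - X @ beta)**2)
        if cost < best_cost:                               # remember the best beta seen so far
            best_cost, best_beta = cost, beta.copy()
        if np.linalg.norm(full_grad) < tol:
            break

    print(best_beta.ravel())
    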

    -

    As an example, let \( e = 0,1,2,3,\cdots \) denote the current epoch and let \( t_0, t_1 > 0 \) be two fixed numbers. Furthermore, let \( t = e \cdot m + i \) where \( m \) is the number of minibatches and \( i=0,\cdots,m-1 \). Then the function $$\gamma_j(t; t_0, t_1) = \frac{t_0}{t+t_1} $$ goes to zero as the number of epochs gets large. I.e. we start with a step length \( \gamma_j (0; t_0, t_1) = t_0/t_1 \) which decays in time \( t \).

    - -

    In this way we can fix the number of epochs, compute \( \beta \) and -evaluate the cost function at the end. Repeating the computation will -give a different result since the scheme is random by design. Then we -pick the final \( \beta \) that gives the lowest value of the cost -function. -

    - - - -
    -
    -
    -
    -
    -
    import numpy as np 
    -
    -def step_length(t,t0,t1):
    -    return t0/(t+t1)
    -
    -n = 100 #100 datapoints 
    -M = 5   #size of each minibatch
    -m = int(n/M) #number of minibatches
    -n_epochs = 500 #number of epochs
    -t0 = 1.0
    -t1 = 10
    -
    -gamma_j = t0/t1
    -j = 0
    -for epoch in range(1,n_epochs+1):
    -    for i in range(m):
    -        k = np.random.randint(m) #Pick the k-th minibatch at random
    -        #Compute the gradient using the data in minibatch Bk
    -        #Compute new suggestion for beta
    -        t = epoch*m+i
    -        gamma_j = step_length(t,t0,t1)
    -        j += 1
    -
    -print("gamma_j after %d epochs: %g" % (n_epochs,gamma_j))
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -

      @@ -331,6 +284,7 @@

      Slightly different approach
    • 44
    • 45
    • 46
    • +
    • 47
    • »
    diff --git a/doc/pub/week6/html/._week6-bs041.html b/doc/pub/week6/html/._week6-bs041.html index 18c31396..382ac30f 100644 --- a/doc/pub/week6/html/._week6-bs041.html +++ b/doc/pub/week6/html/._week6-bs041.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,7 +250,21 @@

     

     

     

    -

    Program for stochastic gradient

    +

    Slightly different approach

    + +

    Another approach is to let the step length \( \gamma_j \) depend on the
    +number of epochs, in such a way that it eventually becomes so small
    +that we effectively stop moving.
    +
    

    + +

    As an example, let \( e = 0,1,2,3,\cdots \) denote the current epoch and let \( t_0, t_1 > 0 \) be two fixed numbers. Furthermore, let \( t = e \cdot m + i \) where \( m \) is the number of minibatches and \( i=0,\cdots,m-1 \). Then the function $$\gamma_j(t; t_0, t_1) = \frac{t_0}{t+t_1} $$ goes to zero as the number of epochs gets large. I.e. we start with a step length \( \gamma_j (0; t_0, t_1) = t_0/t_1 \) which decays in time \( t \).

    + +

    In this way we can fix the number of epochs, compute \( \beta \) and +evaluate the cost function at the end. Repeating the computation will +give a different result since the scheme is random by design. Then we +pick the final \( \beta \) that gives the lowest value of the cost +function. +

    @@ -257,72 +273,30 @@

    Program for stochastic g
    -
    # Importing various packages
    -from math import exp, sqrt
    -from random import random, seed
    -import numpy as np
    -import matplotlib.pyplot as plt
    -from sklearn.linear_model import SGDRegressor
    -
    -x = 2*np.random.rand(100,1)
    -y = 4+3*x+np.random.randn(100,1)
    -
    -xb = np.c_[np.ones((100,1)), x]
    -theta_linreg = np.linalg.inv(xb.T.dot(xb)).dot(xb.T).dot(y)
    -print("Own inversion")
    -print(theta_linreg)
    -sgdreg = SGDRegressor(n_iter = 50, penalty=None, eta0=0.1)
    -sgdreg.fit(x,y.ravel())
    -print("sgdreg from scikit")
    -print(sgdreg.intercept_, sgdreg.coef_)
    -
    +  
    import numpy as np 
     
    -theta = np.random.randn(2,1)
    -
    -eta = 0.1
    -Niterations = 1000
    -m = 100
    -
    -for iter in range(Niterations):
    -    gradients = 2.0/m*xb.T.dot(xb.dot(theta)-y)
    -    theta -= eta*gradients
    -print("theta frm own gd")
    -print(theta)
    -
    -xnew = np.array([[0],[2]])
    -xbnew = np.c_[np.ones((2,1)), xnew]
    -ypredict = xbnew.dot(theta)
    -ypredict2 = xbnew.dot(theta_linreg)
    -
    -
    -n_epochs = 50
    -t0, t1 = 5, 50
    -m = 100
    -def learning_schedule(t):
    +def step_length(t,t0,t1):
         return t0/(t+t1)
     
    -theta = np.random.randn(2,1)
    +n = 100 #100 datapoints 
    +M = 5   #size of each minibatch
    +m = int(n/M) #number of minibatches
    +n_epochs = 500 #number of epochs
    +t0 = 1.0
    +t1 = 10
     
    -for epoch in range(n_epochs):
    +gamma_j = t0/t1
    +j = 0
    +for epoch in range(1,n_epochs+1):
         for i in range(m):
    -        random_index = np.random.randint(m)
    -        xi = xb[random_index:random_index+1]
    -        yi = y[random_index:random_index+1]
    -        gradients = 2 * xi.T.dot(xi.dot(theta)-yi)
    -        eta = learning_schedule(epoch*m+i)
    -        theta = theta - eta*gradients
    -print("theta from own sdg")
    -print(theta)
    -
    +        k = np.random.randint(m) #Pick the k-th minibatch at random
    +        #Compute the gradient using the data in minibatch Bk
    +        #Compute new suggestion for beta
    +        t = epoch*m+i
    +        gamma_j = step_length(t,t0,t1)
    +        j += 1
     
    -plt.plot(xnew, ypredict, "r-")
    -plt.plot(xnew, ypredict2, "b-")
    -plt.plot(x, y ,'ro')
    -plt.axis([0,2.0,0, 15.0])
    -plt.xlabel(r'$x$')
    -plt.ylabel(r'$y$')
    -plt.title(r'Random numbers ')
    -plt.show()
    +print("gamma_j after %d epochs: %g" % (n_epochs,gamma_j))
     
    @@ -358,6 +332,7 @@

    Program for stochastic g
  • 44
  • 45
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs042.html b/doc/pub/week6/html/._week6-bs042.html index 87e8dc4c..445193fa 100644 --- a/doc/pub/week6/html/._week6-bs042.html +++ b/doc/pub/week6/html/._week6-bs042.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,16 +250,97 @@

     

     

     

    -

    Using gradient descent methods, limitations

    +

    Program for stochastic gradient

    + + + +
    +
    +
    +
    +
    +
    # Importing various packages
    +from math import exp, sqrt
    +from random import random, seed
    +import numpy as np
    +import matplotlib.pyplot as plt
    +from sklearn.linear_model import SGDRegressor
    +
    +x = 2*np.random.rand(100,1)
    +y = 4+3*x+np.random.randn(100,1)
    +
    +xb = np.c_[np.ones((100,1)), x]
    +theta_linreg = np.linalg.inv(xb.T.dot(xb)).dot(xb.T).dot(y)
    +print("Own inversion")
    +print(theta_linreg)
    +sgdreg = SGDRegressor(max_iter = 50, penalty=None, eta0=0.1)
    
    +sgdreg.fit(x,y.ravel())
    +print("sgdreg from scikit")
    +print(sgdreg.intercept_, sgdreg.coef_)
    +
    +
    +theta = np.random.randn(2,1)
    +
    +eta = 0.1
    +Niterations = 1000
    +m = 100
    +
    +for iter in range(Niterations):
    +    gradients = 2.0/m*xb.T.dot(xb.dot(theta)-y)
    +    theta -= eta*gradients
    +print("theta from own gd")
    
    +print(theta)
    +
    +xnew = np.array([[0],[2]])
    +xbnew = np.c_[np.ones((2,1)), xnew]
    +ypredict = xbnew.dot(theta)
    +ypredict2 = xbnew.dot(theta_linreg)
    +
    +
    +n_epochs = 50
    +t0, t1 = 5, 50
    +m = 100
    +def learning_schedule(t):
    +    return t0/(t+t1)
    +
    +theta = np.random.randn(2,1)
    +
    +for epoch in range(n_epochs):
    +    for i in range(m):
    +        random_index = np.random.randint(m)
    +        xi = xb[random_index:random_index+1]
    +        yi = y[random_index:random_index+1]
    +        gradients = 2 * xi.T.dot(xi.dot(theta)-yi)
    +        eta = learning_schedule(epoch*m+i)
    +        theta = theta - eta*gradients
    +print("theta from own sgd")
    
    +print(theta)
    +
    +
    +plt.plot(xnew, ypredict, "r-")
    +plt.plot(xnew, ypredict2, "b-")
    +plt.plot(x, y ,'ro')
    +plt.axis([0,2.0,0, 15.0])
    +plt.xlabel(r'$x$')
    +plt.ylabel(r'$y$')
    +plt.title(r'Random numbers ')
    +plt.show()
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + -
      -
    • Gradient descent (GD) finds local minima of our function. Since the GD algorithm is deterministic, if it converges, it will converge to a local minimum of our energy function. Because in ML we are often dealing with extremely rugged landscapes with many local minima, this can lead to poor performance.
    • -
    • GD is sensitive to initial conditions. One consequence of the local nature of GD is that initial conditions matter. Depending on where one starts, one will end up at a different local minima. Therefore, it is very important to think about how one initializes the training process. This is true for GD as well as more complicated variants of GD.
    • -
    • Gradients are computationally expensive to calculate for large datasets. In many cases in statistics and ML, the energy function is a sum of terms, with one term for each data point. For example, in linear regression, \( E \propto \sum_{i=1}^n (y_i - \mathbf{w}^T\cdot\mathbf{x}_i)^2 \); for logistic regression, the square error is replaced by the cross entropy. To calculate the gradient we have to sum over all \( n \) data points. Doing this at every GD step becomes extremely computationally expensive. An ingenious solution to this, is to calculate the gradients using small subsets of the data called "mini batches". This has the added benefit of introducing stochasticity into our algorithm.
    • -
    • GD is very sensitive to choices of learning rates. GD is extremely sensitive to the choice of learning rates. If the learning rate is very small, the training process take an extremely long time. For larger learning rates, GD can diverge and give poor results. Furthermore, depending on what the local landscape looks like, we have to modify the learning rates to ensure convergence. Ideally, we would adaptively choose the learning rates to match the landscape.
    • -
    • GD treats all directions in parameter space uniformly. Another major drawback of GD is that unlike Newton's method, the learning rate for GD is the same in all directions in parameter space. For this reason, the maximum learning rate is set by the behavior of the steepest direction and this can significantly slow down training. Ideally, we would like to take large steps in flat directions and small steps in steep directions. Since we are exploring rugged landscapes where curvatures change, this requires us to keep track of not only the gradient but second derivatives. The ideal scenario would be to calculate the Hessian but this proves to be too computationally expensive.
    • -
    • GD can take exponential time to escape saddle points, even with random initialization. As we mentioned, GD is extremely sensitive to initial condition since it determines the particular local minimum GD would eventually reach. However, even with a good initialization scheme, through the introduction of randomness, GD can still take exponential time to escape saddle points.
    • -

      @@ -276,6 +359,7 @@

      Using gradien
    • 44
    • 45
    • 46
    • +
    • 47
    • »
    diff --git a/doc/pub/week6/html/._week6-bs043.html b/doc/pub/week6/html/._week6-bs043.html index b85930ad..6fb3be9b 100644 --- a/doc/pub/week6/html/._week6-bs043.html +++ b/doc/pub/week6/html/._week6-bs043.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,23 +250,16 @@

     

     

     

    -

    Codes from numerical recipes

    -
    -
    - -

    You can however use codes we have adapted from the text Numerical Recipes in C++, see chapter 10.7. -Here we present a program, which you also can find at the webpage of the course we use the functions dfpmin and lnsrch. This is a variant of the Broyden et al algorithm discussed in the previous slide. -

    +

    Using gradient descent methods, limitations

      -
    • The program uses the harmonic oscillator in one dimensions as example.
    • -
    • The program does not use armadillo to handle vectors and matrices, but employs rather my own vector-matrix class. These auxiliary functions, and the main program model.cpp can all be found under the program link here.
    • +
    • Gradient descent (GD) finds local minima of our function. Since the GD algorithm is deterministic, if it converges, it will converge to a local minimum of our energy function. Because in ML we are often dealing with extremely rugged landscapes with many local minima, this can lead to poor performance.
    • +
    • GD is sensitive to initial conditions. One consequence of the local nature of GD is that initial conditions matter. Depending on where one starts, one will end up at a different local minimum. Therefore, it is very important to think about how one initializes the training process. This is true for GD as well as more complicated variants of GD.
    • +
    • Gradients are computationally expensive to calculate for large datasets. In many cases in statistics and ML, the energy function is a sum of terms, with one term for each data point. For example, in linear regression, \( E \propto \sum_{i=1}^n (y_i - \mathbf{w}^T\cdot\mathbf{x}_i)^2 \); for logistic regression, the square error is replaced by the cross entropy. To calculate the gradient we have to sum over all \( n \) data points. Doing this at every GD step becomes extremely computationally expensive. An ingenious solution to this is to calculate the gradients using small subsets of the data called "mini batches". This has the added benefit of introducing stochasticity into our algorithm.
    • +
    • GD is very sensitive to choices of learning rates. GD is extremely sensitive to the choice of learning rates. If the learning rate is very small, the training process takes an extremely long time. For larger learning rates, GD can diverge and give poor results. Furthermore, depending on what the local landscape looks like, we have to modify the learning rates to ensure convergence. Ideally, we would adaptively choose the learning rates to match the landscape; a short sketch of this sensitivity follows the list.
    • +
    • GD treats all directions in parameter space uniformly. Another major drawback of GD is that unlike Newton's method, the learning rate for GD is the same in all directions in parameter space. For this reason, the maximum learning rate is set by the behavior of the steepest direction and this can significantly slow down training. Ideally, we would like to take large steps in flat directions and small steps in steep directions. Since we are exploring rugged landscapes where curvatures change, this requires us to keep track of not only the gradient but also the second derivatives. The ideal scenario would be to calculate the Hessian but this proves to be too computationally expensive.
    • +
    • GD can take exponential time to escape saddle points, even with random initialization. As we mentioned, GD is extremely sensitive to the initial conditions since they determine the particular local minimum GD would eventually reach. However, even with a good initialization scheme, through the introduction of randomness, GD can still take exponential time to escape saddle points.
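The learning-rate sensitivity mentioned in the list above can already be seen in one dimension. The following minimal Python sketch (my own toy example, not from the course material) runs plain gradient descent on the convex quadratic \( f(x)=\frac{1}{2}x^2 \), for which the update is \( x \leftarrow (1-\eta)x \): it converges for \( \eta<2 \) and diverges for \( \eta>2 \).

def gradient_descent(eta, n_steps=50, x0=1.0):
    # gradient descent on f(x) = 0.5*x**2, whose gradient is f'(x) = x
    x = x0
    for _ in range(n_steps):
        x = x - eta * x
    return x

for eta in [0.1, 1.0, 1.9, 2.1]:
    print(f"eta = {eta:3.1f} -> x after 50 steps = {gradient_descent(eta):.3e}")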
    -

    Below we show only excerpts from the main program. For the full program, see the above link.

    -
    -
    - -

      @@ -282,6 +277,7 @@

      Codes from numerical recipe
    • 44
    • 45
    • 46
    • +
    • 47
    • »
    diff --git a/doc/pub/week6/html/._week6-bs044.html b/doc/pub/week6/html/._week6-bs044.html index 39e688bd..6ce00641 100644 --- a/doc/pub/week6/html/._week6-bs044.html +++ b/doc/pub/week6/html/._week6-bs044.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,52 +250,19 @@

     

     

     

    -

    Finding the minimum of the harmonic oscillator model in one dimension

    +

    Codes from numerical recipes

    +

You can however use codes we have adapted from the text Numerical Recipes in C++, see chapter 10.7.
+Here we present a program, which you can also find at the webpage of the course, where we use the functions dfpmin and lnsrch. This is a variant of the Broyden et al. algorithm discussed in the previous slide.
+

    - -
    -
    -
    -
    -
    -
    //   Main function begins here
    -int main()
    -{
    -     int n, iter;
    -     double gtol, fret;
    -     double alpha;
    -     n = 1;
    -//   reserve space in memory for vectors containing the variational
    -//   parameters
    -     Vector g(n), p(n);
    -     cout << "Read in guess for alpha" << endl;
    -     cin >> alpha;
    -     gtol = 1.0e-5;
    -//   now call dfmin and compute the minimum
    -     p(0) = alpha;
    -     dfpmin(p, n, gtol, &iter, &fret, Efunction, dEfunction);
    -     cout << "Value of energy minimum = " << fret << endl;
    -     cout << "Number of iterations = " << iter << endl;
    -     cout << "Value of alpha at minimum = " << p(0) << endl;
    -      return 0;
    -}  // end of main program
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    +
      +
    • The program uses the harmonic oscillator in one dimensions as example.
    • +
    • The program does not use armadillo to handle vectors and matrices, but employs rather my own vector-matrix class. These auxiliary functions, and the main program model.cpp can all be found under the program link here.
    • +
    +

    Below we show only excerpts from the main program. For the full program, see the above link.

    @@ -314,6 +283,7 @@

    44
  • 45
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs045.html b/doc/pub/week6/html/._week6-bs045.html index bb4b96db..34c3f875 100644 --- a/doc/pub/week6/html/._week6-bs045.html +++ b/doc/pub/week6/html/._week6-bs045.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -248,15 +250,10 @@

     

     

     

    -

    Functions to observe

    +

    Finding the minimum of the harmonic oscillator model in one dimension

    -

    The functions Efunction and dEfunction compute the expectation value of the energy and its derivative. -They use the the quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS) -It uses the first derivatives only. The BFGS algorithm has proven good performance even for non-smooth optimizations. -These functions need to be changed when you want to your own derivatives. -

    @@ -264,18 +261,27 @@

    Functions to observe

    -
    //  this function defines the expectation value of the local energy
    -double Efunction(Vector  &x)
    +  
    //   Main function begins here
    +int main()
     {
    -  double value = x(0)*x(0)*0.5+1.0/(8*x(0)*x(0));
    -  return value;
    -} // end of function to evaluate
    -
    -//  this function defines the derivative of the energy 
    -void dEfunction(Vector &x, Vector &g)
    -{
    -  g(0) = x(0)-1.0/(4*x(0)*x(0)*x(0));
    -} // end of function to evaluate
    +     int n, iter;
    +     double gtol, fret;
    +     double alpha;
    +     n = 1;
    +//   reserve space in memory for vectors containing the variational
    +//   parameters
    +     Vector g(n), p(n);
    +     cout << "Read in guess for alpha" << endl;
    +     cin >> alpha;
    +     gtol = 1.0e-5;
    +//   now call dfmin and compute the minimum
    +     p(0) = alpha;
    +     dfpmin(p, n, gtol, &iter, &fret, Efunction, dEfunction);
    +     cout << "Value of energy minimum = " << fret << endl;
    +     cout << "Number of iterations = " << iter << endl;
    +     cout << "Value of alpha at minimum = " << p(0) << endl;
    +      return 0;
    +}  // end of main program
     
    @@ -290,12 +296,6 @@

    Functions to observe

    - -

    You need to change these functions in order to compute the local energy for your system. I used 1000 -cycles per call to get a new value of \( \langle E_L[\alpha]\rangle \). -When I compute the local energy I also compute its derivative. -After roughly 10-20 iterations I got a converged result in terms of \( \alpha \). -

    @@ -315,6 +315,8 @@

    Functions to observe

  • 44
  • 45
  • 46
  • +
  • 47
  • +
  • »
  • diff --git a/doc/pub/week6/html/._week6-bs046.html b/doc/pub/week6/html/._week6-bs046.html index c2daa80e..2905d021 100644 --- a/doc/pub/week6/html/._week6-bs046.html +++ b/doc/pub/week6/html/._week6-bs046.html @@ -8,8 +8,8 @@ - -Week 8 February 20-24: Gradient Methods + +Week 8 February 19-23: Gradient Methods @@ -36,53 +36,7 @@
  • Overview of week 8, February 20-24
  • -
  • Top-down start
  • -
  • Motivation
  • -
  • Simple example and demonstration
  • -
  • Simple example and demonstration
  • -
  • Exercise 1: Find the local energy for the harmonic oscillator
  • -
  • Variance in the simple model
  • -
  • Computing the derivatives
  • -
  • Expressions for finding the derivatives of the local energy
  • -
  • Derivatives of the local energy
  • -
  • Exercise 2: General expression for the derivative of the energy
  • -
  • Python program for 2-electrons in 2 dimensions
  • -
  • Using Broyden's algorithm in scipy
  • -
  • Brief reminder on Newton-Raphson's method
  • -
  • The equations
  • -
  • Simple geometric interpretation
  • -
  • Extending to more than one variable
  • -
  • Steepest descent
  • -
  • More on Steepest descent
  • -
  • The ideal
  • -
  • The sensitiveness of the gradient descent
  • -
  • Convex functions
  • -
  • Convex function
  • -
  • Conditions on convex functions
  • -
  • More on convex functions
  • -
  • Some simple problems
  • -
  • Standard steepest descent
  • -
  • Gradient method
  • -
  • Steepest descent method
  • -
  • Steepest descent method
  • -
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Overview
  • +
  • Brief reminder on Newton-Raphson's method
  • +
  • The equations
  • +
  • Simple geometric interpretation
  • +
  • Extending to more than one variable
  • +
  • Steepest descent
  • +
  • More on Steepest descent
  • +
  • The ideal
  • +
  • The sensitiveness of the gradient descent
  • +
  • Convex functions
  • +
  • Convex function
  • +
  • Conditions on convex functions
  • +
  • More on convex functions
  • +
  • Some simple problems
  • +
  • Standard steepest descent
  • +
  • Gradient method
  • +
  • Steepest descent method
  • +
  • Steepest descent method
  • +
  • Final expressions
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -306,20 +250,56 @@

     

     

     

    -

    Stochastic Gradient Descent

    - -

    Stochastic gradient descent (SGD) and variants thereof address some of -the shortcomings of the Gradient descent method discussed above. +

    Functions to observe

    +
    +
    + +

The functions Efunction and dEfunction compute the expectation value of the energy and its derivative.
+They use the quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS).
+It uses the first derivatives only. The BFGS algorithm has proven good performance even for non-smooth optimizations.
+These functions need to be changed when you want to use your own derivatives.

    -

    The underlying idea of SGD comes from the observation that a given -function, which we want to minimize, can almost always be written as a -sum over \( n \) data points \( \{\mathbf{x}_i\}_{i=1}^n \), + +

    +
    +
    +
    +
    +
    //  this function defines the expectation value of the local energy
    +double Efunction(Vector  &x)
    +{
    +  double value = x(0)*x(0)*0.5+1.0/(8*x(0)*x(0));
    +  return value;
    +} // end of function to evaluate
    +
    +//  this function defines the derivative of the energy 
    +void dEfunction(Vector &x, Vector &g)
    +{
    +  g(0) = x(0)-1.0/(4*x(0)*x(0)*x(0));
    +} // end of function to evaluate
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +

    You need to change these functions in order to compute the local energy for your system. I used 1000 +cycles per call to get a new value of \( \langle E_L[\alpha]\rangle \). +When I compute the local energy I also compute its derivative. +After roughly 10-20 iterations I got a converged result in terms of \( \alpha \).
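As a cross-check of the C++ functions above, the same one-parameter minimization can be sketched in Python with scipy.optimize.minimize and its BFGS method (this is my own rewrite for illustration, not the course program). The analytic minimum of \( E(\alpha)=\alpha^2/2+1/(8\alpha^2) \) is at \( \alpha=1/\sqrt{2} \) with \( E=1/2 \), which the sketch reproduces.

import numpy as np
from scipy.optimize import minimize

def Efunction(x):
    # expectation value of the local energy, E(alpha) = alpha^2/2 + 1/(8 alpha^2)
    alpha = x[0]
    return 0.5*alpha*alpha + 1.0/(8.0*alpha*alpha)

def dEfunction(x):
    # derivative of the energy with respect to alpha
    alpha = x[0]
    return np.array([alpha - 1.0/(4.0*alpha**3)])

res = minimize(Efunction, x0=[1.0], jac=dEfunction, method='BFGS', tol=1.0e-5)
print("Value of energy minimum =", res.fun)
print("Number of iterations =", res.nit)
print("Value of alpha at minimum =", res.x[0])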

    -$$ -C(\mathbf{\beta}) = \sum_{i=1}^n c_i(\mathbf{x}_i, -\mathbf{\beta}). -$$ +
    +

    @@ -337,18 +317,6 @@

    Stochastic Gradient Descent
  • 45
  • 46
  • 47
  • -
  • 48
  • -
  • 49
  • -
  • 50
  • -
  • 51
  • -
  • 52
  • -
  • 53
  • -
  • 54
  • -
  • 55
  • -
  • 56
  • -
  • ...
  • -
  • 58
  • -
  • »
  • diff --git a/doc/pub/week6/html/week6-bs.html b/doc/pub/week6/html/week6-bs.html index 66314f1e..816dfa3a 100644 --- a/doc/pub/week6/html/week6-bs.html +++ b/doc/pub/week6/html/week6-bs.html @@ -36,7 +36,7 @@
  • Overview of week 8
  • +
  • Overview
  • Brief reminder on Newton-Raphson's method
  • The equations
  • Simple geometric interpretation
  • @@ -211,32 +212,33 @@
  • Steepest descent method
  • Steepest descent method
  • Final expressions
  • -
  • Code examples for steepest descent
  • -
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • -
  • The routine for the steepest descent method
  • -
  • Steepest descent example
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method and iterations
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Conjugate gradient method
  • -
  • Simple implementation of the Conjugate gradient algorithm
  • -
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • -
  • Stochastic Gradient Descent
  • -
  • Computation of gradients
  • -
  • SGD example
  • -
  • The gradient step
  • -
  • Simple example code
  • -
  • When do we stop?
  • -
  • Slightly different approach
  • -
  • Program for stochastic gradient
  • -
  • Using gradient descent methods, limitations
  • -
  • Codes from numerical recipes
  • -
  • Finding the minimum of the harmonic oscillator model in one dimension
  • -
  • Functions to observe
  • +
  • Our simple \( 2\times 2 \) example
  • +
  • Derivatives and more
  • +
  • Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come
  • +
  • The routine for the steepest descent method
  • +
  • Steepest descent example
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method and iterations
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Conjugate gradient method
  • +
  • Simple implementation of the Conjugate gradient algorithm
  • +
  • Broyden–Fletcher–Goldfarb–Shanno algorithm
  • +
  • Stochastic Gradient Descent
  • +
  • Computation of gradients
  • +
  • SGD example
  • +
  • The gradient step
  • +
  • Simple example code
  • +
  • When do we stop?
  • +
  • Slightly different approach
  • +
  • Program for stochastic gradient
  • +
  • Using gradient descent methods, limitations
  • +
  • Codes from numerical recipes
  • +
  • Finding the minimum of the harmonic oscillator model in one dimension
  • +
  • Functions to observe
  • @@ -266,7 +268,7 @@

    Week 8 February 19-23: Gradient Methods


    -

    Jan 1, 2024

    +

    February 23, 2024


    @@ -291,7 +293,7 @@

    Jan 1, 2024

  • 9
  • 10
  • ...
  • -
  • 46
  • +
  • 47
  • »
  • diff --git a/doc/pub/week6/html/week6-reveal.html b/doc/pub/week6/html/week6-reveal.html index e81e3f08..7d127766 100644 --- a/doc/pub/week6/html/week6-reveal.html +++ b/doc/pub/week6/html/week6-reveal.html @@ -184,7 +184,7 @@

    Week 8 February 19-23: Gradient Methods


    -

    Jan 1, 2024

    +

    February 23, 2024


    @@ -195,7 +195,7 @@

    Jan 1, 2024

    -

    Overview of week 8

    +

    Overview

    Topics

    @@ -235,7 +235,7 @@

    Overview of week 8

    Brief reminder on Newton-Raphson's method

    -

    Let us quickly remind ourselves how we derive the above method.

    +

    Let us quickly remind ourselves on how we derive the above method.

    Perhaps the most celebrated of all one-dimensional root-finding routines is Newton's method, also called the Newton-Raphson @@ -740,7 +740,75 @@

    Final expressions

    -

    Code examples for steepest descent

    +

    Our simple \( 2\times 2 \) example

    + +

    Last week we introduced the simple two-dimensional function

    +

     
    +$$ +f(x_1,x_2)=x_1^2+x_1x_2+10x_2^2-5x_1-3x_2, +$$ +

     
    + +

    which is of the form (in terms of vectors and matrices)

    +

     
    +$$ +f(\boldsymbol{x})=\frac{1}{2}\boldsymbol{x}^T\boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}^T\boldsymbol{x}, +$$ +

     
    + +

    where we have

    +

     
    +$$ +\boldsymbol{x}=\begin{bmatrix} x_1 \\ x_2\end{bmatrix}, +$$ +

     
    + +

     
    +$$ +\boldsymbol{b}=\begin{bmatrix} 5 \\ 3\end{bmatrix}, +$$ +

     
    + +

    and

    +

     
    +$$ +\boldsymbol{A}=\begin{bmatrix} 2 & 1\\ 1& 20\end{bmatrix}. +$$ +

     
    +

    + +
    +

    Derivatives and more

    + +

    Optimizing the above equation, that is

    +

     
    +$$ +\nabla f = 0 = \boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}, +$$ +

     
    + +

    which leads to a simple matrix-inversion problem

    +

     
    +$$ +\boldsymbol{x}=\boldsymbol{A}^{-1}\boldsymbol{b}. +$$ +

     
    + +

    This problem is easy to solve since we can calculate the inverse. Alternatively, we can solve the two coupled equations with two unknowns

    +

     
    +$$ +\frac{\partial f}{\partial x_1}=2x_1+x_2-5=0, +$$ +

     
    + +

    and

    +

     
    +$$ +\frac{\partial f}{\partial x_2}=x_1+20x_2-3=0, +$$ +

     
    + +

    with solutions \( x_1=97/39 \) and \( x_2=1/39 \).
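A quick numerical check of these numbers (a small sketch of my own, not part of the patch): solving \( \boldsymbol{A}\boldsymbol{x}=\boldsymbol{b} \) with NumPy reproduces \( x_1=97/39\approx 2.487 \) and \( x_2=1/39\approx 0.026 \).

import numpy as np

A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])
x = np.linalg.solve(A, b)              # solves the linear system A x = b
print(x)                               # [2.48717949 0.02564103]
print(np.allclose(x, [97/39, 1/39]))   # True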

    @@ -770,8 +838,8 @@

    0) = x0(1) = 0; // Set the matrix - A(0,0) = 3; A(1,0) = 2; A(0,1) = 2; A(1,1) = 6; - b(0) = 2; b(1) = -8; + A(0,0) = 2; A(1,0) = 1; A(0,1) = 1; A(1,1) = 20; + b(0) = 5; b(1) = 3; cout << "The Matrix A that we are using: " << endl; A.Print(); cout << endl; @@ -866,15 +934,15 @@

    Steepest descent example

from mpl_toolkits.mplot3d import axes3d

 def f(x):
-    return 0.5*x[0]**2 + 2.5*x[1]**2
+    return x[0]**2 + 10.0*x[1]**2 + x[0]*x[1] - 5.0*x[0] - 3*x[1]

 def df(x):
-    return np.array([x[0], 5*x[1]])
+    return np.array([2*x[0] + x[1] - 5.0, x[0] + 20*x[1] - 3.0])

 fig = pt.figure()
 ax = fig.gca(projection="3d")

-xmesh, ymesh = np.mgrid[-2:2:50j,-2:2:50j]
+xmesh, ymesh = np.mgrid[-2:3:50j,-2:3:50j]
 fmesh = f(np.array([xmesh, ymesh]))
 ax.plot_surface(xmesh, ymesh, fmesh)
@@ -902,7 +970,7 @@
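For completeness, here is a small steepest-descent loop with exact line search for this quadratic (my own sketch, not the plotting script in the patch). For \( f(\boldsymbol{x})=\frac{1}{2}\boldsymbol{x}^T\boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}^T\boldsymbol{x} \) the optimal step along the residual \( \boldsymbol{r}=\boldsymbol{b}-\boldsymbol{A}\boldsymbol{x} \) is \( \gamma=\boldsymbol{r}^T\boldsymbol{r}/\boldsymbol{r}^T\boldsymbol{A}\boldsymbol{r} \).

import numpy as np

A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])

x = np.array([3.0, 0.05])            # a starting guess away from the minimum
for i in range(100):
    r = b - A @ x                    # residual, equal to minus the gradient
    if np.linalg.norm(r) < 1.0e-10:
        break
    gamma = (r @ r) / (r @ (A @ r))  # exact line search for a quadratic form
    x = x + gamma * r

print(x, "expected:", np.array([97/39, 1/39]))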

    Steepest descent example

    pt.axis("equal")
     pt.contour(xmesh, ymesh, fmesh)
    -guesses = [np.array([2, 2./5])]
    +guesses = [np.array([3.0, 0.05])]
     
    diff --git a/doc/pub/week6/html/week6-solarized.html b/doc/pub/week6/html/week6-solarized.html index d9b357e5..bbd9d53e 100644 --- a/doc/pub/week6/html/week6-solarized.html +++ b/doc/pub/week6/html/week6-solarized.html @@ -63,7 +63,7 @@










    -

    Overview of week 8

    +

    Overview

    Topics

    @@ -266,7 +267,7 @@

    Overview of week 8











    Brief reminder on Newton-Raphson's method

    -

    Let us quickly remind ourselves how we derive the above method.

    +

    Let us quickly remind ourselves on how we derive the above method.

    Perhaps the most celebrated of all one-dimensional root-finding routines is Newton's method, also called the Newton-Raphson @@ -705,7 +706,57 @@

    Final expressions











    -

    Code examples for steepest descent

    +

    Our simple \( 2\times 2 \) example

    + +

    Last week we introduced the simple two-dimensional function

    +$$ +f(x_1,x_2)=x_1^2+x_1x_2+10x_2^2-5x_1-3x_2, +$$ + +

    which is of the form (in terms of vectors and matrices)

    +$$ +f(\boldsymbol{x})=\frac{1}{2}\boldsymbol{x}^T\boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}^T\boldsymbol{x}, +$$ + +

    where we have

    +$$ +\boldsymbol{x}=\begin{bmatrix} x_1 \\ x_2\end{bmatrix}, +$$ + +$$ +\boldsymbol{b}=\begin{bmatrix} 5 \\ 3\end{bmatrix}, +$$ + +

    and

    +$$ +\boldsymbol{A}=\begin{bmatrix} 2 & 1\\ 1& 20\end{bmatrix}. +$$ + + +









    +

    Derivatives and more

    + +

    Optimizing the above equation, that is

    +$$ +\nabla f = 0 = \boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}, +$$ + +

    which leads to a simple matrix-inversion problem

    +$$ +\boldsymbol{x}=\boldsymbol{A}^{-1}\boldsymbol{b}. +$$ + +

    This problem is easy to solve since we can calculate the inverse. Alternatively, we can solve the two coupled equations with two unknowns

    +$$ +\frac{\partial f}{\partial x_1}=2x_1+x_2-5=0, +$$ + +

    and

    +$$ +\frac{\partial f}{\partial x_2}=x_1+20x_2-3=0, +$$ + +

    with solutions \( x_1=97/39 \) and \( x_2=1/39 \).











    Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come

    @@ -734,8 +785,8 @@

    0) = x0(1) = 0; // Set the matrix - A(0,0) = 3; A(1,0) = 2; A(0,1) = 2; A(1,1) = 6; - b(0) = 2; b(1) = -8; + A(0,0) = 2; A(1,0) = 1; A(0,1) = 1; A(1,1) = 20; + b(0) = 5; b(1) = 3; cout << "The Matrix A that we are using: " << endl; A.Print(); cout << endl; @@ -830,15 +881,15 @@

    Steepest descent example

from mpl_toolkits.mplot3d import axes3d

 def f(x):
-    return 0.5*x[0]**2 + 2.5*x[1]**2
+    return x[0]**2 + 10.0*x[1]**2 + x[0]*x[1] - 5.0*x[0] - 3*x[1]

 def df(x):
-    return np.array([x[0], 5*x[1]])
+    return np.array([2*x[0] + x[1] - 5.0, x[0] + 20*x[1] - 3.0])

 fig = pt.figure()
 ax = fig.gca(projection="3d")

-xmesh, ymesh = np.mgrid[-2:2:50j,-2:2:50j]
+xmesh, ymesh = np.mgrid[-2:3:50j,-2:3:50j]
 fmesh = f(np.array([xmesh, ymesh]))
 ax.plot_surface(xmesh, ymesh, fmesh)
@@ -866,7 +917,7 @@

    Steepest descent example

    pt.axis("equal")
     pt.contour(xmesh, ymesh, fmesh)
    -guesses = [np.array([2, 2./5])]
    +guesses = [np.array([3.0, 0.05])]
     
    diff --git a/doc/pub/week6/html/week6.html b/doc/pub/week6/html/week6.html index 78b14dcf..7f9b21a2 100644 --- a/doc/pub/week6/html/week6.html +++ b/doc/pub/week6/html/week6.html @@ -140,7 +140,7 @@










    -

    Overview of week 8

    +

    Overview

    Topics

    @@ -343,7 +344,7 @@

    Overview of week 8











    Brief reminder on Newton-Raphson's method

    -

    Let us quickly remind ourselves how we derive the above method.

    +

    Let us quickly remind ourselves on how we derive the above method.

    Perhaps the most celebrated of all one-dimensional root-finding routines is Newton's method, also called the Newton-Raphson @@ -782,7 +783,57 @@

    Final expressions











    -

    Code examples for steepest descent

    +

    Our simple \( 2\times 2 \) example

    + +

    Last week we introduced the simple two-dimensional function

    +$$ +f(x_1,x_2)=x_1^2+x_1x_2+10x_2^2-5x_1-3x_2, +$$ + +

    which is of the form (in terms of vectors and matrices)

    +$$ +f(\boldsymbol{x})=\frac{1}{2}\boldsymbol{x}^T\boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}^T\boldsymbol{x}, +$$ + +

    where we have

    +$$ +\boldsymbol{x}=\begin{bmatrix} x_1 \\ x_2\end{bmatrix}, +$$ + +$$ +\boldsymbol{b}=\begin{bmatrix} 5 \\ 3\end{bmatrix}, +$$ + +

    and

    +$$ +\boldsymbol{A}=\begin{bmatrix} 2 & 1\\ 1& 20\end{bmatrix}. +$$ + + +









    +

    Derivatives and more

    + +

    Optimizing the above equation, that is

    +$$ +\nabla f = 0 = \boldsymbol{A}\boldsymbol{x}-\boldsymbol{b}, +$$ + +

    which leads to a simple matrix-inversion problem

    +$$ +\boldsymbol{x}=\boldsymbol{A}^{-1}\boldsymbol{b}. +$$ + +

    This problem is easy to solve since we can calculate the inverse. Alternatively, we can solve the two coupled equations with two unknowns

    +$$ +\frac{\partial f}{\partial x_1}=2x_1+x_2-5=0, +$$ + +

    and

    +$$ +\frac{\partial f}{\partial x_2}=x_1+20x_2-3=0, +$$ + +

    with solutions \( x_1=97/39 \) and \( x_2=1/39 \).











    Simple codes for steepest descent and conjugate gradient using a \( 2\times 2 \) matrix, in c++, Python code to come

    @@ -811,8 +862,8 @@

    // Set our initial guess x0(0) = x0(1) = 0; // Set the matrix - A(0,0) = 3; A(1,0) = 2; A(0,1) = 2; A(1,1) = 6; - b(0) = 2; b(1) = -8; + A(0,0) = 2; A(1,0) = 1; A(0,1) = 1; A(1,1) = 20; + b(0) = 5; b(1) = 3; cout << "The Matrix A that we are using: " << endl; A.Print(); cout << endl; @@ -907,15 +958,15 @@

    Steepest descent example

from mpl_toolkits.mplot3d import axes3d

 def f(x):
-    return 0.5*x[0]**2 + 2.5*x[1]**2
+    return x[0]**2 + 10.0*x[1]**2 + x[0]*x[1] - 5.0*x[0] - 3*x[1]

 def df(x):
-    return np.array([x[0], 5*x[1]])
+    return np.array([2*x[0] + x[1] - 5.0, x[0] + 20*x[1] - 3.0])

 fig = pt.figure()
 ax = fig.gca(projection="3d")

-xmesh, ymesh = np.mgrid[-2:2:50j,-2:2:50j]
+xmesh, ymesh = np.mgrid[-2:3:50j,-2:3:50j]
 fmesh = f(np.array([xmesh, ymesh]))
 ax.plot_surface(xmesh, ymesh, fmesh)
@@ -943,7 +994,7 @@

    Steepest descent example

    pt.axis("equal")
     pt.contour(xmesh, ymesh, fmesh)
    -guesses = [np.array([2, 2./5])]
    +guesses = [np.array([3.0, 0.05])]
     
diff --git a/doc/pub/week6/ipynb/ipynb-week6-src.tar.gz b/doc/pub/week6/ipynb/ipynb-week6-src.tar.gz
index 6a0da1aadad1199b8f955b4bc2cd5f7f553e24c9..1e3626b356333804994dfac4f7cfdfb3e593ab41 100644
GIT binary patch
delta 71

    |zwzhs7uw_>o*`=%%kDOn$NJdSija+8 zNATm&DjN#QaJJ>f8Mycb{n;IJ`|z^5krGod)k17S%N>uUSJcXU&9ON-0KZMP1F#

    vF>Ou@u*eRVfvNeJ!)FtZxSv5!MJ<%dCXxIeorZ+{-pVb zyXQ?~tISH@$U*X`W(y;(QN>&1{O+0gOjSJr{v6>zmz7;mFSCCbcl6&JoW_gu$(08Zdt5%F&*Kk<$)eq zCszheg{T*D{<}Riv+Pt`3Hh&|+d257gJc1*n`g4v89}VCV48ysK;Z}cT?0jc4Po`g zt^@O5?+o0*vswD2t?Hv+)$B5~jP>&8)N?bwTxINuD^vWgj$)2TcOQ7>{2ql}*vqxV zNjj!Z`^rwy^?5lgXVZ%>WQ3NIY>P3Na7)Z}L~X;zS|OWg1~2tja(!`+?0@&8nitf! zxIEcyPO7#7{|e6{0PaVZbeZDjSCjIm-Rkoy!x0WbPY{iagVvPEg|0nZ4-3L$$CVB` zw+@K*Ze=)5TsJ|<8-hyaS@p+x!`wT$*g}ld<4$Kcor{gcF@tNK!hD}EdH?!(5Y243 z{*28T+C-fJHrGo0Yi?Ai#$(uDm#RSCzRe}pOdgtsQ4?G z$rePHwiakWr<*$#&V|2mDsVe(ZT{*Wk9}L(XkApHuks_?mRM)61hxo~34H|ZFf!07 zipbI~hBhs=2sg2~x+D=}`MMz}if0_h;ojBCYdG&R55RTUCJZ8D@o?j8=@g6n%u;#n zb9YhD`+cNDlN}Lija16un{vBk-YDPU_TE!t)9FBUc*(8%L;tha7+pW_RcD zn0b0a&SGRx85OQn;C^X;V5H0&3VkK5={9t>Ib$ z3vw4d?-RHnk=BMESNw+kTbFQX)}+>}f6@MociVu}fa%JA%cciPK8Wm94bA<9=6kBl zV~E!PG!RSrtn3S~^ndyQ>uKY9Sy}1dV-DpWIlrz|>T6uVS^@|vLAMnp0Tui?0B_IO z0?fXoKIEhu>|Gz-zoV$OEIaYUfl7M1JE>_~Juth+`REG;oa}7hN>u z^a+uHmzkNDKg0Cw@#Hz>kad_cFneCe0?GzRhE?(s8USl?F}!B^tp zS7}sWPx8sC=L^Wjr^g`R@6M=@13Vmdk8wh+@M!V!@O{wb)AWqb4&8J4lx*3UTRp|f z35)%ZP!>^({z1s^>HZ6j*wN!WEo&qDtM>DoqL?lrf28g@2qz~e2)w5h#kt(T_ogPp zMFpwuH7a}9%S5)3nV=V7cy$$N-~8y|D|~|fLT=peX=7k8M{5I^ob4S{m3ZG8GDMvv zN^dP=^M^D%1z~pMYBK!HKF`&9VKDz-fa2;M9wRhGW(Hs7PJh+RNDmZ>>&Wu2Dh9ET z#8x4y?#BFzj?-)fp-dT?+-8Axz$^sv$ClFrK={AVg3{k}Pq)Co-%W8AL0R%Tb0cXJ6pW$E`2AYwbivsiTF~w6&XOt z=!U;ed*&={?JQ3BA-4BlCC%V2d%vb3xjwnHK7z*d2=1WE?c8r5IVQ6Y&w6fQ)FXk=COC-ZE83Gg*$SW2qGr`o@ihK%uxL^ zXc=Uj7WntO_>}0Lu;p#r&D3W^dR zh$L&uW&jjAeC~pHvT2;37xcM5TQ8*H}*2htTbp6G*E4JJA5> zq#Z&XsNyg{-Sx##+xVdZM+e^`1XvqFYuJGSbuXRw0DTX=+!q9-PTHr2^5m8`G7Ifg$>x5NfaY-PTj>$rh#IB^E#n>M`Eezu7l`$pl|B ze#-=7ZvB7_!SwsV0A)Q(=oe3~Ud>CV+UId^Wy1%!RxcD#WDL24@G1&r3tz|fA&Uqw z_I$?To!cNoY;k-whF@uYF??K6m_pA%C@u;$Zvk>VU-4RjpLm>({pWVNpQPh(DctX{ z;Fl>u@Gw6&a6TFt{}`SaFqlDdohw2hIbtdlH>G^Bu^I|0aDJzX>R#`0{fyfKS=zDP zv3y;+4Loym2md$#iP^cekm}X?@`3}naf1lH?D0Isb-w})dF=r^R7a(!Zes3=;eQ!8 zP6=i8#`DX6`x@NtR^&g}O8pSx!X3*cyWXnK18U0a$wpRH=Vl?^!QRKjKw&nmFL~*= zZNf3>p@EMr2k)XD788-W(JU+3Gbtvwaso9bDDUkBFj|J12Rgt&;G zOFxC)-&jDHIo!B7B(yW2aql3tir9mbUS;+Hrl6vIBVLAKtoelKAT61%O$mRxoO*e4 zTGj2X{il#}5OSu8ilfYZ2aQ5;++ef2y3H(2mY1>pplhUQjowrcaS>B?ogis}?Wn$d z11)IWi+Ef29QT{I6D9^H24Dc>Va9|dr)lzyx+y~Ae$140qb_AZSF^cKYf(Z@5+C77 zoc-NBaPaCPZnF9*jU_igfK=9%E!2x-xuDMRaUinEE2!I>2L`~V!B?2im z(#)BX%+}aOYXhP}F`2W*) zyR9DRTgbyJJ%>whf0L8U`yvr7;mnZrZm+=0PY=eq+j!vcwdEmqu`p~r2>9YQ%-BQG2Oacb}gL@^>^H6?-rq) zdRw}ZB;L?Zi(pQB29y5%>%sSxHKImnQ^w1b-nyYzKX3r+eCeyYsKtWIjk5 zCY(Z>%@SzX;$GD5T_Nh>6|8)PoC5uA=#m3r#e`+lAhlje{27?F^hjZTAB zr+aa8<>Y)HzV>Ejo}Sj`^TT( ze&Pa_L$0^t}zoC1{B4x*~?0BgWEw8Mt$2Ox!c{mYS(AxQx8VYk)lS0 zcuXH)v1NBiLY$hJ<>T*z7pzBS6;hAYl*9%aQVsO>X&bo68;Xxm2^q)fQ(~c4^I+HP zHkxP54JSHhi#(zDslflXF-XgF%zoU_)D0VjOs%3P>aiViEsGV+ZW<7kBxo4IK}7#% znO%XUPl6{UW*=SwuOD-RcvA@$_|MbPX zvO9_w;d)SK5rOKZzGjRMwIP>B!jfEk!iV!x^6%|3==>i1U9Lx7TdV0XQ{3;!_w_LU zg~p^dSzcyRZz%=y;m-5h+3#5p4zYgVfe$y5@=_)iG|CfOR#N7CUU+kAm7z^eEEku2 zds4>2QScb(tSyEECLS*m;st8HUm{NFJ=+dL^!@IB0Xg+fC8-BLqYbW+YsbXcih!Q5 zm&U-JluoL@=3qS-L;F#(ToCyS)UW{kt>E}b_qUTxOPs@tj48LDoya%EwM%1wN|eR| z!CeiB{@8wTLosUL$?L>VznxwwAerbb#6o_ZdExJYtW~f^iZ5&jOpT!pQ|Whkycy?e zW=lPLJYxzZ^j^PH&Sgkk=SU->lw~agEn?(*Ij9Xn$%_jin%|_)Cgrp%&XP5eS5kG2FOH_=A-T07}JQweDxz*T}x$!_~Tk&Z?C1=r# zTzB0@00+M#UGiscL!hw@w_@yH+chfHhJ=L=Y8}nN4EWRAnYOa+(qu~{(OASYfGCPk zvMoyrBaxr~h2fuJ%@(|H=7X6+1WV)U;UAB}tnMyqK`cV|4^~}b8T)de`8~i-sap$< zjUe=vI8D8!6H5teV=~&);`&gLDc|MP*nn*v zC`)wujwV_#D3p*NGq3?C`g)L7vZ{2nNMf-S; z4JcyI~ec=>3g>R70q+a_FvvlfcQT+0L*5_jHFg8s6+7bgBo zH%9}VkQUK8g#6iNNGBUcc?UE^D33`pR*pR`sQ~=-0j$Lh6=J($^TyXkb^y?CQ=SG2 
zKu-3V&TPgE1HJ(G9jbWqpGMmPZV9{}&q^uG)LW0LfxZONFWl5R*EA|L81 zf1o-I6c#U`g`9FHiq77NiQm6l3?z5hIL(H z9ltvZ4}asLTWv7bkf6$XSR$lp`%T?s6at;8YS1-%d6&9dB2Pv!?%gpN{jTjF1C6>L z9&?xg2Y;}E=%7vy%N3U);5|lmEcQF!?wpQKAaie(oCq>}=zJ)QFECU9e;96Zs4QT# zROBU$!TbdnzpNZ>W)F@9W5U2>!XxVuROg$-`gf|gz`FaH3&@%eOvbfq6m|(YUTGebOk1NNgP-oX{s~B4Dlx?TdCu%KDD&h-Eo*D$NH$Ixsey1A zuI?<*L;B7C%e2~u$P6cA*Fr$1``*<-NJjvl{v`$AFr?Hfbam96#dQ*ME6t(pK*VY) z3#dGl_Xyt8R}r3rSe^U6|*-ct-Jr`*CE`~)ThnHzZKy(g9*M%TJqK#hBTe>hu}hTw8U z9Z?Ewb(sa*vWbY_xXt@VDCfo&>#zEztt^Ha2I7g#H~(rHz`>xNH_Sot)!Uw7nEoF0 zt{$@-s}^;EcN8ROu;@|U2zH*dS9m6esXAaa_qC|2YV#u$NEU7SeTu18R0I=*XJ2-F zwv&>N7AFVyY#v}&!nR5G{5mcwTYJR!8qW#@VqJ)O3^Y7GKckjt+gO&dXJg4Vi5l~U zCd1GXWl!4+&=Qfwy)&qhHJ)9Q6t!2W@ZP`FIz)XuH09!Rv=tgqsBdIK|9AdO zhi;npcIGo7b?8nYr&;#{pCP4V;c)~FlKfU>Mr917O{R65?5e0c4yWe0pcQm1bBCaP zrc35$?Z}BKCVwIvZ3B`%iq7?A1`7KlwL~iKCjAg38&p&udP6M~;1UZVF$7ev{e=Y4 zlD2)?+hYlfhJITK5WF%T z2LiY&=QyZSgK&Kmr{09W1;v6f@%vql6l@`}g)3D)Zi?szP4Vb#ix32-&^Lggt2mnl zTnY$A-2;o?4*6jO(+n2A{+;r=>-n>z$RE(MCE`vKk>wlVYi}NLWmw12K`DJ)k=1pv z51gIOJl*sQ@#dyRKRKbEeOK9(WVd;=hKsZs9w}00;4EvHCw=rho2DR-J#dCnDi=kN z-C>R<^ zovRXC{*MC+`*5PuiE5ih76c*u6{S`C-0r;pe3_HtG=h~zuvBxQx&5E_Zb9=~`RB_{ z=+=4ZYD^u4#98M({v0rxSR6}uZbjOhRA?}0b+9oTtIepIreQx8zj%Rq6ib z{A%CCplV&*|GW){uy)@5%EMUcHR{3$>o%>jDGv4h=qhoKR3%K01V-E%1jEr*o+fuU z*|*1&5&NbbuIo!3*y)8=;|aI-W;@mc%jNkKb5`SIT!Yu8$CorUpJ2X?pipNk+oZBf z@bpKG&R%Dwog|Ew4}}1thp3(FF!N$%56*;g#w%03!Y?x!ec)T&p>PGkNh9SL0-1WJ zSNU8WHE;694a>uRtV6pF49e1 z>D;NeG>*no-5tThjueg8RV9i7e} zIV*87QurmMMwMc8NDZkCwCDZoDBsI7BFjUWPnthOSM~?A6-$P69oA94e9do(ZqK4i zPlNtUR>lB{8Y5CVJ(OC@!AvrhRU@2k)h!u2q`5-yn-FL{ixGBc8`Y#zsuR-kwHXm* z9AagyN9g^nvm1PtU*QHz8muh%#08=_md9|%UN~!%NvaK`Ks3UGeDYkuk-(Z zXW}0iNSUb(oNhZMuG?Q6M72&R{h<+mv#<9Q4#fe~5^DU@JbU5-u#-vd{Jaa){|84v zxWCx}-wjK;o8W0ma^qqzYSwR9(Ia~E#MzpBG>;@N37P`sGA{$uJW&HsEfpK9?AAJ1 zpP?%{QgawZ&d>S*ACr_bm)e&y+0Z3TwTb)PaGRw?e|qgyHPMOgl>4H-Kp*~L0-WDi zTdLb0hoMg`e;ovlM~cAlx5=Ldn;o;h67IkcN2CXiq*J*;Md<`z2Oi%Qkx<(s5(NvR zrVszb7^@%ZHVkRX-fa!C?;DzX-Qc@RdL3tOYZ(m1%s~ks=5_3c!w-=#v_8tyhbqG$ zpcihxHp(cohaOt`2H*ONfBky8fIxSBs&vlTA@WQ8f5o^1Q=hTQ89|CUTv4S4(tHU6 zAJQ*KXpK|Jf&&)?zUCU8=2BW3PEzQPLPP_`spKt}6cKac1p!VT26v|*p3$Oe>R#Y@ zw7`~RWtSimIhq68J5N@bJO3)~4`H8{6`=0~S3y*`PlL1pJoE7Vh7D^H@47Dv4EPsX zAS7ane{+4C<(n;kut-YTMXB#8(q5VQm*Z0CL49n4V|Y+SS#^`s{nctk;&lJuxZDmx z-jS1`IwDD-jiDsR5K^)>yhc}0g%s`{nM7Q*&TGIMQ|@1`d-K5 zhZuSZ0pw8_4OyFCq5f+@x)+ydS<--@a;IM!f17^}kpDFmmW61%;fTn`kGa7>*sOsY z7G&vFlA+M~(wV!0;Ic_AU5Ot~Lg_FhY&Ib|1;m~~e8;)-rl3m)i)~;^gQA4L3DmdT zd@3YwJspd;6AyfnCzVp-5!Q zf8>DSVwk04a^sk(Ka~7|VaqpM3DoS2cU4rFo}ffy3!X_d7a9`{D`gTeJt) z{S^exN!@K<-Z6X{eGB&qHVm-`ZYo2q4Z0h(PrHL}_3|sDYsXAS_1n_60oOqfigNhOrI0|Hj_tl`w>h5ie{pDf z&BtdRH4n8P2A^(SFV3_5x$sBRfBI*g|F{SiFnUn^+D4<_=#^1PMA^8cMbz zFRzxG)J&tpD>QxM*wzq@{vlSUVTPKh^YRwYpHXbV&&__A8j}TA+|&Lcy5k1@E=(F4 z!-LMvfZ0xTkwFqwo$ROKUX~$`f6<)aP%LZ2M~8T~hg*g7h5RpH2W*|pW7b{ww$Fb| zD+$jqT*OIchz7kH4Ejb4C-HK)*Myu`gj#iIIJ>-gbm7uhGDqlxs$K65?oZ$|(ypJy zM=YniO>c863cxNY6t&yiH^<4k!gVx{1)V|PuwOleC9ObDTAWYh@fmTaf9<>TFwi6H zEw&7Mz|}533JZTsmTRz$GuvK7xg89#E(>6bw2BBO6g#}O$1>Nq3DdYe+Hi%MrEceLpq5mTyG}&Z2<%YIb%91y)DBuBRgUl#ro3jhbAV-MbC{e1lgGl5p zcWpArkR!|_<1FTuC^yQ6=_&Wb429@FDt7*2%|N4fAhJW$@|K5BYuZTuP+wEph|(u8 zr)$h{1HpER2DrRs*pf4b$ZeW)ezl;2vyzOJAwGfTo7x<+)trYfZ+b;%X<2R4o`t;KgR)+(cN+k^S0 z=iyQd%*%;yN~Y^>e~MocYweGG;yJ=u>Y)5cHkORe`*t-C0HN{%96HtC@0Oo(nwkGMS4&q zN7{uPJu!eC*tXYtFnkkzklJ6Pq~FJdD&(*?MK}=cdx`fMLfztMCAcPM(KBzAiibnT zM8!Ih)lQ>WVM`VzqQb#=eyK4)(2$CMaoDa5>Vm5Wd=OI*3t^S`-{g*S(^muf^8}$! 
ze~ycUC&?Ex37JtJ8l5h*_AGZ29Cs9jt}8-mKUYKBx>cA+|0q0 zl+Q=A#93QJKDzZuYekuFjA-I9H{t8ae{sY4vt19JQ=Fvg_vY?It0UAHvTF{$7Upj! zI7#SO$;BLE7u_pwR(M!GwYO6*`2$_1k^|{Tafe?2-~S#G9%#B;X5-?7KR=r#WRBss zpCrCSX1&3jQar7tl{xSC?VY)9h#l<1#nDX0uV~t@Zh%4Oj#;?7HyA8XWBl&Wf81K; z-XVYF8wyF5Q(!YWUxyXBY2*6Y=}Kz!m;PHPS}42>m2G2-0agk+WdeeZpN>&OU-#bl z*6t;+My|M8j)w|EX27K$%(`!^njq?llcp}sv1pMH3eRYcvD)jsIAXck3#x}vMU{o9 zxrcu?QAswvZfr(#XFm}cG8s`lf13|`Bj8f)(T{8{_oaReIDA4?_Dez{u5$}d_{WX> zj2Ap{VZ>Xss-K^*iqo7kD0Qh%u81V$kbo<1@u^cM^1!5J3lGJU z;X75+qHSZyNb5jcX#E}%p164$GbU|Vl=7ap;5*bgJdI05)eF|U;d&0Me~iV?RM=uj zFe-Tb9GnP+vfVhwXMRpU%L6Y)(;o z;pV|<9>u-5wckd_m`smJReNhsh7++hTaPUF>++(D?u+A9kI6x7%NHs8iAS#L^BE`V zbawgs2qovt>cA9r^kgrlfA8O22E8=xu@M@i>UNE0Gmkf@X`cn97fxNUw~Yrgtx~9> z3havBY|0SSqs-1vJEhfQ*E)VzrU~G_C+gc_ec!ZIN!23X6)~Ds&D?f~${i)_4^fBR z45-|B&vhnH+JA^Af_B}HhNq9s?r!M%#R5u*mAMHnUArp6YQ4VFf9J7LXetLfJLFJ0 z7Wz)+!)pB<8+v_Og&}9LkkknzSKh&ci#AUpx@O!ji8b6lQe}vk*VW7teaU>hi6-Rk zcy2hjVe{$1%6kik>wJ%qQhh??3~KT3_m*{hsGKCrt7_x;kq(}gPS=D_g;7&ELKC5E z>uSXM5h72aTTg|fe>5G|(&Z(|E+J1JIAWyg&43$0Pm2}H!aIk4Q9u{@GUGj}R3s2a zQn#-2S`KT)(x9fb^r>-O8Qk%eyn5+|r%v-80Z;rbK7jyxrRNBIJq_o_J#yZOy5{wW zw+Egiayg*~drFc}+H@x}vz5g~btBumvR}(~P|;Lfdz0mEe?QiDxUv&64>D|~=dx=@ zAeo&)A?V`1Ub?|w(XKviODXNS;YB0Fw~$moi64~k?#QLJFx>MJmCO$tXkylc*&=aM zVW*rt-|TFOnXB28q>?Xv;@Kp6TDav~6}J8AG(wAy_$t;c#vUckY;s}IEYwPP_n5iq z^HfPx_;~2Be+X^h=6#9At@vbwD>4wlM(c2s)70}2=JjmA$3Xsc?3fY2M(4j@!Wrt9 zcZgMdDpQk{_e)!;q}B0^DQVlpeC)!X#w%@mdX@N*IsrQ}x~Rh)o_w|bQq7y!D-o&< zepFwX?6x!ERd@I$Dt*cpW*se*`q>+Tq#uP+`+aEpf9!bj`Edy%7bB{-`^aehuyT^> zig-vyJ$mZoq8ol%#Z+_#TX@c8@|yfxhLJra75r!VX{{|lZ+AVnw&O?D2F7~tlxSn+ zK%&$Z;%eYmEemb~)Lkf7zxxSvjT5=+J=D|6;+GT@_1vO&qTfQ%Zrp5Ko&m>rKP#kq z0xIlZe-}2ylOjl5lTHuGk~iK`GdS?&H@rhMiX8(^{GupSG(z*CTPDO4*2$>rO;Mc4 zbNUH!YdSUR$^iK@UUw)BU=sR$xBqfxfr+DTr!sdH?%-N+9Tw%#CSNbO-2`xRZ&7no zNy?tj{Jr<2jsKv($<{WP+3?sOXN4&u}{kymbVvUT^%TbEn$_UwyBZA z%#Iyul7Z&$p^!hwN1A(1N{*mJZn4)s0F?yi)A)mI={E4O$ z$0UcEJahcM^*|hRlDQ3P&>c)hOXHt03&&V`4eh2Dh@@Ly9oWbcHK`iTS<}x1sM^4b zg~C#&N!C`Bi655vcm|DLQ6E?& z$~}|T=ThsrQfeUoxLyh+BZv*rkX40&)4V|iU(MrSj>%z=@8V2Hz(~buo^0gfiOWSI z2jmrpt%5#ccX9&V8PkvfOAO5`d|kU~yV3?=vw8Cvo>ysw)7RPOO^|~_s6~Ihe_+4M z`LGhKb-7R2y!|?$GosQEd((g;Nh_9R9Z}P$@_1bVYj;^%Zhd@Fs#kzl+WpHt?&cHT z&NeLJ)vXgR9C-<2s1KwyiPjH`sLT;vyyeLuV*TuoY}vK}X_cGjn7l-=Y;eWh>~m7N z9zbPIw<9V-JNd8eN8Tsd4BvgLe@z~ckh#t+2GHcQvFejxPaMJRf6nESII|?Q z>P?GN3k_3%Y4E^x_c#~Ax-h%g=Jb`koKPRuU?QoRMe8-eC+bi4w@DDXW{pF7)c%gv zeCiV0OCmtQ+i=)N(7kcN>i*NCr>NoW!))x1UKD&+?$jYt+_w}Q&LmsMtXQgQ=}zDS z52{H@rdz?_#C|JvABj>Of2Wr7*js#DdePlYOzV#EYJM~`mvaPHUf{HA(53Jg%A>NF zFx-|1pDW^iaCgqz0BYRU9~h&xsA4crH>PPv@GrVD#B1y}ATy%rk9r!8x-QCHb)`!&w*{G=TW=Q|w&mz;U8>sc@7)Au!ADODeK($Gm$Rsu89+su@OgwU<` zfKKOAEPab?KLgRne|FB8>R6q=&b)_6oi)JgcubAMm!U%_{N`Sxxwhyy^Ar>!4r5?v zrO+q6OU9Vg;XNFEh~)YC03BrnZPGVGZ|?=YoV{i_Vhzyho)IEvI=BV^DB>ydh(4Q> zJh%rF0rhy&{<5;zZHRK^1S4-+wZ=o%nawaf4F_HCeJAhbf63ub_{C2{KaElg5kiv- zaCQ34uZ@uRi9UTYWOi1o{rDJEIMIH7=Kx%nu{KtCNVj;sL~4Hdb?b}0{fl`O2_`DU z+i36ol!00hi>}dDSqi_Gy7abb%F0`rCb=z6M%rJFq8;cwg;(rUX%4(ZC{(@iWC(1J zkhfiFx|6@Ye+arDPEXr5`rT4C#3?d-(80HxaAjH4Ajg{!BoX`RBe1qE-1D`jW1GR_ zaMD=`DU_p^!bO=Un$GZ=2!oRTcnUVxvK%o5Vpnar`k@L$=QOvInUwo2T!y&Jx+yKI zv+UQYD8;>wKW-3ZJ&{ab)K?e-Z6NS{81iE0+?$`se<5+TCJ7WUSCGE%*1w4n55fB} z?`}LnV7%e8CfL%gRu2%=E{3b!q(Xgh^wyg?jEC4=>k`vA?PRCSlaV9;<>VDMb!NDb z?C36;?f=0ArR~clhyK-dnxnwJwYFSCJDF0J`ZrvU6jl29^fLQ>AdATTkD(~H?~FXB z=$dofe_eHi*r9d1pZb!@8yW^?jlKIU7#)jn?h-zTm;0R!P{EiMX~t((DCPcGRQ+j% zUL6Si6~#+KsdVSvyqYbDe`z|UU4WM(+j88>eRNj_k0p1u!KjV2 zf8fbWpWrp-BiTOl$#KDr#!(v#c`wmAE>5I(4%r8{K5wOl!b|$wl-fM*4GW{EH03u!`lIw)njYDqGJMLff}@y@4pM6mJcGVJ!^^Pyw7wX 
zK7Ig*?pNCvWALhSv zB=8*Hq14S~?o~OH>Hol)yi*Y_e^+V3OsRT?a)N!SxArqG?Sj|QE@8EmMZ!ryEDTiv zvPJhnenWBLyM%RA?FrFb6f)@-d*j6N9_erIW(pi^nTJMxE%{36aBuv0^LaLh#JHhmpy8>R8)|{k<#__tbHH+Of0fHMOS`b# z`X_KO`bV(|7%p%)t1b%(Tve^QW{IqYjCQDcwsZrL z?WW3nK&bk}?&_?koXDR&#Yk+pIV>eRm8SvkLl2ZBFg$bZ zOtHd;dgKWb_7RLnAk=d0I`k1oGhP_kV8Lbub%76=M^)sOI6<0z*KlT}VhnS5D-Kmg zC4Pu{SZU%R5?f_We?1#`EuW%Qp)UZZ(|%j5Foy9awe8)%j0W6I1v;RQ^Fd;X(6J$9 z_+QdvDYu|7-!p_bm&8KFelFJ>J>_N%gG5(!U=o27VaXCT?;W_)twf z9}!7yAUKk*r!k6_erVjW^E7L(cgxLN8jql^pS|;m6@QK5f52leYIq=@>|ydj_J!K$ z#^inDLsYhDI@a;D?_?nT+~r4VOuo6w0P3jC5v~-QGM1UVfm$>1Tl}#^`_lOLyzehP zkR=2$MUOdhu3w-qt}xCUq};7P@p4&f%|Bn1nrS69jZ=~yy)%@1t=-`0fg?NE*DgeY zjl83JbGn|Ue~u79(31wo#rfSJ?~5?JSaC5|I(br>F-%=~cIK=E^CCIM8KsV%1 zD2)_Mh9`8zJN2aunTDsXRP5UiUy&GCoh=&ODC%I$+Dor-Ts5kcgOsvfh+Pq(9|w$f z-3FO#mNB*3qMEjrjb@p}!`YKvJ$2QIxaj7BTSdY>fAj{IH|Vf;k~08^wdJ;Rtq>?0 zzL03`mezdb4liB;d=E)APH&U}Sj_sv>Jl7K&t@qKsh_)x`)h|g`O0-k%&1;}NU?sn zbm0mf%{_RdS^5Q|Xh02up`PayPH8_=6IVnm6yp##QtAg~8P)d-Ywyk%<~&hr04e4M zX#Jfsf6yIOM)&K-tJ1cF`~dWU!9F&8)|Oe>@hfUyOL`o|jkJDkTO&B7>4zXhTRp6$ zSf4jU>4}tL&qs(9!z;XcX1+)ler8ut(9>%S&DFN7WW`)!s<5++M*|I*DVo>(QUiOY z5Gezf+7`h*nS-5)C{ri8P$y_6c2ix$t+epSe}&NcFP@!CcM-7zddxc2c-xE8>Z(4t z4A_TO?zh_&XYU6Oqvz0W5tt6xG|!jXevs+B-I{j`wH0f(Nb9ux1funO5gZ02l5v@= z?K`VMM7|4*7S;j%cvHhDuixf7`T;od@DojC-f`F(n?mAo!lM6}5eLwwq9w{h@?A>a ze+rhE{sgp3O#apC{Laa%^{LQ64gfO8OG1ZIH|hz&TniF!^mmzJS<3US5=`F zw#sGdNhkB9TAhxSg`M*kd0WU4>ynRj61 zAyS~O@dYZO8W?jD8MF!&lZ>rIImVSre`3+ZN8dNQ>u?N-sdXB?yR&e%6gz*GL~bpZ zpV%sVnc<22E-%Y)a;5621#H@4@=HtWukTBpcxtwAKXCa*=}sJ8xY52`*EmBV^T`G2 zt$|FR;SRuj^$N{1iJjiH?9I2*Y7Jqg4hndLRU3VL6NL=CLs}`)s$lXo5m~rTe^144 z^GyOvArqT$hkT-*BZk8k-{96whoN;#*uf!iY^|#+t~sY-#+#(1vD2zhDF1|fiAyP} zRG%yoe(b-1;U`l;`|zpH4(btCybOdsV%>)?WusHk(je}Vwf1f3n(C(){PzT=-3(sa_4=?yM(l47Tx*XP!I zus#fmSoaVm%R4f#A;Hn-zF+;y0_P-4X|#NPCrlX)G~z8`ayEn?o0?K4V-u?_7|7cU ztrkr<(Rs(09REuEQkHMaO3nVx;BBsnUip2@lxy{r$^t<5o#lwbjd}^0f8Qwsp6kaf z8baU45uLd2MuQoxpZ5%7)H&%h782l*MFl0Tx`H!u0G9S_R*N*Vi}mpCk8w-as11{& zp>eqmQywUlIHzNW$&-q!A2f1cQ&=;+0f!WOp?2=h=ES|%r-=P{x?K1}?Klj2m^E7b;k z%1mY*<4*6eYJk?*%h$yh>$OjBwnUomE&3!>xEhM8dhr&96SaZ-A;G=S3YAr)BFNM( zPWl*YfLU{#cllG9?tG3}I-x`Ss&7?`#$eYdBSe=TdDyL|X4(_KI3 zP}qR?PF2z8k*MGe2ffs%C%34tYe-Tv`DfdA1#0Z6xIki&!ZAl9XOXUDoRClm<6B2< zKu2MV;8#$S)yS-lz5MKI7hc~`btF!Ovn(v#&p^vjHT`4v0~xxYdYtOVR+zet?T5xd zjUxpn#^5jGJAfeTf91Hz(iBOH2Y2>;l6>!>&YI$sHB+X_q1say@TLg3rdK;XFZtg;EErAw`-_=qTPN3hr^R^7MHT{T~6X zKQ1KG5`6pjQSC+J`5g=#7#-lgNy8@SNub!EI_Od&j=Ud=e-cV0q#zSW)1ZNB5H8~u-W?Ol8`2v*&63Er`Fr;B@0bUCHPP|%wOR6lF4 z-ZNN{qB>+49u7YD=LscaH0|*E;(byrB+fYh#oEAtH&Y~xC$gTaocN0y*%_WfZ#JUl zJ;s5t!k1)X1$|nYr|%&cO=lNHUX+)4l7W|8$oQSye+!Jl4*JS&qI!gEba8ns%aom} zX1VFC*FBF_KNwgl_-)7b-TqS5;ildR$7xFq~Ln`CyS|{QX;6#^eRggL;(fgdtjSyMoaDhN(j=vpQLncqaG)t4siE-)^Rih~=XI?X;5e~{Fb z?(Ff6Cg*U3nPET1>DPQ9Mb#6K%3>Y~u-nEaFJ=tAZ2&1i;=oVw;PsqYAOIJS+0l8v z=+{;6+m*KX4u?rdoipE+;2o;nJ9CVF?nEVB!+n(>&O)Svxv<)SZ3sb*_fcUk!R_LD(2%}j!QTjTg`5=q&QaT~S;b|-T>b%#Pc@PYf8{Y2T{^ZwN0^BHF35M73sl`{~f3T#t<`Vqh)5N(Vy2CR$tE3Q90K zN1RG_MmTH3$GlCQN7A8@*31c<2?%tsqdxgtnK^fp>ng4%r5uVfj-wf@26ZOw)P>sB zi^m;c);g^7e+KS|Gi1DH&)=&)MTjo;Tmf|<1`+K z>T{pwQ00|Rs$&hqRC?zVj780?bcyh_nl+`6_l~o{(SuUw>&Ui5o?sxsQ!wnVJ8Gz$ z12C)F*zSJ%tUV>P`a68AE3Bg+>`<{KkH2M`Hjq-GfB)g^%AX>Z@C{Wc&N1DKu9yCd zz1>FBPnBa;@{y;8_tmgp*>VzQx0ZkCcoNY;<$latf^E^rYyFmso3O^41LG1$(?`Wf4xNOy064fVi;BhI8h@ zmVh3Be`{%Ct zQ2!d|p%yjk_X}AjQDGmX?#--4)5a^Ie`lSHt^1Uf)X1*%g$4Nr_1 zza((+$+7^~pH)Y25=7B(VE2rb%6=NeW<}ndmHdc8)dH#rM8cmJvU^A zt_hg+|CB_l^1c*$F!LzN^UFpc3ak6Oe+y{&sp+vueWvz~b+hDVAK&oC!pOr1hzY#8 zL}zPrbKy5I~}!&9r7SFg-1#y|iv_e>fd3{AbN`3bqC$}x3>s^mPkf0jDw 
zx*jeOFk__Lq>`9&MpBBxs_9PQKTn((4jd%WvKSUcUB~&-4d{MN55#R-(hUo5MB;I= z9VaOdNae-7?ofP+!e+t`cTvGRb7}@UGE=%F2*J&W1zUAnr4remy!bg5c17D*9?udl)k<2`f7L>zmM&E;D9W5atE9iPch@-u z9Gel#XR`!C`9sXErky@fM@|nMD`4=p6%Bg(IE!S-dy#QT>r1GRvWMo^J=&yBS;C$; zdheDy<#<0Yr|d31t$PO5e;CIvY_AWhz_C8^1XIl|Fa%i}bKj5bXNy|ql<20I8m#?1c1^0r&;;$ZBVyw-^5 zj&3p&DBr_e0BjGa@_$!lzRmsR-XpEj6Eac`)F_vW-CwV(eV=HkBeB0Q zTitwnI2PTs-Rtlhf9w4y5*GWpDTVD&bSc1ux9lz(FvDOuj)&S;j7e=bUt6&8h@iak zwkR~w@34mIW~7940`Js&eT(oS@Kqa86BVXmx8Wj=dF+DC^8>lMNc-%8@_epZgioVM zB*ut}vX&^vb<9XC`b~@}C7L6cm+1B2-q6@?Nt`Rn$c@X}f3l~SBP`n>U3B7w6!eer zEh3pq5S`3Zq_1E^^JZB9P`_tmm-&$^j^Ss?8H0&%j-i7OhNHY#&ZFeUM&E71uc!fK zrA7=%wsG{yqEVbe`GeRwcB&}yuBX%QsHF3Klerr9~-+JiJyk_oNK&P}crVsB?WvjKm5qby+;5buT z*U>2kIr^8_B5*hHHlxJHKEwQpq(SKcFy^2d(UBLWw(@jv{uoZ>?S$e~dsG>Gay4lRd=HHwVdJkM@}*CsF0-Q)8Idgrnn}}73V3+=(+;G=mRVZe3oeChPP4RR~C=$hqB6( zo7t!0oD)d~CygK?6sv4? z9&`rS<8~lvL!87dL@&rb$Nu*FcrvgVW`ki`e~)V!obDFf<0c$X)TP#GDn@8j4k2F9 zDo?B3>~~?yYqJg_=DoL11>j@KGNh&ns^Rg5{^RTp426HFbPS6RtSw`D_6&^4DZ;r} ze@u;)5w}Q#B$g9f*W>mow$VWV*@0;|Tk}h%B{i+sM8_WaOxLSA7nd3TxFr%0<(CS& zOZUyIH9*O$ia%AQUC8-fNd`~r4O`}kt?6vj7*ZHp2&t~T?o`hbr%Vqi$acGuTn0|y z^;vZofOy+veycri6vl<=TO&UdRHc1wqyE>Xv#pj&+60b0lk zf3AQ|V9DCxJ-vsxLVS@a9)Dx@nt*iM{Om9ZB zG6$~&6~gh(nIx@MG^d2E-`5nMnGq(3N zfAaKUINd@gW|_#-UKNR5HI^v3e{7Jog`ggU+LsXG^QcXZgf@9K7M`XOG*%~!=f1ws zcEt}d!wv{yrFU50hU$pxm&;&yVP+?BgTs1WgM5PkY$+DZ*gAuN#-COKC*+5QH;-K` zqBWmV!*^U}1SEK^Xk@j!yxrkfEAY09cs!{NNrWeH&X*MlIFarXJ{Xf2f7eLboO0NS zRbIZ$zB++C*_j`gTi@^mNagLgeVHb|4)9#BQ+X7w;c$7E$h@ZH&~-kriJTPR!jm6; z?^h3ZzD1i_sJ^N}=W4bcTaGhXT$fI=%3hG=0yf`&r*pc5U;<8V(3R_-8Ho-Q>rFM- zrY*F7SzT8J$j{oT&J{94fA3Xf(7)}>CKBJou5y5-s5f^|X;2~+& zts2;QX0BR;2cWeKf)mY(u=w{Hof<0o^6~SGSX$Q*mU;jc?$=R#8^53cJ<9fw8s-O7AtRB;KQ>e`-5Af@2e+Y0wp?kS9f-qr| zgYYJLDahJMx^h{)`f@m~>j0je_xr0^)I8zmn}OnM)ch72lj;Uh5NPN%QbzOhxJ{F{p03zUTetk?Xg7lC*blNYkQ@~0;UM* zrz@^?6(&6>mrH7!e}%Qd6LWJMh{DzJOg}?{bb;IribuB;RAUfhfnQ!OHkWxs0$BND9XD9beXu+OHRM^23s%E6(-%}I=occwSi+l4^?{5O#plv}d!!6=n9_#aLcj1? 
z`RYSEkf72vFz$a*DaL6@!X_!R|4PVw*Uqf&xexPIC4WW#g@?$vG3dgpdWb&$CN9rsB(sIf6kr>?rZ8Fm5aqz2{9{XRmM&=lOm`s(kQW+pqz^`lKXVPjJ3&-Mz=Sb z56>v&IE~s==Ve0QsLZWzm)eWA2po7&orn%L>%M&rjg>DN5?^T+kFw$-nB{vVa>DIR z%t7R!^09VUL8ih=viId@?$(JMdIjbLzT+*gWY@@1T7}Tr zFz^$Fr3S@F8T@YkmJ=Qq3Ao&Rw{Tf}v#Uo0R4$&rSDQ2H>c>a7(f`Tw-GPya*risI zzt&0*e>sP;QmGrRlwuHP;7v}y<}mA!XI0KOiGzMMl2S`bu^uYrydm`${Rd`LqGiI> zLV7-amqh{lb=ohq0PY4Q+aDB6*sd3cscKO3Xo{ZbHBwF%Sf}rYrri_Ruz0Slm@q`F z!TD2O3^tO?0uGR#G4^ef@BL3bm?!yY|EUdLe+TmrPjj;886S%TdmvMdy6v<}U}Y09 z+c($D8HBhf0K2qN(|Q)j1fnPR!w4G89QD_a<&H*`I#BA~9me?~PB(`Lv&2TKiwhsN zKQcL~WfLLZ$8PX!LD}+)Fr4reL5~uVO7QZLCWj>8_jn+l#{@@|t$AJxO&=05e|;Rd zfA|PfLB39$NXtVqNPN0t3cBpTR%-;u%NHYi^E~s1_#9I-ztaQ~9FQMHtj zZV+S-ydQN>2@4!js|)UVdkbDMJihT%6+gX1$4AlegHxB7j}{}9L5}Q*7c1vGaa065 zv1X1lb14>5qzC<(+WfipQ!M{1vlA+$Xya3e#$e==eh zl`I)Ea{U{c6k2h)nb?StP$21s!JR9;MnYBC&GfAZCv!^Mlw6AP15Po?2RxC z78l2PJBVI%P%L}-X9OGpH9S47oK)_PFvl+O<7D~vA*!;C!Muyj7qs`WK}+{ZHGj)I zNuLDS(KUMd|FIl9>4tgS6N^P=e>&z^TD2&2JaWEDlKPL}&0!5+CS=k=S6@_w)(foA{N$rh2M<`lxpW`dkgm%~p=)PCyDp$cGcf9tY~+byxp zu|Zq=1;!UQ1Z&p%Zqy(63#w;XQ=aVJXPI4ZL0K9oMR%%}aa~xSa%BB#4bMMQO2#|h zYLJ20EH*F)MLwiU@$`(4QL3AmaiM6Oco1+YA(YW{myTKt*J7E;A{I>h`l;72(}-me ze~C&pwO`5-h#RT_gB6v1e_H9Fde@DAa@GuVmQ}xYc=<`7fR{Tgt5{_d+n|^*G}ERI9-aLJ zE(}B~D&|CfqV#0_8fdxtxrHkGh*vNxU6!E_bI5|#O%TKatM`+he;h2WE@)MeiZ*?x zP5gDHyCE#G{in#UA?@i$UX>JrF4Q}ma$h!HQ%y{CO^s#l5?q(Wwqkmee+8|txm75Y8p@oIV&>DE zx5TllxBh$71^tZ<0D3s~nz+r9^Y5~MQ^Y{sDu${xBmKX|5KU=(DN4ugn$0dPmTr2E z0DaoK?VYCSzO0iRw7(U-Mdmib;TJE0VDG|u0eEvk%c#iKe++q6a?L#fado|nk*MnS z7(yDf(GCyAu5t*O#}YapX0}jla-thY{?q2NbB<70OQnc^p6<_tmaX(0x2h%wB0VQ* zM0vWhM=%1Oc4S%OAtE^cIZ-XkQ`pP+hwbp$`|MP8D#QM9q+DM5H)UI1^ zXk0H7)JcRFf0yxe(dt&inuy%D6a#TYLM0H|6%Wu>|2PMO+BT<{wX?2;)++E6qj z>HlhU>P@f}!_(r+*ORvzh=P%4fikvN;!Bxs{pp2#2XNJyBKRD;7e<>uE}N-!x<_d6%4E2znjiqrLeh zBPz|X>B_cVa9W$5F1M)FJ)fJ-M5A}UUpI6S}0Fq61Zt~M@f3LyO!Bt1nd2S}h`Pf@!8I}3dndZ}l zANh2Ibc6R&5SgY``!&{O>5% z&R+&pf{9KE)^Ox#B;q0&XD}eYjr8*6%=R$x!YT#UTS2tztGPACOt(#9wC>2m@$xO=XIugWws8xW<8pEw1_)M7a3B>*<3gJ*RH|h$_g5{> zduR#vP4?GU)=a#;QzWJHzokPDQQ=)o4VasEZNmk$OyFBoI@`f70Q4Kf93;jwXLPLc z0QV}LPQRIhrxt=Qf_VbaZm#D7xmeySf0z@~+?%`=Ku2j4)GLG>^5CCnlBdoF+s|k8 zzAG)Vfnxc0JS#kGcw^G94rqLhq2!R5#rwfT0hgK%U7j`860VC)9%WzAATEM-?xd-H|{^Rm$kgDLWa zjm3s+*s0dFTi`&fN+fQ`a>QW1f3MqF^6b6QgHow2zx3q-{qVKM-n5Y}Ud3^D$&>G% zF80`Dh%S0UD4xC1sEsE)qY#lSxtKW`K@+b!qw!dGZEXy7)uSs&#Eu4qisw@(lPMA= zesw_=r+ACZG0xv=!}f9>)2kI^y%OapU=$zvNrw4#Ee4VnX;{W^ZP(+bf6Udu_|8c; zT#W=J8iUu?dT%`*0iC;l;MzG7O2qfv9C_i5<-@M2BOuGR`lyc=*hD5fmrfGmbbhi538wKn&&wgndO(%BNoI zdb9J+!iFVoYt@zC2?r*Le`*GDyO7OZk!39T;^8pht4e};!<*-q9qJh|=kDkJZf#BG znk;+s#;8K*i&wql%@-wY&L;FX1|Pe9<#Iah9^ABI2MdM20;|m9bZU%0C2cPXS))f*9CEOK%OZi=4`(h8uhzl<*fpQJ9dw#{EKv2EL#m=h-x z+qP}reCvPlc%QmzpVM_tuhkFTr>gdrc#ZG85jIN_|2*7@$?k=fNZ{!pG@n=0!N(Rb51wrFSE__+&w8I|}_$&{gD ztU4l)2g}lBj7g@}MM!!I9<|3d8xD5M6KVn_R&c!Rq6*Qcb8XWOLqn*@F0r`g9Gl{c z$GyF*L9OvpBbVK=FvW?xILpIj@n>c8YxDyhl{Q0J}X~xN|N!NfP;xf3(g0Qv6*RCpZ;m2Um zueOVY%Sj+g9^H#iVRM=%+v&c9JT$#J_5aJvgfYDZP)=ee4IGph?L9KR5 zoNThz0p>?n-L(iuh@t8{H#)vD!wh=J5OeV`%GxLZaU##e;_EbZZ9Bnk<*~KIejHX8 zjB{G%3-%NQm|5fMt4}6$xt%4J@_U)+0_2jKOd{7}--O?!xyt~VvS)?FtTZ<^Sq5$L zpZ0nszIk1O`%YKOXIWJKVuco_Upra7W1(7e%A=>%)*NubcRD#{SHk0`tViWW+wkj2 zP=6EvWl}kDBefb_%(VHhL9~*;%+)Vku{6?z(Jaa zH(ni|4vpV?G1QaM@GQw8@;=--*D_-hr{NRKb{@ZN=lzQb93$#K`Xe*;Sm#5;OP5Eq z|KQ%GjLtjxrbV!zfxxkw>o-y9j;C8xu=C4E8xq)Ct7pB-U6&74TIa3VqSp1F*x^(F z2If+^Y^$EspjQdUWM)M$Mn3I6Fk$$?Qvgmfw4R_w`+tZH^=*r*tj(T%f$w1CGsOLz zDCcCKE?Kvht2Gm+Iasj#bJGL-BkF1RdZP=bx(_vBKY6}L7gHT}q<=M>F37K3g>$Em zUVOaYDSiD*{eZbp=IwddI0;q**!m@?%Zrkrl$B0{D6wXyjKP(dp&`ov>?Bq} 
z(`9OqaaL!Df@F*Kv1KQx0nN--8jZ0MbgMz3X%*|&v{!w#doipMqhMx<^d-8ufeo)v zov~^#zzu zR_b&=fG|tzy9A<(d)+i=W;^#I=Fv!HA(9-8LW)958EEE?e9PWNTL*b^Q=-qd@GVfH z+e+*AJcTcp93MWFN_2Ruzk&#Pu+umRAC@LOI$i zLiy|oI*BraL4MVaozk#+m|Aa$;^40d0s_h9u09>`S1lJ7+@nvf1h|QU3#^RAz#{Lf zx@4TBd-7C8_3`qttOF_3JDw;Qpv#v1H?QRiLYG-x3vFyW6V35I*1}u=tG(%#rb^cc zPlbhksG=nD8mtpj+(1SEbPvZ`6l!`=h^W)yQK!Tu%2f z=N9goJYw<|Wjj1aqlL339Oc(5LH4h|SbqbF>Yp(VTXL2H&;Z1CZ6J$&O|}*aawI=Q z5(Wx7uEzADHh$B?f-k7&h6ZRKdm@no%?U++l#g`3v@ieE_MWln}@eYSh*+0Tm^ROF)dAU46I4iEm)j-MOthdnRf3HImI zqM&lRj=}a?O>>z8M6#`*k8y)1{JJw{1mQPrCSDQoNEf~$~RN2j0HIl1)*)~1c-tXG;EXk`YPPwtdC+>!-i+p&-RYgIn#+$*Nm z5r8Y3Qvc0Uj6%a&%>At+UaK3AOX9d;s*l&-w^E1z@oq^Nd@aJN zp*{s1jqCs2U4;zai`)q}0LIt^=k}Qd1tW8yaanrBgNk@MHV5>yQ8C$j5ebt7h45>X zp$fN^aDvkUoLHE-t?7Muj1cTI_}mRLGtN zB{0FQKM>*3BmaV4;$j8!s8vB80`-@7Txa9#J!QZGKbYe?76$p?)FKe1FiLm`^am{&7nHv{N%Z9$1D}5qsXuXR--t=#vJC3qsg9Y~ z9mo{Tq*9npnc=PSFLn4{wAC<4fW+4%XgU6NjelU~?dr%hY?FewJ>_4gf!2GMiAgJH z6tIy880yQzu>qn{kTO`WN0Lh+Zc`>Bo6AU*y%#jXXH$kIcQEBnK*H%MFuqWjp{M!d zS8P3{MnkKm**(mn&v1+VIPEj;g+|A+83LmRRA&tn8HOu^_`JgvIGdE$-4xgSmOqW5G;(fHJ|-p(=Sw;c)eP5fj_gO@qRL7#4pR_hl<6jsYiiLvXnQ1JwK6Ek#`K z0jIxE$zSEBCM8un1MY}vv0zD*cDieuk#1lUV+lgl@;HTpf*|*T8p9k*)+j*bWLKl{ zJ22^!)yjsltrKt-@4jr^f~*aZ-Uu$>ttF)OR4pOou^O#xN^*;C_MLSils%4VOir#) zb!^!1Vy^1eiRd9Cg{;MTXY3Vnl@ae^w`71CK|)dDM!EMo!C#(utuUN$A4)750JV;a ztfS~8`Om1zSK}-B4!6oaAlHc;l+sjv@GlsmqjV$2R?^2h9f+c_Wea^aiFqx67295> zDT+V8n**LCO(9ucU_M_wD~K-gN9Y1p71}8_LoPPYX|{Jw`zB>$hRPN_G6&Zt09k2D zk~2OP&m?D8-lB-?2>mpjhmW?u?KVau&uzG2-ex9G>UZ|cxkAW9krFFzy7-89_w)Vu zFRba?&r9xNY=Ka*_G$*;ibxdzM}qKlWTHR(3oe5Y5CTZw=5g5$Vrj>G?EyIVLQ#Z8 zYb(`OG7Y6}wRCSbn!=Z#gW)(u;=M36|NO27I+19+eY#;i79%k@Y|m6Kb4Np!V4+>2 zLzS%O8ZHpz)9cDcPsEU8f^?4D(;uHZw6q^86{FiTwO@HL9CnR&dhnirza4WzHELrF zP0q)r){+)XopXsuiz9@&;0l!w`04-X;=F;iN7~d7U6mXH zi!_DeFtj1*M&3I(Z2DXSZ-xSTRD`vqVh3xjG9`m{ggK0e!J*qLuPwV^Z< z9wS}T#+$;6(yjxLcLH1BPt^oMCbGoM*5}fO_M3En3)AOi13G~fmhCT?5Y)7XjqR|) zg)LlvCr>yrvahmJ$g0G?>tozJ>=NA~Q1W=3pqoSvIc&5jjSf+HR~Wv)SGTzJ^UI_{1RD{IW`Fn#9@)wDF*6B+U!%PO&<83vig!Z?%a{KSzWgTcH`3nsIBco}@RzcYlOE^F5;pVroz6sq| zp#X;TB8x*{F7AKQ%{Es%{sCn^jvRB)ESp>mrSH^Ps&hSzJ+aY7)7EFaoLM=6AAM|M z*S|H?46`5B21krzT((iK@iB2T;9#jWd>oM0jdS9Z zz(#l>o%-~!(kRyxOxrwKQ_$XCX$y!N#XL0X<`Ls!lu>gk*tq$KiqU)cSvanA`Wir( zVV~~Qdj|h%CgV6iCi4*zA$@1u@>y$u#e|{*e42u~OpUzF$qCvVUq=dvKfB&Vz2soq z_Ug$BUMZSS2qK`3`@67+B>D*yOj{kLq+`@jMzw&(#gEp8gxxcGwP7b#t_aARAguccN zNZfB%bHZ#rrTG>iqi^EMhM6Ip^4Vboz$8QsskXc^*g%mo!X;9INJPZdGur;H{>Rah z_(03vB?LU_Y{Z+7Z%o)t_ftb=G?s5VZ|RdNj!8SuKA4S7{zGJ@#M;7|#k|E;F$^E) z{rUl7qtP~azHl>g7!t^Y8b~^YkeiHyx22R%alJh|^XG2`3VUn%zJgsnk4G08;I}@; zKbvnPLUS5z0jWmrUyarVp`<9JF}WSG78qkHZ&yYhppAZ6U5o<}4a3&g>E6}AQrBhj zH_T{<5L-a*FH!UXD;&Y=MVckClzYvhBb3;=_!GZEeu_Ct9?u2%=4liUw)@Hd;A@zC z3h0?jXqLmuy$pz@quby?f?`8m0ThYNA@#?%hF#Z-D-YmfFGA?th<}^P0KIG8h>wo> zm>eUWT%Z<;vF&P^Kc8cBR8|+e?}{Y1AD83X4B6B3DV;SFShqNl!(N+R8}LAecveWB2MmF_#$p0ky6uH(DY8 z;&cuZb5OvVCYiEAj6&Rc>N>)ozBSq7$hhDw>2bh3J1uE1J?ZUE6k%5KjmL#xmBmYm zf>zu6O-6)C!z%IMSpQWdr|@AW6LIlTraJDQW@3>xlZFxXuC~oho4uNp5_k>Xgs;f> zx)H^`R|zE2#ifDeB||pG0W=pVS}miJ^lV_vPYnC+NU>pM8M4K0r}+ED!?yD~t#Vq3 z-#0nPg<9vIn4UkkPP6&f8(yyI%TngmU7g63&`Qwn1(h8SbxV!Y0`RIQL_2bn)|g2+ zPlYkq#K|*zs8O=(z_m7MP4_X8u}KTVu4P)%YarZ=CfvfLWfzdH0LaKAoYw-1RA+=8 zxnaR;CYWqWu)G2J-!r?|_bVAiq$fjy%pMOFIIrO-!P{O9a#4Ho3~ z($r&3`90MW6O9JlpziT2{otesJzHx15>Pww; zgq<9_tJu~f)`U21#KLMWL~F^j+O3s61#h;Xcx?4vgn;-GJrnXD8_(VX(Z8Z% z2bwo!aMCmJyh?`668>RDr*Fq@)V2BEPQvbiSjhN%8*dWC*64x3silCN-aoGVUAqYI zu4Cswxe2k+01T)sl<_~e!)lDa6qkNI@#vY6g{ikx8*C&VO3g#_=c60d%$pin+G`2d z+1$-z*Os3s8qG8QWsj-flkj|aLk 
zwNN?fo0i#?aNOwiYXAdTkC&#+5sSD=_+BnX1G1v5Ys%RoE^bn_g>@dmmeHsB?!}WZ zV19!*nFGWMB`a|kyvHZ@@iP=v{zAB9S7++^$ zqZW++5z?0MfP(QNSxx8f`|*=>Dvt(bx$uLs9Sf|^hZPq{Lx|O`eUhAEEF|j<{^F}0 zCc*0&1vD`xJP2-M7cN&!j@M_dhb+D1C0#@=YkLVOE;aKFhiuW6Fc}xf4s*#@i@=+Ud<##>8sGp6bM& zHV)eN55wT4>$0}DIA&4)0x%$^Ad2lbxec%~QViJKgYq^&_xz$4NR5|{LDY>B#_Cq3 z3}@VExY#bM)RNy>N|&3+(8Rk?Oh`Q|MRjS|Gy^$Cs)P&;M;p zPE`<`gJN^upaX>eujFM}4c8v@d2Hb$vmS=u3jv$W!KFb3aUWa!1EBFV6yCt;B8Y{+ zg3*it8$pu5WxDGavw7G5tf~>EbH`XTOdmy@Ecy^CR%N)3O(!(q;un7D=DP{9GKb>n z;-CtN;+9W6EQFX$?ANgjyvosKu229}je%PLp|996jD}s2v8=yZE{|{U==Ymy_#24R zPY3+}$=au7)q_H(rL=>iwpK}lLkOpx_WqcI(couLz-(!CDd1EHtp7*Ak%*m{{eLUi zU+U^O;7T<3FW04~fnJa>`IKJ!8PXArrtS|qqm3q9V)^Hn{~0uiG?k=Dy+41hsw7@5 zmW)WwwsowpU%fc3_`9I7=yr--+0a21HI~`mQOME~4z2NbCPDkB&nnZ7#x}%V17k_Y zS0jY>R2y?@wwYR2niu_SYY#`n3uhz*K$3#IU7l7#O%o?1gR5AfD20RWOS3N&tbjgy zVI_>S37I*2rJ5Vst4*U|LJ^J16wA2~wCX#E1)O(KZVWrAVz}EOi^cR?_#Ul5IHVM+ zVy4BJ*R?<|q)%8hL^I-qKsp*VP)HJsve2jL1Rb8nvbNq{#cF~eFplC#(1Okh08<;| zVpJxLF*B<>icVpDll+UO>E^e7MIiHFcVNPO@4*#E>S&x%;F1*2F`$& zNU+AA8{h$}f16=2T&IaNg7W7e0FQvk7O{GS{eW&wijKpm@KYsdvFMVm*#K%SHk+JC zvi`rNSZN9GOiZGgc;&^!Xt;gbPHjb^&N;VJDJeK?OJmL8pPCz;8T35^^Lg zCBu4DE@sgqXw~z9!=y3wSZ{UT z@?KjlJj}DrcY}y9^?rvWtIq{=GCU05Y7*Jzv5Z7pk_ z$1z;xO_DA+jp`WEg68lg0s=ZT->8)>Ok+ze&|0IdT~>fD8lofKQT z_sXH@uOw}`$QOMBVis~|1A4(UF7i5>HiWnp>mHK2%@~cb?X7wch`lIW(W3K8_qJl4 zGvBBi-Ev9uTZ??1q5H19xrQmzN9#N{ro(CWJ>nuaCdYESm^N|*YekA-CI?9i6354( zcoWZAsvDpke)OObPz;t4;L>*t12%ABj!LARu{cU5Ab+y8jndJIh~D9m1-scv4>w6; zTxP_po&nl3=k%g32^fOdHdU!h`i5}UpDQLOtv4m2YMF^sjx)5ckSHgpBoK0_C5UQz zE+3&W7ImdJc>t&$;M%(ADKlMvlQ<3EwQ%hR4`pzm9FqzGdd5$c9t_0%WXKRhM>c~f zGsF5(FOSy#N_Oh6_Y{C=kRO7o#1x-=@=xkplyfjS!*bASyoO2BY_`Z)v33^A(7Sei zegTR2?TDr=341XBB#bPqpk;fCo*;_uy$4Ow2p6BvO4dfe@ABiCG7(k;m)4~N`oy*i zZO1}a8g`Ze1$?Do9ZBbn(8H>A9iP+I&!;N6LYM9on?eRT znJNKmHwcZhJk8kwj3FE--qU!YS!hV*c8o*`y1ACb=I;!#jAU(u%0Sh>W<{belFSJh zZK<*AHCEALdajgd%NAYbpM8rrCwJ#<>)0-zI65GJoVD&!rK-{L+Qp&6Bw|&Y>M2Nk z_AY*v99VWdcn0yysy}BRaa#l5(E@8#RR!xx%@dUqrsPgqEpy7|4XIdOfbldZ z5o0>7F!%^$mFANPOIBl>1mHCR+J60iy{w(hhBK*~XFP;=g6~D+I=Mjz)dZFRaQDXB;jGniPSIHu{&mu*bP7bbgu*cd-XGUqu z015se(ow@Sp110Kpz*!QP#-M>N8cAGmSHLqvC*x{;_-S@-tWWsniT#KnBsDO3Q~pS zkV+Sv@vB^UIDw^C5JX*|4pwg7SZ;jo)QlE@Uf@h`0CsaV`YFiG_6QfMoiQc49HcRv z8dyFvK$UNju8qZ1^lY8(ee~qw$QXdJ@Gzbf+bC#9Sk5ewhrBTq<3AnFV?MONW?$toB_) z;Yg0I<`XnQ3v*ynq94rq70aKR(#|OWDkVZ7{>N`bHsc5)M8Alwf&Cd$;lc`O5n8wh zGz2BTcqj}#cKCc!_)$8YHpFV)!W2CjQQGVj5&wt2VX|S(0mJEn8Z3Dgl;uAp8lhe!q>g>%5z#(0w8K%^Kr;o5BoVgvpyZZveEmL#-UG@cF#27xx@shVF7e)o z9J+QDl|Q_uNTqJe3z;@)nL=fIM?AXp*%?L6N{Ci=Z%$PbA2iF*2&1y^la_qPDTA$M zz!8ea&tT*WP-4R((wW@y>$(W_0*Ar_NedN?Nax z(ef%mGH;&H0#|egd2igg z#kQG5mCrKH+OTg_ZJx3+u3T%CIuak@`9IFp&qKDN}*sNbNRSvGr^X_US!8GVJCXPdCvh`1DMNou|MW zM}oCxQ>yJnUsxj{1g#eDnMHwQMGzRQrW8=HA!<8M)R#D^6VOJqTJ3lp*j(?MzvTI! 
z_i{pNqwHcp9K9qci-xFW2;26dz`A!WhghfnK@PYE19_QxgmRPu|!p=s= zqCjc6m8Dw>LUqa#Zsgp9)&#AnSDZ&9=4PMJJZjkEE?ya$MJJI_-*6Sq&hbMiWjNxZ zbTS`%;akRRxKy0eMc3q>7*+8W1rcXqJ`9)X#G=ulu@U;Bvq|Q%9Fn!x2ZeC#O3l4iQEyYLmh~c7qu*@hjK}f)r4JD*TIazqr$K{$^Q)o=W6i8eX zA-l+{58A1$VG*vD8~|HNDpO=`A~En!*$pp9Ux2s_ZM&7AfJfgJg% z>D)czh9!dC$B`cJ~pV9d@GQWvtDFQ9W$`UuTfNVN%0=uVUZgq5+*nb)VF+G+T zJi9P-#WD$^3Q`WHt+iRNF=2&MU$U7-g&R-uhlGv0dq$JWN371>*~+}}pPrHAnQkI- z#2%~{s1gARU`6JZ5|PI|L0;;bXU^G~me3-#oBBS$;jayR|8sgX2Lrc9C!j&*FaFWz zq~2Y2U}+KUH(pw{5^PBLs-yfW6j>)N+(G*yTlKQKzxzckf=o})pDC8}0jr+pr(AQ4d9#0vgnP_g3-(be zkjiqjl8*aaurfQKlB_TU`+L<%+-nmcIUdxnFYG}Xo3(h{(v)|FjEfxh#& z^wT$yA&RHSGlIQNx-EtBkPkcw(Fsjq}f9ig=4d%y3UX0Kkp04yFKk~phFAY&(2SN}f9 zAoJXv8*NAHCn`GZi#?=Tow(vFwH=<2o)`pv{)m>CDla}jW#UnfUc7jSTr0*vjS!M= z0o?6N7XG`&b}zhVZI%;wUCoM4ON$NOEIoAaFXa*tIzSROy?y3Vv7JK4Z`4rxn88mC z_vh%JnsU@1^uc=OLfMIymOiCmnEj#gN%J;+;zmfDpf5__W^K&e@RD7P{%2N1#F~;k zlUQeGoy~x+#X`RhH&!AU@BxOdOkq7G2cTKoLo2`I_KI&?WyMAnF4m9f+)Ev-|RFftguwAr|4K0tByZ1@GB;c#cA0VP9SYm5H0V`)u%a;jVbYeii z;w(yeUm8{CxQOZR=M(yQ)%_>*9r7*Srq7%#7VzaENWF%?JN!a;-yt3375rc90pB+_ zhx%%J@^ya94At=$w^`^4G#bBV(dgzJypbUMH3UUn7v2vuTOC-o-ZA+*-WsmKD1!60;; zl*vtOKX=BJ?--`L& z7DGBz(+tAW+zi|nzbkS(l>OKhMc9iy^t>O-A{NBi8gZ72u6A@|7BuA+0L^cDeoCBY z=OC3W8xV`1^LcsrJ=q17*TU>bm3LKq;_L_I3|7&baEmN8Vs4BgkBZ&u0u@CN%LWTN zMC0`c-ogH%REWiAUtT`GQaAqEq0cU$jy>yjfpw;e&fCtm-OK2hiTLp%^ZP~LgtVZF2xbvuUeV-;AKRc=&Vbsimsv^jsdp`}c}FE_%v-q_bVIlt$CW>a1K^JXNtDD9e6!;5lc zDV_^eD>d?>L%;sqD&e##L9b_^wz@AmF{`wyzm+5C=oQ$|_a0A<|+faKVMk0MWBGL9UQwYILK z;m+xkx0KMs!92?+yWRD|?(*hw9!9|`-NVHB(Q*>xUo7@1 ziF+2NQ$=X#me5#}J@~#{=CC~(Bnsd(i6z|`1ds)wZ}Vy27HN%RSW)#?9T z@#8bQBH%R#6cHb>eFCNEtIa*lPJ}#mYPVor|Kxm{^wjM!*2s&eTGc#h2foI)MyJap zROt5jG`Y47jpjK~TyH!y+x#>+rNHzD3MZ3*T4ZwiZmJAt4}eU4)enH$cSno-|9~4f zfS$D8Au~R26Xs0$igrK>@~Tn5@B~twvd7prUguS!vq(mKXGV-$KEUgFv?Gk3fML8(fox$DC$}HZ4wxD=e4@6>FLKftsGjBL=F52w*XA<;UVq5v-$c3{J z&SxD55D#XQYnmZ&dQqGPi5Lbh+JzY>hF$vX-mH4h@y1CUC))ffv+wy~h(vB@({p>T zH`ZA0mhxD6uY9{~)Yo}Dhn063bcx~$SBiF#?+Ftge|fWwjt^mgAbq7?EG%zvhU_8u zxO>{%DlET(wcou#`E-r}2}OErjbHFCn;}pEB(r`+KU_P9HX3r`xz(MCZMCv|OLR&= zLQcHr3wW}(zq3J6oJUZfdQo==S}C`%kBilg|MFT=TiF`#v6rWCUVq>gSgep>drOo$ zCc4(vd!8;J%jqkpSZ_|A3#plSzpC0??tF(AeAe=?97I)-1FrnQ%Xg!MJX>Yk?u zB+3DRv;JaT-E*&^U>A`xwbYUQ9($O|bw)JeIIrp$ONEwrR&cYKXpMfNUUNgw=ek+@9WZI+SKQ=HbdzLqBv3HBht- zNp91NzG95J&o1Wh4 z+63Hg?v8Hlb_}>ej$6A)0aC}%q%4jhme!lE0Gk}zj)1L$bbQ7;=6kGfh`+A>AZZEp zpr~mVi`wy?QCM!bSZ)f`-)k zm}u^UYmZ7uR+KT|1qKZd4G$rK5ASVx*Tu_AQ9hR^qv_2O#!IsIZeB4&L@y0}oOOh{ zCG{4Z?OcNupY0SK-ue;SB!@L`Pv-+U9RkJ_d@X^b=+DY^K_S8?z>C$l{8(wlbCAMQ z+8fuxxk4RB&kU$cMNbChI2~-nk2Fw%>$U1xj z6h?5@nEF>dk06O20~9+pP_c{%(4wMcRqu{2a5^zxDT6Nrz=(N;x#J&MU@Tbl0xJp= zTgQnQmb?iAX}_a5aC+D-+*}Ow0Jj^$zX3-Gb{2xraWWt`+$lRh-D_L^+Rl&G^SUX= zWyfZ@;D8O~CDY2&B18|IteT*5#K6={ky@yd)9`Sg=~ke$*M2-AVnb?zX` z8|Qtw%~1YU*dKE=FC`XzjQEf}t|%XKRyQ)|J84tFJO<(rJrg7^DG^x)595UfC||2> zqODa*`fpqKXgnIO6pln^OIr+U6}-ez{H`z~_fci7ByWh7q#?CwfivJ}X%~|}$LL>` z;OU@g$^ZF3W_zni9r!j#8t?p%Uoi+SmzLKJ4%b@O0L}-Rwzmk5*3tr=0-7eZ3XYcA z0?v>Y&<#$T_A{#AYS;n31(H^~0gm3P(hZ&pnkKmOKOq@fY5TyfLDP&6z|mTNLjDW| zo&1<8|G+aq(~2&BVordc1E&q#{D9w+;3*(!`wu_2D$IlbY1Ntq-vds&dHxyt@smK( z-rs%}cwPio1_kDBm0krm1qI<|PZRq0^CSZs;C}z1LObBZU?AL_X(bonQ>`qA;EMmD z!6)EEU_aO53vdE35LTwv!J8igl$D7&?eh)%Uu(@HxG>0n`0X<|H0XcE#~U~tC@@PJ z^cQ#l9SaK!6E`adGaD@vGbIxfB{dAAoP(*Tk*gUIl_(Dj6AK&D|DBT7`~@Bd%=KSp z0vCs!8-`Kb%Gt%0h@FY!|7Iyz*twa~fcwCy0Nt5#F6$hqBiC02^U>ujVL1w{x4ht=5esQXKxc`z7b2I6WeH0qW2(ZU(+r@rqSAnba*hhXvH7m zhlCl_iJu*DnG8Tr&QCyPH$1?hSb86@0z^8bY}v)ZNh5x~pxC$w<1!TAG+7nBVWlcY@!l5@LU>tT*7iJq!M*eB>ZO6eqLS|9rs7kbphVj<*4e{!rGt8bg 
zNB;F|l5j5?Lgo${je4(F;Fcup7a*$OBSGY~KHQ{jIt>==gFqzVmoPG5#ADBThORGm z|JNj|Z7z9B@~{~2ZL3*wzbmqYCC2Vvq3n&8YCpq8A3t6%F1e=q5;K7qw>C-z^MYfF{0-vpnxBLRm9xo$l& zrSqXrYR!S$)pfl2`@(+))rfP(UjCR0-E1&PMS9`z!He!9mE1!v5HXOZi*rG|;&v=- zlJ)y_Xgi*REjq6@*QLR7k^^Sbq>|+wH;y8Ar*s%Gk=GZE1A}PyJ}3GEl1C)_*VjyB zB+pb9g1En`E?^9)1Fp;4EHsQ(;P}$Xu{-|cKwX0QvIVZP&?kAlHMzT^T?30yd70et zNuSz9%PuDCAZV*GjR~>kG2ThM!^t$Yr+SF#&ecX^M16!kavo|Z-2e12%Pq2J48@y1 zOQj|&1n9GgP(-K156xQx(*`Pm-Cmf(V+K=Yao`mzHOE~Sijiw!Y?jP2MI^T}BN+D9 zkpQXmh1SGk7p$Gg+H_7ccQ|YuV9i}(ScnhQK1PnAwm7I_QAXW5yXOtZrIh6p0T*+z z*X?2xU;Z;nN-l`;0SE9)?{ANRpq8>ZwhqQe)tU((%`xKKTL}2{tdigkdxFE?B!v;M2Z;sDL$b?q3h)@J! zU;`+U*jJ_XH}DDuBFmC68?OuizuLI88YB7DPwVgW)d5hn0PgeE&Z7II_0U?(Th|Dw zrJ+Rx22Oh2+1^7D$Dwg2flEFAE`Ebuh|i{BC$N&YLG(T(?N{fuq|f!A6VJ9?3~lag z^+@8Tv%@9_0l)?U9wD3VZgKRSX;+{+Z&PVps*i}}|C&^1J zsbtTzT{|9#jXpxBPsh8zM^EVH9^~JB$c9VE4&bWL-Z#du?l%6e4(&dBCs2cLNLv*x zJ}?eX?u*^W!>vj3zCEksn?KQwAc%PDd+E;gUaVqZLI6@GE*&|?v^VzEi@m3#xBAkS zH+!)seABJ6CPx-EoR ziLX?+}+HqtEI5#dkN z>DEt5huvcRtIq19<6LaDb>6HtZ>THUyVb?np~c&@VW%#_WhwZMX;v?$-%se!mrtLM zlgG`Z>Gbunah3Iu>SM(9b^lD3%P2=WuQlq|g#k8gi^#I~zP!DN#mAoqRk5}?jukCO zZVGL9I#0vgCBga@`8Y1b z|Ml1qjd6(3*b4SEE`>eqzt1~S`rW}$f5mx*axPR<>z=+D>859rqZ8$rIg!|4V!JpF zT)8%h(6~)`=Fh)mRzzT^$9_hYT5%5u`6&4O+XYr-o=4r$`la z`n~K6d0f~%srWV3F>Owf-E2P=;KDJ*IZ2J&(_PEMW^J)K0tlqu zWA^*_GB_M}zacQ-PSnZ+h_Z~PJ7*s0`LfPd!c>I&N=iyVDtmkD9v<}^@fEI;$ zkBBH*^nzs>!#=yZIfz1VK8#1u2OOqs0#i}BcS|du?CJ|Nty(=_g{7i}d1~-W{R#KL z)K&hqhd?w$wi_+zB;`uUK+OT&gQ?r{OFPqU2dzx~bgw-n+3k_U%xqbWy4!VGFrlG= z7h8-(YBuijy25(DBS-On(6O;AQ#9(T?9up!F<{CBRRONqCV;mCFr`@vsQg$Z z0|^RGKxXoyJS}xq=SrsNoKAgkj{CL0t=!ldLrd+Q<#LZ(iVVJ}@)GoNX0qE}Gev@} z^%|5sTg{eEuawrj8irOjJ5zBr>IOXZxd&$zv6)}bx-<#+=X#>1cK7?AA5>IGWQ@mX zCceRF`5c}amj@LQa3Z)X%Tlbq1^Zu|&mBuOH{jXQ{VYqR=dvhuBFpruxLG&Xw8v4& zB@Za8Ix*QbR;kBhVV&4^Tor%20))dk|A3mWj(=a3Z><#d-x0h-Qk=p4W8!jp+rDIV z6_-YBZ!*P3yvyj43oDPO(j68NF~V#R{ZZ6UNyY=1ou8uR(R02$4XxS7Q3;RW?Qd*S z3ez_vFnN(L0}ZlC(Y2z4M@}s|87AjHIdPBxL&T5&R~GZ1D5AZAQe($^^rxL8QY>#7e~c11dW>xDv6la1t>QDZ?;I*_%83cwGP66D895#VpRs zAtufxD$Kzq!Yahg#m&mb&MnHsBE-hd$;`&a#!vMBp9250kgS=#g{vhI*Uy%I{*b8w zFtsb*Lsx98s5&#Mbo*RDLzJgu(Zu30WBoX4r9@^+bk)!pYj@tp(cIF&8tWowS zfz1&U8(TSSG+}rL1wh!zeZM$qE9>w8<_8m1V%!VD=yX9a=~8*=Lx;wdFzN6vJM2`# zmDTvmuI*E6O<7OSFOBu>ZloLDvyzhS*BJ7aXcj$xbhvA4K3UvQ8Gh>=}%IPn$>C<{H z62%rIPZW{RB=iHkO5Z92(!pn8jgi5qtvC1uic(Qd90X^IF#f46wCnlRrh1E?SKt^$ ztheH_=a(dOSpK9lM9FyyMlat?9uqo<+=R}83?V9Dl`)GM)bHRv(V(LM46V32>>li( zZ^GTG9j&pE@KDY;pMpz4)k6^Ck4DhCNNp>Fe6i7rXIv~n^}`rMj(_08wU9LwGI<6Qvv5wr$(C?PP~Lw)4fdZQHhO+ctOH=kLD5JE^C}HCU_GTQzlvwV#3& zPY%MxlJtO0m7oX(_|&}Da2gf6Cy^>m+pNr4R^7tawL+9eWCNFV5fc$rQQ0xv1ILII zlWxtLeHICv8W9li>C(cH1I6*-DzNXK8>75qAcxnI;gU6F@`Fo1BrD=mk`^JG3w$8^ zhxdbg!m|8fjTXfQ&FZ7ib0FG9w`q}F&v$z0i$<9oO1NnPP`RK$p}Q^;9Mc3Yf|f>y zHOWOu@E%(0%)?%v9+b9dK_Cgf7mH8Qb2KZ8!p0<%4bnOls~^T?wnfY}Nr<>VLiL$G zq}_3$N<0wPS9AW-<&|dtv=b|6XKq;gOj7mN27iOh&dMoAI*o1Kt66~KsO;UC~cNd_$oB+(ym zTK>{xfM#nbI>-mv{D)ZFlHlQcYZ@l6tMXb&i4C=D|6u&m!nlqUqxIk2!oIiZ) zCiPzp_{ck1t_78%dy_`Gs9u&C>YuM=HQFngHvJV{z|dTI8}xB&R+cK7uWsc1XtQ-! 
zRX@I@1F*m;HsWD4QwGBQr=b$})yFDZ>Fz}TFKYikxe#&f8AxaWoVZ>61uq-F3pslX zWt#(`i4KO#*)t#gl-1AbOblG3&%!lH<6xp9I%s|eOz|_+_ZCYYE<#3k4A$mz@x0r) zSd3+3Kmc@ys*xT{xeB<@CJ$=(aY~eGc)a$JNNBdxg*3uUPy(McS85s9{n zg7{p*_X$Z>!Aa|Y(tgeZ3Ievo4Odq7>>SqZ;b*OeRoxo~KepY4zZ&73oyh_K7t1_< zfMr<*oxxdYqmyAyt2apq%@j5UGXnY3IS+0#X4xW4$Wu;aR(g^7?g3LpeDGb|coyzR zjOFOKDeA{xd`D}GZrb1N*+*rzm)2EE&G5UMiP|DW)=$eBjuSVgrHf#@Od+fR8Lp+T zKFG-=aTp`ZvGzGV=m?Q;uO&(;Q&ZU%?YtJIWAR>b=AeQ_ksE z2cvCfdE+LLqQX$5$_7I>NBqjou`9*g*b0wsmAl!skU&oqXQnI zCCXh-Pdj^S2vSq!A7ZQ|{K4bbv2A3xs_!hz%vlfKw;k zV^af0bYz{kBv5*O8@)5y**vAP7%v)lCwtM;TO}BNyWj%&S4>Tb!m(8 zi|DhKeXHM?R2!?|O@5heS^oR?<2%iJjq?TYTJ~kN$NsX%bIITCx7S}5?w+-G`}2O; zqNY5`LJF6gET(Ih(Ttt*P;_Nnnf!pPDU1H=N5{#>z(Er8tD9pPtx$G%y6#r%)rHLA zhVMFH#q^2KzWa{+0nvZ5)lhTjM+ouwIfK!Oi|5U&jg&7s7EAmZ-XsQOny)^bVnz+X zMBWbCh?aDR^#8Tz+i3axg080`zJMZE^RevA?S#?n_~f|TiIvENyqv3|aap$OqD8E| zy}y*t3H!k)D0W=32gny!x|@A-!Qp*2hF&oeBj_DRnk^y?qg0qC_0yTCaO|?hxVOzG z5ogyHWPlSn`d64{5 zxvC&TGuC@{EJ2atJPkjbjgc475xF^2!LmIpiLUWidz7lf5z~)lY(F2mal42POilqQB`l~w( zsXc+O=MtUX@ZUFFZ*}v-UpK(sp2LC5Ee%qVxN0Byeju#ZB**R%k$T|dagcz14~GI1 zS6&oV^xzD60EeBZJ5~!}T1I<-i}EJ7VfQq0!L9uB9QK8vm}ey7&w>>Zen( zo_tB@J~CU3CchA46gs;qrMvMOxCuJ_;mM^t?$TZH(>2VcLe%zWG42BTx~79%1i(jI1Kjx^Xh7O~w{97WKXbKmf)6jhgNhtfy3UhuH6@%;?7 z%+SApPGDGh$sQgzKHt$9aEuJ_LsBjU)nwsSDdvGb^_Vpww29EpKudtRu@Gkp_*o7FafSq|x8ibDtr#suJK1NvLnGoq5EW}q7y((qgCS~8 zwGDZO{0dWi2`o<_dEemP8){IMR{nYKh3m4GG5s-~c~uzcv#kE?!QADj>rofw#t12h z3|e`SSMUXCU>ePwLK*$r=P55yxE^u<`V?v+L&qZ6HjiyS4fj&BUYh&Rgf1Y{n^L0MJS(NS)1 z*mFO)gy`3U)lMLSGjJvvam(zEK59T^Ik06;g z`!14p;pl)fl3?K}0DW6stMsSxw83iA_E|%bY6?J7<8>&OGeCNzo9;+-_n5;GQ@((OR`Y1qY4)RybdtFp4*hObuY8oNv z#LimXDyl60h6lWh+_t`dHvUk2%UL zET2{CeTKgc--d1X{nXfYI_qzIv+xZGFuP*mQz(?~ zWwOF+N)`I?0Vw%Sdfdc6Y{oxiK6&qsJb}S2J_WRMQiWP@Ok3_+6D{4n-#&3=i)$^1E1?HWl`b-Rz_V#0| z7yTyYpTo-#nE^!tum!o56Dtk%tsPRF13;1kCX$*y0k7@7?gEeJhbxFRo;0G!nSqAb znze@AMeYptu)%GFB?F(WuQw3iHtf$BD~@Lrjga2R$tLN}Kr!SB1cO^z@$69NWuPp{ zZWKhJozvqTu#}UwAOpJl+gbGjbr!;3_my92C;t?Ny+~b2`cnoi9pxb0?S> zGWV1ZjH9%!2K56ckzrr%ubq2S2W_-k^!s#7L_iWE7{ekInQx$+}Yf$c#G?}-s|2gL#Llz#&?h8M+VV{@u9o-v!u}l zRA>fRV&RSbxUt>101bm~+}>WFiJ@Au803gxP9m_8fCRu7IHcBu7aVx97g`A0YTs9- zy8pfoxCCAPZa%52b+MQvjTj$9vxMNN0v%O&N{9b7QyCodOGjd-(>Y4`etpZCT97cj zJ$b4G30St{aBw1&5|*#Gm;zuIUYv?a}O{UW7|Bfhb>!2ydZ@ix=*Q;nqU_F&-#_3j`k-kbXxZ zG@&FV=Z1iL!;~PA!0F#@a=>7Ng(^i2OuP**at(mZC&lz)4DAv0io{zdB^ZJm;ky@+ z%l`;A$b7TgHMR#6?A1^w!PpIxV0)t)jxGPn_FC&9bOmp6V4XGwc?-lU;t~k^wS#39Kvl+8CT!Pwf=;n!Wh{5b?$z82FULBOI^ z&;k>!FHADhcI6iVLs_c)CG2Pwm7@=`C|wjL&k*h9ga=*u8mo-*w+ZaV7>qyg9$`L= zLrW{{Pn#mkQ!F6kw0ED6VMDxE_|lp5Bn8k_=^)0Lrc54!g#Oi+LoO|yZl00PpM=k! z9$0lPbs17a{-|h-N`d&Ul^+l_n>P9b5!z{<8BmN>5eC$Z)<-`oZJQh`J5B&Q4}pUa z0)tw4Ku42fRUa?L8y(zz#Y{i~)6D!(Kx^PC1_tBM_NP!mywtV{ePaQ*l(p5wD;KbA z)_QI2pk2_nG!{2Az9o1yQH|dMy1pE3u9X%f1F?@G7{x_XlBjYtx*|CmbSttEzBBl1VXlU z9$^eVZ~wjE-z?F(=I3c;d~auT1X9=|jV74V&?TO*x-s3oGwG?v*%_q}Gx z71;3yJtL0}f44N(A-&3o3%!vrb>+CBNJ+oB3__NXd76KI*-K{~tNHZ@pelo|*mP>;P4ZX_xk-P_&`m#guTU0hyT~NR66^eX2s`8yO%qPI=wAcaAcH zHZ}BFELv}qb;r^bLY5G^y5c?rmm7VO=wfEBCl)xN3E$~@c-;BJhvu5}Fwn^^?FcY! 
z2V>2nwkjqbyf{+KREi&yZbrb2xrx4nH9_5X()w<@z?cTE7$`VzD3{*JF529+t8|&K z5HUgnjFG_)S;fME|FnVrr%M3fZ&keHG{;tOR1jvC1myqKB;p8az=^i5a~2nx-&>v0 zXK)q~DRjWaNda%uMKkN?;!D=r60rnV^EQpqs(k8TlN@Ad zEuX=n*rJD2Jz07qEN7eG`VoU0iA0ztmEU!%4EYZ+1sStD_&%5eK4uxo>|O9_bjmVccIjrPGZ z2A#-jfub$Srh#dT6q_mF!q!4jLnzu|>8lr7_-d$-R~;=JdjMPh2Nexrk5`-B#z;to z5$6I?LrjVFcw}$?@8a159RiheA$^S_o*l_B1rd^_*B;BIkDxvtGtqkWEkmM<+8F5U~CPdUUU%JN9jaaq&-`h)M^4|1466&Rw4rdlFnMOrE}igesC%@E)9jU zjMF@@xWAGkYGdr-@e~)mXj*GrzQ}gehrt-ZwzJp)v0@pUpu@1(gG!;Em-t{!jf-NG z`~#JgZdT(~orY(WZN&~!PK6anH;Qg_MWm#6haF;l8qXD1mf*)?M0@p{bRZKP%I zar;C8;PDC7^{cFTBQJ|{HZCYzDnwEW2+4yq;R}jwl%7wRp!kEttv>FJqg4RUK64Q`(v;KZv zAd<}O$+MJb@@p;1>&0Y8NXrXWZbjo-WDW%bj8ENOwr%kXOa~9`U9#RC*@AUHtLF~? zko%vM5dE#j3pX%+Q&-d=V6J?-@s(QYy7Dx>=xNUbYf8Yzx=p{UJ&B_#j-)g6U==&& z`ny5m;w6%U8w~vB=vv^$Ai1 zkO>)q=RnK4@}DmJ#x=6qiVXGCFJJyZ>TrhtQ*G|f#DmPo@@GN-?Mj53ygHMXSJ*tdxc5S2BTzu!0S^A$} zl&&R>Je3aXe#Lg)Jh z!ljjp|!*Fi0=fz5FOP+;wT1vs5UDK;_{;)(}3zi$me zss?s>Y1SemMv@J>)vBuA%D)Gu8T-de@ULrm=A8C|C^Lz0R@9y>UGm{ZQYeX11xRKj z8T5&BR=vBmk99AtSziL5bMO*)lwo=18F)2cAAk#rcRqBr;=3J4gKR^ayiqD~Nx4O{ z9s{%$OuBn|?fwuf5D)GabFP%C!`{!%ZFF02^qjlA$rIP;siyCLapi=h=DJZ)uj02e zZCiybej|{SuLcmM>pt|;h_7-%=tIj_#n}p5rPZB(h#g<;1zB<}|0TDOEE|#MSA3P6QBxHUJ7nA%E+pr$iY07-JgOS(V|+BYqp? z0@25g)gR6n4ErKIh%nk2)f@~o^?R(NW8bn0^?NebwLG!5_c}i*Lyhd8+JXJLrf#iw z+1`iLA8R*zZMvdKMll5#e+csI*IX@v`2a(YW7Fn&d5`Z!^6xwLkH5m0C<`Z#Yz;bn zoq4gR6I*da5lF=$`51WN64)002qM4`D@+dGPrdq@}F?hC3T#0+|2c7%TKYxS{%3E76-QvZL$^Cu1i>IVOPZ zDngp-i`9x{UZYQC4F&+R(pH)IZUBGV9%oiS-jF;}N=|I$q$iQjCX!KT50Z1^^ht}A z<*-NObAa!U!iDH`GquHV`r$D~wF*X{wb7)h)E$s}WC%&7(Vi{m-0k~iNGQbZx4rKR zCh;oq8c0DNf-rZ*IUa#YA3wU;FCtcz7_b0{fg8&=E*{f~0 z`fI2g(}C9M+Vx7Uxs9*e+r6A^u1$`#wh5fhL*D8hn~hE?w-g|q^`+JBKO@8na_U5& zH!0I72r*$`xHTWf<}76m9k)1`;T9+;qOVM=ybh*g#{q`|D3&17{gmq}wZ6P+608y+ z@BZbl9S=$xvSrPXOsS{0>Hu4pzO6qPif8iB3dGd^D!q6;6Dw9Ff??!hO)({Tiqgo_2o!7Kkm+iB6!(fM%7|Ye#2DoTam)#)$W%6ierytz zRRxnP2=OlWays4){$kR#I!V~7^sjnUBEVvR9SPs}tR7gOWu`HjvRe0EvssB*HZ@fklZh+46 zDqnCfV@&!ZANNTI%>ckdgyYtB3KFxkmQ4y0?YmWIE$bE7iMGK`TbeUjV(Iz7^rbc= zru>!-Rfx@4PprTN=La!-ehI|0RKedcP|>4nH`Fs#oyV@(rWy%)X)G)q^Xyx%dN-`_ z4H#u1&(-$`Ush5H_xq^Q?)~Dv3E&YMsnAP^!`}`KRs|#yv233(m()9&>X(mI`_k69=GLW{n{9{ZVt_pR$Q& z&G*1T!aST87ocJb2w13EU7{{ zso2hqfp^XgFXY2Zlnlh97_AjX#k_krnj`3s!2uH^4Zs82V`Q+Hx#FuT{KoCK#;DZ+ zV(+67`jcy#nD`&@0&@)0O8A1WznVpIXlIp3!}&rrD0mu5EZE!2d(cM6tXarLw2z(F zh=6`)VS~J?7^~S2+ql9Bq|2xeqG_G+5w-ju$v&l%b7O{Y=5)DBdwn~d_RDes{FR^Y zb_@mUcfdn6#zF$Nrk;IvM6|UYVma01I01`6l9tU7hVW@v5=cGY{#B zZDIg!BmWw-mwmi~RWHCrs=FO$u!f7Vqhb1){)`?U)oz_!v9~`CT(C}C&L2Af{ecuT zMo6yQJsPPvq)-7dWzq+d(x_=)YMD z2js#SVT3Y}MRb?TDV1l!Sb~n;USltBt+}@6ouzb9-?n*!Kn^Q8=^YG^?>E_pF_+*h4-cT-g=9}M6EYFOdKPpox7##d)ipK)@b1daX-PKf2qFSu1^7&{MWQFW_G5|E>32~w*Pf$ey!jT zSShHb zzOGb9>{M**WQ!J}V~7}ssyb_HkE&MZZITwQ>p0o;eqG$3R@xHHqmj$d=5AN9Q3J7T zS!>`9M9kc)uE)OS^LYQdwuA|h|SK9(lasYrP+OAIsJMG@i@AuoTx?Xe^ z=V7bb1j^kAn*loHRnJ)Ww{h8UALyy}nW?<;d0A5!ME$Ei^v_(W)2vI>-mtpw&$oZ~ z=itn>6p2_#Vys#8?W7_)j8|E6xoVfxCB!mN_acgqv#L}%R)^8`{2?U$S&k$r_i8O= zi#CH6#DEB30a%4?qb)>e^JJ`vfJo$75N-5kCsY;%Iv;{$lK^E?;`1Yip=g+_CsZ>M zVr5k0`3s?1l7NOWFyv`d)Jl7SVqf6^fdP7_N{Z!4>L&LI`9iUPX`_?R;Dr1x4bXwL zh;R@|l9*|XK-A0hvae6$Ep@`yIQ5=~fJY_QHvlwy4Dl$!H9&gYP|3ttJYR|%>}vpw zOOX{H1Si61L=>No)% zUGE-2CCK##6y-!PJdj+eSVkS&J751;XEg2~cun_hUgwi;SrE3Y9C+I{`s{zO?+?|e zg~=1ND5{ARGSk*@ffe@WoTG1w^pHnJXk7}{=(rqR#)NP|Fk1(hl%Sbt&oqf{06!s$ zmhBatPKGY(s%;UbCi%%QpL1kFJ_9V&K-@x*U0VVBeKi5F#V?|9oL}E4vS2bnvP+?9 z{^jdZObZwU{2JPBGXf!+q4!6#D6ry8D8GsE(qzxlTj19~nj9Ft!NPxh*X;=H>F4(H z=^?BQ`MriQW!kS{d_0~BFH4{70auNB@0ufSSN54Lhz5t-MsG~o(stp^qfrpd{2!nJ 
zF?}3&U4@n3z5$q7_CD_~0L_D?FvL%Y@9&Pymo5Tl=#s5t+a~_!JVpEPd5A=V(MbL1 zeiBW^8*~Wc)X0s*E%4>KuTNdtr+i~V$B;Yf2INmC)oV@3pg_r+?E;`H0NQZV-juWy zi9Z4~8&jGnh~pVQbwf=*I_iXA7~cmQSzv#0PIPUFjRsCbZWgGq~!62@EfXLm@ zBz4tm!_B1rmG0y)67+kxEA|5L=7WRb?bI-Vka(+0wUVpTAd-vNfI3#m)+lW|{cHX_ zg-2NzW2bhNp}P0D5kRmVKm(@V4>x0sOVD+=w!pHmbzRhK7*!lb?$?L7SEVoAy~89I z+cJ(4WVXjl6=;muH^v86n;3A6vRK`RC>+CB)p=QIa`W*Gki-2;I`@{3gs<0p zV~lBVD7J&g^J}~X7mIjeXK)$p2vjN}W1*u& zZ>Qu77(YvzX$U_7Fdrp^FuLm*U$o8D^pon6j?RG7;z&2x;mne%$JGr+gB$Z_vGWK` zSk)UswJDI<%H)a=@Zq3LBH;tX5OyJw3r(azolySD=3j|JQmmDLmI;;&XS<4^x(kbF zQzp;hFK*{xiY$=w8-PmUKD!3hZH0{f>Tx{KcV9EI2Yv_($cG9G1g6Gt@++}E%`+(rdRz8cJ@X068W;VPM!PpWIru@V3=@fVz zjzxZ*6qp?bI2U9U{`ix>1bRF!1F~sBOzBYv^9&RU2=+18nKcJ%cJsqX!XqYjU)f-7 z+G|P`dqD(wg?dz25u{uEcy~ITMONg79LLU$h!w*e_sSBe=NNPTy~0Dqe|TK3gZ01> zXpIGs+du%p!&00sL(AunIU{3#yW*%T;|F;loKM07%Ku^^YwZ$nWHsIlq5_@W^T(F2xb)H%VA^e=S{kr?d!4_y2mfV; zTH)aF7f=o-zR5r2BYf-S7c7G(3e=X8{3Ck5TE)6Bg@#7gKfI8r>9)W(D(On>*OECg z$~?JQMxmm2XrvdJs^P0XUjTtaXb_=~!wlUHh{8o>o2rUW=x#;VL5(*F^v=RC zN-CLGzXbC&idf)rKJj?s!G}h5Wy1M`!j?cei&vMijYxC!n<*3pWJWZKfl*6C#`Omo z8aaRZ7o-XSs_6g0Tvo3C!CWqm{|j?>v~}&bIQ|E7=Zm@}=91?6}l z(PWF%s-c0Twct=egu6<saGjdUHFXDRogBX04VQ84an9pYOGQ zaquKus&u5vg+P*0i7L+E*8r|LhJ3JGpmHl}lF;P!^gBl=zWK7jWjU0L;wqQ5vc1e= z$OGY01j=B<+m)aaaEp{39T#;G+M_`Hj(LxNKK3t3IQzaGw>!@ICSU(B!5zzGUCS;j(#(+7-k{SkS;;u#` z+vYBGVLJKm9mLrv{F22LJ?CKNH#5vD4SS1z8pv|0+EKQcoh%5ZQ8I&!G}os_K3J+9 zoe3yOU(B2vjacPatks{~oLW;jCxT(ZLzxLJMpd6weExQ^_Oedel2nj-by_dQjxe)x z4Myqs8E=j}EHui8PQbNzBe<841ZJ~#Z|*5W z)1%WtYFj;QRHuz!+DZE5zrQ}8C0g8(9p21ArZkm4I+!PaHG?C-XjKi%0)&^JT*e5a zVUlSMrX-HQX6ycibx+1D4aBYGW?27O>$Izxx$NDkS-8?i8UW;bKqXtjobl}B7)IJN zJ(8pwrq}mqi27-lw+wp1&|bpM(Rk-4dTd^h$$ctu^m%fybF6%AyM(uLvZ|>V zPGaS%|!ze=YTJ`uSx2PB=>nS|D?WjmwgFiJHMkXU_Ozc zuRh#JuxCfnloLRVI~;3TBFg;cE^aq%Dkkg}N9DQzgM63F5s$S!eR2nS$X@de zW6IjepwrTc%GSwXmW`u>W{Oy*R3{KbxcE7y`r}5(}RnfZtK}^Y! z_SBU$pQHYpqN?h!l-R>@@ym@f2V!@}2vqxbeF1?K?yw*x;#$>};AOYlm!Cy7d8N(K zrlB&c=2lT{!F$S8gAQNPuC?`7J9am#EsK6H_}}(E>H2W_7|Bnse|*Lj@)7=@Gr*G@ z5*tN((SpGk+iZGDS{cRRX$J!)6K-|s`w$$r38^rbBGeu3!Nv9Vt@u0^Bm(Daj@r(M zy8A;%6I4fm_2+Z3K!$BN&|g%qHe{wlf>ZmZZ3+u-pjCS3bdvw;pr-o z>aw8v+!PFPnk+H-SUhm(3k4~vUVs+ zW=G?_j7SSNk_o}oiFCLuzYkI<@XcSOdB1eeJkf=$@3QeuT&~Ebobm$Wc}-;fc`ZO? 
zHBOV86hDkmAcDPjF~XR)?g`3e^JG)p=K<5ptq=)TdVXD5oZ~)%is$MbqDbaNg7TI| zAAXxx5&H_0ZpkT$Drw1f3t_=-YFg4l-#?%p+N8g>j(`zuTYKCwY=RWA^5K=f~clc;A$Y{yFdVjl3<6rR+w;su?A{ebm_~fJH zmSnZy@px~v`Vjxwcw&}kWOyAq0&Lq?m_m{0Y5>V7`FIeyO(@@bAt_|GW-NbIcn!B= zk?Fj784C58txCkgA?ZYF!SeCENkXtAvINqKm<>gtJOr0A@N=>_xEyKuD(dl6HD6XS zOY3kvqO6CF(!rc=by5Ke?Uh(L+?6R7f4mkxs@QriQwJolgejBw^dtHx#dQEvonm}= zK7fl+W3q6aD%szhdJ434Ha%M?-Du=8omDGbUW?r06!{-0dlC|)A-GoXFcb0=5*B7* z8o@NF7da9=nKAgEK{v-}IBgH|%aT;td0uB6WG~VoGP3E`M*4et7{Upib&2q3;04c% zfE7IqW{_#p&n6SbFzAtZ=uME2hj@?YFF;47g8GZ^Q>XHZ`=z}9Z@0?IgNmHH zT;&?)T1%zEDCSGK*TS^si%-k290;Cz-KANRf!tLIQs0fIcHZ3R9tYtHE@`in2)qEU zaUi$^4b<9T?CBH4Q+GhoWMswn^bKXkR_NxbO*6S9ubrCj7TxTXcvmoYm0!7EHNapW zT$dj;qm=dq!l1mssy@#JM0Z^`=U`+N3!mSp>}mo=RRfSUv!@|Bm)8cXCb_gyouOH_ zqFCpb{9DJMS#@=iS9MvcUAO9`q6rA*gN#Izw&23!EgEU#Dllv7lf!IUIq@nio8i?u z-FcZfnQ`LKZ=JhZ(SnGnZ zT$2>!Ok)Ie3V3=xhdmspU7UCb)YbL+?R0xH8|bk=dHo}x@cW~F0-U%vXp56d|nNaBlYSi-(|BxtHGQQJz&C~h0 z2~*wy296&8x*0IKYdH~0|GYo-k!f9oiv>xQ%PT#-tgLMOk=h~=^|wO~hRq*mz)5HT zvr0-;Qw9-_cMz94#4>84e7MKzTa14D{p1td@Y^uyG55+*rL*@+^Kr*X$HkGk?wOU#J0K0 zJ4(BJMJ7f~o~a_y9CIq!wyC@n^AX=Wu!)H+c?L@3?XTdAnv_xI056>(Nuo#hu}Uv} zfZk1MY7Uykl3iqzaUPm`ogr?NuQ6_?BSbMQSSCp|qJb)QGIHXQDt5ay+5GqpHsVQT z*Oesn^|pQ=*W(?ENj|#%ZcAa*Q1!V=1A1FQpZ$nHH5ynsM_;pJXXG6Yqy@|zKfxw@ zW^%)>K>edo)qA@&fc-8^dnWCSm~gUYHS9$pQG;$!r}*#LRK1O|!*y{wrO8k#dfQk1 z)et@m-|3JON=q0M5ucIJ{SX4yfkv;#y5nF*HE2ti7JbC%NO2CH4rx*9*>@!EvWBkf ziiY+}835P9LCqafZAavpiqgJFdOFaFWT2S=D~F%Ip8)`t=&Pa%e zLb}*;iCy^zoi9}Eg^L0eaAp|PL93~7u)o&Y zn!Ea|pLW@~NqpO)e&(j%apaaWG$qt0ix|13rm=n6u1Znzc*yaBRxMB7lnsg~9F)Cx6&fpRr_oZO+n{rxQ-|I>V~BIzei?97R3 z=K1-Y^EGUIDE!s&37r3w1e9S6RyeZ*fDUr2$JI$MX5E^zO011MuKC`nmz;Sk#jPHH zR!T4il#?70L|)S(RtpQG!w!U3t$wi7yH>Q_pF=t?3ZP9+8`z2I6JrgGzEhe`rqa68 z28?ZgB*3QD>h2ki!<}FW9%CV%VBHNc?;iOz5k`~RvZz70rE|Zcfqxq*ZoV!v^%zBc zn;C|l91HJ0pWNAZ!d=x@dgr{I;D*0t)(z^M+T-y2I3Lp6`Ee6D`%_+x8 z@&P|@exfR^=P5VG#f&#onT6vS2h;aiQ5=Rg81IMQQG7o?4xfiFytl`LEShM49liVZ zC74PuLOA54dfN3F{bn^df7`#GqOvP|9KV)qF(_FD0X=|*f1VW~v%8`239{LGeI445 zdS}|-@W+j5jEa_K_Le3ME^>}7T3!=D>A$fq&LsI4iUb!FtogEGt4X3~&^fg@ulh;# z{40rQ(^j#XenYBg($Eyb!%Hlv83uhrj$C6H^pTg$J50-MZNbLyDc$(z!f%L8LfEa?`9eFhi+-D$Od zyixDI3=8SI2=0s%B(vMT=ciIYe+E>@p`dFzy;|x#r2U$-WF6Ama|iTne}$D8AL13 z$!MLWD$Pta@5UQ;X=QDs6&k8T5I6H&Q&XZ9>5_br_~iZ5R7|~gOW;aqYZQkygO=og zZq#-O8z2%lQhXpgA(aD5gDSiqhTu-pSzM=tomQd57d^yw71Q((ML}mhgn{g6k&NI8 z5S^3b;TAhGK~aZi)$MfOR4aI@GROySWmPWA<>s+eH)*>O^6=fOy#Q~-DGQ$6>Fduj zQS4#Wl*k~4_c;{?U)6dMRvVlfG8HP&{mco)7#22un1-vrpKJ2j8)*$eqL1J_%d!$O zSixqoZE%mePXvJ+p;2PN&{Xhs{2ky4Fg>IbJOtv)9r`W)Ba%*ypv5YT%C~uaYfdev zVS8B`cNca)*fhW@1T?uFI#CH3o-9h{de<{Q+N5E8?MPAm=B2#fp4Y>Qnvg=1p2~vm z*}Y+RSOK}V>K}-wLXvX-}JfsjOTD_|E(B ztKrwP=}p|ES)$yr(QK)?NDj5b)!p(1{OxaLVpb;4oow%<(G+_io_-bWhxZ+Ek!Lb3j}V*E2(rO*MYSpkkipgYPK$& zkW-pGWE|ztEo-7ZMi%JQ_>LnAu)yY|*+I=O9OqatkoV{&W%q?XI>_&H>xyGWQM~z{ zxN4+syD>M!H<;s5WJsX6@8`2i*)Xh{6U~R^9TNWOV4T|!cQR@SDfE>LDkP)F35kDv`-jz^sqY+eL_7kd(^UQU3E~C=|_)XWJHbTTK z5mro7LM3WDYdy1h*Xm_dospnFTy(@z2MSEx&Y1OMGgh($KRs&0FcT8N-xks>%I}$e zHXXKuw2!Gyy09lN65Iw1YQIYvg~>jOogd!;%cC|fu-r|v{rj`>_R)0jM(QBtqE+G|_a^stks0CR&wkBB<#?sH1Tcy58=lo$P_TB2lcAiFZEs|7Obx9=l zQ^#@(v|_nSK6(o=c4wk|CNq`sn$9OSMRVU9qA_MT?USlhZxY~^Q{{&zp7MRRlE+;- zl5GixFyQ{d50!ZY2!&LO#I8M^WHYLchd>6Y%%Y8$=y*y}e4vOhP3nRftF`;?*qo9{ zE)WMx1>L`iQNN@p%NR!X;yJoDo}WbT`?;WnPjLtAZ-vc!ljF{(6_ut>K3Nx?Ci0R! 
zVyD0(WH|HW3lu#ErUBs7(?DW)^bPQPbLtgv3mG0T_d>nMDF8St;sx^E0ku)!KUZF0 z#sg3j1@5~6Bx!zn1_q$OnRZ^Z2`C_3RQS)mmtijOKpoA_=pYRgxZlwWHH`(rL4~uP zzQ7j>3l+|M{?eC__&;#+5=a79q6IZI?~{Yx0pOlDFO(D&=mr3%z5|jrE7O8*0myuC z$SY6~1|KgUn2(>ApASCq3Pkze#IskR1Z3X-_?xLI!2i$NEFi%5|Gmw8!hGQW>uv7L zRCZk!#veL;A{MW1-p}e63NCuPC7w0WAAdP%h38^b0wNTPB(bdL{Jk( z%|>2soA*;FfyRVU`g<^0P*apP>1H-Ek?cAKuyRDKDT1teNH(OLB0IvH72ju)VmEBk zwAqsgV(0l#zorYv`YI5A9JKkS@C;Uzq(Xj>`J>n~x=?$wvG6<-6+`7C3-bcaM+Ivd zqAbN?WU)*r;8ARncqr+M9cBR7)$L=^>|*d&VSo{v=(%tgDKrG$Kt{8PqpfH3r?)*v zR^c>bm+(OR9kOVuPkZ{lqPvCBwhFV~49Joht|FEb0x{}$b5R(LP3&Nto`kU#D5p~# zJ5UeB4t`TU(NqiFMtV<5YzDBV0?V+2@CLX*hpi+5wxoVDWrc+ALf6g1bn{$fJCGzj<-9K8uF}PpY`&3*^Qc-5Di@`bjU1=?3u5Y)pbO^1#xLsjk zdgtKL_aS|&9F=JIHhaA9udkO@Dv90%22h7%@KDsi}uQ=`B2 z4`kGcT;(EDAz1n-*s?6P67z18jY>+LZvJOh*!=ADg&`E}z{E}+X z03p~q-!@@6Z@qPeMpX~Z| z7vTOAWN1gFaMX4+Vu$wqx!oNf`V>L7j;lQ0R%mpFnhBq$Y1EsxT;g@rT%C+)_TDRH zwp;GWDZ#{N&h%xfcMk244R5FWv$paY<=@uc*x$R$B6dM2Ar(yA4nz{}(TB}qyH+&0 zN@udVa+01&1r_YTFqG#Ab(QImtrA;r?>L|f6zyD;0Cs)mC72sb+9tNeg3!EYK6xq^+{BA}H$%@Te=aBbT6wKX^ zKL1)Qo_8t$=4cl(25MX#%eA%5wwCt~r-GpojZ^+z<-D*NX*cY(x5V?Wg87K&$Id_pu27hH%Uq!%-S@3mlRtQf1fHy2pU&1hl|G_^yQ3<_Xi(rQ-6kG37FifH44azyiNCG!WiDBm6c$o zA_m6BeflX9R)fm7FnTmv4jTV;%uH5>-I9zo3-VtH{lg4p;t6K-UYz`@(9~XMs2;{I zz)gv@r@z&Y?Efd`ql_TCXe4R0d}oHZ%`N5Kd)io-`)h)KF%yJk<_o-lbi8eSR$M2a zMxtvzjblhsY?)V)77GPY5?4)My0Fp@oM9%jEh3VJC`q=PwewGiz(7bI5O6!g@R%CZn`#cxGyT6%h{#R`Uoxv`@u4P`@Kt970dO$@fS*VCN; zMP;gdPSwQ_X0Ldu|ED+?N~Ai{hMWUS)z$sTq@p3fUQwPh?aN%HCYMsz7F~5oxEOTY zZhPeCl)EsAnw>g_!N8tv_`L5cj5`RVyylv=K?PFgZ@Lf} za2y$rowx2Au`-QXWDIDSBJ@nZEzmd>wFqC5_nCaL(4ysbpUlMWppW_YT_?|v-KXXW zk;C>X1ElYmb$|?-A|DOQh^_D=P$7w^+PX{=dvT#V8T(AYVC)Q}gG3mTIS25^rEnYv z2}=(o1J!|-;w!JK=)kL#^&Dtg*}8ci@(lU4Yyk4@98DivZ!Thfk40DGe3#|q!sPNTVQ0~or4 z91T!oEHSvkZEHy#bzp~TKFgQ}7!?Jidi|7T)}@9zs<@bOPf7 z3#keTmI@5NyyPe~@eVTn#FY41{VQNaUaWvfP?`+dZ?_0&x2#Yyukv;ZJ+4LZE5!5a z;|@9acBD_Ca2_vF3rD7n%c0x^yE|gUshM9QyW1*qL`>Z==EH19-l98;mt63L^$Me8 zm$_dRsIw@f|6n6{1F+H#5-=QI-zb=3gvq5&Xb-8qrUL6ue!VB=?s=do8FAN{915@? zrRg%`PUOu-85p|Isg4bx&un-{|4|M-&V<5{zt5IL8Rr zE0cJ)Ibt&?`1oN~b_3(WR*Xe@C1jss!aOqgV0y=^DHmVXufIF9}qsVwVmI^gz zC~ExPP<9;X=F90zMadO^KoL?ybD0CWz%a0LF|lhfSMJDLhPFeOdSvA20&vAeD17vY ze4Yf)+^sj}u6wfQg8x1SCkz41c|d_HI1m10x7QN9;H~HHA{!K=;fO@EI}M~K{WoNo zh3L_Rs7msM*t@;{21vA7m\n", + "" + ] + }, + { + "cell_type": "markdown", + "id": "399cf8f3", + "metadata": {}, + "source": [ + "# Week 8 February 19-23: Gradient Methods\n", + "**Morten Hjorth-Jensen Email morten.hjorth-jensen@fys.uio.no**, Department of Physics and Center fo Computing in Science Education, University of Oslo, Oslo, Norway and Department of Physics and Astronomy and Facility for Rare Isotope Beams, Michigan State University, East Lansing, Michigan, USA\n", + "\n", + "Date: **February 23, 2024**" + ] + }, + { + "cell_type": "markdown", + "id": "8e0fe466", + "metadata": {}, + "source": [ + "## Overview\n", + "**Topics.**\n", + "\n", + "* Gradient methods:\n", + "\n", + "a. Semi-Newton methods (Broyden's algorithm)\n", + "\n", + "b. Steepest descent and conjugate gradient descent\n", + "\n", + "c. Stochastic gradient descent\n", + "\n", + "**Video and handwriten notes.**\n", + "\n", + "1. [Video of lecture TBA](https://youtu.be/)\n", + "\n", + "2. 
[Handwritten notes](https://github.com/CompPhysics/ComputationalPhysics2/blob/gh-pages/doc/HandWrittenNotes/2024/NotesFebruary23.pdf)\n", + "\n", + "**Teaching Material, videos and written material.**\n", + "\n", + "* These lecture notes\n", + "\n", + "* [Video on the Conjugate Gradient methods](https://www.youtube.com/watch?v=eAYohMUpPMA&ab_channel=TomCarlone)\n", + "\n", + "* Recommended background literature, [Convex Optimization](https://web.stanford.edu/~boyd/cvxbook/) by Boyd and Vandenberghe. Their [lecture slides](https://web.stanford.edu/~boyd/cvxbook/bv_cvxslides.pdf) are very useful (warning, these are some 300 pages)." + ] + }, + { + "cell_type": "markdown", + "id": "d829d9d1", + "metadata": {}, + "source": [ + "## Brief reminder on Newton-Raphson's method\n", + "\n", + "Let us quickly remind ourselves on how we derive the above method.\n", + "\n", + "Perhaps the most celebrated of all one-dimensional root-finding\n", + "routines is Newton's method, also called the Newton-Raphson\n", + "method. This method requires the evaluation of both the\n", + "function $f$ and its derivative $f'$ at arbitrary points. \n", + "If you can only calculate the derivative\n", + "numerically and/or your function is not of the smooth type, we\n", + "normally discourage the use of this method." + ] + }, + { + "cell_type": "markdown", + "id": "af9ec2f1", + "metadata": {}, + "source": [ + "## The equations\n", + "\n", + "The Newton-Raphson formula consists geometrically of extending the\n", + "tangent line at a current point until it crosses zero, then setting\n", + "the next guess to the abscissa of that zero-crossing. The mathematics\n", + "behind this method is rather simple. Employing a Taylor expansion for\n", + "$x$ sufficiently close to the solution $s$, we have" + ] + }, + { + "cell_type": "markdown", + "id": "a4bb6a8f", + "metadata": {}, + "source": [ + "\n", + "

    \n", + "\n", + "$$\n", + "f(s)=0=f(x)+(s-x)f'(x)+\\frac{(s-x)^2}{2}f''(x) +\\dots.\n", + " \\label{eq:taylornr} \\tag{1}\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "6e6db269", + "metadata": {}, + "source": [ + "For small enough values of the function and for well-behaved\n", + "functions, the terms beyond linear are unimportant, hence we obtain" + ] + }, + { + "cell_type": "markdown", + "id": "ce7d6f81", + "metadata": {}, + "source": [ + "$$\n", + "f(x)+(s-x)f'(x)\\approx 0,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "9504fa42", + "metadata": {}, + "source": [ + "yielding" + ] + }, + { + "cell_type": "markdown", + "id": "dde294d4", + "metadata": {}, + "source": [ + "$$\n", + "s\\approx x-\\frac{f(x)}{f'(x)}.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "5da2441e", + "metadata": {}, + "source": [ + "Having in mind an iterative procedure, it is natural to start iterating with" + ] + }, + { + "cell_type": "markdown", + "id": "60d9cfe3", + "metadata": {}, + "source": [ + "$$\n", + "x_{n+1}=x_n-\\frac{f(x_n)}{f'(x_n)}.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "7deb2440", + "metadata": {}, + "source": [ + "## Simple geometric interpretation\n", + "\n", + "The above is Newton-Raphson's method. It has a simple geometric\n", + "interpretation, namely $x_{n+1}$ is the point where the tangent from\n", + "$(x_n,f(x_n))$ crosses the $x$-axis. Close to the solution,\n", + "Newton-Raphson converges fast to the desired result. However, if we\n", + "are far from a root, where the higher-order terms in the series are\n", + "important, the Newton-Raphson formula can give grossly inaccurate\n", + "results. For instance, the initial guess for the root might be so far\n", + "from the true root as to let the search interval include a local\n", + "maximum or minimum of the function. If an iteration places a trial\n", + "guess near such a local extremum, so that the first derivative nearly\n", + "vanishes, then Newton-Raphson may fail totally" + ] + }, + { + "cell_type": "markdown", + "id": "d8a09b5c", + "metadata": {}, + "source": [ + "## Extending to more than one variable\n", + "\n", + "Newton's method can be generalized to systems of several non-linear equations\n", + "and variables. 
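Before turning to several variables, a minimal Python sketch of the one-variable iteration $x_{n+1}=x_n-f(x_n)/f'(x_n)$ derived above may be helpful. The test function $f(x)=x^2-2$, the starting point and the tolerance are illustrative assumptions and are not part of the original notes.

```python
# Minimal sketch of one-variable Newton-Raphson, x_{n+1} = x_n - f(x_n)/f'(x_n).
# The test function f(x) = x^2 - 2 (root sqrt(2)) is an illustrative choice.

def newton_raphson(f, fprime, x0, tol=1.0e-12, maxiter=50):
    x = x0
    for _ in range(maxiter):
        fx = f(x)
        if abs(fx) < tol:      # converged: |f(x)| is essentially zero
            break
        # the step is ill-defined if fprime(x) is (nearly) zero,
        # which is exactly the failure mode discussed above
        x = x - fx / fprime(x)
    return x

print(newton_raphson(lambda x: x**2 - 2.0, lambda x: 2.0 * x, x0=1.0))  # ~1.41421356
```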
Consider the case with two equations" + ] + }, + { + "cell_type": "markdown", + "id": "035bcb72", + "metadata": {}, + "source": [ + "$$\n", + "\\begin{array}{cc} f_1(x_1,x_2) &=0\\\\\n", + " f_2(x_1,x_2) &=0,\\end{array}\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "06601d88", + "metadata": {}, + "source": [ + "which we Taylor expand to obtain" + ] + }, + { + "cell_type": "markdown", + "id": "e4e9b7a5", + "metadata": {}, + "source": [ + "$$\n", + "\\begin{array}{cc} 0=f_1(x_1+h_1,x_2+h_2)=&f_1(x_1,x_2)+h_1\n", + " \\partial f_1/\\partial x_1+h_2\n", + " \\partial f_1/\\partial x_2+\\dots\\\\\n", + " 0=f_2(x_1+h_1,x_2+h_2)=&f_2(x_1,x_2)+h_1\n", + " \\partial f_2/\\partial x_1+h_2\n", + " \\partial f_2/\\partial x_2+\\dots\n", + " \\end{array}.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "63477e16", + "metadata": {}, + "source": [ + "Defining the Jacobian matrix $\\hat{J}$ we have" + ] + }, + { + "cell_type": "markdown", + "id": "f4b477f5", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{J}=\\left( \\begin{array}{cc}\n", + " \\partial f_1/\\partial x_1 & \\partial f_1/\\partial x_2 \\\\\n", + " \\partial f_2/\\partial x_1 &\\partial f_2/\\partial x_2\n", + " \\end{array} \\right),\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "acd8f765", + "metadata": {}, + "source": [ + "we can rephrase Newton's method as" + ] + }, + { + "cell_type": "markdown", + "id": "1752cfa0", + "metadata": {}, + "source": [ + "$$\n", + "\\left(\\begin{array}{c} x_1^{n+1} \\\\ x_2^{n+1} \\end{array} \\right)=\n", + "\\left(\\begin{array}{c} x_1^{n} \\\\ x_2^{n} \\end{array} \\right)+\n", + "\\left(\\begin{array}{c} h_1^{n} \\\\ h_2^{n} \\end{array} \\right),\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "2debe988", + "metadata": {}, + "source": [ + "where we have defined" + ] + }, + { + "cell_type": "markdown", + "id": "299eeb27", + "metadata": {}, + "source": [ + "$$\n", + "\\left(\\begin{array}{c} h_1^{n} \\\\ h_2^{n} \\end{array} \\right)=\n", + " -{\\bf \\hat{J}}^{-1}\n", + " \\left(\\begin{array}{c} f_1(x_1^{n},x_2^{n}) \\\\ f_2(x_1^{n},x_2^{n}) \\end{array} \\right).\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "fe4294f8", + "metadata": {}, + "source": [ + "We need thus to compute the inverse of the Jacobian matrix and it\n", + "is to understand that difficulties may\n", + "arise in case $\\hat{J}$ is nearly singular.\n", + "\n", + "It is rather straightforward to extend the above scheme to systems of\n", + "more than two non-linear equations. In our case, the Jacobian matrix is given by the Hessian that represents the second derivative of cost function." + ] + }, + { + "cell_type": "markdown", + "id": "1ce3c134", + "metadata": {}, + "source": [ + "## Steepest descent\n", + "\n", + "The basic idea of gradient descent is\n", + "that a function $F(\\mathbf{x})$, \n", + "$\\mathbf{x} \\equiv (x_1,\\cdots,x_n)$, decreases fastest if one goes from $\\bf {x}$ in the\n", + "direction of the negative gradient $-\\nabla F(\\mathbf{x})$.\n", + "\n", + "It can be shown that if" + ] + }, + { + "cell_type": "markdown", + "id": "f406e4a4", + "metadata": {}, + "source": [ + "$$\n", + "\\mathbf{x}_{k+1} = \\mathbf{x}_k - \\gamma_k \\nabla F(\\mathbf{x}_k),\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "9fb06ed3", + "metadata": {}, + "source": [ + "with $\\gamma_k > 0$.\n", + "\n", + "For $\\gamma_k$ small enough, then $F(\\mathbf{x}_{k+1}) \\leq\n", + "F(\\mathbf{x}_k)$. 
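A short numerical check of this decrease property is sketched below. The quadratic test function (the same one used in the $2\times 2$ example later in these notes), the fixed step $\gamma$ and the use of NumPy are illustrative assumptions.

```python
import numpy as np

# Plain gradient descent x_{k+1} = x_k - gamma * grad F(x_k) on a convex quadratic.
# With a sufficiently small fixed gamma the function value decreases at every step.
A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])
F = lambda x: 0.5 * x @ A @ x - b @ x
gradF = lambda x: A @ x - b

gamma = 0.01                  # illustrative; small enough for this matrix
x = np.zeros(2)
for k in range(1000):
    x_new = x - gamma * gradF(x)
    assert F(x_new) <= F(x) + 1e-12   # monotone decrease (tolerance for round-off)
    x = x_new
print(x)                      # approaches the minimizer [97/39, 1/39]
```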
This means that for a sufficiently small $\\gamma_k$\n", + "we are always moving towards smaller function values, i.e a minimum." + ] + }, + { + "cell_type": "markdown", + "id": "dcefbbfb", + "metadata": {}, + "source": [ + "## More on Steepest descent\n", + "\n", + "The previous observation is the basis of the method of steepest\n", + "descent, which is also referred to as just gradient descent (GD). One\n", + "starts with an initial guess $\\mathbf{x}_0$ for a minimum of $F$ and\n", + "computes new approximations according to" + ] + }, + { + "cell_type": "markdown", + "id": "6b2282cf", + "metadata": {}, + "source": [ + "$$\n", + "\\mathbf{x}_{k+1} = \\mathbf{x}_k - \\gamma_k \\nabla F(\\mathbf{x}_k), \\ \\ k \\geq 0.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "7b33a598", + "metadata": {}, + "source": [ + "The parameter $\\gamma_k$ is often referred to as the step length or\n", + "the learning rate within the context of Machine Learning." + ] + }, + { + "cell_type": "markdown", + "id": "4dbfaa03", + "metadata": {}, + "source": [ + "## The ideal\n", + "\n", + "Ideally the sequence $\\{\\mathbf{x}_k \\}_{k=0}$ converges to a global\n", + "minimum of the function $F$. In general we do not know if we are in a\n", + "global or local minimum. In the special case when $F$ is a convex\n", + "function, all local minima are also global minima, so in this case\n", + "gradient descent can converge to the global solution. The advantage of\n", + "this scheme is that it is conceptually simple and straightforward to\n", + "implement. However the method in this form has some severe\n", + "limitations:\n", + "\n", + "In machine learing we are often faced with non-convex high dimensional\n", + "cost functions with many local minima. Since GD is deterministic we\n", + "will get stuck in a local minimum, if the method converges, unless we\n", + "have a very good intial guess. This also implies that the scheme is\n", + "sensitive to the chosen initial condition.\n", + "\n", + "Note that the gradient is a function of $\\mathbf{x} =\n", + "(x_1,\\cdots,x_n)$ which makes it expensive to compute numerically." + ] + }, + { + "cell_type": "markdown", + "id": "b93734d8", + "metadata": {}, + "source": [ + "## The sensitiveness of the gradient descent\n", + "\n", + "The gradient descent method \n", + "is sensitive to the choice of learning rate $\\gamma_k$. This is due\n", + "to the fact that we are only guaranteed that $F(\\mathbf{x}_{k+1}) \\leq\n", + "F(\\mathbf{x}_k)$ for sufficiently small $\\gamma_k$. The problem is to\n", + "determine an optimal learning rate. If the learning rate is chosen too\n", + "small the method will take a long time to converge and if it is too\n", + "large we can experience erratic behavior.\n", + "\n", + "Many of these shortcomings can be alleviated by introducing\n", + "randomness. One such method is that of Stochastic Gradient Descent\n", + "(SGD), see below." + ] + }, + { + "cell_type": "markdown", + "id": "cc1ab56d", + "metadata": {}, + "source": [ + "## Convex functions\n", + "\n", + "Ideally we want our cost/loss function to be convex(concave).\n", + "\n", + "First we give the definition of a convex set: A set $C$ in\n", + "$\\mathbb{R}^n$ is said to be convex if, for all $x$ and $y$ in $C$ and\n", + "all $t \\in (0,1)$ , the point $(1 − t)x + ty$ also belongs to\n", + "C. 
Geometrically this means that every point on the line segment\n", + "connecting $x$ and $y$ is in $C$ as discussed below.\n", + "\n", + "The convex subsets of $\\mathbb{R}$ are the intervals of\n", + "$\\mathbb{R}$. Examples of convex sets of $\\mathbb{R}^2$ are the\n", + "regular polygons (triangles, rectangles, pentagons, etc...)." + ] + }, + { + "cell_type": "markdown", + "id": "8091cbc6", + "metadata": {}, + "source": [ + "## Convex function\n", + "\n", + "**Convex function**: Let $X \\subset \\mathbb{R}^n$ be a convex set. Assume that the function $f: X \\rightarrow \\mathbb{R}$ is continuous, then $f$ is said to be convex if $$f(tx_1 + (1-t)x_2) \\leq tf(x_1) + (1-t)f(x_2) $$ for all $x_1, x_2 \\in X$ and for all $t \\in [0,1]$. If $\\leq$ is replaced with a strict inequaltiy in the definition, we demand $x_1 \\neq x_2$ and $t\\in(0,1)$ then $f$ is said to be strictly convex. For a single variable function, convexity means that if you draw a straight line connecting $f(x_1)$ and $f(x_2)$, the value of the function on the interval $[x_1,x_2]$ is always below the line as illustrated below." + ] + }, + { + "cell_type": "markdown", + "id": "42354ed2", + "metadata": {}, + "source": [ + "## Conditions on convex functions\n", + "\n", + "In the following we state first and second-order conditions which\n", + "ensures convexity of a function $f$. We write $D_f$ to denote the\n", + "domain of $f$, i.e the subset of $R^n$ where $f$ is defined. For more\n", + "details and proofs we refer to: [S. Boyd and L. Vandenberghe. Convex Optimization. Cambridge University Press](http://stanford.edu/boyd/cvxbook/, 2004).\n", + "\n", + "**First order condition.**\n", + "\n", + "Suppose $f$ is differentiable (i.e $\\nabla f(x)$ is well defined for\n", + "all $x$ in the domain of $f$). Then $f$ is convex if and only if $D_f$\n", + "is a convex set and $$f(y) \\geq f(x) + \\nabla f(x)^T (y-x) $$ holds\n", + "for all $x,y \\in D_f$. This condition means that for a convex function\n", + "the first order Taylor expansion (right hand side above) at any point\n", + "a global under estimator of the function. To convince yourself you can\n", + "make a drawing of $f(x) = x^2+1$ and draw the tangent line to $f(x)$ and\n", + "note that it is always below the graph.\n", + "\n", + "**Second order condition.**\n", + "\n", + "Assume that $f$ is twice\n", + "differentiable, i.e the Hessian matrix exists at each point in\n", + "$D_f$. Then $f$ is convex if and only if $D_f$ is a convex set and its\n", + "Hessian is positive semi-definite for all $x\\in D_f$. For a\n", + "single-variable function this reduces to $f''(x) \\geq 0$. Geometrically this means that $f$ has nonnegative curvature\n", + "everywhere.\n", + "\n", + "This condition is particularly useful since it gives us an procedure for determining if the function under consideration is convex, apart from using the definition." + ] + }, + { + "cell_type": "markdown", + "id": "d60db400", + "metadata": {}, + "source": [ + "## More on convex functions\n", + "\n", + "The next result is of great importance to us and the reason why we are\n", + "going on about convex functions. In machine learning we frequently\n", + "have to minimize a loss/cost function in order to find the best\n", + "parameters for the model we are considering. \n", + "\n", + "Ideally we want the\n", + "global minimum (for high-dimensional models it is hard to know\n", + "if we have local or global minimum). 
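As a small numerical companion to the second-order condition above, one can inspect the Hessian of a candidate function directly; this is a sanity check rather than a proof. The function below (the quadratic used later in these notes) and the use of NumPy are assumptions for illustration.

```python
import numpy as np

# Second-order check: f(x1, x2) = x1^2 + x1*x2 + 10*x2^2 - 5*x1 - 3*x2
# has the constant Hessian [[2, 1], [1, 20]]; nonnegative eigenvalues
# imply convexity (here they are strictly positive).
H = np.array([[2.0, 1.0], [1.0, 20.0]])
eigvals = np.linalg.eigvalsh(H)
print(eigvals)                        # approx [1.94, 20.06], both positive
print(bool(np.all(eigvals >= 0.0)))   # True -> convex
```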
However, if the cost/loss function\n", + "is convex the following result provides invaluable information:\n", + "\n", + "**Any minimum is global for convex functions.**\n", + "\n", + "Consider the problem of finding $x \\in \\mathbb{R}^n$ such that $f(x)$\n", + "is minimal, where $f$ is convex and differentiable. Then, any point\n", + "$x^*$ that satisfies $\\nabla f(x^*) = 0$ is a global minimum.\n", + "\n", + "This result means that if we know that the cost/loss function is convex and we are able to find a minimum, we are guaranteed that it is a global minimum." + ] + }, + { + "cell_type": "markdown", + "id": "0e613f15", + "metadata": {}, + "source": [ + "## Some simple problems\n", + "\n", + "1. Show that $f(x)=x^2$ is convex for $x \\in \\mathbb{R}$ using the definition of convexity. Hint: If you re-write the definition, $f$ is convex if the following holds for all $x,y \\in D_f$ and any $\\lambda \\in [0,1]$ $\\lambda f(x)+(1-\\lambda)f(y)-f(\\lambda x + (1-\\lambda) y ) \\geq 0$.\n", + "\n", + "2. Using the second order condition show that the following functions are convex on the specified domain.\n", + "\n", + " * $f(x) = e^x$ is convex for $x \\in \\mathbb{R}$.\n", + "\n", + " * $g(x) = -\\ln(x)$ is convex for $x \\in (0,\\infty)$.\n", + "\n", + "3. Let $f(x) = x^2$ and $g(x) = e^x$. Show that $f(g(x))$ and $g(f(x))$ is convex for $x \\in \\mathbb{R}$. Also show that if $f(x)$ is any convex function than $h(x) = e^{f(x)}$ is convex.\n", + "\n", + "4. A norm is any function that satisfy the following properties\n", + "\n", + " * $f(\\alpha x) = |\\alpha| f(x)$ for all $\\alpha \\in \\mathbb{R}$.\n", + "\n", + " * $f(x+y) \\leq f(x) + f(y)$\n", + "\n", + " * $f(x) \\leq 0$ for all $x \\in \\mathbb{R}^n$ with equality if and only if $x = 0$\n", + "\n", + "Using the definition of convexity, try to show that a function satisfying the properties above is convex (the third condition is not needed to show this)." + ] + }, + { + "cell_type": "markdown", + "id": "05bffda7", + "metadata": {}, + "source": [ + "## Standard steepest descent\n", + "\n", + "Before we proceed, we would like to discuss the approach called the\n", + "**standard Steepest descent**, which again leads to us having to be able\n", + "to compute a matrix. It belongs to the class of Conjugate Gradient methods (CG).\n", + "\n", + "[The success of the CG method](https://www.cs.cmu.edu/~quake-papers/painless-conjugate-gradient.pdf)\n", + "for finding solutions of non-linear problems is based on the theory\n", + "of conjugate gradients for linear systems of equations. It belongs to\n", + "the class of iterative methods for solving problems from linear\n", + "algebra of the type" + ] + }, + { + "cell_type": "markdown", + "id": "d5e96d65", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{A}\\hat{x} = \\hat{b}.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "edb1517e", + "metadata": {}, + "source": [ + "In the iterative process we end up with a problem like" + ] + }, + { + "cell_type": "markdown", + "id": "cf46f9ab", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{r}= \\hat{b}-\\hat{A}\\hat{x},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "42027921", + "metadata": {}, + "source": [ + "where $\\hat{r}$ is the so-called residual or error in the iterative process.\n", + "\n", + "When we have found the exact solution, $\\hat{r}=0$." 
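As a quick numerical aside (a sketch assuming NumPy and using the $2\times 2$ system introduced later in these notes): the residual $\hat{b}-\hat{A}\hat{x}$ is minus the gradient of the quadratic form discussed in the next cells, and it vanishes exactly at the solution.

```python
import numpy as np

# The residual r = b - A x equals minus the gradient of
# P(x) = 0.5 x^T A x - x^T b, so r -> 0 exactly at the minimum of P.
A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])

x_trial = np.array([1.0, 1.0])            # arbitrary trial point (illustrative)
residual = b - A @ x_trial
gradient = A @ x_trial - b
print(np.allclose(residual, -gradient))   # True

x_exact = np.linalg.solve(A, b)           # [97/39, 1/39]
print(b - A @ x_exact)                    # essentially the zero vector
```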
+ ] + }, + { + "cell_type": "markdown", + "id": "ad4b4921", + "metadata": {}, + "source": [ + "## Gradient method\n", + "\n", + "The residual is zero when we reach the minimum of the quadratic equation" + ] + }, + { + "cell_type": "markdown", + "id": "7e26232b", + "metadata": {}, + "source": [ + "$$\n", + "P(\\hat{x})=\\frac{1}{2}\\hat{x}^T\\hat{A}\\hat{x} - \\hat{x}^T\\hat{b},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "249e804e", + "metadata": {}, + "source": [ + "with the constraint that the matrix $\\hat{A}$ is positive definite and\n", + "symmetric. This defines also the Hessian and we want it to be positive definite." + ] + }, + { + "cell_type": "markdown", + "id": "12a1d411", + "metadata": {}, + "source": [ + "## Steepest descent method\n", + "\n", + "We denote the initial guess for $\\hat{x}$ as $\\hat{x}_0$. \n", + "We can assume without loss of generality that" + ] + }, + { + "cell_type": "markdown", + "id": "425ecf31", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{x}_0=0,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "63e4fb8d", + "metadata": {}, + "source": [ + "or consider the system" + ] + }, + { + "cell_type": "markdown", + "id": "de874252", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{A}\\hat{z} = \\hat{b}-\\hat{A}\\hat{x}_0,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "7bf4f529", + "metadata": {}, + "source": [ + "instead." + ] + }, + { + "cell_type": "markdown", + "id": "14dfd7e8", + "metadata": {}, + "source": [ + "## Steepest descent method\n", + "One can show that the solution $\\hat{x}$ is also the unique minimizer of the quadratic form" + ] + }, + { + "cell_type": "markdown", + "id": "0d4369c2", + "metadata": {}, + "source": [ + "$$\n", + "f(\\hat{x}) = \\frac{1}{2}\\hat{x}^T\\hat{A}\\hat{x} - \\hat{x}^T \\hat{x} , \\quad \\hat{x}\\in\\mathbf{R}^n.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "693b4789", + "metadata": {}, + "source": [ + "This suggests taking the first basis vector $\\hat{r}_1$ (see below for definition) \n", + "to be the gradient of $f$ at $\\hat{x}=\\hat{x}_0$, \n", + "which equals" + ] + }, + { + "cell_type": "markdown", + "id": "f4975202", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{A}\\hat{x}_0-\\hat{b},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "1c9715f3", + "metadata": {}, + "source": [ + "and \n", + "$\\hat{x}_0=0$ it is equal $-\\hat{b}$." 
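Since the notes announce that Python code is to come, a minimal NumPy sketch of the steepest-descent solver for $\hat{A}\hat{x}=\hat{b}$ is given here as a hedged stand-in; it mirrors the C++ routine shown further below and uses the step length $\alpha_k$ derived in the next cell. The matrix, right-hand side and tolerance are the illustrative values used elsewhere in these notes, not the authors' forthcoming code.

```python
import numpy as np

# Steepest descent for A x = b with exact line search:
# r_k = A x_k - b,  alpha_k = (r_k^T r_k)/(r_k^T A r_k),  x_{k+1} = x_k - alpha_k r_k.
def steepest_descent(A, b, x0, tol=1.0e-14, max_iter=100):
    x = np.array(x0, dtype=float)
    for _ in range(max_iter):
        r = A @ x - b                    # residual (here equal to the gradient)
        if np.sqrt(r @ r) < tol:
            break
        alpha = (r @ r) / (r @ (A @ r))
        x = x - alpha * r
    return x

A = np.array([[2.0, 1.0], [1.0, 20.0]])
b = np.array([5.0, 3.0])
print(steepest_descent(A, b, x0=[0.0, 0.0]))   # approx [97/39, 1/39] = [2.4872, 0.0256]
```

Exact line search along the residual direction is what distinguishes this scheme from plain gradient descent with a fixed learning rate.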
+ ] + }, + { + "cell_type": "markdown", + "id": "ed8fcaa5", + "metadata": {}, + "source": [ + "## Final expressions\n", + "We can compute the residual iteratively as" + ] + }, + { + "cell_type": "markdown", + "id": "595a6c44", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{r}_{k+1}=\\hat{b}-\\hat{A}\\hat{x}_{k+1},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "e02ec1e3", + "metadata": {}, + "source": [ + "which equals" + ] + }, + { + "cell_type": "markdown", + "id": "ee586e88", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{b}-\\hat{A}(\\hat{x}_k+\\alpha_k\\hat{r}_k),\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "b653a9a1", + "metadata": {}, + "source": [ + "or" + ] + }, + { + "cell_type": "markdown", + "id": "4fbb348b", + "metadata": {}, + "source": [ + "$$\n", + "(\\hat{b}-\\hat{A}\\hat{x}_k)-\\alpha_k\\hat{A}\\hat{r}_k,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "bca87614", + "metadata": {}, + "source": [ + "which gives" + ] + }, + { + "cell_type": "markdown", + "id": "4ca73207", + "metadata": {}, + "source": [ + "$$\n", + "\\alpha_k = \\frac{\\hat{r}_k^T\\hat{r}_k}{\\hat{r}_k^T\\hat{A}\\hat{r}_k}\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "b97651c8", + "metadata": {}, + "source": [ + "leading to the iterative scheme" + ] + }, + { + "cell_type": "markdown", + "id": "88a3242e", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{x}_{k+1}=\\hat{x}_k-\\alpha_k\\hat{r}_{k},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "2d77985f", + "metadata": {}, + "source": [ + "## Our simple $2\\times 2$ example\n", + "\n", + "Last week we introduced the simple two-dimensional function" + ] + }, + { + "cell_type": "markdown", + "id": "d6bf1899", + "metadata": {}, + "source": [ + "$$\n", + "f(x_1,x_2)=x_1^2+x_1x_2+10x_2^2-5x_1-3x_2,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "294f0746", + "metadata": {}, + "source": [ + "which is of the form (in terms of vectors and matrices)" + ] + }, + { + "cell_type": "markdown", + "id": "aa812aa8", + "metadata": {}, + "source": [ + "$$\n", + "f(\\boldsymbol{x})=\\frac{1}{2}\\boldsymbol{x}^T\\boldsymbol{A}\\boldsymbol{x}-\\boldsymbol{b}^T\\boldsymbol{x},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "c9cd000a", + "metadata": {}, + "source": [ + "where we have" + ] + }, + { + "cell_type": "markdown", + "id": "62c2d155", + "metadata": {}, + "source": [ + "$$\n", + "\\boldsymbol{x}=\\begin{bmatrix} x_1 \\\\ x_2\\end{bmatrix},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "14b04d34", + "metadata": {}, + "source": [ + "$$\n", + "\\boldsymbol{b}=\\begin{bmatrix} 5 \\\\ 3\\end{bmatrix},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "12e18a6f", + "metadata": {}, + "source": [ + "and" + ] + }, + { + "cell_type": "markdown", + "id": "5554c203", + "metadata": {}, + "source": [ + "$$\n", + "\\boldsymbol{A}=\\begin{bmatrix} 2 & 1\\\\ 1& 20\\end{bmatrix}.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "37c1f654", + "metadata": {}, + "source": [ + "## Derivatives and more\n", + "\n", + "Optimizing the above equation, that is" + ] + }, + { + "cell_type": "markdown", + "id": "fe4cfa4d", + "metadata": {}, + "source": [ + "$$\n", + "\\nabla f = 0 = \\boldsymbol{A}\\boldsymbol{x}-\\boldsymbol{b},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "f5733b60", + "metadata": {}, + "source": [ + "which leads to a simple matrix-inversion problem" + ] + }, + { + "cell_type": "markdown", + "id": "f6b5934b", + "metadata": {}, + "source": [ + 
"$$\n", + "\\boldsymbol{x}=\\boldsymbol{A}^{-1}\\boldsymbol{b}.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "52ca4f24", + "metadata": {}, + "source": [ + "This problem is easy to solve since we can calculate the inverse. Alternatively, we can solve the two coupled equations with two unknowns" + ] + }, + { + "cell_type": "markdown", + "id": "83ff488d", + "metadata": {}, + "source": [ + "$$\n", + "\\frac{\\partial f}{\\partial x_1}=2x_1+x_2-5=0,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "5b33ecc7", + "metadata": {}, + "source": [ + "and" + ] + }, + { + "cell_type": "markdown", + "id": "589431f4", + "metadata": {}, + "source": [ + "$$\n", + "\\frac{\\partial f}{\\partial x_2}=x_1+20x_2-3=0,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "6dbc5e2c", + "metadata": {}, + "source": [ + "with solutions $x_1=97/39$ and $x_2=1/39$." + ] + }, + { + "cell_type": "markdown", + "id": "7cf4b6cb", + "metadata": {}, + "source": [ + "## Simple codes for steepest descent and conjugate gradient using a $2\\times 2$ matrix, in c++, Python code to come" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "5093cd2d", + "metadata": {}, + "outputs": [], + "source": [ + "#include \n", + "#include \n", + "#include \n", + "#include \n", + "#include \"vectormatrixclass.h\"\n", + "using namespace std;\n", + "// Main function begins here\n", + "int main(int argc, char * argv[]){\n", + " int dim = 2;\n", + " Vector x(dim),xsd(dim), b(dim),x0(dim);\n", + " Matrix A(dim,dim);\n", + "\n", + " // Set our initial guess\n", + " x0(0) = x0(1) = 0;\n", + " // Set the matrix\n", + " A(0,0) = 2; A(1,0) = 1; A(0,1) = 1; A(1,1) = 20;\n", + " b(0) = 5; b(1) = 3;\n", + " cout << \"The Matrix A that we are using: \" << endl;\n", + " A.Print();\n", + " cout << endl;\n", + " xsd = SteepestDescent(A,b,x0);\n", + " cout << \"The approximate solution using Steepest Descent is: \" << endl;\n", + " xsd.Print();\n", + " cout << endl;\n", + "}" + ] + }, + { + "cell_type": "markdown", + "id": "64917123", + "metadata": {}, + "source": [ + "## The routine for the steepest descent method" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "6bcf9a1e", + "metadata": {}, + "outputs": [], + "source": [ + "Vector SteepestDescent(Matrix A, Vector b, Vector x0){\n", + " int IterMax, i;\n", + " int dim = x0.Dimension();\n", + " const double tolerance = 1.0e-14;\n", + " Vector x(dim),f(dim),z(dim);\n", + " double c,alpha,d;\n", + " IterMax = 30;\n", + " x = x0;\n", + " r = A*x-b;\n", + " i = 0;\n", + " while (i <= IterMax){\n", + " z = A*r;\n", + " c = dot(r,r);\n", + " alpha = c/dot(r,z);\n", + " x = x - alpha*r;\n", + " r = A*x-b;\n", + " if(sqrt(dot(r,r)) < tolerance) break;\n", + " i++;\n", + " }\n", + " return x;\n", + "}" + ] + }, + { + "cell_type": "markdown", + "id": "f9824f2e", + "metadata": {}, + "source": [ + "## Steepest descent example" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "f6c193d4", + "metadata": {}, + "outputs": [ + { + "ename": "TypeError", + "evalue": "gca() got an unexpected keyword argument 'projection'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[3], line 18\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
np\u001b[38;5;241m.\u001b[39marray(\u001b[38;5;241m2\u001b[39m\u001b[38;5;241m*\u001b[39m[x[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m+\u001b[39mx[\u001b[38;5;241m1\u001b[39m]\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m5.0\u001b[39m, x[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m+\u001b[39m\u001b[38;5;241m20\u001b[39m\u001b[38;5;241m*\u001b[39mx[\u001b[38;5;241m1\u001b[39m]]\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m3.0\u001b[39m)\n\u001b[1;32m 17\u001b[0m fig \u001b[38;5;241m=\u001b[39m pt\u001b[38;5;241m.\u001b[39mfigure()\n\u001b[0;32m---> 18\u001b[0m ax \u001b[38;5;241m=\u001b[39m \u001b[43mfig\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgca\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprojection\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m3d\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 20\u001b[0m xmesh, ymesh \u001b[38;5;241m=\u001b[39m np\u001b[38;5;241m.\u001b[39mmgrid[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m2\u001b[39m:\u001b[38;5;241m3\u001b[39m:\u001b[38;5;241m0\u001b[39mj,\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m2\u001b[39m:\u001b[38;5;241m3\u001b[39m:\u001b[38;5;241m0\u001b[39mj]\n\u001b[1;32m 21\u001b[0m fmesh \u001b[38;5;241m=\u001b[39m f(np\u001b[38;5;241m.\u001b[39marray([xmesh, ymesh]))\n", + "\u001b[0;31mTypeError\u001b[0m: gca() got an unexpected keyword argument 'projection'" + ] + }, + { + "data": { + "text/plain": [ + "
    " + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "\n", + "import numpy as np\n", + "import numpy.linalg as la\n", + "\n", + "import scipy.optimize as sopt\n", + "\n", + "import matplotlib.pyplot as pt\n", + "from mpl_toolkits.mplot3d import axes3d\n", + "\n", + "def f(x):\n", + " return x[0]**2 + 10.0*x[1]**2+x[0]*x[1]-5.0*x[0]-3*x[2]\n", + "\n", + "def df(x):\n", + " return np.array(2*[x[0]+x[1]-5.0, x[0]+20*x[1]]-3.0)\n", + "\n", + "fig = pt.figure()\n", + "ax = fig.gca(projection=\"3d\")\n", + "\n", + "xmesh, ymesh = np.mgrid[-2:3:0j,-2:3:0j]\n", + "fmesh = f(np.array([xmesh, ymesh]))\n", + "ax.plot_surface(xmesh, ymesh, fmesh)" + ] + }, + { + "cell_type": "markdown", + "id": "f7c40b47", + "metadata": {}, + "source": [ + "And then as countor plot" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "faaef1c5", + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'xmesh' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[2], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m pt\u001b[38;5;241m.\u001b[39maxis(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mequal\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m----> 2\u001b[0m pt\u001b[38;5;241m.\u001b[39mcontour(\u001b[43mxmesh\u001b[49m, ymesh, fmesh)\n\u001b[1;32m 3\u001b[0m guesses \u001b[38;5;241m=\u001b[39m [np\u001b[38;5;241m.\u001b[39marray([\u001b[38;5;241m3.0\u001b[39m, \u001b[38;5;241m0.05\u001b[39m])]\n", + "\u001b[0;31mNameError\u001b[0m: name 'xmesh' is not defined" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjgAAAGeCAYAAACZ2HuYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAqu0lEQVR4nO3dfXBUVZ7/8U8PIR3QpEUi3cSNJDAWD4XuSFKEZCrAbGkI+AA1bMmDZkbLpchSDoaUJSKzRRZrCbBTDOuGhxXRHXdcZGcgLn+wGeI4ZFnTAUIRQIhUOQaT0bQYDN1ZcRLA8/uDpX823QkEuSF9eL+qzh99+ntun3OKmf54+94blzHGCAAAwCLfu9kTAAAAuNEIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdRJu9gRuhm+++UafffaZkpOT5XK5bvZ0AADANTDGqKOjQ2lpafre965yjsb0gQ0bNpiMjAzjdrvNhAkTzH//93/3WL93714zYcIE43a7TWZmptm0aVO3tdu2bTOSzMyZM695Pi0tLUYSjUaj0Wi0OGwtLS1X/a53/AzO9u3bVVJSoo0bN+qHP/yh/uVf/kXTp0/XiRMndM8990TVNzU1acaMGVqwYIF+/etf6/3339eiRYt01113afbs2RG1n3zyiZ5//nnl5+f3ak7JycmSpJaWFqWkpFz/4gAAQJ8JhUJKT08Pf4/3xGWMs39sMycnRxMmTNCmTZvCfWPHjtWsWbNUXl4eVb906VLt2rVLjY2N4b7i4mIdOXJEfr8/3Hfx4kVNmTJFTz/9tPbt26ezZ8/qnXfeuaY5hUIheTweBYNBAg4AAHGiN9/fjl5k3NXVpUOHDqmgoCCiv6CgQLW1tTHH+P3+qPpp06apvr5e58+fD/etXLlSd911l5555pmrzqOzs1OhUCiiAQAAezkacNra2nTx4kV5vd6Ifq/Xq0AgEHNMIBCIWX/hwgW1tbVJkt5//31t3bpVW7ZsuaZ5lJeXy+PxhFt6evp1rAYAAMSLPrlN/Mo7lYwxPd69FKv+cn9HR4eefPJJbdmyRampqdf0+cuWLVMwGAy3lpaWXq4AAADEE0cvMk5NTdWAAQOiztacPn066izNZT6fL2Z9QkKChg4dquPHj+vUqVN69NFHw+9/8803kqSEhASdPHlSo0aNihjvdrvldrtvxJIAAEAccPQMTmJiorKyslRdXR3RX11drby8vJhjcnNzo+r37Nmj7OxsDRw4UGPGjNGxY8fU0NAQbo899ph+9KMfqaGhgZ+fAACA8w/6Ky0tVVFRkbKzs5Wbm6tXX31Vzc3NKi4ulnTp56NPP/1Ub775pqRLd0xVVFSotLRUCxYskN/v19atW7Vt2zZJUlJSksaPHx/xGXfccYckRfUDAIBbk+MBZ86cOTpz5oxWrlyp1tZWjR8/Xrt379aIESMkSa2trWpubg7XZ2Zmavfu3VqyZIk2bNigtLQ0vfLKK1HPwAEAAOiO48/B6Y94Dg4AAPGn3zwHBwAA4GYg4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6/RJwNm4caMyMzOVlJSkrKws7du3r8f6mpoaZWVlKSkpSSNHjtTmzZsj3t+5c6eys7N1xx136LbbbtMPfvAD/du//ZuTSwAAAHHE8YCzfft2lZSUaPny5Tp8+LDy8/M1ffp0NTc3x6xvamrSjBkzlJ+fr8OHD+ull17S4sWLtWPHjnDNnXfeqeXLl8vv9+vo0aN6+umn9fTTT+t3v/ud08sBAABxwGWMMU5+QE5OjiZMmKBNmzaF+8aOHatZs2apvLw8qn7p0qXatWuXGhsbw33FxcU6cuSI/H5/t58zYcIEPfzww3r55ZevOqdQKCSPx6NgMKiUlJRerggAANwMvfn+dvQMTldXlw4dOqSCgoKI/oKCAtXW1sYc4/f7o+qnTZum+vp6nT9/PqreGKPf//73OnnypCZPnhzzmJ2dnQqFQhENAADYy9GA09bWposXL8rr9Ub0e71eBQKBmGMCgUDM+gsXLqitrS3cFwwGdfvttysxMVEPP/yw/vmf/1kPPfRQzGOWl5fL4/GEW3p6+ndcGQAA6M/65CJjl8sV8doYE9V3tfor+5OTk9XQ0KCDBw/qH/7hH1RaWqq9e/fGPN6yZcsUDAbDraWl5TpXAgAA4kGCkwdPTU3VgAEDos7WnD59OuoszWU+ny9mfUJCgoYOHRru+973vqfvf//7kqQf/OAHamxsVHl5uaZOnRp1TLfbLbfb/R1XAwAA4oWjZ3ASExOVlZWl6urqiP7q6mrl5eXFHJObmxtVv2fPHmVnZ2vgwIHdfpYxRp2dnd990gAAIO45egZHkkpLS1VUVKTs7Gzl5ubq1VdfVXNzs4qLiyVd+vno008/1Ztvvinp0h1TFRUVKi0t1YIFC+T3+7V161Zt27YtfMzy8nJlZ2dr1KhR6urq0u7du/Xmm29G3KkFAABuXY4HnDlz5ujMmTNauXKlWltbNX78eO3evVsjRoyQJLW2tkY8EyczM1O7d+/WkiVLtGHDBqWlpemVV17R7NmzwzVfffWVFi1apD/96U8aNGiQxowZo1//+teaM2eO08sBAABxwPHn4PRHPAcHAID402+egwMAAHAzEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHX6JOBs3LhRmZmZSkpKUlZWlvbt29djfU1NjbKyspSUlKSRI0dq8+bNEe9v2bJF+fn5GjJkiIYMGaIHH3xQBw4ccHIJAAAgjjgecLZv366SkhItX75chw8fVn5+vqZPn67m5uaY9U1NTZoxY4by8/N1+PBhvfTSS1q8eLF27NgRrtm7d6/mzZunP/zhD/L7/brnnntUUFCgTz/91OnlAACAOOAyxhgnPyAnJ0cTJkzQpk2bwn1jx47VrFmzVF5eHlW/dOlS7dq1S42NjeG+4uJiHTlyRH6/P+ZnXLx4UUOGDFFFRYV+8pOfXHVOoVBIHo9HwWBQKSkp17EqAADQ13rz/e3oGZyuri4dOnRIBQUFEf0FBQWqra2NOcbv90fVT5s2TfX19Tp//nzMMefOnd
P58+d15513xny/s7NToVAoogEAAHs5GnDa2tp08eJFeb3eiH6v16tAIBBzTCAQiFl/4cIFtbW1xRzz4osv6u6779aDDz4Y8/3y8nJ5PJ5wS09Pv47VAACAeNEnFxm7XK6I18aYqL6r1cfql6S1a9dq27Zt2rlzp5KSkmIeb9myZQoGg+HW0tLS2yUAAIA4kuDkwVNTUzVgwICoszWnT5+OOktzmc/ni1mfkJCgoUOHRvT/4he/0KpVq/Tuu+/q/vvv73Yebrdbbrf7OlcBAADijaNncBITE5WVlaXq6uqI/urqauXl5cUck5ubG1W/Z88eZWdna+DAgeG+f/zHf9TLL7+sqqoqZWdn3/jJAwCAuOX4T1SlpaV67bXX9Prrr6uxsVFLlixRc3OziouLJV36+ejbdz4VFxfrk08+UWlpqRobG/X6669r69atev7558M1a9eu1c9//nO9/vrrysjIUCAQUCAQ0P/+7/86vRwAABAHHP2JSpLmzJmjM2fOaOXKlWptbdX48eO1e/dujRgxQpLU2toa8UyczMxM7d69W0uWLNGGDRuUlpamV155RbNnzw7XbNy4UV1dXfrrv/7riM9asWKFysrKnF4SAADo5xx/Dk5/xHNwAACIP/3mOTgAAAA3AwEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFinTwLOxo0blZmZqaSkJGVlZWnfvn091tfU1CgrK0tJSUkaOXKkNm/eHPH+8ePHNXv2bGVkZMjlcmn9+vUOzh4AAMQbxwPO9u3bVVJSouXLl+vw4cPKz8/X9OnT1dzcHLO+qalJM2bMUH5+vg4fPqyXXnpJixcv1o4dO8I1586d08iRI7V69Wr5fD6nlwAAAOKMyxhjnPyAnJwcTZgwQZs2bQr3jR07VrNmzVJ5eXlU/dKlS7Vr1y41NjaG+4qLi3XkyBH5/f6o+oyMDJWUlKikpOSa5xQKheTxeBQMBpWSktK7BQEAgJuiN9/fjp7B6erq0qFDh1RQUBDRX1BQoNra2phj/H5/VP20adNUX1+v8+fPX9c8Ojs7FQqFIhoAALCXowGnra1NFy9elNfrjej3er0KBAIxxwQCgZj1Fy5cUFtb23XNo7y8XB6PJ9zS09Ov6zgAACA+9MlFxi6XK+K1MSaq72r1sfqv1bJlyxQMBsOtpaXluo4DAADiQ4KTB09NTdWAAQOiztacPn066izNZT6fL2Z9QkKChg4del3zcLvdcrvd1zUWAADEH0fP4CQmJiorK0vV1dUR/dXV1crLy4s5Jjc3N6p+z549ys7O1sCBAx2bKwAAsIfjP1GVlpbqtdde0+uvv67GxkYtWbJEzc3NKi4ulnTp56Of/OQn4fri4mJ98sknKi0tVWNjo15//XVt3bpVzz//fLimq6tLDQ0NamhoUFdXlz799FM1NDToo48+cno5AAAgDjh+m7h06UF/a9euVWtrq8aPH69f/vKXmjx5siTpqaee0qlTp7R3795wfU1NjZYsWaLjx48rLS1NS5cuDQciSTp16pQyMzOjPmfKlCkRx+kOt4kDABB/evP93ScBp78h4AAAEH/6zXNwAAAAbgYCDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwTp8EnI0bNyozM1NJSUnKysrSvn37eqyvqalRVlaWkpKSNHLkSG3evDmqZseOHRo3bpzcbrfGjRunyspKp6YPAADijOMBZ/v27SopKdHy5ct1+PBh5efna/r06Wpubo5Z39TUpBkzZig/P1+HDx/WSy+9pMWLF2vHjh3hGr/frzlz5qioqEhHjhxRUVGRHn/8ce3fv9/p5QAAgDjgMsYYJz8gJydHEyZM0KZNm8J9Y8eO1axZs1ReXh5Vv3TpUu3atUuNjY3hvuLiYh05ckR+v1+SNGfOHIVCIf3Xf/1XuKawsFBDhgzRtm3brjqnUCgkj8ejYDColJSU77I8AADQR3rz/e3oGZyuri4dOnRIBQUFEf0FBQWqra2NOcbv90fVT5s2TfX19Tp//nyPNd0ds7OzU6FQKKIBAAB7ORpw2tradPHiRXm93oh+r9erQCAQc0wgEIhZf+HCBbW1tfVY090xy8vL5fF4wi09Pf16lwQAAOJAn1xk7HK5Il4bY6L6rlZ/ZX9vjrls2TIFg8Fwa2lp6dX8AQBAfElw8uCpqakaMGBA1JmV06dPR52Buczn88WsT0hI0NChQ3us6e6Ybrdbbrf7epcBAADijKNncBITE5WVlaXq6uqI/urqauXl5cUck5ubG1W/Z88eZWdna+DAgT3WdHdMAABwa3H0DI4klZaWqqioSNnZ2crNzdWrr76q5uZmFRcXS7r089Gnn36qN998U9KlO6YqKipUWlqqBQsWyO/3a+vWrRF3Rz333HOaPHmy1qxZo5kzZ+o///M/9e677+p//ud/nF4OAACIA44HnDlz5ujMmTNauXKlWltbNX78eO3evVsjRoyQJLW2tkY8EyczM1O7d+/WkiVLtGHDBqWlpemVV17R7NmzwzV5eXl6++239fOf/1x/93d/p1GjRmn79u3KyclxejkAACAOOP4cnP6I5+AAABB/+s1zcAAAAG4GAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsI6jAae9vV1FRUXyeDzyeDwqKirS2bNnexxjjFFZWZnS0tI0aNAgTZ06VcePH4+oefXVVzV16lSlpKTI5XJd9ZgAAODW4mjAmT9/vhoaGlRVVaWqqio1NDSoqKioxzFr167VunXrVFFRoYMHD8rn8+mhhx5SR0dHuObcuXMqLCzUSy+95OT0AQBAnHIZY4wTB25sbNS4ceNUV1ennJwcSVJdXZ1yc3P14YcfavTo0VFjjDFKS0tTSUmJli5dKknq7
OyU1+vVmjVrtHDhwoj6vXv36kc/+pHa29t1xx13XPPcQqGQPB6PgsGgUlJSrn+RAACgz/Tm+9uxMzh+v18ejyccbiRp0qRJ8ng8qq2tjTmmqalJgUBABQUF4T63260pU6Z0OwYAAOBKCU4dOBAIaNiwYVH9w4YNUyAQ6HaMJHm93oh+r9erTz755Lrn0tnZqc7OzvDrUCh03ccCAAD9X6/P4JSVlcnlcvXY6uvrJUkulytqvDEmZv+3Xfn+tYzpSXl5efhCZ4/Ho/T09Os+FgAA6P96fQbn2Wef1dy5c3usycjI0NGjR/X5559HvffFF19EnaG5zOfzSbp0Jmf48OHh/tOnT3c75losW7ZMpaWl4dehUIiQAwCAxXodcFJTU5WamnrVutzcXAWDQR04cEATJ06UJO3fv1/BYFB5eXkxx2RmZsrn86m6uloPPPCAJKmrq0s1NTVas2ZNb6ca5na75Xa7r3s8AACIL45dZDx27FgVFhZqwYIFqqurU11dnRYsWKBHHnkk4g6qMWPGqLKyUtKln6ZKSkq0atUqVVZW6oMPPtBTTz2lwYMHa/78+eExgUBADQ0N+uijjyRJx44dU0NDg7788kunlgMAAOKIYxcZS9Jbb72lxYsXh++Keuyxx1RRURFRc/LkSQWDwfDrF154QV9//bUWLVqk9vZ25eTkaM+ePUpOTg7XbN68WX//938ffj158mRJ0htvvKGnnnrKwRUBAIB44NhzcPoznoMDAED86RfPwQEAALhZCDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB1HA057e7uKiork8Xjk8XhUVFSks2fP9jjGGKOysjKlpaVp0KBBmjp1qo4fPx5+/8svv9TPfvYzjR49WoMHD9Y999yjxYsXKxgMOrkUAAAQRxwNOPPnz1dDQ4OqqqpUVVWlhoYGFRUV9Thm7dq1WrdunSoqKnTw4EH5fD499NBD6ujokCR99tln+uyzz/SLX/xCx44d07/+67+qqqpKzzzzjJNLAQAAccRljDFOHLixsVHjxo1TXV2dcnJyJEl1dXXKzc3Vhx9+qNGjR0eNMcYoLS1NJSUlWrp0qSSps7NTXq9Xa9as0cKFC2N+1m9+8xs9+eST+uqrr5SQkHDVuYVCIXk8HgWDQaWkpHyHVQIAgL7Sm+9vx87g+P1+eTyecLiRpEmTJsnj8ai2tjbmmKamJgUCARUUFIT73G63pkyZ0u0YSeGFdhduOjs7FQqFIhoAALCXYwEnEAho2LBhUf3Dhg1TIBDodowkeb3eiH6v19vtmDNnzujll1/u9uyOJJWXl4evA/J4PEpPT7/WZQAAgDjU64BTVlYml8vVY6uvr5ckuVyuqPHGmJj933bl+92NCYVCevjhhzVu3DitWLGi2+MtW7ZMwWAw3FpaWq5lqQAAIE5d/YKVKzz77LOaO3dujzUZGRk6evSoPv/886j3vvjii6gzNJf5fD5Jl87kDB8+PNx/+vTpqDEdHR0qLCzU7bffrsrKSg0cOLDb+bjdbrnd7h7nDAAA7NHrgJOamqrU1NSr1uXm5ioYDOrAgQOaOHGiJGn//v0KBoPKy8uLOSYzM1M+n0/V1dV64IEHJEldXV2qqanRmjVrwnWhUEjTpk2T2+3Wrl27lJSU1NtlAAAAizl2Dc7YsWNVWFioBQsWqK6uTnV1dVqwYIEeeeSRiDuoxowZo8rKSkmXfpoqKSnRqlWrVFlZqQ8++EBPPfWUBg8erPnz50u6dOamoKBAX331lbZu3apQKKRAIKBAIKCLFy86tRwAABBHen0GpzfeeustLV68OHxX1GOPPaaKioqImpMnT0Y8pO+FF17Q119/rUWLFqm9vV05OTnas2ePkpOTJUmHDh3S/v37JUnf//73I47V1NSkjIwMB1cEAADigWPPwenPeA4OAADxp188BwcAAOBmIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOs4GnDa29tVVFQkj8cjj8ejoqIinT17tscxxhiVlZUpLS1NgwYN0tSpU3X8+PGImoULF2rUqFEaNGiQ7rrrLs2cOVMffvihgysBAADxxNGAM3/+fDU0NKiqqkpVVVVqaGhQUVFRj2PWrl2rdevWqaKiQgcPHpTP59NDDz2kjo6OcE1WVpbeeOMNNTY26ne/+52MMSooKNDFixedXA4AAIgTLmOMceLAjY2NGjdunOrq6pSTkyNJqqurU25urj788EONHj06aowxRmlpaSopKdHSpUslSZ2dnfJ6vVqzZo0WLlwY87OOHj2qv/zLv9RHH32kUaNGXXVuoVBIHo9HwWBQKSkp32GVAACgr/Tm+9uxMzh+v18ejyccbiRp0qRJ8ng8qq2tjTmmqalJgUBABQUF4T63260pU6Z0O+arr77SG2+8oczMTKWnp8es6ezsVCgUimgAAMBejgWcQCCgYcOGRfUPGzZMgUCg2zGS5PV6I/q9Xm/UmI0bN+r222/X7bffrqqqKlVXVysxMTHmccvLy8PXAXk8nm6DEAAAsEOvA05ZWZlcLlePrb6+XpLkcrmixhtjYvZ/25XvxxrzxBNP6PDhw6qpqdG9996rxx9/XH/+859jHm/ZsmUKBoPh1tLS0pslAwCAOJPQ2wHPPvus5s6d22NNRkaGjh49qs8//zzqvS+++CLqDM1lPp9P0qUzOcOHDw/3nz59OmrM5bMx9957ryZNmqQhQ4aosrJS8+bNizqu2+2W2+2+6toAAIAdeh1wUlNTlZqaetW63NxcBYNBHThwQBMnTpQk7d+/X8FgUHl5eTHHZGZmyufzqbq6Wg888IAkqaurSzU1NVqzZk2Pn2eMUWdnZy9XAwAAbOTYNThjx45VYWGhFixYoLq6OtXV1WnBggV65JFHIu6gGjNmjCorKyVd+mmqpKREq1atUmVlpT744AM99dRTGjx4sObPny9J+vjjj1VeXq5Dhw6publZfr9fjz/+uAYNGqQZM2Y4tRwAABBHen0GpzfeeustLV68OHxX1GOPPaaKioqImpMnTyoYDIZfv/DCC/r666+1aNEitbe3KycnR3v27FFycrIkKSkpSfv27dP69evV3t4ur9er
yZMnq7a2NuZFzQAA4Nbj2HNw+jOegwMAQPzpF8/BAQAAuFkIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6BBwAAGAdAg4AALAOAQcAAFiHgAMAAKxDwAEAANYh4AAAAOsQcAAAgHUIOAAAwDoEHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAAQAA1iHgAAAA6xBwAACAdQg4AADAOo4GnPb2dhUVFcnj8cjj8aioqEhnz57tcYwxRmVlZUpLS9OgQYM0depUHT9+vNva6dOny+Vy6Z133rnxCwAAAHHJ0YAzf/58NTQ0qKqqSlVVVWpoaFBRUVGPY9auXat169apoqJCBw8elM/n00MPPaSOjo6o2vXr18vlcjk1fQAAEKcSnDpwY2OjqqqqVFdXp5ycHEnSli1blJubq5MnT2r06NFRY4wxWr9+vZYvX64f//jHkqRf/epX8nq9+vd//3ctXLgwXHvkyBGtW7dOBw8e1PDhw51aBgAAiEOOncHx+/3yeDzhcCNJkyZNksfjUW1tbcwxTU1NCgQCKigoCPe53W5NmTIlYsy5c+c0b948VVRUyOfzXXUunZ2dCoVCEQ0AANjLsYATCAQ0bNiwqP5hw4YpEAh0O0aSvF5vRL/X640Ys2TJEuXl5WnmzJnXNJfy8vLwdUAej0fp6enXugwAABCHeh1wysrK5HK5emz19fWSFPP6GGPMVa+bufL9b4/ZtWuX3nvvPa1fv/6a57xs2TIFg8Fwa2lpueaxAAAg/vT6Gpxnn31Wc+fO7bEmIyNDR48e1eeffx713hdffBF1huayyz83BQKBiOtqTp8+HR7z3nvv6Y9//KPuuOOOiLGzZ89Wfn6+9u7dG3Vct9stt9vd45wBAIA9eh1wUlNTlZqaetW63NxcBYNBHThwQBMnTpQk7d+/X8FgUHl5eTHHZGZmyufzqbq6Wg888IAkqaurSzU1NVqzZo0k6cUXX9Tf/M3fRIy777779Mtf/lKPPvroNa3BGCNJXIsDAEAcufy9ffl7vEfGQYWFheb+++83fr/f+P1+c99995lHHnkkomb06NFm586d4derV682Ho/H7Ny50xw7dszMmzfPDB8+3IRCoW4/R5KprKy85nm1tLQYSTQajUaj0eKwtbS0XPW73rHbxCXprbfe0uLFi8N3RT322GOqqKiIqDl58qSCwWD49QsvvKCvv/5aixYtUnt7u3JycrRnzx4lJyffsHmlpaWppaVFycnJPEdHlxJxenq6WlpalJKScrOnYy32uW+wz32Hve4b7PP/Z4xRR0eH0tLSrlrr+r8zILiFhUIheTweBYPBW/5/PE5in/sG+9x32Ou+wT5fH/4WFQAAsA4BBwAAWIeAA7ndbq1YsYJb6R3GPvcN9rnvsNd9g32+PlyDAwAArMMZHAAAYB0CDgAAsA4BBwAAWIeAAwAArEPAuQW0t7erqKhIHo9HHo9HRUVFOnv2bI9jjDEqKytTWlqaBg0apKlTp+r48ePd1k6fPl0ul0vvvPPOjV9AnHBin7/88kv97Gc/0+jRozV48GDdc889Wrx4ccTTv28FGzduVGZmppKSkpSVlaV9+/b1WF9TU6OsrCwlJSVp5MiR2rx5c1TNjh07NG7cOLndbo0bN06VlZVOTT9u3Oh93rJli/Lz8zVkyBANGTJEDz74oA4cOODkEuKCE/+eL3v77bflcrk0a9asGzzrOHTNf8AJcauwsNCMHz/e1NbWmtraWjN+/Piovwl2pdWrV5vk5GSzY8cOc+zYMTNnzpxu/ybYunXrzPTp043Uu78JZhsn9vnYsWPmxz/+sdm1a5f56KOPzO9//3tz7733mtmzZ/fFkvqFt99+2wwcONBs2bLFnDhxwjz33HPmtttuM5988knM+o8//tgMHjzYPPfcc+bEiRNmy5YtZuDAgea3v/1tuKa2ttYMGDDArFq1yjQ2NppVq1aZhIQEU1dX11fL6nec2Of58+ebDRs2mMOHD5vGxkbz9NNPG4/HY/70pz/11bL6HSf2+bJTp06Zu+++2+Tn55uZM2c6vJL+j4BjuRMnThhJEf/H7ff7jSTz4YcfxhzzzTffGJ/PZ1avXh3u+/Of/2w8Ho/ZvHlzRG1DQ4P5i7/4C9Pa2npLBxyn9/nb/uM//sMkJiaa8+fP37gF9GMTJ040xcXFEX1jxowxL774Ysz6F154wYwZMyaib+HChWbSpEnh148//rgpLCyMqJk2bZqZO3fuDZp1/HFin6904cIFk5ycbH71q1999wnHKaf2+cKFC+aHP/yhee2118xPf/pTAo4xhp+oLOf3++XxeJSTkxPumzRpkjwej2pra2OOaWpqUiAQCP+RVOnSg6amTJkSMebcuXOaN2+eKioq5PP5nFtEHHByn690+e/RJCQ4+rdy+4Wuri4dOnQoYo8kqaCgoNs98vv9UfXTpk1TfX29zp8/32NNT/tuM6f2+Urnzp3T+fPndeedd96YiccZJ/d55cqVuuuuu/TMM8/c+InHKQKO5QKBgIYNGxbVP2zYMAUCgW7HSJLX643o93q9EWOWLFmivLw8zZw58wbOOD45uc/fdubMGb388stauHDhd5xxfGhra9PFixd7tUeBQCBm/YULF9TW1tZjTXfHtJ1T+3ylF198UXfffbcefPDBGzPxOOPUPr///vvaunWrtmzZ4szE4xQBJ06VlZXJ5XL12Orr6yVJLpcrarwxJmb/t135/rfH7Nq1S++9957Wr19/YxbUT93sff62UCikhx9+WOPGjdOKFSu+w6riz7XuUU/1V/b39pi3Aif2+bK1a9dq27Zt2rlzp5KSkm7AbOPXjdznjo4OPfnkk9qyZYtSU1Nv/GTjmP3nuC317LPPau7cuT3WZGRk6OjRo/r888+j3vviiy+i/qvgsss/NwUCAQ0fPjzcf/r06fCY9957T3/84x91xx13RIydPXu28vPztXfv3l6spv+62ft8WUdHhwoLC3X77bersrJSAwcO7O1S4lJqaqoGDBgQ9V+3sfboMp/PF7M+ISFBQ4cO7bGmu2Pazql9vuwXv/iFVq1apXfffVf333//jZ18HHFin48fP65Tp07p0UcfDb//zTffSJISEhJ08uRJjRo16gavJE7cpGt/0EcuX/y6f//+cF9dXd01Xfy6Zs2acF9nZ2fExa+tra3m2LFjEU2S+ad/+ifz8ccfO7uofsipfTbGmGAwaCZNmmSmTJlivvrqK+cW0U9NnDjR/O3f/m1E39ixY3u8KHPs2LERfcXFxVEXGU+fPj2iprCw8Ja/yPhG77Mxxqxdu9akpKQYv99/Yyccp270Pn/99ddR/188c+ZM81d/9Vfm2LFjprOz05mFxAECzi2gsLDQ3H///cbv9xu/32/uu+++qNuXR48ebXbu3Bl+vXr1auPxeMzOnTvNsWPHzLx587q9Tfwy3cJ3URnjzD6HQiG
Tk5Nj7rvvPvPRRx+Z1tbWcLtw4UKfru9muXxb7datW82JEydMSUmJue2228ypU6eMMca8+OKLpqioKFx/+bbaJUuWmBMnTpitW7dG3Vb7/vvvmwEDBpjVq1ebxsZGs3r1am4Td2Cf16xZYxITE81vf/vbiH+7HR0dfb6+/sKJfb4Sd1FdQsC5BZw5c8Y88cQTJjk52SQnJ5snnnjCtLe3R9RIMm+88Ub49TfffGNWrFhhfD6fcbvdZvLkyebYsWM9fs6tHnCc2Oc//OEPRlLM1tTU1DcL6wc2bNhgRowYYRITE82ECRNMTU1N+L2f/vSnZsqUKRH1e/fuNQ888IBJTEw0GRkZZtOmTVHH/M1vfmNGjx5tBg4caMaMGWN27Njh9DL6vRu9zyNGjIj5b3fFihV9sJr+y4l/z99GwLnEZcz/Xa0EAABgCe6iAgAA1iHgAAAA6xBwAACAdQg4AADAOgQcAABgHQIOAACwDgEHAABYh4ADAACsQ8ABAADWIeAAAADrEHAAAIB1CDgAAMA6/w/Tmgx13LfePQAAAABJRU5ErkJggg==", + "text/plain": [ + "
    " + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "pt.axis(\"equal\")\n", + "pt.contour(xmesh, ymesh, fmesh)\n", + "guesses = [np.array([3.0, 0.05])]" + ] + }, + { + "cell_type": "markdown", + "id": "cc812942", + "metadata": {}, + "source": [ + "Find guesses" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "d0ec2880", + "metadata": {}, + "outputs": [], + "source": [ + "x = guesses[-1]\n", + "s = -df(x)" + ] + }, + { + "cell_type": "markdown", + "id": "5b051e83", + "metadata": {}, + "source": [ + "Run it!" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "3c08b684", + "metadata": {}, + "outputs": [], + "source": [ + "def f1d(alpha):\n", + " return f(x + alpha*s)\n", + "\n", + "alpha_opt = sopt.golden(f1d)\n", + "next_guess = x + alpha_opt * s\n", + "guesses.append(next_guess)\n", + "print(next_guess)" + ] + }, + { + "cell_type": "markdown", + "id": "cbcaf40b", + "metadata": {}, + "source": [ + "What happened?" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "7f186b8a", + "metadata": {}, + "outputs": [], + "source": [ + "pt.axis(\"equal\")\n", + "pt.contour(xmesh, ymesh, fmesh, 50)\n", + "it_array = np.array(guesses)\n", + "pt.plot(it_array.T[0], it_array.T[1], \"x-\")" + ] + }, + { + "cell_type": "markdown", + "id": "5fff7daf", + "metadata": {}, + "source": [ + "## Conjugate gradient method\n", + "In the CG method we define so-called conjugate directions and two vectors \n", + "$\\hat{s}$ and $\\hat{t}$\n", + "are said to be\n", + "conjugate if" + ] + }, + { + "cell_type": "markdown", + "id": "a9d17d2d", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{s}^T\\hat{A}\\hat{t}= 0.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "67ce3688", + "metadata": {}, + "source": [ + "The philosophy of the CG method is to perform searches in various conjugate directions\n", + "of our vectors $\\hat{x}_i$ obeying the above criterion, namely" + ] + }, + { + "cell_type": "markdown", + "id": "a0fbafc1", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{x}_i^T\\hat{A}\\hat{x}_j= 0.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "c52db4ba", + "metadata": {}, + "source": [ + "Two vectors are conjugate if they are orthogonal with respect to \n", + "this inner product. Being conjugate is a symmetric relation: if $\\hat{s}$ is conjugate to $\\hat{t}$, then $\\hat{t}$ is conjugate to $\\hat{s}$." + ] + }, + { + "cell_type": "markdown", + "id": "01dd8f76", + "metadata": {}, + "source": [ + "## Conjugate gradient method\n", + "An example is given by the eigenvectors of the matrix" + ] + }, + { + "cell_type": "markdown", + "id": "69b68e6a", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{v}_i^T\\hat{A}\\hat{v}_j= \\lambda\\hat{v}_i^T\\hat{v}_j,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "28c861ca", + "metadata": {}, + "source": [ + "which is zero unless $i=j$." + ] + }, + { + "cell_type": "markdown", + "id": "a6ec3aed", + "metadata": {}, + "source": [ + "## Conjugate gradient method\n", + "Assume now that we have a symmetric positive-definite matrix $\\hat{A}$ of size\n", + "$n\\times n$. 
At each iteration $i+1$ we obtain the conjugate direction of a vector" + ] + }, + { + "cell_type": "markdown", + "id": "10fee31a", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{x}_{i+1}=\\hat{x}_{i}+\\alpha_i\\hat{p}_{i}.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "eeb65674", + "metadata": {}, + "source": [ + "We assume that $\\hat{p}_{i}$ is a sequence of $n$ mutually conjugate directions. \n", + "Then the $\\hat{p}_{i}$ form a basis of $R^n$ and we can expand the solution \n", + "$ \\hat{A}\\hat{x} = \\hat{b}$ in this basis, namely" + ] + }, + { + "cell_type": "markdown", + "id": "acfb574c", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{x} = \\sum^{n}_{i=1} \\alpha_i \\hat{p}_i.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "cad3ff45", + "metadata": {}, + "source": [ + "## Conjugate gradient method\n", + "The coefficients are given by" + ] + }, + { + "cell_type": "markdown", + "id": "c91cb637", + "metadata": {}, + "source": [ + "$$\n", + "\\mathbf{A}\\mathbf{x} = \\sum^{n}_{i=1} \\alpha_i \\mathbf{A} \\mathbf{p}_i = \\mathbf{b}.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "56866152", + "metadata": {}, + "source": [ + "Multiplying with $\\hat{p}_k^T$ from the left gives" + ] + }, + { + "cell_type": "markdown", + "id": "dc885f5c", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{p}_k^T \\hat{A}\\hat{x} = \\sum^{n}_{i=1} \\alpha_i\\hat{p}_k^T \\hat{A}\\hat{p}_i= \\hat{p}_k^T \\hat{b},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "ecbda348", + "metadata": {}, + "source": [ + "and we can define the coefficients $\\alpha_k$ as" + ] + }, + { + "cell_type": "markdown", + "id": "1cb56c1e", + "metadata": {}, + "source": [ + "$$\n", + "\\alpha_k = \\frac{\\hat{p}_k^T \\hat{b}}{\\hat{p}_k^T \\hat{A} \\hat{p}_k}\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "0e113c26", + "metadata": {}, + "source": [ + "## Conjugate gradient method and iterations\n", + "\n", + "If we choose the conjugate vectors $\\hat{p}_k$ carefully, \n", + "then we may not need all of them to obtain a good approximation to the solution \n", + "$\\hat{x}$. \n", + "We want to regard the conjugate gradient method as an iterative method. \n", + "This will us to solve systems where $n$ is so large that the direct \n", + "method would take too much time.\n", + "\n", + "We denote the initial guess for $\\hat{x}$ as $\\hat{x}_0$. \n", + "We can assume without loss of generality that" + ] + }, + { + "cell_type": "markdown", + "id": "2a417239", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{x}_0=0,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "f8eca051", + "metadata": {}, + "source": [ + "or consider the system" + ] + }, + { + "cell_type": "markdown", + "id": "f301cd17", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{A}\\hat{z} = \\hat{b}-\\hat{A}\\hat{x}_0,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "3ce80544", + "metadata": {}, + "source": [ + "instead." 
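+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The next cells derive the conjugate-gradient recursions; as an illustrative sketch only, the cell below collects them in NumPy form, written like the C++ routine shown further below (with $\\beta_k=\\hat{r}_{k+1}^T\\hat{r}_{k+1}/\\hat{r}_k^T\\hat{r}_k$ for the new search direction) and applied to the $2\\times 2$ example from earlier."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "\n",
+    "def conjugate_gradient(A, b, x0, tol=1.0e-12):\n",
+    "    # CG for symmetric positive-definite A; at most n steps in exact arithmetic\n",
+    "    x = np.array(x0, dtype=float)\n",
+    "    r = b - A @ x\n",
+    "    p = r.copy()\n",
+    "    c = r @ r\n",
+    "    if np.sqrt(c) < tol:\n",
+    "        return x\n",
+    "    for k in range(len(b)):\n",
+    "        z = A @ p\n",
+    "        alpha = c / (p @ z)\n",
+    "        x = x + alpha*p\n",
+    "        r = r - alpha*z\n",
+    "        d = r @ r\n",
+    "        if np.sqrt(d) < tol:\n",
+    "            break\n",
+    "        p = r + (d/c)*p\n",
+    "        c = d\n",
+    "    return x\n",
+    "\n",
+    "A = np.array([[2.0, 1.0], [1.0, 20.0]])\n",
+    "b = np.array([5.0, 3.0])\n",
+    "print(conjugate_gradient(A, b, np.zeros(2)))  # close to [97/39, 1/39]"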
+ ] + }, + { + "cell_type": "markdown", + "id": "06d430ae", + "metadata": {}, + "source": [ + "## Conjugate gradient method\n", + "One can show that the solution $\\hat{x}$ is also the unique minimizer of the quadratic form" + ] + }, + { + "cell_type": "markdown", + "id": "df4c262c", + "metadata": {}, + "source": [ + "$$\n", + "f(\\hat{x}) = \\frac{1}{2}\\hat{x}^T\\hat{A}\\hat{x} - \\hat{x}^T \\hat{x} , \\quad \\hat{x}\\in\\mathbf{R}^n.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "70ca1a9d", + "metadata": {}, + "source": [ + "This suggests taking the first basis vector $\\hat{p}_1$ \n", + "to be the gradient of $f$ at $\\hat{x}=\\hat{x}_0$, \n", + "which equals" + ] + }, + { + "cell_type": "markdown", + "id": "2a80836b", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{A}\\hat{x}_0-\\hat{b},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "4546d9d3", + "metadata": {}, + "source": [ + "and \n", + "$\\hat{x}_0=0$ it is equal $-\\hat{b}$.\n", + "The other vectors in the basis will be conjugate to the gradient, \n", + "hence the name conjugate gradient method." + ] + }, + { + "cell_type": "markdown", + "id": "18e13ae9", + "metadata": {}, + "source": [ + "## Conjugate gradient method\n", + "Let $\\hat{r}_k$ be the residual at the $k$-th step:" + ] + }, + { + "cell_type": "markdown", + "id": "877633e0", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{r}_k=\\hat{b}-\\hat{A}\\hat{x}_k.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "b5a1975d", + "metadata": {}, + "source": [ + "Note that $\\hat{r}_k$ is the negative gradient of $f$ at \n", + "$\\hat{x}=\\hat{x}_k$, \n", + "so the gradient descent method would be to move in the direction $\\hat{r}_k$. \n", + "Here, we insist that the directions $\\hat{p}_k$ are conjugate to each other, \n", + "so we take the direction closest to the gradient $\\hat{r}_k$ \n", + "under the conjugacy constraint. 
\n", + "This gives the following expression" + ] + }, + { + "cell_type": "markdown", + "id": "d4a13896", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{p}_{k+1}=\\hat{r}_k-\\frac{\\hat{p}_k^T \\hat{A}\\hat{r}_k}{\\hat{p}_k^T\\hat{A}\\hat{p}_k} \\hat{p}_k.\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "2dbb6628", + "metadata": {}, + "source": [ + "## Conjugate gradient method\n", + "We can also compute the residual iteratively as" + ] + }, + { + "cell_type": "markdown", + "id": "85875f49", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{r}_{k+1}=\\hat{b}-\\hat{A}\\hat{x}_{k+1},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "9efe856c", + "metadata": {}, + "source": [ + "which equals" + ] + }, + { + "cell_type": "markdown", + "id": "470484e3", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{b}-\\hat{A}(\\hat{x}_k+\\alpha_k\\hat{p}_k),\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "049c6212", + "metadata": {}, + "source": [ + "or" + ] + }, + { + "cell_type": "markdown", + "id": "09e38b0d", + "metadata": {}, + "source": [ + "$$\n", + "(\\hat{b}-\\hat{A}\\hat{x}_k)-\\alpha_k\\hat{A}\\hat{p}_k,\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "ffa86d35", + "metadata": {}, + "source": [ + "which gives" + ] + }, + { + "cell_type": "markdown", + "id": "72841aae", + "metadata": {}, + "source": [ + "$$\n", + "\\hat{r}_{k+1}=\\hat{r}_k-\\hat{A}\\hat{p}_{k},\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "88f3cf38", + "metadata": {}, + "source": [ + "## Simple implementation of the Conjugate gradient algorithm" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "213b6a91", + "metadata": {}, + "outputs": [], + "source": [ + " Vector ConjugateGradient(Matrix A, Vector b, Vector x0){\n", + " int dim = x0.Dimension();\n", + " const double tolerance = 1.0e-14;\n", + " Vector x(dim),r(dim),v(dim),z(dim);\n", + " double c,t,d;\n", + "\n", + " x = x0;\n", + " r = b - A*x;\n", + " v = r;\n", + " c = dot(r,r);\n", + " int i = 0; IterMax = dim;\n", + " while(i <= IterMax){\n", + " z = A*v;\n", + " t = c/dot(v,z);\n", + " x = x + t*v;\n", + " r = r - t*z;\n", + " d = dot(r,r);\n", + " if(sqrt(d) < tolerance)\n", + " break;\n", + " v = r + (d/c)*v;\n", + " c = d; i++;\n", + " }\n", + " return x;\n", + "}" + ] + }, + { + "cell_type": "markdown", + "id": "d2847fc8", + "metadata": {}, + "source": [ + "## Broyden–Fletcher–Goldfarb–Shanno algorithm\n", + "The optimization problem is to minimize $f(\\mathbf {x} )$ where $\\mathbf {x}$ is a vector in $R^{n}$, and $f$ is a differentiable scalar function. There are no constraints on the values that $\\mathbf {x}$ can take.\n", + "\n", + "The algorithm begins at an initial estimate for the optimal value $\\mathbf {x}_{0}$ and proceeds iteratively to get a better estimate at each stage.\n", + "\n", + "The search direction $p_k$ at stage $k$ is given by the solution of the analogue of the Newton equation" + ] + }, + { + "cell_type": "markdown", + "id": "46f17a0b", + "metadata": {}, + "source": [ + "$$\n", + "B_{k}\\mathbf {p} _{k}=-\\nabla f(\\mathbf {x}_{k}),\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "480f6bf1", + "metadata": {}, + "source": [ + "where $B_{k}$ is an approximation to the Hessian matrix, which is\n", + "updated iteratively at each stage, and $\\nabla f(\\mathbf {x} _{k})$\n", + "is the gradient of the function\n", + "evaluated at $x_k$. 
\n", + "A line search in the direction $p_k$ is then used to\n", + "find the next point $x_{k+1}$ by minimising" + ] + }, + { + "cell_type": "markdown", + "id": "45a18714", + "metadata": {}, + "source": [ + "$$\n", + "f(\\mathbf {x}_{k}+\\alpha \\mathbf {p}_{k}),\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "2e64b382", + "metadata": {}, + "source": [ + "over the scalar $\\alpha > 0$." + ] + }, + { + "cell_type": "markdown", + "id": "47d2904d", + "metadata": {}, + "source": [ + "## Stochastic Gradient Descent\n", + "\n", + "Stochastic gradient descent (SGD) and variants thereof address some of\n", + "the shortcomings of the Gradient descent method discussed above.\n", + "\n", + "The underlying idea of SGD comes from the observation that a given \n", + "function, which we want to minimize, can almost always be written as a\n", + "sum over $n$ data points $\\{\\mathbf{x}_i\\}_{i=1}^n$," + ] + }, + { + "cell_type": "markdown", + "id": "a6549f0b", + "metadata": {}, + "source": [ + "$$\n", + "C(\\mathbf{\\beta}) = \\sum_{i=1}^n c_i(\\mathbf{x}_i,\n", + "\\mathbf{\\beta}).\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "e0679ec8", + "metadata": {}, + "source": [ + "## Computation of gradients\n", + "\n", + "This in turn means that the gradient can be\n", + "computed as a sum over $i$-gradients" + ] + }, + { + "cell_type": "markdown", + "id": "138d5b06", + "metadata": {}, + "source": [ + "$$\n", + "\\nabla_\\beta C(\\mathbf{\\beta}) = \\sum_i^n \\nabla_\\beta c_i(\\mathbf{x}_i,\n", + "\\mathbf{\\beta}).\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "058943d0", + "metadata": {}, + "source": [ + "Stochasticity/randomness is introduced by only taking the\n", + "gradient on a subset of the data called minibatches. If there are $n$\n", + "data points and the size of each minibatch is $M$, there will be $n/M$\n", + "minibatches. We denote these minibatches by $B_k$ where\n", + "$k=1,\\cdots,n/M$." + ] + }, + { + "cell_type": "markdown", + "id": "66cb46b8", + "metadata": {}, + "source": [ + "## SGD example\n", + "As an example, suppose we have $10$ data points $(\\mathbf{x}_1,\\cdots, \\mathbf{x}_{10})$ \n", + "and we choose to have $M=5$ minibathces,\n", + "then each minibatch contains two data points. In particular we have\n", + "$B_1 = (\\mathbf{x}_1,\\mathbf{x}_2), \\cdots, B_5 =\n", + "(\\mathbf{x}_9,\\mathbf{x}_{10})$. 
Note that if you choose $M=1$ you\n", + "have only a single batch with all data points and on the other extreme,\n", + "you may choose $M=n$ resulting in a minibatch for each datapoint, i.e\n", + "$B_k = \\mathbf{x}_k$.\n", + "\n", + "The idea is now to approximate the gradient by replacing the sum over\n", + "all data points with a sum over the data points in one the minibatches\n", + "picked at random in each gradient descent step" + ] + }, + { + "cell_type": "markdown", + "id": "37f5c873", + "metadata": {}, + "source": [ + "$$\n", + "\\nabla_{\\beta}\n", + "C(\\mathbf{\\beta}) = \\sum_{i=1}^n \\nabla_\\beta c_i(\\mathbf{x}_i,\n", + "\\mathbf{\\beta}) \\rightarrow \\sum_{i \\in B_k}^n \\nabla_\\beta\n", + "c_i(\\mathbf{x}_i, \\mathbf{\\beta}).\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "28417f9f", + "metadata": {}, + "source": [ + "## The gradient step\n", + "\n", + "Thus a gradient descent step now looks like" + ] + }, + { + "cell_type": "markdown", + "id": "1371ee83", + "metadata": {}, + "source": [ + "$$\n", + "\\beta_{j+1} = \\beta_j - \\gamma_j \\sum_{i \\in B_k}^n \\nabla_\\beta c_i(\\mathbf{x}_i,\n", + "\\mathbf{\\beta})\n", + "$$" + ] + }, + { + "cell_type": "markdown", + "id": "c57a595a", + "metadata": {}, + "source": [ + "where $k$ is picked at random with equal\n", + "probability from $[1,n/M]$. An iteration over the number of\n", + "minibathces (n/M) is commonly referred to as an epoch. Thus it is\n", + "typical to choose a number of epochs and for each epoch iterate over\n", + "the number of minibatches, as exemplified in the code below." + ] + }, + { + "cell_type": "markdown", + "id": "bb4a5d25", + "metadata": {}, + "source": [ + "## Simple example code" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "af5d1798", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np \n", + "\n", + "n = 100 #100 datapoints \n", + "M = 5 #size of each minibatch\n", + "m = int(n/M) #number of minibatches\n", + "n_epochs = 10 #number of epochs\n", + "\n", + "j = 0\n", + "for epoch in range(1,n_epochs+1):\n", + " for i in range(m):\n", + " k = np.random.randint(m) #Pick the k-th minibatch at random\n", + " #Compute the gradient using the data in minibatch Bk\n", + " #Compute new suggestion for \n", + " j += 1" + ] + }, + { + "cell_type": "markdown", + "id": "b0545474", + "metadata": {}, + "source": [ + "Taking the gradient only on a subset of the data has two important\n", + "benefits. First, it introduces randomness which decreases the chance\n", + "that our opmization scheme gets stuck in a local minima. Second, if\n", + "the size of the minibatches are small relative to the number of\n", + "datapoints ($M < n$), the computation of the gradient is much\n", + "cheaper since we sum over the datapoints in the $k-th$ minibatch and not\n", + "all $n$ datapoints." + ] + }, + { + "cell_type": "markdown", + "id": "c8d6d0cb", + "metadata": {}, + "source": [ + "## When do we stop?\n", + "\n", + "A natural question is when do we stop the search for a new minimum?\n", + "One possibility is to compute the full gradient after a given number\n", + "of epochs and check if the norm of the gradient is smaller than some\n", + "threshold and stop if true. However, the condition that the gradient\n", + "is zero is valid also for local minima, so this would only tell us\n", + "that we are close to a local/global minimum. However, we could also\n", + "evaluate the cost function at this point, store the result and\n", + "continue the search. 
If the test kicks in at a later stage we can\n", + "compare the values of the cost function and keep the $\\beta$ that\n", + "gave the lowest value." + ] + }, + { + "cell_type": "markdown", + "id": "b333b841", + "metadata": {}, + "source": [ + "## Slightly different approach\n", + "\n", + "Another approach is to let the step length $\\gamma_j$ depend on the\n", + "number of epochs in such a way that it becomes very small after a\n", + "reasonable time such that we do not move at all.\n", + "\n", + "As an example, let $e = 0,1,2,3,\\cdots$ denote the current epoch and let $t_0, t_1 > 0$ be two fixed numbers. Furthermore, let $t = e \\cdot m + i$ where $m$ is the number of minibatches and $i=0,\\cdots,m-1$. Then the function $$\\gamma_j(t; t_0, t_1) = \\frac{t_0}{t+t_1} $$ goes to zero as the number of epochs gets large. I.e. we start with a step length $\\gamma_j (0; t_0, t_1) = t_0/t_1$ which decays in *time* $t$.\n", + "\n", + "In this way we can fix the number of epochs, compute $\\beta$ and\n", + "evaluate the cost function at the end. Repeating the computation will\n", + "give a different result since the scheme is random by design. Then we\n", + "pick the final $\\beta$ that gives the lowest value of the cost\n", + "function." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "1c55be00", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np \n", + "\n", + "def step_length(t,t0,t1):\n", + " return t0/(t+t1)\n", + "\n", + "n = 100 #100 datapoints \n", + "M = 5 #size of each minibatch\n", + "m = int(n/M) #number of minibatches\n", + "n_epochs = 500 #number of epochs\n", + "t0 = 1.0\n", + "t1 = 10\n", + "\n", + "gamma_j = t0/t1\n", + "j = 0\n", + "for epoch in range(1,n_epochs+1):\n", + " for i in range(m):\n", + " k = np.random.randint(m) #Pick the k-th minibatch at random\n", + " #Compute the gradient using the data in minibatch Bk\n", + " #Compute new suggestion for beta\n", + " t = epoch*m+i\n", + " gamma_j = step_length(t,t0,t1)\n", + " j += 1\n", + "\n", + "print(\"gamma_j after %d epochs: %g\" % (n_epochs,gamma_j))" + ] + }, + { + "cell_type": "markdown", + "id": "009e6326", + "metadata": {}, + "source": [ + "## Program for stochastic gradient" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "67b4dc09", + "metadata": {}, + "outputs": [], + "source": [ + "# Importing various packages\n", + "from math import exp, sqrt\n", + "from random import random, seed\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from sklearn.linear_model import SGDRegressor\n", + "\n", + "x = 2*np.random.rand(100,1)\n", + "y = 4+3*x+np.random.randn(100,1)\n", + "\n", + "xb = np.c_[np.ones((100,1)), x]\n", + "theta_linreg = np.linalg.inv(xb.T.dot(xb)).dot(xb.T).dot(y)\n", + "print(\"Own inversion\")\n", + "print(theta_linreg)\n", + "sgdreg = SGDRegressor(n_iter = 50, penalty=None, eta0=0.1)\n", + "sgdreg.fit(x,y.ravel())\n", + "print(\"sgdreg from scikit\")\n", + "print(sgdreg.intercept_, sgdreg.coef_)\n", + "\n", + "\n", + "theta = np.random.randn(2,1)\n", + "\n", + "eta = 0.1\n", + "Niterations = 1000\n", + "m = 100\n", + "\n", + "for iter in range(Niterations):\n", + " gradients = 2.0/m*xb.T.dot(xb.dot(theta)-y)\n", + " theta -= eta*gradients\n", + "print(\"theta frm own gd\")\n", + "print(theta)\n", + "\n", + "xnew = np.array([[0],[2]])\n", + "xbnew = np.c_[np.ones((2,1)), xnew]\n", + "ypredict = xbnew.dot(theta)\n", + "ypredict2 = xbnew.dot(theta_linreg)\n", + "\n", + "\n", + "n_epochs = 50\n", + "t0, t1 = 5, 50\n", + "m = 
100\n", + "def learning_schedule(t):\n", + " return t0/(t+t1)\n", + "\n", + "theta = np.random.randn(2,1)\n", + "\n", + "for epoch in range(n_epochs):\n", + " for i in range(m):\n", + " random_index = np.random.randint(m)\n", + " xi = xb[random_index:random_index+1]\n", + " yi = y[random_index:random_index+1]\n", + " gradients = 2 * xi.T.dot(xi.dot(theta)-yi)\n", + " eta = learning_schedule(epoch*m+i)\n", + " theta = theta - eta*gradients\n", + "print(\"theta from own sdg\")\n", + "print(theta)\n", + "\n", + "\n", + "plt.plot(xnew, ypredict, \"r-\")\n", + "plt.plot(xnew, ypredict2, \"b-\")\n", + "plt.plot(x, y ,'ro')\n", + "plt.axis([0,2.0,0, 15.0])\n", + "plt.xlabel(r'$x$')\n", + "plt.ylabel(r'$y$')\n", + "plt.title(r'Random numbers ')\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "5de3c7a3", + "metadata": {}, + "source": [ + "## Using gradient descent methods, limitations\n", + "\n", + "* **Gradient descent (GD) finds local minima of our function**. Since the GD algorithm is deterministic, if it converges, it will converge to a local minimum of our energy function. Because in ML we are often dealing with extremely rugged landscapes with many local minima, this can lead to poor performance.\n", + "\n", + "* **GD is sensitive to initial conditions**. One consequence of the local nature of GD is that initial conditions matter. Depending on where one starts, one will end up at a different local minima. Therefore, it is very important to think about how one initializes the training process. This is true for GD as well as more complicated variants of GD.\n", + "\n", + "* **Gradients are computationally expensive to calculate for large datasets**. In many cases in statistics and ML, the energy function is a sum of terms, with one term for each data point. For example, in linear regression, $E \\propto \\sum_{i=1}^n (y_i - \\mathbf{w}^T\\cdot\\mathbf{x}_i)^2$; for logistic regression, the square error is replaced by the cross entropy. To calculate the gradient we have to sum over *all* $n$ data points. Doing this at every GD step becomes extremely computationally expensive. An ingenious solution to this, is to calculate the gradients using small subsets of the data called \"mini batches\". This has the added benefit of introducing stochasticity into our algorithm.\n", + "\n", + "* **GD is very sensitive to choices of learning rates**. GD is extremely sensitive to the choice of learning rates. If the learning rate is very small, the training process take an extremely long time. For larger learning rates, GD can diverge and give poor results. Furthermore, depending on what the local landscape looks like, we have to modify the learning rates to ensure convergence. Ideally, we would *adaptively* choose the learning rates to match the landscape.\n", + "\n", + "* **GD treats all directions in parameter space uniformly.** Another major drawback of GD is that unlike Newton's method, the learning rate for GD is the same in all directions in parameter space. For this reason, the maximum learning rate is set by the behavior of the steepest direction and this can significantly slow down training. Ideally, we would like to take large steps in flat directions and small steps in steep directions. Since we are exploring rugged landscapes where curvatures change, this requires us to keep track of not only the gradient but second derivatives. The ideal scenario would be to calculate the Hessian but this proves to be too computationally expensive. 
\n", + "\n", + "* GD can take exponential time to escape saddle points, even with random initialization. As we mentioned, GD is extremely sensitive to initial condition since it determines the particular local minimum GD would eventually reach. However, even with a good initialization scheme, through the introduction of randomness, GD can still take exponential time to escape saddle points." + ] + }, + { + "cell_type": "markdown", + "id": "80f0f66a", + "metadata": {}, + "source": [ + "## Codes from numerical recipes\n", + "You can however use codes we have adapted from the text [Numerical Recipes in C++](http://www.nr.com/), see chapter 10.7. \n", + "Here we present a program, which you also can find at the webpage of the course we use the functions **dfpmin** and **lnsrch**. This is a variant of the Broyden et al algorithm discussed in the previous slide.\n", + "\n", + "* The program uses the harmonic oscillator in one dimensions as example.\n", + "\n", + "* The program does not use armadillo to handle vectors and matrices, but employs rather my own vector-matrix class. These auxiliary functions, and the main program *model.cpp* can all be found under the [program link here](https://github.com/CompPhysics/ComputationalPhysics2/tree/gh-pages/doc/pub/cg/programs/c%2B%2B).\n", + "\n", + "Below we show only excerpts from the main program. For the full program, see the above link." + ] + }, + { + "cell_type": "markdown", + "id": "f38903db", + "metadata": {}, + "source": [ + "## Finding the minimum of the harmonic oscillator model in one dimension" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "67377884", + "metadata": {}, + "outputs": [], + "source": [ + "// Main function begins here\n", + "int main()\n", + "{\n", + " int n, iter;\n", + " double gtol, fret;\n", + " double alpha;\n", + " n = 1;\n", + "// reserve space in memory for vectors containing the variational\n", + "// parameters\n", + " Vector g(n), p(n);\n", + " cout << \"Read in guess for alpha\" << endl;\n", + " cin >> alpha;\n", + " gtol = 1.0e-5;\n", + "// now call dfmin and compute the minimum\n", + " p(0) = alpha;\n", + " dfpmin(p, n, gtol, &iter, &fret, Efunction, dEfunction);\n", + " cout << \"Value of energy minimum = \" << fret << endl;\n", + " cout << \"Number of iterations = \" << iter << endl;\n", + " cout << \"Value of alpha at minimum = \" << p(0) << endl;\n", + " return 0;\n", + "} // end of main program" + ] + }, + { + "cell_type": "markdown", + "id": "4e7447d8", + "metadata": {}, + "source": [ + "## Functions to observe\n", + "The functions **Efunction** and **dEfunction** compute the expectation value of the energy and its derivative.\n", + "They use the the quasi-Newton method of [Broyden, Fletcher, Goldfarb, and Shanno (BFGS)](https://www.springer.com/it/book/9780387303031)\n", + "It uses the first derivatives only. The BFGS algorithm has proven good performance even for non-smooth optimizations. \n", + "These functions need to be changed when you want to your own derivatives." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "15dc6a71", + "metadata": {}, + "outputs": [], + "source": [ + "// this function defines the expectation value of the local energy\n", + "double Efunction(Vector &x)\n", + "{\n", + " double value = x(0)*x(0)*0.5+1.0/(8*x(0)*x(0));\n", + " return value;\n", + "} // end of function to evaluate\n", + "\n", + "// this function defines the derivative of the energy \n", + "void dEfunction(Vector &x, Vector &g)\n", + "{\n", + " g(0) = x(0)-1.0/(4*x(0)*x(0)*x(0));\n", + "} // end of function to evaluate" + ] + }, + { + "cell_type": "markdown", + "id": "73b9f44a", + "metadata": {}, + "source": [ + "You need to change these functions in order to compute the local energy for your system. I used 1000\n", + "cycles per call to get a new value of $\\langle E_L[\\alpha]\\rangle$.\n", + "When I compute the local energy I also compute its derivative.\n", + "After roughly 10-20 iterations I got a converged result in terms of $\\alpha$." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/doc/src/week6/week6.do.txt b/doc/src/week6/week6.do.txt index 0ceea043..c858f048 100644 --- a/doc/src/week6/week6.do.txt +++ b/doc/src/week6/week6.do.txt @@ -1,9 +1,9 @@ TITLE: Week 8 February 19-23: Gradient Methods AUTHOR: Morten Hjorth-Jensen {copyright, 1999-present|CC BY-NC} Email morten.hjorth-jensen@fys.uio.no at Department of Physics and Center fo Computing in Science Education, University of Oslo, Oslo, Norway & Department of Physics and Astronomy and Facility for Rare Isotope Beams, Michigan State University, East Lansing, Michigan, USA -DATE: today +DATE: February 23, 2024 !split -===== Overview of week 8 ===== +===== Overview ===== !bblock Topics * Gradient methods: o Semi-Newton methods (Broyden's algorithm) @@ -29,7 +29,7 @@ o "Handwritten notes":"https://github.com/CompPhysics/ComputationalPhysics2/blob !split ===== Brief reminder on Newton-Raphson's method ===== -Let us quickly remind ourselves how we derive the above method. +Let us quickly remind ourselves on how we derive the above method. Perhaps the most celebrated of all one-dimensional root-finding routines is Newton's method, also called the Newton-Raphson @@ -440,7 +440,66 @@ leading to the iterative scheme !split -===== Code examples for steepest descent ===== +===== Our simple $2\times 2$ example ===== + +Last week we introduced the simple two-dimensional function +!bt +\[ +f(x_1,x_2)=x_1^2+x_1x_2+10x_2^2-5x_1-3x_2, +\] +!et +which is of the form (in terms of vectors and matrices) +!bt +\[ +f(\bm{x})=\frac{1}{2}\bm{x}^T\bm{A}\bm{x}-\bm{b}^T\bm{x}, +\] +!et +where we have +!bt +\[ +\bm{x}=\begin{bmatrix} x_1 \\ x_2\end{bmatrix}, +\] +!et +!bt +\[ +\bm{b}=\begin{bmatrix} 5 \\ 3\end{bmatrix}, +\] +!et +and +!bt +\[ +\bm{A}=\begin{bmatrix} 2 & 1\\ 1& 20\end{bmatrix}. +\] +!et + +!split +===== Derivatives and more ===== + +Optimizing the above equation, that is +!bt +\[ +\nabla f = 0 = \bm{A}\bm{x}-\bm{b}, +\] +!et +which leads to a simple matrix-inversion problem +!bt +\[ +\bm{x}=\bm{A}^{-1}\bm{b}. +\] +!et +This problem is easy to solve since we can calculate the inverse. 
Alternatively, we can solve the two coupled equations with two unknowns +!bt +\[ +\frac{\partial f}{\partial x_1}=2x_1+x_2-5=0, +\] +!et +and +!bt +\[ +\frac{\partial f}{\partial x_2}=x_1+20x_2-3=0, +\] +!et +with solutions $x_1=97/39$ and $x_2=1/39$. !split ===== Simple codes for steepest descent and conjugate gradient using a $2\times 2$ matrix, in c++, Python code to come ===== @@ -461,8 +520,8 @@ int main(int argc, char * argv[]){ // Set our initial guess x0(0) = x0(1) = 0; // Set the matrix - A(0,0) = 3; A(1,0) = 2; A(0,1) = 2; A(1,1) = 6; - b(0) = 2; b(1) = -8; + A(0,0) = 2; A(1,0) = 1; A(0,1) = 1; A(1,1) = 20; + b(0) = 5; b(1) = 3; cout << "The Matrix A that we are using: " << endl; A.Print(); cout << endl; @@ -516,15 +575,15 @@ import matplotlib.pyplot as pt from mpl_toolkits.mplot3d import axes3d def f(x): - return 0.5*x[0]**2 + 2.5*x[1]**2 + return x[0]**2 + 10.0*x[1]**2+x[0]*x[1]-5.0*x[0]-3*x[2] def df(x): - return np.array([x[0], 5*x[1]]) + return np.array(2*[x[0]+x[1]-5.0, x[0]+20*x[1]]-3.0) fig = pt.figure() ax = fig.gca(projection="3d") -xmesh, ymesh = np.mgrid[-2:2:50j,-2:2:50j] +xmesh, ymesh = np.mgrid[-2:3:00j,-2:3:00j] fmesh = f(np.array([xmesh, ymesh])) ax.plot_surface(xmesh, ymesh, fmesh) !ec @@ -532,7 +591,7 @@ And then as countor plot !bc pycod pt.axis("equal") pt.contour(xmesh, ymesh, fmesh) -guesses = [np.array([2, 2./5])] +guesses = [np.array([3.0, 0.05])] !ec Find guesses !bc pycod