diff --git a/docs/source/optimisers/base_classes.rst b/docs/source/optimisers/base_classes.rst
index 82b5cf7e7..9fce056ca 100644
--- a/docs/source/optimisers/base_classes.rst
+++ b/docs/source/optimisers/base_classes.rst
@@ -8,3 +8,4 @@ Optimiser base classes
 .. autoclass:: PopulationBasedOptimiser
+.. autoclass:: LineSearchBasedOptimiser
diff --git a/docs/source/optimisers/index.rst b/docs/source/optimisers/index.rst
index 3a812e597..eef64719d 100644
--- a/docs/source/optimisers/index.rst
+++ b/docs/source/optimisers/index.rst
@@ -20,6 +20,7 @@ or the :class:`OptimisationController` class.
    cmaes_bare
    cmaes
    gradient_descent
+   lbfgs
    nelder_mead
    pso
    snes
diff --git a/docs/source/optimisers/lbfgs.rst b/docs/source/optimisers/lbfgs.rst
new file mode 100644
index 000000000..977712194
--- /dev/null
+++ b/docs/source/optimisers/lbfgs.rst
@@ -0,0 +1,7 @@
+******
+L-BFGS
+******
+
+.. currentmodule:: pints
+
+.. autoclass:: LBFGS
diff --git a/examples/README.md b/examples/README.md
index 643671e6b..cb356e689 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -34,6 +34,9 @@ relevant code.
 ### Local optimisers
 - [Nelder-Mead](./optimisation/nelder-mead.ipynb)
+### Quasi-Newton optimisers
+- [L-BFGS](./optimisation/lbfgs.ipynb)
+
 ### Further optimisation
 - [Convenience methods fmin() and curve\_fit()](./optimisation/convenience.ipynb)
 - [Maximum loglikelihood](./optimisation/maximum-likelihood.ipynb)
diff --git a/examples/optimisation/lbfgs.ipynb b/examples/optimisation/lbfgs.ipynb
new file mode 100644
index 000000000..4bc6c259e
--- /dev/null
+++ b/examples/optimisation/lbfgs.ipynb
@@ -0,0 +1,434 @@
+{
+ "cells": [
+  {
+   "source": [
+    "# Optimisation: L-BFGS\n",
+    "\n",
+    "This example shows you how to run a local optimisation with [L-BFGS](http://pints.readthedocs.io/en/latest/optimisers/lbfgs.html).\n",
+    "\n",
+    "For a more elaborate example of an optimisation, see: [basic optimisation example](./first-example.ipynb).\n",
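+    "\n",
+    "As a minimal sketch (using only calls that appear later in this notebook), the optimiser is run through the usual PINTS controller, with an error measure `score` and a starting point `x0` defined in the cells below:\n",
+    "\n",
+    "```python\n",
+    "opt = pints.OptimisationController(score, x0, method=pints.LBFGS)\n",
+    "found_parameters, found_value = opt.run()\n",
+    "```"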
+ ], + "cell_type": "markdown", + "metadata": {} + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from __future__ import print_function\n", + "import pints\n", + "import pints.toy as toy\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "#specifing a simple model for LBFGS line search trial\n", + "\n", + "# dzdt = ax^2 +by^2\n", + "# dz/dxdt = 2ax\n", + "# dz/dydt = 2by\n", + "# starting conditions z=0, y=0, x=0\n", + "# x and y increase by 1 in each step?\n", + "\n", + "class easyProblem(pints.ForwardModelS1):\n", + " \"\"\"\n", + " A model with parameters of a similar magantuide\n", + " \"\"\"\n", + "\n", + " def __init__(self):\n", + " super(easyProblem, self).__init__()\n", + "\n", + " def n_parameters(self):\n", + " return 2\n", + " def n_outputs(self):\n", + " return 1\n", + "\n", + " def _simulate(self, parameters, times, senstivities = False):\n", + "\n", + " # unpacking parameters\n", + " a = parameters[0]\n", + " b = parameters[1]\n", + " #ax^2 +by^2 where x= sin(times) and y=cos(time)\n", + " time_dim = len(times)\n", + " time_step = float(times[1])\n", + " x = np.sin(times)\n", + " y = np.cos(times)\n", + " dzdt = a*np.square(x) + b*np.square(y)\n", + " if senstivities is True:\n", + "\n", + " # creating senetivity matrix\n", + " sen = np.zeros((time_dim,2,3))\n", + " for i, xi in enumerate(x):\n", + " old_sen = sen[i,:,:]\n", + "\n", + " if i < time_dim:\n", + " # df/dtheta\n", + " dfdtheta = np.asarray([[0,0],\n", + " [0,0],\n", + " [xi**2, y[i]**2]])\n", + " # jacobian df/du\n", + " Jac = np.array([[0,1,0],\n", + " [-1,0,0],\n", + " [2*a*xi, 2*b*y[i], 0]])\n", + " temp = np.matmul(old_sen, np.transpose(Jac)) + np.transpose(dfdtheta)\n", + " new_sen = temp*time_step + old_sen\n", + "\n", + " # assigning the new sentivity to the right place in the senstivity array\n", + " sen[i,:,:] = new_sen\n", + "\n", + " return (dzdt, sen[:,:,-1])\n", + "\n", + " else:\n", + " return dzdt\n", + " \n", + " def simulate(self, parameters, times):\n", + " return self._simulate(parameters, times, senstivities = False)\n", + "\n", + " def simulateS1(self, parameters, times):\n", + " return self._simulate(parameters, times, senstivities = True)\n", + "\n", + " def suggested_parameters(self):\n", + " \"\"\"suggested parameters for the model [a, b]\"\"\"\n", + " return [3, 9]\n", + " def suggested_times(self):\n", + " \"\"\"suggested times for the model range 0 to 10\"\"\"\n", + " return np.linspace(0, 10, num=100000)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# making some toy data\n", + "model = easyProblem()\n", + "times = model.suggested_times()\n", + "real_parameters = model.suggested_parameters()\n", + "output = model.simulateS1(real_parameters, times)#\n", + "values = output[0]\n", + "sensetivities = output[1]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Using the Hager-Zhang linesearch implimentation of the LBFGS optimser" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "tags": [] + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Minimising error measure\n", + "Using Broyden-Fletcher-Goldfarb-Shanno (BFGS)\n", + "Running in sequential mode.\n", + "\n", + "Hessian updates: 0 line steps: 0 propose alpha: 0.001 propose points: 
[0.01 0.01]\n", + "Iter. Eval. Best Time m:s\n", + "0 1 1.42e+07 0:01.9\n", + "alpha_initial: 1.2626003255377118e-06\n", + "Number of accepted steps: 0\n", + "step size of alpha accepted: 0.10154474127440917\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 1 line steps: 0 propose alpha: 0.10154474127440917 propose points: [4.34929732 8.05250872]\n", + "1 2 1.01e+07 0:22.8\n", + "alpha_initial: 0.20308948254881834\n", + "Number of accepted steps: 1\n", + "step size of alpha accepted: 0.18279326950513297\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 2 line steps: 0 propose alpha: 0.18279326950513297 propose points: [2.86351878 8.75197834]\n", + "2 3 9996514 0:29.6\n", + "alpha_initial: 0.36558653901026594\n", + "Number of accepted steps: 2\n", + "step size of alpha accepted: 0.06556133817314082\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 3 line steps: 0 propose alpha: 0.06556133817314082 propose points: [2.99467023 8.96074361]\n", + "3 4 9993489 0:36.7\n", + "alpha_initial: 0.13112267634628164\n", + "Number of accepted steps: 3\n", + "step size of alpha accepted: 0.17156343822848208\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 4 line steps: 0 propose alpha: 0.17156343822848208 propose points: [2.91829932 8.99303174]\n", + "alpha_initial: 0.34312687645696416\n", + "Number of accepted steps: 4\n", + "step size of alpha accepted: 0.0485337191905153\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 5 line steps: 0 propose alpha: 0.0485337191905153 propose points: [2.92869329 9.00828149]\n", + "alpha_initial: 0.0970674383810306\n", + "Number of accepted steps: 5\n", + "step size of alpha accepted: 0.16286406909804865\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 6 line steps: 0 propose alpha: 0.16286406909804865 propose points: [2.92121559 9.01116373]\n", + "alpha_initial: 0.3257281381960973\n", + "Number of accepted steps: 6\n", + "step size of alpha accepted: 0.626615636979039\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 7 line steps: 0 propose alpha: 0.626615636979039 propose points: [2.92206729 9.01337339]\n", + "alpha_initial: 1.253231273958078\n", + "Number of accepted steps: 7\n", + "step size of alpha accepted: 1.7570238298625835\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 8 line steps: 0 propose alpha: 1.7570238298625835 propose points: [2.92146643 9.01360499]\n", + "alpha_initial: 3.514047659725167\n", + "Number of accepted steps: 8\n", + "step size of alpha accepted: 0.6266156369792899\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 9 line steps: 0 propose alpha: 0.6266156369792899 propose points: [2.92150318 9.01370032]\n", + "alpha_initial: 1.2532312739585798\n", + "Number of accepted steps: 9\n", + "step size of alpha accepted: 1.757023829805599\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 10 line steps: 0 propose alpha: 1.757023829805599 propose points: [2.92147725 9.01371031]\n", + "alpha_initial: 3.514047659611198\n", + "Number of accepted steps: 10\n", + "step size of alpha accepted: 0.6266156369924621\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 11 line steps: 0 propose alpha: 0.6266156369924621 propose points: [2.92147884 9.01371442]\n", + 
"alpha_initial: 1.2532312739849243\n", + "Number of accepted steps: 11\n", + "step size of alpha accepted: 1.7570238302294703\n", + "updating Hessian and changing newton direction\n", + "\n", + "Hessian updates: 12 line steps: 0 propose alpha: 1.7570238302294703 propose points: [2.92147772 9.01371486]\n", + "alpha_initial: 3.5140476604589406\n", + "Number of accepted steps: 12\n", + "step size of alpha accepted: 0.6266156369695461\n", + "updating Hessian and changing newton direction\n", + "\n", + "******************** Convergence after 13 accepted steps!********************\n", + "||df/dx_i||inf <= 1e-6 with parameters:\n", + "[2.92147779 9.01371503]\n", + "error function evaluation: 9993281.681014169\n", + "\n", + "Inverse Hessian matrix:\n", + " [[0.10233609 0. ]\n", + " [0. 0.10233609]]\n", + "\n", + "Hessian updates: 13 line steps: 0 propose alpha: 0.6266156369695461 propose points: [2.92147779 9.01371503]\n", + "14 14 9993282 2:05.3\n", + "Halting: Ill-conditioned covariance matrix\n", + "Score at true solution: \n", + "9993481.362921719\n", + "Found solution: True parameters:\n", + " 2.92147778828319726e+00 3.00000000000000000e+00\n", + " 9.01371503353561465e+00 9.00000000000000000e+00\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": "
", + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEGCAYAAACO8lkDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3dd3yT17nA8d+R5MEw25iAAbMhDLP3XoGQkNWkEEhI0pTmNmmS3rS9JGmb0QzSpGkzm2YWEpK02YMwwoawwg57GjDTmGVj8JDO/UOSka0tS34l6/l+PnywpFevjmXpfc54zjlKa40QQgjhymR0AYQQQkQfCQ5CCCHcSHAQQgjhRoKDEEIINxIchBBCuLEYXYBwaNCggc7IyDC6GEIIEVPWr19/Smud6umxKhEcMjIyWLdundHFEEKImKKUOujtMelWEkII4UaCgxBCCDcSHIQQQripEmMOnhQXF5Odnc2lS5eMLkqVlJycTHp6OgkJCUYXRQgRAVU2OGRnZ5OSkkJGRgZKKaOLU6VorcnNzSU7O5sWLVoYXRwhRARU2W6lS5cuUb9+fQkMEaCUon79+tIqE6IKq7LBAZDAEEHy3gpRtVXp4CBEVbdqXy77cvKNLoaogiQ4RJBSioceeqj09gsvvMDjjz8e9HnWrVvH/fffH3I5MjIyOHXqlM9jnnnmmZDPL4wz8a3VjPjbUqOLIaogCQ4RlJSUxOeff+73wuxPz549efnll8NUKs8kOAghXElwiCCLxcLUqVP5+9//7vZYVlYWw4cPp0uXLowYMYJDhw4B8Mknn9CpUycyMzMZPHgwAEuWLOGaa67BZrPRpk0bcnJyALDZbLRu3br0tlNubi6jR4+mY8eO3H333bju9nf99dfTo0cPOnbsyJtvvgnAtGnTuHjxIl27dmXSpElejxNCxI8qm8rq6olvtrH96PmwnvPKxrV47NqOWG2a/Tn5pNetTrVEs9tx9957L126dOEPf/hDmft/85vfMGXKFKZMmcK7777L/fffz5dffsmTTz7JvHnzaNKkCWfPni3zHJPJxOTJk5k1axYPPvggCxYsIDMzk9TUsutmPfHEEwwcOJA///nPzJ49m3feeaf0sXfffZd69epx8eJFevXqxU033cT06dN59dVX2bRpk8/j6tevH463TggRA6TlUEEXCku4WGzlxHnPaZ21atXi9ttvd+sWWrVqFbfeeisAt912GytWrABgwIAB3HHHHbz11ltYrVa38911113MnDkTsF/A77zzTrdjli1bxuTJkwEYN24cdevWLX3s5ZdfJjMzk759+3L48GH27NnjsdyBHieEqJriouXw2LUdDX39Bx98kO7du3u8kJf3xhtvsGbNGmbPnk2PHj1Yv359mcebNm1KWloaixYtYu3atcyaNSvgcixZsoQFCxawatUqqlevztChQz3OVQj0OCFEcKw2zffbT3BVx7SoTweXlkMlqFevHrfcckuZ7p3+/fvz8ccfAzBr1iwGDRoEwL59++jTpw9PPvkkqampHD582O18d999N5MnT+bmm2/GbHbvyho8eDAffvghAHPmzOHMmTMAnDt3jrp161K9enV27tzJ6tWrS5+TkJBAcXGx3+OEEKF7Z8V+7vlgPd9uOWZ0UfyS4FBJHnrooTJZS6+88grvvfceXbp04f333+ell14C4Pe//z2dO3emU6dO9O/fn8zMTLdzjR8/nvz8fK8tkccee4xly5bRsWNHPv/8c5o1awbAmDFjKCkpoUOHDkybNo2+ffuWPmfq1Kl06dKFSZMm+Twu1hQUlWCzaf8HClEJjp61t8BP5RcaXBL/4qJbySj5+ZcnJ6WlpVFQUFB6u3nz5ixatMjtOZ9//rnbfUOHDmXo0KGltzdv3kxmZibt27f3+Lr169dn/vz5Hh+bM2eOx/ufe+45nnvuOb/HxZJzF4vJfGI+949ow/+Oamt0cYSIKdJyiDHTp0/npptu4tlnnzW6KFHvzIUiAL7adMTgkggReyQ4xJhp06Zx8OBBBg4caHRRhBBBcp1zFO0kOAghRCVz5inlXSrmtcV7o3JcTIKDEEIY5Klvd/D8vF3M337C6KK4keAghAhJYYmV7DMF/g8Upcq3D/KLSgAottoqvzB+SHAQVV4MdfPGlD98uoWBzy3mUrH7TH7hW7RPgAMJDhFlNpvp2rVr6b+srCz69+8P2Bfec05UE5ERA9+/mLZo50kACkuir9Yb7c5dLObVRXuieoBa5jlEULVq1cosZgewcuVK4HJwcK6vJISIHy9+vxuAmknRewmWlkMlq1mzJmBPSV2+fDldu3b1uKR3vNt1PI9RLy7l3MVio4si/Ineym/UKd9QcI41fLnxSNS1IqI3bIXTnGlw/KfwnrNRZxg73echzj0SAFq0aMEXX3xR+tj06dN54YUX+Pbbb8NbriripYW72XMynxV7TjGuyxUReY0Ve04x+Z01rPvjSBrUTIrIa1Rl0msXPF0ukjq7PhfuPMl3Px2P2Gc9FNJyiCBnt9KmTZvKBAZhd/h0AUt2nYz465T/Qjq9s2I/AFuyz3p8XAhX0+fsJGPa7Iid/+zFooidOxT
x0XLwU8N3ZbVptNZYzBI3I23435ZQbNVkTR8XtnMePl3Agh0nuHNAC5TUbSPKmXHjLfhWNW8s3Wd0ESqVXAHL2XUij+3HvO8al32mgIO5F7w+rrV9Z7i8S777ylNSUsjLywu5nFVBsdX/RSXYC8+kt9fwxDfbOVsQXbWwaHDg1AXmbzsetvM5u0SirKu80hw7d5Fb3lhVuoZXIGLpvZLgUE6Jn8kopy8U+Rwk1RryC0s4mOt7clCXLl0wm81kZmbKgLQHodb6nUE5ClcjiKgF20+QMW02u094r3AMe2EJU99f7/Xx8tYfPMNP2efCUbwq6V9L97M26zRfbHRf2HHVvtyAKijR3LqNj24lg7gu2V3+voSEBI9LdsezuVuPcc8HG9j4p1EVPteJ85eokRg/H++5jhbBpsNnaZuWEpZz3vRPR9q1n26/OIvDfhWWWJn41moym9bhq3sH+Dw2mufixH3LocRqkxmeUeLt5QcA2JtzOahqDa8t3kvWKe9deZ6MfWl5WMsWq95cto9Hvghzpp5DFF/XDLM/J59Djl6DXcfdu6fLB9Jofg/jPjjsPpHvsykufPsx63TplyGsHN+a0xeKeH7eLia9vSbkU8VSP2+4PfPdTj5cc4iFO4Jb2O3YuYsBHxtt+fmV7clvt5f+PPxvSxn192WV8rrnLhazdHdOxM5veHBQSpmVUhuVUt86brdQSq1RSu1VSv1HKZUY6rkD+dCW2GJ76n9RiY1cA7YcdL63N7+xisHPL67QufaedA/Ox89dKvM6gbbuXNesieYme2X7xYx1QR3f71n/XZ6Xs5V8s9k0by/fz4XCkqDKEM0uFVtZFuCFWWsdkcmc//PBeqa8u5bTQQyIB8Pw4AA8AOxwuf0c8HetdWvgDPCLUE6anJxMbm5ula/VHDh1gSNnL/odSA+Gv/dMa01ubi7Jyclheb3tx+zBwfVV1x88E5Zzx4tQP+YZ02Zz34cbQnpuoLF34c6TPDV7B0/N3uH/4BjxxDfb2R9gV+eby/aT+cR8jp51b42FsgDfywv3MPyFJew9ae9+jdSKroaO2Cml0oFxwNPA/yr7OzUccC44NAN4HPhnsOdOT08nOzubnBzf0f3EGfsfbEdeNY+3/R1/qdjKqfwizieYuJiThNaaE2cvoRSYz3s+RzgdO3cRqw1M55IxmypeVS622jhxvpAGNRNJTjB7PS45OZn09HRgu9djQuX6W1RGaK9K9YdQPgHfbjnGfcPPk1G/hs+/uTdaQ+fH5vF/Y9szuW9zt8cvOlp9/tK7Y8n+HPdkE2/mOZIFjp276PZZy3dpTTkfs9nsCdzevs/OdZkapkR2Vr/R6Rz/AP4AONMr6gNntdbOdywbaOLpiUqpqcBUgGbNmrk9npCQQIsWLfwWYKxjxmPW9HHsPZnHL2csK73t73iAhTtO8Muv1zGsXSrv3dmVS8VWrv7TXBItJnY/Ndbna7+/+iBpKUmM7tjIbzm9ue2p7zmVX8SPj44kNQwflllrDvLo11lM7N2MZ2/sUOHzlXffhxvolVGPKf0zQnr+V5uOMKB1g7AsdxELyyZXljH/WM74zMa8PLGb22MHTl3gu5+OMaFXU+p7eN+11uQVlvDnr7Z6DA6VZdfxPJIsJjIa1OBikZUt2Wfp07J+2F9n4purOXza/zib5zRV7zWR1xfvZUKvpox7eQW7TuSFdXJoKAzrVlJKXQOc1FoHnnjtQmv9pta6p9a6Z2pqaljK9MbS/V4f23jojMdBOtfrS7B9/3/6cmtQeefR7Kfsc3y09hD/WrqP297xPnj87ZZjPPb1tjL3WcuN+yzZdbm1dzLP/p7mXigiJ6+QBz7exC9neu8/9/h1DLJl8OqiPXGZ378u67TH+29+YxXPz9tFj6cWeHw8WhpeV/1jGUNfWALAw59v4ecBXsSDtWp/LkcdY2LhdPTcJT7fcIRdHhJk9pzIc7v+RPp9N7LlMAAYr5S6GkgGagEvAXWUUhZH6yEdcJ9hYoAbXl/ps9tm7YHT9HhqAS9N6FqJpYoe1766IuTn/vY/m7mhW3rpWMeri/eWPrbx0OWxB2ff6vEIfDHBvnSKTWtemL+bF+bv9ltzyy8sIf9SCY1qh2fsJVoVFHkeSI7mGdI7j9svsPkxNgh+zMtn25kB9euhrdwei1T717CWg9b6Ya11utY6A5gALNJaTwIWAz9zHDYF+MqgIrqx+ph2e6HI3q+69oDn2pcvO4+f5+vNR0MuV1Xw0dpDbDjkvgDe2YLK66ee+OZq2jw6J+Djr31lBX2fXRiRsuQXlvDa4r0+P3OuTpyveMA8eu4S2WcKAn7N8pclb8+KlaSQH/aeCjmr6M1l+/jvusN+j/P3Vry/+qDPx19fUnnrO0VDtlJ5/4d9cHov9jGIdwwuj1fHz13ynuMfxPdhzD+Wc/9HG8NTqDD5aO0h9pzII2PabPY4mrlnLhRFrCb28OeeJ2o5a4Cujp27xPajnte/8vTlLrLaAsrmWluuW+WPX/qePHYgyIl5wZg+x77x/JytxwI6fsXeU0Bg4yjnLxWzLyefIx6yZ655ZQWtHvmuzH2uZ/zlzHVeL6D+LnzROMazYPsJPl2fzflLxUx6ew1TfXRZ+vLMdzv5w6db3O4vKrGVVnqyThXw8Y++A8ipILqmIx1zoyI4aK2XaK2vcfy8X2vdW2vdWmt9s9a68pP4y1m+x3PGU99nF/L4N2WzdTx9/u+e8SNzt4ZvwTNXwX5Aikps/Oilb7m8n7+5GrCPEwB0+8v3DHwusCU/bBFe3Ojql91nQH+6PpsSD6+bk1fIxLdWez3X8/N2ebz/g9WHPN7/w95TPPOd57TM0xeKfC7MGKgLhfaW6MHcArYdDe/4x+8/2cyIvy1lwHT3v6W/ltr320/wX8cFLpgLWbS6e+Y6fvfJZoocW50600PD5TWXLtKHPtkc1nOXilDMjYrgEA3Gv7qCT9dnl7nvvR8OsGpfLre9s9bteNc/uidFLjXVBTtOcs8HkR14DrRS9uycHdz8xiqPNe+cvEIe/WJr6W1Pk2tcLx6FJd4nprV85Dt2+FjdNhSefscLhSWlNdnf+fjy/Zjlfd6Ep9aJL5PeXsObyzwnLwx8bhFDnl/i8/k5eYUBt8Cen7eLcS+HPp7jybxtwc2WDrXGv+nwWU6GoburImxas/XIOWat8d1d48tvPtoY0j4OSsV2ADU6lTVqbPGQnfLEN95z+L3VNl1r8rtP5IVtETR/nK+7fE8OKckJdG1ax+NxuxwXwjMeVoz01qLw1gY456eWufHQWTpcUcvjY12fnO/zuYG4UFhCx8fmAZ5TjwPvOw+fgiL/M7l7Pb2AxrWTWfnwiDL3Pzd3J6k1k7hroP8UbG+01nyy7jDXdW1CoiU8db/yoeHcxWJy8i5f9Lwtq379az+Qkmzhqes7haUcoXANrJP6hJZm+02cjgdKyyGCXL9AkVK+UnfbO2u5/rUfwvoaufmFjHxxqdv9nrpwAhWOgWZ/XQDPzoneGbmeUiH/uWRfmXV6QjF363F+/+kWXlq4O2ILSr66eC
+9nr6c1nq3j6U58i6Fd4zKZtO8sXRflVqKwx9v42WRbpVIcPAi1OyhSI65uU6TX7TzBKv25XIq3/O6Kt9vPxH0SqbefLvlmMcL8e8/9d2HGqnVQAMVj0twnHfMQs7NL+KaV8LbHeXNNi/JAeX5+mos2nmizDyhC4UlHrOc5m47zvQ5OyMa+PMLS8iYNpsWD0duS9BAvL18P3e8t9Zvt2ek9oSQ4ODFPysxZSwQ2WcKaPPoHP7zo32Q9K5/r/M5yPrLmetKJwQFKtB+8Nz8QjKmzeaHvbl+j918+GzYUhnLZwe5BktPe1GXD5wr950KSzkqwl+qYkU5x1b2nswP2+BqXhC19FAmnRUUlXDXv9dxx3s/AnAy7xIdH5vHG0v3u82xcLaGnAP2Tv66OF19tj6bxT72Ln91kX08UevAF3yMhKdm7ygzIbSyxXVw8JXeGI5mq7cJLaHYl2O/MDozhzypSEth5b5THlPxwD091NtMWU+ue+0HPlmXzcZDZ0prtaG69a2yM69dEwicFxZ/z3edDxBIfev+jzby0VrPWUuuDpy64Hfp8gOnLvCnL7f6PCYQi3ae4P1VWT6PWWdQq2nQXxezeOfJMsHaX93A2T3p/Pw6Jzk+N3cnV/55HusPXh4Lu+BlTOdAuQwxX6/50CebudPL5yX3QlGZvaLb/2mu78L7UFBkZdYa/58dfyqrBVheXA9IL9rpvfYQ6kqHH6+9nMf8u08287Me6SGdJ1jnLhYx8sXg15F/Y+k+xnRsxHof2TwV9YfP7EGnR/O6YT2vv5xxT37MOs19H27kjv4ZLPTx93f6evNRvt58lIm93dfvcjWsXCvt5PlLNKxVduZ0+c/U8j05DGrjeemX8uv0/3PJPn41uCUmk+Kuf9v7+G/rl+G3/Ea489+eL7zeulydmX95hSU89tVWembUK/P4xkNn6dHcfp9rcP1iYzY/Zp3hmRs6u53TFkBr1XWV1Cd9JJ/Eq7huOfj6AIXai+drkHbT4bOlOfA9PdS+LwaQ6eLN64s9d4P5ypE/W1DE9Dk7mfjWas5UwkzkaFiv6NvN9pbXv1dmRfR1nLW9cwXFPPnN9tI8eleeUqTBnlFWPo34ubk7+b7chj2RWDcoErxlMzn9y2VNsxmrDvKbACeE/vY/m/nQS818TwBdav1d5nnE8goFkRrnjOvg4Eu4kiBP5l3uxrj+tR9Kc+A9ZRp0+PPc0q6u5+bu5IuNl7tN/OWL/3TE84XXV478HMfEvIvF1oBqWlWBvwtVuDgXDPzrvJ28+8MBvtx0JKDuKcBr99vvP9lcJt9+0F8rtslSZVm4w95C83QN2xfE0te+xMoSHZEQqRyYuO5Wqgy9n3Zfe8fXINep/CJqJJlLB8SPnr1ETl6h35qur6/GG0v3cc8Q+4Jdzu/Qfpf+77MFxWUm7UVKZbxGqHxN6HPSWgfVwnp9yV5KrPY33GbTvPdDVqjFA+B8mNNCK4uncbJFO0+w7ch5erWo5+EZIhpIcPAinIPJ5fka5Cq/kJu3yXbBmD5nZ2lwcLpYLgvEW/O8qvFWwXTWbn1574esoOYh/HXuLn7es2nAx8cT57jJlH6hTUyL1O5n4jLpVqoi/DUtD58uu9pmHLfCPQpkwuKSCmzmHkxK5Ot+lmapSmasCiy19/yl4jLvYfnZ7/H8cZ79U2CLMwZLWg5xYtBfF3PfsNalt1ft9z9HIZ6U34AoXJxdaeUXaHRatS+Xw2cKymRyLTYwt70yuKamBqrL4/NpWi/y2+7Goj9/tY3bI5C5FufBIfqWEPbHW40/kIwF1010jJxcY6RQV/zwtAihk6/Jg/66P3xNZKxqlFKs2HOKyT52CvTl8Gn3JcY9+c5HTTrcq65WZXHdrRSFy8uLCFuwI7gVSZ12HjvP5sPumxGB7y6paNzDwCha65ADg/9zX/7517M2eH19T2uECc/ivOUQe5ybupQXqfVVhN2tb4d2UZO/ymXfhbinyVOzw7OOkoyzBSeuWw5GrptSEeU3GgdpBRmp/OxoV7E8uSrcPE0EDMe5zvjo8hOhi+vgEKv9j56+ZL76xIWoalyXPrGP6/hvFmSFYYe+eBLXwSFWvbvigNt9OTG845QQFRVIKvLwv8l4QzAkOMQgT7nh0p8q4tk9H3gehBahi+vgIN30QsS+YgO2g40HcR0cZBRXiNh3Q5i3xRV2cR0cJDQIEftOVsJe7fEoroODEEIIzyQ4CCGEcCPBQQghhJu4Dg4yHi2EEJ7FdXBw7tIlhBCirLgODq/G0aYqQggRjLgODkIIITyT4CCEEMKNBAchhBBuJDgIIYRwI8FBCCGEGwkOQggh3BgWHJRSTZVSi5VS25VS25RSDzjur6eU+l4ptcfxf12jyiiEEPHKyJZDCfCQ1vpKoC9wr1LqSmAasFBr3QZY6LgthBCiEhkWHLTWx7TWGxw/5wE7gCbAdcAMx2EzgOuNKaEQQsSvqBhzUEplAN2ANUCa1vqY46HjQJqX50xVSq1TSq3LycmplHIKIUS8MDw4KKVqAp8BD2qtz7s+prXWgMcFkLTWb2qte2qte6amplZCSYUQIn4YGhyUUgnYA8MsrfXnjrtPKKWucDx+BXDSqPIJIUS8MjJbSQHvADu01i+6PPQ1MMXx8xTgq8oumxBCxDuLga89ALgN+Ekptclx3yPAdOC/SqlfAAeBWwwqnxBCxC3DgoPWegXgbbudEZVZFiGEEGUZPiAthBAi+khwEEII4UaCgxBCCDcSHIQQQriR4CCEEMKNBAchhBBuJDgIIYRwI8FBCCGEGwkOQggh3AQcHJRS1SNZECGEENHDb3BQSvVXSm0HdjpuZyqlXo94yYQQQhgmkJbD34GrgFwArfVmYHAkCyWEEMJYAXUraa0Pl7vLGoGyCCGEiBKBrMp6WCnVH9COzXkewL7fsxBCiCoqkJbDPcC9QBPgCNDVcVsIIUQV5bfloLU+BUyqhLIIIYSIEn6Dg1LqPUCXv19rfVdESiSEEMJwgYw5fOvyczJwA3A0MsWpXD3VTu61fMVFkrhIIud1DY7q+hzVDTigG7FHp1Ns6E6qVYEmlXO0NR2mhTpOQ3WGVM5RW13AjA0TNoqwcE7X4Bw1OaIbkKUbccDWiCM0wPtmgSIQFkpork7QWh2hhTpOPZVHPZVHCgWlxxSSwGmdwmldi2zdgH26Mft0Y/KRqU0VVYsLtFHZNFcnaGY6SSrnqKEuUoNLABRhoZBETunanNB1OaIbsEs35aBOw2bwHOVAupU+c72tlPoIWBGxElWiJFVMPZVHNU5RjSLqmPJJURdLHy/UFnbqZmywtWG5rTNrbB24QDUDSxz9FDY6qEP0Ne2gr2k7PUy7qa/ySh+3akUutTmna1CCCRsmEimhtukCtcknSZWUHpurU9hoa80GW1uW2TqzTWegZVK/T9W4RF/TDnqbdtLLtIvOan+Z97RAJ3GaFPJ0NZyBN5lC6pnyqK0Kypxrn+0KfrS1Y51uxzJrF05StzJ/lZjUiFyGmLfQx7SDTLWPVqZjpY/ZtOIMNcnT1blAMhpFEsUkU
0Sq6SzJqrj02AKdxHbdnNW2Dqy0dWS9rS2FJFbq76K0dusx8v0EpdoBs7XWrSNTpOD17NlTr1u3LujnZUyb7XZfCgU0Uadoo7LpZDpAZ3WAbqa9VFNFFGsza23tmW3ry1xrL05TKxzFj3lmrPQ27WSMaS1XmdfRSJ0BIMuWxlpbe7br5uzSTdlna8wpavuoEWkacpaWpmO0UkfJVPvobtpDa5O9oXpS12GxtStzbL1YYetMibTqAKjLeUaZ1zPKtJ5Bpp9IVsUUaTM/6Zass7Vlp60Ze3UTDuhGPlsDCZTQVJ2klTpKG5VNd9Meepp2U0ddAGCzrSULrN2ZY+vNXp1eWb9e1GurDjPevJIRpg10MNmz/k/qOmy0tWazrRXbdXMO6jSO6AYUkeDlLJo65JOucuhgOkQHdYiupr10UfuxKBuXdAJLbZl8Z+3NIlt38sr9HbOmjwup7Eqp9Vrrnh4f8xcclFJ52McclOP/48DD5VsURgpncPAkiSJ6mHYzyPQTo03raGU6Rok2sdLWkU+sQ5hn6+Xjj151XUEut1oWMsG8mFR1jos6kaW2TL639mClrSPHqB+W16nHeYaYNjPcvJEhpi3UUgWc0rX4xtqPL60D2KxbEX/dT5ruag+TLQsYZ1pNkiohWzfge2sPvrf1CFtNU2GjrcpmhGkjI83r6ar2YVKaLbYWfGYdzNfWfpyJw0pSGqe53vwD15t/oIPpECXaxFpbe5bYMlli68punU44PpM1uEhv006GmDYzxvwjjdQZCrWFRbZu/Nc6lGW2LlgxGxMcYkGkg0NZmg7qEOPMqxlvWkkzUw6ndU0+sw7mY+sw9ukmIZwzlmj6m7Zxu/l7Rpns7/kiW3c+sw5iqa0LF0mO6KsnUMIQ02auN69glGkDSaqYbbbmvG8dxVfW/hF/faNV4xLjzau4zfw9nUxZ5OlqfGYdxCfWoWzTzYl0kEzlLNeaV3GjeTmdTFkUazPzbD2ZWTKatbp9xF/fWJp+pu3cbp7PKNN6LMrGBltrvrQOYLa1L7nUjuirK2x0U3u5xrya8eaVNFDnOaHr8I+Sm3j26RdDO2cowUEp1d3XSbXWG0IqTQRUbnC4TGGjv2kbE82LuMq0jgRlZaX1St6zjmGhrbvhA0rhVJMCbjIv5zbz97Q2HSVXp/Af6zBmlYzgCKmGlCmFAsabVzLZvIAOpkOc19X4zDqYD6wjq1yQbqGOMdm8gJvNS6mlCthha8oH1lF8YR1IgUEBsZ06xM/My7jZvJQ66gI7bE2ZaR3Nl9YBVSpIl//sn9Y1+a91GB9Zh3FQNzKkTAmUMNy0kZvNS5hn68XzTz8f0nlCDQ6LfZxTa62Hh1SaCDAqOLhqwDluNi9lsuV7mqhcDtoaMsN6FR8gLZcAACAASURBVJ9Yh7j1D8aStuowt5vnc4N5BTVUIZtsrZhZMorZtr6VPkDmnaaH2s1tlu8Za1pLkiphhbUjM62jWWDrEbNB2oyVEaYNTDYvYLD5J4q0mbm23swsGcU63Y5oqaUnU8h480qmmOfT0XSQ87o6n1iHMNM6yrCLZzi0Uke4wzyPG83LHZ/9lrxfMppvo+qzbyfdSl5EQ3BwMmPlKtOP3GmZSy/TbvJ1Mp9YhzDDOposfUXYXy8SLJQw2rSOKZb59DHtpFAn8LW1HzOto/lJtzS6eD7V5xw/Ny9msmUBjdVpsnUDPigZyX+sQ2Omb7yB43e41bKQJiqXo7oeH5aM4D/WYeRQx+ji+WAfB7nDMo+xprWYsbHElslM62iW2rrERKaZwsYQ0xbuNM9liHkLhdrCN7b+zCwZxRbdyujieWVYcFBKdQKuhMttRa31zJBKEwHRFBxcdVL7udMyl2tNq7BgY7GtK+9Zx7DC1oloqfW5SuM0E8yLmWhZRCN1hkO2VD6wjuS/1qGcJcXo4gXFjJWRpvVMMc+nv3l7aYCbYR3N1qgMcJqeahe3WRYw1rSGRGVlubUTH1hHscDWHStmowsYlFTOcKt5EZMsC2mozrLf1oj3raP4NEpb0tW5xE3mZdxhnkcr0zFO6jq8XzKSD60jIj6WEA5GZSs9BgzFHhy+A8YCK7TWPwupNBEQrcHBKZWzTLIsYJJ5AanqPPttjfjYOozPrIMN/+ApbAw0bWWSeSEjHYNsS6yZzLCOZqktM2a7ZFy1Udncbp5f2j2wwdaaGSWjmWPrY3iWWS3yudG8gonmRbQzZXNeV+dTx7jJft3Y0LKFQwIljDWt5XbLfHqadnNBJ/G5dRAzrKOjIh22nTrEBPNibjIvp5YqYJOtJe+VjOE7W9+YmgBrVHD4CcgENmqtM5VSacAHWutRIZUmAqI9ODglUszVpjVMtCyij2knRdrMfFtP/mMdxkpbx0qtHTZVJxhvWsUt5iU0N50kV6fwiXUoH1qHc0inVVo5KlMKBdxkXsbt5vm0NB0nR9fiI+twPrcOqtQuPxM2+ph2cJN5OdeYVpGsitlka8lH1hF8be1XpQZzXXVUB7jDPI/x5lUkqWJWWq/kc9sg5ll7VWproiYFjDOvYaJ5EV1N+yjUFubaejOjZDQbdBuisVXvj1HB4UetdS+l1HpgGJAH7NBatw+pNBEQK8HBVSt1hInmRdxkXk5dlU+uTmGetRezbX1YbbsyIoEiXZ1kpGkD480r6W7aC8AaW3tmlYxkbhzN1XC2lm43z2eEaSMmpdlqy+Bba1++s/WJSHA0Y6Wb2sM48xrGmdfQUJ0lXyfzlXUAH1pHsE1nhP01o1VdzjPBvIQJ5kU0N52kUCew0NaNb6z9WGHrHJFAUYsLjDStZ6x5LYNNW0hSJeyypfOxdRhfWAfGXLdpeZUaHJRSrwEfAROBR4EJwENAPrBJa31nSKWJgFgMDk5JFDHUtIlx5jWMMG2ghirkvK7GGse0+bW2DuwOaY0nzRWcpotpH30ck2icU/l32JrxlbU/31j7GZaGGi0akcs48xquMa+mmyNgZtnS+MHWiR9sHdmiW5Gtg1/jyUIJ7VQ2maZ9DDJtYYBpG7VUAYU6gUW2rnxj7cciWzcukRSB3ypWaLqqfVxn/oFrzKtIVecp0SbW67Yss3Zhg27DVluLkIJFLfLpZMqin2k7A0xbS2caH9H1mWPtzbfWfmyqQpMnKzs4PIA9IDQG/oM9UJwBammtt4RUkgiJ5eDgyh4oNjPYtIX+pq20MJ0AoFib2asbs1c34biux3Fdl/PUoEhbKMZCEsXUVBcd0+9P0UydpJXpKA3VWQAKdQKrbR1YYstkqS2zSvRlR0K6ymGEaQMDTVvpa9peus7WWV2DHbbmZOsGHKU+OboOF3UShY6WVnV1iZpcoqE6S7o6STN1knYqmyTHWjlHdT2WWbuwzGb/JwvauXO2rIaaNzPEtJnOpqzSx/bZriBLN+KwTuWork8+1cnXyZRgJpESklUR9cjjCpVLY5VLO9Nh0tUpAEq0iS26JT/YOrHQ2r1KBQRXRnUrNcceJCYA1bAHiQ+11ntCKk0EVJXgUF4Tcuhm2ksH00GuVAfJUMdppM5QTRV5fc5JXYdDuiEH
dRqbbS3ZbGvFDt08brqMwsVCCR1VFp1MWXRUWbQzHaaxyqUhZzArz9+ZQm3hiG5Atk5lp27GT7YWbNEtOajTqIoXpEiqQx5dTPvpovbTyZRFU3WSpiqHWuUWB3R1RtfkuK7HHt2E7bbmbNfN2WhrE5XZUeFm+DwHpVQ34F2gi9Y6anLrqmpw8ExTiwukcJFEVUICJVwikXxdjXyqSRCIMAsl1CWPJGVfTVMBF3QyBSRxjhoxkcsfy6pziZpcpIa6RAIlFJJAoU7gHDWq7EB+ICIRHALZ7MeCPX11AjACWAI8HlJJRBgozlOT89T0sAWTiLQSLORQV957gxSQbF8uRN7/iPMaHJRSo7APRl8NrAU+BqZqrS9UUtmEEEIYxFcb+GFgJdBBaz1ea/1hZQYGpdQYpdQupdRepdS0ynpdIYQQPloORi6sp5QyA68Bo4Bs4Eel1Nda6+1GlUkIIeJJtI6e9Qb2aq33a62LsHdpXWdwmYQQIm5Ea3BoAhx2uZ3tuK+UUmqqUmqdUmpdTk5OpRZOCCGqumgNDn5prd/UWvfUWvdMTY3vWb5CCBFu0RocjgBNXW6nO+4TQghRCaI1OPwItFFKtVBKJWKfY/G1wWUSQoi4EZULlmutS5RS9wHzADPwrtZ6m8HFEkKIuBGVwQFAa/0d9s2FhBBCVLJo7VYSQghhoLgODp2bRP/esEIIYYS4Dg4mWUVZCCE8iuvgkGCO619fCCG8iuurY9emdYwughBCRKW4Dg5KupWEEMKjuA4OQgghPJPgIIQQwk1cBwcl/UpCCOFRXAcHIYQQnklwEEII4Saug4N0KgkhhGdxHRyEEEJ4Ft/BQZoOQgjhUXwHByGEEB7FdXBonVrT6CIIIURUiuvgcE2XxkYXQQgholJcBwchhBCexXVwkAnSQgjhWVwHByGEEJ5JcBBCCOFGgoMQQgg3EhyEEEK4keAghBDCjQQHIYQQbiQ4CCGEcCPBQQghhBsJDkIIIdzEdXCQGdIVYzHJGyhEVRXXwUFUzA3dmhhdBCFEhEhwEEII4UaCgzDcO1N6Gl2EuPerIS0rfI7P/qdfGEoiooUEBxGyGkmWsJwnrVZyWM4jKkBX/BSt4mTzrD+O6xD2c9atngDA/SPahP3coYrr4KBkE+mQvTShK52a1PZ73LwHB5e5fc+QVjx5XUe349b/cST/N6Z92MpXlbVvlBL2c/qKDe3Swv96sSpr+jjuHtSSvU+PDet5qyfaK1qdGtcK+rnf/mZgWMviFNfBQYRuTKdGHu9/ZWK3MrfbNUrhrgEtSm9PG9veY0iuXzOJmsnhaYlUdXWrJ4b9nFp7Dw+/HtYqwHOEqzTx7TfDWwd1fCCVtFAYEhyUUs8rpXYqpbYopb5QStVxeexhpdRepdQupdRVRpQvHP2vVV2Sxex2X9b0cVyb6b71alJC2Y+Zv2tIiwY1KlI0UQFXd3YP+ibJ+Q5aWq0k7uifEfTzlFI8NLod/zM0sIAcSUa1HL4HOmmtuwC7gYcBlFJXAhOAjsAY4HWllPtVKMJSayb5PeYXA1twe7/mAZ/ztVu7V6RIMWXqYM/B9ZeDWni836lbU3sd4eGx0r3ki79rdVot75/fH6YNZ8nvhvLW7WWTAJy1/trVEtyeU79mIrf2aRZ0OeOZxWTi8fHu3acAk/vGxntpSHDQWs/XWpc4bq4G0h0/Xwd8rLUu1FofAPYCvSu7fP1a1S/9ef5vB/PKxG7c1D29zDFd0mtTLSHwuDWuyxVhK1+0e+RqzwN2dfx0h3RqUptdT41hdEfPXVZOzepVD7lssaJpvWohP3fmXX28PtakTjUyGtRg1JVpXo5QPH7tlW73TpLg4NfPeqT7PwhI9tDq9tWtZ5RoGHO4C5jj+LkJcNjlsWzHfW6UUlOVUuuUUutycnJCemFnDaz8TN/6New1r4YpSbRNS+HazMb87ZbMMsf0zKhX+vPUwS35YdrwoF47mmcXv3Bzpv+D8P2BHtoulfEeupjsz/N+Tk/dVeX1a1nf7zGx7oau3icY9mnh+/evVa1iYzd3DPDdwhOeubaYh7ZLDeq5zq9ENF0VIhYclFILlFJbPfy7zuWYR4ESYFaw59dav6m17qm17pmaGtwfwp9gu1jr1UikSZ3ganrROvj67I2d/daAAvng//vO3rxcbnDaqUEA3XbBqIpddr7qkT0z6rLrqTFe+7QDzcL78O7LLYxaju4kj5/9coVp3yiFtY+M8Di+JCAlyeK1S8mbZEcvhLdrj9mlMrnwoSEhly0YEQsOWuuRWutOHv59BaCUugO4BpikL1dBjwBNXU6T7rivUoW7hffenb3c7rvyiuBT1irDhF5N/R6TaA7uY+NsJTkHNntm1PX7HH8X/N+Nblv6c9+W9XwcGXsC6cJJsph55OoOTOxt/3sF8ncrr3/rBux9eiwrpw2nbg17l5+/sJJWK4kvfj2AhrWSecyl++mO/hlxu1ZZ+e9yo9rJJAT4Hdn859G8dmt3tySMmuXmELm+ta7zSbx3D1acUdlKY4A/AOO11gUuD30NTFBKJSmlWgBtgLVGlBF8tyBcH/IXTDwN8kXrF0l5KNjrkypWM//VkFZM6dc8qOwNf2M0yUGM90S7jX8aVfpz35b1ePqGzgE9L9Fiokt6Hf8H+mAxm2gcRKu3Xo0kqiXa33vXFmAwNeWqNGZkNim+uLc/m/48yv/BHtSunsC4Lle4ddHePahFQIkZ5YNIOBk15vAqkAJ8r5TapJR6A0BrvQ34L7AdmAvcq7W2RqoQFpNiZIc03rnjcs1+1t3eB/PclLuOrn1kRJhKdlmwfZeRUNG8+ppJFp64rlPpRaWidLl+jugbyguOs9Ye6wJtcX9zX2QmbRklyWKmTvVEUhxdxRUJ2M66WZLFzK+GeE9n/XhqX4CIprwala3UWmvdVGvd1fHvHpfHntZat9Jat9Naz/F1nopSSvH2lJ4MaWu/AE/q04wBrRuEfL6GtZJ59sbLtb6uTStWqwPjalmutfzGdWJ3eYtpAdS+9oR5tmukRSSZoRKzZWpXd29JxxpP79YVtavxzX0DefqGTm6Pmcv9zRLNJga3Db7i5+zS7duyPlnTx9E2grPXoyFbKSpkTR8XcHMevHcLTewdWMpfJJbuWPfHkWE71+PjO7L/matZ/fAImtf3PCnNYg7tdyh/HfL2XtZMstDZw+zPYe0aBvxagfTfB9o/XBl8XaPbNLT3NfvrUvOWrfTG5B5+X9/T36J7c/9jRJWpdcPoWcOp/NvVOb12QF2eu58ey8y7/GfpT+zdtEz24MqHg8uKrIjoTJkxWL0aidRMsvDoOPd870hpVq86h04X8MLNmSSYFWcuFPH4N9uDCiHhzgIymRSNantvNVzTpTE7j+fxr6X7w/q6TlufsE+Qz5g2u8z9qSlJHDl7sfR2oP2uax4ZwfqDZ/j1rA3hK2SEWEzuActft9xvR7alRpK5dJ2e8jqnB7/Mwu39mkfd+E6TOtXYezLf6GIEZGynRszZetzv9/hyKmvZI5+9sQsATetW4z/rDlO/Ersgo6fKFEU
SLSa2PnGV1zz9UF2b2ZgrvFxsnZOe0molcV25HPfHPExKMtKkvvaZ4QlmEw+PDf8KlYFwXrDuHdbK58XLdYA9rVYyV3eOjcmIvxzsfa6Bt8bFAyPbcPegsrPTE82m0s9PPR9jRw0dK+M2rWvsYHHW9HF+j2lS1/MAeijLVYQq0F64f07uEdDvpP1MdOjTsj4v3tLVY8JIpEhwCILrchkKVTq3wdtyBT0dzXFntH9lYjf++rMuPl/DWXMItAf4qevt/ZspEZ43sfMvY1j7yAiypo8rHaOpbD0c76cGft6rKb8d2Zb7hoVnieOGKUmG/V6unH/36okWt4pEqJeFOwe0IGv6OJ8tj9FXpvHenb3cgkuUJtWVfhZapl7u8qzImlztG6XwxuQeLPjfwf4PdhGtWYfhIMEhCE9e14mGKZcDweQ+zXlnSk+v22VOG9ue+b8dXKbPPthxv0BrCuUnJHkqkzPDIRTJCebS2qVRXN+JBLOJB0a2CVsG1NpHRzLDpQ+4ef3ga9CBrgnls4Yb5nHh67oG1vpVSjGsXUO3gVMn59In3ZtVPMkiWPU8dKU4y3mVn6VWApVetxpjOjWidcMUEi1lL4uB1PyrIgkOIVLK3ic/okOa1wu4xWzymk1Q/im+gobPx7zc7xrEnPpGybIT5VNR/bmxW5NK3y3uFwODX0LCW+ph+0YpZS5wvuYEBPve9G5hnwDobbaya/ZcRTSpU425Dw7isWuDm/nrtCaINO+JvZuVSSRY+L/uM4LDVWGfeVdvpo1tz19/dnnQN5BksGD/Tv7PF30kOEQZZ9DwFhD+dVsPGrt0N6Q7urbK78KV5Kj9jCvXx/7hL/vE3LacL/68KyM6hDYT1NP6T66rw/73V5Hf2nLug4O5M5TlmwM4plVqTbKmj/Oagm0JYyZW+0a13GrVrqoneW/Fedvt70MP84qevbFzmczBujUSSXeMMyRZTEwd5HnV32C6eP44rgN/ua4jg9o04J4hrTy2Tl67tXuZDahapbp3W4WSdXi9l54G+/mihwSHIBkZ4e/on8FVHRux/P8up7MNa9+QT+7p53bxcbZmyqf99W/VgBEd0soswVzRZvP1XRvTK4AlMSrK2bXhL8//jcndmfPAIP5naCuPmUyuWWjOmrc3N/r4IgMkJ1TNr1DbNPvnpkMQy7wkWczsf+bqMuNfKUkWtj3hfVuW/gHOK5rUxz7et/5Po8gIYGzhg1/4nsxaI8nCbf0yPLb6nRf8Ye1TSyeZ/fjoSL6pwI5rzm7efc9cHbHNecJNUllDFM4IP7ZTI85dLA74+PL9wr0ygl9bKJzzqP4xwfMCe+H2ws1d+GLjEb+TCxvUTKLDFbV8XtjuHdaKRLOPLCfH/55qwylJFvIK7SvOL3xoKAOmLyp9bNOfR3Eqv5CRLy4re74A3+9wzkWryLLfHRvXZveJfJ8tBYC3bu/J9qPnS2+bTJfr0vN/O5hGtZM97jXer2X9gMdDwD4T2NNsYNf3y/m6k/o0Y2Cb0Cezejp3qodu2mA8e2Nn/njNlV7HdKKRBIcoUCPJwtmCssEhnC2UaB9Qy/Ayya68OtUTuTNMy0n//qrANxSqkWjmQpF9FZf5vx3M2YJibvnXKjpcUcttNd461RP97lvhyRPjO/LY19vK3PfPyT3417J9fPfT8aDPt+2JqyrlQjTqyjSvi7+lpSRTK9nzbOiPAkiOGNI2lQQvEy09BtswpQ4Fcppgg7jFbKJ2Ne+Btl1aTZbtzgn7XKWKqJpt4gj6/VXtgNCXAOjXqj4TezfjuZsup7S6psKW/1yG8nkP5KIwMsQ+/HCoU81+8Xz06g5kTR/nsWYZLF8rs740oSvv3lHxcZZ6NRJpm5ZC/Zr28nubsxIM55jQlR42ls9sWofXJ/XgvmGt+eLX/Uu7QALZGKZGkiXqJq8Fa8ZdvXl7ivuKxoHyVSkKZGXhgK7/YYq/fxjTnk/u6RdVXU7ScgjSLT2bcktP38sjv3lbD48DXGBPwSyfQfLAiLZMeTc8i88+cnV7BrdNZY6f2ubQIJagCLdqieawt2Y+ntqvdCZ1+S91+UmFoVr6+6GAfRD4pQldGdo2+Pfw1+W6Rl5zrHjb3LGG1nUexjh+56iQvDKxG28t31/hlVirov4uuzf6u6g/NKqtzy4tIzp+EsymkLqHI0mCQwT42+ayvESLqTQbw7kJUKjbBk4dbL/4+AsOALPvH8ieE7GxDEE0cB28dA04E3s35aO1hz09xeNzPWlYK5l9z1ztcyyoab3qPHmd+6Ju8eiZGzrz17m7SifDmU2B5w39ZkRgEyd9fQedf05f+3XHOgkOBpo2tj0DHdkaT17XiREdGrrVCkNdoG9K/ww2HT5bZlZ3eR0b16Zj4+hpxsaqZ2/sUroGTqB+N7otLRqUzSSLpcHKcHId4A9Uy9SavHFbD5bu9r1F8AMj2vDSwj1BnTuQiadJFjN//3mm3y1bY5mMORjoniGtSvsYqyWaGdMpfOv+1KuRyIy7elM/iga4YpHWlGa+BLNU9q8Gt6RJnWq8eqvnTK77hrfxu6FRvFj0u6ERO/dvR7VlRHt791/nJrVD2jHPmxu6pQe1UVKskZZDjPrsf/pzqTi8+yDVCNNSFLHsvTt6YdOao+culd730oRuHDt3KagB3oev7sDDVxuzKGGsqWiaqNO1mY35dstR7h3W2uPjD4xow8gAttX85+Tu/Gvpfmp4Wd02XsT3bx8DvLVwe4R5jf3nbuocdQNiwfry3gF8vPYQPZqF/t4Mc9Qy3199sPS+5ARzhRZ1cxWNyyRUFbWrJfDx1IrPeB/UJpVBbYxfhNFoEhwEAD/vFdgmRdGsa9M6Ydl9T1Qtf7m+E7Xn72JQ24pPjIsnEhyiVCXu2ihElda4TjVevKWr0cWIOTIgHeXiM39FiMA4F6GM9S7RaCQtByE8cAblcG/mIi1Cz9b/cWRIu5y1SUthye+G0qyesTvYVUUSHKJUuNeLF8H5WY90th09x+9GtwvL+aryjmHhUJGU60BWaRXBk+AQ5eSiYozkBHPQE9uEqEokOMSA7o601cGSbSEqiXOzqHDO2l4cwcluIvwkOEQp177prk3rsPMvY2J+lc145lwGJVa6Cx++ugN1qie67SRYEeGaKyIqhwSHKOccpJPAENtu79ecA6fy+fVQz7N3o03taglMGxv4nhei6pHgIEQlqJFkKbOJvRDRTuY5RKnY6HwQwp1zpV+Ll13cRGyQlkOUk6+XiDX/ur0Hu47nhWWHP2EcaTkIIcKqVnKCzFiuAiQ4RCnn3gEJAex1K4QQ4Sbtvig1uW9zcvIK+fWwVv4PFkKIMJPgEKWSE8yyWYwQwjDSZyGEEMKNBAchhBBuDA0OSqmHlFJaKdXAcVsppV5WSu1VSm1RSnU3snxCCBGvDAsOSqmmwGjgkMvdY4E2jn9TgX8aUDQhhIh7RrYc/g78gbKTga8DZmq71UAdpVT4Vv4SQggREEOCg1LqOuCI1npzuYeaAIddbmc77vN0jqlKqXVKqXU5OTkRKqkQQsSniKWyKqUWAI08PP
Qo8Aj2LqWQaa3fBN4E6NmzpyxFJIQQYRSx4KC1HunpfqVUZ6AFsNmxHHU6sEEp1Rs4AjR1OTzdcZ8QQohKpLTBO54rpbKAnlrrU0qpccB9wNVAH+BlrXXvAM6RAxwMsQgNgFMhPjdWye8cH+R3jg8V+Z2ba61TPT0QbTOkv8MeGPYCBcCdgTzJ2y8XCKXUOq11z1CfH4vkd44P8jvHh0j9zoYHB611hsvPGrjXuNIIIYQAmSEthBDCAwkOjoynOCO/c3yQ3zk+ROR3NnxAWgghRPSRloMQQgg3EhyEEEK4ievgoJQao5Ta5VgFdprR5Yk0pVRTpdRipdR2pdQ2pdQDRpepMiilzEqpjUqpb40uS2VRStVRSn2qlNqplNqhlOpndJkiSSn1W8dneqtS6iOlVLLRZYoEpdS7SqmTSqmtLvfVU0p9r5Ta4/i/bjheK26Dg1LKDLyGfSXYK4GJSqkrjS1VxJUAD2mtrwT6AvfGwe8M8ACww+hCVLKXgLla6/ZAJlX491dKNQHuxz6ZthNgBiYYW6qI+Tcwptx904CFWus2wELH7QqL2+AA9Ab2aq33a62LgI+xrwpbZWmtj2mtNzh+zsN+wfC4sGFVoZRKB8YBbxtdlsqilKoNDAbeAdBaF2mtzxpbqoizANWUUhagOnDU4PJEhNZ6GXC63N3XATMcP88Arg/Ha8VzcAh4BdiqSCmVAXQD1hhbkoj7B/al4W1GF6QStQBygPcc3WlvK6VqGF2oSNFaHwFewL43zDHgnNZ6vrGlqlRpWutjjp+PA2nhOGk8B4e4pZSqCXwGPKi1Pm90eSJFKXUNcFJrvd7oslQyC9Ad+KfWuhtwgTB1NUQjRx/7ddiDYmOghlJqsrGlMoZjlYmwzE+I5+AQlyvAKqUSsAeGWVrrz40uT4QNAMY7Fnf8GBiulPrA2CJVimwgW2vtbBV+ij1YVFUjgQNa6xytdTHwOdDf4DJVphPOTdEc/58Mx0njOTj8CLRRSrVQSiViH8D62uAyRZSyr5H+DrBDa/2i0eWJNK31w1rrdMf6XROARVrrKl+j1FofBw4rpdo57hoBbDewSJF2COirlKru+IyPoAoPwHvwNTDF8fMU4KtwnNTwhfeMorUuUUrdB8zDnt3wrtZ6m8HFirQBwG3AT0qpTY77HtFaf2dgmURk/AaY5aj47CfAFY5jkdZ6jVLqU2AD9oy8jVTRZTSUUh8BQ4EGSqls4DFgOvBfpdQvsG9dcEtYXkuWzxBCCFFePHcrCSGE8EKCgxBCCDcSHIQQQriR4CCEEMKNBAchhBBuJDgIEQSlVH2l1CbHv+NKqSOOn/OVUq8bXT4hwkVSWYUIkVLqcSBfa/2C0WURItyk5SBEGCilhjr3i1BKPa6UmqGUWq6UOqiUulEp9Vel1E9KqbmOJUxQSvVQSi1VSq1XSs1zLoEgRDSQ4CBEZLQChgPjgQ+AxVrrzsBFYJwjQLwC/Exr3QN4F3jaqMIKUV7cLp8hRITN0VoXK6V+wr48y1zH/T8BGUA7oBPwvX05IMzYl5sWIipIcBAiMgoBtNY2pVSxvjy4Z8P+vVPANq11ld6+U8Qu6VYSwhi7gFTn3s5KqQSlVEeDyyREKQkOQhjAsTXtz4DnlFKbgU3E1x4EIspJKqsQQgg30nIQQgjhTdZPNgAAAC1JREFURoKDEEIINxIchBBCuJHgIIQQwo0EByGEEG4kOAghhHAjwUEIIYSb/wefOv/flNO5twAAAABJRU5ErkJggg==\n" + }, + "metadata": { + "needs_background": "light" + } + } + ], + "source": [ + "# Create an object with links to the model and time series\n", + "problem = pints.SingleOutputProblem(model, times, values)\n", + "\n", + "# Select a score function\n", + "score = pints.SumOfSquaresError(problem)\n", + "\n", + "# Select some boundaries\n", + "boundaries = pints.RectangularBoundaries([0, 0], [20, 20])\n", + "\n", + "# Perform an optimization with boundaries and hints\n", + "x0 = 0.01, 0.01\n", + "opt = pints.OptimisationController(\n", + " score,\n", + " x0,\n", + " method=pints.LBFGS\n", + ")\n", + "opt.set_max_unchanged_iterations(10)\n", + "opt.set_max_iterations(200)\n", + "found_parameters, found_value = opt.run()\n", + "\n", + "# Show score of true solution\n", + "print('Score at true solution: ')\n", + "print(score(real_parameters))\n", + "\n", + "# Compare parameters with original\n", + "print('Found solution: True parameters:' )\n", + "for k, x in enumerate(found_parameters):\n", + " print(pints.strfloat(x) + ' ' + pints.strfloat(real_parameters[k]))\n", + "\n", + "# Show quality of fit\n", + "plt.figure()\n", + "plt.xlabel('Time')\n", + "plt.ylabel('Value')\n", + "plt.plot(times, values, label='Nosiy data')\n", + "plt.plot(times, problem.evaluate(found_parameters), label='Fit')\n", + "plt.legend()\n", + "plt.show()\n" + ] + }, + { + "source": [ + "## Using the CMA-ES to solve the same problem" + ], + "cell_type": "markdown", + "metadata": {} + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "tags": [] + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Minimising error measure\n", + "Using Covariance Matrix Adaptation Evolution Strategy (CMA-ES)\n", + "Running in sequential mode.\n", + 
"Population size: 6\n", + "Iter. Eval. Best Time m:s\n", + "0 6 1.12e+07 0:00.1\n", + "1 12 1.07e+07 0:00.1\n", + "2 18 1.04e+07 0:00.2\n", + "3 24 1.02e+07 0:00.2\n", + "20 126 9993283 0:00.7\n", + "40 246 9993282 0:01.2\n", + "60 366 9993282 0:01.8\n", + "80 486 9993282 0:02.3\n", + "100 606 9993282 0:02.8\n", + "120 726 9993282 0:03.3\n", + "140 846 9993282 0:03.9\n", + "160 966 9993282 0:04.4\n", + "180 1086 9993282 0:04.9\n", + "200 1206 9993282 0:05.4\n", + "220 1326 9993282 0:05.8\n", + "240 1446 9993282 0:06.3\n", + "260 1566 9993282 0:06.7\n", + "272 1632 9993282 0:06.9\n", + "Halting: No significant change for 200 iterations.\n", + "Score at true solution: \n", + "9993481.362921719\n", + "Found solution: True parameters:\n", + " 2.92147774048697517e+00 3.00000000000000000e+00\n", + " 9.01371505503933079e+00 9.00000000000000000e+00\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": "
", + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEGCAYAAACO8lkDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3dd3yT17nA8d+R5MEw25iAAbMhDLP3XoGQkNWkEEhI0pTmNmmS3rS9JGmb0QzSpGkzm2YWEpK02YMwwoawwg57GjDTmGVj8JDO/UOSka0tS34l6/l+PnywpFevjmXpfc54zjlKa40QQgjhymR0AYQQQkQfCQ5CCCHcSHAQQgjhRoKDEEIINxIchBBCuLEYXYBwaNCggc7IyDC6GEIIEVPWr19/Smud6umxKhEcMjIyWLdundHFEEKImKKUOujtMelWEkII4UaCgxBCCDcSHIQQQripEmMOnhQXF5Odnc2lS5eMLkqVlJycTHp6OgkJCUYXRQgRAVU2OGRnZ5OSkkJGRgZKKaOLU6VorcnNzSU7O5sWLVoYXRwhRARU2W6lS5cuUb9+fQkMEaCUon79+tIqE6IKq7LBAZDAEEHy3gpRtVXp4CBEVbdqXy77cvKNLoaogiQ4RJBSioceeqj09gsvvMDjjz8e9HnWrVvH/fffH3I5MjIyOHXqlM9jnnnmmZDPL4wz8a3VjPjbUqOLIaogCQ4RlJSUxOeff+73wuxPz549efnll8NUKs8kOAghXElwiCCLxcLUqVP5+9//7vZYVlYWw4cPp0uXLowYMYJDhw4B8Mknn9CpUycyMzMZPHgwAEuWLOGaa67BZrPRpk0bcnJyALDZbLRu3br0tlNubi6jR4+mY8eO3H333bju9nf99dfTo0cPOnbsyJtvvgnAtGnTuHjxIl27dmXSpElejxNCxI8qm8rq6olvtrH96PmwnvPKxrV47NqOWG2a/Tn5pNetTrVEs9tx9957L126dOEPf/hDmft/85vfMGXKFKZMmcK7777L/fffz5dffsmTTz7JvHnzaNKkCWfPni3zHJPJxOTJk5k1axYPPvggCxYsIDMzk9TUsutmPfHEEwwcOJA///nPzJ49m3feeaf0sXfffZd69epx8eJFevXqxU033cT06dN59dVX2bRpk8/j6tevH463TggRA6TlUEEXCku4WGzlxHnPaZ21atXi9ttvd+sWWrVqFbfeeisAt912GytWrABgwIAB3HHHHbz11ltYrVa38911113MnDkTsF/A77zzTrdjli1bxuTJkwEYN24cdevWLX3s5ZdfJjMzk759+3L48GH27NnjsdyBHieEqJriouXw2LUdDX39Bx98kO7du3u8kJf3xhtvsGbNGmbPnk2PHj1Yv359mcebNm1KWloaixYtYu3atcyaNSvgcixZsoQFCxawatUqqlevztChQz3OVQj0OCFEcKw2zffbT3BVx7SoTweXlkMlqFevHrfcckuZ7p3+/fvz8ccfAzBr1iwGDRoEwL59++jTpw9PPvkkqampHD582O18d999N5MnT+bmm2/GbHbvyho8eDAffvghAHPmzOHMmTMAnDt3jrp161K9enV27tzJ6tWrS5+TkJBAcXGx3+OEEKF7Z8V+7vlgPd9uOWZ0UfyS4FBJHnrooTJZS6+88grvvfceXbp04f333+ell14C4Pe//z2dO3emU6dO9O/fn8zMTLdzjR8/nvz8fK8tkccee4xly5bRsWNHPv/8c5o1awbAmDFjKCkpoUOHDkybNo2+ffuWPmfq1Kl06dKFSZMm+Twu1hQUlWCzaf8HClEJjp61t8BP5RcaXBL/4qJbySj5+ZcnJ6WlpVFQUFB6u3nz5ixatMjtOZ9//rnbfUOHDmXo0KGltzdv3kxmZibt27f3+Lr169dn/vz5Hh+bM2eOx/ufe+45nnvuOb/HxZJzF4vJfGI+949ow/+Oamt0cYSIKdJyiDHTp0/npptu4tlnnzW6KFHvzIUiAL7adMTgkggReyQ4xJhp06Zx8OBBBg4caHRRhBBBcp1zFO0kOAghRCVz5inlXSrmtcV7o3JcTIKDEEIY5Klvd/D8vF3M337C6KK4keAghAhJYYmV7DMF/g8Upcq3D/KLSgAottoqvzB+SHAQVV4MdfPGlD98uoWBzy3mUrH7TH7hW7RPgAMJDhFlNpvp2rVr6b+srCz69+8P2Bfec05UE5ERA9+/mLZo50kACkuir9Yb7c5dLObVRXuieoBa5jlEULVq1cosZgewcuVK4HJwcK6vJISIHy9+vxuAmknRewmWlkMlq1mzJmBPSV2+fDldu3b1uKR3vNt1PI9RLy7l3MVio4si/Ineym/UKd9QcI41fLnxSNS1IqI3bIXTnGlw/KfwnrNRZxg73echzj0SAFq0aMEXX3xR+tj06dN54YUX+Pbbb8NbriripYW72XMynxV7TjGuyxUReY0Ve04x+Z01rPvjSBrUTIrIa1Rl0msXPF0ukjq7PhfuPMl3Px2P2Gc9FNJyiCBnt9KmTZvKBAZhd/h0AUt2nYz465T/Qjq9s2I/AFuyz3p8XAhX0+fsJGPa7Iid/+zFooidOxT
x0XLwU8N3ZbVptNZYzBI3I23435ZQbNVkTR8XtnMePl3Agh0nuHNAC5TUbSPKmXHjLfhWNW8s3Wd0ESqVXAHL2XUij+3HvO8al32mgIO5F7w+rrV9Z7i8S777ylNSUsjLywu5nFVBsdX/RSXYC8+kt9fwxDfbOVsQXbWwaHDg1AXmbzsetvM5u0SirKu80hw7d5Fb3lhVuoZXIGLpvZLgUE6Jn8kopy8U+Rwk1RryC0s4mOt7clCXLl0wm81kZmbKgLQHodb6nUE5ClcjiKgF20+QMW02u094r3AMe2EJU99f7/Xx8tYfPMNP2efCUbwq6V9L97M26zRfbHRf2HHVvtyAKijR3LqNj24lg7gu2V3+voSEBI9LdsezuVuPcc8HG9j4p1EVPteJ85eokRg/H++5jhbBpsNnaZuWEpZz3vRPR9q1n26/OIvDfhWWWJn41moym9bhq3sH+Dw2mufixH3LocRqkxmeUeLt5QcA2JtzOahqDa8t3kvWKe9deZ6MfWl5WMsWq95cto9Hvghzpp5DFF/XDLM/J59Djl6DXcfdu6fLB9Jofg/jPjjsPpHvsykufPsx63TplyGsHN+a0xeKeH7eLia9vSbkU8VSP2+4PfPdTj5cc4iFO4Jb2O3YuYsBHxtt+fmV7clvt5f+PPxvSxn192WV8rrnLhazdHdOxM5veHBQSpmVUhuVUt86brdQSq1RSu1VSv1HKZUY6rkD+dCW2GJ76n9RiY1cA7YcdL63N7+xisHPL67QufaedA/Ox89dKvM6gbbuXNesieYme2X7xYx1QR3f71n/XZ6Xs5V8s9k0by/fz4XCkqDKEM0uFVtZFuCFWWsdkcmc//PBeqa8u5bTQQyIB8Pw4AA8AOxwuf0c8HetdWvgDPCLUE6anJxMbm5ula/VHDh1gSNnL/odSA+Gv/dMa01ubi7Jyclheb3tx+zBwfVV1x88E5Zzx4tQP+YZ02Zz34cbQnpuoLF34c6TPDV7B0/N3uH/4BjxxDfb2R9gV+eby/aT+cR8jp51b42FsgDfywv3MPyFJew9ae9+jdSKroaO2Cml0oFxwNPA/yr7OzUccC44NAN4HPhnsOdOT08nOzubnBzf0f3EGfsfbEdeNY+3/R1/qdjKqfwizieYuJiThNaaE2cvoRSYz3s+RzgdO3cRqw1M55IxmypeVS622jhxvpAGNRNJTjB7PS45OZn09HRgu9djQuX6W1RGaK9K9YdQPgHfbjnGfcPPk1G/hs+/uTdaQ+fH5vF/Y9szuW9zt8cvOlp9/tK7Y8n+HPdkE2/mOZIFjp276PZZy3dpTTkfs9nsCdzevs/OdZkapkR2Vr/R6Rz/AP4AONMr6gNntdbOdywbaOLpiUqpqcBUgGbNmrk9npCQQIsWLfwWYKxjxmPW9HHsPZnHL2csK73t73iAhTtO8Muv1zGsXSrv3dmVS8VWrv7TXBItJnY/Ndbna7+/+iBpKUmM7tjIbzm9ue2p7zmVX8SPj44kNQwflllrDvLo11lM7N2MZ2/sUOHzlXffhxvolVGPKf0zQnr+V5uOMKB1g7AsdxELyyZXljH/WM74zMa8PLGb22MHTl3gu5+OMaFXU+p7eN+11uQVlvDnr7Z6DA6VZdfxPJIsJjIa1OBikZUt2Wfp07J+2F9n4purOXza/zib5zRV7zWR1xfvZUKvpox7eQW7TuSFdXJoKAzrVlJKXQOc1FoHnnjtQmv9pta6p9a6Z2pqaljK9MbS/V4f23jojMdBOtfrS7B9/3/6cmtQeefR7Kfsc3y09hD/WrqP297xPnj87ZZjPPb1tjL3WcuN+yzZdbm1dzLP/p7mXigiJ6+QBz7exC9neu8/9/h1DLJl8OqiPXGZ378u67TH+29+YxXPz9tFj6cWeHw8WhpeV/1jGUNfWALAw59v4ecBXsSDtWp/LkcdY2LhdPTcJT7fcIRdHhJk9pzIc7v+RPp9N7LlMAAYr5S6GkgGagEvAXWUUhZH6yEdcJ9hYoAbXl/ps9tm7YHT9HhqAS9N6FqJpYoe1766IuTn/vY/m7mhW3rpWMeri/eWPrbx0OWxB2ff6vEIfDHBvnSKTWtemL+bF+bv9ltzyy8sIf9SCY1qh2fsJVoVFHkeSI7mGdI7j9svsPkxNgh+zMtn25kB9euhrdwei1T717CWg9b6Ya11utY6A5gALNJaTwIWAz9zHDYF+MqgIrqx+ph2e6HI3q+69oDn2pcvO4+f5+vNR0MuV1Xw0dpDbDjkvgDe2YLK66ee+OZq2jw6J+Djr31lBX2fXRiRsuQXlvDa4r0+P3OuTpyveMA8eu4S2WcKAn7N8pclb8+KlaSQH/aeCjmr6M1l+/jvusN+j/P3Vry/+qDPx19fUnnrO0VDtlJ5/4d9cHov9jGIdwwuj1fHz13ynuMfxPdhzD+Wc/9HG8NTqDD5aO0h9pzII2PabPY4mrlnLhRFrCb28OeeJ2o5a4Cujp27xPajnte/8vTlLrLaAsrmWluuW+WPX/qePHYgyIl5wZg+x77x/JytxwI6fsXeU0Bg4yjnLxWzLyefIx6yZ655ZQWtHvmuzH2uZ/zlzHVeL6D+LnzROMazYPsJPl2fzflLxUx6ew1TfXRZ+vLMdzv5w6db3O4vKrGVVnqyThXw8Y++A8ipILqmIx1zoyI4aK2XaK2vcfy8X2vdW2vdWmt9s9a68pP4y1m+x3PGU99nF/L4N2WzdTx9/u+e8SNzt4ZvwTNXwX5Aikps/Oilb7m8n7+5GrCPEwB0+8v3DHwusCU/bBFe3Ojql91nQH+6PpsSD6+bk1fIxLdWez3X8/N2ebz/g9WHPN7/w95TPPOd57TM0xeKfC7MGKgLhfaW6MHcArYdDe/4x+8/2cyIvy1lwHT3v6W/ltr320/wX8cFLpgLWbS6e+Y6fvfJZoocW50600PD5TWXLtKHPtkc1nOXilDMjYrgEA3Gv7qCT9dnl7nvvR8OsGpfLre9s9bteNc/uidFLjXVBTtOcs8HkR14DrRS9uycHdz8xiqPNe+cvEIe/WJr6W1Pk2tcLx6FJd4nprV85Dt2+FjdNhSefscLhSWlNdnf+fjy/Zjlfd6Ep9aJL5PeXsObyzwnLwx8bhFDnl/i8/k5eYUBt8Cen7eLcS+HPp7jybxtwc2WDrXGv+nwWU6GoburImxas/XIOWat8d1d48tvPtoY0j4OSsV2ADU6lTVqbPGQnfLEN95z+L3VNl1r8rtP5IVtETR/nK+7fE8OKckJdG1ax+NxuxwXwjMeVoz01qLw1gY456eWufHQWTpcUcvjY12fnO/zuYG4UFhCx8fmAZ5TjwPvOw+fgiL/M7l7Pb2AxrWTWfnwiDL3Pzd3J6k1k7hroP8UbG+01nyy7jDXdW1CoiU8db/yoeHcxWJy8i5f9Lwtq379az+Qkmzhqes7haUcoXANrJP6hJZm+02cjgdKyyGCXL9AkVK+UnfbO2u5/rUfwvoaufmFjHxxqdv9nrpwAhWOgWZ/XQDPzoneGbmeUiH/uWRfmXV6QjF363F+/+kWXlq4O2ILSr66eC
+9nr6c1nq3j6U58i6Fd4zKZtO8sXRflVqKwx9v42WRbpVIcPAi1OyhSI65uU6TX7TzBKv25XIq3/O6Kt9vPxH0SqbefLvlmMcL8e8/9d2HGqnVQAMVj0twnHfMQs7NL+KaV8LbHeXNNi/JAeX5+mos2nmizDyhC4UlHrOc5m47zvQ5OyMa+PMLS8iYNpsWD0duS9BAvL18P3e8t9Zvt2ek9oSQ4ODFPysxZSwQ2WcKaPPoHP7zo32Q9K5/r/M5yPrLmetKJwQFKtB+8Nz8QjKmzeaHvbl+j918+GzYUhnLZwe5BktPe1GXD5wr950KSzkqwl+qYkU5x1b2nswP2+BqXhC19FAmnRUUlXDXv9dxx3s/AnAy7xIdH5vHG0v3u82xcLaGnAP2Tv66OF19tj6bxT72Ln91kX08UevAF3yMhKdm7ygzIbSyxXVw8JXeGI5mq7cJLaHYl2O/MDozhzypSEth5b5THlPxwD091NtMWU+ue+0HPlmXzcZDZ0prtaG69a2yM69dEwicFxZ/z3edDxBIfev+jzby0VrPWUuuDpy64Hfp8gOnLvCnL7f6PCYQi3ae4P1VWT6PWWdQq2nQXxezeOfJMsHaX93A2T3p/Pw6Jzk+N3cnV/55HusPXh4Lu+BlTOdAuQwxX6/50CebudPL5yX3QlGZvaLb/2mu78L7UFBkZdYa/58dfyqrBVheXA9IL9rpvfYQ6kqHH6+9nMf8u08287Me6SGdJ1jnLhYx8sXg15F/Y+k+xnRsxHof2TwV9YfP7EGnR/O6YT2vv5xxT37MOs19H27kjv4ZLPTx93f6evNRvt58lIm93dfvcjWsXCvt5PlLNKxVduZ0+c/U8j05DGrjeemX8uv0/3PJPn41uCUmk+Kuf9v7+G/rl+G3/Ea489+eL7zeulydmX95hSU89tVWembUK/P4xkNn6dHcfp9rcP1iYzY/Zp3hmRs6u53TFkBr1XWV1Cd9JJ/Eq7huOfj6AIXai+drkHbT4bOlOfA9PdS+LwaQ6eLN64s9d4P5ypE/W1DE9Dk7mfjWas5UwkzkaFiv6NvN9pbXv1dmRfR1nLW9cwXFPPnN9tI8eleeUqTBnlFWPo34ubk7+b7chj2RWDcoErxlMzn9y2VNsxmrDvKbACeE/vY/m/nQS818TwBdav1d5nnE8goFkRrnjOvg4Eu4kiBP5l3uxrj+tR9Kc+A9ZRp0+PPc0q6u5+bu5IuNl7tN/OWL/3TE84XXV478HMfEvIvF1oBqWlWBvwtVuDgXDPzrvJ28+8MBvtx0JKDuKcBr99vvP9lcJt9+0F8rtslSZVm4w95C83QN2xfE0te+xMoSHZEQqRyYuO5Wqgy9n3Zfe8fXINep/CJqJJlLB8SPnr1ETl6h35qur6/GG0v3cc8Q+4Jdzu/Qfpf+77MFxWUm7UVKZbxGqHxN6HPSWgfVwnp9yV5KrPY33GbTvPdDVqjFA+B8mNNCK4uncbJFO0+w7ch5erWo5+EZIhpIcPAinIPJ5fka5Cq/kJu3yXbBmD5nZ2lwcLpYLgvEW/O8qvFWwXTWbn1574esoOYh/HXuLn7es2nAx8cT57jJlH6hTUyL1O5n4jLpVqoi/DUtD58uu9pmHLfCPQpkwuKSCmzmHkxK5Ot+lmapSmasCiy19/yl4jLvYfnZ7/H8cZ79U2CLMwZLWg5xYtBfF3PfsNalt1ft9z9HIZ6U34AoXJxdaeUXaHRatS+Xw2cKymRyLTYwt70yuKamBqrL4/NpWi/y2+7Goj9/tY3bI5C5FufBIfqWEPbHW40/kIwF1010jJxcY6RQV/zwtAihk6/Jg/66P3xNZKxqlFKs2HOKyT52CvTl8Gn3JcY9+c5HTTrcq65WZXHdrRSFy8uLCFuwI7gVSZ12HjvP5sPumxGB7y6paNzDwCha65ADg/9zX/7517M2eH19T2uECc/ivOUQe5ybupQXqfVVhN2tb4d2UZO/ymXfhbinyVOzw7OOkoyzBSeuWw5GrptSEeU3GgdpBRmp/OxoV7E8uSrcPE0EDMe5zvjo8hOhi+vgEKv9j56+ZL76xIWoalyXPrGP6/hvFmSFYYe+eBLXwSFWvbvigNt9OTG845QQFRVIKvLwv8l4QzAkOMQgT7nh0p8q4tk9H3gehBahi+vgIN30QsS+YgO2g40HcR0cZBRXiNh3Q5i3xRV2cR0cJDQIEftOVsJe7fEoroODEEIIzyQ4CCGEcCPBQQghhJu4Dg4yHi2EEJ7FdXBw7tIlhBCirLgODq/G0aYqQggRjLgODkIIITyT4CCEEMKNBAchhBBuJDgIIYRwI8FBCCGEGwkOQggh3BgWHJRSTZVSi5VS25VS25RSDzjur6eU+l4ptcfxf12jyiiEEPHKyJZDCfCQ1vpKoC9wr1LqSmAasFBr3QZY6LgthBCiEhkWHLTWx7TWGxw/5wE7gCbAdcAMx2EzgOuNKaEQQsSvqBhzUEplAN2ANUCa1vqY46HjQJqX50xVSq1TSq3LycmplHIKIUS8MDw4KKVqAp8BD2qtz7s+prXWgMcFkLTWb2qte2qte6amplZCSYUQIn4YGhyUUgnYA8MsrfXnjrtPKKWucDx+BXDSqPIJIUS8MjJbSQHvADu01i+6PPQ1MMXx8xTgq8oumxBCxDuLga89ALgN+Ekptclx3yPAdOC/SqlfAAeBWwwqnxBCxC3DgoPWegXgbbudEZVZFiGEEGUZPiAthBAi+khwEEII4UaCgxBCCDcSHIQQQriR4CCEEMKNBAchhBBuJDgIIYRwI8FBCCGEGwkOQggh3AQcHJRS1SNZECGEENHDb3BQSvVXSm0HdjpuZyqlXo94yYQQQhgmkJbD34GrgFwArfVmYHAkCyWEEMJYAXUraa0Pl7vLGoGyCCGEiBKBrMp6WCnVH9COzXkewL7fsxBCiCoqkJbDPcC9QBPgCNDVcVsIIUQV5bfloLU+BUyqhLIIIYSIEn6Dg1LqPUCXv19rfVdESiSEEMJwgYw5fOvyczJwA3A0MsWpXD3VTu61fMVFkrhIIud1DY7q+hzVDTigG7FHp1Ns6E6qVYEmlXO0NR2mhTpOQ3WGVM5RW13AjA0TNoqwcE7X4Bw1OaIbkKUbccDWiCM0wPtmgSIQFkpork7QWh2hhTpOPZVHPZVHCgWlxxSSwGmdwmldi2zdgH26Mft0Y/KRqU0VVYsLtFHZNFcnaGY6SSrnqKEuUoNLABRhoZBETunanNB1OaIbsEs35aBOw2bwHOVAupU+c72tlPoIWBGxElWiJFVMPZVHNU5RjSLqmPJJURdLHy/UFnbqZmywtWG5rTNrbB24QDUDSxz9FDY6qEP0Ne2gr2k7PUy7qa/ySh+3akUutTmna1CCCRsmEimhtukCtcknSZWUHpurU9hoa80GW1uW2TqzTWegZVK/T9W4RF/TDnqbdtLLtIvOan+Z97RAJ3GaFPJ0NZyBN5lC6pnyqK0Kypxrn+0KfrS1Y51uxzJrF05StzJ/lZjUiFyGmLfQx7SDTLWPVqZjpY/ZtOIMNcnT1blAMhpFEsUkU
0Sq6SzJqrj02AKdxHbdnNW2Dqy0dWS9rS2FJFbq76K0dusx8v0EpdoBs7XWrSNTpOD17NlTr1u3LujnZUyb7XZfCgU0Uadoo7LpZDpAZ3WAbqa9VFNFFGsza23tmW3ry1xrL05TKxzFj3lmrPQ27WSMaS1XmdfRSJ0BIMuWxlpbe7br5uzSTdlna8wpavuoEWkacpaWpmO0UkfJVPvobtpDa5O9oXpS12GxtStzbL1YYetMibTqAKjLeUaZ1zPKtJ5Bpp9IVsUUaTM/6Zass7Vlp60Ze3UTDuhGPlsDCZTQVJ2klTpKG5VNd9Meepp2U0ddAGCzrSULrN2ZY+vNXp1eWb9e1GurDjPevJIRpg10MNmz/k/qOmy0tWazrRXbdXMO6jSO6AYUkeDlLJo65JOucuhgOkQHdYiupr10UfuxKBuXdAJLbZl8Z+3NIlt38sr9HbOmjwup7Eqp9Vrrnh4f8xcclFJ52McclOP/48DD5VsURgpncPAkiSJ6mHYzyPQTo03raGU6Rok2sdLWkU+sQ5hn6+Xjj151XUEut1oWMsG8mFR1jos6kaW2TL639mClrSPHqB+W16nHeYaYNjPcvJEhpi3UUgWc0rX4xtqPL60D2KxbEX/dT5ruag+TLQsYZ1pNkiohWzfge2sPvrf1CFtNU2GjrcpmhGkjI83r6ar2YVKaLbYWfGYdzNfWfpyJw0pSGqe53vwD15t/oIPpECXaxFpbe5bYMlli68punU44PpM1uEhv006GmDYzxvwjjdQZCrWFRbZu/Nc6lGW2LlgxGxMcYkGkg0NZmg7qEOPMqxlvWkkzUw6ndU0+sw7mY+sw9ukmIZwzlmj6m7Zxu/l7Rpns7/kiW3c+sw5iqa0LF0mO6KsnUMIQ02auN69glGkDSaqYbbbmvG8dxVfW/hF/faNV4xLjzau4zfw9nUxZ5OlqfGYdxCfWoWzTzYl0kEzlLNeaV3GjeTmdTFkUazPzbD2ZWTKatbp9xF/fWJp+pu3cbp7PKNN6LMrGBltrvrQOYLa1L7nUjuirK2x0U3u5xrya8eaVNFDnOaHr8I+Sm3j26RdDO2cowUEp1d3XSbXWG0IqTQRUbnC4TGGjv2kbE82LuMq0jgRlZaX1St6zjmGhrbvhA0rhVJMCbjIv5zbz97Q2HSVXp/Af6zBmlYzgCKmGlCmFAsabVzLZvIAOpkOc19X4zDqYD6wjq1yQbqGOMdm8gJvNS6mlCthha8oH1lF8YR1IgUEBsZ06xM/My7jZvJQ66gI7bE2ZaR3Nl9YBVSpIl//sn9Y1+a91GB9Zh3FQNzKkTAmUMNy0kZvNS5hn68XzTz8f0nlCDQ6LfZxTa62Hh1SaCDAqOLhqwDluNi9lsuV7mqhcDtoaMsN6FR8gLZcAACAASURBVJ9Yh7j1D8aStuowt5vnc4N5BTVUIZtsrZhZMorZtr6VPkDmnaaH2s1tlu8Za1pLkiphhbUjM62jWWDrEbNB2oyVEaYNTDYvYLD5J4q0mbm23swsGcU63Y5oqaUnU8h480qmmOfT0XSQ87o6n1iHMNM6yrCLZzi0Uke4wzyPG83LHZ/9lrxfMppvo+qzbyfdSl5EQ3BwMmPlKtOP3GmZSy/TbvJ1Mp9YhzDDOposfUXYXy8SLJQw2rSOKZb59DHtpFAn8LW1HzOto/lJtzS6eD7V5xw/Ny9msmUBjdVpsnUDPigZyX+sQ2Omb7yB43e41bKQJiqXo7oeH5aM4D/WYeRQx+ji+WAfB7nDMo+xprWYsbHElslM62iW2rrERKaZwsYQ0xbuNM9liHkLhdrCN7b+zCwZxRbdyujieWVYcFBKdQKuhMttRa31zJBKEwHRFBxcdVL7udMyl2tNq7BgY7GtK+9Zx7DC1oloqfW5SuM0E8yLmWhZRCN1hkO2VD6wjuS/1qGcJcXo4gXFjJWRpvVMMc+nv3l7aYCbYR3N1qgMcJqeahe3WRYw1rSGRGVlubUTH1hHscDWHStmowsYlFTOcKt5EZMsC2mozrLf1oj3raP4NEpb0tW5xE3mZdxhnkcr0zFO6jq8XzKSD60jIj6WEA5GZSs9BgzFHhy+A8YCK7TWPwupNBEQrcHBKZWzTLIsYJJ5AanqPPttjfjYOozPrIMN/+ApbAw0bWWSeSEjHYNsS6yZzLCOZqktM2a7ZFy1Udncbp5f2j2wwdaaGSWjmWPrY3iWWS3yudG8gonmRbQzZXNeV+dTx7jJft3Y0LKFQwIljDWt5XbLfHqadnNBJ/G5dRAzrKOjIh22nTrEBPNibjIvp5YqYJOtJe+VjOE7W9+YmgBrVHD4CcgENmqtM5VSacAHWutRIZUmAqI9ODglUszVpjVMtCyij2knRdrMfFtP/mMdxkpbx0qtHTZVJxhvWsUt5iU0N50kV6fwiXUoH1qHc0inVVo5KlMKBdxkXsbt5vm0NB0nR9fiI+twPrcOqtQuPxM2+ph2cJN5OdeYVpGsitlka8lH1hF8be1XpQZzXXVUB7jDPI/x5lUkqWJWWq/kc9sg5ll7VWproiYFjDOvYaJ5EV1N+yjUFubaejOjZDQbdBuisVXvj1HB4UetdS+l1HpgGJAH7NBatw+pNBEQK8HBVSt1hInmRdxkXk5dlU+uTmGetRezbX1YbbsyIoEiXZ1kpGkD480r6W7aC8AaW3tmlYxkbhzN1XC2lm43z2eEaSMmpdlqy+Bba1++s/WJSHA0Y6Wb2sM48xrGmdfQUJ0lXyfzlXUAH1pHsE1nhP01o1VdzjPBvIQJ5kU0N52kUCew0NaNb6z9WGHrHJFAUYsLjDStZ6x5LYNNW0hSJeyypfOxdRhfWAfGXLdpeZUaHJRSrwEfAROBR4EJwENAPrBJa31nSKWJgFgMDk5JFDHUtIlx5jWMMG2ghirkvK7GGse0+bW2DuwOaY0nzRWcpotpH30ck2icU/l32JrxlbU/31j7GZaGGi0akcs48xquMa+mmyNgZtnS+MHWiR9sHdmiW5Gtg1/jyUIJ7VQ2maZ9DDJtYYBpG7VUAYU6gUW2rnxj7cciWzcukRSB3ypWaLqqfVxn/oFrzKtIVecp0SbW67Yss3Zhg27DVluLkIJFLfLpZMqin2k7A0xbS2caH9H1mWPtzbfWfmyqQpMnKzs4PIA9IDQG/oM9UJwBammtt4RUkgiJ5eDgyh4oNjPYtIX+pq20MJ0AoFib2asbs1c34biux3Fdl/PUoEhbKMZCEsXUVBcd0+9P0UydpJXpKA3VWQAKdQKrbR1YYstkqS2zSvRlR0K6ymGEaQMDTVvpa9peus7WWV2DHbbmZOsGHKU+OboOF3UShY6WVnV1iZpcoqE6S7o6STN1knYqmyTHWjlHdT2WWbuwzGb/JwvauXO2rIaaNzPEtJnOpqzSx/bZriBLN+KwTuWork8+1cnXyZRgJpESklUR9cjjCpVLY5VLO9Nh0tUpAEq0iS26JT/YOrHQ2r1KBQRXRnUrNcceJCYA1bAHiQ+11ntCKk0EVJXgUF4Tcuhm2ksH00GuVAfJUMdppM5QTRV5fc5JXYdDuiEH
dRqbbS3ZbGvFDt08brqMwsVCCR1VFp1MWXRUWbQzHaaxyqUhZzArz9+ZQm3hiG5Atk5lp27GT7YWbNEtOajTqIoXpEiqQx5dTPvpovbTyZRFU3WSpiqHWuUWB3R1RtfkuK7HHt2E7bbmbNfN2WhrE5XZUeFm+DwHpVQ34F2gi9Y6anLrqmpw8ExTiwukcJFEVUICJVwikXxdjXyqSRCIMAsl1CWPJGVfTVMBF3QyBSRxjhoxkcsfy6pziZpcpIa6RAIlFJJAoU7gHDWq7EB+ICIRHALZ7MeCPX11AjACWAI8HlJJRBgozlOT89T0sAWTiLQSLORQV957gxSQbF8uRN7/iPMaHJRSo7APRl8NrAU+BqZqrS9UUtmEEEIYxFcb+GFgJdBBaz1ea/1hZQYGpdQYpdQupdRepdS0ynpdIYQQPloORi6sp5QyA68Bo4Bs4Eel1Nda6+1GlUkIIeJJtI6e9Qb2aq33a62LsHdpXWdwmYQQIm5Ea3BoAhx2uZ3tuK+UUmqqUmqdUmpdTk5OpRZOCCGqumgNDn5prd/UWvfUWvdMTY3vWb5CCBFu0RocjgBNXW6nO+4TQghRCaI1OPwItFFKtVBKJWKfY/G1wWUSQoi4EZULlmutS5RS9wHzADPwrtZ6m8HFEkKIuBGVwQFAa/0d9s2FhBBCVLJo7VYSQghhoLgODp2bRP/esEIIYYS4Dg4mWUVZCCE8iuvgkGCO619fCCG8iuurY9emdYwughBCRKW4Dg5KupWEEMKjuA4OQgghPJPgIIQQwk1cBwcl/UpCCOFRXAcHIYQQnklwEEII4Saug4N0KgkhhGdxHRyEEEJ4Ft/BQZoOQgjhUXwHByGEEB7FdXBonVrT6CIIIURUiuvgcE2XxkYXQQgholJcBwchhBCexXVwkAnSQgjhWVwHByGEEJ5JcBBCCOFGgoMQQgg3EhyEEEK4keAghBDCjQQHIYQQbiQ4CCGEcCPBQQghhBsJDkIIIdzEdXCQGdIVYzHJGyhEVRXXwUFUzA3dmhhdBCFEhEhwEEII4UaCgzDcO1N6Gl2EuPerIS0rfI7P/qdfGEoiooUEBxGyGkmWsJwnrVZyWM4jKkBX/BSt4mTzrD+O6xD2c9atngDA/SPahP3coYrr4KBkE+mQvTShK52a1PZ73LwHB5e5fc+QVjx5XUe349b/cST/N6Z92MpXlbVvlBL2c/qKDe3Swv96sSpr+jjuHtSSvU+PDet5qyfaK1qdGtcK+rnf/mZgWMviFNfBQYRuTKdGHu9/ZWK3MrfbNUrhrgEtSm9PG9veY0iuXzOJmsnhaYlUdXWrJ4b9nFp7Dw+/HtYqwHOEqzTx7TfDWwd1fCCVtFAYEhyUUs8rpXYqpbYopb5QStVxeexhpdRepdQupdRVRpQvHP2vVV2Sxex2X9b0cVyb6b71alJC2Y+Zv2tIiwY1KlI0UQFXd3YP+ibJ+Q5aWq0k7uifEfTzlFI8NLod/zM0sIAcSUa1HL4HOmmtuwC7gYcBlFJXAhOAjsAY4HWllPtVKMJSayb5PeYXA1twe7/mAZ/ztVu7V6RIMWXqYM/B9ZeDWni836lbU3sd4eGx0r3ki79rdVot75/fH6YNZ8nvhvLW7WWTAJy1/trVEtyeU79mIrf2aRZ0OeOZxWTi8fHu3acAk/vGxntpSHDQWs/XWpc4bq4G0h0/Xwd8rLUu1FofAPYCvSu7fP1a1S/9ef5vB/PKxG7c1D29zDFd0mtTLSHwuDWuyxVhK1+0e+RqzwN2dfx0h3RqUptdT41hdEfPXVZOzepVD7lssaJpvWohP3fmXX28PtakTjUyGtRg1JVpXo5QPH7tlW73TpLg4NfPeqT7PwhI9tDq9tWtZ5RoGHO4C5jj+LkJcNjlsWzHfW6UUlOVUuuUUutycnJCemFnDaz8TN/6New1r4YpSbRNS+HazMb87ZbMMsf0zKhX+vPUwS35YdrwoF47mmcXv3Bzpv+D8P2BHtoulfEeupjsz/N+Tk/dVeX1a1nf7zGx7oau3icY9mnh+/evVa1iYzd3DPDdwhOeubaYh7ZLDeq5zq9ENF0VIhYclFILlFJbPfy7zuWYR4ESYFaw59dav6m17qm17pmaGtwfwp9gu1jr1UikSZ3ganrROvj67I2d/daAAvng//vO3rxcbnDaqUEA3XbBqIpddr7qkT0z6rLrqTFe+7QDzcL78O7LLYxaju4kj5/9coVp3yiFtY+M8Di+JCAlyeK1S8mbZEcvhLdrj9mlMrnwoSEhly0YEQsOWuuRWutOHv59BaCUugO4BpikL1dBjwBNXU6T7rivUoW7hffenb3c7rvyiuBT1irDhF5N/R6TaA7uY+NsJTkHNntm1PX7HH8X/N+Nblv6c9+W9XwcGXsC6cJJsph55OoOTOxt/3sF8ncrr3/rBux9eiwrpw2nbg17l5+/sJJWK4kvfj2AhrWSecyl++mO/hlxu1ZZ+e9yo9rJJAT4Hdn859G8dmt3tySMmuXmELm+ta7zSbx3D1acUdlKY4A/AOO11gUuD30NTFBKJSmlWgBtgLVGlBF8tyBcH/IXTDwN8kXrF0l5KNjrkypWM//VkFZM6dc8qOwNf2M0yUGM90S7jX8aVfpz35b1ePqGzgE9L9Fiokt6Hf8H+mAxm2gcRKu3Xo0kqiXa33vXFmAwNeWqNGZkNim+uLc/m/48yv/BHtSunsC4Lle4ddHePahFQIkZ5YNIOBk15vAqkAJ8r5TapJR6A0BrvQ34L7AdmAvcq7W2RqoQFpNiZIc03rnjcs1+1t3eB/PclLuOrn1kRJhKdlmwfZeRUNG8+ppJFp64rlPpRaWidLl+jugbyguOs9Ye6wJtcX9zX2QmbRklyWKmTvVEUhxdxRUJ2M66WZLFzK+GeE9n/XhqX4CIprwala3UWmvdVGvd1fHvHpfHntZat9Jat9Naz/F1nopSSvH2lJ4MaWu/AE/q04wBrRuEfL6GtZJ59sbLtb6uTStWqwPjalmutfzGdWJ3eYtpAdS+9oR5tmukRSSZoRKzZWpXd29JxxpP79YVtavxzX0DefqGTm6Pmcv9zRLNJga3Db7i5+zS7duyPlnTx9E2grPXoyFbKSpkTR8XcHMevHcLTewdWMpfJJbuWPfHkWE71+PjO7L/matZ/fAImtf3PCnNYg7tdyh/HfL2XtZMstDZw+zPYe0aBvxagfTfB9o/XBl8XaPbNLT3NfvrUvOWrfTG5B5+X9/T36J7c/9jRJWpdcPoWcOp/NvVOb12QF2eu58ey8y7/GfpT+zdtEz24MqHg8uKrIjoTJkxWL0aidRMsvDoOPd870hpVq86h04X8MLNmSSYFWcuFPH4N9uDCiHhzgIymRSNantvNVzTpTE7j+fxr6X7w/q6TlufsE+Qz5g2u8z9qSlJHDl7sfR2oP2uax4ZwfqDZ/j1rA3hK2SEWEzuActft9xvR7alRpK5dJ2e8jqnB7/Mwu39mkfd+E6TOtXYezLf6GIEZGynRszZetzv9/hyKmvZI5+9sQsATetW4z/rDlO/Ersgo6fKFEU
SLSa2PnGV1zz9UF2b2ZgrvFxsnZOe0molcV25HPfHPExKMtKkvvaZ4QlmEw+PDf8KlYFwXrDuHdbK58XLdYA9rVYyV3eOjcmIvxzsfa6Bt8bFAyPbcPegsrPTE82m0s9PPR9jRw0dK+M2rWvsYHHW9HF+j2lS1/MAeijLVYQq0F64f07uEdDvpP1MdOjTsj4v3tLVY8JIpEhwCILrchkKVTq3wdtyBT0dzXFntH9lYjf++rMuPl/DWXMItAf4qevt/ZspEZ43sfMvY1j7yAiypo8rHaOpbD0c76cGft6rKb8d2Zb7hoVnieOGKUmG/V6unH/36okWt4pEqJeFOwe0IGv6OJ8tj9FXpvHenb3cgkuUJtWVfhZapl7u8qzImlztG6XwxuQeLPjfwf4PdhGtWYfhIMEhCE9e14mGKZcDweQ+zXlnSk+v22VOG9ue+b8dXKbPPthxv0BrCuUnJHkqkzPDIRTJCebS2qVRXN+JBLOJB0a2CVsG1NpHRzLDpQ+4ef3ga9CBrgnls4Yb5nHh67oG1vpVSjGsXUO3gVMn59In3ZtVPMkiWPU8dKU4y3mVn6VWApVetxpjOjWidcMUEi1lL4uB1PyrIgkOIVLK3ic/okOa1wu4xWzymk1Q/im+gobPx7zc7xrEnPpGybIT5VNR/bmxW5NK3y3uFwODX0LCW+ph+0YpZS5wvuYEBPve9G5hnwDobbaya/ZcRTSpU425Dw7isWuDm/nrtCaINO+JvZuVSSRY+L/uM4LDVWGfeVdvpo1tz19/dnnQN5BksGD/Tv7PF30kOEQZZ9DwFhD+dVsPGrt0N6Q7urbK78KV5Kj9jCvXx/7hL/vE3LacL/68KyM6hDYT1NP6T66rw/73V5Hf2nLug4O5M5TlmwM4plVqTbKmj/Oagm0JYyZW+0a13GrVrqoneW/Fedvt70MP84qevbFzmczBujUSSXeMMyRZTEwd5HnV32C6eP44rgN/ua4jg9o04J4hrTy2Tl67tXuZDahapbp3W4WSdXi9l54G+/mihwSHIBkZ4e/on8FVHRux/P8up7MNa9+QT+7p53bxcbZmyqf99W/VgBEd0soswVzRZvP1XRvTK4AlMSrK2bXhL8//jcndmfPAIP5naCuPmUyuWWjOmrc3N/r4IgMkJ1TNr1DbNPvnpkMQy7wkWczsf+bqMuNfKUkWtj3hfVuW/gHOK5rUxz7et/5Po8gIYGzhg1/4nsxaI8nCbf0yPLb6nRf8Ye1TSyeZ/fjoSL6pwI5rzm7efc9cHbHNecJNUllDFM4IP7ZTI85dLA74+PL9wr0ygl9bKJzzqP4xwfMCe+H2ws1d+GLjEb+TCxvUTKLDFbV8XtjuHdaKRLOPLCfH/55qwylJFvIK7SvOL3xoKAOmLyp9bNOfR3Eqv5CRLy4re74A3+9wzkWryLLfHRvXZveJfJ8tBYC3bu/J9qPnS2+bTJfr0vN/O5hGtZM97jXer2X9gMdDwD4T2NNsYNf3y/m6k/o0Y2Cb0Cezejp3qodu2mA8e2Nn/njNlV7HdKKRBIcoUCPJwtmCssEhnC2UaB9Qy/Ayya68OtUTuTNMy0n//qrANxSqkWjmQpF9FZf5vx3M2YJibvnXKjpcUcttNd461RP97lvhyRPjO/LY19vK3PfPyT3417J9fPfT8aDPt+2JqyrlQjTqyjSvi7+lpSRTK9nzbOiPAkiOGNI2lQQvEy09BtswpQ4Fcppgg7jFbKJ2Ne+Btl1aTZbtzgn7XKWKqJpt4gj6/VXtgNCXAOjXqj4TezfjuZsup7S6psKW/1yG8nkP5KIwMsQ+/HCoU81+8Xz06g5kTR/nsWYZLF8rs740oSvv3lHxcZZ6NRJpm5ZC/Zr28nubsxIM55jQlR42ls9sWofXJ/XgvmGt+eLX/Uu7QALZGKZGkiXqJq8Fa8ZdvXl7ivuKxoHyVSkKZGXhgK7/YYq/fxjTnk/u6RdVXU7ScgjSLT2bcktP38sjv3lbD48DXGBPwSyfQfLAiLZMeTc8i88+cnV7BrdNZY6f2ubQIJagCLdqieawt2Y+ntqvdCZ1+S91+UmFoVr6+6GAfRD4pQldGdo2+Pfw1+W6Rl5zrHjb3LGG1nUexjh+56iQvDKxG28t31/hlVirov4uuzf6u6g/NKqtzy4tIzp+EsymkLqHI0mCQwT42+ayvESLqTQbw7kJUKjbBk4dbL/4+AsOALPvH8ieE7GxDEE0cB28dA04E3s35aO1hz09xeNzPWlYK5l9z1ztcyyoab3qPHmd+6Ju8eiZGzrz17m7SifDmU2B5w39ZkRgEyd9fQedf05f+3XHOgkOBpo2tj0DHdkaT17XiREdGrrVCkNdoG9K/ww2HT5bZlZ3eR0b16Zj4+hpxsaqZ2/sUroGTqB+N7otLRqUzSSLpcHKcHId4A9Uy9SavHFbD5bu9r1F8AMj2vDSwj1BnTuQiadJFjN//3mm3y1bY5mMORjoniGtSvsYqyWaGdMpfOv+1KuRyIy7elM/iga4YpHWlGa+BLNU9q8Gt6RJnWq8eqvnTK77hrfxu6FRvFj0u6ERO/dvR7VlRHt791/nJrVD2jHPmxu6pQe1UVKskZZDjPrsf/pzqTi8+yDVCNNSFLHsvTt6YdOao+culd730oRuHDt3KagB3oev7sDDVxuzKGGsqWiaqNO1mY35dstR7h3W2uPjD4xow8gAttX85+Tu/Gvpfmp4Wd02XsT3bx8DvLVwe4R5jf3nbuocdQNiwfry3gF8vPYQPZqF/t4Mc9Qy3199sPS+5ARzhRZ1cxWNyyRUFbWrJfDx1IrPeB/UJpVBbYxfhNFoEhwEAD/vFdgmRdGsa9M6Ydl9T1Qtf7m+E7Xn72JQ24pPjIsnEhyiVCXu2ihElda4TjVevKWr0cWIOTIgHeXiM39FiMA4F6GM9S7RaCQtByE8cAblcG/mIi1Cz9b/cWRIu5y1SUthye+G0qyesTvYVUUSHKJUuNeLF8H5WY90th09x+9GtwvL+aryjmHhUJGU60BWaRXBk+AQ5eSiYozkBHPQE9uEqEokOMSA7o601cGSbSEqiXOzqHDO2l4cwcluIvwkOEQp177prk3rsPMvY2J+lc145lwGJVa6Cx++ugN1qie67SRYEeGaKyIqhwSHKOccpJPAENtu79ecA6fy+fVQz7N3o03taglMGxv4nhei6pHgIEQlqJFkKbOJvRDRTuY5RKnY6HwQwp1zpV+Ll13cRGyQlkOUk6+XiDX/ur0Hu47nhWWHP2EcaTkIIcKqVnKCzFiuAiQ4RCnn3gEJAex1K4QQ4Sbtvig1uW9zcvIK+fWwVv4PFkKIMJPgEKWSE8yyWYwQwjDSZyGEEMKNBAchhBBuDA0OSqmHlFJaKdXAcVsppV5WSu1VSm1RSnU3snxCCBGvDAsOSqmmwGjgkMvdY4E2jn9TgX8aUDQhhIh7RrYc/g78gbKTga8DZmq71UAdpVT4Vv4SQggREEOCg1LqOuCI1npzuYeaAIddbmc77vN0jqlKqXVKqXU5OTkRKqkQQsSniKWyKqUWAI08PP
Qo8Aj2LqWQaa3fBN4E6NmzpyxFJIQQYRSx4KC1HunpfqVUZ6AFsNmxHHU6sEEp1Rs4AjR1OTzdcZ8QQohKpLTBO54rpbKAnlrrU0qpccB9wNVAH+BlrXXvAM6RAxwMsQgNgFMhPjdWye8cH+R3jg8V+Z2ba61TPT0QbTOkv8MeGPYCBcCdgTzJ2y8XCKXUOq11z1CfH4vkd44P8jvHh0j9zoYHB611hsvPGrjXuNIIIYQAmSEthBDCAwkOjoynOCO/c3yQ3zk+ROR3NnxAWgghRPSRloMQQgg3EhyEEEK4ievgoJQao5Ta5VgFdprR5Yk0pVRTpdRipdR2pdQ2pdQDRpepMiilzEqpjUqpb40uS2VRStVRSn2qlNqplNqhlOpndJkiSSn1W8dneqtS6iOlVLLRZYoEpdS7SqmTSqmtLvfVU0p9r5Ta4/i/bjheK26Dg1LKDLyGfSXYK4GJSqkrjS1VxJUAD2mtrwT6AvfGwe8M8ACww+hCVLKXgLla6/ZAJlX491dKNQHuxz6ZthNgBiYYW6qI+Tcwptx904CFWus2wELH7QqL2+AA9Ab2aq33a62LgI+xrwpbZWmtj2mtNzh+zsN+wfC4sGFVoZRKB8YBbxtdlsqilKoNDAbeAdBaF2mtzxpbqoizANWUUhagOnDU4PJEhNZ6GXC63N3XATMcP88Arg/Ha8VzcAh4BdiqSCmVAXQD1hhbkoj7B/al4W1GF6QStQBygPcc3WlvK6VqGF2oSNFaHwFewL43zDHgnNZ6vrGlqlRpWutjjp+PA2nhOGk8B4e4pZSqCXwGPKi1Pm90eSJFKXUNcFJrvd7oslQyC9Ad+KfWuhtwgTB1NUQjRx/7ddiDYmOghlJqsrGlMoZjlYmwzE+I5+AQlyvAKqUSsAeGWVrrz40uT4QNAMY7Fnf8GBiulPrA2CJVimwgW2vtbBV+ij1YVFUjgQNa6xytdTHwOdDf4DJVphPOTdEc/58Mx0njOTj8CLRRSrVQSiViH8D62uAyRZSyr5H+DrBDa/2i0eWJNK31w1rrdMf6XROARVrrKl+j1FofBw4rpdo57hoBbDewSJF2COirlKru+IyPoAoPwHvwNTDF8fMU4KtwnNTwhfeMorUuUUrdB8zDnt3wrtZ6m8HFirQBwG3AT0qpTY77HtFaf2dgmURk/AaY5aj47CfAFY5jkdZ6jVLqU2AD9oy8jVTRZTSUUh8BQ4EGSqls4DFgOvBfpdQvsG9dcEtYXkuWzxBCCFFePHcrCSGE8EKCgxBCCDcSHIQQQriR4CCEEMKNBAchhBBuJDgIEQSlVH2l1CbHv+NKqSOOn/OVUq8bXT4hwkVSWYUIkVLqcSBfa/2C0WURItyk5SBEGCilhjr3i1BKPa6UmqGUWq6UOqiUulEp9Vel1E9KqbmOJUxQSvVQSi1VSq1XSs1zLoEgRDSQ4CBEZLQChgPjgQ+AxVrrzsBFYJwjQLwC/Exr3QN4F3jaqMIKUV7cLp8hRITN0VoXK6V+wr48y1zH/T8BGUA7oBPwvX05IMzYl5sWIipIcBAiMgoBtNY2pVSxvjy4Z8P+vVPANq11ld6+U8Qu6VYSwhi7gFTn3s5KqQSlVEeDyyREKQkOQhjAsTXtz4DnlFKbgU3E1x4EIspJKqsQQgg30nIQQgjhTdZPNgAAAC1JREFURoKDEEIINxIchBBCuJHgIIQQwo0EByGEEG4kOAghhHAjwUEIIYSb/wefOv/flNO5twAAAABJRU5ErkJggg==\n" + }, + "metadata": { + "needs_background": "light" + } + } + ], + "source": [ + "# fiiting using cmaes to the toy data\n", + "\n", + "# Add noise\n", + "values += np.random.normal(0, 10, values.shape)\n", + "\n", + "# Create an object with links to the model and time series\n", + "problem = pints.SingleOutputProblem(model, times, values)\n", + "\n", + "# Select a score function\n", + "score = pints.SumOfSquaresError(problem)\n", + "\n", + "# Select some boundaries\n", + "boundaries = pints.RectangularBoundaries([0, 0], [20, 20])\n", + "\n", + "# Perform an optimization with boundaries and hints\n", + "x0 = 0.01, 0.01\n", + "#sigma0 = [0.01, 100]\n", + "found_parameters, found_value = pints.optimise(\n", + " score,\n", + " x0,\n", + " boundaries = boundaries,\n", + " method=pints.CMAES\n", + " )\n", + "\n", + "# Show score of true solution\n", + "print('Score at true solution: ')\n", + "print(score(real_parameters))\n", + "\n", + "# Compare parameters with original\n", + "print('Found solution: True parameters:' )\n", + "for k, x in enumerate(found_parameters):\n", + " print(pints.strfloat(x) + ' ' + pints.strfloat(real_parameters[k]))\n", + "\n", + "# Show quality of fit\n", + "plt.figure()\n", + "plt.xlabel('Time')\n", + "plt.ylabel('Value')\n", + "plt.plot(times, values, label='Nosiy data')\n", + "plt.plot(times, problem.evaluate(found_parameters), label='Fit')\n", + "plt.legend()\n", + "plt.show()\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.8.2 64-bit ('pints': conda)", + "language": "python", + "name": "python38264bitpintsconda8d0b754a30424fdd8045d82b54ea71e7" + }, + "language_info": { + 
"codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.2-final" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/pints/__init__.py b/pints/__init__.py index 55cc96049..0519eca9a 100644 --- a/pints/__init__.py +++ b/pints/__init__.py @@ -166,7 +166,8 @@ def version(formatted=False): optimise, Optimiser, PopulationBasedOptimiser, - TriangleWaveTransform, + LineSearchBasedOptimiser, + TriangleWaveTransform ) from ._optimisers._cmaes import CMAES from ._optimisers._cmaes_bare import BareCMAES @@ -175,6 +176,7 @@ def version(formatted=False): from ._optimisers._pso import PSO from ._optimisers._snes import SNES from ._optimisers._xnes import XNES +from ._optimisers._lbfgs import LBFGS # diff --git a/pints/_optimisers/__init__.py b/pints/_optimisers/__init__.py index 4cb19588d..ae25833e7 100644 --- a/pints/_optimisers/__init__.py +++ b/pints/_optimisers/__init__.py @@ -9,6 +9,7 @@ from __future__ import print_function, unicode_literals import pints import numpy as np +from numpy.linalg import norm class Optimiser(pints.Loggable, pints.TunableMethod): @@ -279,6 +280,938 @@ def set_hyper_parameters(self, x): self.set_population_size(x[0]) +class LineSearchBasedOptimiser(Optimiser): + """ + Base class for optimisers that incorporate a line search + within their algorithm. + + The Hager-Zhang line search algorithm [1] is implemented + in this class. + + [1] Hager, W. W.; Zhang, H. Algorithm 851: CG_DESCENT, + a Conjugate Gradient Method with Guaranteed Descent. + ACM Trans. Math. Softw. 2006, 32 (1), 113-137. + https://doi.org/10.1145/1132973.1132979. + + Extends :class:`Optimiser`. + """ + + def __init__(self, x0, sigma0=None, boundaries=None): + super(LineSearchBasedOptimiser, self).__init__(x0, sigma0, boundaries) + + self._evaluator = None + + # Set optimiser state + self._running = False + self._ready_for_tell = False + + # Best solution found + self._xbest = self._x0 + self._fbest = float('inf') + self._dfdx_best = [float('inf'), float('inf')] + + # Number of iterations run + self._iterations = 0 + + # Parameters for wolfe conditions on line search + + # As c1 approaches 0 and c2 approaches 1, the line search + # terminates more quickly. + self._c1 = 1E-4 # Parameter for Armijo condition rule, 0 < c1 < 0.5 + self._c2 = 0.9 # Parameter for curvature condition rule, c1 < c2 < 1.0 + + # boundary values of alpha + self._minimum_alpha = 0.0 + self._maximum_alpha = float("inf") + self._proposed_alpha = 0.001 # same default value as used in stan + + self.__first_update_step_not_completed = True + self.__update_step_not_completed = True + self.__performing_line_search = False + + # Increase allowed between accepted positions when using approximate + # wolfe conditions, this takes into acccount machine error and + # insures decreasing. 
+ self.epsilon = 1E-6 + + # range (0, 1), used in the ``self.__update()`` and + # ``self.__initial_bracket()`` when the potential intervals violate + # the opposite slope condition (see function definition) + self.theta = 0.5 + + self.__gamma = 0.66 + + # range (0, 1) small factor used in initial guess of step size + self.__ps_0 = 0.01 + # range (0, 1) small factor used in subsequent guesses of step size + self.__ps_1 = 0.1 + # range (1, inf) factor used in subsequent guesses of step size + self.__ps_2 = 2.0 + + self.__current_alpha = None + + # approximate inverse hessian + # initial the identity is used + self._B = np.identity(self._n_parameters) + + # newton direction + self._px = None + + # number of accepted steps/ newton direction updates + self._k = 0 + + # number of steps in the current line search iteration + self.__j = 0 + self.__logic_steps_left = 0 + + # maximum number of line search steps before a successful point + + # Current point, score, and gradient + self._current = self._x0 + self._current_f = None + self._current_dfdx = None + + # Proposed next point (read-only, so can be passed to user) + self._proposed = self._x0 + self._proposed.setflags(write=False) + + self.__convergence = False + + # logic for passing to tell at the right moments + self.__1st_wolfe_check_needed = False + self.__1st_wolfe_check_done = False + self.__2nd_wolfe_check_needed = False + self.__2nd_wolfe_check_done = False + self.__need_update = False + self.__converged_ls = False + + def _set_function_evaluator(self, function): + + f = function + if self.needs_sensitivities: + f = f.evaluateS1 + + # Create evaluator object + self._evaluator = pints.SequentialEvaluator(f) + + def fbest(self): + """ See :meth:`Optimiser.fbest()`. """ + return self._fbest + + def wolfe_line_search_parameters(self): + """ + Returns the wolfe line search parameters this optimiser is using + as a vector ``[c1, c2]``. + As c1 approaches 0 and c2 approaches 1, the line search terminates + more quickly. ``c1`` is the parameter for the Armijo condition + rule, ``0 < c1 < 0.5``. ``c2 `` is the parameter for the + curvature condition rule, ``c1 < c2 < 1.0``. + """ + return (self._c1, self._c2) + + def needs_sensitivities(self): + """ See :meth:`Optimiser.needs_sensitivities()`. """ + return True + + def n_hyper_parameters(self): + """ See :meth:`pints.TunableMethod.n_hyper_parameters()`. """ + return 2 + + def running(self): + """ See :meth:`Optimiser.running()`. """ + return self._running + + def set_hyper_parameters(self, x): + """ + See :meth:`pints.TunableMethod.set_hyper_parameters()`. + + The hyper-parameter vector is ``[c1, c2, m]``. + ``c1`` is the parameter for the Armijo condition rule, + ``0 < c1 < 0.5``. + ``c2`` is the parameter for the curvature condition rule, + ``c1 < c2 < 1.0``. + ``m`` is the number of maximum number of correction matrices + that can be stored for the LM-BFGS update. + """ + + self.__set_wolfe_line_search_parameters(x[0], x[1]) + + def __set_wolfe_line_search_parameters(self, c1, c2): + """ + Sets the parameters for the wolfe conditions. + + Parameters + ---------- + c1: float + Parameter for the Armijo condition rule, ``0 < c1 < 0.5``. + + c2: float + Parameter for the curvature condition rule, ``c1 < c2 < 1.0``. 
+ """ + if(0 < c1 < 0.5 and c1 < c2 < 1.0): + self._c1 = c1 + self._c2 = c2 + else: + cs = self.wolfe_line_search_parameters() + print('Invalid wolfe line search parameters!!!') + print('0 < c1 < 0.5 and c1 < c2 < 1.0') + print('using default parameters: c1 = ', cs[0], ' c2 = ', cs[1]) + + def xbest(self): + """ See :meth:`Optimiser.xbest()`. """ + return self._xbest + + def ask(self): + """ See :meth:`Optimiser.ask()`. """ + + # print('') + # print('in ask') + # print('') + + if not self._running: + self._proposed = np.asarray(self._x0) + else: + + if self.__j == 0: + # working out an initial stepsize value alpha + alpha_0 = self.__initialising(k=self._k, + alpha_k0=self.__current_alpha) + print('alpha_initial: ', alpha_0) + # Creating an initial bracketing interval of alpha values + # satisfying the opposite slope condition (see function + # docstring) beginning with initial guess [0, alpha_initial]. + bracket = (self.__initial_bracket(c=alpha_0)) + self._minimum_alpha = bracket[0] + self._maximum_alpha = bracket[1] + self._updated_minimum_alpha = self._minimum_alpha + self._updated_maximum_alpha = self._maximum_alpha + + # Looping while wolfe conditions don't need to be checked + # and the line search hasn't converged. + while (self.__1st_wolfe_check_needed is not True and + self.__2nd_wolfe_check_needed is not True and + self.__converged_ls is not True): + + # secant squared step of the line search + + # *************************************************************** + # a, b = self.__secant2(self._minimum_alpha, + # self._maximum_alpha) + a = self._updated_minimum_alpha + b = self._updated_maximum_alpha + c = self._proposed_alpha + + # checking if the bracketing interval has converged + self.__converged_ls = self.__very_close(a, b) + self.__logic_steps_left = 'not started' + if self.__converged_ls: + self.__logic_steps_left = ' converged in ls ' + # if converged is True don't do anything more. + + # ************ beginning of secant squared see [1] ************ + + # Preforming a secant to propose a value of alpha. + if (self.__1st_wolfe_check_done is not True and + self.__2nd_wolfe_check_done is not True and + self.__converged_ls is not True): + + # step S1 in [1] + self._proposed_alpha = self.__secant_for_alpha(a, b) + # passing to tell to check wolfe conditions + self.__1st_wolfe_check_needed = True + + # checking the proposed point is in range + if self._proposed_alpha < a or self._proposed_alpha > b: + # If the point is out of range there is no need to + # check wolfe conditions. + self.__1st_wolfe_check_needed = False + self.__1st_wolfe_check_done = True + + self.__logic_steps_left = 2 + self.__j += 1 / 3 + + elif (self.__1st_wolfe_check_done and + self.__2nd_wolfe_check_done is not True and + self.__converged_ls is not True): + + # (typically) updating one side of the + # bracketing interval + A, B = self.__update(a, b, c) + # end of step S1 in [1] + + # (typically) updating the otherside side of + # the bracketing interval + if c == B: + # S2 in [1] + # Preforming a secant to propose a value of alpha. + self._proposed_alpha = self.__secant_for_alpha(b, B) + + # checking the proposed point is in range + if (self._proposed_alpha < A or + self._proposed_alpha > B): + # If the point is out of range there is no need to + # check wolfe conditions. + self.__2nd_wolfe_check_needed = False + self.__2nd_wolfe_check_done = True + else: + self.__2nd_wolfe_check_needed = True + self.__need_update = True + elif c == A: + # S3 in [1] + # Preforming a secant to propose a value of alpha. 
+ self._proposed_alpha = self.__secant_for_alpha(a, A) + + # checking the proposed point is in range + if (self._proposed_alpha < A or + self._proposed_alpha > B): + # If the point is out of range there is no need to + # check wolfe conditions. + self.__2nd_wolfe_check_needed = False + self.__2nd_wolfe_check_done = True + else: + self.__2nd_wolfe_check_needed = True + self.__need_update = True + else: + # No new point has been proposed therefore there + # is no need to check the wolfe conditions. + self.__2nd_wolfe_check_needed = False + self.__2nd_wolfe_check_done = True + + self._updated_minimum_alpha = A + self._updated_maximum_alpha = B + + self.__logic_steps_left = 1 + self.__j += 1 / 3 + + elif (self.__1st_wolfe_check_done and + self.__2nd_wolfe_check_done and + self.__converged_ls is not True): + + # S4 in [1], this is only preformed if S2 or S3 was done + # and the propsed point was in range. + if self.__need_update: + a, b = self.__update(a, b, c) + self.__need_update = False + + # ***************** end of secant squared ***************** + + # preforming steps L2 from [1] + # determing whether a bisection step should be preformed + # i.e if self.__secant_for_interval() didn't shrink the + # bracketing interval by the propotion self.__gamma + new_width = b - a + old_width = (self._maximum_alpha - self._minimum_alpha) + + if new_width > (self.__gamma * old_width): + # preforming bisection + c = (a + b) / 2.0 + a, b = self.__update(a=a, b=b, c=c) + + # preforming steps L3 from [1] + # updating bracketing interval + self._minimum_alpha, self._maximum_alpha = a, b + self.__j += 1 / 3 + self.__logic_steps_left = 0 + + # reset logic + self.__1st_wolfe_check_needed = False + self.__1st_wolfe_check_done = False + self.__2nd_wolfe_check_needed = False + self.__2nd_wolfe_check_done = False + + # *********************** CONTINUE LOOP *********************** + + if self.__converged_ls: + self._proposed = (self._current + + self._updated_maximum_alpha * self._px) + else: + self._proposed = (self._current + + self._proposed_alpha * self._px) + + # Running, and ready for tell now + self._ready_for_tell = True + self._running = True + + # print('') + # print('finished ask') + # print('') + # Return proposed points (just the one) in the search space to evaluate + return [self._proposed] + + def stop(self): + """ See :meth:`Optimiser.stop()`. """ + + # We use the condition number defined in the pycma code at + # cma/evolution_strategy.py#L2965. + + cond = norm(self._dfdx_best, ord=np.inf) + if cond <= 1e-6: + return 'Convergences i.e gradients ~ 0' + return False + + def tell(self, reply): + """ See :meth:`Optimiser.tell()`. """ + + # Check ask-tell pattern + if not self._ready_for_tell: + raise Exception('ask() not called before tell()') + self._ready_for_tell = False + + # Unpack reply + proposed_f, proposed_dfdx = reply[0] + proposed_f = proposed_f + proposed_dfdx = np.asarray(proposed_dfdx) + + # We need the evaluation of the gradients before we can start the BFGS, + # the first tell gets these. + if self._current_f is None: + + # Move to proposed point + self._current = self._proposed + self._current_f = np.asarray(proposed_f) + self._current_dfdx = np.asarray(proposed_dfdx) + + # Update newton direction + # if it isn't a descent direction the line searches will fail + # i.e return nonsense + self._px = - np.matmul(self._B, self._current_dfdx) + + # resetting the number of steps in the current + # line search iteration. + self.__j = 0 + + # Checking if exact wolfe conditions. 
+ proposed_grad = np.matmul(np.transpose(proposed_dfdx), self._px) + + wolfe_curvature = (proposed_grad >= + self._c2 * + np.matmul(np.transpose(self._current_dfdx), + self._px)) + + exact_wolfe_suff_dec = (self._c1 * + np.matmul(np.transpose(self._current_dfdx), + self._px) + >= proposed_f - self._current_f) + + exact_wolfe = (exact_wolfe_suff_dec and wolfe_curvature) + + # Checking if approximate wolfe conditions are meet. + approx_wolfe_suff_dec = ((2.0 * self._c1 - 1.0) * + np.matmul(np.transpose(self._current_dfdx), + self._px) >= proposed_grad) + + approx_wolfe_applies = proposed_f <= self._current_f + self.epsilon + + approximate_wolfe = (approx_wolfe_suff_dec and wolfe_curvature + and approx_wolfe_applies) + + # If wolfe conditions meet the line search is stopped + # and the inverse hessian matrix and newton direction are updated. + # If the line search has converged we also accept the + # steps and update. + if exact_wolfe or approximate_wolfe or self.__converged_ls: + + print('Number of accepted steps: ', self._k) + print('step size of alpha accepted: ', self._proposed_alpha) + print('updating Hessian and changing newton direction') + + # Updating inverse hessian. + self._B = self.inverse_hessian_update(proposed_f, proposed_dfdx) + + # Move to proposed point + self._current = self._proposed + self._current_f = np.asarray(proposed_f) + self._current_dfdx = np.asarray(proposed_dfdx) + + # storing the accepted value of alpha + self.__current_alpha = self._proposed_alpha + + # if self.__k == 0: + # print('\nThe first accepted value of alpha was: ', + # self.__current_alpha) + # print('set initial alpha to this in subsequent runs' + + # 'to speed up computation') + + # Update newton direction + self._px = - np.matmul(self._B, self._current_dfdx) + + # incrementing the number of accepted steps + self._k += 1 + + # Resetting the number of steps in the current line search + # iteration as we have accepted a value and completed this + # line search. + self.__j = 0 + + # resetting line search logic + self.__1st_wolfe_check_needed = False + self.__1st_wolfe_check_done = False + self.__2nd_wolfe_check_needed = False + self.__2nd_wolfe_check_done = False + self.__need_update = False + + else: + # wolfe conditions haven't been meet so we continue the line search + if self.__1st_wolfe_check_needed: + self.__1st_wolfe_check_needed = False + self.__1st_wolfe_check_done = True + if self.__2nd_wolfe_check_needed: + self.__2nd_wolfe_check_needed = False + self.__2nd_wolfe_check_done = True + + # Update xbest and fbest + if self._fbest > proposed_f: + self._fbest = proposed_f + self._dfdx_best = proposed_dfdx + self._xbest = self._current + + print('') + print('Hessian updates: ', self._k, ' line steps: ', self.__j, + ' propose alpha: ', self._proposed_alpha, ' propose points: ', + self._proposed) + + def __update(self, a, b, c): + ''' + This function is part of the Hager-Zhang line search method [1]. + + Updates the bracketing boundary values of alpha. Ensuring the opposite + slope conditions are obeyed. + + The opposite slope conditions are: + φ(a) ≤ φ(0) + epsilon, + φ'(a) < 0 (w.r.t every parameter), + φ'(b) ≥ 0 (w.r.t every parameter). + where φ(α) = f(x+α*px), f() is the function binging minimised, x is + the current mparameter set, px is the newton direction, and alpha is + the step size. + + In the first condition, epsilon is a small positive constant. The + condition demands that the function at the left end point be not much + bigger than the starting point (i.e. alpha = 0). 
This is an easy to + satisfy condition because by assumption, we are in a direction where + the function value is decreasing. The second and third conditions + together demand that there is at least one zero of the derivative in + between a and b. In addition to the interval, the update algorithm + requires a third point to be supplied. Usually, this point would lie + within the interval [a, b]. If the point is outside this interval, + the current interval is returned. If the point lies within the + interval, the behaviour of the function and derivative value at this + point is used to squeeze the original interval in a manner that + preserves the opposite slope conditions. + + :param a: lower bound of alpha + :param b: upper bound of alpha + :param c: proposed value of alpha + :param dfdx_c: vector of gradients at the proposed point i.e alpha = c + :param f_c: function evaluation at the proposed point i.e alpha = c + :param f_0: function evaluation at the previous point i.e alpha = 0 + ''' + + # Check c is within the bracket conditions (steps U0 from [1]). + if c < a or c > b: + # if the proposed point is not in range we return + # the old brackets unmodified + return a, b + else: + + # evaluate function for alpha = c i.e at the proposed boundary + # point_c = self._current + c * self._px + # fs_c = self._evaluator.evaluate([point_c]) + f_c, dfdx_c = self.obj_and_grad_func(c) + + # Checking if the opposite slope condition at the upper bound is + # obeyed by the proposed point + # (steps U1 from [1]). + if dfdx_c >= 0.0: + # Updating the upper bound. + return a, c + + # Checking if the opposite slope condition at the lower bound is + # obeyed by the proposed point, if so it is a valid lower bound + # (steps U2 from [1]). + elif dfdx_c < 0.0 and f_c <= self._current_f + self.epsilon: + # Updating the lower bound. + return c, b + + # The proposed point doesn't obey the opposite slope condition + # i.e. dfdx_c < 0.0 and f_c > self._current_f + self.epsilon. + # Checking this is unnecessary as it is the opposite to the above + # conditions. A secant/bisect can narrow down an interval between + # the current lower bound and the trial point c. + else: + b = c + return self.__bisect_or_secant(a, b) + + def __bisect_or_secant(self, a, b): + ''' + This function is part of the Hager-Zhang line search method [1]. + + Actual implementation of secant (or bisetc if `self.theta` = 0.5) + given a bracketing intervale [a, b] used in `__update()` and + `__initial_bracketing_interval()`. (steps U3a-c from [1]) + + + :param a: lower bound of alpha + :param b: upper bound of alpha + :param c: proposed value of alpha + ''' + + secant = True + + # FIXME: + # problem is in this while loop a and b merge but don't satisfy + # opposite slope rule for upper, + # probably should return when difference between a and b almost + # nothing??? + + # The interval needs updating if the upper bracket has a negative + # slope and the value of the function at that point is too high. + # It is not a valid lower bracket but along with the current + # lower bracket, it encloses another minima. The below function + # is a loop which tries to narrow the interval so that it + # satisfies the opposite slope conditions. 
+ + # (steps U3 from [1]) + + while secant is True: + # according to [1]] this while loop is guaranteed to terminate + # as the intervale between a and b tends to zero + + # Preforming secant (if theta = 0.5 as is default this is a + # bisection) to propose a new point which hopeful obeys the + # opposite slope conditions. + # (steps U3a from [1]) + d = (1.0 - self.theta) * a + self.theta * b + + # Evaluating function for alpha = d i.e at the proposed boundary. + # point_d = self._current + d * self._px + # fs = self._evaluator.evaluate([point_d]) + # f_d, dfdx_d = fs[0] + + f_d, dfdd = self.obj_and_grad_func(d) + + # Checking if the opposite slope condition at the upper bound is + # obeyed by the proposed point. + # If the proposed point has a positive slope, then we have found a + # suitable upper bound to bracket a minima within opposite slopes. + # (still steps U3a from [1]) + converged = self.__very_close(d, b) or self.__very_close(d, a) + if dfdd >= 0.0 or converged: + secant = False + # Updating the upper bound. + return a, d + + # Checking if the opposite slope condition at the lower bound is + # obeyed by the proposed point. + # If the proposed point has a negative slope and the function value + # at that point is small enough, we can use it as a new lower bound + # to narrow down the interval. + # (steps U3b from [1]) + elif dfdd < 0.0 and f_d <= self._current_f + self.epsilon: + # Updating the lower bound. + a = d + + # The proposed point doesn't obey the opposite slope condition + # i.e. dfdx_c < 0.0 and f_c > self._current_f + self.epsilon + # Checking this is unnecessary as it is the opposite to the above + # conditions. We are therefore in the same situation as when we + # started the loop so we update the upper bracket and continue. + # (steps U3c from [1]) + else: + b = d + + def __secant_for_alpha(self, a, b): + ''' + This function is part of the Hager-Zhang line search method [1]. + + Preforms a secant step to propose a value of alpha. This is the same as + the secant routine described in [1]. + + :param a: lower bound of alpha + :param b: upper bound of alpha + ''' + + # Evaluating function for alpha = a to obtain gradients. + f_a, dfda = self.obj_and_grad_func(a) + + # Evaluating function for alpha = b to obtain gradients. + f_b, dfdb = self.obj_and_grad_func(b) + + # Preforming secant. + numerator = a * dfdb - b * dfda + denominator = dfdb - dfda + + return float(numerator / denominator) + + def __initial_bracket(self, c, rho=5): + ''' + This function is part of the Hager-Zhang line search method [1]. + + This function is used to generate an initial interval [a, b] for alpha + satisfying the opposite slope conditions and therefore bracketing + the minimum, beginning with the initial guess [0, c]. + The opposite slope conditions: + φ(a) ≤ φ(0) + epsilon, + φ'(a) < 0 (w.r.t every parameter), + φ'(b) ≥ 0 (w.r.t every parameter). + where φ(α) = f(x+α*px), f() is the function binging minimised, x is + the current parameter set, px is the newton direction, and alpha is + the step size. + + This is the same as the bracket routine described in [1] as steps B0-3 + + :param c: initial guess for maximum value of alpha + :param row: range (1, ∞), expansion factor used in the bracket rule to + increase the upper limit c (c_j+1 = row*c_j) until a + suitable interval is found that contains the minimum. + ''' + + # (steps B0 from [1]) + # Initiating a list of proposed boundary values of alpha. 
+ c = [c] + # Initiating a list of error function evaluations at + # the proposed boundary values of alpha. + f_c = [] + # Initiating lower bound. + a = 0 + + # Initiating an iteration counter for the below while loop. + j = 0 + bracketing = True + + while bracketing: + + # Evaluating function for alpha = c[j] + # i.e at the proposed boundary. + f_cj, dfdc_j = self.obj_and_grad_func(c[j]) + + # Appending the error function evaluations at proposed boundary + # values of alpha. + f_c.append(f_cj) + + # Checking if the opposite slope condition at the upper bound is + # obeyed by the proposed point. If the slope at the proposed point + # is positive, then the given points already bracket a minimum. + # (steps B1 from [1]) + if dfdc_j >= 0.0: + # Setting the upper bound. + b = c[j] + + bracketing = False + + # Checking if the non derivative opposite slope condition at + # the lower bound is obeyed by any of the previously evaluated + # points and returning the value for which the boundary + # conditions are as close together as possible. + for i in range(1, j + 1): + if f_c[j - i] <= self._current_f + self.epsilon: + a = c[j - i] + return a, b + return a, b + + # Checking if the proposed point doesn't obey the opposite slope + # condition. This means the upper bracket limit almost works as a + # new lower limit but the objective function(f_cj) is too large. + # We therefore need to preform a secant/bisect but the minimum is + # not yet bracketed. + # (steps B2 from [1]) + elif dfdc_j < 0.0 and f_cj > self._current_f + self.epsilon: + # The interval needs updating if the upper bracket has a + # negative slope and the value of the function at that point + # is too high. It is not a valid lower bracket but along with + # the current lower bracket, it encloses another minima. The + # below function tries to narrow the interval so that it + # satisfies the opposite slope conditions. + bracketing = False + return self.__bisect_or_secant(0.0, c[j]) + + # The proposed point obeys the opposite slope condition + # at the lower bound + # i.e. dfdx_d < 0.0 and f_d <= self._current_f + self.epsilon. + # Checking this is unnecessary as it is the opposite to + # the above conditions. This means the bracket interval needs + # expanding to ensure a minimum is bracketed. + # (steps B3 from [1]) + else: + # Increasing the proposed point by a factor of row to attempt + # to bracket minimum and trying again. + c.append(rho * c[j]) + + # Increamenting the iteration counter. + j += 1 + + def __initialising(self, k, alpha_k0): + ''' + This function is part of the Hager-Zhang line search method [1]. + + Generate the starting guess of alpha, 'c', used by + ``__initial_bracket()``. This is the same as the routine + called initial and described as I0-2 in [1]. + + :param k: number of accepted steps/newton direction updates that + have taken place. + :param alpha_k0: the alpha value used by the previously accepted + steps/newton direction + update. If k = 0 this is the initial alpha the user wants to be used + ''' + # TODO: The Quadstep option has not been implemented yet. + # However, the linesearch is functional without is + QuadStep = False + + # Small factor used in initial guess of step size, range (0, 1). + # As the initial guess is very crude this is a very small factor + # to keep the initial step short and close the starting parameters + # self.x_0. + ps_0 = self.__ps_0 + + # Small factor used in subsequent guesses of step size + # if Quadstep is true, range (0, 1). 
+ # TODO: this hasn't been implement yet + ps_1 = self.__ps_1 + + # Sacling factor used in subsequent guesses of step size, + # range (1, inf). + ps_2 = self.__ps_2 + + # For the first line search do the following + # (step I0 in [1]) + if k == 0: + + if alpha_k0 is not None: + # returning user specified initial alpha + return alpha_k0 + + # (step I0a in [1]) + elif np.all(self._x0 != 0.0): + # Crude step size estimate + # :math: \alpha = ps_0*||x_0||_\inf / ||dfdx||_\inf + return ((ps_0 * norm(self._x0, ord=np.inf)) + / (norm(self._current_dfdx, ord=np.inf))) + + # If self._x0 = 0.0 the above statement would give alpha = 0, + # hence we use the following estimation. + # (step I0b in [1]) + elif self._current_f != 0: + # Crude step size estimate + # :math: \alpha = ps_0*|f(x_0)| / ||dfdx||^2 + return ((ps_0 * abs(self._current_f)) + / (pow(norm(self._current_dfdx, ord=2), 2.0))) + + # Otherwise self._current_f = 0.0 and we are already at the + # minimum therefore return alpha = 1.0 it should not matter what we + # return in this case as if self._current_f = 0.0 the gradients + # will also equal zero. + # (step I0c in [1]) + else: + return 1.0 + + # TODO: implement the below option using a quadratic interpolant ??? + # everything will work without this + # (step I1 in [1]) + elif QuadStep: + + # point_current_scaled = self._current + ps_1*alpha_k0* self._px + # fs = self._evaluator.evaluate([point_current_scaled]) + f, df = self.obj_and_grad_func(ps_1 * alpha_k0) + + if f <= self._current_f: + pass + #TODO: add quad step option + pass + + # For the subsequent line search do the following + # (step I2 in [1]) + else: + # Increases the step size of the previous accepted step as it + # is only decreased in subsequent boundary manipulation. + return ps_2 * alpha_k0 + + def __very_close(self, x, y): + ''' Returns true if x is very close in value to y. ''' + return np.nextafter(x, y) >= y + + def obj_and_grad_func(self, alpha: float): + ''' + For a given alpha this returns the values of the objective function + and it's derivative. + ''' + point_alpha = self._current + alpha * self._px + fs_alpha = self._evaluator.evaluate([point_alpha]) + f_alpha, dfdx_alpha = fs_alpha[0] + + dfdalpha = np.matmul(np.transpose(dfdx_alpha), self._px) + + return f_alpha, dfdalpha + + def inverse_hessian_update(self, proposed_f, proposed_dfdx): + """ + Returns the newly calculated/updated inverse hessian matrix + by whichever quasi-Newton/ linesearch based optimiser is used + by the inherited class. + """ + raise NotImplementedError + + # def __secant2(self, a, b): + # ''' + # This function is part of the Hager-Zhang line search method [1]. + + # This function is referred to as secant^2 and described as steps + # S1-4 in [1]. + + # Preforms a secant step to update the bracketing interval of alpha. + # Given an interval that brackets a root, this procedure performs an + # update of both end points using two intermediate points generated + # using the secant interpolation `self.__secant_for_alpha()`. + # Assuming the interval [a, b] satisfy the opposite slope conditions. + + # The opposite slope conditions: + # φ(a) ≤ φ(0) + epsilon , + # φ'(a) < 0 (w.r.t every parameter), + # φ'(b) ≥ 0 (w.r.t every parameter). + # where φ(α) = f(x+α*px), f() is the function binging minimised, + # x is the current parameter set, px is the newton direction, + # and alpha is the step size. + + # :param a: power bound of alpha + # :param b: upper bound of alpha + # ''' + + # # Preforming a secant to propose a value of alpha. 
+ # # (step S1 in [1]) + # c = self.__secant_for_alpha(a,b) + # # CHECK IF c SATISFY THE WOLFE CONDITIONS i.e pass to tell!!!!!!! + # # IF IT HAS STOP SEARCHING THIS DIRECTION!!!! + + # # IF WOLFE CONDITIONS AREAN'T MEET DO THIS thing below + + # # (typically) updating one side of the bracketing interval + # print('first update secant') + # A, B = self.__update(a, b, c) + + # # (typically) updating the otherside side of the bracketing interval + # # (step S2 in [1]) + # if c == A: + # # Preforming a secant to propose a value of alpha. + # C = self.__secant_for_alpha (a, A) + + # # (step S3 in [1]) + # if c == B: + # # Preforming a secant to propose a value of alpha. + # C = self.__secant_for_alpha (b, B) + + # # (step S4 in [1]) + # if c == A or c == B: + # # CHECK IF C SATISFY THE WOLFE CONDITIONS i.e pass to tell!!!!!!! + # # IF IT HAS STOP SEARCHING THIS DIRECTION!!!! + + # # IF WOLFE CONDITIONS AREAN'T MEET DO THIS thing below + + # print('second update secant') + # A, B = self.__update(A, B, C) + + # return A, B + + class OptimisationController(object): """ Finds the parameter values that minimise an :class:`ErrorMeasure` or @@ -337,6 +1270,8 @@ def __init__( elif not issubclass(method, pints.Optimiser): raise ValueError('Method must be subclass of pints.Optimiser.') self._optimiser = method(x0, sigma0, boundaries) + if issubclass(method, pints.LineSearchBasedOptimiser): + self._optimiser._set_function_evaluator(self._function) # Check if sensitivities are required self._needs_sensitivities = self._optimiser.needs_sensitivities() diff --git a/pints/_optimisers/_lbfgs.py b/pints/_optimisers/_lbfgs.py new file mode 100644 index 000000000..f5c3f854d --- /dev/null +++ b/pints/_optimisers/_lbfgs.py @@ -0,0 +1,156 @@ +# +# Broyden-Fletcher-Goldfarb-Shanno algorithm +# +# This file is part of PINTS (https://github.com/pints-team/pints/) which is +# released under the BSD 3-clause license. See accompanying LICENSE.md for +# copyright notice and full license details. +# +from __future__ import absolute_import, division +from __future__ import print_function, unicode_literals +import numpy as np +from numpy.linalg import norm +import pints + + +class LBFGS(pints.LineSearchBasedOptimiser): + """ + Broyden-Fletcher-Goldfarb-Shanno algorithm [2], [3], [4] + + The Hager-Zhang line search algorithm [1] is implemented in this class + + [1] Hager, W. W.; Zhang, H. Algorithm 851: CG_DESCENT, + a Conjugate Gradient Method with Guaranteed Descent. + ACM Trans. Math. Softw. 2006, 32 (1), 113-137. + https://doi.org/10.1145/1132973.1132979. + + [2] Liu, D. C.; Nocedal, J. + On the Limited Memory BFGS Method for Large Scale Optimization. + Mathematical Programming 1989, 45 (1), + 503-528. https://doi.org/10.1007/BF01589116. + + [3] Nocedal, J. Updating Quasi-Newton Matrices with Limited Storage. + Math. Comp. 1980, 35 (151), 773-782. + https://doi.org/10.1090/S0025-5718-1980-0572855-7. + + [4] Nash, S. G.; Nocedal, J. A Numerical Study of the Limited Memory + BFGS Method and the Truncated-Newton Method for Large Scale Optimization. + SIAM J. Optim. 1991, 1 (3), 358-372. https://doi.org/10.1137/0801023. + + """ + + def __init__(self, x0, sigma0=None, boundaries=None): + super(LBFGS, self).__init__(x0, sigma0, boundaries) + + # maximum number of correction matrices stored + self._m = 5 + + # Storage for vectors constructing the correction matrix + # this is the advised way of storing them. 
+ self._S = np.zeros(shape=(self._n_parameters, self._m)) + self._Y = np.zeros(shape=(self._n_parameters, self._m)) + + def max_correction_matrice_storage(self): + """ + Returns ``m``, the maximum number of correction matrice for + calculating the inverse hessian used and stored + """ + return self._m + + def name(self): + """ See :meth:`Optimiser.name()`. """ + return 'Broyden-Fletcher-Goldfarb-Shanno (BFGS)' + + def __set_max_correction_matrice_storage(self, m): + """ + Sets the maximum number of correction matrice to be stored and used, + in subsequent inverse hessian updates, if ``m`` is set large enough + for the problem this method becomes the BFGS rather than the L-BFGS. + + Parameters + ---------- + m: int + The maximum number of correction matrice for calculating the + inverse hessian used and stored. + """ + if(m == int(m)): + self._m = m + self._S = np.zeros(self._n_parameters, m) + self._Y = np.zeros(self._n_parameters, m) + else: + print('Invalid value of m!!!\nm must be an integer') + print('using default parameters: m = ', self._m) + + def inverse_hessian_update(self, proposed_f, proposed_dfdx): + ''' + The inverse hessian matrix and newton direction are updated by the + L-BFGS/BFGS approximation of the hessian described in reference [2] + [3], and [4]. + ''' + + # identity matrix + I = np.identity(self._n_parameters) + + # We do this if we haven't exhausted existing memory yet, this is + # identical to the BFGS algorithm + if self._k <= self._m - 1: + k = self._k + # Defining the next column. + self._S[:, k] = self._proposed - self._current + self._Y[:, k] = proposed_dfdx - self._current_dfdx + + # Defining B_0. Scaling taken from [4]. + B = ((np.matmul(np.transpose(self._Y[:, k]), + self._S[:, k]) + / (norm(self._Y[:, k], ord=2) ** 2)) * I) + + # Updating inverse hessian. + for k in range(self._k + 1): + + V = (I - np.matmul(self._Y[:, k], + np.transpose(self._S[:, k])) + / np.matmul(np.transpose(self._Y[:, k]), + self._S[:, k])) + + B = np.matmul(np.transpose(V), np.matmul(self._B, V)) + B += (np.matmul(self._S[:, k], + np.transpose(self._S[:, k])) + / np.matmul(np.transpose(self._Y[:, k]), + self._S[:, k])) + + # We have exhausted the limited memory and now enter + # the LM-BFGS algorithm + else: + + m = self._m - 1 + # Shifting everything one column to the left. + self._S[:, 0:m] = self._S[:, 1:self._m] + self._Y[:, 0:m] = self._Y[:, 1:self._m] + + # Renewing last column. + self._S[:, m] = self._proposed - self._current + self._Y[:, m] = proposed_dfdx - self._current_dfdx + + # Defining B_0. Scaling taken from [4]. + B = ((np.matmul(np.transpose(self._Y[:, m]), + self._S[:, m]) + / (norm(self._Y[:, m], ord=2) ** 2)) * I) + + # Updating inverse hessian. + for k in range(self._m): + + V = (I - np.matmul(self._Y[:, k], + np.transpose(self._S[:, k])) + / np.matmul(np.transpose(self._Y[:, k]), + self._S[:, k])) + + B = np.matmul(np.transpose(V), np.matmul(self._B, V)) + B += (np.matmul(self._S[:, k], + np.transpose(self._S[:, k])) + / np.matmul(np.transpose(self._Y[:, k]), + self._S[:, k])) + + B = ((np.matmul(np.transpose(self._Y[:, m]), + self._S[:, m]) + / (norm(self._Y[:, m], ord=2)**2)) * I) + + return B