From 5befc5260a7df7e15a8e4a841e9037f275b3ae13 Mon Sep 17 00:00:00 2001
From: Morten Hjorth-Jensen

+In the example code here we implement the above equations (with explicit
+expressions for the derivatives) with just one input variable \( x \) and
+one output variable. The target value \( y=2x+1 \) is a simple linear
+function in \( x \). Since this is a regression problem, we define the
+cost function to be proportional to the least squares error
+$$
+C(y,w_1,b_1)=\frac{1}{2}(a_1-y)^2,
+$$
+with \( a_1 \) the output from the network. Running this code gives us
+acceptable results after some 40-50 iterations. Note that the results
+depend on the value of the learning rate.
+
+Feel free to add more input nodes and weights to the above code.
+Furthermore, try to increase the amount of input and target/output data.
+Try also to perform calculations for more values of the learning rates.
+Feel free to add a regularization term with either an \( l_1 \) norm or
+an \( l_2 \) norm and discuss your results.
+
+You could also try to change the function \( f(x)=y \) from a linear
+polynomial in \( x \) to a higher-order polynomial. Comment on your results.
+
+Hint: Increasing the number of input variables and input nodes requires a
+rewrite of the input data in terms of a matrix. You need to figure out the
+correct dimensionalities.
-We change our simple model to (see graph)
+ We change our simple model to (see graph below)
a network with just one hidden layer but with scalar variables only.
For the first hidden layer \( a_{i-1}=a_0=x \) for this simple model.

+The code here implements the above model with one hidden layer and
+scalar variables for the same function we studied in the previous
+example. The code is however set up so that we can add multiple inputs
+\( x \) and target values \( y \). Note also that we have the possibility
+of defining a feature matrix \( \boldsymbol{X} \) with more than just one
+column for the input values. This will prove useful in our next example.
+We have also defined matrices and vectors for all of our operations,
+although this is not strictly necessary here.
+We see that after a few iterations (the results do depend on the learning
+rate, however) we get an error which is rather small.
+
+Try to increase the amount of input and target/output data, and perform
+calculations for more values of the learning rates. Feel free to add a
+regularization term with either an \( l_1 \) norm or an \( l_2 \) norm,
+and discuss your results as functions of the amount of training data and
+the various learning rates.
+
+Challenge: Try to change the activation functions and replace the
+hard-coded analytical expressions for the derivatives with automatic
+differentiation via either autograd or JAX.
+
+We extend our simple code to a function which depends on two variables
+\( x_0 \) and \( x_1 \), that is
+$$
+y=f(x_0,x_1)=x_0^2+3x_0x_1+x_1^2+5.
+$$
+We feed our network with \( n=100 \) entries \( x_0 \) and \( x_1 \). We
+have thus two features represented by these variables and an input
+matrix/design matrix \( \boldsymbol{X}\in \mathbf{R}^{n\times 2} \)
+$$
+\boldsymbol{X}=\begin{bmatrix} x_{00} & x_{01} \\ x_{10} & x_{11} \\ x_{20} & x_{21} \\ \dots & \dots \\ x_{n-2,0} & x_{n-2,1} \\ x_{n-1,0} & x_{n-1,1} \end{bmatrix}.
+$$

February 5-9: Advanced machine learning and data analysis for the physical sciences
-Feb 5, 2024
+February 6, 2024
@@ -567,10 +554,118 @@ Optimizing the parameters
+Implementing the simple perceptron model
+
+# import necessary packages
+import numpy as np
+import matplotlib.pyplot as plt
+
+def feed_forward(x):
+ # weighted sum of inputs to the output layer
+ z_1 = x*output_weights + output_bias
+ # Output from output node (one node only)
+ # Here the output is equal to the input
+ a_1 = z_1
+ return a_1
+
+def backpropagation(x, y):
+ a_1 = feed_forward(x)
+ # derivative of cost function
+ derivative_cost = a_1 - y
+ # the variable delta in the equations; note that since the output a_1 = z_1, its derivative wrt z_1 is 1
+ delta_1 = derivative_cost
+ # gradients for the output layer
+ output_weights_gradient = delta_1*x
+ output_bias_gradient = delta_1
+ # The cost function is 0.5*(a_1-y)^2. This gives a measure of the error for each iteration
+ return output_weights_gradient, output_bias_gradient
+
+# ensure the same random numbers appear every time
+np.random.seed(0)
+# Input variable
+x = 4.0
+# Target values
+y = 2*x+1.0
+
+# Defining the neural network
+n_inputs = 1
+n_outputs = 1
+# Initialize the network
+# weights and bias in the output layer
+output_weights = np.random.randn()
+output_bias = np.random.randn()
+
+# implementing a simple gradient descent approach with fixed learning rate
+eta = 0.01
+for i in range(40):
+ # calculate gradients from back propagation
+ derivative_w1, derivative_b1 = backpropagation(x, y)
+ # update weights and biases
+ output_weights -= eta * derivative_w1
+ output_bias -= eta * derivative_b1
+# our final prediction after training
+ytilde = output_weights*x+output_bias
+print(0.5*((ytilde-y)**2))
+
+Exercise 1: Extensions to the above code
+
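One possible starting point for this exercise is the minimal sketch below (an editor's illustration, not part of the original patch): the same perceptron trained on several data points, with an \( l_2 \) penalty on the weight added to the cost, \( C=\frac{1}{2}\sum_i(a_1^{(i)}-y_i)^2+\frac{\lambda}{2}w^2 \). The data values, the penalty strength lam and the number of iterations are ad hoc choices.

import numpy as np

np.random.seed(0)
# several input/target pairs for the same target function y = 2x+1
x = np.array([1.0, 2.0, 3.0, 4.0])
y = 2*x + 1.0

w = np.random.randn()
b = np.random.randn()
eta = 0.01   # learning rate
lam = 0.001  # l_2 regularization strength (a hyperparameter)

for i in range(500):
    a = w*x + b                          # forward pass, linear output
    delta = a - y                        # dC/da for the squared error
    w -= eta*(np.sum(delta*x) + lam*w)   # the l_2 term adds lam*w to the gradient
    b -= eta*np.sum(delta)

print(w, b)  # approaches w=2, b=1 for small lam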
+Adding a hidden layer
-The training
-Code examples for the simple models
+Code example
+
+import numpy as np
+# We use the Sigmoid function as activation function
+def sigmoid(z):
+ return 1.0/(1.0+np.exp(-z))
+
+def forwardpropagation(x):
+ # weighted sum of inputs to the hidden layer
+ z_1 = np.matmul(x, w_1) + b_1
+ # activation in the hidden layer
+ a_1 = sigmoid(z_1)
+ # weighted sum of inputs to the output layer
+ z_2 = np.matmul(a_1, w_2) + b_2
+ a_2 = z_2
+ return a_1, a_2
+
+def backpropagation(x, y):
+ a_1, a_2 = forwardpropagation(x)
+ # parameter delta for the output layer, note that a_2=z_2 and its derivative wrt z_2 is just 1
+ delta_2 = a_2 - y
+ print(0.5*((a_2-y)**2))
+ # delta for the hidden layer
+ delta_1 = np.matmul(delta_2, w_2.T) * a_1 * (1 - a_1)
+ # gradients for the output layer
+ output_weights_gradient = np.matmul(a_1.T, delta_2)
+ output_bias_gradient = np.sum(delta_2, axis=0)
+ # gradient for the hidden layer
+ hidden_weights_gradient = np.matmul(x.T, delta_1)
+ hidden_bias_gradient = np.sum(delta_1, axis=0)
+ return output_weights_gradient, output_bias_gradient, hidden_weights_gradient, hidden_bias_gradient
+
+
+# ensure the same random numbers appear every time
+np.random.seed(0)
+# Input variable
+x = np.array([4.0],dtype=np.float64)
+# Target values
+y = 2*x+1.0
+
+# Defining the neural network, only scalars here
+n_inputs = x.shape
+n_features = 1
+n_hidden_neurons = 1
+n_outputs = 1
+
+# Initialize the network
+# weights and bias in the hidden layer
+w_1 = np.random.randn(n_features, n_hidden_neurons)
+b_1 = np.zeros(n_hidden_neurons) + 0.01
+
+# weights and bias in the output layer
+w_2 = np.random.randn(n_hidden_neurons, n_outputs)
+b_2 = np.zeros(n_outputs) + 0.01
+
+eta = 0.1
+for i in range(50):
+ # calculate gradients
+ derivW2, derivB2, derivW1, derivB1 = backpropagation(x, y)
+ # update weights and biases
+ w_2 -= eta * derivW2
+ b_2 -= eta * derivB2
+ w_1 -= eta * derivW1
+ b_1 -= eta * derivB1
+
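For the challenge mentioned in the notes above, the following is a minimal editor's sketch (assuming the JAX library is installed) of the same one-hidden-layer model, with the hard-coded derivative expressions replaced by automatic differentiation through jax.grad:

import jax
import jax.numpy as jnp

def model(params, x):
    w_1, b_1, w_2, b_2 = params
    a_1 = jax.nn.sigmoid(x @ w_1 + b_1)   # hidden layer
    return a_1 @ w_2 + b_2                # linear output layer

def cost(params, x, y):
    return 0.5*jnp.sum((model(params, x) - y)**2)

key1, key2 = jax.random.split(jax.random.PRNGKey(0))
params = [jax.random.normal(key1, (1, 1)), jnp.full(1, 0.01),
          jax.random.normal(key2, (1, 1)), jnp.full(1, 0.01)]
x = jnp.array([[4.0]])
y = 2*x + 1.0

eta = 0.1
grad_fn = jax.grad(cost)   # gradients of the cost wrt all parameters
for i in range(50):
    grads = grad_fn(params, x, y)
    params = [p - eta*g for p, g in zip(params, grads)]
print(cost(params, x, y))

Changing the activation function now only requires editing model, for example to jax.nn.relu or jnp.tanh; no derivative code has to be rewritten.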
+Exercise 2: Including more data
+
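A possible starting point for this exercise is sketched below (an editor's illustration; the uniformly drawn inputs, the number of hidden nodes and the learning rate are arbitrary choices). We generate \( n=100 \) samples of \( y=f(x_0,x_1)=x_0^2+3x_0x_1+x_1^2+5 \), collect them in a design matrix \( \boldsymbol{X}\in\mathbf{R}^{n\times 2} \), and reuse the matrix-based forward and backward passes from the code above, with the cost taken as the mean squared error so that the step size does not grow with \( n \).

import numpy as np

def sigmoid(z):
    return 1.0/(1.0 + np.exp(-z))

np.random.seed(0)
n = 100
X = np.random.rand(n, 2)   # design matrix, one row per sample, two features
y = (X[:, 0]**2 + 3*X[:, 0]*X[:, 1] + X[:, 1]**2 + 5).reshape(n, 1)

n_features, n_hidden_neurons, n_outputs = 2, 10, 1
w_1 = np.random.randn(n_features, n_hidden_neurons)
b_1 = np.zeros(n_hidden_neurons) + 0.01
w_2 = np.random.randn(n_hidden_neurons, n_outputs)
b_2 = np.zeros(n_outputs) + 0.01

eta = 0.1
for i in range(2000):
    # forward pass
    a_1 = sigmoid(X @ w_1 + b_1)
    a_2 = a_1 @ w_2 + b_2
    # backpropagation, identical in form to the scalar case above
    delta_2 = (a_2 - y)/n
    delta_1 = (delta_2 @ w_2.T) * a_1 * (1 - a_1)
    w_2 -= eta * (a_1.T @ delta_2)
    b_2 -= eta * np.sum(delta_2, axis=0)
    w_1 -= eta * (X.T @ delta_1)
    b_1 -= eta * np.sum(delta_1, axis=0)

print(0.5*np.mean((a_2 - y)**2))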
+Simple neural network and the back propagation equations
@@ -866,6 +1071,41 @@ Gradient expressions
Program example
+
+
+Getting serious, the back propagation equations for a neural network
@@ -3503,14 +3743,21 @@ Efficient Polynomial
We note that \( \alpha_i=0 \) except for \( i\in \left\{0,1,2\right\} \) and \( \beta_i=0 \) except for \( i\in\left\{0,1,2,3\right\} \).
$$
-\begin{split}
-\delta_0=&\alpha_0\beta_0\\
-\delta_1=&\alpha_1\beta_0+\alpha_1\beta_0\\
-\delta_2=&\alpha_0\beta_2+\alpha_1\beta_1+\alpha_2\beta_0\\
-\delta_3=&\alpha_1\beta_2+\alpha_2\beta_1+\alpha_0\beta_3\\
-\delta_4=&\alpha_2\beta_2+\alpha_1\beta_3\\
-\delta_5=&\alpha_2\beta_3.\\
-\end{split}
+\begin{align}
+\delta_0=&\alpha_0\beta_0
+\tag{4}\\
+\delta_1=&\alpha_0\beta_1+\alpha_1\beta_0
+\tag{5}\\
+\delta_2=&\alpha_0\beta_2+\alpha_1\beta_1+\alpha_2\beta_0
+\tag{6}\\
+\delta_3=&\alpha_1\beta_2+\alpha_2\beta_1+\alpha_0\beta_3
+\tag{7}\\
+\delta_4=&\alpha_2\beta_2+\alpha_1\beta_3
+\tag{8}\\
+\delta_5=&\alpha_2\beta_3.
+\tag{9}
+\end{align}
$$

Does the number of floating point operations change here when we use the commutative property?
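As a quick numerical check (an editor's addition, with arbitrary test coefficients), note that the \( \delta_i \) above are exactly the discrete convolution of the two coefficient arrays, so they can be verified directly:

import numpy as np

alpha = np.array([2.0, 3.0, 5.0])        # alpha_0, ..., alpha_2
beta = np.array([1.0, 4.0, 1.0, 5.0])    # beta_0, ..., beta_3
delta = np.convolve(alpha, beta)         # delta_k = sum_i alpha_i*beta_{k-i}
print(delta)                             # the six coefficients delta_0, ..., delta_5
# for instance delta_1 = alpha_0*beta_1 + alpha_1*beta_0 = 2*4 + 3*1 = 11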
@@ -3525,301 +3793,6 @@ A more efficient w
Convolution Examples: Principle of Superposition and Periodic Forces (Fourier Transforms)
-
-For problems with so-called harmonic oscillations, given by for example the following differential equation
-$$
-m\frac{d^2x}{dt^2}+\eta\frac{dx}{dt}+x(t)=F(t),
-$$
-
-where \( F(t) \) is an applied external force acting on the system (often called a driving force), one can use the theory of Fourier transformations to find the solutions of this type of equations.
-
-If one has several driving forces, \( F(t)=\sum_n F_n(t) \), one can find
-the particular solution to each \( F_n \), \( x_{pn}(t) \), and the particular
-solution for the entire driving force is then given by a series like
-$$
-\begin{equation}
-x_p(t)=\sum_nx_{pn}(t).
-\tag{4}
-\end{equation}
-$$
-
-Principle of Superposition
-
-This is known as the principle of superposition. It only applies when
-the homogeneous equation is linear. If there were an anharmonic term
-such as \( x^3 \) in the homogeneous equation, then when one summed various
-solutions, \( x=(\sum_n x_n)^2 \), one would get cross
-terms. Superposition is especially useful when \( F(t) \) can be written
-as a sum of sinusoidal terms, because the solution for each
-sinusoidal (sine or cosine) term is analytic.
-
-Driving forces are often periodic, even when they are not
-sinusoidal. Periodicity implies that for some time \( \tau \)
-
-$$
-\begin{eqnarray}
-F(t+\tau)=F(t).
-\end{eqnarray}
-$$
-
-One example of a non-sinusoidal periodic force is a square wave. Many
-components in electric circuits are non-linear, e.g. diodes, which
-makes many wave forms non-sinusoidal even when the circuits are being
-driven by purely sinusoidal sources.
-Simple Code Example
-
-The code here shows a typical example of such a square wave generated using the functionality included in the scipy Python package. We have used a period of \( \tau=0.2 \).
-
-import numpy as np
-import math
-from scipy import signal
-import matplotlib.pyplot as plt
-
-# number of points
-n = 500
-# start and final times
-t0 = 0.0
-tn = 1.0
-# Period
-t = np.linspace(t0, tn, n, endpoint=False)
-SqrSignal = np.zeros(n)
-SqrSignal = 1.0+signal.square(2*np.pi*5*t)
-plt.plot(t, SqrSignal)
-plt.ylim(-0.5, 2.5)
-plt.show()
-
-For the sinusoidal example the
-period is \( \tau=2\pi/\omega \). However, higher harmonics can also
-satisfy the periodicity requirement. In general, any force that
-satisfies the periodicity requirement can be expressed as a sum over
-harmonics,
-$$
-\begin{equation}
-F(t)=\frac{f_0}{2}+\sum_{n>0} f_n\cos(2n\pi t/\tau)+g_n\sin(2n\pi t/\tau).
-\tag{5}
-\end{equation}
-$$
-
-Wrapping up Fourier transforms
-
-We can write down the answer for
-\( x_{pn}(t) \), by substituting \( f_n/m \) or \( g_n/m \) for \( F_0/m \). By
-writing each factor \( 2n\pi t/\tau \) as \( n\omega t \), with \( \omega\equiv
-2\pi/\tau \),
-$$
-\begin{equation}
-\tag{6}
-F(t)=\frac{f_0}{2}+\sum_{n>0}f_n\cos(n\omega t)+g_n\sin(n\omega t).
-\end{equation}
-$$
-
-The solutions for \( x(t) \) then come from replacing \( \omega \) with
-\( n\omega \) for each term in the particular solution,
-
-$$
-\begin{eqnarray}
-x_p(t)&=&\frac{f_0}{2k}+\sum_{n>0} \alpha_n\cos(n\omega t-\delta_n)+\beta_n\sin(n\omega t-\delta_n),\\
-\nonumber
-\alpha_n&=&\frac{f_n/m}{\sqrt{((n\omega)^2-\omega_0^2)+4\beta^2n^2\omega^2}},\\
-\nonumber
-\beta_n&=&\frac{g_n/m}{\sqrt{((n\omega)^2-\omega_0^2)+4\beta^2n^2\omega^2}},\\
-\nonumber
-\delta_n&=&\tan^{-1}\left(\frac{2\beta n\omega}{\omega_0^2-n^2\omega^2}\right).
-\end{eqnarray}
-$$
-
-Because the forces have been applied for a long time, any non-zero
-damping eliminates the homogeneous parts of the solution, so one need
-only consider the particular solution for each \( n \).
-
-Finding the Coefficients
-
-The problem is considered solved if one can find expressions for the
-coefficients \( f_n \) and \( g_n \), even though the solutions are expressed
-as an infinite sum. The coefficients can be extracted from the
-function \( F(t) \) by
-$$
-\begin{eqnarray}
-\tag{7}
-f_n&=&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~F(t)\cos(2n\pi t/\tau),\\
-\nonumber
-g_n&=&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~F(t)\sin(2n\pi t/\tau).
-\end{eqnarray}
-$$
-
-To check the consistency of these expressions and to verify
-Eq. (7), one can insert the expansion of \( F(t) \) in
-Eq. (6) into the expression for the coefficients in
-Eq. (7) and see whether
-
-$$
-\begin{eqnarray}
-f_n&=?&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~\left\{
-\frac{f_0}{2}+\sum_{m>0}f_m\cos(m\omega t)+g_m\sin(m\omega t)
-\right\}\cos(n\omega t).
-\end{eqnarray}
-$$
-
-Immediately, one can throw away all the terms with \( g_m \) because they
-convolute an even and an odd function. The term with \( f_0/2 \)
-disappears because \( \cos(n\omega t) \) is equally positive and negative
-over the interval and will integrate to zero. For all the terms
-\( f_m\cos(m\omega t) \) appearing in the sum, one can use angle addition
-formulas to see that \( \cos(m\omega t)\cos(n\omega t)=(1/2)(\cos[(m+n)\omega t]+\cos[(m-n)\omega t]) \). This will integrate
-to zero unless \( m=n \). In that case the \( m=n \) term gives
-
-$$
-\begin{equation}
-\int_{-\tau/2}^{\tau/2}dt~\cos^2(m\omega t)=\frac{\tau}{2},
-\tag{8}
-\end{equation}
-$$
-
-and
-
-$$
-\begin{eqnarray}
-f_n&=?&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~f_n/2\\
-\nonumber
-&=&f_n~\checkmark.
-\end{eqnarray}
-$$
-
-The same method can be used to check for the consistency of \( g_n \).
-
-Final words on Fourier Transforms
-
-The code here uses the Fourier series applied to a square wave signal,
-and visualizes the various approximations given by the Fourier series
-compared with a square wave with period \( T=0.2 \) (dimensionless time),
-width \( 0.1 \) and max value of the force \( F=2 \). We see that when we
-increase the number of components in the Fourier series, the Fourier
-series approximation gets closer and closer to the square wave signal.
-
-import numpy as np
-import math
-from scipy import signal
-import matplotlib.pyplot as plt
-
-# number of points
-n = 500
-# start and final times
-t0 = 0.0
-tn = 1.0
-# Period
-T =0.2
-# Max value of square signal
-Fmax= 2.0
-# Width of signal
-Width = 0.1
-t = np.linspace(t0, tn, n, endpoint=False)
-SqrSignal = np.zeros(n)
-FourierSeriesSignal = np.zeros(n)
-SqrSignal = 1.0+signal.square(2*np.pi*5*t+np.pi*Width/T)
-a0 = Fmax*Width/T
-FourierSeriesSignal = a0
-Factor = 2.0*Fmax/np.pi
-for i in range(1,500):
- FourierSeriesSignal += Factor/(i)*np.sin(np.pi*i*Width/T)*np.cos(i*t*2*np.pi/T)
-plt.plot(t, SqrSignal)
-plt.plot(t, FourierSeriesSignal)
-plt.ylim(-0.5, 2.5)
-plt.show()
-
-Two-dimensional Objects
diff --git a/doc/pub/week4/html/week4-solarized.html b/doc/pub/week4/html/week4-solarized.html
index 988681dd..c6955d17 100644
--- a/doc/pub/week4/html/week4-solarized.html
+++ b/doc/pub/week4/html/week4-solarized.html
@@ -89,6 +89,14 @@
2,
None,
'optimizing-the-parameters'),
+ ('Implementing the simple perceptron model',
+ 2,
+ None,
+ 'implementing-the-simple-perceptron-model'),
+ ('Exercise 1: Extensions to the above code',
+ 2,
+ None,
+ 'exercise-1-extensions-to-the-above-code'),
('Adding a hidden layer', 2, None, 'adding-a-hidden-layer'),
('Layout of a simple neural network with one hidden layer',
2,
@@ -97,10 +105,11 @@
('The derivatives', 2, None, 'the-derivatives'),
('Important observations', 2, None, 'important-observations'),
('The training', 2, None, 'the-training'),
- ('Code examples for the simple models',
+ ('Code example', 2, None, 'code-example'),
+ ('Exercise 2: Including more data',
2,
None,
- 'code-examples-for-the-simple-models'),
+ 'exercise-2-including-more-data'),
('Simple neural network and the back propagation equations',
2,
None,
@@ -280,25 +289,6 @@
2,
None,
'a-more-efficient-way-of-coding-the-above-convolution'),
- ('Convolution Examples: Principle of Superposition and Periodic '
- 'Forces (Fourier Transforms)',
- 2,
- None,
- 'convolution-examples-principle-of-superposition-and-periodic-forces-fourier-transforms'),
- ('Principle of Superposition',
- 2,
- None,
- 'principle-of-superposition'),
- ('Simple Code Example', 2, None, 'simple-code-example'),
- ('Wrapping up Fourier transforms',
- 2,
- None,
- 'wrapping-up-fourier-transforms'),
- ('Finding the Coefficients', 2, None, 'finding-the-coefficients'),
- ('Final words on Fourier Transforms',
- 2,
- None,
- 'final-words-on-fourier-transforms'),
('Two-dimensional Objects', 2, None, 'two-dimensional-objects'),
('Cross-Correlation', 2, None, 'cross-correlation'),
('More on Dimensionalities', 2, None, 'more-on-dimensionalities'),
@@ -343,7 +333,7 @@ February 5-9: Advanced machine learning and data analysis for the physical s
-Feb 5, 2024
+February 6, 2024
@@ -471,10 +461,118 @@ Optimizing the parameters
$$
+
+Implementing the simple perceptron model
+
+# import necessary packages
+import numpy as np
+import matplotlib.pyplot as plt
+
+def feed_forward(x):
+ # weighted sum of inputs to the output layer
+ z_1 = x*output_weights + output_bias
+ # Output from output node (one node only)
+ # Here the output is equal to the input
+ a_1 = z_1
+ return a_1
+
+def backpropagation(x, y):
+ a_1 = feed_forward(x)
+ # derivative of cost function
+ derivative_cost = a_1 - y
+ # the variable delta in the equations; note that since the output a_1 = z_1, its derivative wrt z_1 is 1
+ delta_1 = derivative_cost
+ # gradients for the output layer
+ output_weights_gradient = delta_1*x
+ output_bias_gradient = delta_1
+ # The cost function is 0.5*(a_1-y)^2. This gives a measure of the error for each iteration
+ return output_weights_gradient, output_bias_gradient
+
+# ensure the same random numbers appear every time
+np.random.seed(0)
+# Input variable
+x = 4.0
+# Target values
+y = 2*x+1.0
+
+# Defining the neural network
+n_inputs = 1
+n_outputs = 1
+# Initialize the network
+# weights and bias in the output layer
+output_weights = np.random.randn()
+output_bias = np.random.randn()
+
+# implementing a simple gradient descent approach with fixed learning rate
+eta = 0.01
+for i in range(40):
+ # calculate gradients from back propagation
+ derivative_w1, derivative_b1 = backpropagation(x, y)
+ # update weights and biases
+ output_weights -= eta * derivative_w1
+ output_bias -= eta * derivative_b1
+# our final prediction after training
+ytilde = output_weights*x+output_bias
+print(0.5*((ytilde-y)**2))
+
+
+
+
+Exercise 1: Extensions to the above code
+
+
Adding a hidden layer
-The training
-Code examples for the simple models
+Code example
+
+import numpy as np
+# We use the Sigmoid function as activation function
+def sigmoid(z):
+ return 1.0/(1.0+np.exp(-z))
+
+def forwardpropagation(x):
+ # weighted sum of inputs to the hidden layer
+ z_1 = np.matmul(x, w_1) + b_1
+ # activation in the hidden layer
+ a_1 = sigmoid(z_1)
+ # weighted sum of inputs to the output layer
+ z_2 = np.matmul(a_1, w_2) + b_2
+ a_2 = z_2
+ return a_1, a_2
+
+def backpropagation(x, y):
+ a_1, a_2 = forwardpropagation(x)
+ # parameter delta for the output layer, note that a_2=z_2 and its derivative wrt z_2 is just 1
+ delta_2 = a_2 - y
+ print(0.5*((a_2-y)**2))
+ # delta for the hidden layer
+ delta_1 = np.matmul(delta_2, w_2.T) * a_1 * (1 - a_1)
+ # gradients for the output layer
+ output_weights_gradient = np.matmul(a_1.T, delta_2)
+ output_bias_gradient = np.sum(delta_2, axis=0)
+ # gradient for the hidden layer
+ hidden_weights_gradient = np.matmul(x.T, delta_1)
+ hidden_bias_gradient = np.sum(delta_1, axis=0)
+ return output_weights_gradient, output_bias_gradient, hidden_weights_gradient, hidden_bias_gradient
+
+
+# ensure the same random numbers appear every time
+np.random.seed(0)
+# Input variable
+x = np.array([4.0],dtype=np.float64)
+# Target values
+y = 2*x+1.0
+
+# Defining the neural network, only scalars here
+n_inputs = x.shape
+n_features = 1
+n_hidden_neurons = 1
+n_outputs = 1
+
+# Initialize the network
+# weights and bias in the hidden layer
+w_1 = np.random.randn(n_features, n_hidden_neurons)
+b_1 = np.zeros(n_hidden_neurons) + 0.01
+
+# weights and bias in the output layer
+w_2 = np.random.randn(n_hidden_neurons, n_outputs)
+b_2 = np.zeros(n_outputs) + 0.01
+
+eta = 0.1
+for i in range(50):
+ # calculate gradients
+ derivW2, derivB2, derivW1, derivB1 = backpropagation(x, y)
+ # update weights and biases
+ w_2 -= eta * derivW2
+ b_2 -= eta * derivB2
+ w_1 -= eta * derivW1
+ b_1 -= eta * derivB1
+
+
+
+
+Exercise 2: Including more data
+
+
Simple neural network and the back propagation equations
@@ -769,6 +977,41 @@ Gradient expressions
Program example
+
+
+
Getting serious, the back propagation equations for a neural network
@@ -3406,14 +3649,21 @@ Efficient Polynomial Multiplication
We note that \( \alpha_i=0 \) except for \( i\in \left\{0,1,2\right\} \) and \( \beta_i=0 \) except for \( i\in\left\{0,1,2,3\right\} \).
@@ -3468,279 +3718,6 @@ Does the number of floating point operations change here when we use the commutative property?
-For problems with so-called harmonic oscillations, given by for example the following differential equation
-$$ -m\frac{d^2x}{dt^2}+\eta\frac{dx}{dt}+x(t)=F(t), -$$ - -where \( F(t) \) is an applied external force acting on the system (often called a driving force), one can use the theory of Fourier transformations to find the solutions of this type of equations.
- -If one has several driving forces, \( F(t)=\sum_n F_n(t) \), one can find -the particular solution to each \( F_n \), \( x_{pn}(t) \), and the particular -solution for the entire driving force is then given by a series like -
- -$$ -\begin{equation} -x_p(t)=\sum_nx_{pn}(t). -\label{_auto4} -\end{equation} -$$ - - -This is known as the principle of superposition. It only applies when -the homogenous equation is linear. If there were an anharmonic term -such as \( x^3 \) in the homogenous equation, then when one summed various -solutions, \( x=(\sum_n x_n)^2 \), one would get cross -terms. Superposition is especially useful when \( F(t) \) can be written -as a sum of sinusoidal terms, because the solutions for each -sinusoidal (sine or cosine) term is analytic. -
- -Driving forces are often periodic, even when they are not -sinusoidal. Periodicity implies that for some time \( \tau \) -
- -$$ -\begin{eqnarray} -F(t+\tau)=F(t). -\end{eqnarray} -$$ - -One example of a non-sinusoidal periodic force is a square wave. Many -components in electric circuits are non-linear, e.g. diodes, which -makes many wave forms non-sinusoidal even when the circuits are being -driven by purely sinusoidal sources. -
- -The code here shows a typical example of such a square wave generated using the functionality included in the scipy Python package. We have used a period of \( \tau=0.2 \).
- - - -import numpy as np
-import math
-from scipy import signal
-import matplotlib.pyplot as plt
-
-# number of points
-n = 500
-# start and final times
-t0 = 0.0
-tn = 1.0
-# Period
-t = np.linspace(t0, tn, n, endpoint=False)
-SqrSignal = np.zeros(n)
-SqrSignal = 1.0+signal.square(2*np.pi*5*t)
-plt.plot(t, SqrSignal)
-plt.ylim(-0.5, 2.5)
-plt.show()
-
-For the sinusoidal example the -period is \( \tau=2\pi/\omega \). However, higher harmonics can also -satisfy the periodicity requirement. In general, any force that -satisfies the periodicity requirement can be expressed as a sum over -harmonics, -
- -$$ -\begin{equation} -F(t)=\frac{f_0}{2}+\sum_{n>0} f_n\cos(2n\pi t/\tau)+g_n\sin(2n\pi t/\tau). -\label{_auto5} -\end{equation} -$$ - - -We can write down the answer for -\( x_{pn}(t) \), by substituting \( f_n/m \) or \( g_n/m \) for \( F_0/m \). By -writing each factor \( 2n\pi t/\tau \) as \( n\omega t \), with \( \omega\equiv -2\pi/\tau \), -
- -$$ -\begin{equation} -\label{eq:fourierdef1} -F(t)=\frac{f_0}{2}+\sum_{n>0}f_n\cos(n\omega t)+g_n\sin(n\omega t). -\end{equation} -$$ - -The solutions for \( x(t) \) then come from replacing \( \omega \) with -\( n\omega \) for each term in the particular solution, -
- -$$ -\begin{eqnarray} -x_p(t)&=&\frac{f_0}{2k}+\sum_{n>0} \alpha_n\cos(n\omega t-\delta_n)+\beta_n\sin(n\omega t-\delta_n),\\ -\nonumber -\alpha_n&=&\frac{f_n/m}{\sqrt{((n\omega)^2-\omega_0^2)+4\beta^2n^2\omega^2}},\\ -\nonumber -\beta_n&=&\frac{g_n/m}{\sqrt{((n\omega)^2-\omega_0^2)+4\beta^2n^2\omega^2}},\\ -\nonumber -\delta_n&=&\tan^{-1}\left(\frac{2\beta n\omega}{\omega_0^2-n^2\omega^2}\right). -\end{eqnarray} -$$ - - -Because the forces have been applied for a long time, any non-zero -damping eliminates the homogenous parts of the solution, so one need -only consider the particular solution for each \( n \). -
- -The problem is considered solved if one can find expressions for the -coefficients \( f_n \) and \( g_n \), even though the solutions are expressed -as an infinite sum. The coefficients can be extracted from the -function \( F(t) \) by -
- -$$ -\begin{eqnarray} -\label{eq:fourierdef2} -f_n&=&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~F(t)\cos(2n\pi t/\tau),\\ -\nonumber -g_n&=&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~F(t)\sin(2n\pi t/\tau). -\end{eqnarray} -$$ - -To check the consistency of these expressions and to verify -Eq. \eqref{eq:fourierdef2}, one can insert the expansion of \( F(t) \) in -Eq. \eqref{eq:fourierdef1} into the expression for the coefficients in -Eq. \eqref{eq:fourierdef2} and see whether -
- -$$ -\begin{eqnarray} -f_n&=?&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~\left\{ -\frac{f_0}{2}+\sum_{m>0}f_m\cos(m\omega t)+g_m\sin(m\omega t) -\right\}\cos(n\omega t). -\end{eqnarray} -$$ - -Immediately, one can throw away all the terms with \( g_m \) because they -convolute an even and an odd function. The term with \( f_0/2 \) -disappears because \( \cos(n\omega t) \) is equally positive and negative -over the interval and will integrate to zero. For all the terms -\( f_m\cos(m\omega t) \) appearing in the sum, one can use angle addition -formulas to see that \( \cos(m\omega t)\cos(n\omega -t)=(1/2)(\cos[(m+n)\omega t]+\cos[(m-n)\omega t] \). This will integrate -to zero unless \( m=n \). In that case the \( m=n \) term gives -
- -$$ -\begin{equation} -\int_{-\tau/2}^{\tau/2}dt~\cos^2(m\omega t)=\frac{\tau}{2}, -\label{_auto6} -\end{equation} -$$ - -and
- -$$ -\begin{eqnarray} -f_n&=?&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~f_n/2\\ -\nonumber -&=&f_n~\checkmark. -\end{eqnarray} -$$ - -The same method can be used to check for the consistency of \( g_n \).
- -The code here uses the Fourier series applied to a -square wave signal. The code here -visualizes the various approximations given by Fourier series compared -with a square wave with period \( T=0.2 \) (dimensionless time), width \( 0.1 \) and max value of the force \( F=2 \). We -see that when we increase the number of components in the Fourier -series, the Fourier series approximation gets closer and closer to the -square wave signal. -
- - - -import numpy as np
-import math
-from scipy import signal
-import matplotlib.pyplot as plt
-
-# number of points
-n = 500
-# start and final times
-t0 = 0.0
-tn = 1.0
-# Period
-T =0.2
-# Max value of square signal
-Fmax= 2.0
-# Width of signal
-Width = 0.1
-t = np.linspace(t0, tn, n, endpoint=False)
-SqrSignal = np.zeros(n)
-FourierSeriesSignal = np.zeros(n)
-SqrSignal = 1.0+signal.square(2*np.pi*5*t+np.pi*Width/T)
-a0 = Fmax*Width/T
-FourierSeriesSignal = a0
-Factor = 2.0*Fmax/np.pi
-for i in range(1,500):
- FourierSeriesSignal += Factor/(i)*np.sin(np.pi*i*Width/T)*np.cos(i*t*2*np.pi/T)
-plt.plot(t, SqrSignal)
-plt.plot(t, FourierSeriesSignal)
-plt.ylim(-0.5, 2.5)
-plt.show()
-
-In the example code here we implement the above equations (with explict +expressions for the derivatives) with just one input variable \( x \) and +one output variable. The target value \( y=2x+1 \) is a simple linear +function in \( x \). Since this is a regression problem, we define the cost function to be proportional to the least squares error +
+$$ +C(y,w_1,b_1)=\frac{1}{2}(a_1-y)^2, +$$ + +with \( a_1 \) the output from the network.
+ + + +# import necessary packages
+import numpy as np
+import matplotlib.pyplot as plt
+
+def feed_forward(x):
+ # weighted sum of inputs to the output layer
+ z_1 = x*output_weights + output_bias
+ # Output from output node (one node only)
+ # Here the output is equal to the input
+ a_1 = z_1
+ return a_1
+
+def backpropagation(x, y):
+ a_1 = feed_forward(x)
+ # derivative of cost function
+ derivative_cost = a_1 - y
+ # the variable delta in the equations, note that output a_1 = z_1, its derivatives wrt z_o is thus 1
+ delta_1 = derivative_cost
+ # gradients for the output layer
+ output_weights_gradient = delta_1*x
+ output_bias_gradient = delta_1
+ # The cost function is 0.5*(a_1-y)^2. This gives a measure of the error for each iteration
+ return output_weights_gradient, output_bias_gradient
+
+# ensure the same random numbers appear every time
+np.random.seed(0)
+# Input variable
+x = 4.0
+# Target values
+y = 2*x+1.0
+
+# Defining the neural network
+n_inputs = 1
+n_outputs = 1
+# Initialize the network
+# weights and bias in the output layer
+output_weights = np.random.randn()
+output_bias = np.random.randn()
+
+# implementing a simple gradient descent approach with fixed learning rate
+eta = 0.01
+for i in range(40):
+ # calculate gradients from back propagation
+ derivative_w1, derivative_b1 = backpropagation(x, y)
+ # update weights and biases
+ output_weights -= eta * derivative_w1
+ output_bias -= eta * derivative_b1
+# our final prediction after training
+ytilde = output_weights*x+output_bias
+print(0.5*((ytilde-y)**2))
+
+Running this code gives us an acceptable results after some 40-50 iterations. Note that the results depend on the value of the learning rate.
+ +Feel free to add more input nodes and weights to the above +code. Furthermore, try to increase the amount of input and +target/output data. Try also to perform calculations for more values +of the learning rates. Feel free to add either hyperparameters with an +\( l_1 \) norm or an \( l_2 \) norm and discuss your results. +
+ +You could also try to change the function \( f(x)=y \) from a linear polynomial in \( x \) to a higher-order polynomial. +Comment your results. +
+ +Hint: Increasing the number of input variables and input nodes requires a rewrite of the input data in terms of a matrix. You need to figure out the correct dimensionalities.
+ + +We change our simple model to (see graph) +
We change our simple model to (see graph below) a network with just one hidden layer but with scalar variables only.
@@ -641,7 +739,117 @@For the first hidden layer \( a_{i-1}=a_0=x \) for this simple model.
The code here implements the above model with one hidden layer and +scalar variables for the same function we studied in the previous +example. The code is however set up so that we can add multiple +inputs \( x \) and target values \( y \). Note also that we have the +possibility of defining a feature matrix \( \boldsymbol{X} \) with more than just +one column for the input values. This will turn useful in our next example. We have also defined matrices and vectors for all of our operations although it is not necessary here. +
+ + + +import numpy as np
+# We use the Sigmoid function as activation function
+def sigmoid(z):
+ return 1.0/(1.0+np.exp(-z))
+
+def forwardpropagation(x):
+ # weighted sum of inputs to the hidden layer
+ z_1 = np.matmul(x, w_1) + b_1
+ # activation in the hidden layer
+ a_1 = sigmoid(z_1)
+ # weighted sum of inputs to the output layer
+ z_2 = np.matmul(a_1, w_2) + b_2
+ a_2 = z_2
+ return a_1, a_2
+
+def backpropagation(x, y):
+ a_1, a_2 = forwardpropagation(x)
+ # parameter delta for the output layer, note that a_2=z_2 and its derivative wrt z_2 is just 1
+ delta_2 = a_2 - y
+ print(0.5*((a_2-y)**2))
+ # delta for the hidden layer
+ delta_1 = np.matmul(delta_2, w_2.T) * a_1 * (1 - a_1)
+ # gradients for the output layer
+ output_weights_gradient = np.matmul(a_1.T, delta_2)
+ output_bias_gradient = np.sum(delta_2, axis=0)
+ # gradient for the hidden layer
+ hidden_weights_gradient = np.matmul(x.T, delta_1)
+ hidden_bias_gradient = np.sum(delta_1, axis=0)
+ return output_weights_gradient, output_bias_gradient, hidden_weights_gradient, hidden_bias_gradient
+
+
+# ensure the same random numbers appear every time
+np.random.seed(0)
+# Input variable
+x = np.array([4.0],dtype=np.float64)
+# Target values
+y = 2*x+1.0
+
+# Defining the neural network, only scalars here
+n_inputs = x.shape
+n_features = 1
+n_hidden_neurons = 1
+n_outputs = 1
+
+# Initialize the network
+# weights and bias in the hidden layer
+w_1 = np.random.randn(n_features, n_hidden_neurons)
+b_1 = np.zeros(n_hidden_neurons) + 0.01
+
+# weights and bias in the output layer
+w_2 = np.random.randn(n_hidden_neurons, n_outputs)
+b_2 = np.zeros(n_outputs) + 0.01
+
+eta = 0.1
+for i in range(50):
+ # calculate gradients
+ derivW2, derivB2, derivW1, derivB1 = backpropagation(x, y)
+ # update weights and biases
+ w_2 -= eta * derivW2
+ b_2 -= eta * derivB2
+ w_1 -= eta * derivW1
+ b_1 -= eta * derivB1
+
+We see that after some few iterations (the results do depend on the learning rate however), we get an error which is rather small.
+ +Try to increase the amount of input and +target/output data. Try also to perform calculations for more values +of the learning rates. Feel free to add either hyperparameters with an +\( l_1 \) norm or an \( l_2 \) norm and discuss your results. +Discuss your results as functions of the amount of training data and various learning rates. +
+ +Challenge: Try to change the activation functions and replace the hard-coded analytical expressions with automatic derivation via either autograd or JAX.
+ +We extend our simple code to a function which depends on two variable \( x_0 \) and \( x_1 \), that is
+$$ +y=f(x_0,x_1)=x_0^2+3x_0x_1+x_1^2+5. +$$ + +We feed our network with \( n=100 \) entries \( x_0 \) and \( x_1 \). We have thus two features represented by these variable and an input matrix/design matrix \( \boldsymbol{X}\in \mathbf{R}^{n\times 2} \)
+$$ +\boldsymbol{X}=\begin{bmatrix} x_{00} & x_{01} \\ x_{00} & x_{01} \\ x_{10} & x_{11} \\ x_{20} & x_{21} \\ \dots & \dots \\ \dots & \dots \\ x_{n-20} & x_{n-21} \\ x_{n-10} & x_{n-11} \end{bmatrix}. +$$ + + + + +
+
+We note that \( \alpha_i=0 \) except for \( i\in \left\{0,1,2\right\} \) and \( \beta_i=0 \) except for \( i\in\left\{0,1,2,3\right\} \).
@@ -3545,279 +3795,6 @@Does the number of floating point operations change here when we use the commutative property?
-For problems with so-called harmonic oscillations, given by for example the following differential equation
-$$ -m\frac{d^2x}{dt^2}+\eta\frac{dx}{dt}+x(t)=F(t), -$$ - -where \( F(t) \) is an applied external force acting on the system (often called a driving force), one can use the theory of Fourier transformations to find the solutions of this type of equations.
- -If one has several driving forces, \( F(t)=\sum_n F_n(t) \), one can find -the particular solution to each \( F_n \), \( x_{pn}(t) \), and the particular -solution for the entire driving force is then given by a series like -
- -$$ -\begin{equation} -x_p(t)=\sum_nx_{pn}(t). -\label{_auto4} -\end{equation} -$$ - - -This is known as the principle of superposition. It only applies when -the homogenous equation is linear. If there were an anharmonic term -such as \( x^3 \) in the homogenous equation, then when one summed various -solutions, \( x=(\sum_n x_n)^2 \), one would get cross -terms. Superposition is especially useful when \( F(t) \) can be written -as a sum of sinusoidal terms, because the solutions for each -sinusoidal (sine or cosine) term is analytic. -
- -Driving forces are often periodic, even when they are not -sinusoidal. Periodicity implies that for some time \( \tau \) -
- -$$ -\begin{eqnarray} -F(t+\tau)=F(t). -\end{eqnarray} -$$ - -One example of a non-sinusoidal periodic force is a square wave. Many -components in electric circuits are non-linear, e.g. diodes, which -makes many wave forms non-sinusoidal even when the circuits are being -driven by purely sinusoidal sources. -
- -The code here shows a typical example of such a square wave generated using the functionality included in the scipy Python package. We have used a period of \( \tau=0.2 \).
- - - -import numpy as np
-import math
-from scipy import signal
-import matplotlib.pyplot as plt
-
-# number of points
-n = 500
-# start and final times
-t0 = 0.0
-tn = 1.0
-# Period
-t = np.linspace(t0, tn, n, endpoint=False)
-SqrSignal = np.zeros(n)
-SqrSignal = 1.0+signal.square(2*np.pi*5*t)
-plt.plot(t, SqrSignal)
-plt.ylim(-0.5, 2.5)
-plt.show()
-
-For the sinusoidal example the -period is \( \tau=2\pi/\omega \). However, higher harmonics can also -satisfy the periodicity requirement. In general, any force that -satisfies the periodicity requirement can be expressed as a sum over -harmonics, -
- -$$ -\begin{equation} -F(t)=\frac{f_0}{2}+\sum_{n>0} f_n\cos(2n\pi t/\tau)+g_n\sin(2n\pi t/\tau). -\label{_auto5} -\end{equation} -$$ - - -We can write down the answer for -\( x_{pn}(t) \), by substituting \( f_n/m \) or \( g_n/m \) for \( F_0/m \). By -writing each factor \( 2n\pi t/\tau \) as \( n\omega t \), with \( \omega\equiv -2\pi/\tau \), -
- -$$ -\begin{equation} -\label{eq:fourierdef1} -F(t)=\frac{f_0}{2}+\sum_{n>0}f_n\cos(n\omega t)+g_n\sin(n\omega t). -\end{equation} -$$ - -The solutions for \( x(t) \) then come from replacing \( \omega \) with -\( n\omega \) for each term in the particular solution, -
- -$$ -\begin{eqnarray} -x_p(t)&=&\frac{f_0}{2k}+\sum_{n>0} \alpha_n\cos(n\omega t-\delta_n)+\beta_n\sin(n\omega t-\delta_n),\\ -\nonumber -\alpha_n&=&\frac{f_n/m}{\sqrt{((n\omega)^2-\omega_0^2)+4\beta^2n^2\omega^2}},\\ -\nonumber -\beta_n&=&\frac{g_n/m}{\sqrt{((n\omega)^2-\omega_0^2)+4\beta^2n^2\omega^2}},\\ -\nonumber -\delta_n&=&\tan^{-1}\left(\frac{2\beta n\omega}{\omega_0^2-n^2\omega^2}\right). -\end{eqnarray} -$$ - - -Because the forces have been applied for a long time, any non-zero -damping eliminates the homogenous parts of the solution, so one need -only consider the particular solution for each \( n \). -
- -The problem is considered solved if one can find expressions for the -coefficients \( f_n \) and \( g_n \), even though the solutions are expressed -as an infinite sum. The coefficients can be extracted from the -function \( F(t) \) by -
- -$$ -\begin{eqnarray} -\label{eq:fourierdef2} -f_n&=&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~F(t)\cos(2n\pi t/\tau),\\ -\nonumber -g_n&=&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~F(t)\sin(2n\pi t/\tau). -\end{eqnarray} -$$ - -To check the consistency of these expressions and to verify -Eq. \eqref{eq:fourierdef2}, one can insert the expansion of \( F(t) \) in -Eq. \eqref{eq:fourierdef1} into the expression for the coefficients in -Eq. \eqref{eq:fourierdef2} and see whether -
- -$$ -\begin{eqnarray} -f_n&=?&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~\left\{ -\frac{f_0}{2}+\sum_{m>0}f_m\cos(m\omega t)+g_m\sin(m\omega t) -\right\}\cos(n\omega t). -\end{eqnarray} -$$ - -Immediately, one can throw away all the terms with \( g_m \) because they -convolute an even and an odd function. The term with \( f_0/2 \) -disappears because \( \cos(n\omega t) \) is equally positive and negative -over the interval and will integrate to zero. For all the terms -\( f_m\cos(m\omega t) \) appearing in the sum, one can use angle addition -formulas to see that \( \cos(m\omega t)\cos(n\omega -t)=(1/2)(\cos[(m+n)\omega t]+\cos[(m-n)\omega t] \). This will integrate -to zero unless \( m=n \). In that case the \( m=n \) term gives -
- -$$ -\begin{equation} -\int_{-\tau/2}^{\tau/2}dt~\cos^2(m\omega t)=\frac{\tau}{2}, -\label{_auto6} -\end{equation} -$$ - -and
- -$$ -\begin{eqnarray} -f_n&=?&\frac{2}{\tau}\int_{-\tau/2}^{\tau/2} dt~f_n/2\\ -\nonumber -&=&f_n~\checkmark. -\end{eqnarray} -$$ - -The same method can be used to check for the consistency of \( g_n \).
- -The code here uses the Fourier series applied to a -square wave signal. The code here -visualizes the various approximations given by Fourier series compared -with a square wave with period \( T=0.2 \) (dimensionless time), width \( 0.1 \) and max value of the force \( F=2 \). We -see that when we increase the number of components in the Fourier -series, the Fourier series approximation gets closer and closer to the -square wave signal. -
-
-
-import numpy as np
-from scipy import signal
-import matplotlib.pyplot as plt
-
-# number of time points
-n = 500
-# start and final times
-t0 = 0.0
-tn = 1.0
-# period of the square-wave signal
-T = 0.2
-# max value of the square-wave signal
-Fmax = 2.0
-# width of the signal pulse
-Width = 0.1
-# number of terms kept in the Fourier series
-Nterms = 500
-t = np.linspace(t0, tn, n, endpoint=False)
-# square wave between 0 and Fmax; the phase shift centers a pulse of
-# width Width at t=0, matching the cosine series below
-SqrSignal = 0.5*Fmax*(1.0 + signal.square(2*np.pi*t/T + np.pi*Width/T))
-# average value f_0/2 of the signal
-a0 = Fmax*Width/T
-FourierSeriesSignal = a0*np.ones(n)
-Factor = 2.0*Fmax/np.pi
-for i in range(1, Nterms):
-    # analytical coefficient f_i of the centered pulse; the g_i vanish
-    FourierSeriesSignal += Factor/i*np.sin(np.pi*i*Width/T)*np.cos(2*np.pi*i*t/T)
-plt.plot(t, SqrSignal, label='Square wave')
-plt.plot(t, FourierSeriesSignal, label='Fourier series')
-plt.ylim(-0.5, 2.5)
-plt.legend()
-plt.show()
-
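-
-For this pulse of width \( W \) the coefficients in
-Eq. \eqref{eq:fourierdef2} can be evaluated by hand,
-
-$$
-f_n=\frac{2}{T}\int_{-W/2}^{W/2} dt~F_{\mathrm{max}}\cos(2n\pi t/T)=\frac{2F_{\mathrm{max}}}{n\pi}\sin(n\pi W/T),
-$$
-
-with all \( g_n=0 \) since the pulse is even about \( t=0 \); this is
-exactly the factor used in the loop above. Note also that however many
-terms we keep, a small overshoot survives near the discontinuities, the
-well-known Gibbs phenomenon.
-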