Commit b60528f

Author: hudde
Commit message: refactoring
1 parent b0a0741 commit b60528f

File tree

2 files changed (+36, -33 lines)


Diff for: nonlinear_programming.ipynb

+27-24
@@ -51,30 +51,32 @@
 "\n",
 "- Expected yearly returns:\n",
 "\n",
-" Share $A$: $r_{A} = 7\\%$\n",
+" - Share $A$: $r_{A} = 7\\%$\n",
 "\n",
-" Share $B$: $r_{B} = 9\\%$\n",
+" - Share $B$: $r_{B} = 9\\%$\n",
 " \n",
 " \n",
 "- Volatilities:\n",
 "\n",
-" Share $A$: $\\sigma_{A} = 20\\%$\n",
+" - Share $A$: $\\sigma_{A} = 20\\%$\n",
 "\n",
-" Share $B$: $\\sigma_{B} = 30\\%$\n",
+" - Share $B$: $\\sigma_{B} = 30\\%$\n",
 " \n",
 " \n",
 "- Correlation:\n",
 "\n",
-" $\\rho_{A, B} = 0.7$\n",
+" - $\\rho_{A, B} = 0.7$\n",
 "\n",
 "The investor wants to invest her money in such a way that the expected return is $5\\%$ and the risk (volatility) is minimized.\n",
 "The volatility of a portfolio consisting of $x_{1} \\times 10\\,000$ € in share $A$ and $x_{2} \\times 10\\,000$ € in share $B$ can be expressed as\n",
 "\n",
-"$\\begin{align}\n",
+"$$\n",
+"\\begin{align}\n",
 "f (x_{1}, x_{2})\n",
 "=\n",
 "\\sqrt{ \\sigma_{A}^2 x_1^2 + \\sigma_{B}^2 x_2^2 + 2 \\sigma_{A} x_1 \\sigma_{B} x_2 \\rho_{A, B}}.\n",
-"\\end{align}$\n",
+"\\end{align}\n",
+"$$\n",
 "\n",
 "Our goal is\n",
 "\n",
@@ -211,7 +213,7 @@
 "source": [
 "## **Intuition:** The Gradient always points in the direction of the steepest ascent.\n",
 "\n",
-"Example 1: $f(x_{1}, x_{2}) = x_1$, $\\nabla f(x_{1}, x_{2}) = \\begin{pmatrix} 1 \\\\ 0 \\end{pmatrix}$."
+"**Example 1:** $f(x_{1}, x_{2}) = x_1$, $\\nabla f(x_{1}, x_{2}) = \\begin{pmatrix} 1 \\\\ 0 \\end{pmatrix}$."
 ]
 },
 {
@@ -233,7 +235,7 @@
 "id": "13632e34-1665-4598-8873-2bc804728365",
 "metadata": {},
 "source": [
-"Example 2: $f(x_{1}, x_{2}) = \\tfrac{1}{2} (x_{1} + x_{2})$, $\\nabla f(x_{1}, x_{2}) = \\begin{pmatrix} \\tfrac{1}{2} \\\\ \\tfrac{1}{2} \\end{pmatrix}$."
+"**Example 2:** $f(x_{1}, x_{2}) = \\tfrac{1}{2} (x_{1} + x_{2})$, $\\nabla f(x_{1}, x_{2}) = \\begin{pmatrix} \\tfrac{1}{2} \\\\ \\tfrac{1}{2} \\end{pmatrix}$."
 ]
 },
 {
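The analytic gradients in the two examples above can be checked numerically; a small sketch (not from the commit) comparing Example 2 against finite differences:

```python
import numpy as np

# Finite-difference check of Example 2: f(x1, x2) = (x1 + x2) / 2.
f = lambda x: 0.5 * (x[0] + x[1])
x = np.array([1.0, 2.0])
eps = 1e-6
numerical_grad = np.array([(f(x + eps * e) - f(x)) / eps for e in np.eye(2)])
print(numerical_grad)  # close to [0.5, 0.5], matching the analytic gradient
```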
@@ -271,7 +273,8 @@
 "\\end{pmatrix}\n",
 "\\end{equation}\n",
 "\n",
-"Example: \n",
+"**Examples:**\n",
+"\n",
 "\\begin{equation}\n",
 "\\nabla f(-4, -2) =\n",
 "\\begin{pmatrix}\n",
@@ -392,8 +395,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"contour_plot.add_gradient_descent(x0=[-4, -2], function = f, grad=grad_f, gamma=1, Iterationen=i, color = \"#636EFA\")\n",
-"surface_plot.add_gradient_descent_surface(x0=[-4, -2], function = f, grad=grad_f, gamma=1, Iterationen=i, color = \"#636EFA\")\n",
+"contour_plot.add_gradient_descent(x0=[-4, -2], function = f, grad=grad_f, gamma=1, iterations=i, color = \"#636EFA\")\n",
+"surface_plot.add_gradient_descent_surface(x0=[-4, -2], function = f, grad=grad_f, gamma=1, iterations=i, color = \"#636EFA\")\n",
 "show_plot(contour_plot, surface_plot)\n",
 "i += 1"
 ]
@@ -436,8 +439,8 @@
 "source": [
 "# Gradient descent with learning rate gamma = 0.1:\n",
 "\n",
-"contour_plot.add_gradient_descent(x0=[-4, -2], function = f, grad=grad_f, gamma=0.1, Iterationen=i, color = \"#EF553B\")\n",
-"surface_plot.add_gradient_descent_surface(x0=[-4, -2], function = f, grad=grad_f, gamma=0.1, Iterationen=i, color = \"#EF553B\")\n",
+"contour_plot.add_gradient_descent(x0=[-4, -2], function = f, grad=grad_f, gamma=0.1, iterations=i, color = \"#EF553B\")\n",
+"surface_plot.add_gradient_descent_surface(x0=[-4, -2], function = f, grad=grad_f, gamma=0.1, iterations=i, color = \"#EF553B\")\n",
 "show_plot(contour_plot, surface_plot)\n",
 "i+=1"
 ]
@@ -481,8 +484,8 @@
 "source": [
 "# Gradient descent with learning rate gamma = 2:\n",
 "\n",
-"contour_plot.add_gradient_descent(x0=[-4, -2], function = f, grad=grad_f, gamma=2, Iterationen=i, color = \"#00CC96\")\n",
-"surface_plot.add_gradient_descent_surface(x0=[-4, -2], function = f, grad=grad_f, gamma=2, Iterationen=i, color = \"#00CC96\")\n",
+"contour_plot.add_gradient_descent(x0=[-4, -2], function = f, grad=grad_f, gamma=2, iterations=i, color = \"#00CC96\")\n",
+"surface_plot.add_gradient_descent_surface(x0=[-4, -2], function = f, grad=grad_f, gamma=2, iterations=i, color = \"#00CC96\")\n",
 "show_plot(contour_plot, surface_plot)\n",
 "i += 1"
 ]
@@ -537,8 +540,8 @@
 "\n",
 "x0=[random.uniform(-5,6),random.uniform(-3,3)]\n",
 "\n",
-"contour_plot.add_gradient_descent(x0=x0, function = f, grad=grad_f, gamma=1, Iterationen=30)\n",
-"surface_plot.add_gradient_descent_surface(x0=x0, function = f, grad=grad_f, gamma=1, Iterationen=30)\n",
+"contour_plot.add_gradient_descent(x0=x0, function = f, grad=grad_f, gamma=1, iterations=30)\n",
+"surface_plot.add_gradient_descent_surface(x0=x0, function = f, grad=grad_f, gamma=1, iterations=30)\n",
 "show_plot(contour_plot, surface_plot)"
 ]
 },
@@ -547,11 +550,11 @@
 "id": "39148f36-835e-4ef2-9073-5764c20f03f1",
 "metadata": {},
 "source": [
-"## Nonlinear programming with Scipy\n",
+"## Nonlinear programming with `Scipy`\n",
 "\n",
 "The method presented so far is too simple for practical use; in real applications many further issues have to be taken into account.\n",
 "Therefore, one usually relies on an existing implementation.\n",
-"The Python package Scipy with the function `minimize` is very suitable for this.\n",
+"The Python package `Scipy` with the function `minimize` is very suitable for this.\n",
 "It suffices to pass the function to be minimized and the initial guess:"
 ]
 },
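As a rough illustration of the `scipy.optimize.minimize` call described in this cell (a sketch, not from the commit; the objective below is a stand-in quadratic, not the notebook's `f`):

```python
import numpy as np
from scipy.optimize import minimize

# Stand-in objective for illustration only; the notebook minimizes its own f.
def objective(x):
    return (x[0] + 3.0)**2 + (x[1] - 1.0)**2

result = minimize(objective, x0=np.array([-4.0, -2.0]))  # default method: BFGS
print(result.x, result.fun)
```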
@@ -584,7 +587,7 @@
 "An example of a constraint of the form $g(x) \\leq 0$ is\n",
 "\n",
 "\\begin{equation}\n",
-"-(x_{1}+2)^{2} + x_{2}^{3} \\leq 0\n",
+"-(x_{1}+2)^{2} + x_{2}^{3} \\leq 0.\n",
 "\\end{equation}\n",
 "\n",
 "This inequality defines the domain in which the solution must lie."
@@ -756,7 +759,7 @@
 "\\begin{pmatrix}\n",
 "4x_{1}^{3} + 36x_{1}^{2} + 108x_{1} + 108 - 4x_{1}x_{2} - 12x_{2} \\\\\n",
 "-2x_{1}^{2} -12x_{1} + 2x_{2} - 18\n",
-"\\end{pmatrix}\n",
+"\\end{pmatrix}.\n",
 "\\end{equation}\n",
 "\n",
 "The gradient of $f_{ \\text{pen}}$ is\n",
@@ -775,7 +778,7 @@
 "outputs": [],
 "source": [
 "contour_plot.add_gradient_descent(x0=contour_plot.result, function=f, grad=lambda x : (grad_f(x) + alpha*grad_h_sq(x)), gamma=gamma,\n",
-" Iterationen=100, Nebenbedingung = h)\n",
+" iterations=100, Nebenbedingung = h)\n",
 "\n",
 "contour_plot.show()\n",
 "\n",
@@ -892,7 +895,7 @@
 "We optimize the function\n",
 "\n",
 "\\begin{equation}\n",
-"f(x) = \\exp \\left(- \\sum_{i=1}^{n} i \\cdot x_{i} \\right) + \\sum_{i=1}^{n} x_{i}^{2}\n",
+"f(x) = \\exp \\left(- \\sum_{i=1}^{n} i \\cdot x_{i} \\right) + \\sum_{i=1}^{n} x_{i}^{2}.\n",
 "\\end{equation}"
 ]
 },
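A possible way to set this objective up and hand it to `minimize` (a sketch, not from the commit, assuming an illustrative `n = 5`):

```python
import numpy as np
from scipy.optimize import minimize

n = 5                         # illustrative dimension, not fixed by the notebook
weights = np.arange(1, n + 1)

def f(x):
    # f(x) = exp(-sum_i i * x_i) + sum_i x_i^2
    return np.exp(-weights @ x) + x @ x

res = minimize(f, x0=np.zeros(n))
print(res.x, res.fun)
```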

Diff for: nonlinear_programming.py

+9-9
@@ -42,12 +42,12 @@ def add_gradients(self, gradf):
                           arrowwidth=2,
                           arrowcolor='red')

-    def add_gradient_descent(self, x0, function, grad, gamma=1, Iterationen=10, color=None, Nebenbedingung=None):
-        x = np.zeros(shape=(Iterationen + 1, 2))
-        f_x = np.zeros(Iterationen + 1)
+    def add_gradient_descent(self, x0, function, grad, gamma=1, iterations=10, color=None, Nebenbedingung=None):
+        x = np.zeros(shape=(iterations + 1, 2))
+        f_x = np.zeros(iterations + 1)
         x[0, :] = np.array(x0)
         f_x[0] = np.round(function(x[0, :]), 3)
-        for i in range(Iterationen):
+        for i in range(iterations):
             x[i + 1] = -gamma * grad(x[i, :]) + x[i, :]
             f_x[i + 1] = np.round(function(x[i + 1, :]), 3)
         self.add_scatter(
@@ -59,11 +59,11 @@ def add_gradient_descent(self, x0, function, grad, gamma=1, Iterationen=10, colo
         self.result = x[-1]
         if Nebenbedingung is None:
             self.update_layout(title="x0=" + str(np.round(x0, 3)) + ", gamma =" +
-                               str(np.round(gamma, 3)) + ",<br> Iterationen=" + str(Iterationen) +
+                               str(np.round(gamma, 3)) + ",<br> iterations=" + str(iterations) +
                               ", f(x)=" + str(np.round(f_x[-1], 3)) + ", x=" + str(np.round(self.result, 3)))
         else:
             self.update_layout(title="x0=" + str(np.round(x0, 3)) + ", gamma =" +
-                               str(np.round(gamma, 3)) + ",<br> Iterationen=" + str(Iterationen) +
+                               str(np.round(gamma, 3)) + ",<br> iterations=" + str(iterations) +
                               ", f(x)=" + str(np.round(f_x[-1], 3)) + ", h(x) = "
                               + str(np.round(Nebenbedingung(self.result), 3))
                               + ",<br> x=" + str(np.round(self.result, 3)))
@@ -107,9 +107,9 @@ def contour_zoom(self, xmin, xmax, ymin, ymax, function):
         self.update_layout(xaxis_range=[xmin, xmax])
         self.update_layout(yaxis_range=[ymin, ymax])

-    def add_gradient_descent_surface(self, x0, function, grad, gamma=1, Iterationen=10, color=None, Nebenbedingung=None):
-        x = np.zeros(shape=(Iterationen + 1, 2))
-        f_x = np.zeros(Iterationen + 1)
+    def add_gradient_descent_surface(self, x0, function, grad, gamma=1, iterations=10, color=None, Nebenbedingung=None):
+        x = np.zeros(shape=(iterations + 1, 2))
+        f_x = np.zeros(iterations + 1)
         x[0, :] = np.array(x0)
         f_x[0] = np.round(function(x[0, :]), 3)
         for i in range(Iterationen):
