Skip to content

Instantly share code, notes, and snippets.

@pomo-mondreganto
Created April 11, 2020 11:06
Show Gist options
  • Save pomo-mondreganto/40802a8255783d935f5b253d6e2cef85 to your computer and use it in GitHub Desktop.
БДЗ задание 15(4) (homework assignment, problem 15(4))
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import math"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"EPS = 1e-3"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def f(x, y):\n",
" return 3 * (x ** 4) + (y ** 4) + 2 * (x ** 2) * (y ** 2) + 4 * (y ** 3) - 4 * (x ** 2) * y + 4 * x - 5 * y\n",
"\n",
"def dfdx(x, y):\n",
" return 12 * (x ** 3) + 4 * x * (y ** 2) - 8 * x * y + 4\n",
"\n",
"def dfdy(x, y):\n",
" return 4 * (y ** 3) + 4 * (x ** 2) * y + 12 * (y ** 2) - 4 * (x ** 2) - 5"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def grad(x, y):\n",
"    \"\"\"Gradient of f at (x, y), returned as the tuple (df/dx, df/dy).\"\"\"\n",
"    return dfdx(x, y), dfdy(x, y)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def step(g, x, y, L):\n",
" nx = x - g[0] * L\n",
" ny = y - g[1] * L\n",
" return nx, ny"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"def norm(g):\n",
" return math.sqrt(sum([x ** 2 for x in g]))"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"def descent(x0, y0, L):\n",
" g = grad(x0, y0)\n",
" x = x0\n",
" y = y0\n",
" it = 0\n",
" while True:\n",
" if it % 10000 == 0:\n",
" print(f'Iteration {it}, x={x}, y={y}, grad={g}, f={f(x, y)}')\n",
" nx, ny = step(g, x, y, L)\n",
" x = nx\n",
" y = ny\n",
" g = grad(x, y)\n",
" if norm(g) < EPS:\n",
" print(f'Iteration {it}, x={x}, y={y}, grad={g}, f={f(x, y)}')\n",
" print('Converges!')\n",
" break\n",
" it += 1\n",
" if it > 100000:\n",
" print('Does not converge!')\n",
" break\n",
" return x, y"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"L = [1, 0.5, 0.1, 0.01, 0.001]\n",
"P = [(0, 0), (-10, -10), (-1, -1)]"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Running descent with L=1, (x0, y0)=(0, 0)\n",
"Iteration 0, x=0, y=0, grad=(4, -5), f=0\n",
"\n",
"Bad L, overflow\n",
"--------------------------------------\n",
"Running descent with L=1, (x0, y0)=(-10, -10)\n",
"Iteration 0, x=-10, y=-10, grad=(-16796, -7205), f=60010\n",
"\n",
"Bad L, overflow\n",
"--------------------------------------\n",
"Running descent with L=1, (x0, y0)=(-1, -1)\n",
"Iteration 0, x=-1, y=-1, grad=(-20, -5), f=7\n",
"\n",
"Bad L, overflow\n",
"--------------------------------------\n",
"Running descent with L=0.5, (x0, y0)=(0, 0)\n",
"Iteration 0, x=0, y=0, grad=(4, -5), f=0\n",
"\n",
"Bad L, overflow\n",
"--------------------------------------\n",
"Running descent with L=0.5, (x0, y0)=(-10, -10)\n",
"Iteration 0, x=-10, y=-10, grad=(-16796, -7205), f=60010\n",
"\n",
"Bad L, overflow\n",
"--------------------------------------\n",
"Running descent with L=0.5, (x0, y0)=(-1, -1)\n",
"Iteration 0, x=-1, y=-1, grad=(-20, -5), f=7\n",
"\n",
"Bad L, overflow\n",
"--------------------------------------\n",
"Running descent with L=0.1, (x0, y0)=(0, 0)\n",
"Iteration 0, x=0, y=0, grad=(4, -5), f=0\n",
"Iteration 10000, x=-0.35067008388560295, y=0.9176743859997663, grad=(4.875712929039104, 8.156212046765845), f=-2.3895923958343386\n",
"Iteration 20000, x=-0.4132861718762124, y=0.8470767819126798, grad=(4.767386639128525, 5.937231171454904), f=-3.188522171927465\n",
"Iteration 30000, x=-0.5660757947869758, y=0.8111231963037333, grad=(4.0067932190549245, 4.7875733963186455), f=-4.062411502709911\n",
"Iteration 40000, x=-0.5940522508421093, y=0.8784581954571898, grad=(3.8254279075315134, 6.800283544635626), f=-3.783169653350494\n",
"Iteration 50000, x=-0.5490573082103838, y=0.9242479502495564, grad=(4.197374778238233, 8.31756234610086), f=-3.256482215922709\n",
"Iteration 60000, x=-0.45114837745827985, y=0.950781790371166, grad=(4.6983289505498895, 9.245735428757925), f=-2.5851422932397794\n",
"Iteration 70000, x=-0.3537424605842924, y=0.9444252979151173, grad=(4.879418284829811, 9.044932206085708), f=-2.174580475739382\n",
"Iteration 80000, x=-0.3744169913637305, y=0.8969223873608025, grad=(4.851889646929045, 7.482023998450982), f=-2.6673595856937804\n",
"Iteration 90000, x=-0.4695965647692875, y=0.8234607468509693, grad=(4.5771737954620555, 5.2148429069259645), f=-3.5837809688288367\n",
"Iteration 100000, x=-0.5955698334460018, y=0.8323762089631064, grad=(3.780334751803412, 5.383222842551003), f=-4.069301672678987\n",
"Does not converge!\n",
"\n",
"Finished, (x, y)=(-0.9736033086263429, 0.29405392470800606), f=-3.5109517213978343, ||grad||=8.304310242376387\n",
"--------------------------------------\n",
"Running descent with L=0.1, (x0, y0)=(-10, -10)\n",
"Iteration 0, x=-10, y=-10, grad=(-16796, -7205), f=60010\n",
"\n",
"Bad L, overflow\n",
"--------------------------------------\n",
"Running descent with L=0.1, (x0, y0)=(-1, -1)\n",
"Iteration 0, x=-1, y=-1, grad=(-20, -5), f=7\n",
"Iteration 10000, x=-0.9644502133446499, y=0.33329379584626, grad=(-4.62214557593631, -5.999472519530504), f=-3.8016358972428135\n",
"Iteration 20000, x=-0.9761620421596966, y=0.2069582308914404, grad=(-5.713172504583184, -7.473295673548861), f=-2.885343506967941\n",
"Iteration 30000, x=-0.9710461701529254, y=0.09680596137275344, grad=(-6.271926287899744, -8.290511863138143), f=-2.0445952107364214\n",
"Iteration 40000, x=-0.9255500063535465, y=0.031096103288375976, grad=(-5.2877210650357505, -8.30829435593111), f=-1.760944919425615\n",
"Iteration 50000, x=-0.8463040388824128, y=0.03535426665202146, grad=(-3.0386530396390565, -7.748459034224609), f=-2.1223474181058535\n",
"Iteration 60000, x=-0.8567324745737663, y=0.1408807579414718, grad=(-2.6484409519282703, -7.272988445786792), f=-2.8880139821110107\n",
"Iteration 70000, x=-0.9214643616144235, y=0.29572635710466144, grad=(-3.5312796176319523, -6.23908649394294), f=-3.7463831838879957\n",
"Iteration 80000, x=-0.9729892537101448, y=0.3035757982159427, grad=(-5.04930964253116, -6.419434438666849), f=-3.5757628771869454\n",
"Iteration 90000, x=-0.9787722303246065, y=0.15682499926459936, grad=(-6.120228136920698, -7.9204731739798415), f=-2.483745763117021\n",
"Iteration 100000, x=-0.9548475140584521, y=0.07511465904947978, grad=(-5.894566188411419, -8.303595018397882), f=-1.9631107317493008\n",
"Does not converge!\n",
"\n",
"Finished, (x, y)=(-0.36539089521731016, 0.905474160889268), f=-2.5583523444320524, ||grad||=9.155924556036586\n",
"--------------------------------------\n",
"Running descent with L=0.01, (x0, y0)=(0, 0)\n",
"Iteration 0, x=0, y=0, grad=(4, -5), f=0\n",
"Iteration 55, x=-0.8314434721207791, y=0.6413543035577138, grad=(0.000685469546442441, -0.00045368418762947726), f=-5.079179693802791\n",
"Converges!\n",
"\n",
"Finished, (x, y)=(-0.8314434721207791, 0.6413543035577138), f=-5.079179693802791, ||grad||=0.0008220084191813515\n",
"--------------------------------------\n",
"Running descent with L=0.01, (x0, y0)=(-10, -10)\n",
"Iteration 0, x=-10, y=-10, grad=(-16796, -7205), f=60010\n",
"\n",
"Bad L, overflow\n",
"--------------------------------------\n",
"Running descent with L=0.01, (x0, y0)=(-1, -1)\n",
"Iteration 0, x=-1, y=-1, grad=(-20, -5), f=7\n",
"Iteration 71, x=-0.07255485582916145, y=-2.842798940663498, grad=(-6.422152997398456e-05, 0.0007679439528605059), f=-12.516844245927643\n",
"Converges!\n",
"\n",
"Finished, (x, y)=(-0.07255485582916145, -2.842798940663498), f=-12.516844245927643, ||grad||=0.000770624629535819\n",
"--------------------------------------\n",
"Running descent with L=0.001, (x0, y0)=(0, 0)\n",
"Iteration 0, x=0, y=0, grad=(4, -5), f=0\n",
"Iteration 591, x=-0.8314366675970023, y=0.6413500445456628, grad=(0.0008209333741016067, -0.0005358076126578126), f=-5.079179686570482\n",
"Converges!\n",
"\n",
"Finished, (x, y)=(-0.8314366675970023, 0.6413500445456628), f=-5.079179686570482, ||grad||=0.0009803169908228222\n",
"--------------------------------------\n",
"Running descent with L=0.001, (x0, y0)=(-10, -10)\n",
"Iteration 0, x=-10, y=-10, grad=(-16796, -7205), f=60010\n",
"Iteration 428, x=-0.07255547408629767, y=-2.842791542456412, grad=(-8.188324018121307e-05, 0.000979424570277665), f=-12.516844239418774\n",
"Converges!\n",
"\n",
"Finished, (x, y)=(-0.07255547408629767, -2.842791542456412), f=-12.516844239418774, ||grad||=0.0009828414693561535\n",
"--------------------------------------\n",
"Running descent with L=0.001, (x0, y0)=(-1, -1)\n",
"Iteration 0, x=-1, y=-1, grad=(-20, -5), f=7\n",
"Iteration 732, x=-0.07255549242021682, y=-2.842791330731959, grad=(-8.242407373870719e-05, 0.0009854753316220766), f=-12.516844239209284\n",
"Converges!\n",
"\n",
"Finished, (x, y)=(-0.07255549242021682, -2.842791330731959), f=-12.516844239209284, ||grad||=0.0009889162538695203\n",
"--------------------------------------\n"
]
}
],
"source": [
"for l in L:\n",
" for (x, y) in P:\n",
" print(f'Running descent with L={l}, (x0, y0)={(x, y)}')\n",
" try:\n",
" xr, yr = descent(x, y, l)\n",
" except OverflowError:\n",
" print('\\nBad L, overflow')\n",
" else:\n",
" print(f'\\nFinished, (x, y)={(xr, yr)}, f={f(xr, yr)}, ||grad||={norm(grad(xr, yr))}')\n",
" print('--------------------------------------')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Dirty 3.7",
"language": "python",
"name": "dirty3.7"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.4"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment