Commit a74d3fe

Authored by Elstuhn

Elstuhn | gradient-descent | Added main.py and readme (#246)
Add gradient descent implementation

1 parent 638b712 · commit a74d3fe

File tree

2 files changed: +55 −0 lines

gradient-descent/README.md

Lines changed: 2 additions & 0 deletions

# Made by [Elstuhn](https://github.com/Elstuhn)
# Gradient Descent with Approximation in Python

gradient-descent/main.py

Lines changed: 53 additions & 0 deletions

### Made By Elston
### https://github.com/Elstuhn
### Gradient descent using approximation

from functools import cache

import sympy


@cache
def findgrad(x: float, f, step: float) -> float:
    # Forward-difference approximation of the derivative: (f(x + h) - f(x)) / h.
    return (f(x + step) - f(x)) / step


def cleanfunc(func: str):
    """Parse a function string such as "3x^2 + 2x" into a callable f(x)."""
    x = sympy.Symbol("x")
    # Rewrite caret exponents as Python's ** so "x^2" parses as a power,
    # not as a bitwise XOR.
    func = func.replace("^", "**")
    try:
        return sympy.lambdify(x, sympy.sympify(func))
    except sympy.SympifyError:
        pass
    # Parsing failed: insert explicit multiplication signs for implicit
    # products such as "3x", then try again.
    chars = list(func)
    inds = [
        i for i, ch in enumerate(chars)
        if i != 0 and ch == "x" and chars[i - 1].isdigit()
    ]
    prev = 0
    result = ""
    for index in inds:
        result += "".join(chars[prev:index]) + "*"
        prev = index
    result += "".join(chars[prev:])
    return sympy.lambdify(x, sympy.sympify(result))


def grad_desc(x: float, func: str, step: float = 0.05, lr: float = 3e-4, threshold: float = 0.001) -> float:
    """
    Updates x along the approximated gradient until the gradient's
    magnitude falls below the threshold.
    """
    f = cleanfunc(func)
    grad = findgrad(x, f, step)
    # Compare the gradient's magnitude, not its raw value, so descent
    # also runs when the slope at the starting point is negative.
    while abs(grad) > threshold:
        x -= grad * lr
        grad = findgrad(x, f, step)
    return x
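For context, a minimal usage sketch (not part of the commit): it assumes main.py is importable from the working directory, since the folder name gradient-descent is not a valid Python module path, and it minimises f(x) = 3x^2 + 2x from a starting point of x = 5.

# Usage sketch: assumes we run from inside the gradient-descent folder,
# so main.py is importable directly.
from main import grad_desc

# Minimise f(x) = 3x^2 + 2x; the exact minimiser is x = -1/3.
x_min = grad_desc(5.0, "3x^2 + 2x")
print(x_min)  # about -0.358

For this quadratic the forward difference adds 3·step to the true slope, so the stopping point lands slightly past the exact minimiser at x = -1/3; shrinking step and threshold moves the result closer to it.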
