Skip to content

Commit 27c461e

Browse files
committed
gradient descent tutorial
1 parent 317b6e9 commit 27c461e

File tree

6 files changed

+179
-2
lines changed

6 files changed

+179
-2
lines changed

.idea/misc.xml

+1-1
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.idea/py.iml

+1-1
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
''' Good students always try to solve exercise on their own first and then look at the ready made solution
2+
I know you are an awesome student !! :)
3+
Hence you will look into this code only after you have done your due diligence.
4+
If you are not an awesome student who is full of laziness then only you will come here
5+
without writing a single line of code on your own. In that case anyway you are going to
6+
face my anger with fire and fury !!!
7+
'''
8+
9+
import numpy as np
10+
import pandas as pd
11+
from sklearn.linear_model import LinearRegression
12+
import math
13+
14+
def predict_using_sklean():
    """Fit a one-feature linear regression with scikit-learn.

    Reads ``test_scores.csv`` (columns ``math`` and ``cs``) from the working
    directory, regresses ``cs`` on ``math``, and returns the fitted
    ``(coef_, intercept_)`` pair for comparison against the hand-rolled
    gradient-descent result.

    NOTE: the name is misspelled ("sklean"), but it is kept as-is because
    the script's __main__ block calls it by this exact name.
    """
    scores = pd.read_csv("test_scores.csv")
    model = LinearRegression()
    model.fit(scores[['math']], scores.cs)
    return model.coef_, model.intercept_
19+
20+
def gradient_descent(x, y, iterations=1000000, learning_rate=0.0002):
    """Fit a simple linear model y ~ m*x + b by batch gradient descent.

    Args:
        x: 1-D numpy array of feature values.
        y: 1-D numpy array of target values, same length as ``x``.
        iterations: maximum number of update steps. Default preserves the
            original hard-coded value.
        learning_rate: step size for each update. Default preserves the
            original hard-coded value.

    Returns:
        Tuple ``(m, b)``: the fitted slope and intercept.

    Raises:
        ValueError: if ``x`` is empty (the update would divide by zero).
    """
    n = len(x)
    if n == 0:
        raise ValueError("x must contain at least one sample")

    m_curr = 0
    b_curr = 0
    cost_previous = 0

    for i in range(iterations):
        y_predicted = m_curr * x + b_curr
        residual = y - y_predicted
        # Mean squared error; vectorized instead of the original
        # Python-level sum over a list comprehension.
        cost = np.mean(residual ** 2)
        # Partial derivatives of the MSE cost w.r.t. m and b.
        md = -(2 / n) * np.sum(x * residual)
        bd = -(2 / n) * np.sum(residual)
        m_curr = m_curr - learning_rate * md
        b_curr = b_curr - learning_rate * bd
        # Stop once the cost has effectively stopped changing. rel_tol=1e-20
        # is below double precision, so this only fires when the cost is
        # bit-for-bit stable (kept from the original for compatibility).
        if math.isclose(cost, cost_previous, rel_tol=1e-20):
            break
        cost_previous = cost
        print("m {}, b {}, cost {}, iteration {}".format(m_curr, b_curr, cost, i))

    return m_curr, b_curr
42+
43+
if __name__ == "__main__":
    # Load the tutorial data set: math scores as the feature, cs scores
    # as the target.
    frame = pd.read_csv("test_scores.csv")
    math_scores = np.array(frame.math)
    cs_scores = np.array(frame.cs)

    # Fit with the hand-rolled gradient descent ...
    m, b = gradient_descent(math_scores, cs_scores)
    print("Using gradient descent function: Coef {} Intercept {}".format(m, b))

    # ... and compare against scikit-learn's closed-form solution.
    m_sklearn, b_sklearn = predict_using_sklean()
    print("Using sklearn: Coef {} Intercept {}".format(m_sklearn, b_sklearn))
53+
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
name,math,cs
2+
david,92,98
3+
laura,56,68
4+
sanjay,88,81
5+
wei,70,80
6+
jeff,80,83
7+
aamir,49,52
8+
venkat,65,66
9+
virat,35,30
10+
arthur,66,68
11+
paul,67,73

ML/3_gradient_descent/gradient_descent.ipynb

+92
Large diffs are not rendered by default.
+21
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
import numpy as np
2+
3+
def gradient_descent(x, y):
    """Run 10000 steps of batch gradient descent for y ~ m*x + b.

    Prints the current slope, intercept, and mean-squared-error cost at
    every step; returns nothing. ``x`` and ``y`` are numpy arrays of
    equal length.
    """
    m_curr = b_curr = 0
    n = len(x)

    for step in range(10000):
        predictions = m_curr * x + b_curr
        residuals = y - predictions
        # Mean squared error over all samples.
        cost = (1/n) * sum([r**2 for r in residuals])
        # Gradients of the cost w.r.t. slope and intercept.
        slope_grad = -(2/n) * sum(x * residuals)
        intercept_grad = -(2/n) * sum(residuals)
        # Step against the gradient with a fixed learning rate of 0.08.
        m_curr = m_curr - 0.08 * slope_grad
        b_curr = b_curr - 0.08 * intercept_grad
        print ("m {}, b {}, cost {} iteration {}".format(m_curr, b_curr, cost, step))
17+
18+
# Demo data for the line y = 2x + 3; the fit should converge to
# slope ~2 and intercept ~3.
x = np.array([1, 2, 3, 4, 5])
y = np.array([5, 7, 9, 11, 13])

gradient_descent(x, y)

0 commit comments

Comments
 (0)