Skip to content

Commit

Permalink
gradient descent tutorial
Browse files Browse the repository at this point in the history
  • Loading branch information
dhavalsays committed Jul 22, 2018
1 parent 317b6e9 commit 27c461e
Show file tree
Hide file tree
Showing 6 changed files with 179 additions and 2 deletions.
2 changes: 1 addition & 1 deletion .idea/misc.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion .idea/py.iml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

53 changes: 53 additions & 0 deletions ML/3_gradient_descent/Exercise/ex_gradient_descent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
''' Good students always try to solve exercise on their own first and then look at the ready made solution
I know you are an awesome student !! :)
Hence you will look into this code only after you have done your due diligence.
If you are not an awesome student who is full of laziness then only you will come here
without writing single line of code on your own. In that case anyways you are going to
face my anger with fire and fury !!!
'''

import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
import math

def predict_using_sklean():
    """Fit an ordinary least-squares line with sklearn and report its parameters.

    Reads ``test_scores.csv`` from the working directory, regresses the
    ``cs`` column on ``math``, and returns ``(coef_, intercept_)`` so the
    result can be compared against the hand-rolled gradient descent below.
    """
    scores = pd.read_csv("test_scores.csv")
    model = LinearRegression()
    model.fit(scores[['math']], scores.cs)
    return model.coef_, model.intercept_

def gradient_descent(x, y):
    """Fit y ~ m*x + b by batch gradient descent on mean squared error.

    Runs up to one million small steps, stopping early once the cost
    stops changing (successive costs equal within ``rel_tol``).  Prints
    the parameters and cost at each step and returns ``(m, b)``.
    """
    slope = 0
    intercept = 0
    max_iterations = 1000000
    n = len(x)
    step_size = 0.0002

    previous_cost = 0

    for iteration in range(max_iterations):
        predictions = slope * x + intercept
        residuals = y - predictions
        # Mean squared error over the n samples.
        cost = (1/n) * sum(value**2 for value in residuals)
        # Partial derivatives of the cost with respect to slope and intercept.
        slope_grad = -(2/n) * sum(x * residuals)
        intercept_grad = -(2/n) * sum(residuals)
        slope = slope - step_size * slope_grad
        intercept = intercept - step_size * intercept_grad
        # Stop once the cost has plateaued (rel_tol below double precision,
        # so this effectively requires the cost to repeat exactly).
        if math.isclose(cost, previous_cost, rel_tol=1e-20):
            break
        previous_cost = cost
        print ("m {}, b {}, cost {}, iteration {}".format(slope, intercept, cost, iteration))

    return slope, intercept

if __name__ == "__main__":
    # Load the dataset and fit the same line two ways, printing both
    # results so the hand-rolled optimizer can be checked against sklearn.
    scores = pd.read_csv("test_scores.csv")
    math_scores = np.array(scores.math)
    cs_scores = np.array(scores.cs)

    m, b = gradient_descent(math_scores, cs_scores)
    print("Using gradient descent function: Coef {} Intercept {}".format(m, b))

    m_sklearn, b_sklearn = predict_using_sklean()
    print("Using sklearn: Coef {} Intercept {}".format(m_sklearn, b_sklearn))

11 changes: 11 additions & 0 deletions ML/3_gradient_descent/Exercise/test_scores.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
name,math,cs
david,92,98
laura,56,68
sanjay,88,81
wei,70,80
jeff,80,83
aamir,49,52
venkat,65,66
virat,35,30
arthur,66,68
paul,67,73
92 changes: 92 additions & 0 deletions ML/3_gradient_descent/gradient_descent.ipynb

Large diffs are not rendered by default.

21 changes: 21 additions & 0 deletions ML/3_gradient_descent/gradient_descent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import numpy as np

def gradient_descent(x, y):
    """Fit y ~ m*x + b by batch gradient descent on mean squared error.

    Prints m, b and the cost at every iteration so the descent can be
    watched, and returns the final ``(m, b)`` pair.  Previously the
    function computed the coefficients but returned None; returning them
    is backward-compatible and matches the exercise version of this
    tutorial, which returns its coefficients.
    """
    m_curr = b_curr = 0
    iterations = 10000
    n = len(x)
    learning_rate = 0.08

    for i in range(iterations):
        y_predicted = m_curr * x + b_curr
        # Mean squared error over the n samples.
        cost = (1/n) * sum([val**2 for val in (y-y_predicted)])
        # Partial derivatives of the cost w.r.t. m and b.
        md = -(2/n)*sum(x*(y-y_predicted))
        bd = -(2/n)*sum(y-y_predicted)
        m_curr = m_curr - learning_rate * md
        b_curr = b_curr - learning_rate * bd
        print ("m {}, b {}, cost {} iteration {}".format(m_curr,b_curr,cost, i))

    # Return the fitted coefficients so callers can use them programmatically.
    return m_curr, b_curr

# Known linear data: y = 2*x + 3, so gradient descent should recover
# m close to 2 and b close to 3.
x = np.array([1,2,3,4,5])
y = np.array([5,7,9,11,13])

gradient_descent(x,y)

0 comments on commit 27c461e

Please sign in to comment.