generated from fastai/nbdev_template
-
Notifications
You must be signed in to change notification settings - Fork 29
Expand file tree
/
Copy pathlinear_regression.py
More file actions
61 lines (50 loc) · 1.67 KB
/
linear_regression.py
File metadata and controls
61 lines (50 loc) · 1.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
# AUTOGENERATED! DO NOT EDIT! File to edit: 03_linear_regression.ipynb (unless otherwise specified).
__all__ = ['generate_data', 'make_point_plot', 'make_line_plot', 'gradient_descent']
# Cell
import numpy as np
import pandas as pd
import altair as alt
def generate_data(n):
    """Create a synthetic linear-regression dataset of `n` samples.

    Features are drawn uniformly from [0, 2); targets follow the line
    y = 5 + 3*x with additive standard-normal noise.

    Returns a tuple ``(X, y)`` of two ``(n, 1)`` arrays.
    """
    # NOTE: draw noise first, then features, to keep the RNG call order stable.
    noise = np.random.randn(n, 1)
    features = 2 * np.random.rand(n, 1)
    targets = 5 + 3 * features + noise
    return features, targets
# Cell
def make_point_plot(x, y):
    """Build an Altair scatter chart of the (x, y) data.

    Both arrays are flattened into the columns of a DataFrame and
    rendered as filled red points of size 50.
    """
    frame = pd.DataFrame({'x': x.flatten(), 'y': y.flatten()})
    points = alt.Chart(frame).mark_point(size=50, color='red', filled=True)
    return points.encode(x="x", y="y")
# Cell
def make_line_plot(x, y):
    """Build an Altair line chart of the (x, y) data.

    Both arrays are flattened into the columns of a DataFrame and
    rendered as a line of width 3.
    """
    frame = pd.DataFrame({'x': x.flatten(), 'y': y.flatten()})
    return alt.Chart(frame).mark_line(size=3).encode(x="x", y="y")
# Cell
def gradient_descent(data, w_0_t, w_1_t, learning_rate, num_iterations):
    """Fit intercept/slope for y = w_0 + w_1 * x by batch gradient descent.

    Parameters
    ----------
    data : tuple of (X, y), each an (N, 1) array
    w_0_t, w_1_t : initial intercept and slope
    learning_rate : step size for each update
    num_iterations : number of full-batch update steps

    Returns
    -------
    (w_0, w_1) : the weights after `num_iterations` steps, each a
        shape-(1,) array (matching the original implementation).
    """
    (X, y) = data
    N = len(X)
    # BUGFIX: start the iterates from the caller-supplied weights. The old
    # code initialized w_0 = w_1 = 0 and stepped from 0 while evaluating the
    # gradient at (w_0_t, w_1_t), so any nonzero starting point was ignored
    # in the very first update.
    w_0, w_1 = w_0_t, w_1_t
    for _ in range(num_iterations):
        # Residuals of the current fit, shape (N, 1).
        residual = y - (w_0 + w_1 * X)
        # d(MSE)/dw_0 = -2/N * sum(residual); sum over axis 0 yields shape (1,).
        w0_grad = np.sum(-2 * residual, axis=0) / N
        # d(MSE)/dw_1 = -2/N * sum(x_i * residual_i); X.T @ residual does the sum.
        w1_grad = np.sum(-2 * np.dot(X.T, residual), axis=0) / N
        w_0 = w_0 - learning_rate * w0_grad
        w_1 = w_1 - learning_rate * w1_grad
    return w_0, w_1