-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlinear_regression.py
73 lines (41 loc) · 1.24 KB
/
linear_regression.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
import numpy as np
def compute_error(b, m, points):
    """Return the mean squared error of the line y = b*x + m over *points*.

    NOTE: despite the names, this script uses ``b`` as the slope and ``m``
    as the intercept (consistently, throughout the file).

    Args:
        b: slope of the fitted line.
        m: intercept of the fitted line.
        points: array of shape (n, 2) where column 0 is x and column 1 is y.

    Returns:
        Mean of the squared residuals (a NumPy scalar).
    """
    # Vectorized: one NumPy pass instead of a Python-level index loop.
    x = points[:, 0]
    y = points[:, 1]
    return np.mean(np.square(y - (b * x + m)))
def descend(points, initial_m, initial_b, learning_rate, no_of_iterations):
    """Run gradient descent for a fixed number of iterations.

    Repeatedly applies ``gradient_step`` starting from the initial
    parameters and returns the final ``[b, m]`` pair.
    """
    b, m = initial_b, initial_m
    for _ in range(no_of_iterations):
        b, m = gradient_step(b, m, points, learning_rate)
    return [b, m]
def gradient_step(current_m, current_b, points, learning_rate):
    """Perform a single gradient-descent update for y = current_m*x + current_b.

    Gradients of the mean-squared-error loss are accumulated over all points,
    then one step of size ``learning_rate`` is taken against each gradient.

    BUG FIX: the slope gradient was previously *assigned* (``=``) inside the
    loop instead of accumulated (``+=``), so only the last data point ever
    influenced the slope update.

    Returns:
        ``[new_m, new_b]`` — the updated parameters.
    """
    gradient_b = 0.0
    gradient_m = 0.0
    n = float(len(points))
    for x, y in points:
        residual = y - ((current_m * x) + current_b)
        gradient_b += -(2 / n) * residual
        gradient_m += -(2 / n) * x * residual  # was '=': dropped all but the last point
    new_b = current_b - (learning_rate * gradient_b)
    new_m = current_m - (learning_rate * gradient_m)
    return [new_m, new_b]
def run():
    """Load dataset.csv, fit a line by gradient descent, and print the fit.

    Prints the learned slope (``b``), intercept (``m``), and the final mean
    squared error. NOTE: this script uses ``b`` for the slope and ``m`` for
    the intercept throughout.
    """
    # Prep data: each row of dataset.csv is expected to be "x,y".
    points = np.genfromtxt('dataset.csv', delimiter=',')
    # Hyperparameter setup.
    initial_m = 0
    initial_b = 0
    learning_rate = 0.0001
    no_of_iterations = 1000
    # Training.
    b, m = descend(points, initial_m, initial_b, learning_rate, no_of_iterations)
    print(b)
    print(m)
    print(compute_error(b, m, points))


# Guard the entry point so importing this module does not trigger training.
if __name__ == "__main__":
    run()