MultipleLR.py
import numpy as np  # type: ignore

# Training data generated from the target function y = 2a + 3b + 4
train_x = np.array([[0, 0], [0, 1], [1, 0], [1, 1], [2, 0], [0, 2], [2, 2], [3, 0], [0, 3], [3, 3]])
train_y = np.array([4, 7, 6, 9, 8, 10, 14, 10, 13, 19])
test = np.array([[4, 0], [0, 4], [4, 4], [5, 0], [0, 5], [5, 5]])
m = len(train_x)  # number of training examples
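# Quick sanity check (added sketch): the labels are noise-free, so they
# should match the stated target function exactly.
assert np.array_equal(train_x @ np.array([2, 3]) + 4, train_y)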
# Cost function: mean squared error over the training set,
# J(w, b) = (1 / 2m) * sum_i (w . x_i + b - y_i)^2
def cost_func(w, b):
    total = 0
    for i in range(m):
        total += ((np.dot(w, train_x[i]) + b) - train_y[i]) ** 2
    print(f'Current value: w = {w}, b = {b}')
    print(f'Cost Function: {total / (2 * m)}')
    return total / (2 * m)
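# Equivalent vectorized cost (added sketch): computes the same J(w, b)
# with NumPy broadcasting instead of a Python loop.
def cost_func_vec(w, b):
    errors = train_x @ w + b - train_y  # residual for every training example
    return np.dot(errors, errors) / (2 * m)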
# Gradient descent update rules: parameter := parameter - a * (its partial derivative)
a = 0.01  # learning rate
def gradient_descent_b(b, CostDer_b):
    return b - a * CostDer_b
def gradient_descent_w(w, CostDer_w):
    return w - a * CostDer_w
# Partial derivatives of the cost with respect to w (one component per feature) and b
def PartDer_w(w, b):
    grads = []
    for j in range(train_x.shape[1]):
        total = 0
        for i in range(m):
            total += ((np.dot(w, train_x[i]) + b) - train_y[i]) * train_x[i][j]
        grads.append(total / m)
    return np.array(grads)
def PartDer_b(w, b):
    total = 0
    for i in range(m):
        total += (np.dot(train_x[i], w) + b) - train_y[i]
    return total / m
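# Equivalent vectorized gradients (added sketch): one matrix product gives
# the same values as PartDer_w and PartDer_b.
def part_der_vec(w, b):
    errors = train_x @ w + b - train_y
    grad_w = train_x.T @ errors / m  # shape (2,), matches PartDer_w
    grad_b = errors.mean()           # scalar, matches PartDer_b
    return grad_w, grad_b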
# Train with batch gradient descent, updating w and b simultaneously
# from the gradients at the current parameters.
w = np.array([0.0, 0.0])
b = 0
for _ in range(5000):
    temp_w = gradient_descent_w(w, PartDer_w(w, b))
    temp_b = gradient_descent_b(b, PartDer_b(w, b))
    w = temp_w
    b = temp_b
    current_cost = cost_func(w, b)

# Predict on the test inputs; for y = 2a + 3b + 4 the learned model
# should give approximately 12, 16, 24, 14, 19, 29.
for x in test:
    print(f"Value for {x}: {np.dot(w, x) + b}")