@@ -4,17 +4,23 @@ def sigmoid(z):
     try:
         return 1 / (1 + np.exp(-z))
     except OverflowError as e:
+<<<<<<< issue_2_branch
+=======
+        print(f"OverflowError in sigmoid: {e}")
+>>>>>>> main
         return 1.0 if z > 0 else 0.0
 
 class LogisticRegression:
-    def __init__(self, learning_rate=0.01, epochs=50, batch_size=4, regularization_strength=0.01, use_regularization=True):
+    def __init__(self, learning_rate=0.01, epochs=50, batch_size=4, regularization_strength=0.01, use_regularization=True, learning_rate_decay=0.99):
         self.learning_rate = learning_rate
         self.epochs = epochs
         self.batch_size = batch_size
         self.regularization_strength = regularization_strength
         self.use_regularization = use_regularization
+        self.learning_rate_decay = learning_rate_decay
 
     def fit(self, X, y):
+<<<<<<< issue_2_branch
         try:
             n_samples, n_features = X.shape
             self.weights = np.zeros(n_features)  # Corrected weight initialization
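A note on the conflicted `except` branch above: `np.exp` does not normally raise `OverflowError` for large inputs; it returns `inf` and emits a `RuntimeWarning`, so this handler is unlikely to fire at all. A minimal sketch of a clipping-based alternative, with `stable_sigmoid` as an illustrative name rather than part of this PR:

```python
import numpy as np

def stable_sigmoid(z):
    # np.exp overflows to inf with a RuntimeWarning instead of raising
    # OverflowError, so clip the input to keep exp() inside float64 range;
    # sigmoid already saturates to 0/1 (to machine precision) well before
    # |z| = 500, so the clipped result is numerically unchanged.
    z = np.clip(z, -500.0, 500.0)
    return 1.0 / (1.0 + np.exp(-z))
```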
@@ -30,6 +36,14 @@ def fit(self, X, y):
                 for i in range(0, n_samples, self.batch_size):
                     X_batch = X_shuffled[i:i + self.batch_size]
                     y_batch = y_shuffled[i:i + self.batch_size]
+=======
+        n_samples, n_features = X.shape
+        self.weights = np.random.randn(n_features)  # Corrected weight initialization
+        self.bias = 0  # Corrected bias initialization
+
+        prev_weights = np.zeros(n_features)
+        prev_bias = 0
+>>>>>>> main
 
 
                     linear_model = np.dot(X_batch, self.weights) + self.bias
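For orientation, the issue_2_branch side of this hunk slices shuffled copies of the data into fixed-size batches. A self-contained sketch of that loop, with `iter_minibatches` and its `seed` argument as illustrative names not present in the PR:

```python
import numpy as np

def iter_minibatches(X, y, batch_size, seed=None):
    # Shuffle once, then yield contiguous slices; the last batch may be
    # shorter when len(X) is not a multiple of batch_size.
    rng = np.random.default_rng(seed)
    idx = rng.permutation(len(X))
    X_shuffled, y_shuffled = X[idx], y[idx]
    for i in range(0, len(X), batch_size):
        yield X_shuffled[i:i + batch_size], y_shuffled[i:i + batch_size]
```

The two sides also disagree on initialization (`np.zeros` versus `np.random.randn`). Logistic regression has a convex loss, so either converges; zero initialization is the conventional choice here, since no symmetry needs breaking.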
@@ -41,12 +55,19 @@ def fit(self, X, y):
                     if self.use_regularization:
                         dw += (self.regularization_strength * self.weights)  # Corrected regularization term
 
+<<<<<<< issue_2_branch
                     self.weights -= self.learning_rate * dw
                     self.bias -= self.learning_rate * db  # Corrected bias update logic
+=======
+            if self.use_regularization:
+                dw += (self.regularization_strength * self.weights)  # Corrected regularization term
+                dw += (self.regularization_strength * self.bias)
+>>>>>>> main
 
                 if np.allclose(prev_weights, self.weights, rtol=1e-05):  # Corrected stopping condition
                     break
 
+<<<<<<< issue_2_branch
                 prev_weights = self.weights
 
         except ValueError as e:
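Note that the `main` side of this hunk folds `regularization_strength * self.bias` into `dw`, mixing the bias into the weight gradient; conventionally an L2 penalty applies to the weights only. A minimal sketch of one update step as the issue_2_branch side applies it, assuming binary cross-entropy gradients (the `dw`/`db` computation is outside these hunks) and treating `sgd_step`, `y_hat`, and the other argument names as illustrative:

```python
import numpy as np

def sgd_step(weights, bias, X_batch, y_batch, y_hat, lr, reg_strength):
    # Binary cross-entropy gradients with an L2 penalty on the weights only;
    # the bias is conventionally left unregularized.
    error = y_hat - y_batch
    dw = X_batch.T @ error / len(y_batch) + reg_strength * weights
    db = error.mean()
    return weights - lr * dw, bias - lr * db
```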
@@ -60,6 +81,18 @@ def fit(self, X, y):
 
         except Exception as e:
             print(f"Unexpected error in fit method: {e}")
+=======
+            self.learning_rate *= self.learning_rate_decay
+
+            if np.allclose(prev_weights, self.weights, rtol=1e-05):  # Corrected stopping condition
+                break
+
+            prev_weights = np.copy(self.weights)
+            prev_bias = self.bias
+
+            print(f"Epoch {epoch}: Weights change: {np.linalg.norm(dw)}, Bias change: {abs(db)}")
+
+>>>>>>> main
 
     def predict(self, X):
         try:
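One difference worth flagging for whoever resolves this conflict: issue_2_branch records `prev_weights = self.weights`, while `main` records `np.copy(self.weights)`. Because `-=` updates NumPy arrays in place, the first form aliases the array, so the `np.allclose` stopping check compares the weights with themselves and fires on the next epoch regardless of convergence. A small demonstration:

```python
import numpy as np

w = np.zeros(3)
prev = w                     # alias: both names share one buffer
w -= 0.1                     # in-place update mutates prev as well
print(np.allclose(prev, w))  # True: the stopping check always passes

w = np.zeros(3)
prev = np.copy(w)            # snapshot: independent buffer
w -= 0.1
print(np.allclose(prev, w))  # False: convergence is actually tested
```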