Commit

Merge pull request #16 from rugbedbugg/issue_3_branch
fixed issue #3
aditisaxena259 authored Oct 1, 2024
2 parents ce6aac9 + d2ab1f2 commit d7cf6e8
Showing 1 changed file with 17 additions and 0 deletions.
17 changes: 17 additions & 0 deletions neo.py
@@ -11,7 +11,21 @@ def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))  # logistic sigmoid; a 0/1 step here would make the log terms in compute_loss undefined

class LogisticRegression:
    def compute_loss(self, X, y):
        linear_model = np.dot(X, self.weights) + self.bias
        y_predicted = sigmoid(linear_model)
        log_loss = -np.mean(y * np.log(y_predicted) + (1 - y) * np.log(1 - y_predicted))

        if self.use_regularization:
            log_loss += (self.regularization_strength / 2) * np.sum(np.square(self.weights))  # L2 regularization

        return log_loss

    def __init__(self, learning_rate=0.01, epochs=50, batch_size=4, regularization_strength=0.01, use_regularization=True, learning_rate_decay=0.99):
        self.learning_rate = learning_rate
        self.epochs = epochs
        self.batch_size = batch_size
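The new compute_loss evaluates binary cross-entropy with an optional L2 penalty. Because sigmoid outputs can round to exactly 0 or 1 in floating point, np.log can return -inf; a standalone sketch of the same loss (not part of neo.py, with clipping added as a defensive assumption) looks like this:

import numpy as np

def stable_log_loss(X, y, weights, bias, reg_strength=0.01, eps=1e-12):
    # Same binary cross-entropy as compute_loss, with predictions clipped away from 0 and 1.
    y_predicted = 1.0 / (1.0 + np.exp(-(np.dot(X, weights) + bias)))
    y_predicted = np.clip(y_predicted, eps, 1 - eps)  # avoid log(0)
    loss = -np.mean(y * np.log(y_predicted) + (1 - y) * np.log(1 - y_predicted))
    loss += (reg_strength / 2) * np.sum(np.square(weights))  # L2 penalty
    return loss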
@@ -84,6 +98,9 @@ def fit(self, X, y):
            self.learning_rate *= self.learning_rate_decay  # per-epoch multiplicative decay

            loss = self.compute_loss(X, y)
            print(f'Epoch {epoch+1}/{self.epochs}, Loss: {loss:.4f}')

            if np.allclose(prev_weights, self.weights, rtol=1e-05):  # Corrected stopping condition
                break

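Together these additions shrink the learning rate geometrically each epoch, log the full-dataset loss, and stop early once the weights stop moving. A standalone sketch (using the assumed default values, not repository code) shows how much a 0.99 decay compounds over the default 50 epochs:

learning_rate = 0.01        # default from __init__
for epoch in range(50):     # default epochs
    learning_rate *= 0.99   # default learning_rate_decay
print(f'final learning rate: {learning_rate:.6f}')  # ~0.006050, i.e. 0.01 * 0.99**50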
