-
Notifications
You must be signed in to change notification settings - Fork 0
/
LogRegression.py
56 lines (31 loc) · 1.35 KB
/
LogRegression.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import numpy as np
from sklearn.linear_model import Ridge
from sklearn.model_selection import GridSearchCV
class LogRegression(object):
    """Logistic regression classifier trained by batch gradient descent
    with L2 (ridge) regularization.

    Attributes:
        arg: Opaque constructor argument, stored untouched (not used by
            the model itself; kept for backward compatibility).
        theta: Learned weight vector, shape (n_features,), set by ``fit``.
        prob: Probabilities from the most recent ``predict`` call.
        loss: Summed log-loss from the most recent ``log_loss`` call.
    """

    def __init__(self, arg):
        super(LogRegression, self).__init__()
        self.arg = arg
        self.theta = None  # weight vector; populated by fit()
        self.prob = []     # last predicted probabilities; populated by predict()
        self.loss = 0      # last summed log-loss; populated by log_loss()

    def fit(self, x_train, y_train, lr, itr, alpha):
        """Fit the weights with `itr` steps of batch gradient descent.

        Args:
            x_train: Feature matrix, shape (n_samples, n_features).
            y_train: Binary labels (0/1), shape (n_samples,).
            lr: Learning rate (step size).
            itr: Number of gradient-descent iterations.
            alpha: L2 regularization strength; 0 disables regularization.
        """
        self.cost = 0  # unused, but kept for backward compatibility with callers
        n_samples = len(x_train)
        self.theta = np.zeros(x_train.shape[1])
        for _ in range(itr):
            y_pred = self.sigmoid(np.dot(x_train, self.theta))
            # Gradient of the L2-regularized log-loss w.r.t. theta.
            grad = (np.dot(x_train.T, y_pred - y_train)
                    + 2 * alpha * self.theta) / n_samples
            self.theta -= lr * grad

    def predict(self, x_test):
        """Return 0/1 class predictions for `x_test` (threshold: prob > 0.5).

        Side effect: stores the raw probabilities in ``self.prob`` so a
        subsequent ``log_loss`` call can reuse them.
        """
        self.prob = self.sigmoid(np.dot(x_test, self.theta))
        return (self.prob > 0.5).astype(int)

    def sigmoid(self, data):
        """Elementwise logistic function 1 / (1 + e^-x)."""
        return 1 / (1 + np.exp(-data))

    def log_loss(self, y_test, itr):
        """Return the mean binary cross-entropy of the last predictions.

        Must be called after ``predict`` (reads ``self.prob``). Also stores
        the *summed* loss in ``self.loss`` for backward compatibility.

        Args:
            y_test: True binary labels, shape (n_samples,).
            itr: Unused; retained so existing call sites keep working.
        """
        # Clip probabilities away from 0 and 1 so log() never returns -inf
        # (the unclipped form yields nan for a perfectly confident prediction).
        eps = 1e-15
        p = np.clip(np.asarray(self.prob), eps, 1 - eps)
        y = np.asarray(y_test)
        per_sample = -y * np.log(p) - (1 - y) * np.log(1 - p)
        self.loss = float(np.sum(per_sample))
        return self.loss / len(y_test)