-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlinear_models.py
61 lines (46 loc) · 1.54 KB
/
linear_models.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
# coding: utf-8
# In[351]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
def gen_data(n=1100):
    """Generate ``n`` 2-D points labelled +/-1 by a random line, with ~10% label noise.

    A random line ``y = a*x + b`` splits the plane: points strictly above the
    line get label -1, the rest +1, and every label is then flipped with
    probability 0.1 to simulate noise. A scatter plot of the points and the
    true separating line is displayed as a side effect.

    Args:
        n: number of points to generate.

    Returns:
        Tuple ``(X, z, a, b)`` where ``X`` is an ``(n, 2)`` array of points,
        ``z`` the ``(n,)`` label vector in ``{-1, +1}``, and ``a``, ``b`` the
        slope and intercept of the true boundary line.
    """
    a = (np.random.rand() - 0.5) * 10
    b = (np.random.rand() - 0.5) * 10
    # true boundary is the line y = a*x + b
    x = (np.random.rand(n) - 0.5) * 10
    y = (np.random.rand(n) - 0.5) * 10
    z = np.ones(n)
    for i in range(n):
        if a * x[i] + b < y[i]:
            z[i] = -1
        # flip the label with probability 0.1 (label noise)
        if np.random.rand(1) < 0.1:
            z[i] *= -1
    # labels are +/-1, so colour on the sign (the original tested == 0,
    # which never matched, and the list was never used)
    color = ['red' if label < 0 else 'green' for label in z]
    plt.scatter(x, y, c=color)
    plt.ylim(min(y), max(y))
    plt.xlim(min(x), max(x))
    points = np.linspace(min(x), max(x))
    plt.plot(points, points * a + b, label="blue")
    plt.show()
    return np.stack([x, y], axis=1), np.array(z), a, b
class LogisticRegression():
    """Binary logistic-regression classifier for labels in {-1, +1}.

    Trained with stochastic gradient descent on the logistic loss.
    A bias column of ones is appended to the input internally, so for
    d-dimensional inputs the learned weight vector has d + 1 entries.
    """

    def __init__(self, iterations=1000, learning_rate=0.01):
        self.lr = learning_rate        # SGD step size
        self.iterations = iterations   # number of SGD updates
        self.weights = None            # set by fit(); (d + 1,) array

    @staticmethod
    def E(X, Y, w):
        """Mean logistic loss of weights ``w`` on data ``(X, Y)``.

        Uses log1p(exp(-margin)) for the per-sample loss, where the
        margin is ``(X @ w) * Y``.
        """
        return np.sum(np.log1p(np.exp(-np.dot(X, np.array(w)) * Y))) / len(X)

    @staticmethod
    def gradient(X, Y, w, index):
        """Gradient of the logistic loss at the single sample ``index``."""
        return -Y[index] * X[index] / (1 + np.exp(np.dot(X[index], np.array(w)) * Y[index]))

    def fit(self, X, Y):
        """Fit weights by SGD and store them in ``self.weights``.

        Args:
            X: (n, d) array of samples.
            Y: (n,) array of labels in {-1, +1}.
        """
        # append a bias column so the decision boundary need not pass
        # through the origin
        Xb = np.c_[X, np.ones(len(X))]
        w = np.random.rand(Xb.shape[1])
        for _ in range(self.iterations):
            # one SGD step on a uniformly random sample
            i = np.random.randint(0, len(Xb))
            w = w - self.lr * self.gradient(Xb, Y, w, i)
        self.weights = w
# In[ ]:
def predict(X, w):
    """Classify each row of ``X`` as +/-1 via the sign of its affine score.

    A bias column of ones is appended to ``X``, so ``w`` must have one
    more entry than ``X`` has columns.
    """
    augmented = np.column_stack((X, np.ones(len(X))))
    return np.sign(augmented @ w)