-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathregressionnl.py
60 lines (45 loc) · 1.44 KB
/
regressionnl.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
from sklearn import neighbors, datasets
import numpy as np
from sklearn import tree
from sklearn import linear_model
from sklearn.kernel_ridge import KernelRidge
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
def kernelregress(D, xq, beta):
    """Nadaraya-Watson kernel regression with an exponential (Laplacian) kernel.

    Parameters
    ----------
    D : ndarray, shape (n, 2)
        Training data: column 0 holds the inputs x_i, column 1 the targets y_i.
    xq : ndarray, shape (m, 1)
        Query points as a column vector (2-D shape is required: the per-query
        weight matrix is formed by broadcasting, and the normalization sums
        over axis 1).
    beta : float
        Kernel bandwidth; larger beta gives more local (peakier) weighting.

    Returns
    -------
    ndarray, shape (m,)
        Locally weighted average of the training targets at each query point.
    """
    # Weight of training point i for query q: exp(-beta * |x_i - x_q|).
    # BUGFIX: the original applied np.round() to the distances, which
    # quantizes the kernel to integer distances and makes the weights
    # piecewise constant -- removed so the kernel decays smoothly.
    kk = np.exp(-beta * np.abs(D[:, 0] - xq))   # (m, n) via broadcasting
    # Weighted mean of the targets, normalized per query point.
    return np.dot(kk, D[:, 1]) / np.sum(kk, axis=1)
# ---- Training set: 22 noisy samples of f(x) = 2 + sin(x) on [0, 10.5] ----
noisesigma = 0.2                              # std-dev of additive Gaussian noise
X = np.arange(0, 11, 0.5).reshape(-1, 1)      # (22, 1) column of inputs
Y = 2 + np.sin(X) + noisesigma * np.random.randn(*X.shape)
# (optional experiment) inject outliers: Y[[5, 10, 15], 0] *= 2
# ---- Testing set: dense noise-free grid for evaluating the fits ----
Xp = np.arange(0, 11, 0.1).reshape(-1, 1)     # (110, 1) query points
Yp = 2 + np.sin(Xp)                           # true (noiseless) function values
# ---- Fit four regressors on (X, Y) and predict on the dense grid Xp ----

# Ordinary least-squares linear fit.
reglr = linear_model.LinearRegression()
Ylr = reglr.fit(X, Y).predict(Xp)

# Kernel ridge regression with an RBF kernel.
regkr = KernelRidge(kernel='rbf', gamma=0.1, alpha=0.1)
Ykr = regkr.fit(X, Y).predict(Xp)

# Nadaraya-Watson kernel regression at two bandwidths.
trainset = np.hstack((X, Y))       # (n, 2): inputs in col 0, targets in col 1
Yp1 = kernelregress(trainset, Xp, 10)   # beta = 10: very local weighting
Yp2 = kernelregress(trainset, Xp, 1)    # beta = 1: smoother weighting

# Decision-tree regressor.
min_samples_split = 3
regtree = tree.DecisionTreeRegressor(min_samples_split=min_samples_split)
regtree = regtree.fit(X, Y)
Ytree = regtree.predict(Xp)
# ---- Plot the training data, the true function, and every model's fit ----
# BUGFIX: Yp (the noiseless target function) was computed but never plotted,
# and the noisy training scatter was mislabeled 'true'.
plt.plot(X, Y, 'go', label='train')        # noisy training samples
plt.plot(Xp, Yp, 'k--', label='true')      # noiseless target 2 + sin(x)
plt.plot(Xp, Yp1, 'g', label='kerReg10')   # kernel regression, beta = 10
plt.plot(Xp, Yp2, 'g:', label='kerReg1')   # kernel regression, beta = 1
plt.plot(Xp, Ykr, 'r', label='KernRidge')  # kernel ridge (RBF)
plt.plot(Xp, Ytree, 'b', label='tree')     # decision-tree regressor
plt.plot(Xp, Ylr, 'm', label='linregres')  # linear regression
plt.legend(loc=3)                          # legend in the lower-left corner
plt.show()