Support Vector Regression (SVR)
by 바죠

Support Vector Regression (SVR) is a regression algorithm that adapts the support vector machine (SVM) framework to continuous targets.

SVM itself is mainly applied to classification problems; SVR carries the same machinery over to regression.
One notable property is that overfitting does not occur easily.
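This robustness comes from SVR's ε-insensitive loss: deviations smaller than ε are not penalized at all, while the ||w||^2 term regularizes the fit. For reference, the standard SVR training problem is

$$
\min_{w,\,b,\,\xi,\,\xi^*}\;\frac{1}{2}\lVert w\rVert^2 + C\sum_{i=1}^{N}\left(\xi_i+\xi_i^*\right)
\qquad\text{subject to}\qquad
\begin{aligned}
&y_i-\langle w,x_i\rangle-b\le\varepsilon+\xi_i,\\
&\langle w,x_i\rangle+b-y_i\le\varepsilon+\xi_i^*,\\
&\xi_i,\;\xi_i^*\ge 0,
\end{aligned}
$$

where C and ε map directly onto the C and epsilon hyperparameters of sklearn's SVR.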

SVR is not a widely used algorithm, because many machine learning methods can deliver better performance; representative examples are LightGBM and XGBoost. In addition, Bayesian optimization can be applied to tune the hyperparameters, which yields improved performance.
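As a concrete illustration, here is a minimal sketch of Bayesian hyperparameter tuning for SVR. It assumes the scikit-optimize package, which is not used in the post below; the search ranges are illustrative choices.

# Minimal sketch: Bayesian optimization of SVR hyperparameters
# (assumes scikit-optimize is installed: pip install scikit-optimize).
import numpy as np
from sklearn.svm import SVR
from skopt import BayesSearchCV
from skopt.space import Real

rng = np.random.default_rng(123)
X = rng.uniform(0, 20, size=(200, 1))                 # toy 1-D inputs
y = np.sin(X).ravel() + 3 + rng.uniform(-1, 1, 200)   # noisy sine target

opt = BayesSearchCV(
    SVR(kernel="rbf"),
    {
        "C":       Real(1e-2, 1e3, prior="log-uniform"),
        "gamma":   Real(1e-4, 1e1, prior="log-uniform"),
        "epsilon": Real(1e-3, 1e0, prior="log-uniform"),
    },
    n_iter=30,        # Bayesian-optimization iterations (illustrative)
    cv=5,
    random_state=123,
)
opt.fit(X, y)
print(opt.best_params_, opt.best_score_)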

---------------------------------------------------------------------------------------------------------------------
import random
import math
import numpy as np
import matplotlib.pyplot as plt
from sklearn.svm import SVR
from sklearn.metrics import mean_squared_error

random.seed(123)

def getData(N):
    # Noisy samples of a shifted sine curve: y = sin(a) + 3 + noise.
    x, y = [], []
    for i in range(N):
        a = i / 10 + random.uniform(-1, 1)
        yy = math.sin(a) + 3 + random.uniform(-1, 1)
        x.append([a])
        y.append(yy)   # keep y one-dimensional so SVR.fit() does not warn
    return np.array(x), np.array(y)

x, y = getData(200)

model = SVR()          # defaults: RBF kernel, C=1.0, epsilon=0.1
print(model)
model.fit(x, y)
pred_y = model.predict(x)

for yo, yp in zip(y[:15], pred_y[:15]):
    print(yo, yp)

x_ax = range(200)
plt.scatter(x_ax, y, s=5, color="blue", label="original")
plt.plot(x_ax, pred_y, lw=1.5, color="red", label="predicted")
plt.legend()
plt.show()

score = model.score(x, y)          # R^2 on the training data
print(score)
mse = mean_squared_error(y, pred_y)
print("Mean Squared Error:", mse)
rmse = math.sqrt(mse)
print("Root Mean Squared Error:", rmse)


---------------------------------------------------------------------------------------------------------------------
import random
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.metrics import mean_squared_error

random.seed(123)

def CreateDataset(N):
    # Three noisy features; the target is a linear combination of them.
    a, b, c, y = [], [], [], []
    for i in range(N):
        aa = i / 10 + random.uniform(-4, 3)
        bb = i / 30 + random.uniform(-4, 4)
        cc = i / 40 + random.uniform(-3, 3) - 5
        yy = (aa + bb + cc / 2) / 3
        a.append([aa])
        b.append([bb])
        c.append([cc])
        y.append([yy])
    return np.hstack([a, b, c]), np.array(y)

N = 200
x, y = CreateDataset(N)

x_ax = range(N)
plt.plot(x_ax, x, 'o', label="feature", markersize=3)
plt.plot(x_ax, y, lw=1.5, color="red", label="y")
plt.legend()
plt.show()

def BuildModel():
    # Feed-forward regressor: 3 inputs -> 128 -> 32 -> 8 -> 1 output.
    model = Sequential()
    model.add(Dense(128, input_dim=3, activation='relu'))
    model.add(Dense(32, activation='relu'))
    model.add(Dense(8, activation='relu'))
    model.add(Dense(1, activation='linear'))
    model.compile(loss="mean_squared_error", optimizer="adam")
    return model

BuildModel().summary()

# Wrap the Keras model in the scikit-learn estimator interface.
# The fit argument is `epochs` in Keras 2 (`nb_epoch` was the Keras 1 name).
regressor = KerasRegressor(build_fn=BuildModel, epochs=100, batch_size=3)
regressor.fit(x, y)
y_pred = regressor.predict(x)
mse_krr = mean_squared_error(y, y_pred)
print(mse_krr)

plt.plot(y, label="y-original")
plt.plot(y_pred, label="y-predicted")
plt.legend()
plt.show()

# The same model trained directly through the plain Keras API.
model = BuildModel()
model.fit(x, y, epochs=100, verbose=0, shuffle=False)
y_krm = model.predict(x)
mse_krm = mean_squared_error(y, y_krm)
print(mse_krm)

plt.plot(y, label="y-original")
plt.plot(y_krm, label="y-predicted")
plt.legend()
plt.show()
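
# Caveat: keras.wrappers.scikit_learn has been removed from recent
# Keras/TensorFlow releases. If the import above fails, the SciKeras package
# (pip install scikeras) provides a near drop-in replacement; note that it
# takes `model=` instead of `build_fn=`. A sketch, assuming scikeras:
#     from scikeras.wrappers import KerasRegressor
#     regressor = KerasRegressor(model=BuildModel, epochs=100, batch_size=3)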

---------------------------------------------------------------------------------------------------------------------


