
Python machine learning: non-linear regression with the support vector machine SVR model

Published: 2020-10-06 22:04:09  Source: 腳本之家  Views: 167  Author: 吳裕雄  Category: Development

This article walks through the support vector machine non-linear regression SVR model in Python. Without further ado, the details are as follows:

import numpy as np
import matplotlib.pyplot as plt

from sklearn import datasets, linear_model,svm
from sklearn.model_selection import train_test_split

def load_data_regression():
  '''
  Load the dataset used for the regression problem.
  '''
  diabetes = datasets.load_diabetes() # the diabetes dataset bundled with scikit-learn
  # split into a training set and a test set; the test set is 1/4 of the original data
  return train_test_split(diabetes.data,diabetes.target,test_size=0.25,random_state=0)

# support vector machine regression (SVR) model, here with a linear kernel
def test_SVR_linear(*data):
  X_train,X_test,y_train,y_test=data
  regr=svm.SVR(kernel='linear')
  regr.fit(X_train,y_train)
  print('Coefficients:%s, intercept %s'%(regr.coef_,regr.intercept_))
  print('Score: %.2f' % regr.score(X_test, y_test))
  
# load the dataset for the regression problem
X_train,X_test,y_train,y_test=load_data_regression() 
# call test_SVR_linear
test_SVR_linear(X_train,X_test,y_train,y_test)

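The score printed by test_SVR_linear is the R² of the predictions on the test set. If you also want the error in the original target units, you can work from the predictions directly; the snippet below is a minimal sketch added here for illustration (mean_squared_error and r2_score come from sklearn.metrics, and the variables follow the code above):

from sklearn.metrics import mean_squared_error, r2_score

# refit the linear-kernel SVR and inspect its test-set predictions explicitly
regr = svm.SVR(kernel='linear')
regr.fit(X_train, y_train)
y_pred = regr.predict(X_test)
print('MSE: %.2f' % mean_squared_error(y_test, y_pred)) # mean squared error, in squared target units
print('R^2: %.2f' % r2_score(y_test, y_pred))           # same value as regr.score(X_test, y_test)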

def test_SVR_poly(*data):
  '''
  Test how the predictive performance of the polynomial-kernel SVR varies with degree, gamma and coef0.
  '''
  X_train,X_test,y_train,y_test=data
  fig=plt.figure()
  ### test degree ###
  degrees=range(1,20)
  train_scores=[]
  test_scores=[]
  for degree in degrees:
    regr=svm.SVR(kernel='poly',degree=degree,coef0=1)
    regr.fit(X_train,y_train)
    train_scores.append(regr.score(X_train,y_train))
    test_scores.append(regr.score(X_test, y_test))
  ax=fig.add_subplot(1,3,1)
  ax.plot(degrees,train_scores,label="Training score ",marker='+' )
  ax.plot(degrees,test_scores,label= " Testing score ",marker='o' )
  ax.set_title( "SVR_poly_degree r=1")
  ax.set_xlabel("p")
  ax.set_ylabel("score")
  ax.set_ylim(-1,1.)
  ax.legend(loc="best",framealpha=0.5)

  ### test gamma, with degree fixed at 3 and coef0 fixed at 1 ###
  gammas=range(1,40)
  train_scores=[]
  test_scores=[]
  for gamma in gammas:
    regr=svm.SVR(kernel='poly',gamma=gamma,degree=3,coef0=1)
    regr.fit(X_train,y_train)
    train_scores.append(regr.score(X_train,y_train))
    test_scores.append(regr.score(X_test, y_test))
  ax=fig.add_subplot(1,3,2)
  ax.plot(gammas,train_scores,label="Training score ",marker='+' )
  ax.plot(gammas,test_scores,label= " Testing score ",marker='o' )
  ax.set_title( "SVR_poly_gamma r=1")
  ax.set_xlabel(r"$\gamma$")
  ax.set_ylabel("score")
  ax.set_ylim(-1,1)
  ax.legend(loc="best",framealpha=0.5)
  ### test r (coef0), with gamma fixed at 20 and degree fixed at 3 ###
  rs=range(0,20)
  train_scores=[]
  test_scores=[]
  for r in rs:
    regr=svm.SVR(kernel='poly',gamma=20,degree=3,coef0=r)
    regr.fit(X_train,y_train)
    train_scores.append(regr.score(X_train,y_train))
    test_scores.append(regr.score(X_test, y_test))
  ax=fig.add_subplot(1,3,3)
  ax.plot(rs,train_scores,label="Training score ",marker='+' )
  ax.plot(rs,test_scores,label= " Testing score ",marker='o' )
  ax.set_title( "SVR_poly_r gamma=20 degree=3")
  ax.set_xlabel(r"r")
  ax.set_ylabel("score")
  ax.set_ylim(-1,1.)
  ax.legend(loc="best",framealpha=0.5)
  plt.show()
  
# call test_SVR_poly
test_SVR_poly(X_train,X_test,y_train,y_test)

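Instead of sweeping degree, gamma and coef0 one at a time as in test_SVR_poly, the same kind of exploration can be handed to GridSearchCV. The sketch below is an illustrative addition, not part of the original code; the parameter grid is an assumption chosen for speed, not the ranges used in the plots above:

from sklearn.model_selection import GridSearchCV

# cross-validated grid search over the polynomial-kernel hyper-parameters
param_grid = {
  'degree': [1, 2, 3],
  'gamma': [1, 5, 10, 20],
  'coef0': [0, 1, 5],
}
search = GridSearchCV(svm.SVR(kernel='poly'), param_grid, cv=5)
search.fit(X_train, y_train)
print('best params:', search.best_params_)
print('test score: %.2f' % search.score(X_test, y_test))

GridSearchCV picks the combination with the best cross-validated score on the training data, so the final test score is a fairer estimate than re-using the test set while tuning.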

def test_SVR_rbf(*data):
  '''
  Test how the predictive performance of the RBF (Gaussian) kernel SVR varies with the gamma parameter.
  '''
  X_train,X_test,y_train,y_test=data
  gammas=range(1,20)
  train_scores=[]
  test_scores=[]
  for gamma in gammas:
    regr=svm.SVR(kernel='rbf',gamma=gamma)
    regr.fit(X_train,y_train)
    train_scores.append(regr.score(X_train,y_train))
    test_scores.append(regr.score(X_test, y_test))
  fig=plt.figure()
  ax=fig.add_subplot(1,1,1)
  ax.plot(gammas,train_scores,label="Training score ",marker='+' )
  ax.plot(gammas,test_scores,label= " Testing score ",marker='o' )
  ax.set_title( "SVR_rbf")
  ax.set_xlabel(r"$\gamma$")
  ax.set_ylabel("score")
  ax.set_ylim(-1,1)
  ax.legend(loc="best",framealpha=0.5)
  plt.show()
  
# call test_SVR_rbf
test_SVR_rbf(X_train,X_test,y_train,y_test)

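The RBF kernel is sensitive to the scale of the input features, so in practice it is common to standardise them before fitting. The following is a minimal sketch of that pattern using Pipeline and StandardScaler; it is an addition for illustration, not part of the original code (note that load_diabetes already returns pre-scaled features, so the effect here is small):

from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler

# standardise the features, then fit an RBF-kernel SVR on the scaled data
scaled_svr = Pipeline([
  ('scaler', StandardScaler()),
  ('svr', svm.SVR(kernel='rbf', gamma=1)),
])
scaled_svr.fit(X_train, y_train)
print('Score with scaling: %.2f' % scaled_svr.score(X_test, y_test))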

def test_SVR_sigmoid(*data):
  '''
  Test how the predictive performance of the sigmoid-kernel SVR varies with gamma and coef0.
  '''
  X_train,X_test,y_train,y_test=data
  fig=plt.figure()

  ### test gamma, with coef0 fixed at 0.01 ###
  gammas=np.logspace(-1,3)
  train_scores=[]
  test_scores=[]

  for gamma in gammas:
    regr=svm.SVR(kernel='sigmoid',gamma=gamma,coef0=0.01)
    regr.fit(X_train,y_train)
    train_scores.append(regr.score(X_train,y_train))
    test_scores.append(regr.score(X_test, y_test))
  ax=fig.add_subplot(1,2,1)
  ax.plot(gammas,train_scores,label="Training score ",marker='+' )
  ax.plot(gammas,test_scores,label= " Testing score ",marker='o' )
  ax.set_title( "SVR_sigmoid_gamma r=0.01")
  ax.set_xscale("log")
  ax.set_xlabel(r"$\gamma$")
  ax.set_ylabel("score")
  ax.set_ylim(-1,1)
  ax.legend(loc="best",framealpha=0.5)
  ### test r (coef0), with gamma fixed at 10 ###
  rs=np.linspace(0,5)
  train_scores=[]
  test_scores=[]

  for r in rs:
    regr=svm.SVR(kernel='sigmoid',coef0=r,gamma=10)
    regr.fit(X_train,y_train)
    train_scores.append(regr.score(X_train,y_train))
    test_scores.append(regr.score(X_test, y_test))
  ax=fig.add_subplot(1,2,2)
  ax.plot(rs,train_scores,label="Training score ",marker='+' )
  ax.plot(rs,test_scores,label= " Testing score ",marker='o' )
  ax.set_title( "SVR_sigmoid_r gamma=10")
  ax.set_xlabel(r"r")
  ax.set_ylabel("score")
  ax.set_ylim(-1,1)
  ax.legend(loc="best",framealpha=0.5)
  plt.show()
  
# call test_SVR_sigmoid
test_SVR_sigmoid(X_train,X_test,y_train,y_test)

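Having looked at each kernel separately, a quick side-by-side comparison can be produced in a single loop. This is a small illustrative addition using the same train/test split as the rest of the article, with each kernel left at its default parameters:

# fit one SVR per kernel on the same split and compare the test-set scores
for kernel in ('linear', 'poly', 'rbf', 'sigmoid'):
  regr = svm.SVR(kernel=kernel)
  regr.fit(X_train, y_train)
  print('%-8s test score: %.2f' % (kernel, regr.score(X_test, y_test)))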

That is all for this article. I hope it is helpful for your study, and thank you for supporting 億速云.
