數(shù)據(jù)來源于論文:Application of neural networks and fuzzy systems for the intelligent prediction of CO2-induced strength alteration of coal
預(yù)測方法:支持向量回歸(Support Vector Regression)
代碼如下:
# #############################################################################
# 聲明需要用到的包
import numpy as np
import pandas as pd
import random
from sklearn.svm import SVR
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
%matplotlib inline
# #############################################################################
# Load the raw dataset (UCS measurements of CO2-treated coal samples,
# taken from the paper cited at the top of this post).
rawData = pd.read_csv('raw_data.csv')
# Show how many rows/columns were read, then preview the first records.
print("原始數(shù)據(jù)集大?。?,rawData.shape)
rawData.head()
# #############################################################################
# Standardize every numeric column to zero mean / unit variance.
# Each column's (mean, std) pair is kept in `scaled_features` so that
# predictions can be mapped back to physical units further below.
quant_features = ['FC', 'Interaction_time', 'Saturation_pressure', 'Measured_UCS']
scaled_features = {}
for col in quant_features:
    mu, sigma = rawData[col].mean(), rawData[col].std()
    scaled_features[col] = [mu, sigma]
    rawData.loc[:, col] = (rawData[col] - mu) / sigma
rawData.head()
# #############################################################################
# Convert the DataFrame into a plain numpy array for modelling.
rawDataNP = rawData.values
rawDataNP[0:5, :]
完成了數(shù)據(jù)預(yù)處理過后,就可以開始進行回歸預(yù)測了:
# #############################################################################
# Define model inputs (x) and the regression target (y).
# NOTE(review): `0:3` selects only columns 0-2 while the target sits at
# index 4 — if the CSV carries four feature columns this silently drops
# column 3. Confirm the intended feature set against raw_data.csv.
x=rawDataNP[:,0:3]
y=rawDataNP[:,4]
# Split into training and test sets:
# a random 25% is held out for testing, 75% is used for training.
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25, random_state=0)
線性核函數(shù),預(yù)測結(jié)果:
# Support-vector regression with a linear kernel.
linear_svr = SVR(kernel="linear").fit(x_train, y_train)
# Hold-out predictions, kept for the diagnostic plot below.
linear_svr_y_predict = linear_svr.predict(x_test)
# Overlay the measured (standardized) series and the predicted series.
plt.plot(y_test)
plt.plot(linear_svr_y_predict)
plt.show()

線性核函數(shù),預(yù)測結(jié)果
# Back-transform the standardized values to physical UCS units and plot
# measured vs. predicted, together with a least-squares trend line.
fig, ax = plt.subplots(figsize=(10, 7))
mean, std = scaled_features['Measured_UCS']
x = (linear_svr_y_predict * std + mean).astype(float)
y = (y_test * std + mean).astype(float)
poly = np.polyfit(x, y, 1)
z = np.poly1d(poly)(x)
ax.plot(x, y, 'o')
ax.plot(x, z, label='Linear Regression')
ax.set_xlabel('Predicted UCS')
ax.set_ylabel('Measured UCS')
ax.legend()

線性核函數(shù),預(yù)測結(jié)果
多項式核函數(shù),預(yù)測結(jié)果:
# Support-vector regression with a degree-3 polynomial kernel.
poly_svr = SVR(kernel='poly', degree=3, coef0=1, C=100, gamma='auto', epsilon=0.1)
poly_svr.fit(x_train, y_train)
# Hold-out predictions, kept for the diagnostic plot below.
poly_svr_y_predict = poly_svr.predict(x_test)
# Overlay the measured (standardized) series and the predicted series.
plt.plot(y_test)
plt.plot(poly_svr_y_predict)
plt.show()

多項式核函數(shù),預(yù)測結(jié)果
# Back-transform the standardized values to physical UCS units and plot
# measured vs. predicted for the polynomial-kernel model.
fig, ax = plt.subplots(figsize=(10, 7))
mean, std = scaled_features['Measured_UCS']
x = (poly_svr_y_predict * std + mean).astype(float)
y = (y_test * std + mean).astype(float)
poly = np.polyfit(x, y, 1)
z = np.poly1d(poly)(x)
ax.plot(x, y, 'o')
ax.plot(x, z, label='Linear Regression')
ax.set_xlabel('Predicted UCS')
ax.set_ylabel('Measured UCS')
ax.legend()

多項式核函數(shù),預(yù)測結(jié)果
徑向基核函數(shù),預(yù)測結(jié)果:
# Support-vector regression with a radial-basis-function (RBF) kernel.
rbf_svr = SVR(kernel='rbf', C=100, gamma=0.1, epsilon=0.1).fit(x_train, y_train)
# Hold-out predictions, kept for the diagnostic plot below.
rbf_svr_y_predict = rbf_svr.predict(x_test)
# Overlay the measured (standardized) series and the predicted series.
plt.plot(y_test)
plt.plot(rbf_svr_y_predict)
plt.show()

徑向基核函數(shù),預(yù)測結(jié)果
# Back-transform the standardized values to physical UCS units and plot
# measured vs. predicted for the RBF-kernel model.
fig, ax = plt.subplots(figsize=(10, 7))
mean, std = scaled_features['Measured_UCS']
x = (rbf_svr_y_predict * std + mean).astype(float)
y = (y_test * std + mean).astype(float)
poly = np.polyfit(x, y, 1)
z = np.poly1d(poly)(x)
ax.plot(x, y, 'o')
ax.plot(x, z, label='Linear Regression')
ax.set_xlabel('Predicted UCS')
ax.set_ylabel('Measured UCS')
ax.legend()

徑向基核函數(shù),預(yù)測結(jié)果