tensorflow 實(shí)現(xiàn)簡單線性回歸/非線性回歸

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf

# Dataset/loader settings — not referenced anywhere in the visible script;
# presumably leftovers from a Boston-housing regression example (TODO confirm).
DATA_FILE =  "boston_housing.csv"
BATCH_SIZE = 10
NUM_FEATURES = 14

def nolmalize(X, axis=None):
    """Standardize X to zero mean and unit (population) std — z-score.

    Args:
        X: array-like of numbers.
        axis: axis along which mean/std are computed. ``None`` (default)
            uses the flattened array, matching the original behavior;
            pass ``axis=0`` for per-feature (per-column) scaling.

    Returns:
        np.ndarray of the same shape as ``X``.

    Note:
        Divides by ``np.std`` (ddof=0); a constant input (std == 0)
        yields NaN/inf — callers must ensure non-constant data.
    """
    X = np.asarray(X)
    mean = np.mean(X, axis=axis)
    std = np.std(X, axis=axis)
    return (X - mean) / std
# Synthetic 1-D datasets: a linear target and a quadratic target, both
# perturbed by the same Gaussian noise (mean 0, std 0.01).
x_data = np.linspace(-1, 1, 1000).reshape(-1, 1)
noise = np.random.normal(0, 0.01, x_data.shape)  # same shape as x_data
y_data = x_data + noise              # linear target
y_data1 = np.square(x_data) + noise  # quadratic target
print(x_data.shape)

# Placeholders for a batch of scalar inputs/targets, shape (batch, 1).
x = tf.placeholder(tf.float32,[None,1])
y = tf.placeholder(tf.float32,[None,1])

# --- Model 1: linear regression, prediction = x @ weight + bias ---
weight = tf.Variable(tf.random_normal([1,1]))
bias   = tf.Variable(tf.random_normal([1,1]))

prediction = tf.matmul(x,weight)+bias

# Mean-squared-error loss, minimized with Adagrad (learning rate 0.01).
loss  = tf.reduce_mean(tf.square(y-prediction))
train = tf.train.AdagradOptimizer(0.01).minimize(loss)

# --- Model 2: small non-linear network, 1 -> 5 -> 1 with tanh ---
# Hidden layer: bias1 has shape (1, 5) and broadcasts across the batch.
weight1 = tf.Variable(tf.random_normal([1,5]))
bias1   = tf.Variable(tf.random_normal([1,5]))
prediction1 = tf.nn.tanh(tf.matmul(x,weight1)+bias1)

# Output layer: also tanh-activated, so outputs are bounded to (-1, 1) —
# adequate here since the quadratic target lies within that range.
weight2 = tf.Variable(tf.random_normal([5,1]))
bias2   = tf.Variable(tf.random_normal([1,1]))
prediction2 = tf.nn.tanh(tf.matmul(prediction1,weight2)+bias2)


# MSE loss and Adagrad optimizer for the non-linear model.
loss1  = tf.reduce_mean(tf.square(y-prediction2))
train1 = tf.train.AdagradOptimizer(0.01).minimize(loss1)

with tf.Session() as sess:
    # initialize_all_variables() was deprecated in TF 0.12 and removed in
    # later releases; global_variables_initializer() is the supported API.
    sess.run(tf.global_variables_initializer())
    for i in range(10000):
        if i % 100 == 0:
            print(i)  # progress marker every 100 steps
        # One full-batch optimization step per model per iteration.
        sess.run(train, feed_dict={x: x_data, y: y_data})
        sess.run(train1, feed_dict={x: x_data, y: y_data1})

    # Final predictions on the training inputs, for plotting.
    prediction_value = sess.run(prediction, feed_dict={x: x_data})
    prediction_value1 = sess.run(prediction2, feed_dict={x: x_data})

    # Linear fit against the linear data.
    plt.figure()
    plt.scatter(x_data, y_data)
    plt.plot(x_data, prediction_value, 'r')
    plt.show()

    # Non-linear (quadratic) fit against the quadratic data.
    plt.figure()
    plt.scatter(x_data, y_data1)
    plt.plot(x_data, prediction_value1, 'r')
    plt.show()
©著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
【社區(qū)內(nèi)容提示】社區(qū)部分內(nèi)容疑似由AI輔助生成,瀏覽時(shí)請結(jié)合常識(shí)與多方信息審慎甄別。
平臺(tái)聲明:文章內(nèi)容(如有圖片或視頻亦包括在內(nèi))由作者上傳并發(fā)布,文章內(nèi)容僅代表作者本人觀點(diǎn),簡書系信息發(fā)布平臺(tái),僅提供信息存儲(chǔ)服務(wù)。

相關(guān)閱讀更多精彩內(nèi)容

友情鏈接更多精彩內(nèi)容