Andrew Ng Machine Learning Homework 1 (Linear Regression)

#!/usr/bin/env python
# coding: utf-8

In[64]:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

In[65]:

path = r'D:\STUDY\machine learning\homework1\ex1data1.txt'  # raw string so the backslashes are not treated as escape sequences
data = pd.read_csv(path, names=['Population', 'Profit'])
data.head()

In[39]:

data.describe()

In[40]:

data.plot(kind='scatter', x='Population', y='Profit', figsize=(12, 8))
plt.show()

In[66]:

def computeCost(X, y, theta):
    # squared error of the hypothesis X * theta.T against the targets y
    j = np.power(((X * theta.T) - y), 2)
    # average over the m training examples, with the conventional 1/2 factor
    return np.sum(j) / (2 * len(X))
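
For reference, this is the standard squared-error cost for linear regression, where m is the number of training examples:

J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \left( h_\theta(x^{(i)}) - y^{(i)} \right)^2, \qquad h_\theta(x) = \theta^T x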

In[42]:

data.insert(0, 'Ones', 1)  # bias column so theta_0 is handled by the same matrix product

In[43]:

cols = data.shape[1]  # number of columns
X = data.iloc[:, 0:cols-1]    # every column but the last: features
y = data.iloc[:, cols-1:cols]  # last column: target

In[44]:

X.head()

In[45]:

y.head()

In[46]:

X = np.matrix(X.values)
y = np.matrix(y.values)
theta = np.zeros([1, 2])  # both parameters start at zero

In[47]:

computeCost(X, y, theta)
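
With theta at zero, this should report a cost of roughly 32.07 for ex1data1, a handy sanity check before running gradient descent.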

In[67]:

def gradientDescent(X, y, theta, alpha, iters):
    temp = np.matrix(np.zeros(theta.shape))  # buffer so all parameters update simultaneously
    parameters = int(len(theta.ravel()))     # number of parameters to fit
    cost = np.zeros(iters)                   # cost history, one entry per iteration

    for i in range(iters):
        error = (X * theta.T) - y
        for j in range(parameters):
            term = np.multiply(error, X[:, j])
            temp[0, j] = theta[0, j] - ((alpha / len(X)) * np.sum(term))

        theta = temp.copy()  # copy so later writes to temp cannot alias theta
        cost[i] = computeCost(X, y, theta)

    return theta, cost
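
The inner loop over j can be collapsed into a single matrix operation, since the batch update is theta := theta - (alpha/m) * ((X*theta.T - y).T * X). A minimal vectorized sketch (the name gradientDescentVec is mine; it assumes X and y are np.matrix objects as above):

def gradientDescentVec(X, y, theta, alpha, iters):
    theta = np.matrix(theta)  # ensure * means matrix multiplication
    cost = np.zeros(iters)
    for i in range(iters):
        error = (X * theta.T) - y                         # m x 1 residuals
        theta = theta - (alpha / len(X)) * (error.T * X)  # update every parameter at once
        cost[i] = computeCost(X, y, theta)
    return theta, cost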

In[68]:

alpha = 0.01
iters = 1000

In[50]:

g, cost = gradientDescent(X, y, theta, alpha, iters)

In[51]:

computeCost(X, y, g)
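
After 1000 iterations at alpha = 0.01 the cost should have dropped from about 32.07 to roughly 4.5; if it is not decreasing monotonically, the learning rate is likely too large.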

In[52]:

x = np.linspace(data.Population.min(), data.Population.max(), 100)
f = g[0, 0] + (g[0, 1] * x)

fig, ax = plt.subplots(figsize=(12,8))
ax.plot(x, f, 'r', label='Prediction')
ax.scatter(data.Population, data.Profit, label='Training Data')
ax.legend(loc=2)
ax.set_xlabel('Population')
ax.set_ylabel('Profit')
ax.set_title('Predicted Profit vs. Population Size')
plt.show()

In[53]:

fig, ax = plt.subplots(figsize=(12,8))
ax.plot(np.arange(iters), cost, 'r')
ax.set_xlabel('Iterations')
ax.set_ylabel('Cost')
ax.set_title('Error vs. Training Epoch')
plt.show()

In[69]:

path = "D:\STUDY\machine learning\homework1\ex1data2.txt"

In[70]:

data2 = pd.read_csv(path, header=None, names=['Size', 'Bedrooms', 'Price'])

data2.head()

In[71]:

data2.describe()

In[72]:

data2 = (data2 - data2.mean()) / data2.std()  # z-score normalization of every column
data2.head()
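
Standardizing each column to zero mean and unit variance keeps the two features, which live on very different scales (square footage vs. bedroom count), from producing a badly conditioned cost surface, so one learning rate works for all parameters. Note that Price is normalized too, so the model's predictions come out in normalized units and must be rescaled with the original mean and std of Price to be read as dollars.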

In[73]:

data2.insert(0, 'Ones', 1)

In[74]:

cols = data2.shape[1]
X2 = data2.iloc[:, 0:cols-1]
y2 = data2.iloc[:, cols-1:cols]
X2, y2

In[75]:

X2 = np.matrix(X2.values)
y2 = np.matrix(y2.values)
theta2 = np.zeros([1, 3])  # three parameters: intercept plus two features

In[76]:

g2, cost2 = gradientDescent(X2, y2, theta2, alpha, iters)
computeCost(X2, y2, g2)

In[77]:

fig, ax = plt.subplots(figsize=(12,8))
ax.plot(np.arange(iters), cost2, 'r')
ax.set_xlabel('Iterations')
ax.set_ylabel('Cost')
ax.set_title('Error vs. Training Epoch')
plt.show()

In[80]:

def normalEqn(X, y):
    # closed-form least squares: theta = (X^T X)^(-1) X^T y
    # X and y are np.matrix objects here, so * is matrix multiplication
    theta = np.linalg.inv(X.T * X) * X.T * y
    return theta
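
The normal equation needs no learning rate and no iterations, but inverting X^T X costs O(n^3) in the number of features and fails outright when X^T X is singular (e.g. duplicated features). A sketch of a more robust variant using the pseudo-inverse (the name normalEqnPinv is mine):

def normalEqnPinv(X, y):
    # np.linalg.pinv tolerates a singular or ill-conditioned X.T * X
    return np.linalg.pinv(X.T * X) * X.T * y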

In[81]:

final_theta2 = normalEqn(X, y)
final_theta2
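
Note that normalEqn is applied to the first dataset's X and y here, so final_theta2 is directly comparable to the g found by gradient descent above; the two will differ somewhat because 1000 iterations at alpha = 0.01 stops short of the exact least-squares optimum.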
