# TensorFlow中的线性回归解释和实例图解

h(x) = wx + b

```text
Repeat until Convergence {
    w = w - α * ∂J/∂w
    b = b - α * ∂J/∂b
}
```

## 线性回归的实现

``````import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf``````

``````tf.set_random_seed(101)
np.random.seed(101)``````

``````# Generating random linear data
# There will be 50 data points which are ranging from 0 to 50.
x = np.linspace(0, 50, 50)
y = np.linspace(0, 50, 50)

# Adding noise to the random linear data
x += np.random.uniform(-4, 4, 50)
y += np.random.uniform(-4, 4, 50)
n= len(x) #Number of data points``````

## 训练数据图

``````plt.scatter(x, y)
plt.xlabel('x')
plt.xlabel('y')
plt.title("Training Data")
plt.show()``````

``````X= tf.placeholder("float")
Y= tf.placeholder("float")``````

``````np.random.randn().
W= tf.Variable(np.random.randn(), name="W")
B= tf.Variable(np.random, randn(), name="b")``````

``````learning_rate= 0 .01
training_epochs= 1000``````

``````# Hypothesis of the function
# Mean Square Error function
cost = tf.reduce_sum(tf.pow(y_pred-Y, 2)) / (2 * n)
# Global Variables Initializer
init = tf.global_variables_initializer( )``````

``````# Starting the Tensorflow Session
with tf.Session() as sess:

# Initializing the Variables
sess.run(init)

# Iterating through all the epochs
for epoch in range(training_epochs):

# Feeding each data point into the optimizer according to the Feed Dictionary.
for (_x, _y) in zip(x, y):
sess.run(optimizer, feed_dict = {X : _x, Y : _y})

# Here, we are displaying the result after every 50 epoch
if (epoch + 1) % 50 ==0:
# Calculating the cost at every epoch.
c = sess.run(cost, feed_dict = {X : x, Y : y})
print("Epoch", (epoch + 1), ": cost =", c, "W =", sess.run(W), "b=", sess.run(b))
# Store the necessary value which has used outside the Session
training_cost = sess.run (cost, feed_dict ={X: x, Y: y})
weight = sess.run(W)
bias = sess.run(b)``````

```text
Epoch: 50  cost = 5.8868037 W = 0.9951241 b = 1.2381057
Epoch: 100 cost = 5.7912708 W = 0.9981236 b = 1.0914398
Epoch: 150 cost = 5.7119676 W = 1.0008028 b = 0.96044315
Epoch: 200 cost = 5.6459414 W = 1.0031956 b = 0.8434396
Epoch: 250 cost = 5.590798 W = 1.0053328 b = 0.7389358
Epoch: 300 cost = 5.544609 W = 1.007242 b = 0.6455922
Epoch: 350 cost = 5.5057884 W = 1.008947 b = 0.56223
Epoch: 400 cost = 5.473068 W = 1.01047 b = 0.46775345
Epoch: 450 cost = 5.453845 W = 1.0118302 b = 0.42124168
Epoch: 500 cost = 5.421907 W = 1.0130452 b = 0.36183489
Epoch: 550 cost = 5.4019218 W = 1.0141305 b = 0.30877414
Epoch: 600 cost = 5.3848578 W = 1.0150996  b = 0.26138115
Epoch: 650 cost = 5.370247 W = 1.0159653  b = 0.21905092
Epoch: 700 cost = 5.3576995 W = 1.0167387  b = 0.18124212
Epoch: 750 cost = 5.3468934 W = 1.0174294  b = 0.14747245
Epoch: 800 cost = 5.3375574 W = 1.0180461  b = 0.11730932
Epoch: 850 cost = 5.3294765 W = 1.0185971  b = 0.090368526
Epoch: 900 cost = 5.322459 W = 1.0190894  b = 0.0663058
Epoch: 950 cost = 5.3163588 W = 1.0195289  b = 0.044813324
Epoch: 1000 cost = 5.3110332 W = 1.0199218  b = 0.02561669
```

``````# Calculate the predictions
predictions = weight * x + bias
print ("Training cost =", training_cost, "Weight =", weight, "bias =", bias, '\n')``````

`Training cost = 5.3110332 Weight = 1.0199214 bias = 0.02561663`

``````# Plotting the Results below
plt.plot(x, y, 'ro', label ='original data')
plt.plot(x, predictions, label ='Fited line')
plt.title('Linear Regression Result')
plt.legend()
plt.show()``````

回到顶部