Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # coding: utf-8
- import numpy as np
- import tensorflow as tf
- import matplotlib.pyplot as plt
- import seaborn as sns
- get_ipython().magic('matplotlib inline')
- from sklearn.datasets import load_boston
# Load the Boston housing dataset.
# NOTE(review): load_boston was deprecated in scikit-learn 1.0 and removed
# in 1.2 — this script requires an older sklearn release.
data = load_boston()
X_data = data.data    # feature matrix
y_data = data.target  # target house prices (1-D)
# m samples, n features — used to size the placeholders below.
m, n = X_data.shape
# --- Linear-regression graph (TensorFlow 1.x static-graph API) ---
X = tf.placeholder(tf.float32, [m, n])  # feature batch
y = tf.placeholder(tf.float32, [m, 1])  # targets as a column vector
W = tf.Variable(tf.ones([n, 1]))        # weight vector, initialized to ones
b = tf.Variable(tf.ones([1]))           # scalar bias, initialized to one
y_ = tf.matmul(X, W) + b                # linear prediction: X @ W + b
# Mean-squared-error loss over all m samples.
residual = y - y_
loss = tf.reduce_mean(tf.square(residual))
optimizer = tf.train.GradientDescentOptimizer(0.01)
train = optimizer.minimize(loss)
# Run 100 steps of gradient descent and record the loss at each step.
# BUG FIX: the original did `val = sess.run(train)` — the train op returns
# None, so `vals` was a list of 100 Nones (see the pasted output below).
# Fetching [train, loss] together yields the actual loss per iteration.
with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    vals = []
    # The feed dict is loop-invariant; build it once. y_data[:, None]
    # reshapes the 1-D targets to the (m, 1) placeholder shape.
    feed = {X: X_data, y: y_data[:, None]}
    for i in range(100):
        # NOTE(review): with unscaled Boston features, lr=0.01 typically
        # makes this diverge to NaN — consider standardizing X_data first.
        _, loss_val = sess.run([train, loss], feed_dict=feed)
        vals.append(loss_val)
    print(vals)
- [None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- None,
- ...
- None]
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement