In [6]:
import numpy as np
import tensorflow as tf
In [14]:
# Load the test-score dataset; every row is [feature1, feature2, feature3, target],
# parsed as float32 so it feeds TensorFlow placeholders directly.
dataset = np.loadtxt('./dataset/data-01-test-score.csv', delimiter=',',
                     dtype=np.float32)
# Features: all columns but the last. Target: last column, kept 2-D (n, 1).
x = dataset[:, :-1]
y = dataset[:, [-1]]
In [16]:
# Sanity check: shape, contents, and row count of features and targets.
for arr in (x, y):
    print(arr.shape, arr, len(arr))
In [18]:
# Build the multivariable linear-regression graph: hypothesis = X @ W + b.
# shape=[None, 3] accepts any number of rows, each with 3 feature columns.
X = tf.placeholder(tf.float32, shape=[None, 3])
Y = tf.placeholder(tf.float32, shape=[None, 1])

# Trainable parameters, randomly initialised.
W = tf.Variable(tf.random_normal([3, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

hypothesis = tf.matmul(X, W) + b

# Mean squared error, minimised with plain gradient descent.
cost = tf.reduce_mean(tf.square(hypothesis - Y))
gradient_descent = tf.train.GradientDescentOptimizer(learning_rate=1e-5)
train = gradient_descent.minimize(cost)
In [19]:
# Open a session and initialise the variables (W, b) before training.
sess = tf.Session()
init_op = tf.global_variables_initializer()
sess.run(init_op)
In [21]:
# Run 2001 gradient-descent steps, logging the loss and the current
# predictions every 100 steps.
for step in range(2001):
    fetched = sess.run([cost, hypothesis, train],
                       feed_dict={X: x, Y: y})
    loss_value, predictions, _ = fetched
    if step % 100 == 0:
        print(step, "cost:", loss_value, "\nprediction:\n", predictions)
In [ ]:
'AI > Deep Learning' 카테고리의 다른 글
딥러닝: 04. Logistic Classification(로지스틱 회귀) (0) | 2019.07.30 |
딥러닝: 02. 경사하강법(Gradient descent algorithm) (0) | 2019.07.30 |
딥러닝 : 01. Tensorflow의 정의 (0) | 2019.07.26 |
딥러닝 : 로지스틱 회귀 코딩 (0) | 2019.07.25 |
titanic : Machine Learning from Disaster - kaggle 연습 (0) | 2019.07.23 |
댓글