Reading data from a file in TensorFlow
- First, you need to save a CSV file.
-> If you don't know how to save a CSV file -> see the post "csv 파일 만들기" (Making a CSV file); a short sketch is also given below.
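For reference, here is a minimal sketch of writing such a CSV file with NumPy. The rows below are hypothetical placeholder values (three feature columns plus one label column), not necessarily the contents of the lecture's data-01-test-score.csv.

```python
import numpy as np

# Hypothetical example rows: three exam-score features and one final-score label.
sample = np.array([[73., 80., 75., 152.],
                   [93., 88., 93., 185.],
                   [89., 91., 90., 180.]], dtype=np.float32)

# Save without a header so np.loadtxt can read it back directly.
np.savetxt('data-01-test-score.csv', sample, delimiter=',', fmt='%.1f')
```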
```python
import tensorflow as tf
import numpy as np
import os

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
tf.set_random_seed(777)  # for reproducibility

xy = np.loadtxt('data-01-test-score.csv', delimiter=',', dtype=np.float32)
x_data = xy[:, 0:-1]
y_data = xy[:, [-1]]

# Make sure the shape and data are OK
print(x_data.shape, x_data, len(x_data))
print(y_data.shape, y_data)

# placeholders for a tensor that will be always fed.
X = tf.placeholder(tf.float32, shape=[None, 3])
Y = tf.placeholder(tf.float32, shape=[None, 1])

W = tf.Variable(tf.random_normal([3, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

# Hypothesis
hypothesis = tf.matmul(X, W) + b

# Simplified cost/loss function
cost = tf.reduce_mean(tf.square(hypothesis - Y))

# Minimize
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1e-5)
train = optimizer.minimize(cost)

# Launch the graph in a session.
sess = tf.Session()
# Initializes global variables in the graph.
sess.run(tf.global_variables_initializer())

# Set up feed_dict variables inside the loop.
for step in range(2001):
    cost_val, hy_val, _ = sess.run(
        [cost, hypothesis, train], feed_dict={X: x_data, Y: y_data})
    if step % 1000 == 0:
        print(step, "Cost: ", cost_val, "\nPrediction:\n", hy_val)

# Ask my score
print("Your score will be ",
      sess.run(hypothesis, feed_dict={X: [[100, 70, 101]]}))

print("Other scores will be ",
      sess.run(hypothesis, feed_dict={X: [[60, 70, 110], [90, 100, 80]]}))
```
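Note: the code above uses the TensorFlow 1.x graph API (tf.placeholder, tf.Session). If only TensorFlow 2.x is installed, one way to keep the script running unchanged is the compat.v1 shim; a minimal sketch, assuming TensorFlow 2.x:

```python
# The compat.v1 module re-exposes the 1.x graph API used above.
import tensorflow.compat.v1 as tf
tf.disable_eager_execution()   # bring back Session/placeholder-style execution

tf.set_random_seed(777)        # the rest of the script can then stay as-is
```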
- Execution result
- Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA
If a warning like the one above appears -> see the post "Your CPU supports instructions"
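For reference, this AVX2/FMA message is only informational. One common way to hide it (already done in the script above) is to raise the TensorFlow C++ log level before importing TensorFlow:

```python
import os

# '2' suppresses INFO and WARNING messages from the TensorFlow C++ backend;
# the variable must be set before `import tensorflow`.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

import tensorflow as tf
```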