
[ML] Logistic Classification

by graygreat 2018. 5. 20.

Source Code


logistic_regression.py

# Lab 5 Logistic Regression Classifier
import tensorflow as tf
 
x_data = [[1, 2],
          [2, 3],
          [3, 1],
          [4, 3],
          [5, 3],
          [6, 2]]
y_data = [[0],
          [0],
          [0],
          [1],
          [1],
          [1]]
 
# placeholders for tensors that will always be fed
X = tf.placeholder(tf.float32, shape=[None, 2])
Y = tf.placeholder(tf.float32, shape=[None, 1])
 
W = tf.Variable(tf.random_normal([2, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
 
# Hypothesis using sigmoid: 1 / (1 + exp(-(XW + b)))
hypothesis = tf.sigmoid(tf.matmul(X, W) + b)
 
# cost/loss function
cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1 - hypothesis))
 
train = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(cost)
 
# Accuracy computation
# True if hypothesis > 0.5 else False
predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))
 
# Launch graph
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
 
    for step in range(10001):
        cost_val, _ = sess.run([cost, train], feed_dict={X: x_data, Y: y_data})
        if step % 200 == 0:
            print(step, cost_val)
 
    # Accuracy report
    h, c, a = sess.run([hypothesis, predicted, accuracy], feed_dict={X: x_data, Y: y_data})
    print("\nHypothesis: ", h, "\nCorrect(Y): ", c, "\nAccuracy: ", a)
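
To sanity-check the trained classifier on inputs that are not in the training data, the same predicted tensor can be evaluated with a new feed_dict. A minimal sketch, assuming it is placed inside the with tf.Session() block above, after the training loop; the two sample points are made up for illustration:

    # Sketch: classify unseen points with the trained model (illustrative values only).
    # This runs inside the `with tf.Session() as sess:` block, after training.
    new_x = [[1, 1], [6, 6]]
    new_pred = sess.run(predicted, feed_dict={X: new_x})
    print("\nNew inputs: ", new_x, "\nPredicted classes: ", new_pred)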

