- Example 1: Hello World.
import tensorflow as tf
# Build a dataflow graph containing a single constant node.
hw = tf.constant("Hello World")
# Construct a `Session` to execute the graph.
with tf.Session() as sess:
    print(sess.run(hw))
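Running this evaluates the constant node and prints the string (under Python 3 it appears as b'Hello World', since TensorFlow returns string tensors as bytes).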
- Example 2: Multiplying two matrices.
import tensorflow as tf
# Build a dataflow graph.
c = tf.constant([[1.0, 2.0], [3.0, 4.0]])
d = tf.constant([[1.0, 1.0], [0.0, 1.0]])
e = tf.matmul(c, d)
# Construct a `Session` to execute the graph.
with tf.Session() as sess:
    # Execute the graph and store the value that `e` represents in `result`.
    result = sess.run(e)
    print(result)
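Multiplying [[1, 2], [3, 4]] by [[1, 1], [0, 1]] gives [[1, 3], [3, 7]], so the program prints the matrix [[1., 3.], [3., 7.]].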
- Example 3: Using feeding to pass in values at execution time.
import tensorflow as tf
# Build a dataflow graph.
c = tf.constant([[1.0, 2.0], [3.0, 4.0]])
d = tf.constant([[1.0, 1.0], [0.0, 1.0]])
e = tf.matmul(c, d)
# Construct a `Session` to execute the graph.
sess = tf.Session()
# Execute the graph and store the value that `e` represents in `result`.
result = sess.run(e, feed_dict={c: [[0.0, 0.0], [3.0, 4.0]]})
print(result)
sess.close()
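The feed_dict argument overrides the value of `c` for this single run, so the matrix product becomes [[0, 0], [3, 4]] times [[1, 1], [0, 1]], and the program prints [[0., 0.], [3., 7.]].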
One distinctive feature of TensorFlow is that nodes in its graph can carry state.
- Example 4: A stateful graph.
import tensorflow as tf
# Build a dataflow graph.
count = tf.Variable([0], trainable=False)
init_op = tf.global_variables_initializer()
update_count = count.assign_add(tf.constant([2]))
# Construct a `Session` to execute the graph.
sess = tf.Session()
sess.run(init_op)
for step in range(10):
    result = sess.run(update_count)
    print("step %d: count = %g" % (step, result))
sess.close()
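Because `count` is a variable, its state persists across `sess.run` calls: each step runs `assign_add`, so the printed counts are 2, 4, ..., 20.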
- Example 5: Gradient computation.
import tensorflow as tf
# Build a dataflow graph that reads one integer per line from `1.txt`.
filename_queue = tf.train.string_input_producer(['1.txt'], num_epochs=1)
reader = tf.TextLineReader()
key, value = reader.read(filename_queue)
num = tf.decode_csv(value, record_defaults=[[0]])
x = tf.Variable([0])
loss = x * num
# Compute the gradient of `loss` with respect to `x`.
grads = tf.gradients([loss], x)
grad_x = grads[0]

def train_fn(sess):
    train_fn.counter += 1
    result = sess.run(grad_x)
    print("step %d: grad = %g" % (train_fn.counter, result))

train_fn.counter = 0
# The Supervisor handles variable initialization and starts the queue runners;
# basic_train_loop repeatedly calls `train_fn` inside a managed session.
sv = tf.train.Supervisor()
tf.train.basic_train_loop(sv, train_fn)
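The example above mixes gradient computation with queue-based file input. As a minimal standalone sketch of just the tf.gradients mechanism (the function y = x * x and the initial value 3.0 are illustrative choices, not part of the original example):

import tensorflow as tf
x = tf.Variable(3.0)
y = x * x
# tf.gradients returns one gradient tensor per element of `xs`.
grad_y = tf.gradients([y], [x])[0]
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(grad_y))  # dy/dx = 2 * x = 6.0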