
[TensorFlow Basics] Common Function Usage

Author: 安安爸Chris | Source: https://www.haomeiwen.com/subject/tpgpiqtx.html | Published 2019-04-08 20:29

tf.zeros

Creates a tensor in which every element is 0.

import numpy as np
import tensorflow as tf

# TF 1.x style: build the graph, then evaluate it in a Session
# shape 2x3, all zeros
tensor = tf.zeros([2, 3])
with tf.Session() as sess:
    print("tensor=", sess.run(tensor))
    print("shape(np):", np.shape(tensor))
    print("shape(tf):", sess.run(tf.shape(tensor)))
    print("rank(tf):", sess.run(tf.rank(tensor)))
    print("size(tf):", sess.run(tf.size(tensor)))

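As a complementary sketch (assuming the same TF 1.x environment as the snippet above; int_zeros is just an illustrative name), tf.zeros defaults to float32 but also accepts an explicit dtype:

int_zeros = tf.zeros([2, 3], dtype=tf.int32)
with tf.Session() as sess:
    print(sess.run(int_zeros))            # [[0 0 0] [0 0 0]]
    print(sess.run(tf.shape(int_zeros)))  # [2 3]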

tf.reshape

Redefines the dimensions of an array, i.e. reshapes it to the given shape.

t1 = [2, 3, 4, 5, 6, 7, 8, 9, 1]  # 9 elements
tensor1 = tf.reshape(t1, [3, 3])  # -> shape (3, 3)
with tf.Session() as sess1:
    print("reshape:", sess1.run(tensor1))
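A further sketch under the same TF 1.x assumptions (t1_auto is an illustrative name): one dimension can be passed as -1 and tf.reshape infers it from the total element count:

t1_auto = tf.reshape([2, 3, 4, 5, 6, 7, 8, 9, 1], [3, -1])  # -1 is inferred as 3
with tf.Session() as sess1:
    print("reshape with -1:", sess1.run(t1_auto))  # [[2 3 4] [5 6 7] [8 9 1]]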

tf.expand_dims

Inserts a new dimension of size 1; the data is unchanged, only the shape changes.

t2 = [[1, 2], [3, 4], [5, 6]]
print("t2: ", t2, "#shape:", np.shape(t2))
t2_a = tf.expand_dims(t2, 0)
t2_b = tf.expand_dims(t2, 1)
t2_c = tf.expand_dims(t2, -1)
with tf.Session() as sess2:
    print("t2, expand_dims 0: ", sess2.run(t2_a), "#shape:", sess2.run(tf.shape(t2_a)))
    print("t2, expand_dims 1: ", sess2.run(t2_b), "#shape:", sess2.run(tf.shape(t2_b)))
    print("t2, expand_dims -1: ", sess2.run(t2_c), "#shape:", sess2.run(tf.shape(t2_c)))
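Since only the shape changes, the effect is easiest to read off the resulting shapes. A minimal sketch (same TF 1.x assumptions) printing just the shapes:

t = [[1, 2], [3, 4], [5, 6]]  # (3, 2)
with tf.Session() as sess:
    print(sess.run(tf.shape(tf.expand_dims(t, 0))))   # [1 3 2]
    print(sess.run(tf.shape(tf.expand_dims(t, 1))))   # [3 1 2]
    print(sess.run(tf.shape(tf.expand_dims(t, -1))))  # [3 2 1]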

tf.squeeze

Removes dimensions of size 1.

t3 = [[[2], [1]]]  # (1, 2, 1)
print("t3: ", t3, "#shape:", np.shape(t3))
t3_a = tf.squeeze(t3, 0)
t3_c = tf.squeeze(t3, -1)
with tf.Session() as sess3:
    print("t3, squeeze 0: ", sess3.run(t3_a), "#shape:", np.shape(t3_a))
    print("t3, squeeze -1: ", sess3.run(t3_c), "#shape:", np.shape(t3_c))
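When no axis is given, tf.squeeze removes every size-1 dimension at once; a minimal sketch (same TF 1.x assumptions):

t3 = [[[2], [1]]]  # (1, 2, 1)
with tf.Session() as sess:
    print(sess.run(tf.squeeze(t3)))            # [2 1]
    print(sess.run(tf.shape(tf.squeeze(t3))))  # [2], i.e. shape (2,)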

tf.concat

Concatenates two tensors along a specified dimension.

t4_1 = [[1, 2, 3, 4]]  # (1, 4)
t4_2 = [[5, 6, 7, 8]]  # (1, 4)
t4_concat = tf.concat([t4_1, t4_2], 1)  # join along axis 1 -> (1, 8)
with tf.Session() as sess4:
    print("concat:", sess4.run(t4_concat), "#shape:", np.shape(t4_concat))
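For comparison, a sketch of how the axis changes the result for the same two (1, 4) inputs (same TF 1.x assumptions; a and b are illustrative names):

a = [[1, 2, 3, 4]]
b = [[5, 6, 7, 8]]
with tf.Session() as sess:
    print(sess.run(tf.concat([a, b], 0)))  # [[1 2 3 4] [5 6 7 8]] -> shape (2, 4)
    print(sess.run(tf.concat([a, b], 1)))  # [[1 2 3 4 5 6 7 8]]   -> shape (1, 8)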

tf.stack

Also joins tensors, but differently from concat: stack joins them along a new axis, pairing up the corresponding sub-elements. For two 2-D tensors, axis=0 stacks the two tensors themselves into a pair, axis=1 pairs up corresponding rows, and axis=-1 pairs up corresponding elements.

t5_1 = [[10, 11, 12], [13, 14, 15]]  # (2, 3)
t5_2 = [[20, 21, 22], [23, 24, 25]]  # (2, 3)
t5_stack_h = tf.stack([t5_1, t5_2], axis=0)   # -> (2, 2, 3)
t5_stack_v = tf.stack([t5_1, t5_2], axis=-1)  # -> (2, 3, 2)
with tf.Session() as sess5:
    print("t5_1:", t5_1, "#shape:", np.shape(t5_1))
    print("t5_2:", t5_2, "#shape:", np.shape(t5_2))
    print("tf.stack axis=0:", sess5.run(t5_stack_h), "#shape:", np.shape(t5_stack_h))
    print("tf.stack axis=-1:", sess5.run(t5_stack_v), "#shape:", np.shape(t5_stack_v))
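The key difference from concat is that stack introduces a new axis, so the rank grows by one. A sketch of just the resulting shapes for two (2, 3) inputs (same TF 1.x assumptions; a and b are illustrative names):

a = [[10, 11, 12], [13, 14, 15]]
b = [[20, 21, 22], [23, 24, 25]]
with tf.Session() as sess:
    print(sess.run(tf.shape(tf.stack([a, b], axis=0))))   # [2 2 3]
    print(sess.run(tf.shape(tf.stack([a, b], axis=1))))   # [2 2 3]
    print(sess.run(tf.shape(tf.stack([a, b], axis=-1))))  # [2 3 2]
    print(sess.run(tf.shape(tf.concat([a, b], 0))))       # [4 3]  (concat, for contrast)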

tf.unstack

The opposite of tf.stack: tf.unstack splits a tensor along a given dimension into a list of tensors.

t6 = [[[1], [2]], [[3], [4]], [[5], [6]]]  # (3, 2, 1)
arr1 = tf.unstack(t6)
arr2 = tf.unstack(t6, axis=1)
arr3 = tf.unstack(t6, axis=-1)
with tf.Session() as sess6:
    print("unstack, axis=0:", sess6.run(arr1), "#shape:", np.shape(arr1))
    print("unstack, axis=1:", sess6.run(arr2), "#shape:", np.shape(arr2))
    print("unstack, axis=-1:", sess6.run(arr3), "#shape:", np.shape(arr3))
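tf.unstack returns a plain Python list whose length equals the size of the chosen axis, and each element has that axis removed. A sketch for the (3, 2, 1) input above (same TF 1.x assumptions; parts is an illustrative name):

parts = tf.unstack([[[1], [2]], [[3], [4]], [[5], [6]]])  # axis=0 -> 3 tensors of shape (2, 1)
print(len(parts))  # 3
with tf.Session() as sess:
    print(sess.run(parts[0]))  # [[1] [2]]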

tf.gather

Extracts selected elements from a 1-D array.

t7 = tf.range(0, 10) * 10 + tf.constant(1, shape=[10])  # a (10,) tensor: [1, 11, 21, ..., 91]
t7_gather = tf.gather(t7, [1, 5, 9])
with tf.Session() as sess7:
    print("t7:", sess7.run(t7), "#shape:", np.shape(t7))
    print("gather:", sess7.run(t7_gather), "#shape:", np.shape(t7_gather))

Of course, you can also gather along a specific dimension of a higher-rank tensor.

t7 = [[1], [2], [3], [4], [5], [6], [7]]  # (7, 1)
t7_gather = tf.gather(t7, [1, 5, 2], axis=0)
with tf.Session() as sess7:
    print("t7:", t7, "#shape:", np.shape(t7))
    print("gather:", sess7.run(t7_gather), "#shape:", np.shape(t7_gather))
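The axis argument also makes it possible to pick columns instead of rows. A sketch on a 2-D tensor (same TF 1.x assumptions; m is an illustrative name):

m = [[1, 2, 3], [4, 5, 6]]  # (2, 3)
with tf.Session() as sess:
    print(sess.run(tf.gather(m, [0, 2], axis=1)))  # columns 0 and 2 -> [[1 3] [4 6]]
    print(sess.run(tf.gather(m, [1], axis=0)))     # row 1 -> [[4 5 6]]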

tf.one_hot

Encodes a vector of integer labels as one-hot vectors: the position given by the label gets on_value, every other position gets off_value.

t8 = np.random.randint(0, 10, size=[10])  # 10 random labels in [0, 10)
t8_one_hot1 = tf.one_hot(t8, 11, on_value=1, off_value=0, axis=0)      # axis=0 -> (depth, batch) = (11, 10)
t8_one_hot2 = tf.one_hot(t8, 10, on_value=1, off_value=None, axis=-1)  # axis=-1 -> (batch, depth) = (10, 10)
with tf.Session() as sess8:
    print("randomint:", t8)
    print("one_hot:", sess8.run(t8_one_hot1), "#shape:", np.shape(t8_one_hot1))
    print("nonzeros:", sess8.run(tf.count_nonzero(t8_one_hot1)))
    print("one_hot:", sess8.run(t8_one_hot2), "#shape:", np.shape(t8_one_hot2))
    print("nonzeros:", sess8.run(tf.count_nonzero(t8_one_hot2)))
