# 03_Tensor_Manipulation.ipynb_uploaded_180414_15_04.html
# More than 70% of tensorflow codes are dealing with 'tensor'
# You should be familiar with tensor objects, operations on tensors, and tensor manipulation
# Most of tensor api is similar to numpy api
# So,it's good idea to study numpy api first
# @
# Let's see numpy n-dimensional array and tf.Tensor
import tensorflow as tf
import numpy as np
# Build a 1-D numpy array holding the 100 integers 0..99.
one_dimension_100_elements_nparray=np.arange(100)
# print(one_dimension_100_elements_nparray)
# array([ 0, 1, 2, ..., 99])
# one_dimension_100_elements_nparray.shape
# (100,)
# A numpy ndarray can be passed directly as the initial value of a tf.Variable.
tf_variable_made_from_nparray_node=tf.Variable(one_dimension_100_elements_nparray)
# print("tf_variable_made_from_nparray_node",tf_variable_made_from_nparray_node)
sess_object=tf.Session()
# Initialize all tensorflow variables.
# FIX: tf.initialize_all_variables() has been deprecated since TF 0.12;
# tf.global_variables_initializer() is the supported replacement.
sess_object.run(tf.global_variables_initializer())
# numpy array object is compatible to tf.tensor
# Result of sess_object.run() is always numpy array object,
# or list of numpy array objects
# print(sess_object.run(tf_variable_made_from_nparray_node))
# [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23
# 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47
# 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71
# 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95
# 96 97 98 99]
# print(type(sess_object.run(tf_variable_made_from_nparray_node)))
#
# print("sess_object.run(tf_variable_made_from_nparray_node).shape",sess_object.run(tf_variable_made_from_nparray_node).shape)
# (100,)
# @
# Let's talk about reshape
# print("np.arange(20)",np.arange(20))
# [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19]
# 1 dimension
# 20 elements
# View the flat 20-element range as a (4, 5) matrix.
reshaped_oned_20elements_nparray=np.arange(20).reshape(4, 5)
# shape -> (4, 5)
# [[ 0  1  2  3  4]
#  [ 5  6  7  8  9]
#  [10 11 12 13 14]
#  [15 16 17 18 19]]
# The same 20 values can equally be viewed as a (2, 10) matrix.
reshaped_4by5_nparray=reshaped_oned_20elements_nparray.reshape(2, 10)
# shape -> (2, 10)
# [[ 0  1  2  3  4  5  6  7  8  9]
#  [10 11 12 13 14 15 16 17 18 19]]
# print(reshaped_4by5_nparray.shape)
# (2,10)
# print(reshaped_4by5_nparray)
# [[ 0 1 2 3 4 5 6 7 8 9]
# [10 11 12 13 14 15 16 17 18 19]]
# You can reshape tensorflow tensors
# 2nd argument: dimension information
# tf.reshape(tensor_you_want_to_reshape,[dim,dim,...] )
# For example,suppose you want to [4,5] shape tensor to [5,4] shape tensor
# tf.reshape(tensor, shape) reshapes a tensor (numpy arrays are accepted too)
# into a new shape; the total element count must stay the same.
# Reshape the (4, 5) array into a (5, 4) tensor node.
reshaped_4by5_nparray_to_tf_tensor_node=tf.reshape(reshaped_oned_20elements_nparray, shape=[5, 4])
# Tensor("Reshape:0", shape=(5, 4), dtype=int64)
# Running the node yields:
# [[ 0  1  2  3]
#  [ 4  5  6  7]
#  [ 8  9 10 11]
#  [12 13 14 15]
#  [16 17 18 19]]
# Reshape the same (4, 5) array into a (1, 20) tensor node.
reshaped_oned_20elements_nparray_to_tf_tensor_node=tf.reshape(reshaped_oned_20elements_nparray, shape=[1, 20])
# Tensor("Reshape_1:0", shape=(1, 20), dtype=int64)
# [[ 0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19]]
# A -1 entry asks tf.reshape to infer that dimension, which is very useful:
# [10, -1] on 20 elements is inferred as (10, 2).
reshaped_1by20_nparray_to_tf_tensor_node=tf.reshape(reshaped_oned_20elements_nparray, shape=[10, -1])
# Tensor("Reshape_2:0", shape=(10, 2), dtype=int64)
# [[ 0  1]
#  [ 2  3]
#  [ 4  5]
#  [ 6  7]
#  [ 8  9]
#  [10 11]
#  [12 13]
#  [14 15]
#  [16 17]
#  [18 19]]
# [-1] alone flattens any array into a 1-D tensor.
reshaped_1by20_nparray_to_tf_tensor4_node=tf.reshape(reshaped_oned_20elements_nparray, shape=[-1])
# Tensor("Reshape_3:0", shape=(20,), dtype=int64)
# [ 0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19]
# @
# Let's talk about split and concatenation
# You can split tensor into number_of_split tensors along one dimension
# tf.split(
# value,
# num_or_size_splits,
# axis=0,
# num=None,
# name='split'
# )
# You can concatenate tensors along one dimension
# tf.concat(values,axis,name='concat')
# (4,5)
# [[ 0 1 2 3 4]
# [ 5 6 7 8 9]
# [10 11 12 13 14]
# [15 16 17 18 19]]
# Horizontal split (axis=1) by tf.split(reshaped_oned_20elements_nparray,5,1)
# [[ 0]
# [ 5]
# [10]
# [15]]
# -------
# [[ 1]
# [ 6]
# [11]
# [16]]
# -------
# [[ 2]
# [ 7]
# [12]
# [17]]
# -------
# [[ 3]
# [ 8]
# [13]
# [18]]
# -------
# [[ 4]
# [ 9]
# [14]
# [19]]
# -------
# print("reshaped_oned_20elements_nparray",reshaped_oned_20elements_nparray)
# [[ 0 1 2 3 4]
# [ 5 6 7 8 9]
# [10 11 12 13 14]
# [15 16 17 18 19]]
# print("reshaped_oned_20elements_nparray",reshaped_oned_20elements_nparray.shape)
# (4, 5)
# You want to split the [4,5] nparray into 5 tensors along axis 1
# Split the (4, 5) array into 5 pieces along axis 1; each piece has shape (4, 1).
horizontally_splited_4by5_nparray_to_one_d_5_tensors=tf.split(
    reshaped_oned_20elements_nparray, num_or_size_splits=5, axis=1)
sess_object.run(horizontally_splited_4by5_nparray_to_one_d_5_tensors)
# tf.split returns a Python list of 5 tensor nodes;
# running that list produces a list of 5 numpy arrays.
result=sess_object.run(horizontally_splited_4by5_nparray_to_one_d_5_tensors)
# for i in result: print(i); print("-------")
# [[ 0]
# [ 5]
# [10]
# [15]]
# -------
# [[ 1]
# [ 6]
# [11]
# [16]]
# -------
# [[ 2]
# [ 7]
# [12]
# [17]]
# -------
# [[ 3]
# [ 8]
# [13]
# [18]]
# -------
# [[ 4]
# [ 9]
# [14]
# [19]]
# -------
# Concatenating the 5 split pieces back along axis 1 rebuilds the (4, 5) tensor.
t_h_c=tf.concat(values=result, axis=1)
# print("t_h_c",t_h_c)
# Tensor("concat:0",shape=(4,5),dtype=int64)
# print("sess_object.run(t_h_c)",sess_object.run(t_h_c))
# [[ 0 1 2 3 4]
# [ 5 6 7 8 9]
# [10 11 12 13 14]
# [15 16 17 18 19]]
# (4,5)
# [[ 0 1 2 3 4]
# [ 5 6 7 8 9]
# [10 11 12 13 14]
# [15 16 17 18 19]]
# You can perform vertical split(0 axis)
# Vertical split (axis=0): the (4, 5) array becomes 4 tensors of shape (1, 5).
t_v=tf.split(reshaped_oned_20elements_nparray, num_or_size_splits=4, axis=0)
result=sess_object.run(t_v)
# print("type of result t_v : ",type(result))
# type of result t_v :
# print "len. of result t_v : ",len(result)
# len. of result t_v : 4
# print "Shape of splitted element : ",result[0].shape
# Shape of splitted element : (1,5)
# for i in result: print i; print "-------"
# tensor1: [[0 1 2 3 4]]
# -------
# tensor2: [[5 6 7 8 9]]
# -------
# tensor3: [[10 11 12 13 14]]
# -------
# tensor4: [[15 16 17 18 19]]
# -------
# t_v_c=tf.concat(0,result)
# print(t_v_c)
# Tensor("concat_1:0",shape=(4,5),dtype=int64)
# print(sess_object.run(t_v_c))
# [[ 0 1 2 3 4]
# [ 5 6 7 8 9]
# [10 11 12 13 14]
# [15 16 17 18 19]]
# Expand & Squeeze
# tf.expand_dims(input,dim,name=None)
# Inserts a dimension of 1 into a tensor's shape.
# tf.squeeze(input,squeeze_dims=None,name=None)
# Removes dimensions of size 1 from the shape of a tensor.
# print("np.arange(9)",np.arange(9))
# [0 1 2 3 4 5 6 7 8]
# Build a (3, 3) matrix holding 0..8:
# [[0 1 2]
#  [3 4 5]
#  [6 7 8]]
reshaped_1by9_nparray_to_3by3_nparray=np.arange(9).reshape(3,3)
# Insert a new size-1 dimension at index 0: (3, 3) -> (1, 3, 3).
# BUG FIX: the original call passed axis 2 (which yields shape (3, 3, 1)),
# but the variable name, the comment above, and the expected output
# shape=(1, 3, 3) below all describe axis 0.
expanded_dimension_at0_node=tf.expand_dims(reshaped_1by9_nparray_to_3by3_nparray,0)
# print("expanded_dimension_at0_node",expanded_dimension_at0_node)
# Tensor("ExpandDims:0", shape=(1, 3, 3), dtype=int64)
# print("expanded_dimension_at0_node",sess_object.run(expanded_dimension_at0_node))
# tf.expand_dims(reshaped_1by9_nparray_to_3by3_nparray,0)
# 0 is squared bracket at most outer area
# Dimension is added at 0 index in shape=(1, 3, 3)
# From (3,3), to Tensor("ExpandDims:0", shape=(1, 3, 3), dtype=int64)
# [[[0 1 2]
# [3 4 5]
# [6 7 8]]]
# tf.expand_dims(reshaped_1by9_nparray_to_3by3_nparray,1)
# From (3,3), to Tensor("ExpandDims:0", shape=(3, 1, 3), dtype=int64)
# 1 is squared bracket at 2nd outer area
# Dimension is added at 1 index in shape=(3, 1, 3)
# [[[0 1 2]]
# [[3 4 5]]
# [[6 7 8]]]
# tf.expand_dims(reshaped_1by9_nparray_to_3by3_nparray,2)
# From (3,3), to Tensor("ExpandDims:0", shape=(3, 3, 1), dtype=int64)
# 2 is squared bracket at 3rd outer area
# Dimension is added at 2 index in shape=(3, 3, 1)
# [[[0]
# [1]
# [2]]
# [[3]
# [4]
# [5]]
# [[6]
# [7]
# [8]]]
# Tensor("ExpandDims:0", shape=(1, 3, 3), dtype=int64)
# [[[0 1 2]
# [3 4 5]
# [6 7 8]]]
# squeeze() removes all dimensions of size 1
# Squeeze away every size-1 dimension, restoring shape (3, 3).
squeezed_tensor_node=tf.squeeze(input=expanded_dimension_at0_node)
# print("squeezed_tensor_node",squeezed_tensor_node)
# Tensor("Squeeze:0", shape=(3, 3), dtype=int64)
# print("sess_object.run(squeezed_tensor_node)",sess_object.run(squeezed_tensor_node))
# [[0 1 2]
# [3 4 5]
# [6 7 8]]
# You can insert new dimension at 1
# Insert a new size-1 dimension at index 1: (3, 3) -> (3, 1, 3).
expanded_dimension_at1_node=tf.expand_dims(input=reshaped_1by9_nparray_to_3by3_nparray, axis=1)
# print("expanded_dimension_at1_node",expanded_dimension_at1_node)
# Tensor("ExpandDims_1:0", shape=(3, 1, 3), dtype=int64)
# print("sess_object.run(expanded_dimension_at1_node)",sess_object.run(expanded_dimension_at1_node))
# [[[0 1 2]]
# [[3 4 5]]
# [[6 7 8]]]
# shape=(3, 1, 3)
# [[[0 1 2]]
# [[3 4 5]]
# [[6 7 8]]]
# Squeeze away the size-1 dimension: (3, 1, 3) -> (3, 3).
squeezed_expanded_dimension_at1_node_node=tf.squeeze(input=expanded_dimension_at1_node)
# print("squeezed_expanded_dimension_at1_node_node",squeezed_expanded_dimension_at1_node_node)
# Tensor("Squeeze_1:0", shape=(3, 3), dtype=int64)
# print("sess_object.run(squeezed_expanded_dimension_at1_node_node)",sess_object.run(squeezed_expanded_dimension_at1_node_node))
# [[0 1 2]
# [3 4 5]
# [6 7 8]]
# expand two times
# expanded_dimension_at2_node=tf.expand_dims(reshaped_1by9_nparray_to_3by3_nparray,2)
# print("expanded_dimension_at2_node",expanded_dimension_at2_node)
# Tensor("ExpandDims_2:0",shape=(3,3,1),dtype=int64)
# print("sess_object.run(expanded_dimension_at2_node)",sess_object.run(expanded_dimension_at2_node))
# [[[0]
# [1]
# [2]]
# [[3]
# [4]
# [5]]
# [[6]
# [7]
# [8]]]
# t_d_3=tf.expand_dims(expanded_dimension_at2_node,3)
# print("t_d_3",t_d_3)
# Tensor("ExpandDims_3:0",shape=(3,3,1,1),dtype=int64)
# print("sess_object.run(t_d_3)",sess_object.run(t_d_3))
# [[[[0]]
# [[1]]
# [[2]]]
# [[[3]]
# [[4]]
# [[5]]]
# [[[6]]
# [[7]]
# [[8]]]]
# shape=(3,3,1,1)
# t_s_3=tf.squeeze(t_d_3)
# print("t_s_3",t_s_3)
# Tensor("Squeeze_2:0",shape=(3,3),dtype=int64)
# print("sess_object.run(t_s_3)",sess_object.run(t_s_3))
# [[0 1 2]
# [3 4 5]
# [6 7 8]]
# @
# Stack & Unstack
# tf.stack(
# values,
# axis=0,
# name='stack'
# )
# This method stacks list of rank-R tensors into one rank-(R+1) tensor.
# tf.unstack(
# value,
# num=None,
# axis=0,
# name='unstack'
# )
# This method unstacks given dimension of rank-R tensor into rank-(R-1) tensors
# Three rank-1 value lists.
x=[1, 2, 3]
y=[4, 5, 6]
z=[7, 8, 9]
xyz_list=[x, y, z]
# tf.stack along axis=0 turns the three rank-1 tensors into one rank-2
# tensor of shape (3, 3).
stacked_list_to_tensor_node=tf.stack(values=xyz_list, axis=0)
# Tensor("stack:0", shape=(3, 3), dtype=int32)
# tf.unstack is the inverse: one rank-2 tensor back into a Python list of
# three rank-1 tensors of shape (3,).
unstacked_tensor_node=tf.unstack(stacked_list_to_tensor_node)
# print("unstacked_tensor_node",unstacked_tensor_node)
# [,
# ,
# ]
# for i in unstacked_tensor_node:
# print i
# Tensor("unstack:0",shape=(3,),dtype=int32)
# Tensor("unstack:1",shape=(3,),dtype=int32)
# Tensor("unstack:2",shape=(3,),dtype=int32)
# for i in unstacked_tensor_node:
# print sess_object.run(i)
# [1 2 3]
# [4 5 6]
# [7 8 9]
# You should be aware
# that you can do the same task with tf.split & tf.reshape
# But you should know split_numbers and target shape,
# especially in this 3rd case
# Same effect as tf.unstack via tf.split along axis 0 — but here the split
# count must be given explicitly, and each piece keeps its rank: shape (1, 3).
t_sp=tf.split(stacked_list_to_tensor_node, num_or_size_splits=3, axis=0)
# print("t_sp",t_sp)
# [,
# ,
# ]
# for i in t_sp:
# print i
# Tensor("split_2:0",shape=(1,3),dtype=int32)
# Tensor("split_2:1",shape=(1,3),dtype=int32)
# Tensor("split_2:2",shape=(1,3),dtype=int32)
# for i in t_sp:
# print sess_object.run(i)
# [[1 2 3]]
# [[4 5 6]]
# [[7 8 9]]
# You can perform transpose
# tf.transpose(
# a,
# perm=None,
# name='transpose',
# conjugate=False
# )
# This method transposes "a",
# and permutes dimensions according to perm
# print("arange(8).reshape(2,4)",np.arange(8).reshape(2,4))
# [[0 1 2 3]
# [4 5 6 7]]
# A (2, 4) matrix to demonstrate transpose:
# [[0 1 2 3]
#  [4 5 6 7]]
n_t=np.arange(8).reshape(2, 4)
# tf.transpose with no perm argument reverses the dimension order:
# (2, 4) -> (4, 2).
t_tr=tf.transpose(n_t)
# Tensor("transpose:0", shape=(4, 2), dtype=int64)
# [[0 4]
#  [1 5]
#  [2 6]
#  [3 7]]
# Transposing again restores the original (2, 4) layout.
t_tr_r=tf.transpose(t_tr)
# Tensor("transpose_1:0", shape=(2, 4), dtype=int64)
# print("t_tr_r",t_tr_r)
# Tensor("transpose_1:0",shape=(2,4),dtype=int64)
# print sess_object.run(t_tr_r)
# [[0 1 2 3]
# [4 5 6 7]]
# You can create tensor node with initial values
# You will create tensor node with all elements set to 1
# tf.ones(shape,dtype=tf.float32,name=None)
# You will creates tensor with all elements set to zero.
# tf.zeros(shape,dtype=tf.float32,name=None)
# You will creates tensor filled with scalar value.
# tf.fill(dims,value,name=None)
# A (2, 7) tensor with every element set to 1 (float32 by default).
t_1=tf.ones(shape=[2, 7])
# Tensor("ones:0", shape=(2, 7), dtype=float32)
# A (2, 7) tensor with every element set to 0.
t_0=tf.zeros(shape=[2, 7])
# Tensor("zeros:0", shape=(2, 7), dtype=float32)
# print t_0
# Tensor("zeros:0",shape=(2,7),dtype=float32)
# print sess_object.run(t_0)
# [[ 0. 0. 0. 0. 0. 0. 0.]
# [ 0. 0. 0. 0. 0. 0. 0.]]
# t_f=tf.fill([2,7],77)
# print t_f
# Tensor("Fill:0",shape=(2,7),dtype=int32)
# print sess_object.run(t_f)
# [[77 77 77 77 77 77 77]
# [77 77 77 77 77 77 77]]
# You can copy shape of other tensor
# You can creates tensor with all elements set to 1
# tf.ones_like(tensor,dtype=None,name=None,optimize=True)
# You can creates tensor with all elements set to zero
# tf.zeros_like(tensor,dtype=None,name=None,optimize=True)
# n_sh=np.arange(20).reshape(4,5)
# print n_sh.shape
# (4,5)
# print n_sh
# [[ 0 1 2 3 4]
# [ 5 6 7 8 9]
# [10 11 12 13 14]
# [15 16 17 18 19]]
# t_o=tf.ones_like(n_sh)
# print t_o
# Tensor("ones_like:0",shape=(4,5),dtype=int64)
# print sess_object.run(t_o)
# [[1 1 1 1 1]
# [1 1 1 1 1]
# [1 1 1 1 1]
# [1 1 1 1 1]]
# t_z=tf.zeros_like(n_sh)
# print t_z
# Tensor("zeros_like:0",shape=(4,5),dtype=int64)
# print sess_object.run(t_z)
# [[0 0 0 0 0]
# [0 0 0 0 0]
# [0 0 0 0 0]
# [0 0 0 0 0]]
# Tiling tensor
# You can construct tensor by tiling given tensor
# tf.tile(input,multiples,name=None)
# n_r=np.arange(3)
# print n_r.shape
# (3,)
# print n_r
# [0 1 2]
# You will tile to horizontal
# t_ti=tf.tile(n_r,[3])
# print t_ti
# Tensor("Tile:0",shape=(9,),dtype=int64)
# print sess_object.run(t_ti)
# [0 1 2 0 1 2 0 1 2]
# n_r2=np.arange(6).reshape(2,3)
# print n_r2.shape
# (2,3)
# print n_r2
# [[0 1 2]
# [3 4 5]]
# You will tile horizontally 2 times & vertically 3 times
# t_ti2=tf.tile(n_r2,[2,3])
# print t_ti2
# Tensor("Tile_1:0",shape=(4,9),dtype=int64)
# print sess_object.run(t_ti2)
# [[0 1 2 0 1 2 0 1 2]
# [3 4 5 3 4 5 3 4 5]
# [0 1 2 0 1 2 0 1 2]
# [3 4 5 3 4 5 3 4 5]]