"No one is harder on a talented person than the person themselves" - Linda Wilkinson ; "Trust your guts and don't follow the herd" ; "Validate direction not destination" ;

March 25, 2019

Day #223 - Pytorch - Sessions (9-12)



#Session #1
import torch
import numpy as np
data = np.array([1,2,2])
#Class constructor - creates a copy of the data (default dtype float32)
t1 = torch.Tensor(data)
#Factory function - also creates a copy, but infers the dtype from the data
t2 = torch.tensor(data)
#These share memory with the numpy array instead of copying
t3 = torch.as_tensor(data)
t4 = torch.from_numpy(data)
print(t1)
print(t1.dtype)
#Type is inferred from the data, or set explicitly with dtype
print(torch.tensor(data))
print(torch.tensor(data,dtype=torch.float64))
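A quick sketch of the copy vs share behaviour above: mutating the original numpy array only shows up in the tensors that share its memory (as_tensor / from_numpy), not in the copies.
data[0] = 99
#Copies are unaffected
print(t1)
print(t2)
#Shared-memory tensors see the change
print(t3)
print(t4)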
#Session #2
#Tensor Operations
#Reshape
# 3 x 4 tensor
t = torch.tensor([[1,1,1,1],[2,2,2,2],[3,3,3,3]],dtype=torch.float32)
print(t.size())
print(t.shape)
#Rank of a tensor - the length of its shape
print(len(t.shape))
#Product of the shape components gives the element count
print(torch.tensor(t.shape).prod())
#Number of elements via numel()
print(t.numel())
#Change the shape of a tensor by squeezing and unsqueezing it
#Squeeze removes all axes with a length of 1
#Use this to build a flatten function
print(t.reshape(1,12).squeeze())
print(t.reshape(1,12).squeeze().shape)
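Unsqueeze is the inverse sketch: it adds a length-1 axis back at a chosen position.
print(t.reshape(1,12).squeeze().unsqueeze(dim=0))
print(t.reshape(1,12).squeeze().unsqueeze(dim=0).shape)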
#Flatten function to get a 1D array
def flatten(t):
    #Reshape with -1: the second dimension is inferred
    #from the number of elements (here reshape will find 12)
    t = t.reshape(1, -1)
    t = t.squeeze()
    return t
print(flatten(t))
print(t.reshape(1,12))
#Combine tensors by stacking them along a new axis
t1 = torch.tensor([[1,1,1,1],[1,1,1,1],[1,1,1,1],[1,1,1,1]])
t2 = torch.tensor([[2,2,2,2],[2,2,2,2],[2,2,2,2],[2,2,2,2]])
t3 = torch.tensor([[3,3,3,3],[3,3,3,3],[3,3,3,3],[3,3,3,3]])
t = torch.stack((t1,t2,t3))
print(t.shape)
#Batch of 3 images, each 4 x 4
#torch.Size([3, 4, 4])
#Add a color channel axis
t = t.reshape(3,1,4,4)
print(t)
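An equivalent sketch for adding the channel axis is unsqueeze at dim 1 on the stacked batch:
print(torch.stack((t1,t2,t3)).unsqueeze(dim=1).shape)
#torch.Size([3, 1, 4, 4])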
#First image in the batch
print(t[0])
#First color channel of the first image
print(t[0][0])
#First row of the first color channel
print(t[0][0][0])
#Ways to flatten the image tensors within the batch tensor
print(t.reshape(1,-1)[0])
print(t.reshape(-1))
print(t.view(t.numel()))
#Built-in flatten method
print(t.flatten())
#For predictions we need each image flattened separately, so keep the batch axis
#Flatten only from the second axis onward (start_dim=1)
print(t.flatten(start_dim=1).shape)
print(t.flatten(start_dim=1))
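The same result can be sketched with reshape, keeping the batch size and letting -1 infer the rest:
print(t.reshape(t.shape[0],-1).shape)
#torch.Size([3, 16])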
#Element-wise operations
t1 = torch.tensor([[1,2],[3,4]])
t2 = torch.tensor([[9,8],[7,6]])
#Index along the first axis
print(t1[0])
#Index along the second axis
print(t1[0][0])
#Perform element-wise operation
print(t1+t2)
print(t1*t2)
print(t1-t2)
print(t1/t2)
#Broadcasting - the lower-rank tensor is broadcast up to the higher-rank tensor's shape
t1 = torch.tensor([[1,1],[1,1]],dtype=torch.float32)
t2 = torch.tensor([2,4],dtype=torch.float32)
print(t1+t2)
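A sketch of what broadcasting does under the hood: t2 is (virtually) expanded to t1's shape before the element-wise add; numpy's broadcast_to is used here just to visualize it.
print(np.broadcast_to(t2.numpy(), t1.shape))
#Same result as t1 + t2 above
print(t1 + torch.tensor(np.broadcast_to(t2.numpy(), t1.shape)))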
#Reduction Ops
print(t1.sum())
print(t1.mean())
print(t1.std())
print(t1.sum(dim=1))
print(t1.max(dim=1))
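Note that max along a dim returns both the max values and their indices (the argmax); a small sketch:
values, indices = t1.max(dim=1)
print(values)
print(indices)
print(t1.argmax(dim=1))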
Link - 13th Session

Happy Mastering DL!!!
