# Gist by @psaitu, created September 18, 2019
import numpy as np
import math


class Linear():
    # DO NOT DELETE
    def __init__(self, in_feature, out_feature):
        self.in_feature = in_feature
        self.out_feature = out_feature
        self.W = np.random.randn(out_feature, in_feature)
        self.b = np.zeros(out_feature)
        self.dW = np.zeros(self.W.shape)
        self.db = np.zeros(self.b.shape)

    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        # x: (batch, in_feature) -> out: (batch, out_feature)
        self.x = x
        self.out = x.dot(self.W.T) + self.b
        return self.out

    def backward(self, delta):
        # delta: (batch, out_feature); gradients must match W (out, in) and b (out,)
        self.db = delta.sum(axis=0)
        self.dW = np.dot(delta.T, self.x)
        dx = np.dot(delta, self.W)
        return dx
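
# A minimal numerical gradient check for Linear (a sketch; the probe index,
# epsilon, and tolerance below are assumptions, not part of the original gist).
def _check_linear_grad():
    np.random.seed(0)
    layer = Linear(4, 3)
    x = np.random.randn(2, 4)
    delta = np.ones_like(layer(x))   # upstream gradient of sum(out)
    layer.backward(delta)
    eps = 1e-6
    i, j = 1, 2
    # Perturb one weight and compare the analytic gradient to finite differences.
    layer.W[i, j] += eps
    plus = layer.forward(x).sum()
    layer.W[i, j] -= 2 * eps
    minus = layer.forward(x).sum()
    layer.W[i, j] += eps
    numeric = (plus - minus) / (2 * eps)
    assert abs(numeric - layer.dW[i, j]) < 1e-4
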
class Conv1D():
    def __init__(self, in_channel, out_channel,
                 kernel_size, stride):
        self.in_channel = in_channel
        self.out_channel = out_channel
        self.kernel_size = kernel_size
        self.stride = stride
        self.W = np.random.randn(out_channel, in_channel, kernel_size)
        self.b = np.zeros(out_channel)
        self.dW = np.zeros(self.W.shape)
        self.db = np.zeros(self.b.shape)

    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        # x: (batch, in_channel, width)
        self.batch, in_channel, self.width = x.shape
        self.x = x
        assert in_channel == self.in_channel, \
            'Expected the inputs to have {} channels'.format(self.in_channel)
        output_size = int(math.floor((self.width - self.kernel_size) / self.stride) + 1)
        y = np.zeros((self.batch, self.out_channel, output_size))
        for batch_number in range(self.batch):
            for channel in range(self.out_channel):
                for location in range(output_size):
                    # Slice out the receptive field and correlate it with the filter.
                    a0 = x[batch_number][:, location * self.stride:(location * self.stride + self.kernel_size)]
                    # Add the bias once per output element, not once per filter tap.
                    y[batch_number, channel, location] = np.sum(a0 * self.W[channel]) + self.b[channel]
        return y
    def backward(self, delta):
        # delta: (batch, out_channel, output_size)
        self.batch, _, self.width = self.x.shape
        output_size = int(math.floor((self.width - self.kernel_size) / self.stride) + 1)
        dx = np.zeros(self.x.shape)
        for batch_number in range(self.batch):
            for channel in range(self.out_channel):
                for location in range(output_size):
                    start = location * self.stride
                    end = start + self.kernel_size
                    # Each output element spreads its gradient back over its receptive field.
                    dx[batch_number, :, start:end] += self.W[channel] * delta[batch_number, channel, location]
                    self.dW[channel] += delta[batch_number, channel, location] * self.x[batch_number, :, start:end]
                    self.db[channel] += delta[batch_number, channel, location]
        return dx
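
# Worked check of the Conv1D output-size formula (a sketch; the shapes below
# are assumptions for illustration): with width=10, kernel_size=3, stride=2,
# floor((10 - 3) / 2) + 1 = 4 output locations.
def _check_conv1d():
    conv = Conv1D(in_channel=2, out_channel=5, kernel_size=3, stride=2)
    x = np.random.randn(1, 2, 10)
    assert conv(x).shape == (1, 5, 4)
    # With an all-ones delta, db[c] counts each of the 4 output locations once.
    conv.backward(np.ones((1, 5, 4)))
    assert np.allclose(conv.db, 4.0)
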
class Flatten():
    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        # Collapse everything but the batch dimension: (batch, c, w) -> (batch, c * w).
        self.in_shape = x.shape
        return x.reshape(x.shape[0], -1)

    def backward(self, delta):
        # Undo the reshape so gradients line up with the original input.
        return delta.reshape(self.in_shape)
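
# Sanity check for Flatten (a sketch with assumed shapes): backward should
# exactly invert forward.
def _check_flatten():
    f = Flatten()
    a = np.random.randn(2, 3, 5)
    flat = f(a)
    assert flat.shape == (2, 15)
    assert np.array_equal(f.backward(flat), a)
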
class ReLU():
    def __call__(self, x):
        return self.forward(x)

    def forward(self, x):
        # Cache the 0/1 mask so backward can reuse it.
        self.dy = (x >= 0).astype(x.dtype)
        return x * self.dy

    def backward(self, delta):
        return self.dy * delta
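
# A minimal end-to-end sketch chaining the layers above; the sizes here are
# assumptions for illustration, not fixed by the original gist.
if __name__ == '__main__':
    _check_linear_grad()
    _check_conv1d()
    _check_flatten()

    conv, act, flat, fc = Conv1D(2, 4, 3, 2), ReLU(), Flatten(), Linear(16, 3)
    x = np.random.randn(1, 2, 10)    # (batch, channels, width)
    out = fc(flat(act(conv(x))))     # (1, 3)
    # Backpropagate a dummy gradient through the whole stack.
    grad = conv.backward(act.backward(flat.backward(fc.backward(np.ones_like(out)))))
    assert grad.shape == x.shape
    print('all checks passed')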