Commit 58bc720: rename

ShivamShrirao committed Aug 11, 2020
1 parent 2a46ff5 commit 58bc720

Showing 9 changed files with 48 additions and 48 deletions. Every change is the same mechanical rename: calls to Reshape(...) become the standard lowercase ndarray method reshape(...), across both the CPU (nnet) and GPU (nnet_gpu) packages.

14 changes: 7 additions & 7 deletions nnet/cnn_old.py

@@ -90,17 +90,17 @@ def conv2d(self, inp, kernels, biases, stride=[1, 1], padding=0): # padding=(ks
     padded = np.zeros((batches, d, row, col))
     padded[:, :, padding:-padding, padding:-padding] = inp
     # Take all windows into a matrix
-    kern = kernels.Reshape(-1, num_ker)
+    kern = kernels.reshape(-1, num_ker)
     window = (np.arange(ksz)[:, None] * row + np.arange(ksz)).ravel() + np.arange(d)[:, None] * row * col
     slider = (np.arange(out_row * stride[0])[:, None] * row + np.arange(out_col * stride[1]))
     ind = window.ravel() + slider[::stride[0], ::stride[1]].ravel()[:, None]
     output = np.empty((batches, out_row * out_col, num_ker))
     for i, img in enumerate(padded):  # img[d,row,col]
         # windows(out_row*out_col, ksz*ksz*d) . kernels(d*ksz*ksz,num_ker)
         output[i] = np.dot(np.take(img, ind), kern) + biases
-    # output=np.array([(np.dot(np.take(i,ind),kern)+biases) for i in padded]).Reshape(batches,out_row,out_col,num_ker)
+    # output=np.array([(np.dot(np.take(i,ind),kern)+biases) for i in padded]).reshape(batches,out_row,out_col,num_ker)
     # bind= np.arange(batches)[:,None]*d*row*col+ind.ravel() #for batches
-    # output=(np.dot(np.take(padded, bind).Reshape(-1,d*ksz*ksz), kern)+biases)
+    # output=(np.dot(np.take(padded, bind).reshape(-1,d*ksz*ksz), kern)+biases)
     # [batches*out_row*out_col,d*ksz*ksz] . [d*ksz*ksz, num_ker]
     return output.reshape(batches, out_row, out_col, num_ker)

@@ -118,7 +118,7 @@ def conv2d_back(self, errors, inp, kernels, biases, stride=[1, 1], layer=1): #
         d_inputs = self.conv2d(errors, flipped, 0)
     else:
         d_inputs = 0
-    d_bias = errors.Reshape(-1, num_ker).mean(axis=0)[None, :]
+    d_bias = errors.reshape(-1, num_ker).mean(axis=0)[None, :]

     return d_inputs, d_kernels * self.learning_rate, d_bias * self.learning_rate

@@ -127,15 +127,15 @@ def max_pool(self, inp, ksize=[2, 2], stride=[2, 2]):
     ksz = ksize[0]
     batches, row, col, d = inp.shape
     out_row, out_col = row // ksz, col // ksz
-    ipp = inp.Reshape(batches, out_row, ksz, out_col, ksz, d)
+    ipp = inp.reshape(batches, out_row, ksz, out_col, ksz, d)
     output = ipp.max(axis=(2, 4), keepdims=True)
     mask = ((ipp - output) == 0)
     # [batches,o_row,o_col,d]
-    return output.squeeze().Reshape(batches, out_row, out_col, d), mask
+    return output.squeeze().reshape(batches, out_row, out_col, d), mask

 def max_pool_back(self, errors, inp, mask, ksize=[2, 2], stride=[2, 2]):
     # errors[batches,esz,esz,d],inp[batches,row,col,d],kernels[ksz,ksz],stride[row,col]
     ksz = ksize[0]
     batches, row, col, d = inp.shape
     out_row, out_col = row // ksz, col // ksz
-    return (mask * errors.Reshape(batches, out_row, 1, out_col, 1, d)).Reshape(inp.shape)
+    return (mask * errors.reshape(batches, out_row, 1, out_col, 1, d)).reshape(inp.shape)
6 changes: 3 additions & 3 deletions nnet/coled_tracker.py

@@ -22,7 +22,7 @@ def alloc(self, coled_size, obj):
     self.COLED = np.empty(coled_size, dtype=self.dtype)
     for oo in self.objs:
         try:
-            oo.coled = self.COLED.ravel()[:oo.coled.size].Reshape(oo.coled.shape)
+            oo.coled = self.COLED.ravel()[:oo.coled.size].reshape(oo.coled.shape)
         except:
             self.objs.remove(oo)
     self.objs.add(obj)

@@ -35,7 +35,7 @@ def alloc(self, coled_size, obj):
     self.COLED = np.empty(coled_size, dtype=self.dtype)
     for oo in self.objs:
         try:
-            oo.coled = self.COLED.ravel()[:oo.coled.size].Reshape(oo.coled.shape)
+            oo.coled = self.COLED.ravel()[:oo.coled.size].reshape(oo.coled.shape)
         except:
             self.objs.remove(oo)
     self.objs.add(obj)

@@ -54,5 +54,5 @@ def free(self):
     if self.COLED.nbytes > mx.coled.nbytes:
         self.COLED = np.empty(mx.coled.size, dtype=self.dtype)
         for oo in self.objs:
-            oo.coled = self.COLED.ravel()[:oo.coled.size].Reshape(oo.coled.shape)
+            oo.coled = self.COLED.ravel()[:oo.coled.size].reshape(oo.coled.shape)
     collect()
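
The point of this tracker, untouched by the rename, is that every layer's coled scratch space is a view into one shared flat buffer, so growing the buffer reallocates once and every consumer is re-pointed at it. A tiny illustration of why the ravel-slice-reshape chain allocates no new memory (variable names are mine):

import numpy as np

pool = np.empty(1 << 20, dtype=np.float32)    # one shared scratch buffer
a = pool[:600].reshape(20, 30)                # view handed to one layer
b = pool[:512].reshape(16, 32)                # view handed to another layer
assert np.shares_memory(a, pool) and np.shares_memory(b, pool)
a[:] = 1.0                                    # scratch writes land in the shared bytes
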
42 changes: 21 additions & 21 deletions nnet/layers.py

@@ -76,7 +76,7 @@ def __init__(self, num_kernels=0, input_shape=None, kernel_size=0, kernels=None,
     else:
         self.kernel_size = kernels.shape[1]
         self.num_kernels = kernels.shape[3]
-    self.kern = self.kernels.Reshape(-1, self.num_kernels)
+    self.kern = self.kernels.reshape(-1, self.num_kernels)
     self.weights = self.kernels
     self.padding = padding
     self.dilation = dilation

@@ -113,8 +113,8 @@ def __init__(self, num_kernels=0, input_shape=None, kernel_size=0, kernels=None,
     slider = (np.arange(self.out_row * stride[0])[:, None] * self.prow + np.arange(self.out_col * stride[1]))
     self.ind = window.ravel() + slider[::stride[0], ::stride[1]].ravel()[:, None]
     self.output = np.empty((self.batches, self.out_row * self.out_col, self.num_kernels), dtype=self.dtype)
-    # self.coled=np.empty((self.batches,*self.ind.shape),dtype=self.dtype).Reshape(-1,self.channels*self.kernel_size*self.kernel_size)
-    self.coled = COLT.alloc(self.ind.size * self.batches, self).Reshape(-1, self.channels * self.kernel_size * self.kernel_size)
+    # self.coled=np.empty((self.batches,*self.ind.shape),dtype=self.dtype).reshape(-1,self.channels*self.kernel_size*self.kernel_size)
+    self.coled = COLT.alloc(self.ind.size * self.batches, self).reshape(-1, self.channels * self.kernel_size * self.kernel_size)
     COLT.free()
     # bind= np.arange(self.batches)[:,None]*self.channels*self.prow*self.pcol+self.ind.ravel() #for self.batches
     self.shape = (None, self.out_row, self.out_col, self.num_kernels)

@@ -173,8 +173,8 @@ def forward(self, inp, training=True):
             1])).ravel() + np.arange(self.channels)[:, None] * self.prow * self.pcol + self.off_transpose
     slider = (np.arange(self.out_row * self.stride[0])[:, None] * self.prow + np.arange(self.out_col * self.stride[1]))
     self.ind = window.ravel() + slider[::self.stride[0], ::self.stride[1]].ravel()[:, None]
-    # self.coled=np.empty((self.batches,*self.ind.shape),dtype=self.dtype).Reshape(-1,self.channels*self.kernel_size*self.kernel_size)
-    self.coled = COLT.alloc(self.ind.size * self.batches, self).Reshape(-1, self.channels * self.kernel_size * self.kernel_size)
+    # self.coled=np.empty((self.batches,*self.ind.shape),dtype=self.dtype).reshape(-1,self.channels*self.kernel_size*self.kernel_size)
+    self.coled = COLT.alloc(self.ind.size * self.batches, self).reshape(-1, self.channels * self.kernel_size * self.kernel_size)
     COLT.free()
     if not self.is_not_dker:
         if self.padding:

@@ -188,20 +188,20 @@ def forward(self, inp, training=True):
             self.padding:-self.padding:self.dlate[1]] = self.inp  # this takes time. FIX
         else:
             self.padded[:, :, ::self.dlate[0], ::self.dlate[1]] = self.inp
-    self.kern = self.kernels.Reshape(-1, self.num_kernels)
+    self.kern = self.kernels.reshape(-1, self.num_kernels)
     # for i,img in enumerate(self.padded): #img[self.channels,self.row,self.col]
     # windows(out_row*out_col, kernel_size*kernel_size*channels) . kernels(channels*kernel_size*kernel_size,num_kernels)
     # self.output[i]=np.dot(img.take(self.ind), self.kern)
-    # output=np.array([(np.dot(np.take(i,self.ind),self.kern)+self.biases) for i in padded]).Reshape(self.batches,self.out_row,self.out_col,self.num_kernels)
-    # output=(np.dot(np.take(padded, bind).Reshape(-1,self.channels*kernel_size*kernel_size), self.kern)+self.biases)
+    # output=np.array([(np.dot(np.take(i,self.ind),self.kern)+self.biases) for i in padded]).reshape(self.batches,self.out_row,self.out_col,self.num_kernels)
+    # output=(np.dot(np.take(padded, bind).reshape(-1,self.channels*kernel_size*kernel_size), self.kern)+self.biases)
     # [self.batches*self.out_row*self.out_col,self.channels*kernel_size*kernel_size] . [self.channels*kernel_size*kernel_size, self.num_kernels]
     ctake.take(c_void_p(np.ascontiguousarray(self.padded).ctypes.data), c_void_p(self.ind.ctypes.data),
             c_void_p(self.coled.ctypes.data), c_int(self.batches), c_int(self.padded[0].size), c_int(self.ind.size),
             c_int(NUM_THREADS))
     self.output = self.coled.dot(self.kern)
     if self.bias_is_not_0:
         self.output += self.biases
-    self.z_out = self.output.Reshape(self.batches, self.out_row, self.out_col, self.num_kernels)
+    self.z_out = self.output.reshape(self.batches, self.out_row, self.out_col, self.num_kernels)
     self.a_out = self.activation(self.z_out)
     return self.a_out

@@ -277,11 +277,11 @@ def forward(self, inp, training=True):
     inp = inp.reshape(self.batches, self.out_row, self.ksz, self.out_col, self.ksz, self.channels)
     output = inp.max(axis=(2, 4), keepdims=True)
     self.mask = (inp == output)
-    return output.Reshape(self.batches, self.out_row, self.out_col, self.channels)
+    return output.reshape(self.batches, self.out_row, self.out_col, self.channels)

 def backprop(self, grads, layer=1):
     # grads[self.batches,esz,esz,self.channels],inp[self.batches,row,col,self.channels],kernels[self.ksz,self.ksz],stride[row,col]
-    z_out = (self.mask * grads.Reshape(self.batches, self.out_row, 1, self.out_col, 1, self.channels))
+    z_out = (self.mask * grads.reshape(self.batches, self.out_row, 1, self.out_col, 1, self.channels))
     if self.rem_col:
         self.padded[:, :-self.rem_col, :-self.rem_col, :] = z_out.reshape(self.batches, self.row, self.col, self.channels)
         return self.padded.reshape(self.input_shape)

@@ -309,13 +309,13 @@ def __init__(self, input_shape=None, name=None):
 def forward(self, inp, training=True):
     self.input_shape = inp.shape
     self.batches = self.input_shape[0]
-    inp = inp.Reshape(self.batches, self.Ncount, self.channels)
+    inp = inp.reshape(self.batches, self.Ncount, self.channels)
     output = inp.mean(axis=1)
-    return output.Reshape(self.batches, self.channels)
+    return output.reshape(self.batches, self.channels)

 def backprop(self, grads, layer=1):
     # grads/=self.Ncount
-    z_out = grads.repeat(self.Ncount, axis=0).Reshape(self.batches, self.row, self.col, self.channels)
+    z_out = grads.repeat(self.Ncount, axis=0).reshape(self.batches, self.row, self.col, self.channels)
     return z_out

@@ -345,8 +345,8 @@ def forward(self, inp, training=True):

 def backprop(self, grads, layer=1):
     # grads[self.batches,esz,esz,self.channels],inp[self.batches,row,col,self.channels],kernels[self.ksz,self.ksz],stride[row,col]
-    grads = grads.Reshape(self.input_shape[0], self.row, self.ksz, self.col, self.ksz, self.channels)
-    return grads.sum(axis=(2, 4), keepdims=True).Reshape(self.input_shape)
+    grads = grads.reshape(self.input_shape[0], self.row, self.ksz, self.col, self.ksz, self.channels)
+    return grads.sum(axis=(2, 4), keepdims=True).reshape(self.input_shape)


 class flatten(Layer):

@@ -366,10 +366,10 @@ def __init__(self, name=None):
     self.activation = echo

 def forward(self, inp, training=True):
-    return inp.Reshape(-1, self.fsz)
+    return inp.reshape(-1, self.fsz)

 def backprop(self, grads, layer=1):
-    return grads.Reshape(-1, self.r, self.c, self.channels)
+    return grads.reshape(-1, self.r, self.c, self.channels)


 class reshape(Layer):

@@ -389,16 +389,16 @@ def __init__(self, target_shape, name=None):
     for i in target_shape:
         tt /= i
     if tt != 1:
-        raise Exception("Cannot Reshape input " + str(self.input_shape) + " to " + str(target_shape) + '.')
+        raise Exception("Cannot reshape input " + str(self.input_shape) + " to " + str(target_shape) + '.')
     self.shape = (None, *target_shape)
     self.param = 0
     self.activation = echo

 def forward(self, inp, training=True):
-    return inp.Reshape(-1, *self.target_shape)
+    return inp.reshape(-1, *self.target_shape)

 def backprop(self, grads, layer=1):
-    return grads.Reshape(-1, *self.input_shape)
+    return grads.reshape(-1, *self.input_shape)


 class dense(Layer):
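
The max-pool layer in this file leans on a reshape-then-reduce trick: split each spatial axis into (output, window) pairs, reduce over the window axes, and reuse the comparison mask for backprop. A minimal sketch under the layer's own assumption that stride equals the window size (function names are mine):

import numpy as np

def max_pool(inp, ksz=2):
    # inp: (batches, row, col, channels), row and col divisible by ksz
    b, row, col, c = inp.shape
    o_r, o_c = row // ksz, col // ksz
    ipp = inp.reshape(b, o_r, ksz, o_c, ksz, c)
    out = ipp.max(axis=(2, 4), keepdims=True)
    mask = (ipp == out)                       # remembers which positions won
    return out.reshape(b, o_r, o_c, c), mask

def max_pool_back(grads, mask, inp_shape, ksz=2):
    b, row, col, c = inp_shape
    o_r, o_c = row // ksz, col // ksz
    # Broadcast each output gradient back over its ksz x ksz window, gated by the mask.
    return (mask * grads.reshape(b, o_r, 1, o_c, 1, c)).reshape(inp_shape)

As in the layer above, a tied maximum routes gradient to every maximal element of its window.
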
6 changes: 3 additions & 3 deletions nnet_gpu/coled_tracker.py

@@ -22,7 +22,7 @@ def alloc(self, coled_size, obj):
     self.COLED = np.empty(coled_size, dtype=self.dtype)
     for oo in self.objs:
         try:
-            oo.coled = self.COLED.ravel()[:oo.coled.size].Reshape(oo.coled.shape)
+            oo.coled = self.COLED.ravel()[:oo.coled.size].reshape(oo.coled.shape)
         except:
             self.objs.remove(oo)
     self.objs.add(obj)

@@ -35,7 +35,7 @@ def alloc(self, coled_size, obj):
     self.COLED = np.empty(coled_size, dtype=self.dtype)
     for oo in self.objs:
         try:
-            oo.coled = self.COLED.ravel()[:oo.coled.size].Reshape(oo.coled.shape)
+            oo.coled = self.COLED.ravel()[:oo.coled.size].reshape(oo.coled.shape)
         except:
             self.objs.remove(oo)
     self.objs.add(obj)

@@ -54,5 +54,5 @@ def free(self):
     if self.COLED.nbytes > mx.coled.nbytes:
         self.COLED = np.empty(mx.coled.size, dtype=self.dtype)
         for oo in self.objs:
-            oo.coled = self.COLED.ravel()[:oo.coled.size].Reshape(oo.coled.shape)
+            oo.coled = self.COLED.ravel()[:oo.coled.size].reshape(oo.coled.shape)
     collect()

4 changes: 2 additions & 2 deletions nnet_gpu/layers/convolution/conv2d.py

@@ -175,6 +175,6 @@ def backprop(self, grads, do_d_inp=True):
         d_inputs = 0
     if self.bias_is_not_0:
         with self.backp_stream:
-            self.d_c_b = grads.Reshape(-1, self.num_kernels).sum(axis=0, keepdims=True)
-            # self.d_c_b = grads.Reshape(-1, self.num_kernels).mean(axis=0, keepdims=True)
+            self.d_c_b = grads.reshape(-1, self.num_kernels).sum(axis=0, keepdims=True)
+            # self.d_c_b = grads.reshape(-1, self.num_kernels).mean(axis=0, keepdims=True)
     return d_inputs
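
The live change here is the bias gradient; the mean variant stays commented out. Since the bias is broadcast over batch and spatial positions in the forward pass, its gradient is the sum of grads over everything except the kernel axis, which is what the reshape(-1, num_kernels).sum(axis=0) idiom computes. A quick check with made-up shapes (NumPy stands in for CuPy; the calls are identical):

import numpy as np

grads = np.random.randn(8, 5, 5, 16)                     # (batches, out_row, out_col, num_kernels)
d_b = grads.reshape(-1, 16).sum(axis=0, keepdims=True)   # (1, 16): one value per kernel
assert np.allclose(d_b, grads.sum(axis=(0, 1, 2))[None, :])
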
4 changes: 2 additions & 2 deletions nnet_gpu/layers/convolution/conv2dtranspose.py

@@ -107,6 +107,6 @@ def backprop(self, grads, do_d_inp=True):
         d_inputs = 0
     if self.bias_is_not_0:
         with self.backp_stream:
-            self.d_c_b = grads.Reshape(-1, self.num_kernels).sum(axis=0, keepdims=True)
-            # self.d_c_b=grads.Reshape(-1,self.num_kernels).mean(axis=0,keepdims=True)
+            self.d_c_b = grads.reshape(-1, self.num_kernels).sum(axis=0, keepdims=True)
+            # self.d_c_b=grads.reshape(-1,self.num_kernels).mean(axis=0,keepdims=True)
     return d_inputs

6 changes: 3 additions & 3 deletions nnet_gpu/layers/pooling/globalAveragePool.py

@@ -24,11 +24,11 @@ def do_init(self, kwargs):
 def forward(self, inp, training=True):
     self.input_shape = inp.shape
     self.batches = self.input_shape[0]
-    inp = inp.Reshape(self.batches, self.Ncount, self.channels)
+    inp = inp.reshape(self.batches, self.Ncount, self.channels)
     output = inp.mean(axis=1)
-    return output.Reshape(self.batches, self.channels)
+    return output.reshape(self.batches, self.channels)

 def backprop(self, grads, do_d_inp=True):
     # grads/=self.Ncount
-    z_out = grads.repeat(self.Ncount, axis=0).Reshape(self.batches, self.row, self.col, self.channels)
+    z_out = grads.repeat(self.Ncount, axis=0).reshape(self.batches, self.row, self.col, self.channels)
     return z_out
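
Global average pooling's backward pass just spreads each channel gradient back over the Ncount spatial positions via repeat. The repo leaves the 1/Ncount scaling commented out; the sketch below includes it, since the derivative of a mean is 1/N per element (function names are mine):

import numpy as np

def gap_forward(inp):
    b, row, col, c = inp.shape
    return inp.reshape(b, row * col, c).mean(axis=1)      # (b, c)

def gap_backprop(grads, inp_shape):
    b, row, col, c = inp_shape
    n = row * col
    # d(mean)/dx_i = 1/n, so every spatial position receives grads / n.
    return (grads / n).repeat(n, axis=0).reshape(b, row, col, c)
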
10 changes: 5 additions & 5 deletions nnet_gpu/layers/pooling/maxpool.py

@@ -45,19 +45,19 @@ def forward(self, inp, training=True):
     # if self.batches!=batches:
     # self.padded=cp.zeros(self.input_shape,dtype=self.dtype)
     self.batches = batches
-    inp = inp.Reshape(self.batches, self.out_row, self.ksz, self.out_col, self.ksz, self.channels)
+    inp = inp.reshape(self.batches, self.out_row, self.ksz, self.out_col, self.ksz, self.channels)
     output = inp.max(axis=(2, 4), keepdims=True)
     self.out_event = stream_maps.default_stream.record(self.out_event)
     with self.mask_stream:
         self.mask_stream.wait_event(self.out_event)
         self.mask = (inp == output)
-    return output.Reshape(self.batches, self.out_row, self.out_col, self.channels)
+    return output.reshape(self.batches, self.out_row, self.out_col, self.channels)

 def backprop(self, grads, do_d_inp=True):
     # grads[self.batches,esz,esz,self.channels],inp[self.batches,row,col,self.channels],kernels[self.ksz,self.ksz],stride[row,col]
-    z_out = (self.mask * grads.Reshape(self.batches, self.out_row, 1, self.out_col, 1, self.channels))
+    z_out = (self.mask * grads.reshape(self.batches, self.out_row, 1, self.out_col, 1, self.channels))
     # if self.rem_col:
-    # self.padded[:,:-self.rem_col,:-self.rem_col,:]=z_out.Reshape(self.batches,self.row,self.col,self.channels)
-    # return self.padded.Reshape(self.input_shape)
+    # self.padded[:,:-self.rem_col,:-self.rem_col,:]=z_out.reshape(self.batches,self.row,self.col,self.channels)
+    # return self.padded.reshape(self.input_shape)
     # else:
     return z_out.reshape(self.input_shape)
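
Aside from the rename, note how forward records an event after the max and computes the argmax mask on a side stream, so the comparison can overlap with whatever the default stream does next. A stripped-down CuPy sketch of that event/stream handshake (stream and variable names are mine, not the repo's stream_maps API):

import cupy as cp

side = cp.cuda.Stream(non_blocking=True)
ready = cp.cuda.Event()

x = cp.random.randn(64, 14, 2, 14, 2, 32, dtype=cp.float32)
out = x.max(axis=(2, 4), keepdims=True)   # runs on the default stream
ready.record()                            # mark the point where out is valid
with side:
    side.wait_event(ready)                # don't compare before the max is done
    mask = (x == out)                     # overlaps later default-stream work
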
4 changes: 2 additions & 2 deletions nnet_gpu/layers/upsampling.py

@@ -32,5 +32,5 @@ def forward(self, inp, training=True):

 def backprop(self, grads, do_d_inp=True):
     # grads[self.batches,esz,esz,self.channels],inp[self.batches,row,col,self.channels],kernels[self.ksz,self.ksz],stride[row,col]
-    grads = grads.Reshape(self.input_shape[0], self.row, self.ksz, self.col, self.ksz, self.channels)
-    return grads.sum(axis=(2, 4), keepdims=True).Reshape(self.input_shape)
+    grads = grads.reshape(self.input_shape[0], self.row, self.ksz, self.col, self.ksz, self.channels)
+    return grads.sum(axis=(2, 4), keepdims=True).reshape(self.input_shape)
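
This backward pass is the transpose of nearest-neighbour upsampling: every input pixel fans out to a ksz x ksz block of outputs, so its gradient is that block's sum. A minimal sketch assuming the forward pass is a plain repeat along both spatial axes (names are mine):

import numpy as np

def upsample_forward(inp, ksz=2):
    # (b, row, col, c) -> (b, row*ksz, col*ksz, c), each pixel copied ksz*ksz times
    return inp.repeat(ksz, axis=1).repeat(ksz, axis=2)

def upsample_backprop(grads, inp_shape, ksz=2):
    b, row, col, c = inp_shape
    g = grads.reshape(b, row, ksz, col, ksz, c)
    return g.sum(axis=(2, 4))   # every copy contributed the same input pixel
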
