Loading may or may not work. But probably.
pjreddie committed Nov 7, 2013
commit d7286c2 (parent: 9b1774b)
Showing 14 changed files with 155 additions and 57 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -4,6 +4,7 @@
images/
opencv/
convnet/
decaf/
cnn

# OS Generated #
2 changes: 1 addition & 1 deletion Makefile
@@ -4,7 +4,7 @@ CFLAGS=-Wall `pkg-config --cflags opencv` -O0 -g
LDFLAGS=`pkg-config --libs opencv` -lm
VPATH=./src/

OBJ=network.o image.o tests.o convolutional_layer.o connected_layer.o maxpool_layer.o activations.o
OBJ=network.o image.o tests.o convolutional_layer.o connected_layer.o maxpool_layer.o activations.o list.o option_list.o parser.o utils.o

all: cnn

11 changes: 11 additions & 0 deletions src/activations.c
@@ -1,6 +1,17 @@
#include "activations.h"

#include <math.h>
#include <stdio.h>
#include <string.h>

ACTIVATION get_activation(char *s)
{
if (strcmp(s, "sigmoid")==0) return SIGMOID;
if (strcmp(s, "relu")==0) return RELU;
if (strcmp(s, "identity")==0) return IDENTITY;
fprintf(stderr, "Couldn't find activation function %s, going with ReLU\n", s);
return RELU;
}

double identity_activation(double x)
{
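The new get_activation maps a configuration string to the ACTIVATION enum and falls back to RELU (with a warning on stderr) for unrecognized names. A minimal usage sketch, assuming only the headers in this commit; the calling code is illustrative and not part of the commit:

    #include <stdio.h>
    #include "activations.h"

    int main()
    {
        ACTIVATION a = get_activation("sigmoid");   /* SIGMOID */
        ACTIVATION b = get_activation("softplus");  /* unknown name: warns, falls back to RELU */
        printf("%d %d\n", a, b);                    /* 0 1 with the enum order above */
        return 0;
    }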
9 changes: 8 additions & 1 deletion src/activations.h
@@ -1,10 +1,17 @@
#ifndef ACTIVATIONS_H
#define ACTIVATIONS_H

typedef enum{
SIGMOID, RELU, IDENTITY
}ACTIVATOR_TYPE;
}ACTIVATION;

ACTIVATION get_activation(char *s);
double relu_activation(double x);
double relu_gradient(double x);
double sigmoid_activation(double x);
double sigmoid_gradient(double x);
double identity_activation(double x);
double identity_gradient(double x);

#endif

34 changes: 17 additions & 17 deletions src/connected_layer.c
@@ -4,34 +4,34 @@
#include <stdlib.h>
#include <string.h>

connected_layer make_connected_layer(int inputs, int outputs, ACTIVATOR_TYPE activator)
connected_layer *make_connected_layer(int inputs, int outputs, ACTIVATION activator)
{
int i;
connected_layer layer;
layer.inputs = inputs;
layer.outputs = outputs;
connected_layer *layer = calloc(1, sizeof(connected_layer));
layer->inputs = inputs;
layer->outputs = outputs;

layer.output = calloc(outputs, sizeof(double*));
layer->output = calloc(outputs, sizeof(double*));

layer.weight_updates = calloc(inputs*outputs, sizeof(double));
layer.weights = calloc(inputs*outputs, sizeof(double));
layer->weight_updates = calloc(inputs*outputs, sizeof(double));
layer->weights = calloc(inputs*outputs, sizeof(double));
for(i = 0; i < inputs*outputs; ++i)
layer.weights[i] = .5 - (double)rand()/RAND_MAX;
layer->weights[i] = .5 - (double)rand()/RAND_MAX;

layer.bias_updates = calloc(outputs, sizeof(double));
layer.biases = calloc(outputs, sizeof(double));
layer->bias_updates = calloc(outputs, sizeof(double));
layer->biases = calloc(outputs, sizeof(double));
for(i = 0; i < outputs; ++i)
layer.biases[i] = (double)rand()/RAND_MAX;
layer->biases[i] = (double)rand()/RAND_MAX;

if(activator == SIGMOID){
layer.activation = sigmoid_activation;
layer.gradient = sigmoid_gradient;
layer->activation = sigmoid_activation;
layer->gradient = sigmoid_gradient;
}else if(activator == RELU){
layer.activation = relu_activation;
layer.gradient = relu_gradient;
layer->activation = relu_activation;
layer->gradient = relu_gradient;
}else if(activator == IDENTITY){
layer.activation = identity_activation;
layer.gradient = identity_gradient;
layer->activation = identity_activation;
layer->gradient = identity_gradient;
}

return layer;
2 changes: 1 addition & 1 deletion src/connected_layer.h
@@ -16,7 +16,7 @@ typedef struct{
double (* gradient)();
} connected_layer;

connected_layer make_connected_layer(int inputs, int outputs, ACTIVATOR_TYPE activator);
connected_layer *make_connected_layer(int inputs, int outputs, ACTIVATION activator);

void run_connected_layer(double *input, connected_layer layer);
void learn_connected_layer(double *input, connected_layer layer);
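make_connected_layer now heap-allocates the layer and returns a pointer rather than a struct by value, so a network can keep it behind a void * without copying it; run_connected_layer and learn_connected_layer still take the struct by value. A sketch of the changed call pattern (the caller and the sizes here are assumptions for illustration):

    #include <stdlib.h>
    #include "activations.h"
    #include "connected_layer.h"

    int main()
    {
        double *input = calloc(784, sizeof(double));   /* illustrative input size */
        connected_layer *layer = make_connected_layer(784, 10, SIGMOID);

        run_connected_layer(input, *layer);    /* dereference: run/learn still take the struct by value */
        learn_connected_layer(input, *layer);

        free(input);
        return 0;
    }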
20 changes: 10 additions & 10 deletions src/convolutional_layer.c
@@ -10,20 +10,20 @@ double convolution_gradient(double x)
return (x>=0);
}

convolutional_layer make_convolutional_layer(int h, int w, int c, int n, int size, int stride)
convolutional_layer *make_convolutional_layer(int h, int w, int c, int n, int size, int stride)
{
int i;
convolutional_layer layer;
layer.n = n;
layer.stride = stride;
layer.kernels = calloc(n, sizeof(image));
layer.kernel_updates = calloc(n, sizeof(image));
convolutional_layer *layer = calloc(1, sizeof(convolutional_layer));
layer->n = n;
layer->stride = stride;
layer->kernels = calloc(n, sizeof(image));
layer->kernel_updates = calloc(n, sizeof(image));
for(i = 0; i < n; ++i){
layer.kernels[i] = make_random_kernel(size, c);
layer.kernel_updates[i] = make_random_kernel(size, c);
layer->kernels[i] = make_random_kernel(size, c);
layer->kernel_updates[i] = make_random_kernel(size, c);
}
layer.output = make_image((h-1)/stride+1, (w-1)/stride+1, n);
layer.upsampled = make_image(h,w,n);
layer->output = make_image((h-1)/stride+1, (w-1)/stride+1, n);
layer->upsampled = make_image(h,w,n);
return layer;
}

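The output grid is sized with integer arithmetic as (h-1)/stride+1 per dimension, which equals ceil(h/stride) for positive h. A quick check of the formula (values chosen purely for illustration):

    #include <stdio.h>

    int main()
    {
        int h = 28, stride;
        for(stride = 1; stride <= 4; ++stride){
            /* (h-1)/stride + 1 == ceil(h/stride) for h, stride > 0 */
            printf("h=%d stride=%d -> output side %d\n", h, stride, (h-1)/stride + 1);
        }
        return 0;   /* prints 28, 14, 10, 7 */
    }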
5 changes: 4 additions & 1 deletion src/convolutional_layer.h
@@ -12,9 +12,12 @@ typedef struct {
image output;
} convolutional_layer;

convolutional_layer make_convolutional_layer(int w, int h, int c, int n, int size, int stride);
convolutional_layer *make_convolutional_layer(int h, int w, int c, int n, int size, int stride);
void run_convolutional_layer(const image input, const convolutional_layer layer);
void learn_convolutional_layer(image input, convolutional_layer layer);
void update_convolutional_layer(convolutional_layer layer, double step);
void backpropagate_convolutional_layer(image input, convolutional_layer layer);
void backpropagate_convolutional_layer_convolve(image input, convolutional_layer layer);

#endif

1 change: 1 addition & 0 deletions src/image.h
@@ -14,6 +14,7 @@ void normalize_image(image p);
void threshold_image(image p, double t);
void zero_image(image m);
void rotate_image(image m);
void subtract_image(image a, image b);

void show_image(image p, char *name);
void show_image_layers(image p, char *name);
8 changes: 4 additions & 4 deletions src/maxpool_layer.c
@@ -1,10 +1,10 @@
#include "maxpool_layer.h"

maxpool_layer make_maxpool_layer(int h, int w, int c, int stride)
maxpool_layer *make_maxpool_layer(int h, int w, int c, int stride)
{
maxpool_layer layer;
layer.stride = stride;
layer.output = make_image((h-1)/stride+1, (w-1)/stride+1, c);
maxpool_layer *layer = calloc(1, sizeof(maxpool_layer));
layer->stride = stride;
layer->output = make_image((h-1)/stride+1, (w-1)/stride+1, c);
return layer;
}

2 changes: 1 addition & 1 deletion src/maxpool_layer.h
@@ -8,7 +8,7 @@ typedef struct {
image output;
} maxpool_layer;

maxpool_layer make_maxpool_layer(int h, int w, int c, int stride);
maxpool_layer *make_maxpool_layer(int h, int w, int c, int stride);
void run_maxpool_layer(const image input, const maxpool_layer layer);

#endif
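make_maxpool_layer follows the same pattern as the other constructors: it heap-allocates the layer and sizes its output image with (h-1)/stride+1. An illustrative sketch (the input dimensions are assumptions, not from this commit):

    #include "maxpool_layer.h"

    int main()
    {
        image in = make_image(224, 224, 3);                        /* h, w, c */
        maxpool_layer *layer = make_maxpool_layer(224, 224, 3, 2);

        run_maxpool_layer(in, *layer);
        /* layer->output is (224-1)/2+1 = 112 x 112 x 3 */
        return 0;
    }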
50 changes: 48 additions & 2 deletions src/network.c
@@ -5,6 +5,15 @@
#include "convolutional_layer.h"
#include "maxpool_layer.h"

network make_network(int n)
{
network net;
net.n = n;
net.layers = calloc(net.n, sizeof(void *));
net.types = calloc(net.n, sizeof(LAYER_TYPE));
return net;
}

void run_network(image input, network net)
{
int i;
@@ -84,9 +93,9 @@ void learn_network(image input, network net)
}
}

double *get_network_output(network net)

double *get_network_output_layer(network net, int i)
{
int i = net.n-1;
if(net.types[i] == CONVOLUTIONAL){
convolutional_layer layer = *(convolutional_layer *)net.layers[i];
return layer.output.data;
@@ -101,6 +110,43 @@ double *get_network_output(network net)
}
return 0;
}

int get_network_output_size_layer(network net, int i)
{
if(net.types[i] == CONVOLUTIONAL){
convolutional_layer layer = *(convolutional_layer *)net.layers[i];
return layer.output.h*layer.output.w*layer.output.c;
}
else if(net.types[i] == MAXPOOL){
maxpool_layer layer = *(maxpool_layer *)net.layers[i];
return layer.output.h*layer.output.w*layer.output.c;
}
else if(net.types[i] == CONNECTED){
connected_layer layer = *(connected_layer *)net.layers[i];
return layer.outputs;
}
return 0;
}

double *get_network_output(network net)
{
int i = net.n-1;
return get_network_output_layer(net, i);
}

image get_network_image_layer(network net, int i)
{
if(net.types[i] == CONVOLUTIONAL){
convolutional_layer layer = *(convolutional_layer *)net.layers[i];
return layer.output;
}
else if(net.types[i] == MAXPOOL){
maxpool_layer layer = *(maxpool_layer *)net.layers[i];
return layer.output;
}
return make_image(0,0,0);
}

image get_network_image(network net)
{
int i;
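make_network allocates the parallel layers/types arrays, and the new per-layer getters expose any layer's output buffer or size; get_network_output now simply calls the layer variant on the last index. A sketch of assembling a small network by hand with these constructors (layer sizes are illustrative, and the LAYER_TYPE constants such as CONNECTED are assumed to come from network.h, where network.c references them):

    #include <stdio.h>
    #include "network.h"
    #include "connected_layer.h"

    int main()
    {
        double *out;
        /* Illustrative only: a two-layer fully connected network assembled by hand. */
        network net = make_network(2);
        net.types[0] = CONNECTED;
        net.layers[0] = make_connected_layer(784, 100, RELU);
        net.types[1] = CONNECTED;
        net.layers[1] = make_connected_layer(100, 10, SIGMOID);

        printf("layer 0 output size: %d\n", get_network_output_size_layer(net, 0));   /* 100 */
        out = get_network_output(net);
        printf("last output[0] = %f\n", out[0]);   /* zero until the network is run */
        return 0;
    }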
6 changes: 5 additions & 1 deletion src/network.h
@@ -16,11 +16,15 @@ typedef struct {
LAYER_TYPE *types;
} network;

network make_network(int n);
void run_network(image input, network net);
double *get_network_output(network net);
void learn_network(image input, network net);
void update_network(network net, double step);
double *get_network_output(network net);
double *get_network_output_layer(network net, int i);
int get_network_output_size_layer(network net, int i);
image get_network_image(network net);
image get_network_image_layer(network net, int i);

#endif
