MNIST-1LNN  1.0
A simple 1-layer neural network to recognize handwritten single-digit numbers from the MNIST image files.
1lnn.c
/**
 * @file 1lnn.c
 * @brief Machine learning functionality for a 1-layer neural network
 * @author Matt Lind
 * @date July 2015
 */

#include <stdlib.h>
#include <string.h>
#include <math.h>

#include "mnist-utils.h"
#include "1lnn.h"


/**
 * @details Returns an output vector with targetIndex set to 1, all others to 0
 */

Vector getTargetOutput(int targetIndex){
    Vector v;
    for (int i=0; i<NUMBER_OF_OUTPUT_CELLS; i++){
        v.val[i] = (i==targetIndex) ? 1 : 0;
    }
    return v;
}


/**
 * @details Initialize layer by setting all weights to random values [0-1]
 * @attention It actually makes no difference whether the weights are
 * initialized to a constant (e.g. 0.5) or to a random number.
 * The result (85% success rate) will not change significantly.
 */

void initLayer(Layer *l){

    for (int o=0; o<NUMBER_OF_OUTPUT_CELLS; o++){

        for (int i=0; i<NUMBER_OF_INPUT_CELLS; i++){
            l->cell[o].input[i]=0;
            l->cell[o].weight[i]=rand()/(double)(RAND_MAX);
        }

        l->cell[o].output = 0;
        l->cell[o].bias = 0;
    }
}


/**
 * @details The output prediction is derived by scanning all output values
 * and using the index (= digit 0-9) of the highest value as the prediction.
 */

int getLayerPrediction(Layer *l){

    double maxOut = 0;
    int maxInd = 0;

    for (int i=0; i<NUMBER_OF_OUTPUT_CELLS; i++){

        if (l->cell[i].output > maxOut){
            maxOut = l->cell[i].output;
            maxInd = i;
        }
    }

    return maxInd;
}


/**
 * @details Creates an input vector of length NUMBER_OF_INPUT_CELLS
 * for a given MNIST image, setting input vector cells to [0,1]
 * based on the pixels of the image.
 * Grey-scale pixel intensity is ignored; each input is set to
 * either 0 or 1 (black/white).
 */

void setCellInput(Cell *c, MNIST_Image *img){

    for (int i=0; i<NUMBER_OF_INPUT_CELLS; i++){
        c->input[i] = img->pixel[i] ? 1 : 0;
    }
}


/**
 * @details Calculates a cell's output by summing all input-weight products
 * and normalizing the result to [0-1].
 */

void calcCellOutput(Cell *c){

    c->output=0;

    for (int i=0; i<NUMBER_OF_INPUT_CELLS; i++){
        c->output += c->input[i] * c->weight[i];
    }

    c->output /= NUMBER_OF_INPUT_CELLS;     // normalize output (0-1)
}


/**
 * @details Returns the difference between a target value and the cell's output
 */

double getCellError(Cell *c, int target){

    double err = target - c->output;

    return err;
}


/**
 * @details Updates a cell's weights based on given error and LEARNING_RATE
 */

void updateCellWeights(Cell *c, double err){

    for (int i=0; i<NUMBER_OF_INPUT_CELLS; i++){
        c->weight[i] += LEARNING_RATE * c->input[i] * err;
    }
}
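
/* Note: this update is the standard delta rule for a linear unit,
 *   w_i += LEARNING_RATE * input_i * (target - output),
 * where (target - output) is the err value returned by getCellError(). */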



/**
 * @details Performs the training algorithm:
 * feed input, calculate output, calculate error, update weights
 */

void trainCell(Cell *c, MNIST_Image *img, int target){

    setCellInput(c, img);
    calcCellOutput(c);

    // learning (by updating the weights)
    double err = getCellError(c, target);
    updateCellWeights(c, err);
}


/**
 * @details Performs the testing of the trained network.
 * Same as training a cell, but without updating the weights (no learning).
 */

void testCell(Cell *c, MNIST_Image *img, int target){

    setCellInput(c, img);
    calcCellOutput(c);
}
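
The listing above contains only the per-cell building blocks. The sketch below shows one way they could be wired into a complete training and testing run. It is illustrative only: the getNext*() loader functions are hypothetical stand-ins for the MNIST file readers provided by mnist-utils.h, and the loop bounds are the standard MNIST dataset sizes (60,000 training and 10,000 test images).

/* Sketch of a full training/testing run built on the functions above.
 * The getNext*() loaders are hypothetical placeholders for the MNIST
 * file readers in mnist-utils.h. */

#include <stdio.h>

#include "mnist-utils.h"
#include "1lnn.h"

MNIST_Image getNextTrainingImage(void);   // hypothetical loader
int         getNextTrainingLabel(void);   // hypothetical loader
MNIST_Image getNextTestImage(void);       // hypothetical loader
int         getNextTestLabel(void);       // hypothetical loader

void runTrainingAndTesting(void){

    Layer layer;
    initLayer(&layer);                                    // random weights in [0,1]

    // TRAINING: every output cell is trained towards 1 if it matches
    // the image's label and towards 0 otherwise.
    for (int n=0; n<60000; n++){                          // standard MNIST training-set size
        MNIST_Image img = getNextTrainingImage();
        int lbl         = getNextTrainingLabel();

        Vector target = getTargetOutput(lbl);
        for (int o=0; o<NUMBER_OF_OUTPUT_CELLS; o++){
            trainCell(&layer.cell[o], &img, target.val[o]);
        }
    }

    // TESTING: forward pass only; the prediction is the index of the
    // output cell with the highest activation.
    int correct = 0;
    for (int n=0; n<10000; n++){                          // standard MNIST test-set size
        MNIST_Image img = getNextTestImage();
        int lbl         = getNextTestLabel();

        Vector target = getTargetOutput(lbl);
        for (int o=0; o<NUMBER_OF_OUTPUT_CELLS; o++){
            testCell(&layer.cell[o], &img, target.val[o]);
        }

        if (getLayerPrediction(&layer) == lbl) correct++;
    }

    printf("Test accuracy: %.2f%%\n", 100.0 * correct / 10000);
}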