25 uint8_t *sbptr = (uint8_t*) l->nodes;
27 sbptr += nodeId * nodeSize;
51 uint8_t *sbptr = (uint8_t*) nn->layers;
58 uint8_t *sbptr = (uint8_t*) nn->layers;
86 if (actFct==TANH) dVal = 1-pow(tanh(outVal),2);
87 else dVal = outVal * (1-outVal);
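
Lines 86-87 above compute the activation derivative used during back-propagation: for TANH it is 1 - tanh^2 of the value, for the sigmoid it is out * (1 - out). A minimal standalone sketch of that computation (the enum values and the simplified signature are assumptions; the documented getActFctDerivative() below additionally takes the Network and LayerType to look up which activation a layer uses):

#include <math.h>

typedef enum { SIGMOID, TANH } ActFctType;          /* assumed enum; names taken from line 86 */

/* Derivative of the activation function, expressed via the node's output value. */
double actFctDerivativeSketch(ActFctType actFct, double outVal) {
    double dVal;
    if (actFct == TANH) dVal = 1 - pow(tanh(outVal), 2);   /* 1 - tanh^2, as on line 86 */
    else                dVal = outVal * (1 - outVal);       /* sigmoid: out * (1 - out), line 87 */
    return dVal;
}
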
109 int prevLayerNodeSize = 0;
118 uint8_t *sbptr = (uint8_t*) prevLayer->nodes;
120 for (int i=0; i<updateNode->wcount; i++){
123 sbptr += prevLayerNodeSize;
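
The updateNodeWeights() fragment above (lines 109-123) steps a byte pointer through the previous layer's packed nodes, adjusting one weight per previous-layer node. An array-based sketch of the same idea (the helper name and the exact update rule, weight += learningRate * previous output * error, are assumptions consistent with the learningRate description further down):

/* Hypothetical array-based weight update; the documented updateNodeWeights() walks the packed Layer instead. */
void updateWeightsSketch(double *weights, int wcount,
                         const double *prevOutputs,   /* outputs of the previous layer */
                         double error, double learningRate) {
    for (int i = 0; i < wcount; i++) {
        weights[i] += learningRate * prevOutputs[i] * error;   /* assumed update rule */
    }
}
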
145 for (int h=0;h<hl->ncount;h++){
148 double outputcellerrorsum = 0;
150 for (int o=0;o<ol->ncount;o++){
154 int targetOutput = (o==targetClassification)?1:0;
156 double errorDelta = targetOutput - on->output;
159 outputcellerrorsum += errorSignal * on->weights[h];
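
In the backPropagateHiddenLayer() fragment (lines 145-159), every hidden node h accumulates error from all output nodes: each output node's errorDelta is turned into an errorSignal (that intermediate line is missing above; scaling by the activation derivative is an assumption) and folded back through that output node's h-th weight. An array-based sketch of the inner sum, assuming a sigmoid output layer:

/* Error folded back onto hidden node h (hypothetical helper, names are illustrative). */
double hiddenErrorSumSketch(int h, int outCount, int hidCount,
                            const double *outOutputs,   /* output-layer outputs           */
                            const double *outWeights,   /* outCount x hidCount, row-major */
                            int targetClassification) {
    double outputcellerrorsum = 0;
    for (int o = 0; o < outCount; o++) {
        int targetOutput   = (o == targetClassification) ? 1 : 0;             /* line 154 */
        double errorDelta  = targetOutput - outOutputs[o];                    /* line 156 */
        double errorSignal = errorDelta * outOutputs[o] * (1 - outOutputs[o]);/* assumed: delta * sigmoid derivative */
        outputcellerrorsum += errorSignal * outWeights[o * hidCount + h];     /* line 159 */
    }
    return outputcellerrorsum;
}

Presumably the sum is then scaled by the hidden node's own activation derivative and handed to updateNodeWeights(), matching the descriptions further down.
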
182 for (int o=0;o<ol->ncount;o++){
186 int targetOutput = (o==targetClassification)?1:0;
188 double errorDelta = targetOutput - on->output;
255 int prevLayerNodeSize = 0;
266 uint8_t *sbptr = (uint8_t*) prevLayer->nodes;
271 for (int i=0; i<prevLayer->ncount;i++){
274 sbptr += prevLayerNodeSize;
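
The calcNodeOutput() fragment (lines 255-274) mirrors the weight-update walk: the byte pointer advances in prevLayerNodeSize steps so each weight can be multiplied with the matching previous-layer output. The same accumulation with plain arrays (treating the bias as the starting value is an assumption based on the bias field seen in the initWeights fragment below):

/* Weighted-sum sketch of calcNodeOutput(); applying the activation is a separate step (activateNode). */
double nodeInputSketch(const double *weights, const double *prevOutputs, int wcount, double bias) {
    double output = bias;                       /* assumed: start from the node's bias */
    for (int i = 0; i < wcount; i++) {
        output += weights[i] * prevOutputs[i];  /* weight times previous layer node's output */
    }
    return output;
}
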
292 for (int i=0;i<l->ncount;i++){
329 for (int i=0; i<v->size;i++){
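
The feedInput() fragment (line 329) loops over the Vector's size entries; per its description below, each value ends up in the corresponding INPUT-layer node. A trivial array-based sketch (assuming an input node's output field simply stores the presented value):

/* Hypothetical helper: copy vector values into the input layer's node outputs. */
void feedInputSketch(double *inputOutputs, const double *vals, int size) {
    for (int i = 0; i < size; i++) inputOutputs[i] = vals[i];
}
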
346 int inpNodeSize = sizeof(Node);
347 int inpLayerSize = sizeof(Layer) + (inpCount * inpNodeSize);
349 Layer *il = malloc(inpLayerSize);
359 uint8_t *sbptr = (uint8_t*) il->nodes;
362 for (int i=0;i<il->ncount;i++){
363 memcpy(sbptr,&iln,inpNodeSize);
364 sbptr += inpNodeSize;
380 int nodeSize = sizeof(Node) + (weightCount * sizeof(double));
386 Node *dn = (Node*)malloc(sizeof(Node) + ((weightCount)*sizeof(double)));
390 for (int o=0;o<weightCount;o++) dn->weights[o] = 0;
392 uint8_t *sbptr = (uint8_t*) l->nodes;
395 for (int i=0;i<nodeCount;i++) memcpy(sbptr+(i*nodeSize),dn,nodeSize);
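
The sizeof() arithmetic in the createInputLayer()/createLayer() fragments (lines 346-395) only works because node and layer storage is laid out as one flat block: a node's weights sit directly behind its header, and a layer's variably-sized nodes sit behind the layer header, which is why the code walks uint8_t pointers instead of indexing arrays. A compilable sketch of that layout and of createLayer() (field names follow the fragments; everything else, including declaring the node storage as raw bytes, is an assumption made so the sketch builds without compiler extensions):

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

typedef struct {
    double bias;
    double output;
    int    wcount;
    double weights[];      /* C99 flexible array member: weightCount doubles follow in place */
} Node;

typedef struct {
    int     ncount;
    uint8_t nodes[];       /* raw storage for ncount variably-sized Node records */
} Layer;

/* Sketch of createLayer(): allocate one flat block and stamp a zeroed template node into each slot. */
Layer *createLayerSketch(int nodeCount, int weightCount) {
    int nodeSize  = sizeof(Node)  + (weightCount * sizeof(double));          /* line 380 */
    int layerSize = sizeof(Layer) + (nodeCount * nodeSize);

    Layer *l  = malloc(layerSize);
    l->ncount = nodeCount;

    Node *dn  = malloc(nodeSize);                                            /* template node, line 386 */
    dn->bias = 0; dn->output = 0; dn->wcount = weightCount;
    for (int o = 0; o < weightCount; o++) dn->weights[o] = 0;                /* line 390 */

    uint8_t *sbptr = (uint8_t*) l->nodes;
    for (int i = 0; i < nodeCount; i++) memcpy(sbptr + (i * nodeSize), dn, nodeSize);  /* line 395 */

    free(dn);
    return l;
}

Declaring the node storage as raw bytes deviates from the original, which appears to declare Node nodes[] and cast; it is chosen here so the nested flexible-array layout compiles cleanly.
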
421 uint8_t *sbptr = (uint8_t*) nn->layers;
475 uint8_t *sbptr = (uint8_t*) l->nodes;
477 for (int o=0; o<l->ncount;o++){
481 for (int i=0; i<n->wcount; i++){
482 n->weights[i] = 0.7*(rand()/(double)(RAND_MAX));
488 n->bias = rand()/(double)(RAND_MAX);
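
The initWeights() fragment (lines 475-488) visits every node of the chosen layer and seeds its weights with 0.7 * rand()/RAND_MAX and its bias with rand()/RAND_MAX. A per-node sketch with plain arrays:

#include <stdlib.h>

/* Random initialization for one node, mirroring lines 482 and 488. */
void initNodeWeightsSketch(double *weights, int wcount, double *bias) {
    for (int i = 0; i < wcount; i++)
        weights[i] = 0.7 * (rand() / (double)RAND_MAX);   /* weights in [0, 0.7] */
    *bias = rand() / (double)RAND_MAX;                    /* bias in [0, 1] */
}
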
508 int inpNodeSize = sizeof(Node);
509 int inpLayerSize = sizeof(Layer) + (inpCount * inpNodeSize);
512 int hidWeightsCount = inpCount;
513 int hidNodeSize = sizeof(Node) + (hidWeightsCount * sizeof(double));
514 int hidLayerSize = sizeof(Layer) + (hidCount * hidNodeSize);
517 int outWeightsCount = hidCount;
518 int outNodeSize = sizeof(Node) + (outWeightsCount * sizeof(double));
519 int outLayerSize = sizeof(Layer) + (outCount * outNodeSize);
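
Lines 508-519 size the three layers for a single contiguous Network allocation: input nodes carry no weights, each hidden node carries one weight per input node, and each output node one weight per hidden node. A worked example with MNIST-style dimensions (784 = 28x28 input pixels; the hidden and output counts are only illustrative, and the struct declarations repeat the assumptions from the createLayer sketch above):

#include <stdint.h>
#include <stdio.h>

typedef struct { double bias, output; int wcount; double weights[]; } Node;   /* assumed layout */
typedef struct { int ncount; uint8_t nodes[]; } Layer;                        /* assumed layout */

int main(void) {
    int inpCount = 784, hidCount = 20, outCount = 10;

    int inpNodeSize  = sizeof(Node);                               /* no weights on input nodes  */
    int inpLayerSize = sizeof(Layer) + (inpCount * inpNodeSize);

    int hidNodeSize  = sizeof(Node) + (inpCount * sizeof(double)); /* one weight per input node  */
    int hidLayerSize = sizeof(Layer) + (hidCount * hidNodeSize);

    int outNodeSize  = sizeof(Node) + (hidCount * sizeof(double)); /* one weight per hidden node */
    int outLayerSize = sizeof(Layer) + (outCount * outNodeSize);

    printf("layer bytes: in=%d hid=%d out=%d total=%d\n",
           inpLayerSize, hidLayerSize, outLayerSize,
           inpLayerSize + hidLayerSize + outLayerSize);
    return 0;
}
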
560 for (int i=0; i<l->ncount; i++){
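
getNetworkClassification() (line 560 above) reduces to an argmax over the output layer: the loop remembers the node with the highest output and returns its index. An array-based sketch:

/* Return the index of the highest output value (hypothetical helper). */
int classifySketch(const double *outputs, int ncount) {
    int maxIndex = 0;
    for (int i = 1; i < ncount; i++) {
        if (outputs[i] > outputs[maxIndex]) maxIndex = i;
    }
    return maxIndex;
}
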
void backPropagateOutputLayer(Network *nn, int targetClassification)
Back propagates network error in output layer.
Dynamic data structure containing a defined number of values.
Layer * createLayer(int nodeCount, int weightCount)
Neural network functionality for a 3-layer (INPUT, HIDDEN, OUTPUT) feed-forward, back-prop NN...
ActFctType outLayerActType
void feedForwardNetwork(Network *nn)
Feeds input layer values forward to hidden to output layer (calculation and activation fct) ...
void backPropagateNetwork(Network *nn, int targetClassification)
Back propagates network error from output layer to hidden layer.
Layer * getLayer(Network *nn, LayerType ltype)
Returns one of the layers of the network.
void setNetworkDefaults(Network *nn)
Sets the default network parameters (which can be overwritten/changed)
void updateNodeWeights(Network *nn, LayerType ltype, int id, double error)
Updates a node's weights based on given error.
void activateNode(Network *nn, LayerType ltype, int id)
Applies the activation function (as defined in the NN's defaults) to a specified node...
Node * getNode(Layer *l, int nodeId)
Returns the node with the given ID from a layer.
int getNetworkClassification(Network *nn)
Returns the network's classification using the ID of the node with the highest output.
double getActFctDerivative(Network *nn, LayerType ltype, double outVal)
Returns the result of applying the given output value to the derivative of the activation function...
void feedInput(Network *nn, Vector *v)
Feeds some Vector data into the INPUT layer of the NN.
double learningRate
Factor by which connection weight changes are applied.
Dynamic data structure holding a definable number of nodes that form a layer.
void backPropagateHiddenLayer(Network *nn, int targetClassification)
Back propagates network error to hidden layer.
Layer * createInputLayer(int inpCount)
Creates the INPUT layer with the given number of nodes.
ActFctType hidLayerActType
Network * createNetwork(int inpCount, int hidCount, int outCount)
Creates a dynamically-sized, 3-layer (INPUT, HIDDEN, OUTPUT) neural network (see the usage sketch after this list).
void initWeights(Network *nn, LayerType ltype)
Initializes a layer's weights with random values.
void calcLayer(Network *nn, LayerType ltype)
Calculates the output values of a given NN layer.
Utilities for handling the MNIST files.
Dynamic data structure modeling a neuron with a variable number of connections/weights.
void calcNodeOutput(Network *nn, LayerType ltype, int id)
Calculates the output value of a specified node by multiplying all its weights with the previous layer...
void initNetwork(Network *nn, int inpCount, int hidCount, int outCount)
Initializes the NN by creating and copying INPUT, HIDDEN, OUTPUT data structures into the NN's memory...
Dynamic data structure holding the whole network.
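
Taken together, the functions listed above suggest the following per-sample training step. The wrapper, the header name, and the exact signatures are assumptions; only the call names come from this list:

#include "3lnn.h"    /* assumed name of the header declaring the documented API */

/* One hypothetical training step: present a sample, run the forward pass,
 * back-propagate the target class, and read off the prediction. */
int trainOnSampleSketch(Network *nn, Vector *img, int targetClassification) {
    feedInput(nn, img);                             /* copy the vector into the INPUT layer       */
    feedForwardNetwork(nn);                         /* INPUT -> HIDDEN -> OUTPUT calculation pass */
    backPropagateNetwork(nn, targetClassification); /* push the error back from OUTPUT to HIDDEN  */
    return getNetworkClassification(nn);            /* index of the node with the highest output  */
}

The Network handle itself would come from createNetwork(inpCount, hidCount, outCount), with setNetworkDefaults() presumably supplying defaults such as learningRate and the layer activation types, which can then be overwritten per its description above.
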