/* perceptron.c */

/*
 *  Copyright 1994-2011 Olivier Girondel
 *
 *  This file is part of lebiniou.
 *
 *  lebiniou is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation, either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  lebiniou is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with lebiniou. If not, see <http://www.gnu.org/licenses/>.
 */

#include "perceptron.h"

#define E(c, i)     (p->Error[c][i])
#define A(c, i)     (p->Activity[c][i])
#define W(c, i, j)  (p->Weights[c][i][j])
#define DW(c, i, j) (p->dWeights[c][i][j])


/*
 * Randomize every connection weight uniformly in [-sigma, sigma]
 * and clear the momentum terms (dWeights).
 * Layer 0 is the input layer and has no incoming weights, so the
 * loop starts at layer 1; input index 0 is the bias unit.
 */
void
Perceptron_init(Perceptron_t *p, float sigma)
{
  int layer, neuron, input;

  for (layer = 1; layer < p->layers; layer++) {
    for (neuron = 1; neuron <= p->npl[layer]; neuron++) {
      for (input = 0; input <= p->npl[layer-1]; input++) {
        W(layer, neuron, input) = g_rand_double_range(p->grand, -sigma, sigma);
        DW(layer, neuron, input) = 0.0;
      }
    }
  }
}


/*
 * Allocate and initialize a back-propagation neural network.
 *
 * layers: number of layers (including input and output layers).
 * _npl:   neurons per layer; _npl[l] is copied into p->npl[l].
 * sigma:  half-width of the uniform range for the initial weights.
 *
 * Returns a heap-allocated Perceptron_t owned by the caller
 * (release with Perceptron_delete()).
 *
 * Layout convention: index 0 of every Activity[]/Weights[][] row is the
 * bias unit (activity fixed at -1); real neurons are indexed 1..npl[l].
 */
Perceptron_t *
Perceptron_new(int layers, int *_npl, float sigma)
{
  int i, l, kk;
  Perceptron_t *p;
  unsigned long som = 0;

  p = xmalloc(sizeof(Perceptron_t));

  p->layers = layers;
  p->npl = xmalloc(p->layers*sizeof(int));

  printf("[+] Building BPNN: ");
  for (l = 0; l < p->layers; l++) {
    p->npl[l] = _npl[l];
    printf("%d", p->npl[l]);
    if (l != p->layers-1)
      printf("-");
  }
  printf("\n[+] Allocated %d layers.\n", p->layers);

  /* Alloc the connections */
  p->Weights  = xmalloc(layers*sizeof(float **));
  p->Activity = xmalloc(layers*sizeof(float *));
  p->Error    = xmalloc(layers*sizeof(float *));
  p->dWeights = xmalloc(layers*sizeof(float **));

  /* Layer 0 (input layer) has no incoming weights: NULL these slots so
   * they are never left as uninitialized pointers. */
  p->Weights[0] = NULL;
  p->dWeights[0] = NULL;

  for (l = 0; l < p->layers; l++) {
    int npl_p1 = p->npl[l]+1;    /* +1 for the bias unit at index 0 */

    p->Activity[l] = xmalloc(npl_p1*sizeof(float));
    p->Activity[l][0] = -1;      /* bias input */
    for (kk = 1; kk < npl_p1; kk++)
      p->Activity[l][kk] = 0.0;

    p->Error[l] = xmalloc(npl_p1*sizeof(float));
    for (kk = 0; kk < npl_p1; kk++)
      p->Error[l][kk] = 0.0;
  }

  for (l = 1; l < p->layers; l++) {
    int rows = p->npl[l]+1;      /* neurons of layer l (slot 0 unused) */
    int cols = p->npl[l-1]+1;    /* inputs from layer l-1, incl. bias */

    p->Weights[l] = xmalloc(rows*sizeof(float *));
    p->dWeights[l] = xmalloc(rows*sizeof(float *));

    /* slot 0 corresponds to the bias unit, which has no weight row */
    p->Weights[l][0] = NULL;
    p->dWeights[l][0] = NULL;

    for (i = 1; i <= p->npl[l]; i++) {
      p->Weights[l][i] = xcalloc(cols, sizeof(float));  /* zeroed, not xmalloc */
      p->dWeights[l][i] = xcalloc(cols, sizeof(float)); /* zeroed, not xmalloc */
    }
  }

  for (i = 1; i < p->layers; i++)
    som += (1 + p->npl[i-1]) * p->npl[i];
  printf("[+] Allocated %lu connections.\n", som);

  p->grand = g_rand_new();
  Perceptron_init(p, sigma);
  printf("[+] BPNN initialized.\n");

  return p;
}


/*
 * Release every buffer owned by the network: per-neuron weight and
 * momentum rows, the per-layer pointer tables, the activity/error
 * vectors, the layer-size array and the random generator.
 *
 * NOTE(review): the Perceptron_t struct itself (allocated in
 * Perceptron_new()) is NOT freed here -- confirm the caller releases it.
 */
void
Perceptron_delete(Perceptron_t *p)
{
  int layer, row;

  /* weight matrices only exist for layers 1..layers-1 */
  for (layer = 1; layer < p->layers; layer++) {
    for (row = 1; row <= p->npl[layer]; row++) {
      xfree(p->Weights[layer][row]);
      xfree(p->dWeights[layer][row]);
    }
    xfree(p->Weights[layer]);
    xfree(p->dWeights[layer]);
  }
  xfree(p->Weights);
  xfree(p->dWeights);

  /* activity and error vectors exist for every layer */
  for (layer = 0; layer < p->layers; layer++) {
    xfree(p->Error[layer]);
    xfree(p->Activity[layer]);
  }
  xfree(p->Error);
  xfree(p->Activity);

  xfree(p->npl);
  g_rand_free(p->grand);
}


/*
 * Stub: persist the network to `file`.
 * Serialization is not implemented yet; this only reports the target
 * file name. `per` is asserted non-NULL but otherwise unused (hence
 * the unused attribute, which matters when NDEBUG removes the assert).
 */
void
Perceptron_save(__attribute__ ((unused)) const Perceptron_t *per, const char *file)
{
  assert(per != NULL);
  fprintf(stdout, "[i] Saving to file %s...\n", file);
}


/*
 * Stub: restore the network from `file`.
 * Deserialization is not implemented yet; this only reports the source
 * file name. `per` is asserted non-NULL but otherwise unused (hence
 * the unused attribute, which matters when NDEBUG removes the assert).
 */
void
Perceptron_load(__attribute__ ((unused)) Perceptron_t *per, const char *file)
{
  assert(per != NULL);
  fprintf(stdout, "[i] Loading from file %s...\n", file);
}


inline float sygmoide(float x) { return tanhf(x); }


/*
 * Forward pass: load `values` (p->npl[0] floats) into the input layer,
 * propagate the activities through every layer, and return the single
 * output activation.
 * Slot 0 of each layer is the bias unit (activity fixed at -1 by
 * Perceptron_new), so real inputs land in slots 1..npl[0].
 */
float
Perceptron_FF(const Perceptron_t *p, float *values)
{
  int layer, neuron, k;

  /* copy the input vector into layer 0 (slot 0 holds the bias) */
  for (k = 0; k < p->npl[0]; k++)
    A(0, k+1) = values[k];

  /* propagate layer by layer: weighted sum then transfer function */
  for (layer = 1; layer < p->layers; layer++) {
    for (neuron = 1; neuron <= p->npl[layer]; neuron++) {
      float sum = 0;

      for (k = 0; k <= p->npl[layer-1]; k++)
        sum += W(layer, neuron, k) * A(layer-1, k);
      A(layer, neuron) = sygmoide(sum);
    }
  }

  /* single-output network: neuron 1 of the last layer */
  return A(p->layers-1, 1);
}


/*
 * One back-propagation step after a forward pass.
 *
 * p:     the network; Activity[] must be current, i.e. Perceptron_FF()
 *        must have been called just before.
 * zi:    target value for the single output neuron.
 * mu:    learning rate.
 * alpha: momentum coefficient (weight on the previous weight delta).
 *
 * Since sygmoide() is tanh, its derivative at activity a is (1 - a*a),
 * which is the factor used throughout below.
 */
void
Perceptron_BP(Perceptron_t *p, float zi, float mu, float alpha) {
  int c, i, j;
  float output = A(p->layers-1, 1);

  /* error on output = 2*sygmoide'(bi)*(ai-zi) = 2*...*(ai-zi) */
  E(p->layers-1,1)=2*(1-output*output)*(output-zi);

  /* compute errors, propagating backwards from the last hidden layer */
  for (c = p->layers-2; c > 0; c--)
    for (j = 1; j <= p->npl[c]; j++) {
      float som = 0.0;
      /* sum the errors of layer c+1 weighted by the connections from (c,j) */
      for (i = 1; i <= p->npl[c+1]; i++)
      som += W(c+1, i, j) * E(c+1, i);
      /* error on (c,j) = sygmoide'(bi)*som(Wji*ej) */
      E(c,j) = som * (1 - A(c, j) * A(c, j));
    }
    
  /* modify weights: gradient step plus momentum (alpha * previous delta);
     j == 0 is the bias input of layer c-1 */
  for (c = p->layers-1; c > 0; c--)
    for (i = 1; i <= p->npl[c]; i++)
      for (j = 0; j <= p->npl[c-1]; j++) {
      DW(c, i, j) = alpha * DW(c, i, j) - mu * E(c, i) * A(c-1, j);
      W(c, i, j) += DW(c, i, j);
      }
}
