﻿ 反向传播算法C语言实现_python_开心洋葱网
• 欢迎访问开心洋葱网站，在线教程，推荐使用最新版火狐浏览器和Chrome浏览器访问本网站，欢迎加入开心洋葱` QQ群`
• 为方便开心洋葱网用户，开心洋葱官网已经开启复制功能！
• 欢迎访问开心洋葱网站，手机也能访问哦~欢迎加入开心洋葱多维思维学习平台` QQ群`
• 如果您觉得本站非常有看点，那么赶紧使用Ctrl+D 收藏开心洋葱吧~~~~~~~~~~~~~！
• 由于近期流量激增，小站的ECS没能经得起亲们的访问，本站依然没有盈利，如果各位看官觉着文字不错，还请给小站打个赏~~~~~~~~~~~~~！

# 反向传播算法C语言实现

2974次浏览

```//实现对异或的分类
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#define PN 4
#define INPUT 2
#define HIDDEN 2
#define TARGET 1
#define OUTPUT 1
/*
 * Feed-forward network with one hidden layer (input -> hidden -> output).
 * The typedef lets the bare name `NN` be used as a type, which is how
 * every function below declares its parameters (`NN *self`); without the
 * typedef that spelling is valid C++ but not C.
 */
typedef struct NN
{
    int ni;      /* number of input units, including the +1 bias unit */
    int nh;      /* number of hidden units */
    int no;      /* number of output units */
    double *ai;  /* input-layer activations, length ni */
    double *ah;  /* hidden-layer activations, length nh */
    double *ao;  /* output-layer activations, length no */
    double *wi;  /* input->hidden weights, ni x nh, row-major */
    double *wo;  /* hidden->output weights, nh x no, row-major */
    double *ci;  /* previous input->hidden weight changes (momentum) */
    double *co;  /* previous hidden->output weight changes (momentum) */
} NN;
/*
 * Returns a uniform pseudo-random double in [a, b].
 *
 * Fixes relative to the original:
 *  - Renamed from `rand`: redeclaring the C library's rand() with a
 *    different signature is invalid C, which is why the original
 *    "random number function" never worked (the author's comment asked
 *    exactly this). Nothing in this file called it — init() uses
 *    hard-coded weights — so the rename is safe.
 *  - The generator is seeded once instead of on every call; calling
 *    srand(time(NULL)) per call makes all calls within the same second
 *    return the same value.
 */
double rand_range(double a, double b)
{
    static int seeded = 0;
    if (!seeded) {
        srand((unsigned)time(NULL));
        seeded = 1;
    }
    double t = rand() / (double)RAND_MAX;
    return a + (b - a) * t;
}
/*
 * Allocates a contiguous i x j row-major matrix with every entry set to
 * `fill`. Returns NULL if the allocation fails (the original dereferenced
 * the result unconditionally). Caller owns the buffer and must free() it.
 */
double * makeMatrix(int i, int j, double fill)
{
    double * matrix = (double *) malloc (i * j * sizeof(double));
    int m, n;
    if (matrix == NULL) {
        return NULL;
    }
    for (m = 0; m < i; ++m) {
        for (n = 0; n < j; ++n) {
            matrix[m * j + n] = fill;
        }
    }
    return matrix;
}

/*
 * Activation function. Despite the name this is tanh(x), not the logistic
 * sigmoid — the original computed (e^x - e^-x)/(e^x + e^-x) by hand, which
 * is exactly tanh(x) but overflows to NaN for large |x|; the library tanh()
 * is the numerically safe equivalent. The name is kept for the callers.
 */
double sigmoid(double x)
{
    return tanh(x);
}

/*
 * Derivative of tanh expressed in terms of the activation value y = tanh(x):
 * d/dx tanh(x) = 1 - tanh(x)^2. Callers pass the stored activation, not x.
 */
double dsigmoid(double y)
{
    return 1.0 - y * y;
}

/*
 * Initializes the network: ni inputs (plus one always-on bias unit),
 * nh hidden units, no output units. Allocates the activation vectors,
 * the two weight matrices, and the two momentum matrices.
 */
void init(NN * self, int ni, int nh, int no)
{
    int i;
    self->ni = ni + 1;  /* +1 bias input, permanently clamped to 1.0 */
    self->nh = nh;
    self->no = no;
    self->ai = (double *) malloc (sizeof(double) * self->ni);
    self->ah = (double *) malloc (sizeof(double) * self->nh);
    self->ao = (double *) malloc (sizeof(double) * self->no);
    for (i = 0; i < self->ni; ++i) {
        self->ai[i] = 1.0;
    }
    for (i = 0; i < self->nh; ++i) {
        self->ah[i] = 1.0;
    }
    for (i = 0; i < self->no; ++i) {
        self->ao[i] = 1.0;
    }
    self->wi = makeMatrix(self->ni, self->nh, 0);
    self->wo = makeMatrix(self->nh, self->no, 0);

    /* Fixed starting weights generated offline (with Python) so training is
       reproducible; the author's C random generator was broken — see the
       note on rand_range above. */
    self->wi[0] = 0.13776874061001926;
    self->wi[1] = 0.10318176117612099;
    self->wi[2] = -0.031771367667662004;
    self->wi[3] = -0.09643329988281467;
    self->wi[4] = 0.004509888547443414;
    self->wi[5] = -0.03802634501983429;
    self->wo[0] = 1.1351943561390905;
    self->wo[1] = -0.7867490956842902;

    /* momentum accumulators start at zero */
    self->ci = makeMatrix(self->ni, self->nh, 0);
    self->co = makeMatrix(self->nh, self->no, 0);
}
/*
 * Forward pass: copies `inputs` into the input layer (leaving the trailing
 * bias unit at 1.0), computes hidden and output activations, and returns a
 * malloc'd copy of the output activations.
 *
 * Returns NULL if the input count does not match or allocation fails.
 * Ownership: the caller must free() the returned buffer. NOTE(review):
 * the activations are also retained in self->ao, which is what test()
 * actually reads.
 */
double * update(NN * self, double *inputs)
{
    int i, j, k;

    if (INPUT != self->ni - 1) {
        printf("wrong number of inputs\n");
        return NULL;  /* the original fell through and used mismatched data */
    }

    /* load input activations; ai[ni-1] stays 1.0 as the bias */
    for (i = 0; i < self->ni - 1; ++i) {
        self->ai[i] = inputs[i];
    }
    /* hidden layer: weighted sum of all inputs through wi, squashed */
    for (j = 0; j < self->nh; ++j) {
        double sum = 0;
        for (i = 0; i < self->ni; ++i) {
            sum += self->ai[i] * self->wi[i * self->nh + j];
        }
        self->ah[j] = sigmoid(sum);
    }
    /* output layer: weighted sum of hidden activations through wo */
    for (k = 0; k < self->no; ++k) {
        double sum = 0;
        for (j = 0; j < self->nh; ++j) {
            sum += self->ah[j] * self->wo[j * self->no + k];
        }
        self->ao[k] = sigmoid(sum);
    }

    double *output = (double *) malloc (sizeof(double) * self->no);
    if (output != NULL) {
        for (i = 0; i < self->no; ++i) {
            output[i] = self->ao[i];
        }
    }
    return output;
}
/*
 * One backpropagation step for the pattern most recently run through
 * update(). N is the learning rate, M the momentum factor.
 * Returns the pattern's summed squared error, 0.5 * sum (t - o)^2.
 *
 * (The function name's misspelling is kept — callers use it.)
 * Fixes relative to the original: the two delta buffers were leaked on
 * every call, and a target-count mismatch printed a message but then
 * proceeded with mismatched sizes.
 */
double backprobagation(NN * self, double *targets, double N, double M)
{
    int i, j, k;

    if (TARGET != self->no) {
        printf("wrong number of target values\n");
        return 0;
    }

    /* output deltas: error scaled by the activation's slope */
    double *output_deltas = (double *) calloc (self->no, sizeof(double));
    for (k = 0; k < self->no; ++k) {
        double error = targets[k] - self->ao[k];
        output_deltas[k] = dsigmoid(self->ao[k]) * error;
    }

    /* hidden deltas: output deltas propagated back through wo */
    double *hidden_deltas = (double *) calloc (self->nh, sizeof(double));
    for (j = 0; j < self->nh; ++j) {
        double error = 0;
        for (k = 0; k < self->no; ++k) {
            error += output_deltas[k] * self->wo[j * self->no + k];
        }
        hidden_deltas[j] = dsigmoid(self->ah[j]) * error;
    }

    /* hidden->output weight update with momentum */
    for (j = 0; j < self->nh; ++j) {
        for (k = 0; k < self->no; ++k) {
            double change = output_deltas[k] * self->ah[j];
            self->wo[j * self->no + k] += (N * change + M * self->co[j * self->no + k]);
            self->co[j * self->no + k] = change;
        }
    }
    /* input->hidden weight update with momentum */
    for (i = 0; i < self->ni; ++i) {
        for (j = 0; j < self->nh; ++j) {
            double change = hidden_deltas[j] * self->ai[i];
            self->wi[i * self->nh + j] += (N * change + M * self->ci[i * self->nh + j]);
            self->ci[i * self->nh + j] = change;
        }
    }

    /* the original leaked both buffers on every call */
    free(output_deltas);
    free(hidden_deltas);

    double error = 0;
    for (k = 0; k < TARGET; ++k) {
        error += 0.5 * (targets[k] - self->ao[k]) * (targets[k] - self->ao[k]);
    }
    return error;
}
/*
 * Prints each of the PN training patterns followed by the network's
 * output for it. `targets` is accepted for symmetry with train() but
 * is not used.
 *
 * Fixes relative to the original: it malloc'd a copy of each input row
 * and leaked it, and leaked the buffer returned by update(). Pointing
 * directly into `inputs` is equivalent because update() only reads it.
 */
void test(NN * self, double *inputs, double *targets)
{
    int i, j, k;
    (void)targets;
    for (i = 0; i < PN; ++i) {
        double *input = inputs + i * INPUT;
        for (j = 0; j < INPUT; ++j) {
            printf("%lf ", input[j]);
        }
        double *out = update(self, input);
        free(out);  /* output already mirrored in self->ao below */
        for (k = 0; k < self->no; ++k) {
            printf("%lf ", self->ao[k]);
        }
        printf("\n");
    }
}
/*
 * Runs `iteration` epochs over the PN training patterns: a forward pass
 * (update) then a backprop step per pattern. N is the learning rate,
 * M the momentum factor. Prints the summed epoch error every 100 epochs.
 *
 * Fixes relative to the original: it malloc'd and leaked a copy of the
 * input and target rows for every pattern of every epoch, plus the buffer
 * returned by update(). The rows are only read, so pointing into the
 * caller's arrays is equivalent.
 */
void train(NN * self, double *inputs, double *targets, int iteration, double N, double M)
{
    int i, p;
    for (i = 0; i < iteration; ++i) {
        double error = 0;
        for (p = 0; p < PN; ++p) {
            double *input = inputs + p * INPUT;
            double *target = targets + p * TARGET;
            double *out = update(self, input);
            free(out);  /* backprop reads self->ao, not this copy */
            error += backprobagation(self, target, N, M);
        }
        if (i % 100 == 0) {
            printf("error %-.5lf\n", error);
        }
    }
}
int main()
{
double inputs[PN * INPUT] = {0, 0, 0, 1, 1, 0, 1, 1};
double targets[PN * TARGET] = {0, 1, 1, 0};
NN * self = (NN *) calloc (1, sizeof(NN));
init(self, 2, 2, 1);
train(self, inputs, targets, 1000, 0.5, 0.1);
test(self, inputs, targets);
return 0;
}
```