#include <stdio.h>

double x = 2;						// input layer
double w = 3;						// weight
double b = 1;						// bias
double y;							// output layer

double E;							// Error (=loss)
double yT = 10;						// Target value, label

double yE;							// back propagation output
double xE;							// back propagation input
double wE;							// back propagation weight
double bE;							// back propagation bias

double lr = 0.01;					// learning rate

int main(void)
{
	// Forward propagation
	y = (x * w) + (1 * b);
	printf("예측값(y) : %.2lf\n", y);

	// Mean Squared Error (MSE)
	E = (y - yT) * (y - yT) / 2;
	printf("오차(E) : %.2lf\n", E);

	// Back propagation (Output)
	yE = y - yT;
	printf("출력 역전파 오차(yE) : %.2lf\n", yE);

	// Back propagation (Input)
	xE = yE * w;
	printf("입력 역전파 오차(xE) : %.2lf\n", xE);

	// Weight, Bias Forward propagation
	y = (w * x) + (b * 1);
	printf("가중치, 편향 순전파(y) : %.2lf\n", y);

	// Weight, Bias Back propagation
	wE = yE * x;
	bE = yE * 1;
	printf("가중치 역전파(wE) : %.2lf\n", wE);
	printf("편향 역전파(bE) : %.2lf\n", bE);

	// Neural network learning (Weight, Bias)
	w -= lr * wE;
	b -= lr * bE;
	printf("가중치 학습(w) : %f\n", w);
	printf("편향 학습(b) : %f\n", b);

	// Predict with learned neural networks
	y = x * w + 1 * b;
	printf("y : %f\n", y);
	
	/*
	Predicted value (y) : 7.00
	Error (E) : 4.50
	Output backpropagation error (yE) : -3.00
	Input backpropagation error (xE) : -9.00
	Weight/bias forward propagation (y) : 7.00
	Weight backpropagation (wE) : -6.00
	Bias backpropagation (bE) : -3.00
	Learned weight (w) : 3.060000
	Learned bias (b) : 1.030000
	y : 7.150000
	*/

	return 0;
}
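
The wE, bE, and xE values above are just the chain rule applied to the model y = w*x + b with loss E = (y - yT)^2 / 2: dE/dw = (y - yT)*x, dE/db = (y - yT), and dE/dx = (y - yT)*w. The standalone sketch below is my own addition, not part of the original listing (the loss() helper is a name I made up); it checks those analytic gradients against central finite differences. The next listing then repeats the same update inside a 200-epoch loop so that w and b actually converge.

#include <stdio.h>

/* Loss of the single-neuron model y = w*x + b against target yT. */
static double loss(double x, double w, double b, double yT)
{
	double y = w * x + b;
	return (y - yT) * (y - yT) / 2.0;
}

int main(void)
{
	double x = 2, w = 3, b = 1, yT = 10;
	double eps = 1e-6;

	// Analytic gradients, same formulas as wE and bE above
	double yE = (w * x + b) - yT;
	double wE = yE * x;
	double bE = yE * 1;

	// Central finite differences as an independent check
	double wE_num = (loss(x, w + eps, b, yT) - loss(x, w - eps, b, yT)) / (2 * eps);
	double bE_num = (loss(x, w, b + eps, yT) - loss(x, w, b - eps, yT)) / (2 * eps);

	printf("wE analytic = %.6f, numeric = %.6f\n", wE, wE_num);	// both about -6.000000
	printf("bE analytic = %.6f, numeric = %.6f\n", bE, bE_num);	// both about -3.000000

	return 0;
}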
#include <stdio.h>

double x = 2;		// input layer
double w = 3;		// weight
double b = 1;		// bias
double yT = 10;		// target value, label
double lr = 0.01;	// learning rate

void dnn_test();

int main(void)
{
	dnn_test();

	return 0;
}

void dnn_test()
{
	for (int epoch = 0; epoch < 200; epoch++)
	{
		double y = x * w + 1 * b;			// Forward propagation
		double E = (y - yT) * (y - yT) / 2;	// Mean Squared Error (MSE)
		double yE = y - yT;					// Back propagation (Output)
		double xE = yE * w;					// Back propagation (Input)
		double wE = yE * x;					// Back propagation (Weight)
		double bE = yE * 1;					// Back propagation (Bias)

		w -= lr * wE;						// Weight learning
		b -= lr * bE;						// Bias learning

		printf("epoch = %d\n", epoch);
		printf("예측값(y) = %6.3f\n", y);
		printf("가중치(w) = %6.3f, 편향(b) = %6.3f\n", w, b);
		printf("가중치 역전파(wE) = %6.3f, 편향 역전파(bE) = %6.3lf\n", wE, bE);
		printf("------------------------------------------------------------\n");

		if (E < 0.0000001) break;
	}
}
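
Because wE = yE * x and bE = yE, one gradient step changes the prediction by -lr * (x*x + 1) * yE, so with x = 2 and lr = 0.01 the residual y - yT shrinks by a factor of 0.95 every epoch and E eventually drops below the 0.0000001 threshold. The sketch below is my own standalone check of that geometric decay against the same update rule; it is not part of the original post.

#include <stdio.h>
#include <math.h>

int main(void)
{
	double x = 2, w = 3, b = 1, yT = 10, lr = 0.01;
	double r0 = (x * w + 1 * b) - yT;			// initial residual y - yT = -3
	double decay = 1.0 - lr * (x * x + 1.0);	// residual shrinks by 0.95 per step

	for (int epoch = 0; epoch < 200; epoch++)
	{
		double y = x * w + 1 * b;
		double yE = y - yT;
		w -= lr * yE * x;						// same weight update as dnn_test()
		b -= lr * yE;							// same bias update as dnn_test()

		if (epoch % 50 == 0)
			printf("epoch %3d: loop residual = %9.6f, closed form = %9.6f\n",
				epoch, (x * w + 1 * b) - yT, r0 * pow(decay, epoch + 1));
	}

	return 0;
}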