[deep learning 학습 노트] Yusugomori의 SDA 코드인 SdA를 주석합니다.cpp - 모델 준비

2050 단어
보조 함수, 구조/분석 함수 등을 포함한 준비 작업들
#include <iostream>
#include <math.h>
#include "HiddenLayer.h"
#include "dA.h"
#include "LogisticRegression.h"
#include "SdA.h"
using namespace std;

// To generate a value between min and max in a uniform distribution
// Draw one sample from the uniform distribution on [min, max).
// Backed by the C rand() generator; call srand() first for reproducibility.
double uniform(double min, double max) 
{
  const double unit = rand() / (RAND_MAX + 1.0);  // uniform in [0, 1)
  return min + unit * (max - min);
}

// To get the result of n-binomial test by the p probability
// Sample from a Binomial(n, p) distribution: run n Bernoulli trials with
// success probability p and return the number of successes.
// Out-of-range p (p < 0 or p > 1) yields 0.
int binomial(int n, double p) 
{
  if (p < 0 || p > 1) return 0;

  int successes = 0;
  for (int trial = 0; trial < n; trial++) {
    const double draw = rand() / (RAND_MAX + 1.0);  // uniform in [0, 1)
    if (draw < p) successes++;
  }

  return successes;
}

// To get the result of sigmoid function
// Logistic sigmoid: maps any real x into the open interval (0, 1).
double sigmoid(double x) 
{
  const double e = exp(-x);
  return 1.0 / (1.0 + e);
}

// SdA
// SdA constructor: build the stacked denoising autoencoder.
// Each level pairs a HiddenLayer with a dA that shares the HiddenLayer's
// W and b (the same pointers are passed into the dA), and a
// LogisticRegression layer sits on top for the output.
SdA::SdA(int size, int n_i, int *hls, int n_o, int n_l)
{
	// global configuration
  	N = size;                    // number of training samples
  	n_ins = n_i;                 // input dimensionality
  	hidden_layer_sizes = hls;    // per-level hidden sizes (pointer kept, not copied)
  	n_outs = n_o;                // output dimensionality
  	n_layers = n_l;              // number of hidden levels

  	sigmoid_layers = new HiddenLayer* [n_layers];
  	dA_layers = new dA* [n_layers];

  	// build the stack bottom-up: level i's input width is the previous
  	// level's hidden size (or n_ins at the bottom)
  	for(int level = 0; level < n_layers; level++) 
  	{
    	const int fan_in =
			(level == 0) ? n_ins : hidden_layer_sizes[level - 1];

    	// sigmoid layer for this level (W, b allocated internally: NULL, NULL)
    	sigmoid_layers[level] =
			new HiddenLayer(N, fan_in, hidden_layer_sizes[level], NULL, NULL);

    	// matching denoising autoencoder, sharing the sigmoid layer's W and b
    	dA_layers[level] =
			new dA(N, fan_in, hidden_layer_sizes[level],
                          sigmoid_layers[level]->W, sigmoid_layers[level]->b, NULL);
  	}

  	// supervised output layer on top of the last hidden representation
  	log_layer =
	  new LogisticRegression(N, hidden_layer_sizes[n_layers - 1], n_outs);
}

// Destructor: release everything the constructor allocated with new —
// the output layer, each per-level layer object, then the pointer arrays.
SdA::~SdA() 
{
  	delete log_layer;			// LogisticRegression output layer

  	for(int level = 0; level < n_layers; level++) 
  	{
    	delete sigmoid_layers[level];	// HiddenLayer for this level
    	delete dA_layers[level];		// dA for this level
  	}

  	// finally the arrays of pointers themselves
  	delete[] sigmoid_layers;
  	delete[] dA_layers;
}

좋은 웹페이지 즐겨찾기