Neuron Net
Macros | Functions
Convolution layer's neuron Gradients Calculation kernels

Describes the gradient calculation process for a neuron of the convolution layer. More...

Macros

#define def_k_CalcHiddenGradientConv   8
 Index of the kernel of the convolution neuron to transfer gradient to previous layer (CalcHiddenGradientConv) More...
 
#define def_k_chgc_matrix_w   0
 Weights matrix of size (m+1)*n, where m is the input window size and n is the output window size. More...
 
#define def_k_chgc_matrix_g   1
 Tensor of gradients at current layer. More...
 
#define def_k_chgc_matrix_o   2
 Output tensor. More...
 
#define def_k_chgc_matrix_ig   3
 Tensor of gradients at previous layer. More...
 
#define def_k_chgc_outputs   4
 Number of outputs. More...
 
#define def_k_chgc_step   5
 Step size. More...
 
#define def_k_chgc_window_in   6
 Size of input window. More...
 
#define def_k_chgc_window_out   7
 Size of output window. More...
 
#define def_k_chgc_activation   8
 Activation type (ENUM_ACTIVATION) More...
 

Functions

__kernel void CalcHiddenGradientConv (__global double *matrix_w, __global double *matrix_g, __global double *matrix_o, __global double *matrix_ig, int outputs, int step, int window_in, int window_out, uint activation)
 Kernel of the Convolution neuron to transfer gradient to previous layer (CNeuronConvOCL) More...
 

Detailed Description

Describes the gradient calculation process for a neuron of the convolution layer.

Macro Definition Documentation

◆ def_k_CalcHiddenGradientConv

#define def_k_CalcHiddenGradientConv   8

Index of the kernel of the convolution neuron to transfer gradient to previous layer (CalcHiddenGradientConv)

Definition at line 190 of file NeuroNet.mqh.

◆ def_k_chgc_activation

#define def_k_chgc_activation   8

Activation type (ENUM_ACTIVATION)

Definition at line 199 of file NeuroNet.mqh.

◆ def_k_chgc_matrix_g

#define def_k_chgc_matrix_g   1

Tensor of gradients at current layer.

Definition at line 192 of file NeuroNet.mqh.

◆ def_k_chgc_matrix_ig

#define def_k_chgc_matrix_ig   3

Tensor of gradients at previous layer.

Definition at line 194 of file NeuroNet.mqh.

◆ def_k_chgc_matrix_o

#define def_k_chgc_matrix_o   2

Output tensor.

Definition at line 193 of file NeuroNet.mqh.

◆ def_k_chgc_matrix_w

#define def_k_chgc_matrix_w   0

Weights matrix of size (m+1)*n, where m is the input window size and n is the output window size.

Definition at line 191 of file NeuroNet.mqh.

◆ def_k_chgc_outputs

#define def_k_chgc_outputs   4

Number of outputs.

Definition at line 195 of file NeuroNet.mqh.

◆ def_k_chgc_step

#define def_k_chgc_step   5

Step size.

Definition at line 196 of file NeuroNet.mqh.

◆ def_k_chgc_window_in

#define def_k_chgc_window_in   6

Size of input window.

Definition at line 197 of file NeuroNet.mqh.

◆ def_k_chgc_window_out

#define def_k_chgc_window_out   7

Size of output window.

Definition at line 198 of file NeuroNet.mqh.

Function Documentation

◆ CalcHiddenGradientConv()

__kernel void CalcHiddenGradientConv ( __global double *  matrix_w,
__global double *  matrix_g,
__global double *  matrix_o,
__global double *  matrix_ig,
int  outputs,
int  step,
int  window_in,
int  window_out,
uint  activation 
)

Kernel of the Convolution neuron to transfer gradient to previous layer (CNeuronConvOCL)

Parameters
[in]matrix_wWeights matrix of size (m+1)*n, where m is the input window size and n is the output window size
[in]matrix_gTensor of gradients at current layer
[in]matrix_oOutput tensor
[out]matrix_igTensor of gradients at previous layer
outputsNumber of outputs
stepStep size
window_inSize of input window
window_outSize of output window
activationActivation type (ENUM_ACTIVATION)

Definition at line 395 of file NeuroNet.cl.

// Kernel body (listing extracted from NeuroNet.cl; original line numbers kept).
// For one previous-layer neuron i, accumulates sum over all output-window
// components h and all kernel placements k whose input window covers i:
//   matrix_g[k*window_out+h] * matrix_w[shift_w]
// then multiplies by the derivative of this layer's activation and stores the
// result in matrix_ig[i].
405  {
// i: index of the previous-layer (input) neuron this work-item handles.
406  int i=get_global_id(0);
// Total number of previous-layer neurons (NOTE(review): unused below).
407  int inputs=get_global_size(0);
408  double sum=0;
// Forward-pass output of neuron i, needed for the activation derivative.
409  double out=matrix_o[i];
// First kernel placement (counted in steps) whose window still covers input i.
// NOTE(review): start can become negative when i < window_in-step; a negative k
// would produce negative shift_g/shift_w indices below — presumably excluded by
// the caller's window/step choice, verify against the host code.
410  int start=i-window_in+step;
411  start=(start-start%step)/step;
// Last placement (exclusive) whose window covers input i.
412  int stop=(i-i%step)/step+1;
413  if(stop>(outputs/window_out))
// NOTE(review): clamp looks inconsistent — the bound tested is
// outputs/window_out (number of placements) but the value assigned is outputs;
// suspected typo for stop=outputs/window_out. Confirm against NeuroNet.cl.
414  stop=outputs;
// NOTE(review): h advances by 4, so only every 4th output-window component
// contributes. In vectorized variants this stride pairs with double4 loads,
// which are absent here — confirm whether h++ was intended.
415  for(int h=0;h<window_out;h+=4)
416  {
417  for(int k=start;k<stop;k++)
418  {
// Weight offset: position of input i inside placement k's window, plus the
// start of filter h; each filter row is (window_in+1) wide — the extra slot
// presumably holds the bias (see the (m+1)*n matrix_w description).
419  int shift_w=(stop-k-1)*step+i%step+h*(window_in+1);
420  int shift_g=k*window_out+h;
// Bounds guard against reading past the gradient and weight buffers.
421  if(shift_g>=outputs || shift_w>=(window_in+1)*window_out)
422  break;
423  sum+=matrix_g[k*window_out+h]*matrix_w[shift_w];
424  }
425  }
// Apply the derivative of the activation that produced 'out'
// (codes mirror ENUM_ACTIVATION).
426  switch(activation)
427  {
// 0: tanh-style — clamp the propagated value to [-1,1], derivative 1-out^2
// (saturation nudged off exactly +/-1 to keep the derivative nonzero).
428  case 0:
429  sum=clamp(sum+out,-1.0,1.0)-out;
430  sum=sum*(1-pow(out==1 || out==-1 ? 0.99999999 : out,2));
431  break;
// 1: sigmoid-style — clamp to [0,1], derivative out*(1-out), with a small
// epsilon at the saturation points 0 and 1.
432  case 1:
433  sum=clamp(sum+out,0.0,1.0)-out;
434  sum=sum*(out==0 || out==1 ? 0.00000001 : (out*(1-out)));
435  break;
// 2: leaky ReLU — slope 0.01 on the negative side, identity otherwise.
436  case 2:
437  if(out<0)
438  sum*=0.01;
439  break;
440  default:
441  break;
442  }
// Write the previous-layer gradient for neuron i.
443  matrix_ig[i]=sum;
444  }