Activation

计算激活函数值,并将其写入传递的向量/矩阵。

bool vector::Activation(
  vector&                   vect_out,      // 获取函数值的向量
  ENUM_ACTIVATION_FUNCTION  activation,    // 激活函数
   ...                                     // 附加参数
   );
 
 
bool matrix::Activation(
  matrix&                   matrix_out,    // 获取函数值的矩阵
  ENUM_ACTIVATION_FUNCTION  activation     // 激活函数
   );
 
 
bool matrix::Activation(
  matrix&                   matrix_out,    // 获取函数值的矩阵
  ENUM_ACTIVATION_FUNCTION  activation,    // 激活函数
  ENUM_MATRIX_AXIS          axis,          // 轴线
   ...                                     // 附加参数
   );

参数

vect_out/matrix_out

[out]  获取激活函数计算值的向量或矩阵。

activation

[in] ENUM_ACTIVATION_FUNCTION枚举中的激活函数。

axis

[in] ENUM_MATRIX_AXIS枚举值(AXIS_HORZ ― 横轴,AXIS_VERT ― 竖轴)。

...

[in]  某些激活函数所需的附加参数。如果没有指定参数,则使用默认值。

返回值

如果成功返回true,否则返回false。

附加参数

某些激活函数接受附加参数。如果没有指定参数,则使用默认值。

   AF_ELU  (Exponential Linear Unit)  
     double alpha=1.0
   
   Activation function: if(x>=0) f(x) = x
                      else f(x) = alpha * (exp(x)-1)
   
   
   AF_LINEAR   
     double alpha=1.0
     double beta=0.0
   
   Activation function: f(x) = alpha*x + beta
   
   
   AF_LRELU   (Leaky REctified Linear Unit)   
     double alpha=0.3
   
   Activation function: if(x>=0) f(x) = x
                      else f(x) = alpha*x
   
                        
   AF_RELU  (REctified Linear Unit)   
     double alpha=0.0
     double max_value=0.0
     double threshold=0.0
   
   Activation function: if(alpha==0) f(x) = max(x,0)
                      else if(x>max_value) f(x) = x
                      else f(x) = alpha*(x - threshold)
   
   
   AF_SWISH   
     double beta=1.0
   
   Activation function: f(x) = x / (1+exp(-x*beta))
   
   
   AF_TRELU   (Thresholded REctified Linear Unit)   
     double theta=1.0
   
   Activation function: if(x>theta) f(x) = x
                      else f(x) = 0
   
   
   AF_PRELU   (Parametric REctified Linear Unit)   
     double alpha[] - learned array of coefficients
   
   Activation function: if(x[i]>=0) f(x)[i] = x[i]
                      else f(x)[i] = alpha[i] * x[i]

注意

在人工神经网络中,神经元的激活函数决定输出信号,输出信号由一个输入信号或一组输入信号定义。激活函数的选择对神经网络的性能有很大的影响。不同的模型部件(层)可以使用不同的激活函数。

使用附加参数的示例:

   vector x={0.1, 0.4, 0.9, 2.0, -5.0, 0.0, -0.1};
   vector y;
 
   x.Activation(y,AF_ELU);
   Print(y);
   x.Activation(y,AF_ELU,2.0);
   Print(y);
 
   Print("");
   x.Activation(y,AF_LINEAR);
   Print(y);
   x.Activation(y,AF_LINEAR,2.0);
   Print(y);
   x.Activation(y,AF_LINEAR,2.0,5.0);
   Print(y);
 
   Print("");
   x.Activation(y,AF_LRELU);
   Print(y);
   x.Activation(y,AF_LRELU,1.0);
   Print(y);
   x.Activation(y,AF_LRELU,0.1);
   Print(y);
  
   Print("");
   x.Activation(y,AF_RELU);
   Print(y);
   x.Activation(y,AF_RELU,2.0,0.5);
   Print(y);
   x.Activation(y,AF_RELU,2.0,0.5,1.0);
   Print(y);
 
   Print("");
   x.Activation(y,AF_SWISH);
   Print(y);
   x.Activation(y,AF_SWISH,2.0);
   Print(y);
 
   Print("");
   x.Activation(y,AF_TRELU);
   Print(y);
   x.Activation(y,AF_TRELU,0.3);
   Print(y);
 
   Print("");
   vector a=vector::Full(x.Size(),2.0);
   x.Activation(y,AF_PRELU,a);
   Print(y);
 
/*  Results
   [0.1,0.4,0.9,2,-0.993262053000915,0,-0.095162581964040]
   [0.1,0.4,0.9,2,-1.986524106001829,0,-0.190325163928081]
   
   [0.1,0.4,0.9,2,-5,0,-0.1]
   [0.2,0.8,1.8,4,-10,0,-0.2]
   [5.2,5.8,6.8,9,-5,5,4.8]
   
   [0.1,0.4,0.9,2,-1.5,0,-0.03]
   [0.1,0.4,0.9,2,-5,0,-0.1]
   [0.1,0.4,0.9,2,-0.5,0,-0.01]
   
   [0.1,0.4,0.9,2,0,0,0]
   [0.2,0.8,0.9,2,-10,0,-0.2]
   [-1.8,-1.2,0.9,2,-12,-2,-2.2]
   
   [0.052497918747894,0.239475064044981,0.6398545523625035,1.761594155955765,-0.03346425462142428,0,-0.047502081252106]
   [0.054983399731247,0.275989792451045,0.7723340415895611,1.964027580075817,-0.00022698934351217,0,-0.045016600268752]
   
   [0,0,0,2,0,0,0]
   [0,0.4,0.9,2,0,0,0]
   
   [0.1,0.4,0.9,2,-10,0,-0.2]
*/