Download MetaTrader 5

need c++ fast learning neural network

To add comments, please log in or register
No time for trading? Subscribe to signals!
Sabil Yudifera Daeng Pattah
13949
Sabil Yudifera Daeng Pattah 2016.06.07 17:04 

My code is very slow: training on one year of data currently takes 2 days, and I need it to finish in about 1 minute.

// Elman-style neural network: one hidden layer, sigmoid activations,
// trained by online back-propagation (ElmanNetwork()).  Weights can be
// persisted to / restored from text files (saveneuron()/loadneuron()).
class elman{
   private:
  
      // Weight matrices: wih = input->hidden, who = hidden->output.
      // Row [inputNeurons] (resp. [hiddenNeurons]) holds the bias weights.
      // NOTE(review): each 10000x10000 double matrix is ~800 MB of static
      // storage regardless of the actual neuron counts -- this is almost
      // certainly why training is so slow; size these to the real layer
      // sizes (or allocate dynamically) to fix the performance complaint.
      double wih[10000][10000];
      double who[10000][10000];
       // Training set: up to 100 samples, filled by set().
       double sampleInput[100][10000];
       double sampleout[100][10000];
       //hidden layer
  
       //context and output layer activations (filled by feedForward())
       double hidden[10000], actual[10000];
       //error terms for the output (erro) and hidden (errh) layers
       double erro[10000], errh[10000];
  
       //input and target values of the sample currently being trained
       double target[100];
       double inputs[10000];
       /////
       int inputNeurons;
       int hiddenNeurons;
       int outputNeurons;
  
       double learnRate;    // back-propagation step size
  
       int maxTests;        // iteration cap for ElmanNetwork()
       int maxsample;       // number of samples loaded by set()
   public:
       // _input/_output/_hidden: layer sizes.  Callers must keep them
       // within the fixed capacities above (10000 neurons, 100 samples).
       elman(int _input, int _output, int _hidden){
           hiddenNeurons = _hidden;
           inputNeurons = _input;
           outputNeurons = _output;
  
           learnRate = 0.2;
           maxTests = 200000;
       }
       int getRandomNumber(){
           //Generate a random integer in [0, 5] (MQL4 rand() tops out at 32767).
           return int(6 * rand() / ((double)32767 + 1.0));
       }
       double sigmoid(double val){
           return (1.0 / (1.0 + exp(-val)));
       }
       // Sigmoid derivative expressed in terms of the sigmoid's OUTPUT value.
       double sigmoidDerivative(double val){
           return (val * (1.0 - val));
       }
       void feedForward();
       void assignRandomWeights();
       void backPropagate();
       void set(double& _input[][], double& _output[][], int maxindex);
       void ElmanNetwork();
       void testNetwork();
       void loadneuron();
       void saveneuron();
       double error();
};
// Copy the caller's training data into the fixed-size sample buffers and
// remember how many samples were loaded.
// _input  : maxindex rows of inputNeurons values.
// _output : maxindex rows of outputNeurons values.
// maxindex: number of samples; clamped to the buffer capacity (100) --
//           the original wrote past sampleInput[100]/sampleout[100] when
//           given more rows.
void elman::set(double &_input[][],double &_output[][],int maxindex)
{
       if (maxindex > 100) maxindex = 100;   // hard capacity of the buffers
       if (maxindex < 0)   maxindex = 0;
       for (int i = 0; i<maxindex; i++)
       {
              for (int j = 0; j<inputNeurons; j++)
              {
                  sampleInput[i][j] = _input[i][j];
              }
              for (int j = 0; j<outputNeurons; j++)
              {
                  sampleout[i][j] = _output[i][j];
              }
       }
       maxsample = maxindex;
}
// Initialise both weight matrices with uniform random values in
// [-0.5, 0.5).  The extra row at index inputNeurons / hiddenNeurons is
// the bias weight for each target neuron, so the outer loops run one
// step past the neuron count on purpose.
void elman::assignRandomWeights(){
       for (int i = 0; i <= inputNeurons; i++){
              for (int h = 0; h < hiddenNeurons; h++){
                     wih[i][h] = -0.5 + double(rand() / (32767 + 1.0));
              }
       }
       for (int h = 0; h <= hiddenNeurons; h++){
              for (int o = 0; o < outputNeurons; o++){
                     who[h][o] = -0.5 + double(rand() / (32767 + 1.0));
              }
       }
}
// One back-propagation step for the sample currently in inputs/target,
// using the activations left in hidden/actual by feedForward().
void elman::backPropagate(){
    // Output-layer error: plain difference.  (The sigmoid-derivative
    // factor was deliberately commented out by the original author.)
    for (int o = 0; o < outputNeurons; o++)
    {
        erro[o] = (target[o] - actual[o]);
    }

    // Hidden-layer error: propagate the output errors back through who,
    // then scale by the derivative of the hidden activation.
    for (int h = 0; h < hiddenNeurons; h++)
    {
        errh[h] = 0.0;
        for (int o = 0; o < outputNeurons; o++)
        {
            errh[h] += erro[o] * who[h][o];
        }
        errh[h] *= sigmoidDerivative(hidden[h]);
    }

    // Update hidden->output weights; row hiddenNeurons is the bias.
    for (int o = 0; o < outputNeurons; o++)
    {
        for (int h = 0; h < hiddenNeurons; h++)
        {
            who[h][o] += (learnRate * erro[o] * hidden[h]);
        }
        who[hiddenNeurons][o] += (learnRate * erro[o]);
    }

    // Update input->hidden weights; row inputNeurons is the bias.
    for (int h = 0; h < hiddenNeurons; h++)
    {
        for (int i = 0; i < inputNeurons; i++)
        {
            wih[i][h] += (learnRate * errh[h] * inputs[i]);
        }
        wih[inputNeurons][h] += (learnRate * errh[h]);
    }
}
// Forward pass: inputs -> hidden (sigmoid) -> actual (sigmoid).  The
// rows wih[inputNeurons][*] and who[hiddenNeurons][*] act as biases.
// NOTE(review): despite the class name there is no recurrent context
// feedback here -- this is a plain feed-forward MLP pass.
void elman::feedForward()
{
    double acc;

    // Input layer -> hidden layer.
    for (int h = 0; h < hiddenNeurons; h++)
    {
        acc = 0.0;
        for (int i = 0; i < inputNeurons; i++)
        {
            acc += inputs[i] * wih[i][h];
        }
        acc += wih[inputNeurons][h];     // bias weight
        hidden[h] = sigmoid(acc);
    }

    // Hidden layer -> output layer.
    for (int o = 0; o < outputNeurons; o++)
    {
        acc = 0.0;
        for (int h = 0; h < hiddenNeurons; h++)
        {
            acc += hidden[h] * who[h][o];
        }
        acc += who[hiddenNeurons][o];    // bias weight
        actual[o] = sigmoid(acc);
    }
}
// Total absolute error of the last forward pass over all outputs.
// FIX: the original summed the SIGNED differences, so positive and
// negative per-output errors cancelled each other and the training loop
// in ElmanNetwork() could stop (error < 0.00001) long before the
// network had actually converged.
double elman::error(){
   
    double sum = 0;
    for (int out = 0; out <= (outputNeurons - 1); out++)
    {
        sum += MathAbs(target[out] - actual[out]);
    }
    return sum;
}
// Online training loop: present one stored sample per iteration, cycling
// through the sample set, until either the per-sample error drops below
// 1e-5 or maxTests iterations have run.  Prints the iteration count.
void elman::ElmanNetwork(){
    int sample = 0;
    int iterations = 0;
    bool stopLoop = false;
    do {

        // Load the current sample into the working input/target buffers.
        for (int i = 0; i < inputNeurons; i++){
            inputs[i] = sampleInput[sample][i];
        }
        for (int i = 0; i < outputNeurons; i++){
            target[i] = sampleout[sample][i];
        }

        feedForward();

        if (iterations > maxTests)
        {
            stopLoop = true;
        }
        iterations += 1;

        // FIX: evaluate error() once per iteration -- the original called
        // it twice in the same condition, recomputing the identical sum.
        double err = error();
        if (err < 0.00001 && err >= 0)
        {
            stopLoop = true;
        }
        backPropagate();

        // Wrap around to the first sample after the last one.
        sample += 1;
        if (sample > maxsample - 1)
        {
            sample = 0;
        }
    } while (stopLoop == false);
    printf("Learning :%d",iterations);
}
// Persist both weight matrices as text: one row per source neuron
// (including the bias row), values space-separated, rows ending in '\n'.
// elman.txt holds wih (input->hidden), elman1.txt holds who
// (hidden->output).  loadneuron() parses this exact format back.
// FIX: the original never checked FileOpen()'s result and would call
// FileWriteString/FileClose on an invalid handle.
void elman::saveneuron()
{
      ///save neuron 1: input->hidden weights
      string a33 = "";
       for (int inp = 0; inp <= inputNeurons; inp++){
           for (int hid = 0; hid <= (hiddenNeurons - 1); hid++){
               a33 += DoubleToStr(wih[inp][hid])+" ";
          }
          a33 += "\n";
       }
      int handle = FileOpen("elman.txt", FILE_CSV|FILE_READ|FILE_WRITE, ',');
      if (handle == INVALID_HANDLE){
          Print("saveneuron: cannot open elman.txt, error ", GetLastError());
      } else {
          FileWriteString(handle, a33);
          FileClose(handle);
      }
     
      ///save neuron 2: hidden->output weights
      a33 = "";
       for (int hid = 0; hid <= hiddenNeurons; hid++){
           for (int out = 0; out <= (outputNeurons - 1); out++){
               a33 += DoubleToStr(who[hid][out])+" ";
           }
           a33 += "\n";
       }
       handle = FileOpen("elman1.txt", FILE_CSV|FILE_READ|FILE_WRITE, ',');
      if (handle == INVALID_HANDLE){
          Print("saveneuron: cannot open elman1.txt, error ", GetLastError());
      } else {
          FileWriteString(handle, a33);
          FileClose(handle);
      }
      /////////////////////
}
// One raw byte of a weight file.  loadneuron() fills an array of these
// via FileReadArray so the file can be scanned character-by-character.
// NOTE(review): despite the name and original comment this is NOT a
// price -- the field holds a single file character.
struct prices
{
   char            bid;  // one raw byte read from the weight file
};
// Restore both weight matrices from the files written by saveneuron().
// The files are scanned byte-by-byte: a space terminates one number
// (stored at [kolom][index]), a newline terminates one row.
// FIX: the original never checked FileOpen()'s result and would call
// FileReadArray/FileClose on an invalid handle when a file was missing.
void elman::loadneuron()
{
      prices a44[];
      string temp44 = "";
      int index = 0, kolom = 0;   // index = column within row, kolom = row

      ///load neuron 1: input->hidden weights into wih
      int handle = FileOpen("elman.txt", FILE_READ|FILE_BIN);
      if (handle == INVALID_HANDLE){
          Print("loadneuron: cannot open elman.txt, error ", GetLastError());
      } else {
          FileReadArray(handle, a44);
          for (int i = 0; i < ArraySize(a44); i++)
          {
              char c11 = a44[i].bid;
              if (c11 != ' '){
                  temp44 += CharToString(c11);
              }
              if (c11 == ' ')
              {
                  wih[kolom][index] = StrToDouble(temp44);
                  index++;
                  temp44 = "";
              }
              if (c11 == '\n')
              {
                  kolom++;
                  index = 0;
                  temp44 = "";
              }
          }
          FileClose(handle);
      }

      ///load neuron 2: hidden->output weights into who
      temp44 = "";
      index = 0; kolom = 0;
      handle = FileOpen("elman1.txt", FILE_READ|FILE_BIN);
      if (handle == INVALID_HANDLE){
          Print("loadneuron: cannot open elman1.txt, error ", GetLastError());
      } else {
          FileReadArray(handle, a44);
          for (int i = 0; i < ArraySize(a44); i++)
          {
              char c11 = a44[i].bid;
              if (c11 != ' '){
                  temp44 += CharToString(c11);
              }
              if (c11 == ' ')
              {
                  who[kolom][index] = StrToDouble(temp44);
                  index++;
                  temp44 = "";
              }
              if (c11 == '\n')
              {
                  kolom++;
                  index = 0;
                  temp44 = "";
              }
          }
          FileClose(handle);
      }
}

// Build the 39-element feature vector (13 indicator-derived values for
// each of the 3 most recent closed bars), run a forward pass, and -- if
// the global flag 'transaksi' is set -- open a market order based on
// which of the first two network outputs is larger.
// FIXES: (1) the original's minimum-lot guard reset Lots to 0.01, which
// is what it already was (a no-op) -- it now uses the broker minimum so
// the order is not rejected; (2) OrderSend's return value was ignored --
// failures are now reported via GetLastError.
// NOTE(review): inttodouble4 is a helper defined elsewhere in this file;
// presumably it scales the integer feature into the sigmoid's useful
// input range -- verify against its definition.
void elman::testNetwork()
{     
    int counter=0;
   for(int i=1;i<4;i++)
   {
               inputs[counter]=inttodouble4((int) MathRound((High[i]/Point)-(Open[i]/Point)));counter++;
               inputs[counter]=inttodouble4((int) MathRound((Open[i]/Point)-(Low[i]/Point)));counter++;
               inputs[counter]=inttodouble4((int) MathRound((iATR(NULL,0,3,i)/Point)));counter++;
               inputs[counter]=inttodouble4((int) MathRound((iStdDev(NULL,0,3,0,MODE_EMA,PRICE_CLOSE,i)/Point)));counter++;
               inputs[counter]=inttodouble4((int) MathRound(iRSI(NULL,0,3,PRICE_CLOSE,i)/Point));counter++;
               inputs[counter]=inttodouble4((int) MathRound((iWPR(NULL,0,3,i)*-1)));counter++;
               inputs[counter]=inttodouble4((int) MathRound(iStochastic(NULL,0,5,3,3,MODE_SMA,0,MODE_MAIN,i)/Point));counter++;
               inputs[counter]=inttodouble4((int) MathRound(iStochastic(NULL,0,5,3,3,MODE_SMA,0,MODE_SIGNAL,i)/Point));counter++;   
               inputs[counter]=inttodouble4((int) MathRound(iAD(NULL,0,i)*-1));counter++;
               inputs[counter]=inttodouble4((int) MathRound(iMFI(NULL,0,3,i)/Point));counter++;
                     
               inputs[counter]=inttodouble4((int) MathRound((iADX(NULL,0,3,PRICE_CLOSE,MODE_MAIN,i)/Point)));counter++;
               inputs[counter]=inttodouble4((int) MathRound((iADX(NULL,0,3,PRICE_CLOSE,MODE_PLUSDI,i)/Point)));counter++;
               inputs[counter]=inttodouble4((int) MathRound((iADX(NULL,0,3,PRICE_CLOSE,MODE_MINUSDI,i)/Point)));counter++;
    }
    feedForward();  
   
    if(transaksi==true){
       double Lots = 0.01;//((AccountBalance()/100)*(AccountInfoDouble(ACCOUNT_MARGIN_SO_CALL)/100))/50;
       // Clamp the lot size to the broker's allowed range.
       if(Lots < MarketInfo(Symbol(),MODE_MINLOT))
       {
           Lots = MarketInfo(Symbol(),MODE_MINLOT);
       }
       if(Lots>MarketInfo(Symbol(),MODE_MAXLOT))
       {
          Lots = MarketInfo(Symbol(),MODE_MAXLOT);
       }
       // output[0] > output[1] => predicted fall => sell; reverse => buy.
       if(NormalizeDouble(actual[0],4) > NormalizeDouble(actual[1],4)){
            int sellTicket = OrderSend(Symbol(),OP_SELL,Lots,Bid,50,0,NormalizeDouble(Bid-Point*25, Digits),"",OrdersTotal(),0,CLR_NONE);
            if(sellTicket < 0) Print("testNetwork: OP_SELL failed, error ", GetLastError());
       }
       if(NormalizeDouble(actual[0],4) < NormalizeDouble(actual[1],4)){
            int buyTicket = OrderSend(Symbol(),OP_BUY,Lots,Ask,50,0,NormalizeDouble(Ask+Point*25, Digits),"",OrdersTotal(),0,CLR_NONE);
            if(buyTicket < 0) Print("testNetwork: OP_BUY failed, error ", GetLastError());
       }
    }
}


Stanislav Korotky
17925
Stanislav Korotky 2016.06.07 21:48  
You may use this one from the codebase. (Make sure to apply the crash fix attached to this post on the Russian forum.)
Слегка исправленная версия BPNN.dll (marketeer) - MQL4 форум
Слегка исправленная версия BPNN.dll (marketeer) - MQL4 форум
  • www.mql5.com
Слегка исправленная версия BPNN.dll (marketeer) - MQL4 форум
Jose Miguel Soriano
4963
Jose Miguel Soriano 2016.06.08 11:32  
Sabil Yudifera Daeng Pattah:

My code is very slow: training on one year of data currently takes 2 days, and I need it to finish in about 1 minute.


https://www.mql5.com/en/code/1146
ALGLIB - Numerical Analysis Library
ALGLIB - Numerical Analysis Library
  • votes: 57
  • 2012.10.12
  • Sergey Bochkanov
  • www.mql5.com
ALGLIB math function library (v. 3.5.0) ported to MQL5.
Sabil Yudifera Daeng Pattah
13949
Sabil Yudifera Daeng Pattah 2016.12.11 13:53  
hello it's only library. Real programmers do not use the library :)
Fernando Carreiro
2348
Fernando Carreiro 2016.12.11 13:57  
Sabil Yudifera Daeng Pattah: hello it's only library. Real programmers do not use the library :)

Real programmers DO use libraries so as to have reusable code! If you don't like it, just copy the code to your main file! It is that simple and real programmers know that!

PS! I am referring to MQL code, not DLLs, which are a totally different story!

To add comments, please log in or register