
You are missing trading opportunities:
- Free trading apps
- Over 8,000 signals for copying
- Economic news for exploring financial markets
Registration
Log in
You agree to website policy and terms of use
If you do not have an account, please register
Could you post the code that produces this output ?
A little rough around the edges, but good enough to notice the timer wasn't dropping to anywhere close to the 1ms mark.
// NOTE(review): the opening `#import "winmm.dll"` directive was lost in the
// paste — without it these declarations do not compile. Restored to match the
// complete example later in the thread.
#import "winmm.dll"
int timeBeginPeriod(uint per);   // request a minimum system timer resolution (ms)
int timeEndPeriod (uint per);    // release the resolution request made above
#import
int cnt = 0;          // number of OnTimer() ticks accumulated so far
ulong cumulative = 0; // sum of inter-tick delays, in microseconds
ulong last_time = 0;  // timestamp (µs) of the previous OnTimer() call
// Initialization: raise the system timer resolution to 1 ms and start a 1 ms
// terminal timer, then record the baseline for the first delay sample.
int OnInit()
{
timeBeginPeriod(1);          // winmm: request 1 ms scheduler granularity
EventSetMillisecondTimer(1); // ask the terminal for a 1 ms OnTimer() cadence
last_time = GetMicrosecondCount(); // baseline for the first delay measurement
return(INIT_SUCCEEDED);
}
// Teardown: release the 1 ms resolution request (must pair timeBeginPeriod)
// and stop the terminal timer.
void OnDeinit(const int reason)
{
timeEndPeriod(1);  // balance the timeBeginPeriod(1) call from OnInit
EventKillTimer();
}
// Measure the actual OnTimer() cadence: accumulate the elapsed time between
// consecutive calls and print the running average every 1000 samples.
void OnTimer()
{
ulong time_now = GetMicrosecondCount();
cumulative += (time_now-last_time); // delay since the previous tick, in µs
last_time = time_now;
cnt++;
if(cnt>=1000)
{
// Fix: cumulative/cnt is ulong, but %i expects an int — cast explicitly so
// the format specifier and argument type agree. (The average delay is a few
// thousand µs at most, so the narrowing cast is safe.)
printf("Average delay: %iμs",(int)(cumulative/cnt));
cumulative = 0;
cnt = 0;
}
}
And what is the external tool you used ?
https://vvvv.org/contribution/windows-system-timer-tool
Quite insightful. For example, playing a YouTube video in Chrome drops it to 0.5ms
A little rough around the edges, but good enough to notice the timer wasn't dropping to anywhere close to the 1ms mark.
// NOTE(review): the opening `#import "winmm.dll"` directive was lost in the
// paste — without it these declarations do not compile. Restored to match the
// complete example later in the thread.
#import "winmm.dll"
int timeBeginPeriod(uint per);   // request a minimum system timer resolution (ms)
int timeEndPeriod (uint per);    // release the resolution request made above
#import
int cnt = 0;          // number of OnTimer() ticks accumulated so far
ulong cumulative = 0; // sum of inter-tick delays, in microseconds
ulong last_time = 0;  // timestamp (µs) of the previous OnTimer() call
// Initialization: raise the system timer resolution to 1 ms and start a 1 ms
// terminal timer, then record the baseline for the first delay sample.
int OnInit()
{
timeBeginPeriod(1);          // winmm: request 1 ms scheduler granularity
EventSetMillisecondTimer(1); // ask the terminal for a 1 ms OnTimer() cadence
last_time = GetMicrosecondCount(); // baseline for the first delay measurement
return(INIT_SUCCEEDED);
}
// Teardown: release the 1 ms resolution request (must pair timeBeginPeriod)
// and stop the terminal timer.
void OnDeinit(const int reason)
{
timeEndPeriod(1);  // balance the timeBeginPeriod(1) call from OnInit
EventKillTimer();
}
// Measure the actual OnTimer() cadence: accumulate the elapsed time between
// consecutive calls and print the running average every 1000 samples.
void OnTimer()
{
ulong time_now = GetMicrosecondCount();
cumulative += (time_now-last_time); // delay since the previous tick, in µs
last_time = time_now;
cnt++;
if(cnt>=1000)
{
// Fix: cumulative/cnt is ulong, but %i expects an int — cast explicitly so
// the format specifier and argument type agree. (The average delay is a few
// thousand µs at most, so the narrowing cast is safe.)
printf("Average delay: %iμs",(int)(cumulative/cnt));
cumulative = 0;
cnt = 0;
}
}
https://vvvv.org/contribution/windows-system-timer-tool
Quite insightful. For example, playing a YouTube video in Chrome drops it to 0.5ms
And I noticed Firefox is also dropping the timer to 1ms or 0.5 ms.
I tested on MT5 and got the same behaviour. No way to go below 15 ms.
And I noticed Firefox is also dropping the timer to 1ms or 0.5 ms.
It does seem that adjusting the system timer will affect Sleep()
#import "winmm.dll"
int timeBeginPeriod(uint per);
int timeEndPeriod(uint per);
#import
// Script entry point: measure the average Sleep(1) delay with and without the
// winmm 1 ms timer-resolution request, and print both results.
void OnStart()
{
// Fix: GetDelay() returns ulong but %i expects an int — cast explicitly so
// the format specifier matches the argument (delays are far below INT_MAX).
printf("Average delay without winmm: %iμs",(int)GetDelay());
timeBeginPeriod(1);  // request 1 ms system timer resolution
printf("Average delay with winmm: %iμs",(int)GetDelay());
timeEndPeriod(1);    // restore the default resolution
}
// Return the average duration of Sleep(1), in microseconds, measured over
// `samples` iterations. The default of 1000 preserves the original behaviour;
// a non-positive count returns 0 instead of dividing by zero.
ulong GetDelay(const int samples=1000)
{
if(samples<=0)
return(0);
ulong cumulative = 0;
ulong last_time = GetMicrosecondCount();
Sleep(1); // prime the loop so the first measurement spans a full sleep
for(int i=0; i<samples; i++)
{
ulong time_now = GetMicrosecondCount();
cumulative += (time_now-last_time);
last_time = time_now;
Sleep(1);
}
return(cumulative/(ulong)samples);
}
Average delay with winmm: 2000μs
I have been using wPeriodMin and Sleep to slow down the tester when the speed is set to 32.
//| Timer.mq4 |
//| Copyright 2016, |
//| https:// |
//+------------------------------------------------------------------+
#property copyright "Copyright 2016,"
#property link "https://"
#property version "1.00"
#property strict
// Mirrors the Win32 TIMECAPS structure filled in by winmm!timeGetDevCaps.
// Field names must stay as-is: they are read directly in OnInit below.
struct timecaps_tag {
uint wPeriodMin;  // minimum supported timer period, in milliseconds
uint wPeriodMax;  // maximum supported timer period, in milliseconds
};
#import "kernel32.dll"
void Sleep(int);
#import "winmm.dll"
int timeGetDevCaps(timecaps_tag&,uint);
int timeBeginPeriod(uint);
int timeEndPeriod(uint);
#import
//
input bool winmm=true; // if true, request 1 ms system timer resolution via winmm.dll
//+------------------------------------------------------------------+
//| Expert initialization function |
//+------------------------------------------------------------------+
int OnInit()
{
//--- create timer
timecaps_tag t;
// Query the supported timer-resolution range. The return value is ignored:
// this is a best-effort diagnostic print only; if the call fails, `t` is
// printed uninitialized.
timeGetDevCaps(t,sizeof(timecaps_tag));
Print(t.wPeriodMin," ",t.wPeriodMax);
if(winmm)timeBeginPeriod(1); // raise resolution to 1 ms so Sleep(1) ≈ 1 ms
//---
return(INIT_SUCCEEDED);
}
//+------------------------------------------------------------------+
//| Expert deinitialization function |
//+------------------------------------------------------------------+
void OnDeinit(const int reason)
{
if(winmm)timeEndPeriod(1); // must pair the timeBeginPeriod(1) from OnInit
//--- destroy timer
// NOTE(review): OnInit never calls EventSetMillisecondTimer, so this kill is
// a harmless leftover from the EA template.
EventKillTimer();
}
//+------------------------------------------------------------------+
//| Expert tick function |
//+------------------------------------------------------------------+
void OnTick()
{
// Show tester throughput, then throttle it. kernel32::Sleep is the raw Win32
// Sleep; its actual granularity depends on the timeBeginPeriod(1) request
// made in OnInit (≈1 ms with winmm, ≈16 ms without).
Comment("bars per second: "+(string)BarsPerSec());
kernel32::Sleep(1);
}
//+--
//----------
// Throughput gauge: number of new bars seen during the most recent elapsed
// second. The value is refreshed at most once per 1000 ms and held constant
// between refreshes.
uint BarsPerSec()
{
static uint lastSampleMs = GetTickCount(); // time of the last refresh
static uint lastBarCount = Bars;           // bar count at the last refresh
static uint ratePerSec;                    // latest bars-per-second reading
if(GetTickCount()-lastSampleMs>1000)
{
lastSampleMs = GetTickCount();
ratePerSec   = Bars-lastBarCount;
lastBarCount = Bars;
}
return ratePerSec;
}
//---
winmm: true (so the Sleep can be 1ms instead of 16 ms) gives more bars per second.
Average delay with winmm: 2000μs
It looks to me as though this is both a hardware limitation and a Windows limitation. Nothing other than a processor-melting infinite loop looks capable of delivering an accurate 1-millisecond wait.
The following code manages, on my computer, to deliver an average wait of about 1030μs at the cost of sometimes waiting as little as 900μs. Results may vary on other computers, or under heavy O/S load.
#import "kernel32.dll"
int CreateEventW(int,int,int,int);
int ResetEvent(int);
int WaitForSingleObject(int, int);
int CloseHandle(int);
#import
#import "Winmm.dll"
int timeSetEvent(int, int, int, int, int);
int timeKillEvent(int);
#import
#import "user32.dll"
int PostMessageA(int hWnd,int Msg,int wParam,int lParam);
int RegisterWindowMessageW(string lpString);
#import
// Allow waits of e.g. as little as 900 microseconds
input uint ErrorToleranceMicroseconds = 100;
int glbEvent = 0; // Win32 event handle signalled by the multimedia timer (0 = none)
int glbTimer = 0; // winmm timer id returned by timeSetEvent (0 = not created)
// Create an auto-reset event plus a 1 ms periodic multimedia timer that
// signals it (0x0021 = TIME_PERIODIC | TIME_CALLBACK_EVENT_SET), then post a
// synthetic tick so OnTick() starts even on a quiet market.
// Returns 0 on success, 1 if the event cannot be created, 2 if the timer
// cannot be created.
int OnInit()
{
glbEvent = CreateEventW(0, 0, 0, 0);
if (!glbEvent) {
Print("Event creation failed");
return 1;
}
glbTimer = timeSetEvent(1, 0, glbEvent, 0, 0x0021);
if (!glbTimer) {
Print("Timer creation failed");
return 2;
}
SendTickToSelf();
return 0;
}
// Tear down in reverse order of creation: stop the multimedia timer FIRST so
// it can no longer fire, then close the event handle it was signalling.
// Fix: the original closed the handle before killing the timer, leaving a
// window in which the still-running periodic timer could set an already
// closed handle.
void OnDeinit(const int reason)
{
if (glbTimer) timeKillEvent(glbTimer);
if (glbEvent) CloseHandle(glbEvent);
}
// Busy-ish wait benchmark: repeatedly wait ~1 ms using the multimedia-timer
// event, recording the actual wait per iteration, and print avg/max/min (µs)
// when the EA is stopped.
void OnTick()
{
ulong total = 0;
ulong samples = 0;
ulong max = 0, min = 999999;
while (!IsStopped()) {
ulong msc = GetMicrosecondCount();
// Block on the 1 ms timer event until at least (1000 - tolerance) µs have
// elapsed; WaitForSingleObject yields the CPU between timer signals.
while (GetMicrosecondCount() - msc < (1000 - ErrorToleranceMicroseconds)) {
WaitForSingleObject(glbEvent, 1);
}
// Collect wait statistics
ulong now = GetMicrosecondCount();
ulong wait = now - msc;
total += wait;
if (max < wait) max = wait;
if (min > wait) min = wait;
samples++;
}
// Fix: guard the division — if the EA is stopped before the first sample
// completes, `samples` is still 0 and total/samples divides by zero.
if (samples > 0)
Print("Waits (microseconds), avg: ", total / samples, ", max: " , max, ", min: " , min);
}
// Force the EA to start even when there are no real ticks: post MetaTrader's
// internal "new tick" window message to this chart's own window handle.
void SendTickToSelf()
{
int chartWnd = WindowHandle(Symbol(), Period());
int tickMsg  = RegisterWindowMessageW("MetaTrader4_Internal_Message");
PostMessageA(chartWnd, tickMsg, 2, 1);
}
The following code [...]
... more generally, if the aim is to run in the tightest possible loop, while still yielding time to other threads and not maxing out a processor core, then the above technique and WaitForSingleObject() looks more efficient than anything using Sleep(). Average yield/wait is less than 1 millisecond on my computer.
// Yield to other threads, but with absolute minimum delay
WaitForSingleObject(glbEvent, 1);
...
}
Nicely done.
On mine: Waits (microseconds), avg: 1169, max: 32585, min: 900
I have been using wPeriodMin and Sleep to slow down the tester when the speed is set to 32.
The following code [...]
For the record, to be reliable it turns out that the above code also requires a call to timeBeginPeriod(1), as in the other examples above. I hadn't noticed that because I'm using Chrome which does its own call to timeBeginPeriod.