I have the following code:
#include <windows.h>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <iostream>

// Raw tick count from the Windows high-resolution performance counter.
unsigned long long GetCurrentTimestamp() {
    LARGE_INTEGER res;
    QueryPerformanceCounter(&res);
    return res.QuadPart;
}

// Ticks per second of the performance counter.
unsigned long long initializeFrequency() {
    LARGE_INTEGER res;
    QueryPerformanceFrequency(&res);
    return res.QuadPart;
}

// Start timestamps from both clocks.
boost::posix_time::ptime startTime = boost::posix_time::microsec_clock::local_time();
unsigned long long start = GetCurrentTimestamp();

// ....
// execution that should be measured
// ....

// End timestamps from both clocks.
unsigned long long end = GetCurrentTimestamp();
boost::posix_time::ptime endTime = boost::posix_time::microsec_clock::local_time();

boost::posix_time::time_duration duration = endTime - startTime;
std::cout << "Duration by Boost posix: " << duration.total_microseconds() << std::endl;
std::cout << "Processing time is "
          << ((end - start) * 1000000 / initializeFrequency())
          << " microsec" << std::endl;
The output of this code is:
Duration by Boost posix: 0
Processing time is 24 microsec
Why is there such a big divergence? Is Boost really so bad that microsec_clock, which is supposed to measure microseconds, is off by tens of microseconds?
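For what it's worth, here is a minimal diagnostic I tried (my own addition, not part of the program above) that probes the effective tick of boost::posix_time::microsec_clock by spinning until the reported time changes; the smallest observed step approximates the clock's real granularity on the platform:

#include <boost/date_time/posix_time/posix_time.hpp>
#include <iostream>

int main() {
    using boost::posix_time::microsec_clock;
    using boost::posix_time::ptime;

    // Spin until the reported time advances; the difference is the
    // smallest step this clock can actually observe.
    ptime t0 = microsec_clock::local_time();
    ptime t1 = t0;
    while (t1 == t0) {
        t1 = microsec_clock::local_time();
    }
    std::cout << "Smallest observable step: "
              << (t1 - t0).total_microseconds() << " microseconds\n";
    return 0;
}

If this prints a step much larger than 1 microsecond, that would explain why a ~24 microsecond interval rounds down to 0.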