Time cost of addition vs. multiplication
Multiplication is more complex than addition, so in theory it should take longer. Yet my test program shows addition taking more time than multiplication. Could someone help me figure out where the problem is? Thanks!
Code on Windows:
#include <iostream>
#include <time.h>
using namespace std;

const int ITER = 60000;

int main() {
    double a, b, s;
    clock_t start, end;
    a = 1000.011 / 3;
    b = 2.011 / 3;
    //cout << "a = " << a << ", b = " << b << endl;

    // baseline: plain assignment
    start = clock();
    for (int i = 0; i < ITER; i++)
        for (int j = 0; j < ITER; j++)
        {
            s = a;
        }
    end = clock();
    cout << "s=a cost: " << end - start << "ms" << endl;

    // addition
    start = clock();
    for (int i = 0; i < ITER; i++)
        for (int j = 0; j < ITER; j++)
        {
            s = a + b;
        }
    end = clock();
    cout << "a+b cost: " << end - start << "ms" << endl;

    // multiplication
    start = clock();
    for (int i = 0; i < ITER; i++)
        for (int j = 0; j < ITER; j++)
        {
            s = a * b;
        }
    end = clock();
    cout << "a*b cost: " << end - start << "ms" << endl;
    //cout << "CLOCKS_PER_SEC: " << CLOCKS_PER_SEC << endl;
    return 0;
}
Results:
s=a cost: 5693ms
a+b cost: 8760ms
a*b cost: 6590ms
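A side note on the timing itself: clock() returns ticks, not milliseconds. On my Windows compiler CLOCKS_PER_SEC seems to be 1000, so printing end - start as "ms" happens to work, but a portable conversion would look like this small sketch (not what I actually measured with):

#include <iostream>
#include <time.h>
using namespace std;

int main() {
    clock_t start = clock();
    volatile double s = 0.0;            // volatile so the busy loop is not optimized away
    for (long i = 0; i < 100000000L; i++)
        s = s + 1.0;
    clock_t end = clock();
    // convert ticks to milliseconds explicitly instead of assuming one tick == 1 ms
    double ms = 1000.0 * (end - start) / CLOCKS_PER_SEC;
    cout << "cost: " << ms << "ms" << endl;
    return 0;
}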
Code on Linux:
#include <iostream>
#include <sys/time.h>
using namespace std;

const int ITER = 60000;

int main() {
    double a, b, s;
    timeval start, end;
    double span;
    a = 1000.011 / 3;
    b = 2.011 / 3;
    //cout << "a = " << a << ", b = " << b << endl;

    /*gettimeofday(&start, 0);
    for (int i = 0; i < ITER; i++)
        for (int j = 0; j < ITER; j++);
    gettimeofday(&end, 0);
    span = (end.tv_sec - start.tv_sec) * 1000 + (end.tv_usec - start.tv_usec) / 1000;
    cout << "loop cost: " << span << "ms" << endl;*/

    // baseline: plain assignment
    gettimeofday(&start, 0);
    for (int i = 0; i < ITER; i++)
        for (int j = 0; j < ITER; j++)
        {
            s = a;
        }
    gettimeofday(&end, 0);
    span = (end.tv_sec - start.tv_sec) * 1000 + (end.tv_usec - start.tv_usec) / 1000;
    cout << "s=a cost: " << span << "ms" << endl;

    // addition
    gettimeofday(&start, 0);
    for (int i = 0; i < ITER; i++)
        for (int j = 0; j < ITER; j++)
        {
            s = a + b;
        }
    gettimeofday(&end, 0);
    span = (end.tv_sec - start.tv_sec) * 1000 + (end.tv_usec - start.tv_usec) / 1000;
    cout << "a+b cost: " << span << "ms" << endl;

    // multiplication
    gettimeofday(&start, 0);
    for (int i = 0; i < ITER; i++)
        for (int j = 0; j < ITER; j++)
        {
            s = a * b;
        }
    gettimeofday(&end, 0);
    span = (end.tv_sec - start.tv_sec) * 1000 + (end.tv_usec - start.tv_usec) / 1000;
    cout << "a*b cost: " << span << "ms" << endl;
    return 0;
}
Results:
s=a cost: 5453ms
a+b cost: 9461ms
a*b cost: 6772ms
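One more thing I'm unsure about: since s is only written and never read afterwards, I don't know whether an optimized build (-O2) would keep these loops at all. A variant that forces every store to actually happen, just a sketch assuming volatile is enough for this purpose, would be:

#include <iostream>
#include <sys/time.h>
using namespace std;

const int ITER = 60000;

// Sketch: declare s as volatile so the compiler must perform every store
// and cannot drop the measured loop as dead code.
int main() {
    double a = 1000.011 / 3;
    double b = 2.011 / 3;
    volatile double s = 0.0;

    timeval start, end;
    gettimeofday(&start, 0);
    for (int i = 0; i < ITER; i++)
        for (int j = 0; j < ITER; j++)
        {
            s = a * b;   // same body as before, but the store is now observable
        }
    gettimeofday(&end, 0);

    double span = (end.tv_sec - start.tv_sec) * 1000.0
                + (end.tv_usec - start.tv_usec) / 1000.0;
    cout << "a*b cost: " << span << "ms" << endl;
    return 0;
}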