Code:
#include <stdio.h>
#include <sys/types.h>
#include <time.h>
#include <unistd.h>
#include <math.h>
// Size of the data arrays
long Size;
// Constant operands used inside the benchmark loops
char c1;
char c2;
// Loop counters
long int i;
long int n;
int main(void)
{
    Size = 1000000;
    c1 = 5;
    c2 = 3;
    unsigned char a[Size]; /* variable-length arrays of Size bytes each */
    unsigned char b[Size];
    clock_t t0, t1;        /* clock_t is defined in <time.h> */

    /* Initialize the arrays; valid indices are 0 .. Size-1 */
    for (i = 0; i < Size; i++)
    {
        a[i] = 2;
        b[i] = 5;
    }

    /* Baseline: 2 elements per loop iteration */
    t0 = clock();
    for (n = 0; n < 600; n++)
    {
        for (i = 0; i <= Size - 2; i = i + 2)
        {
            a[i]   = b[i]   + c1;
            a[i+1] = b[i+1] * c2;
        }
    }
    t1 = clock();
    printf("CPU time: %f\n", (float)(t1 - t0) / CLOCKS_PER_SEC);
    /* Re-initialize the arrays before the next measurement */
    for (i = 0; i < Size; i++)
    {
        a[i] = 2;
        b[i] = 5;
    }

    /* Unrolled 4x relative to the baseline: 8 elements per loop iteration */
    t0 = clock();
    for (n = 0; n < 600; n++)
    {
        for (i = 0; i <= Size - 8; i = i + 8)
        {
            a[i]   = b[i]   + c1;
            a[i+1] = b[i+1] * c2;
            a[i+2] = b[i+2] + c1;
            a[i+3] = b[i+3] * c2;
            a[i+4] = b[i+4] + c1;
            a[i+5] = b[i+5] * c2;
            a[i+6] = b[i+6] + c1;
            a[i+7] = b[i+7] * c2;
        }
    }
    t1 = clock();
    printf("4x CPU time: %f\n", (float)(t1 - t0) / CLOCKS_PER_SEC);
    /* Re-initialize the arrays before the last measurement */
    for (i = 0; i < Size; i++)
    {
        a[i] = 2;
        b[i] = 5;
    }

    /* Unrolled 10x relative to the baseline: 20 elements per loop iteration */
    t0 = clock();
    for (n = 0; n < 600; n++)
    {
        for (i = 0; i <= Size - 20; i = i + 20)
        {
            a[i]    = b[i]    + c1;
            a[i+1]  = b[i+1]  * c2;
            a[i+2]  = b[i+2]  + c1;
            a[i+3]  = b[i+3]  * c2;
            a[i+4]  = b[i+4]  + c1;
            a[i+5]  = b[i+5]  * c2;
            a[i+6]  = b[i+6]  + c1;
            a[i+7]  = b[i+7]  * c2;
            a[i+8]  = b[i+8]  + c1;
            a[i+9]  = b[i+9]  * c2;
            a[i+10] = b[i+10] + c1;
            a[i+11] = b[i+11] * c2;
            a[i+12] = b[i+12] + c1;
            a[i+13] = b[i+13] * c2;
            a[i+14] = b[i+14] + c1;
            a[i+15] = b[i+15] * c2;
            a[i+16] = b[i+16] + c1;
            a[i+17] = b[i+17] * c2;
            a[i+18] = b[i+18] + c1;
            a[i+19] = b[i+19] * c2;
        }
    }
    t1 = clock();
    printf("10x CPU time: %f\n", (float)(t1 - t0) / CLOCKS_PER_SEC);
    return 0;
}
Given the code above: as I understand it, the program processes an array of 1,000,000 elements 600 times. The baseline loop handles 2 elements per iteration, and the unrolled versions handle 8 and 20 elements per iteration (labelled 4x and 10x).
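If I am counting correctly, the amount of per-element work is the same in every variant (1,000,000 updates x 600 passes = 600,000,000 assignments), but the number of inner-loop iterations differs: 500,000 x 600 = 300,000,000 for the baseline, 125,000 x 600 = 75,000,000 for the 4x version, and 50,000 x 600 = 30,000,000 for the 10x version.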
I ran the program in a virtual machine and got the results below. Why is there a difference between the timings?
CPU time: 2.506459
4x CPU time: 2.087346
10x CPU time: 1.991994
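In case the measurement method matters: clock() reports the CPU time used by the process, so inside the virtual machine I could also cross-check against wall-clock time. Here is a minimal sketch of that cross-check, assuming a POSIX system where clock_gettime() with CLOCK_MONOTONIC is available (now_sec() is just an illustrative helper name, not part of the program above):

#define _POSIX_C_SOURCE 199309L
#include <stdio.h>
#include <time.h>

/* Wall-clock seconds taken from the monotonic clock. */
static double now_sec(void)
{
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    return ts.tv_sec + ts.tv_nsec / 1e9;
}

int main(void)
{
    double t0 = now_sec();
    /* ... one of the benchmark loops goes here ... */
    double t1 = now_sec();
    printf("wall time: %f\n", t1 - t0);
    return 0;
}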