Difference in time taken with different types


I was writing some code and noticed this: when I use double, the loop takes noticeably longer than when I use uint64_t. Here is the code I was using. Double:

#include <stdio.h>
#include <sys/time.h>
#include <stdint.h>
#include <inttypes.h>

int main () {
    double sum = 0;
    double add = 1;

    // Start measuring time
    struct timeval begin, end;
    gettimeofday(&begin, 0);
    
    int iterations = 1000*1000*1000;
    for (int i=0; i<iterations; i++) {
        sum += add;
    }
    
    // Stop measuring time and calculate the elapsed time
    gettimeofday(&end, 0);
    uint64_t seconds = end.tv_sec - begin.tv_sec;
    uint64_t microseconds = end.tv_usec - begin.tv_usec;
    uint64_t elapsed = (seconds * 1000000) + microseconds;
    
    printf("Result: %.20f\n", sum);
    
    printf("Time measured: %lu microseconds.\n", elapsed);
    
    return 0;
}

Uint64_t:

#include <stdio.h>
#include <sys/time.h>
#include <stdint.h>
#include <inttypes.h>

int main () {
    uint64_t sum = 0;
    uint64_t add = 1;

    // Start measuring time
    struct timeval begin, end;
    gettimeofday(&begin, 0);
    
    int iterations = 1000*1000*1000;
    for (int i=0; i<iterations; i++) {
        sum += add;
    }
    
    // Stop measuring time and calculate the elapsed time
    gettimeofday(&end, 0);
    uint64_t seconds = end.tv_sec - begin.tv_sec;
    uint64_t microseconds = end.tv_usec - begin.tv_usec;
    uint64_t elapsed = (seconds * 1000000) + microseconds;
    
    printf("Result: %lu\n", sum);
    
    printf("Time measured: %lu microseconds.\n", elapsed);
    
    return 0;
}

Here are the results:

Double: 
Result: 1000000000.00000000000000000000
Time measured: 4669474 microseconds.

Uint64_t: 
Result: 1000000000
Time measured: 1888623 microseconds.

Why does this happen?
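
For reference, here is a minimal sketch that runs both loops in a single program with the same gettimeofday-based timing as above, so the two cases can be compared under identical build and run conditions (the compiler and optimization flags used for the measurements above are not stated, and they can change the numbers considerably). The now_us helper is introduced here for convenience and is not part of the original programs.

#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>
#include <sys/time.h>

/* Return the current time in microseconds since the Epoch. */
static uint64_t now_us(void) {
    struct timeval tv;
    gettimeofday(&tv, 0);
    return (uint64_t)tv.tv_sec * 1000000u + (uint64_t)tv.tv_usec;
}

int main(void) {
    const int iterations = 1000 * 1000 * 1000;

    /* Integer accumulation, same loop as the uint64_t program above. */
    uint64_t usum = 0, uadd = 1;
    uint64_t t0 = now_us();
    for (int i = 0; i < iterations; i++) {
        usum += uadd;
    }
    uint64_t t1 = now_us();

    /* Floating-point accumulation, same loop as the double program above. */
    double dsum = 0, dadd = 1;
    uint64_t t2 = now_us();
    for (int i = 0; i < iterations; i++) {
        dsum += dadd;
    }
    uint64_t t3 = now_us();

    printf("uint64_t result: %" PRIu64 ", time: %" PRIu64 " us\n", usum, t1 - t0);
    printf("double   result: %.20f, time: %" PRIu64 " us\n", dsum, t3 - t2);
    return 0;
}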
