gettimeofday计算运行时间,偶尔会有明显偏差?

问题描述 投票:0回答:1

我用

gettimeofday()
来计算代码执行时间,我认为
timer
应该大约等于
timer1+timer2
。然而,在我的测试过程中,偶尔会出现
timer
远大于
timer1+timer2
的情况。这是什么原因呢?

timer1 = 8242 us
timer2 = 440 us
timer = 95027 us

代码:

#include "spidev.h"

#include <fcntl.h>
#include <poll.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/time.h>
#include <time.h>
#include <unistd.h>

/* File descriptor of the SPI device; opened by OpenSpiDev() in main(). */
int SpiDevFd = 0;

/* SPI configuration (mode/bits/speed/delay) filled in main() and passed
 * to OpenSpiDev()/TransferSpi().  Type declared in "spidev.h". */
struct _SPI_Settings spi_settings;

/* 256-byte transmit buffer; main() fills it with the pattern 0..255. */
uint8_t tx[256] = {0};
/* 256-byte receive buffer; ReadData() dumps it via hex_dump(). */
uint8_t rx[256] = {0};

/*
 * Perform one measured read cycle:
 *   1. block on a 1-byte read() from fd (an event/GPIO device) and print
 *      the elapsed wall-clock time as timer1;
 *   2. read a second status byte; if it is non-zero bytes read, run one
 *      SPI transfer via TransferSpi(), print its elapsed time as timer2,
 *      and hex-dump the received buffer.
 *
 * Times are wall-clock (gettimeofday), so they include any time the OS
 * schedules other work — they are not pure execution time.
 *
 * Returns 0 on success; terminates the process via exit(-1) if the
 * second read reports no data or a read fails (original error policy).
 */
int ReadData(int fd)
{
        int ret = 0;
        char ch = 0;

        struct timeval start1;
        struct timeval end1;
        unsigned long timer1 = 0;

        gettimeofday(&start1, NULL);
        ret = read(fd, &ch, sizeof(ch));     /* may block until an event arrives */
        gettimeofday(&end1, NULL);
        if (ret < 0) {                       /* previously ignored: surface read errors */
                perror("read");
                exit(-1);
        }
        /* UL suffix avoids int overflow in the seconds->microseconds product. */
        timer1 = 1000000UL * (end1.tv_sec - start1.tv_sec) + end1.tv_usec - start1.tv_usec;
        printf("timer1 = %lu us\n", timer1); /* %lu: timer1 is unsigned long */

        ret = read(fd, &ch, sizeof(ch));
        if (ret > 0) {
                struct timeval start2;
                struct timeval end2;
                unsigned long timer2 = 0;

                gettimeofday(&start2, NULL);
                TransferSpi(SpiDevFd, &spi_settings, tx, sizeof(tx), rx);
                gettimeofday(&end2, NULL);
                timer2 = 1000000UL * (end2.tv_sec - start2.tv_sec) + end2.tv_usec - start2.tv_usec;
                printf("timer2 = %lu us\n", timer2);
                hex_dump(rx, ARRAY_SIZE(rx), 32, "RX");
                return 0;
        }
        exit(-1);                            /* no data / error on second read */
}

/*
 * Benchmark harness: opens the event device and the SPI device, then runs
 * ReadData() testCnt times, tracking the min/max/average wall-clock time
 * of each full iteration.  Aborts early if any iteration exceeds 25 ms.
 */
int main(void)
{
        int fd = open("/dev/spiio0", O_RDONLY);
        if (fd < 0) {
                /* perror appends ": <errno text>\n" itself; an embedded '\n'
                 * would split the message from the errno explanation. */
                perror("Failed to open GPIO value file");
                return 1;
        }

        /* Fill the TX buffer with the incrementing pattern 0..255. */
        for (int i = 0; i <= 255; i++) {
                tx[i] = (uint8_t)i;
        }

        spi_settings.mode = 0;
        spi_settings.bits = 8;
        spi_settings.speed = 10 * 1000 * 1000;  /* 10 MHz */
        spi_settings.delay = 0;
        SpiDevFd = OpenSpiDev("/dev/spidev2.0", &spi_settings);
        if (SpiDevFd < 0) {
                perror("Failed to open spi dev file");
                return 1;
        }

        unsigned long max = 0;
        unsigned long min = 8500;   /* empirical seed; any faster run lowers it */
        unsigned long avg = 0;      /* running sum; divided by testCnt at the end */
        const int testCnt = 10000;

        for (int i = 0; i < testCnt; i++) {
                struct timeval start;
                struct timeval end;
                unsigned long timer = 0;

                gettimeofday(&start, NULL);
                (void)ReadData(fd);  /* returns 0 on the success path; exits otherwise */
                gettimeofday(&end, NULL);

                /* UL suffix avoids int overflow in the seconds->microseconds product. */
                timer = 1000000UL * (end.tv_sec - start.tv_sec) + end.tv_usec - start.tv_usec;
                printf("timer = %lu us\n", timer);  /* %lu: timer is unsigned long */
                avg += timer;
                if (timer > max) {
                        max = timer;
                        if (max > 25000) {          /* bail out on a >25 ms outlier */
                                printf("max ===%lu\n", max);
                                exit(-1);
                        }
                }
                if (timer < min) {
                        min = timer;
                }
        }
        printf("max=%lu us,min=%lu us, avg= %lu us\n", max, min, avg / testCnt);

        close(fd);
        return 0;
}
c linux arm
1个回答
0
投票

请注意,

gettimeofday()
中的差异并不衡量程序执行时间,而是衡量总体经过的时间。因此,操作系统或其他程序可能在给定的时间间隔内运行并添加到测量的时间中。

带着这样的保留,让我们看看测量的是什么:

  • timer1
    测量从 SPI 读取的 1 个字节。
  • timer2
    在未知(无关紧要)函数 TransferSpi 中测量 SPI 传输。
  • timer
    测量上述两项,并附加:
    • 从 SPI 读取附加字节。
    • 两个
      printf()
      语句
    • 调用未知函数
      hex_dump()
    • (一些表达式的执行时间可能微不足道,在此上下文中可以忽略)

这些额外的项目绝非无关紧要。如果仅靠

printf()
语句就能解释这种差异,我不会感到惊讶。

我建议单独测量上述项目符号,这应该显示额外的时间花费在哪里。

© www.soinside.com 2019 - 2024. All rights reserved.