Time delay while sending data through UART using WriteFile at fixed intervals for a set duration


I have written sample code for UART serial communication in C on Windows. Data is sent over the UART for a total duration of 100 ms, with one write every 10 ms. Here is the code:

#include <stdio.h>
#include <string.h>
#include <time.h>
#include <windows.h>
#include <sys/time.h>   /* gettimeofday (MinGW) */
#include <stdint.h>
#include <unistd.h>

/* Busy-wait for roughly the requested number of milliseconds using clock(). */
void delay(int s32MilliSeconds)
{
    clock_t end = clock() + (clock_t)s32MilliSeconds * CLOCKS_PER_SEC / 1000;
    while (clock() < end) {
        /* spin */
    }
}


int main()
{
    const char *c8PortName = "COM4";
    unsigned int u32BaudRate = 9600;

    HANDLE s32HSerial = CreateFile(c8PortName, GENERIC_READ | GENERIC_WRITE, 0, 0,
                                   OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, 0);

    Sleep(500);

    if (s32HSerial == INVALID_HANDLE_VALUE) 
    {

        fprintf(stderr, "Error opening COM port\n");
        return -1;
    }

    if (!PurgeComm(s32HSerial, PURGE_RXCLEAR | PURGE_TXCLEAR)) 
    {
        fprintf(stderr, "Error purging the serial port\n");
        CloseHandle(s32HSerial);
       
        return -1;
    }


    DCB dcbSerialParams = {0};
    dcbSerialParams.DCBlength = sizeof(dcbSerialParams);

    if (!GetCommState(s32HSerial, &dcbSerialParams)) 
    {
        fprintf(stderr, "Error getting DCB state\n");
        CloseHandle(s32HSerial);

        return -1;
    }

    dcbSerialParams.BaudRate = u32BaudRate;
    dcbSerialParams.ByteSize = 8;
    dcbSerialParams.StopBits = ONESTOPBIT;
    dcbSerialParams.Parity = NOPARITY;

    if (!SetCommState(s32HSerial, &dcbSerialParams)) 
    {
        fprintf(stderr, "Error setting DCB state\n");
        CloseHandle(s32HSerial);

        return -1;
    }

    COMMTIMEOUTS timeouts = { 0 };
    timeouts.ReadIntervalTimeout = MAXDWORD;
    timeouts.ReadTotalTimeoutConstant = 280;
    timeouts.ReadTotalTimeoutMultiplier = 0;
    timeouts.WriteTotalTimeoutConstant = 0;
    timeouts.WriteTotalTimeoutMultiplier = 0;

    if (!SetCommTimeouts(s32HSerial, &timeouts)) {
        
        CloseHandle(s32HSerial);

        return -1;
    }
    
    struct timeval tv;
    clock_t duration = 100;   /* total send window in ms */
    clock_t start_time;
    clock_t end_time;

    start_time = clock();
    end_time = start_time + duration * CLOCKS_PER_SEC / 1000;

    while (clock() < end_time)
    {
        gettimeofday(&tv, NULL);
        uint64_t tf = tv.tv_sec * (uint64_t)1000000 + tv.tv_usec;
        printf("TIME Start: %llu\n", (unsigned long long)tf);
        
        DWORD bytesWritten;
        
        char* c8Data = "aaaaaaaaaaaaaaaaaaaaaaa//";
        unsigned int u32DataSize = strlen(c8Data);

        if (!WriteFile(s32HSerial, c8Data, u32DataSize, &bytesWritten, NULL))
        {
            fprintf(stderr, "Error writing to the serial port\n");
            CloseHandle(s32HSerial);
            return -1;
        }

        //usleep(10000);
        delay(10);
                        
        gettimeofday(&tv, NULL);
        uint64_t tb = tv.tv_sec * (uint64_t)1000000 + tv.tv_usec;
        printf("TIME End: %llu\n", (unsigned long long)tb);
        printf("TIME delay: %llu\n", (unsigned long long)(tb - tf));
    }
    
    
    CloseHandle(s32HSerial);
    return 0;
}

and the result is

(screenshot showing the printed start/end timestamps and the measured delays)

The time delay for the first five transfers is constant at around 10,000 microseconds, but after that it varies between roughly 20 and 25 ms for the next three. With a 100 ms window and a 10 ms interval, the expected number of transfers is 9 or 10, but in practice I only get 6 to 8, and in some cases even 5. I see the same behaviour with TCP and UDP. Can anyone help with this problem?
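For what it's worth, here is a minimal stand-alone sketch (not part of the program above; the variable names are my own) of how one 10 ms cycle could be measured with QueryPerformanceCounter instead of clock()/gettimeofday, to rule out timer granularity in the measurement itself:

/* Minimal sketch: measure one 10 ms cycle with QueryPerformanceCounter. */
#include <stdio.h>
#include <windows.h>

int main(void)
{
    LARGE_INTEGER freq, t0, t1;
    QueryPerformanceFrequency(&freq);   /* counter ticks per second */

    QueryPerformanceCounter(&t0);
    Sleep(10);                          /* stands in for WriteFile() + delay(10) */
    QueryPerformanceCounter(&t1);

    double elapsed_us = (double)(t1.QuadPart - t0.QuadPart) * 1e6 / (double)freq.QuadPart;
    printf("elapsed: %.1f us\n", elapsed_us);
    return 0;
}

Sleep(10) here only stands in for the WriteFile() plus delay(10) pair in my loop.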
