Can someone explain why the code below always reports a time of 0? I want a millisecond timer to measure the delay between sending data to the socket and receiving the reply, but no matter what I try, the result is always 0. I even tried microseconds in case my system completed the round trip in under 1 ms.
printf("#: ");
bzero(buffer,256);
fgets(buffer,255,stdin);
struct timeval start, end;
unsigned long mtime, seconds, useconds;
gettimeofday(&start, NULL);
n = write(clientSocket,buffer,strlen(buffer));
if (n < 0)
{
    error("Error: Unable to write to socket!\n");
}
bzero(buffer,256);
n = read(clientSocket,buffer,255);
gettimeofday(&end, NULL);
seconds = end.tv_sec - start.tv_sec;
useconds = end.tv_usec - start.tv_usec;
mtime = ((seconds) * 1000 + useconds/1000.0) + 0.5;
if (n < 0)
{
    error("Error: Unable to read from socket!\n");
}
printf("%s\n",buffer);
printf("Delay: %lu microseconds\n", useconds);