I am learning C (on Cygwin) and trying to build a simple remote execution system for an assignment.
One of the simple requirements has me stuck: "The client will report the time it took the server to respond to each request."
I have searched for and tried to adapt other working solutions, but the measured time always comes back as 0 in the end.
Excerpt from what I have:
#include <stdio.h>
#include <string.h>
#include <strings.h>   /* bzero() */
#include <time.h>
#include <unistd.h>    /* read(), write(), sleep() */

for (;;)
{
    bzero(sendline, 1024);
    bzero(recvline, 1024);

    printf("> ");
    fgets(sendline, 1024, stdin);
    sendline[strcspn(sendline, "\n")] = 0;   /* strip the trailing newline */
    if (strcmp(sendline, "quit") == 0) break;

    clock_t start = clock(), diff;
    write(sock, sendline, strlen(sendline) + 1);
    read(sock, recvline, 1024);
    sleep(2);   /* added while testing, to force a measurable delay */

    diff = clock() - start;
    int msec = diff * 1000 / CLOCKS_PER_SEC;
    printf("%s (%d s / %d ms)\n\n", recvline, msec / 1000, msec % 1000);
}
I also tried using a float, and multiplying by 10000 instead of dividing by 1000, to see if any trace of a value would show up, but it always returns 0. Obviously something must be wrong with how I am implementing it, but after a lot of reading I can't figure out what.
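For anyone hitting the same thing, the root cause (which the printouts in the edit below confirm): clock() measures processor time consumed by the program itself, not elapsed wall-clock time, so time the client spends blocked in read() or sleep() never shows up in it. A minimal standalone sketch, independent of the assignment code, that makes the difference visible:

#include <stdio.h>
#include <time.h>
#include <unistd.h>

int main(void)
{
    clock_t c0 = clock();     /* CPU time used so far */
    time_t  t0 = time(NULL);  /* wall-clock time, 1 s resolution */

    sleep(2);                 /* blocked: the process burns no CPU */

    clock_t c1 = clock();
    time_t  t1 = time(NULL);

    /* CPU difference is ~0 ticks; wall difference is ~2 seconds */
    printf("CPU ticks elapsed: %ld\n", (long)(c1 - c0));
    printf("Wall seconds elapsed: %ld\n", (long)(t1 - t0));
    return 0;
}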
- Edit -
Printout of the values:
clock_t start = clock(), diff;
printf("Start time: %lld\n", (long long) start);
sleep(2);
printf("End time: %lld\n", (long long) clock());
diff = clock() - start;
printf("Diff time: %lld\n", (long long) diff);
printf("Clocks per sec: %ld\n", (long) CLOCKS_PER_SEC);
Result:
Start time: 15
End time: 15
Diff time: 0
Clocks per sec: 1000
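For wall-clock timing there are two common POSIX options. Besides gettimeofday() (used in the final code below), clock_gettime() with CLOCK_MONOTONIC should also work (assuming a reasonably recent Cygwin) and is not affected by system clock adjustments; a sketch:

#include <stdio.h>
#include <time.h>
#include <unistd.h>

int main(void)
{
    struct timespec start, end;

    clock_gettime(CLOCK_MONOTONIC, &start);
    sleep(2);  /* stand-in for the write()/read() round trip */
    clock_gettime(CLOCK_MONOTONIC, &end);

    /* subtract seconds and nanoseconds separately, then combine */
    double msec = (end.tv_sec - start.tv_sec) * 1000.0
                + (end.tv_nsec - start.tv_nsec) / 1000000.0;
    printf("Elapsed: %.3f ms\n", msec);
    return 0;
}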
- FINAL WORKING CODE -
#include <sys/time.h>

struct timeval start, end;

gettimeofday(&start, NULL);
/* ... write() the request and read() the response here ... */
gettimeofday(&end, NULL);

/* subtract the seconds first so a 32-bit long cannot overflow */
long usec = (end.tv_sec - start.tv_sec) * 1000000L
          + (end.tv_usec - start.tv_usec);
double msec = (double) usec / 1000;
printf("\n%s (%.3f ms)\n\n", recvline, msec);