While experimenting with POSIX signals, I noticed that delivery is not very reliable. Here is an example client that encodes each character of a message as a unary count of SIGUSR1 signals, terminated by one SIGUSR2:
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <unistd.h>

#define MESSAGE "hello\n"
#define PAUSE 15000 /* delay between signals, in microseconds */

int main(int argc, char **argv)
{
    pid_t pid = (pid_t)atoi(argv[1]); /* PID of the server process */
    size_t i;
    int j;

    /* For each byte of MESSAGE (including the terminating '\0'),
     * send its value as that many SIGUSR1 signals, then one SIGUSR2
     * to mark the end of the character. */
    for (i = 0; i < sizeof MESSAGE; ++i) {
        for (j = 0; j < MESSAGE[i]; ++j) {
            kill(pid, SIGUSR1);
            usleep(PAUSE);
        }
        kill(pid, SIGUSR2);
        usleep(PAUSE);
    }
    return 0;
}
Here is the server code:
#include <signal.h>
#include <stdio.h>
#include <sys/types.h>
#include <unistd.h>

/* Number of SIGUSR1 signals received since the last SIGUSR2. */
static unsigned char index;

static void inc(int sig)
{
    ++index;
    (void) sig;
}

static void prt(int sig)
{
    printf("%c", index); /* print the character decoded so far */
    fflush(stdout);
    index = 0;
    (void) sig;
}

int main(void)
{
    printf("%ld\n", (long int)getpid()); /* print PID for the client to use */
    signal(SIGUSR1, inc);
    signal(SIGUSR2, prt);
    for (;;)
        ; /* spin, waiting for signals */
    return 0;
}
The characters the server receives depend on the value of PAUSE. Is this caused by something outside the signal mechanism itself, or am I making a mistake? If it is inherent, where can I find these platform-specific considerations documented? (I'm using Linux 2.6.35.)
Note: to run the client, pass the server's PID as a command-line argument.
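For example, assuming the binaries are named server and client (names chosen here purely for illustration), a session could look like this:

$ ./server &
4242
$ ./client 4242

Here 4242 stands in for whatever PID the server actually prints; the server then writes the characters it decodes, which, as described above, vary with the value of PAUSE.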