/* A simple MPI example program using synchronous send */

/* The program consists of one sender process and one receiver. */
/* The sender process sends a message containing its identifier */
/* to the receiver, which receives the message and sends it back. */
/* Both processes use synchronous send operations (MPI_Ssend). */

/* Compile the program with 'mpicc -o code6 code6.c' */
/* Run the program with 'mpirun -np 2 code6' */

#include <stdio.h>
#include "mpi.h"

int main(int argc, char* argv[]) {
  int x, y, np, me;
  int tag = 42;
  MPI_Status status;

  MPI_Init(&argc, &argv);              /* Initialize MPI */
  MPI_Comm_size(MPI_COMM_WORLD, &np);  /* Get number of processes */
  MPI_Comm_rank(MPI_COMM_WORLD, &me);  /* Get own identifier */

  x = me;
  if (me == 0) {   /* Process 0 does this */
    printf("Sending to process 1\n");
    MPI_Ssend(&x, 1, MPI_INT, 1, tag, MPI_COMM_WORLD);   /* Synchronous send */
    printf("Receiving from process 1\n");
    MPI_Recv(&y, 1, MPI_INT, 1, tag, MPI_COMM_WORLD, &status);
    printf("Process %d received a message containing value %d\n", me, y);
  }
  else {           /* Process 1 does this */
    /* Since we use synchronous send, we have to do the receive operation */
    /* first; otherwise both processes block in MPI_Ssend and we get a    */
    /* deadlock. (An MPI_Sendrecv variant that avoids this ordering is    */
    /* sketched after this program.) */
    MPI_Recv(&y, 1, MPI_INT, 0, tag, MPI_COMM_WORLD, &status);
    MPI_Ssend(&x, 1, MPI_INT, 0, tag, MPI_COMM_WORLD);   /* Synchronous send */
  }
  MPI_Finalize();
  return 0;
}
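
The ordering constraint noted above can also be avoided with MPI_Sendrecv, which combines the send and the receive into a single call and lets the MPI library pair them up. The following is only a sketch, not one of the original example files; it reuses the variable names and tag from code6.c and assumes exactly two processes.

/* Sketch: the code6.c exchange written with MPI_Sendrecv */

#include <stdio.h>
#include "mpi.h"

int main(int argc, char* argv[]) {
  int x, y, np, me;
  int tag = 42;
  MPI_Status status;

  MPI_Init(&argc, &argv);
  MPI_Comm_size(MPI_COMM_WORLD, &np);
  MPI_Comm_rank(MPI_COMM_WORLD, &me);

  x = me;
  if (np == 2) {          /* Assumes exactly two processes */
    int other = 1 - me;   /* The rank we exchange with */
    /* Send x to the other rank and receive its value into y in one call */
    MPI_Sendrecv(&x, 1, MPI_INT, other, tag,
                 &y, 1, MPI_INT, other, tag,
                 MPI_COMM_WORLD, &status);
    printf("Process %d received a message containing value %d\n", me, y);
  }
  MPI_Finalize();
  return 0;
}

Because the pairing is done inside the call, both processes can execute the same line; no manual send/receive ordering is needed.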
|
/* A simple MPI example program using buffered send */
/* The program does exactly the same as code6.c */

/* The program consists of one sender process and one receiver. */
/* The sender process sends a message containing its identifier */
/* to the receiver, which receives the message and sends it back. */
/* Both processes use buffered send operations (MPI_Bsend). */

/* Compile the program with 'mpicc -o code7 code7.c' */
/* Run the program with 'mpirun -np 2 code7' */

#include <stdio.h>
#include "mpi.h"
#include <stdlib.h>

#define BUFFSIZE 100   /* Size of the message buffer, in ints */

int main(int argc, char* argv[]) {
  int x, y, np, me;
  int buff[BUFFSIZE];                  /* Buffer to be used in the communication */
  int size = BUFFSIZE * sizeof(int);   /* MPI_Buffer_attach expects the size in bytes */
  int tag = 42;
  MPI_Status status;

  MPI_Init(&argc, &argv);              /* Initialize MPI */
  MPI_Comm_size(MPI_COMM_WORLD, &np);  /* Get number of processes */
  MPI_Comm_rank(MPI_COMM_WORLD, &me);  /* Get own identifier */

  MPI_Buffer_attach(buff, size);       /* Attach the buffer */

  x = me;

  if (me == 0) {   /* Process 0 does this */
    printf("Sending to process 1\n");
    MPI_Bsend(&x, 1, MPI_INT, 1, tag, MPI_COMM_WORLD);   /* Buffered send */
    printf("Receiving from process 1\n");
    MPI_Recv(&y, 1, MPI_INT, 1, tag, MPI_COMM_WORLD, &status);
    printf("Process %d received a message containing value %d\n", me, y);
  }
  else {           /* Process 1 does this */
    /* This program would work even if we changed the order of the send */
    /* and receive calls here, because the messages are buffered and the */
    /* processes can continue executing without waiting for the other    */
    /* process to receive the message */
    MPI_Recv(&y, 1, MPI_INT, 0, tag, MPI_COMM_WORLD, &status);
    MPI_Bsend(&x, 1, MPI_INT, 0, tag, MPI_COMM_WORLD);   /* Buffered send */
  }
  MPI_Buffer_detach(&buff, &size);     /* Detach the buffer */
  MPI_Finalize();
  exit(0);
}
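
The attach call above hands MPI a fixed number of bytes, and every pending MPI_Bsend needs room for the packed message plus the constant MPI_BSEND_OVERHEAD. Below is a small sketch, not one of the original example files, of how that size can be computed portably with MPI_Pack_size and a heap-allocated buffer.

/* Sketch: computing the attach size for one buffered MPI_INT message */

#include <stdio.h>
#include <stdlib.h>
#include "mpi.h"

int main(int argc, char* argv[]) {
  int packed_size, buffer_size;
  char *buffer;

  MPI_Init(&argc, &argv);

  /* Ask MPI how many bytes one MPI_INT occupies when packed for sending */
  MPI_Pack_size(1, MPI_INT, MPI_COMM_WORLD, &packed_size);

  /* One pending MPI_Bsend needs the packed data plus a fixed overhead */
  buffer_size = packed_size + MPI_BSEND_OVERHEAD;
  buffer = (char *) malloc(buffer_size);
  MPI_Buffer_attach(buffer, buffer_size);

  /* ... the MPI_Bsend / MPI_Recv exchange of code7.c would go here ... */

  MPI_Buffer_detach(&buffer, &buffer_size);   /* Returns the buffer address and size */
  free(buffer);
  MPI_Finalize();
  return 0;
}

Sizing the buffer this way avoids depending on the particular value of MPI_BSEND_OVERHEAD, which differs between MPI implementations.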
|
/* A simple MPI example program using non-blocking send */
/* The program does exactly the same as code6.c */

/* The program consists of one sender process and one receiver. */
/* The sender process sends a message containing its identifier */
/* to the receiver, which receives the message and sends it back. */
/* Both processes use non-blocking send and receive operations */
/* (MPI_Isend and MPI_Irecv, and MPI_Wait to wait until the */
/* communication has completed) */

/* Compile the program with 'mpicc -o code8 code8.c' */
/* Run the program with 'mpirun -np 2 code8' */

#include <stdio.h>
#include "mpi.h"
#include <stdlib.h>

int main(int argc, char* argv[]) {
  int x, y, np, me;
  int tag = 42;
  MPI_Status status;
  MPI_Request send_req, recv_req;   /* Request objects for send and receive */

  MPI_Init(&argc, &argv);              /* Initialize MPI */
  MPI_Comm_size(MPI_COMM_WORLD, &np);  /* Get number of processes */
  MPI_Comm_rank(MPI_COMM_WORLD, &me);  /* Get own identifier */

  x = me;
  if (me == 0) {   /* Process 0 does this */
    printf("Process %d sending\n", me);
    MPI_Isend(&x, 1, MPI_INT, 1, tag, MPI_COMM_WORLD, &send_req);
    printf("Process %d receiving\n", me);
    MPI_Irecv(&y, 1, MPI_INT, 1, tag, MPI_COMM_WORLD, &recv_req);
    /* We could do computations here while we are waiting for communication */
    MPI_Wait(&send_req, &status);
    MPI_Wait(&recv_req, &status);
    printf("Process %d received a message containing value %d\n", me, y);
  }
  else {
    MPI_Irecv(&y, 1, MPI_INT, 0, tag, MPI_COMM_WORLD, &recv_req);
    MPI_Isend(&x, 1, MPI_INT, 0, tag, MPI_COMM_WORLD, &send_req);
    /* We could do computations here while we are waiting for communication */
    MPI_Wait(&recv_req, &status);
    MPI_Wait(&send_req, &status);
  }
  MPI_Finalize();
  exit(0);
}
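
Instead of blocking in MPI_Wait, the "we could do computations here" spot can poll the request with MPI_Test and keep computing until the message has arrived. The following is a rough sketch, not one of the original example files; do_some_work() is just a placeholder for useful computation.

/* Sketch: overlapping computation with a non-blocking receive via MPI_Test */

#include <stdio.h>
#include "mpi.h"

static void do_some_work(void) {
  /* Placeholder for computation that does not depend on the message */
}

int main(int argc, char* argv[]) {
  int x, y, np, me, flag = 0;
  int tag = 42;
  MPI_Status status;
  MPI_Request recv_req;

  MPI_Init(&argc, &argv);
  MPI_Comm_size(MPI_COMM_WORLD, &np);
  MPI_Comm_rank(MPI_COMM_WORLD, &me);
  x = me;

  if (me == 0 && np >= 2) {
    MPI_Irecv(&y, 1, MPI_INT, 1, tag, MPI_COMM_WORLD, &recv_req);
    while (!flag) {   /* Keep working until the message has arrived */
      do_some_work();
      MPI_Test(&recv_req, &flag, &status);
    }
    printf("Process %d received a message containing value %d\n", me, y);
  }
  else if (me == 1) {
    MPI_Send(&x, 1, MPI_INT, 0, tag, MPI_COMM_WORLD);
  }
  MPI_Finalize();
  return 0;
}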
|
/* A simple MPI example program using non-blocking send and receive */

/* The program consists of one sender process and several receiver */
/* processes. The sender process sends a message containing its */
/* identifier to all the other processes. These receive the message */
/* and reply with a message containing their own identifier. */
/* All processes use non-blocking send and receive operations */
/* (MPI_Isend and MPI_Irecv, and MPI_Waitall) */

/* Compile the program with 'mpicc -o code9 code9.c' */
/* Run the program with 'mpirun -np 4 code9' */

#include <stdio.h>
#include "mpi.h"
#include <stdlib.h>

#define MAXPROC 8   /* Max number of processes */

int main(int argc, char* argv[]) {
  int i, x, np, me;
  int tag = 42;

  MPI_Status status[MAXPROC];
  /* Request objects for non-blocking send and receive */
  MPI_Request send_req[MAXPROC], recv_req[MAXPROC];
  int y[MAXPROC];   /* Array to receive values in */

  MPI_Init(&argc, &argv);              /* Initialize MPI */
  MPI_Comm_size(MPI_COMM_WORLD, &np);  /* Get number of processes */
  MPI_Comm_rank(MPI_COMM_WORLD, &me);  /* Get own identifier */

  x = me;   /* This is the value we send, the process id */

  if (me == 0) {   /* Process 0 does this */
    /* First check that we have at least 2 and at most MAXPROC processes */
    if (np < 2 || np > MAXPROC) {
      printf("You have to use at least 2 and at most %d processes\n", MAXPROC);
      MPI_Finalize();
      exit(0);
    }
    printf("Process %d sending to all other processes\n", me);
    /* Send a message containing the process id to all other processes */
    for (i = 1; i < np; i++) {
      MPI_Isend(&x, 1, MPI_INT, i, tag, MPI_COMM_WORLD, &send_req[i]);
    }
    /* While the messages are delivered, we could do computations here */
    /* Wait until all messages have been sent */
    /* Note that we use requests and statuses starting from position 1 */
    MPI_Waitall(np-1, &send_req[1], &status[1]);

    printf("Process %d receiving from all other processes\n", me);
    /* Receive a message from all other processes */
    for (i = 1; i < np; i++) {
      MPI_Irecv(&y[i], 1, MPI_INT, i, tag, MPI_COMM_WORLD, &recv_req[i]);
    }
    /* While the messages are delivered, we could do computations here */
    /* Wait until all messages have been received */
    /* Requests and statuses start from position 1 */
    MPI_Waitall(np-1, &recv_req[1], &status[1]);

    /* Print out one line for each message we received */
    for (i = 1; i < np; i++) {
      printf("Process %d received message from process %d\n", me, y[i]);
    }
    printf("Process %d ready\n", me);
  }
  else {   /* All other processes do this */
    /* Check that the number of processes is within the allowed range */
    if (np < 2 || np > MAXPROC) {
      MPI_Finalize();
      exit(0);
    }
    MPI_Irecv(&y[0], 1, MPI_INT, 0, tag, MPI_COMM_WORLD, &recv_req[0]);
    MPI_Wait(&recv_req[0], &status[0]);
    MPI_Isend(&x, 1, MPI_INT, 0, tag, MPI_COMM_WORLD, &send_req[0]);
    /* Lots of computations here */
    MPI_Wait(&send_req[0], &status[0]);
  }

  MPI_Finalize();
  exit(0);
}
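
The communication pattern in code9.c (process 0 sends its identifier to everyone and collects one identifier back from each process) can also be expressed with collective operations. Below is a sketch using MPI_Bcast and MPI_Gather; it is not one of the original example files.

/* Sketch: the code9.c exchange expressed with collective operations */

#include <stdio.h>
#include "mpi.h"

#define MAXPROC 8   /* Max number of processes */

int main(int argc, char* argv[]) {
  int i, x, np, me;
  int y[MAXPROC];   /* Array to gather the identifiers into */

  MPI_Init(&argc, &argv);
  MPI_Comm_size(MPI_COMM_WORLD, &np);
  MPI_Comm_rank(MPI_COMM_WORLD, &me);

  if (np < 2 || np > MAXPROC) {
    if (me == 0) printf("You have to use at least 2 and at most %d processes\n", MAXPROC);
    MPI_Finalize();
    return 0;
  }

  x = me;
  /* Process 0 sends its identifier to all other processes */
  MPI_Bcast(&x, 1, MPI_INT, 0, MPI_COMM_WORLD);

  /* Every process sends its own identifier back; process 0 gathers them */
  MPI_Gather(&me, 1, MPI_INT, y, 1, MPI_INT, 0, MPI_COMM_WORLD);

  if (me == 0) {
    for (i = 1; i < np; i++) {
      printf("Process %d received message from process %d\n", me, y[i]);
    }
  }
  MPI_Finalize();
  return 0;
}

The collectives also remove the need for the explicit request arrays and the MPI_Waitall bookkeeping.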
|