Fix MPI rank naming to be consistent in slides
leopekkas committed Jul 28, 2023
1 parent 1b3ea04 commit 7ee4bc2
Showing 4 changed files with 28 additions and 29 deletions.
20 changes: 10 additions & 10 deletions mpi/docs/03-special-variables.md
@@ -20,15 +20,15 @@ lang: en

<div class=column>
```fortran
-if (myid == 0) then
+if (rank == 0) then
call mpi_send(message, msgsize, &
MPI_INTEGER, 1, &
42, MPI_COMM_WORLD, rc)
call mpi_recv(recvBuf, arraysize, &
MPI_INTEGER, 1, &
42, MPI_COMM_WORLD, &
status, rc)
-else if (myid == 1) then
+else if (rank == 1) then
call mpi_send(message, msgsize, &
MPI_INTEGER, 0, &
42, MPI_COMM_WORLD, rc)
@@ -43,8 +43,8 @@ else if (myid == 1) then
```fortran
! Modulo operation can be used for
! wrapping around
-dst = mod(myid + 1, ntasks)
-src = mod(myid - 1 + ntasks, ntasks)
+dst = mod(rank + 1, ntasks)
+src = mod(rank - 1 + ntasks, ntasks)
call mpi_send(message, msgsize, &
MPI_INTEGER, dst, &
@@ -64,7 +64,7 @@ call mpi_recv(recvBuf, arraysize, &
chosen as the special task in scatter and gather type operations

```c++
-if (0 == myid) {
+if (0 == rank) {
for (int i=1; i < ntasks; i++) {
MPI_Send(&data, 1, MPI_INT, i, 42, MPI_COMM_WORLD);
}
@@ -84,13 +84,13 @@ if (0 == myid) {
# Example

```fortran
-dst = myid + 1
-src = myid - 1
+dst = rank + 1
+src = rank - 1
-if (myid == 0) then
+if (rank == 0) then
src = MPI_PROC_NULL
end if
-if (myid == ntasks - 1) then
+if (rank == ntasks - 1) then
dst = MPI_PROC_NULL
end if
@@ -115,7 +115,7 @@ call mpi_recv(message, msgsize, MPI_INTEGER, src, 42, MPI_COMM_WORLD, status, rc

<div class=column style="width:50%">
```c++
-if (0 == myid) {
+if (0 == rank) {
for (int i=1; i < ntasks; i++) {
MPI_Recv(&data, 1, MPI_INT, MPI_ANY_SOURCE,
42, MPI_COMM_WORLD, &status);
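For context on the slides touched above, a minimal, self-contained C sketch of the same shift pattern: each rank passes a message to `rank + 1` and receives from `rank - 1`, with `MPI_PROC_NULL` closing the ends so no rank needs a special case. The variable names follow the slides; the payload and the tag value 42 are illustrative.

```c
#include <stdio.h>
#include <mpi.h>

int main(int argc, char *argv[])
{
    int rank, ntasks, msg, incoming = -1;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &ntasks);

    /* Non-periodic chain: the end ranks talk to MPI_PROC_NULL, so the same
       send/receive pair is valid on every rank without branching.
       (For a periodic chain, use the modulo trick from the slides.) */
    int dst = (rank == ntasks - 1) ? MPI_PROC_NULL : rank + 1;
    int src = (rank == 0)          ? MPI_PROC_NULL : rank - 1;

    msg = rank;
    MPI_Sendrecv(&msg, 1, MPI_INT, dst, 42,
                 &incoming, 1, MPI_INT, src, 42,
                 MPI_COMM_WORLD, MPI_STATUS_IGNORE);

    /* A receive from MPI_PROC_NULL returns immediately and leaves the
       buffer untouched, so rank 0 still prints the initial -1 here. */
    printf("Rank %d received %d\n", rank, incoming);

    MPI_Finalize();
    return 0;
}
```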
21 changes: 10 additions & 11 deletions mpi/docs/04-collectives.md
@@ -23,7 +23,7 @@ lang: en

<div class=column>
```fortran
-if (my_id == 0) then
+if (rank == 0) then
do i = 1, ntasks-1
call mpi_send(a, 1048576, &
MPI_REAL, i, tag, &
@@ -128,14 +128,14 @@ Assume 4 MPI tasks. What would the (full) program print?

<div class=column>
```fortran
-if (my_id==0) then
+if (rank==0) then
do i = 1, 16
a(i) = i
end do
end if
call mpi_bcast(a, 16, MPI_INTEGER, 0, &
MPI_COMM_WORLD, rc)
-if (my_id==3) print *, a(:)
+if (rank==3) print *, a(:)
```
<small>
**A)** `1 2 3 4`<br>
@@ -146,14 +146,14 @@ if (my_id==3) print *, a(:)
</div>
<div class=column>
```fortran
-if (my_id==0) then
+if (rank==0) then
do i = 1, 16
a(i) = i
end do
end if
call mpi_scatter(a, 4, MPI_INTEGER, aloc, 4 &
MPI_INTEGER, 0, MPI_COMM_WORLD, rc)
-if (my_id==3) print *, aloc(:)
+if (rank==3) print *, aloc(:)
```
<small>
**A)** `1 2 3 4`<br>
@@ -197,7 +197,7 @@ MPI_Scatterv(`sendbuf`{.input}, `sendcounts`{.input}, `displs`{.input}, `sendtyp

<div class=column>
```fortran
-if (my_id==0) then
+if (rank==0) then
do i = 1, 10
a(i) = i
end do
@@ -208,7 +208,7 @@ displs(0:3) = [ 0, 1, 3, 6 ]
call mpi_scatterv(a, scounts, &
displs, MPI_INTEGER, &
-aloc, scounts(my_id), &
+aloc, scounts(rank), &
MPI_INTEGER, 0, &
MPI_COMM_WORLD, rc)
@@ -383,7 +383,7 @@ MPI_Alltoall(`sendbuf`{.input}, `sendcount`{.input}, `sendtype`{.input}, `recvbu

<div class=column>
```fortran
-if (my_id==0) then
+if (rank==0) then
do i = 1, 16
a(i) = i
end do
@@ -497,7 +497,7 @@ MPI_Allreduce(`sendbuf`{.input}, `recvbuf`{.output}, `count`{.input}, `datatype`
```fortran
real :: a(1024), aloc(128)
...
-if (my_id==0) then
+if (rank==0) then
call random_number(a)
end if
call mpi_scatter(a, 128, MPI_INTEGER, &
@@ -527,7 +527,7 @@ call mpi_allreduce(rloc, r, 1, MPI_REAL, &
# Common mistakes with collectives

- Using a collective operation within if-rank test:<br>
-  `if (my_id == 0) call mpi_bcast(...`
+  `if (rank == 0) call mpi_bcast(...`
- All the processes, both the root (the sender or the gatherer) and
the rest (receivers or senders), must call the collective routine!
- Assuming that all processes making a collective call would complete at the same time
@@ -546,4 +546,3 @@ call mpi_allreduce(rloc, r, 1, MPI_REAL, &
# Summary

![](img/collective-patterns.png){.center width=100%}
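One point from the common-mistakes slide above deserves a concrete illustration: every rank must make the collective call, and only the root argument marks where the data originates. A short C sketch of the correct pattern, assuming a run with 4 tasks as in the quiz slides; the buffer sizes are illustrative.

```c
#include <stdio.h>
#include <mpi.h>

int main(int argc, char *argv[])
{
    int rank, ntasks, data[16], part[4];

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &ntasks);

    /* Only the root initialises the data ... */
    if (rank == 0) {
        for (int i = 0; i < 16; i++) {
            data[i] = i + 1;
        }
    }

    /* ... but every rank calls the collectives; the root argument (0)
       tells MPI where the data comes from. Wrapping these calls in
       "if (rank == 0)" would leave the other ranks waiting forever. */
    MPI_Bcast(data, 16, MPI_INT, 0, MPI_COMM_WORLD);
    MPI_Scatter(data, 4, MPI_INT, part, 4, MPI_INT, 0, MPI_COMM_WORLD);

    printf("Rank %d got elements %d..%d\n", rank, part[0], part[3]);

    MPI_Finalize();
    return 0;
}
```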

10 changes: 5 additions & 5 deletions mpi/docs/06-user-defined-communicators.md
@@ -53,18 +53,18 @@ new communicators

<div class=column>
```c
-if (myid%2 == 0) {
+if (rank%2 == 0) {
color = 1;
} else {
color = 2;
}
MPI_Comm_split(MPI_COMM_WORLD, color,
-myid, &subcomm);
+rank, &subcomm);

MPI_Comm_rank(subcomm, &mysubid);

printf ("I am rank %d in MPI_COMM_WORLD, but"
"%d in Comm %d.\n", myid, mysubid,
"%d in Comm %d.\n", rank, mysubid,
color);
```
@@ -93,13 +93,13 @@ I am rank 1 in MPI_COMM_WORLD, but 0 in Comm 2.
<div class=column>
<p>
```c
-if (myid%2 == 0) {
+if (rank%2 == 0) {
color = 1;
} else {
color = 2;
}
MPI_Comm_split(MPI_COMM_WORLD, color,
-myid, &subcomm);
+rank, &subcomm);
MPI_Comm_rank(subcomm, &mysubid);
MPI_Bcast(sendbuf, 8, MPI_INT, 0,
subcomm);
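A self-contained C version of the split-by-parity example from this file, using the renamed `rank` variable; the `MPI_Comm_free` call at the end is an addition for completeness and does not appear on the slide.

```c
#include <stdio.h>
#include <mpi.h>

int main(int argc, char *argv[])
{
    int rank, subrank, color;
    MPI_Comm subcomm;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    /* Even world ranks form one sub-communicator, odd ranks another.
       Using the world rank as the key keeps the original ordering. */
    color = (rank % 2 == 0) ? 1 : 2;
    MPI_Comm_split(MPI_COMM_WORLD, color, rank, &subcomm);
    MPI_Comm_rank(subcomm, &subrank);

    printf("I am rank %d in MPI_COMM_WORLD, but %d in Comm %d.\n",
           rank, subrank, color);

    MPI_Comm_free(&subcomm);
    MPI_Finalize();
    return 0;
}
```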
6 changes: 3 additions & 3 deletions mpi/docs/08-communicator-topologies-cartesian.md
@@ -132,8 +132,8 @@ period=(/ .true., .false. /)
call mpi_dims_create(ntasks, 2, dims, rc)
call mpi_cart_create(mpi_comm_world, 2, dims, period, .true., comm2d, rc)
-call mpi_comm_rank(comm2d, my_id, rc)
-call mpi_cart_coords(comm2d, my_id, 2, coords, rc)
+call mpi_comm_rank(comm2d, rank, rc)
+call mpi_cart_coords(comm2d, rank, 2, coords, rc)
```


@@ -252,4 +252,4 @@ call mpi_sendrecv(buf(1,1), 1, rowtype, nbr_up, tag_up, &

- Neighborhood collectives enable communication between neighbours in process topology
with a single MPI call
-- Neighborhood communication provides optimization opportunities for MPI library
+- Neighborhood communication provides optimization opportunities for MPI library
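For comparison with the Fortran snippet above, a rough C equivalent of the Cartesian-topology setup: a 2D grid, periodic in the first dimension only, with reordering allowed. The `MPI_Cart_shift` call at the end is an assumption about typical use and does not appear in the diff.

```c
#include <stdio.h>
#include <mpi.h>

int main(int argc, char *argv[])
{
    int rank, ntasks, coords[2];
    int dims[2]    = {0, 0};   /* let MPI choose the grid shape */
    int periods[2] = {1, 0};   /* periodic in the first dimension only */
    MPI_Comm comm2d;

    MPI_Init(&argc, &argv);
    MPI_Comm_size(MPI_COMM_WORLD, &ntasks);

    MPI_Dims_create(ntasks, 2, dims);
    MPI_Cart_create(MPI_COMM_WORLD, 2, dims, periods, 1, &comm2d);

    /* Ranks may be reordered in the new communicator, so query it again. */
    MPI_Comm_rank(comm2d, &rank);
    MPI_Cart_coords(comm2d, rank, 2, coords);

    /* Neighbours one step apart along dimension 0; in the periodic
       dimension the shift wraps around instead of returning MPI_PROC_NULL. */
    int nbr_src, nbr_dst;
    MPI_Cart_shift(comm2d, 0, 1, &nbr_src, &nbr_dst);

    printf("Rank %d at (%d,%d), neighbours %d and %d\n",
           rank, coords[0], coords[1], nbr_src, nbr_dst);

    MPI_Comm_free(&comm2d);
    MPI_Finalize();
    return 0;
}
```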
