Source: http://angel.cmc.msu.ru/~ifed/teraflops/Popova_MPI.pdf
MPI: the Message Passing Interface

Outline

- Parallel programming models
- MPI basics: initialization, communicators, process ranks
- Point-to-point communication in MPI
- Collective operations

Parallel programming models

- Message passing: MPI ("Message Passing Interface"), PVM ("Parallel Virtual Machine"), Shmem, MPT (Cray)
- Shared memory / "thread" models: OpenMP, ...

Message passing

Message passing = data transfer + synchronization: the sending and the receiving process must cooperate.

(Diagram: Process 0 holds the data and passes it to Process 1; the time axis shows that the transfer requires both sides to take part.)

Hello, MPI world! in C:

#include <stdio.h>
#include "mpi.h"

int main(int argc, char **argv) {
    MPI_Init(&argc, &argv);
    printf("Hello, MPI world\n");
    MPI_Finalize();
    return 0;
}

Hello, MPI world! in C++:

#include <iostream>
#include "mpi++.h"
using namespace std;

int main(int argc, char **argv) {
    MPI::Init(argc, argv);
    cout << "Hello world" << endl;
    MPI::Finalize();
    return 0;
}

Hello, MPI world! in Fortran:

      program main
      include 'mpif.h'
      integer ierr
      call MPI_INIT(ierr)
      print *, 'Hello, MPI world'
      call MPI_FINALIZE(ierr)
      end

MPI forum

- MPI 1.1 Standard: 1992-1994
- MPI 2.0 Standard: 1995-1997

Documentation:
http://www.mcs.anl.gov/mpi
http://www.mpi-forum.org/docs/docs.html


MPI handles

MPI objects (communicators, datatypes, groups, etc.) are referred to through handles; in C the handle types are introduced with typedef (MPI_Comm, MPI_Datatype, ...).


Communicators

- A communicator defines a group of processes that are allowed to communicate with each other.
- All processes started by an MPI program belong to the predefined communicator MPI_COMM_WORLD.
- MPI_COMM_WORLD becomes available after the call to MPI_Init; further communicators can be created by the MPI program itself.


(Diagram: all processes of the MPI program form MPI_COMM_WORLD with ranks 0-5; a subset of them forms SOME_OTHER_COMM, in which the same processes have ranks 0-2.)

MPI header file

The MPI header file provides the required constants, type definitions, and function prototypes. In C:

#include "mpi.h"

Format of MPI calls

C (case sensitive):
    error = MPI_Xxxxx(parameter, ...);
    MPI_Xxxxx(parameter, ...);

C++ (case sensitive):
    error = MPI::Xxxxx(parameter, ...);
    MPI::Xxxxx(parameter, ...);

Fortran:
    CALL MPI_XXXXX(parameter, ..., IERR)

MPI_Init

Initializes the MPI environment; it must be called before any other MPI routine.

C:       int MPI_Init(int *argc, char ***argv)
C++:     void MPI::Init(int& argc, char**& argv)
Fortran: INTEGER IERR
         CALL MPI_INIT(IERR)

How many processes are there? MPI_Comm_size

Returns the number of processes in the given communicator. In C:

int MPI_Comm_size(MPI_Comm comm, int *size)

Process rank

- The rank identifies a process within a communicator (its process ID).
- Ranks run from 0 to n-1, where n is the number of processes in the communicator.
- Determining the rank of the calling process, in C:

int MPI_Comm_rank(MPI_Comm comm, int *rank)

Exiting MPI

Normal and emergency termination of an MPI program, in C:

int MPI_Finalize(void)
int MPI_Abort(MPI_Comm comm, int errorcode)

MPI_Finalize must be the last MPI call made by every process.
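The slides give only the prototypes; the following is a minimal sketch (not from the slides) of how MPI_Abort might be used when one process detects a fatal condition and wants to terminate the whole program rather than exit alone. The missing-argument check is a hypothetical example condition.

#include <stdio.h>
#include "mpi.h"

int main(int argc, char **argv) {
    int rank;
    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    /* hypothetical fatal condition: the program requires a command-line argument */
    if (argc < 2) {
        if (rank == 0)
            fprintf(stderr, "Usage: %s <input>\n", argv[0]);
        MPI_Abort(MPI_COMM_WORLD, 1);   /* terminates every process of the communicator */
    }

    /* ... normal work ... */

    MPI_Finalize();
    return 0;
}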

Hello, MPI world! in C, with rank and size:

#include <stdio.h>
#include "mpi.h"

int main(int argc, char **argv) {
    int rank, size;
    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    printf("Hello, MPI world! I am %d of %d\n", rank, size);
    MPI_Finalize();
    return 0;
}


Hello, MPI world! in C++, with rank and size:

#include <iostream>
#include "mpi++.h"
using namespace std;

int main(int argc, char **argv) {
    MPI::Init(argc, argv);
    int rank = MPI::COMM_WORLD.Get_rank();
    int size = MPI::COMM_WORLD.Get_size();
    cout << "Hello world! I am " << rank << " of " << size << endl;
    MPI::Finalize();
    return 0;
}

Hello, MPI world! in Fortran, with rank and size:

      program main
      include 'mpif.h'
      integer rank, size, ierr
      call MPI_INIT(ierr)
      call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
      call MPI_COMM_SIZE(MPI_COMM_WORLD, size, ierr)
      print *, 'Hello world! I am ', rank, ' of ', size
      call MPI_FINALIZE(ierr)
      end

Bones.c: the skeleton of an MPI program

#include "mpi.h"

int main(int argc, char *argv[]) {
    int rank, size;
    /* ... non-parallel part of the code ... */
    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    /* ... your code here ... */
    MPI_Finalize();
    return 0;
}







MPI datatypes

The data carried by an MPI message is described by MPI datatypes:
- basic (predefined) datatypes
- derived datatypes

MPI basic datatypes - C

MPI datatype        C datatype
MPI_CHAR            signed char
MPI_SHORT           signed short int
MPI_INT             signed int
MPI_LONG            signed long int
MPI_UNSIGNED_CHAR   unsigned char
MPI_UNSIGNED_SHORT  unsigned short int
MPI_UNSIGNED        unsigned int
MPI_UNSIGNED_LONG   unsigned long int
MPI_FLOAT           float
MPI_DOUBLE          double
MPI_LONG_DOUBLE     long double
MPI_BYTE            (no C equivalent)
MPI_PACKED          (no C equivalent)

Point-to-point communication

(Diagram: within a communicator of six processes, ranks 0-5, one process, the source, sends a message to another, the destination.)

- Source process: the process that sends the message.
- Destination process: the process the message is addressed to.
- Communication takes place within a communicator; source and destination are identified by their ranks in that communicator.



"" , , , , .. ­ Send: , , ­ Receive: , ,



27

Blocking and non-blocking operations

MPI point-to-point operations exist in blocking and non-blocking forms:
- Blocking: the call returns only when the operation has locally completed and the buffer may be reused.
- Non-blocking: the call returns immediately; the operation proceeds in the background and must be completed later (for example with MPI_Wait).
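The slides do not show a non-blocking example; a minimal sketch of the usual pattern (assuming exactly two processes) with MPI_Isend, MPI_Irecv and MPI_Wait:

#include <stdio.h>
#include "mpi.h"

/* Run with two processes: ranks 0 and 1 exchange one integer each. */
int main(int argc, char **argv) {
    int rank, sendval, recvval;
    MPI_Request sreq, rreq;
    MPI_Status status;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    sendval = rank;
    /* both calls return immediately; the transfers proceed in the background */
    MPI_Irecv(&recvval, 1, MPI_INT, 1 - rank, 0, MPI_COMM_WORLD, &rreq);
    MPI_Isend(&sendval, 1, MPI_INT, 1 - rank, 0, MPI_COMM_WORLD, &sreq);

    /* ... useful computation could overlap with the communication here ... */

    /* complete both operations before reusing the buffers */
    MPI_Wait(&sreq, &status);
    MPI_Wait(&rreq, &status);

    printf("P:%d received %d\n", rank, recvval);
    MPI_Finalize();
    return 0;
}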

Communication modes

Mode              Completion condition
Synchronous send  Completes only after the matching receive has started
Buffered send     Always completes (unless an error occurs), irrespective of the receiver
Standard send     Completes when the message has been sent off (MPI may or may not buffer it)
Ready send        May be started only if the matching receive has already been posted
Receive           Completes when the message has arrived

Point-to-point communication: sender modes and MPI calls

Mode              MPI call
Standard send     MPI_Send
Synchronous send  MPI_Ssend
Buffered send     MPI_Bsend
Ready send        MPI_Rsend
Receive           MPI_Recv

Sending a message, in C:

int MPI_Send(void *buf, int count, MPI_Datatype datatype,
             int dest, int tag, MPI_Comm comm)

MPI_Send arguments

buf      - address of the data to be sent
count    - number of elements to send
datatype - MPI datatype of each element
dest     - rank of the destination process
tag      - message tag (an integer chosen by the program)
comm     - communicator

Example: MPI_Send(data, 500, MPI_FLOAT, 6, 33, MPI_COMM_WORLD) sends 500 floats from the buffer data to the process with rank 6, using tag 33, within MPI_COMM_WORLD.


Synchronous send (MPI_Ssend)

- Uses a "handshake" between sender and receiver: the send completes only after the matching receive has started.
- Advantage: safe; when the call returns, the receiver has begun taking the message.
- Drawback: idle time while the two processes wait for each other.
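MPI_Ssend takes the same argument list as MPI_Send; a minimal sketch (not from the slides, assuming two processes) of a synchronous transfer:

#include <stdio.h>
#include "mpi.h"

/* Run with two processes: MPI_Ssend on rank 0 returns only after
   rank 1 has started the matching receive. */
int main(int argc, char **argv) {
    int rank, i;
    float data[100];
    MPI_Status status;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    if (rank == 0) {
        for (i = 0; i < 100; ++i) data[i] = (float)i;
        MPI_Ssend(data, 100, MPI_FLOAT, 1, 10, MPI_COMM_WORLD);
    } else if (rank == 1) {
        MPI_Recv(data, 100, MPI_FLOAT, 0, 10, MPI_COMM_WORLD, &status);
        printf("P:%d got data[99]=%f\n", rank, data[99]);
    }

    MPI_Finalize();
    return 0;
}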




Buffered send (MPI_Bsend)

- The message is copied into a user-supplied buffer and the call returns at once; completion does not depend on the receiver.
- Advantage: no waiting for the receiver.
- Drawback: the program must provide the buffer explicitly: it is registered with MPI_Buffer_attach and released with MPI_Buffer_detach.
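A minimal sketch (not from the slides) of a buffered send; note that the attached buffer must allow MPI_BSEND_OVERHEAD bytes per message in addition to the data itself:

#include <stdio.h>
#include <stdlib.h>
#include "mpi.h"

/* Run with two processes: rank 0 sends a buffered message to rank 1. */
int main(int argc, char **argv) {
    int rank, i, bufsize;
    float data[100];
    char *buffer;
    MPI_Status status;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    if (rank == 0) {
        for (i = 0; i < 100; ++i) data[i] = (float)i;

        /* attach a user buffer large enough for one message plus overhead */
        bufsize = 100 * sizeof(float) + MPI_BSEND_OVERHEAD;
        buffer = malloc(bufsize);
        MPI_Buffer_attach(buffer, bufsize);

        MPI_Bsend(data, 100, MPI_FLOAT, 1, 20, MPI_COMM_WORLD);

        /* detach returns only after the buffered message has left the buffer */
        MPI_Buffer_detach(&buffer, &bufsize);
        free(buffer);
    } else if (rank == 1) {
        MPI_Recv(data, 100, MPI_FLOAT, 0, 20, MPI_COMM_WORLD, &status);
        printf("P:%d got data[0]=%f\n", rank, data[0]);
    }

    MPI_Finalize();
    return 0;
}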

Standard send (MPI_Send)

- Completes when the message has been sent off; this does not necessarily mean that it has been received.
- MPI itself decides whether to buffer the message or to synchronize with the receiver, so the program should not rely on either behaviour.

Ready send (MPI_Rsend)

- May be used only when the matching receive has already been posted; otherwise the behaviour is undefined.
- Advantage: can reduce the communication overhead slightly.
- Drawback: the program must guarantee that the receive is posted first, so it has to be used with care.

Receiving a message, in C:

int MPI_Recv(void *buf, int count, MPI_Datatype datatype,
             int source, int tag, MPI_Comm comm, MPI_Status *status)


Point-to-point communication: message matching

For a message to be received, its envelope must match what the receiver specified:
- the communicator must be the same;
- the source rank and the destination rank must match;
- the tag must match.


Wildcarding

- The receiver may accept a message from any source by specifying MPI_ANY_SOURCE, and with any tag by specifying MPI_ANY_TAG.
- The actual source and tag of the received message are then read from the status argument.

Status information

After MPI_Recv, the status argument describes the message that was actually received:
- Source: status.MPI_SOURCE
- Tag:    status.MPI_TAG
- Count:  obtained with MPI_Get_count

MPI_Get_count

Returns the number of elements actually received; it may be smaller than the count passed to MPI_Recv, which only gives the capacity of the receive buffer. In C:

int MPI_Get_count(MPI_Status *status, MPI_Datatype datatype, int *count)

Example: point-to-point communication

(Diagram: six processes, ranks 0-5; one of them posts a send, another posts the matching receive.)




#include <stdio.h>
#include <stdlib.h>
#include "mpi.h"

/* Run with two processes */
int main(int argc, char *argv[]) {
    int rank, i, count;
    float data[100], value[200];
    MPI_Status status;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    if (rank == 1) {
        for (i = 0; i < 100; ++i) data[i] = i;
        MPI_Send(data, 100, MPI_FLOAT, 0, 55, MPI_COMM_WORLD);
    } else {


(continued)

        MPI_Recv(value, 200, MPI_FLOAT, MPI_ANY_SOURCE, 55,
                 MPI_COMM_WORLD, &status);
        printf("P:%d Got data from processor %d \n", rank, status.MPI_SOURCE);
        MPI_Get_count(&status, MPI_FLOAT, &count);
        printf("P:%d Got %d elements \n", rank, count);
        printf("P:%d value[5]=%f \n", rank, value[5]);
    }
    MPI_Finalize();
}

Program output:

P: 0 Got data from processor 1
P: 0 Got 100 elements
P: 0 value[5]=5.000000

Timing: MPI_Wtime

Returns the wall-clock time in seconds, measured from some fixed moment in the past. The difference between two calls gives the elapsed time of a code region. In C:

double MPI_Wtime(void);
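A minimal sketch (not from the slides) of timing a local code region with MPI_Wtime:

#include <stdio.h>
#include "mpi.h"

int main(int argc, char **argv) {
    int rank;
    long i;
    double t0, t1, s = 0.0;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    t0 = MPI_Wtime();                 /* start of the measured region */
    for (i = 0; i < 10000000L; ++i)
        s += 1.0 / (i + 1.0);         /* some local work to time */
    t1 = MPI_Wtime();                 /* end of the measured region */

    printf("P:%d local work took %f seconds (s=%f)\n", rank, t1 - t0, s);
    MPI_Finalize();
    return 0;
}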

Example: receiving a message of unknown length

int count, *buf, source;
MPI_Status status;

MPI_Probe(MPI_ANY_SOURCE, 0, comm, &status);   /* wait for a message and inspect its envelope */
source = status.MPI_SOURCE;
MPI_Get_count(&status, MPI_INT, &count);       /* how many ints does it hold? */
buf = malloc(count * sizeof(int));             /* allocate exactly enough space */
MPI_Recv(buf, count, MPI_INT, source, 0, comm, &status);





Broadcast: MPI_Bcast

int MPI_Bcast(void *buf, int count, MPI_Datatype datatype,
              int source, MPI_Comm comm)

buf      - address of the buffer (holds the data on the source process, receives it on all others)
count    - number of elements
datatype - datatype of the elements
source   - rank of the process that broadcasts the data
comm     - communicator
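A minimal sketch (not from the slides): rank 0 broadcasts a small array of parameters to every process of MPI_COMM_WORLD:

#include <stdio.h>
#include "mpi.h"

int main(int argc, char **argv) {
    int rank, i;
    float params[4];

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    if (rank == 0)                        /* only the source fills the buffer */
        for (i = 0; i < 4; ++i) params[i] = 0.1f * i;

    /* after the call, every process holds the same contents in params */
    MPI_Bcast(params, 4, MPI_FLOAT, 0, MPI_COMM_WORLD);

    printf("P:%d params[3]=%f\n", rank, params[3]);
    MPI_Finalize();
    return 0;
}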




Reduction: MPI_Reduce

int MPI_Reduce(void *sbuf, void *rbuf, int count,
               MPI_Datatype datatype, MPI_Op op, int root, MPI_Comm comm)

sbuf     - send buffer holding the local operands of op
rbuf     - receive buffer for the result of op (significant only on the root process)
count    - number of elements
datatype - datatype of the elements
op       - reduction operation
root     - rank of the process that receives the result
comm     - communicator

MPI_Reduce combines, element by element, the values supplied by all processes of the communicator, so it must be called by every one of them.
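A minimal sketch (not from the slides): every process contributes one value and rank 0 receives their sum:

#include <stdio.h>
#include "mpi.h"

int main(int argc, char **argv) {
    int rank;
    double local, total;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    local = (double)rank;   /* each process contributes its own rank */

    /* element-wise sum of the local values; the result appears only on root 0 */
    MPI_Reduce(&local, &total, 1, MPI_DOUBLE, MPI_SUM, 0, MPI_COMM_WORLD);

    if (rank == 0)
        printf("sum of all ranks = %f\n", total);

    MPI_Finalize();
    return 0;
}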





Predefined reduction operations

MPI_MAX     Maximum
MPI_MIN     Minimum
MPI_PROD    Product
MPI_SUM     Sum
MPI_LAND    Logical and
MPI_LOR     Logical or
MPI_LXOR    Logical exclusive or (xor)
MPI_BAND    Bitwise and
MPI_BOR     Bitwise or
MPI_BXOR    Bitwise xor
MPI_MAXLOC  Maximum value and location
MPI_MINLOC  Minimum value and location
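MPI_MAXLOC and MPI_MINLOC return both a value and the index (typically the rank) that produced it; they operate on pair types such as MPI_DOUBLE_INT. A minimal sketch (not from the slides):

#include <stdio.h>
#include "mpi.h"

/* Pair layout required for MPI_DOUBLE_INT: the value first, the index second. */
struct double_int { double value; int rank; };

int main(int argc, char **argv) {
    int rank;
    struct double_int local, global;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    local.value = 1.0 / (rank + 1);   /* some local quantity */
    local.rank  = rank;               /* remember which process owns it */

    /* find the largest value and the rank that holds it */
    MPI_Reduce(&local, &global, 1, MPI_DOUBLE_INT, MPI_MAXLOC, 0, MPI_COMM_WORLD);

    if (rank == 0)
        printf("max value %f found on rank %d\n", global.value, global.rank);

    MPI_Finalize();
    return 0;
}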





Synchronization: MPI_Barrier

Blocks each calling process until all processes of the communicator have reached the barrier. In C:

int MPI_Barrier(MPI_Comm comm)
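A minimal sketch (not from the slides): barriers placed before and after a timed region make the MPI_Wtime measurements of different processes comparable:

#include <stdio.h>
#include "mpi.h"

int main(int argc, char **argv) {
    int rank;
    long i;
    double t0, t1, x = 0.0;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    MPI_Barrier(MPI_COMM_WORLD);      /* all processes start the measurement together */
    t0 = MPI_Wtime();

    for (i = 0; i < 1000000L * (rank + 1); ++i)
        x += 1.0;                     /* deliberately unbalanced work */

    MPI_Barrier(MPI_COMM_WORLD);      /* wait here for the slowest process */
    t1 = MPI_Wtime();

    if (rank == 0)
        printf("slowest process needed %f seconds (x=%f)\n", t1 - t0, x);

    MPI_Finalize();
    return 0;
}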

Collective communication routines

MPI_ALLGATHER
MPI_ALLGATHERV
MPI_ALLREDUCE
MPI_ALLTOALL
MPI_ALLTOALLV
MPI_BCAST
MPI_GATHER
MPI_GATHERV
MPI_REDUCE
MPI_SCAN
MPI_SCATTER
MPI_SCATTERV
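As an illustration of the scatter/gather pair, a sketch not taken from the slides: rank 0 scatters equal chunks of an array to all processes, each process transforms its chunk, and rank 0 gathers the results back in rank order. The chunk size and the maximum number of processes are assumptions of the sketch.

#include <stdio.h>
#include "mpi.h"

#define CHUNK 4                      /* elements per process (assumed) */
#define MAXPROCS 64                  /* upper bound on the number of processes (assumed) */

int main(int argc, char **argv) {
    int rank, size, i;
    float full[CHUNK * MAXPROCS], part[CHUNK];

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    if (rank == 0)
        for (i = 0; i < CHUNK * size; ++i) full[i] = (float)i;

    /* give CHUNK elements of full[] to every process (rank 0 keeps the first chunk) */
    MPI_Scatter(full, CHUNK, MPI_FLOAT, part, CHUNK, MPI_FLOAT, 0, MPI_COMM_WORLD);

    for (i = 0; i < CHUNK; ++i)
        part[i] *= 2.0f;             /* each process works on its own chunk */

    /* collect the transformed chunks back on rank 0, ordered by rank */
    MPI_Gather(part, CHUNK, MPI_FLOAT, full, CHUNK, MPI_FLOAT, 0, MPI_COMM_WORLD);

    if (rank == 0)
        printf("full[%d]=%f\n", CHUNK * size - 1, full[CHUNK * size - 1]);

    MPI_Finalize();
    return 0;
}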