Ice Sheet System Model 4.18
Code documentation
VecToMPISerial.cpp File Reference

Gather a PETSc Vec spread across the cluster onto node 0, then optionally broadcast it to all nodes.

#include "../petscincludes.h"
#include "../../../shared/shared.h"


Functions

int VecToMPISerial (double **pgathered_vector, Vec vector, ISSM_MPI_Comm comm, bool broadcast)
 

Detailed Description

Gather a PETSc Vec spread across the cluster onto node 0, then optionally broadcast it to all nodes.

Definition in file VecToMPISerial.cpp.
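
A minimal usage sketch (hypothetical, not part of the ISSM sources): it gathers a distributed Vec into a serial array that every rank can index, then releases the array with xDelete<double>, which pairs with the xNew<double> allocation made inside VecToMPISerial. The function Example and its arguments are illustrative names only.

    #include "../petscincludes.h"
    #include "../../../shared/shared.h"

    /*Hypothetical example: gather a distributed PETSc Vec into a serial array*/
    void Example(Vec distributed,ISSM_MPI_Comm comm){

        double* serial=NULL;

        /*Gather onto rank 0, then broadcast so that every rank holds the full vector*/
        VecToMPISerial(&serial,distributed,comm,true);

        /* ... serial now holds all entries of the vector on every rank ... */

        xDelete<double>(serial);
    }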

Function Documentation

◆ VecToMPISerial()

int VecToMPISerial (double** pgathered_vector, Vec vector, ISSM_MPI_Comm comm, bool broadcast)

Definition at line 14 of file VecToMPISerial.cpp.
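
Parameters

pgathered_vector	output: set to a newly allocated array of size VecGetSize(vector) holding the full vector. It is NULL if the vector is empty, and NULL on ranks other than 0 when broadcast is false. The caller frees it with xDelete<double>.
vector	the distributed PETSc Vec to gather.
comm	the MPI communicator over which vector is distributed.
broadcast	if true, the gathered vector is broadcast from rank 0 so that every rank receives a full copy; if false, only rank 0 holds it.

Returns

1 in all cases.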

    int VecToMPISerial(double** pgathered_vector,Vec vector,ISSM_MPI_Comm comm,bool broadcast){

        int i;
        int num_procs;
        int my_rank;

        /*Petsc*/
        ISSM_MPI_Status status;
        PetscInt lower_row,upper_row;
        int range;
        int* idxn=NULL;
        int buffer[3];

        /*intermediary results*/
        double* local_vector=NULL;

        /*input*/
        int vector_size;

        /*Output*/
        double* gathered_vector=NULL; //Global vector holding the final assembled vector on all nodes.

        /*recover my_rank and num_procs*/
        ISSM_MPI_Comm_size(comm,&num_procs);
        ISSM_MPI_Comm_rank(comm,&my_rank);

        VecGetSize(vector,&vector_size);
        if(vector_size==0){
            *pgathered_vector=NULL;
            return 1;
        }

        /*Allocate gathered vector on all nodes*/
        if(broadcast || my_rank==0){
            gathered_vector=xNew<double>(vector_size);
        }

        /*Allocate local vectors*/
        VecGetOwnershipRange(vector,&lower_row,&upper_row);
        upper_row--;
        range=upper_row-lower_row+1;

        if(range){
            idxn=xNew<int>(range);
            for(i=0;i<range;i++){
                *(idxn+i)=lower_row+i;
            }
            local_vector=xNew<double>(range);
            /*Extract values from MPI vector to serial local_vector on each node*/
            VecGetValues(vector,range,idxn,local_vector);
        }

        /*Now each node holds its local_vector containing range rows.
         * We send this local_vector to the gathered_vector on node 0*/
        for(i=1;i<num_procs;i++){
            if(my_rank==i){
                buffer[0]=my_rank;
                buffer[1]=lower_row;
                buffer[2]=range;
                ISSM_MPI_Send(buffer,3,ISSM_MPI_INT,0,1,comm);
                if(range) ISSM_MPI_Send(local_vector,range,ISSM_MPI_PDOUBLE,0,1,comm);
            }
            if(my_rank==0){
                ISSM_MPI_Recv(buffer,3,ISSM_MPI_INT,i,1,comm,&status);
                if(buffer[2]) ISSM_MPI_Recv(gathered_vector+buffer[1],buffer[2],ISSM_MPI_PDOUBLE,i,1,comm,&status);
            }
        }

        if(my_rank==0){
            /*Still have the local_vector on node 0 to take care of*/
            if(range) memcpy(gathered_vector+lower_row,local_vector,range*sizeof(double));
        }

        if(broadcast){
            /*Now, broadcast gathered_vector from node 0 to the other nodes*/
            ISSM_MPI_Bcast(gathered_vector,vector_size,ISSM_MPI_PDOUBLE,0,comm);
        }

        /*Assign output pointer*/
        *pgathered_vector=gathered_vector;

        /*free resources*/
        xDelete<int>(idxn);
        xDelete<double>(local_vector);

        return 1;
    }
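
The gather loop above serializes communication: rank 0 receives each slice in rank order, using a three-int header to learn the sender's offset and length. A collective alternative is MPI_Gatherv. The sketch below is not ISSM code and uses raw MPI rather than the ISSM_MPI wrappers; it assumes each rank knows its range and lower_row as computed in VecToMPISerial.

    #include <mpi.h>

    /*Hypothetical alternative: assemble the gathered vector with one collective
     * call instead of a loop of point-to-point sends. counts/displs are built
     * from each rank's slice length and starting row.*/
    void GathervSketch(double* local_vector,int range,int lower_row,
                       double* gathered_vector,int num_procs,MPI_Comm comm){

        int* counts=new int[num_procs]; /*slice length owned by each rank*/
        int* displs=new int[num_procs]; /*starting row of each slice*/

        MPI_Allgather(&range,1,MPI_INT,counts,1,MPI_INT,comm);
        MPI_Allgather(&lower_row,1,MPI_INT,displs,1,MPI_INT,comm);

        /*Rank 0 receives every slice at its correct offset in one call*/
        MPI_Gatherv(local_vector,range,MPI_DOUBLE,
                    gathered_vector,counts,displs,MPI_DOUBLE,0,comm);

        delete [] counts;
        delete [] displs;
    }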
Referenced symbols

int ISSM_MPI_Comm_rank(ISSM_MPI_Comm comm, int *rank) (issmmpi.cpp:198)
#define ISSM_MPI_INT (issmmpi.h:127)
int ISSM_MPI_Send(void *buf, int count, ISSM_MPI_Datatype datatype, int dest, int tag, ISSM_MPI_Comm comm) (issmmpi.cpp:484)
int ISSM_MPI_Status (issmmpi.h:121)
#define ISSM_MPI_PDOUBLE (issmmpi.h:126)
int ISSM_MPI_Bcast(void *buffer, int count, ISSM_MPI_Datatype datatype, int root, ISSM_MPI_Comm comm) (issmmpi.cpp:162)
int ISSM_MPI_Recv(void *buf, int count, ISSM_MPI_Datatype datatype, int source, int tag, ISSM_MPI_Comm comm, ISSM_MPI_Status *status) (issmmpi.cpp:342)
int ISSM_MPI_Comm_size(ISSM_MPI_Comm comm, int *size) (issmmpi.cpp:209)