source: issm/oecreview/Archive/13393-13976/ISSM-13600-13601.diff

Last change on this file was 13980, checked in by Mathieu Morlighem, 12 years ago

preparing oecreview for 13393-13976

File size: 4.6 KB
Index: ../trunk-jpl/src/c/toolkits/petsc/patches/MatToSerial.cpp
===================================================================
--- ../trunk-jpl/src/c/toolkits/petsc/patches/MatToSerial.cpp	(revision 13600)
+++ ../trunk-jpl/src/c/toolkits/petsc/patches/MatToSerial.cpp	(revision 13601)
@@ -15,8 +15,8 @@
 void MatToSerial(double** poutmatrix,Mat matrix,COMM comm){
 
 	int i;
-	extern int my_rank;
-	extern int num_procs;
+	int my_rank2;
+	int num_procs2;
 
 	/*Petsc variables*/
 	PetscInt lower_row,upper_row;
@@ -28,6 +28,10 @@
 	double* local_matrix=NULL; /*matrix local to each node used for temporary holding matrix values*/
 	int buffer[3];
 
+	/*recover my_rank2 and num_procs2:*/
+	MPI_Comm_size(comm,&num_procs2);
+	MPI_Comm_rank(comm,&my_rank2);
+
 	/*Output*/
 	double* outmatrix=NULL;
 
@@ -40,7 +44,7 @@
 	range=upper_row-lower_row+1;
 
 	/*Local and global allocation*/
-	if (my_rank==0)outmatrix=xNew<double>(M*N);
+	if (my_rank2==0)outmatrix=xNew<double>(M*N);
 
 	if (range){
 		local_matrix=xNew<double>(N*range);
@@ -60,20 +64,20 @@
 	/*Now each node holds its local_matrix containing range rows.
 	 * We send these rows to the matrix on node 0*/
 
-	for (i=1;i<num_procs;i++){
-		if (my_rank==i){
-			buffer[0]=my_rank;
+	for (i=1;i<num_procs2;i++){
+		if (my_rank2==i){
+			buffer[0]=my_rank2;
 			buffer[1]=lower_row;
 			buffer[2]=range;
 			MPI_Send(buffer,3,MPI_INT,0,1,comm);
 			if (range)MPI_Send(local_matrix,N*range,MPI_DOUBLE,0,1,comm);
 		}
-		if (my_rank==0){
+		if (my_rank2==0){
 			MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
 			if (buffer[2])MPI_Recv(outmatrix+(buffer[1]*N),N*buffer[2],MPI_DOUBLE,i,1,comm,&status);
 		}
 	}
-	if (my_rank==0){
+	if (my_rank2==0){
 		//Still have the local_matrix on node 0 to take care of.
 		memcpy(outmatrix,local_matrix,N*range*sizeof(double));
 	}
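
The whole change above follows one pattern: instead of reading the extern globals my_rank and num_procs, the routine now recovers the rank and size locally from the communicator it is handed, so it also works on sub-communicators. A minimal standalone sketch of that pattern, assuming a hypothetical ReportLayout helper (illustrative only, not ISSM code):

#include <mpi.h>
#include <cstdio>

/* Recover the rank and size from the communicator argument itself,
 * as the patch does, instead of relying on extern globals. */
void ReportLayout(MPI_Comm comm){
	int my_rank2,num_procs2;
	MPI_Comm_size(comm,&num_procs2);
	MPI_Comm_rank(comm,&my_rank2);
	printf("rank %d of %d\n",my_rank2,num_procs2);
}

int main(int argc,char** argv){
	MPI_Init(&argc,&argv);
	ReportLayout(MPI_COMM_WORLD);
	MPI_Finalize();
	return 0;
}

Compiled with mpicxx and run under mpiexec, each rank prints its own pair with no dependence on global state.
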
Index: ../trunk-jpl/src/c/toolkits/petsc/patches/VecToMPISerial.cpp
===================================================================
--- ../trunk-jpl/src/c/toolkits/petsc/patches/VecToMPISerial.cpp	(revision 13600)
+++ ../trunk-jpl/src/c/toolkits/petsc/patches/VecToMPISerial.cpp	(revision 13601)
@@ -15,8 +15,8 @@
 int VecToMPISerial(double** pgathered_vector, Vec vector,COMM comm){
 
 	int i;
-	extern int num_procs;
-	extern int my_rank;
+	int num_procs2;
+	int my_rank2;
 
 	/*Petsc*/
 	MPI_Status status;
@@ -33,6 +33,10 @@
 
 	/*Output*/
 	double* gathered_vector=NULL; //Global vector holding the final assembled vector on all nodes.
+
+	/*recover my_rank2 and num_procs2*/
+	MPI_Comm_size(comm,&num_procs2);
+	MPI_Comm_rank(comm,&my_rank2);
 
 	VecGetSize(vector,&vector_size);
 	if(vector_size==0){
@@ -60,21 +64,21 @@
 
 	/*Now each node holds its local_vector containing range rows.
 	 * We send this local_vector to the gathered_vector on node 0*/
-	for (i=1;i<num_procs;i++){
-		if (my_rank==i){
-			buffer[0]=my_rank;
+	for (i=1;i<num_procs2;i++){
+		if (my_rank2==i){
+			buffer[0]=my_rank2;
 			buffer[1]=lower_row;
 			buffer[2]=range;
 			MPI_Send(buffer,3,MPI_INT,0,1,comm);
 			if (range)MPI_Send(local_vector,range,MPI_DOUBLE,0,1,comm);
 		}
-		if (my_rank==0){
+		if (my_rank2==0){
 			MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
 			if (buffer[2])MPI_Recv(gathered_vector+buffer[1],buffer[2],MPI_DOUBLE,i,1,comm,&status);
 		}
 	}
 
-	if (my_rank==0){
+	if (my_rank2==0){
 		//Still have the local_vector on node 0 to take care of.
 		if (range)memcpy(gathered_vector+lower_row,local_vector,range*sizeof(double));
 	}
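
For context, the gather-to-node-0 scheme that VecToMPISerial keeps after this change can be sketched without PETSc objects: every rank i>0 first sends a three-int header (rank, lower_row, range) and then its local rows, while node 0 copies its own rows directly. The GatherToRoot name and the malloc-based allocation below are assumptions for the sketch, not ISSM's xNew allocator:

#include <mpi.h>
#include <cstring>
#include <cstdlib>

/* Gather distributed rows onto rank 0; returns the full array on rank 0,
 * NULL elsewhere. Mirrors the header-then-payload protocol in the diff. */
double* GatherToRoot(double* local_vector,int lower_row,int range,int global_size,MPI_Comm comm){
	int my_rank2,num_procs2;
	MPI_Status status;
	int buffer[3];
	double* gathered_vector=NULL;

	MPI_Comm_size(comm,&num_procs2);
	MPI_Comm_rank(comm,&my_rank2);
	if (my_rank2==0) gathered_vector=(double*)malloc(global_size*sizeof(double));

	for (int i=1;i<num_procs2;i++){
		if (my_rank2==i){
			buffer[0]=my_rank2; buffer[1]=lower_row; buffer[2]=range;
			MPI_Send(buffer,3,MPI_INT,0,1,comm);
			if (range) MPI_Send(local_vector,range,MPI_DOUBLE,0,1,comm);
		}
		if (my_rank2==0){
			MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
			if (buffer[2]) MPI_Recv(gathered_vector+buffer[1],buffer[2],MPI_DOUBLE,i,1,comm,&status);
		}
	}
	/*rank 0 still has to place its own local rows*/
	if (my_rank2==0 && range) memcpy(gathered_vector+lower_row,local_vector,range*sizeof(double));
	return gathered_vector;
}

The comment in the diff suggests the assembled vector ends up on all nodes afterwards, but that step lies outside these hunks and is not shown here.
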
Index: ../trunk-jpl/src/c/toolkits/petsc/patches/GetOwnershipBoundariesFromRange.cpp
===================================================================
--- ../trunk-jpl/src/c/toolkits/petsc/patches/GetOwnershipBoundariesFromRange.cpp	(revision 13600)
+++ ../trunk-jpl/src/c/toolkits/petsc/patches/GetOwnershipBoundariesFromRange.cpp	(revision 13601)
@@ -16,9 +16,13 @@
 void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range,COMM comm){
 
 	/*externals :*/
-	extern int my_rank;
-	extern int num_procs;
+	int my_rank2;
+	int num_procs2;
 
+	/*recover my_rank2 and num_procs2:*/
+	MPI_Comm_size(comm,&num_procs2);
+	MPI_Comm_rank(comm,&my_rank2);
+
 	/*output: */
 	int lower_row,upper_row;
 
@@ -27,13 +31,13 @@
 	int* allranges=NULL;
 
 	/*Gather all range values into allranges, for all nodes*/
-	allranges=xNew<int>(num_procs);
+	allranges=xNew<int>(num_procs2);
 	MPI_Allgather(&range,1,MPI_INT,allranges,1,MPI_INT,comm);
 
 	/*From all ranges, get lower row and upper row*/
 	lower_row=0;
 	upper_row=lower_row+allranges[0];
-	for (i=1;i<=my_rank;i++){
+	for (i=1;i<=my_rank2;i++){
 		lower_row=lower_row+allranges[i-1];
 		upper_row=upper_row+allranges[i];
 	}
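
This last hunk leaves the ownership computation itself untouched: each rank contributes its local row count via MPI_Allgather and then accumulates the counts of the ranks below it to locate its own window. A self-contained version of that arithmetic, with standard malloc/free standing in for ISSM's xNew/xDelete (an assumption for the sketch):

#include <mpi.h>
#include <cstdio>
#include <cstdlib>

/* Compute this rank's row window from everyone's local row count,
 * using the same accumulation as the patched routine. */
void OwnershipFromRange(int* plower_row,int* pupper_row,int range,MPI_Comm comm){
	int my_rank2,num_procs2;
	MPI_Comm_size(comm,&num_procs2);
	MPI_Comm_rank(comm,&my_rank2);

	/*Gather all range values into allranges, for all nodes*/
	int* allranges=(int*)malloc(num_procs2*sizeof(int));
	MPI_Allgather(&range,1,MPI_INT,allranges,1,MPI_INT,comm);

	/*From all ranges, get lower row and upper row*/
	int lower_row=0;
	int upper_row=allranges[0];
	for (int i=1;i<=my_rank2;i++){
		lower_row=lower_row+allranges[i-1];
		upper_row=upper_row+allranges[i];
	}
	free(allranges);

	*plower_row=lower_row;
	*pupper_row=upper_row;
}

int main(int argc,char** argv){
	MPI_Init(&argc,&argv);
	int my_rank2,lower_row,upper_row;
	MPI_Comm_rank(MPI_COMM_WORLD,&my_rank2);
	OwnershipFromRange(&lower_row,&upper_row,3+my_rank2,MPI_COMM_WORLD); /*deliberately uneven ranges*/
	printf("rank %d: lower_row=%d upper_row=%d\n",my_rank2,lower_row,upper_row);
	MPI_Finalize();
	return 0;
}
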