Oasis3-MCT
mod_oasis_mpi.F90
1 !> Provides a generic and simpler interface into MPI calls for OASIS.
2 
3 MODULE mod_oasis_mpi
4 
5 !-------------------------------------------------------------------------------
6 ! PURPOSE: general layer on MPI functions
7 !-------------------------------------------------------------------------------
8 
13 
14  implicit none
15  private
16 
17 ! PUBLIC: Public interfaces
18 
19  public :: oasis_mpi_chkerr
20  public :: oasis_mpi_send
21  public :: oasis_mpi_recv
22  public :: oasis_mpi_bcast
23  public :: oasis_mpi_gathscatvinit
24  public :: oasis_mpi_gatherv
25  public :: oasis_mpi_scatterv
26  public :: oasis_mpi_sum
27  public :: oasis_mpi_min
28  public :: oasis_mpi_max
29  public :: oasis_mpi_commsize
30  public :: oasis_mpi_commrank
31  public :: oasis_mpi_initialized
32  public :: oasis_mpi_wtime
33  public :: oasis_mpi_abort
34  public :: oasis_mpi_barrier
35  public :: oasis_mpi_init
36  public :: oasis_mpi_finalize
37  public :: oasis_mpi_reducelists
38 
39  !> Generic overloaded interface into MPI send
40  interface oasis_mpi_send ; module procedure &
41  oasis_mpi_sendi0, &
42  oasis_mpi_sendi1, &
43  oasis_mpi_sendr0, &
44  oasis_mpi_sendr1, &
45  oasis_mpi_sendr3
46  end interface
47 
48  !> Generic overloaded interface into MPI receive
49  interface oasis_mpi_recv ; module procedure &
50  oasis_mpi_recvi0, &
51  oasis_mpi_recvi1, &
52  oasis_mpi_recvr0, &
53  oasis_mpi_recvr1, &
54  oasis_mpi_recvr3
55  end interface
56 
57  !> Generic overloaded interface into MPI broadcast
58  interface oasis_mpi_bcast ; module procedure &
59  oasis_mpi_bcasti0, &
60  oasis_mpi_bcastl0, &
61  oasis_mpi_bcastc0, &
62  oasis_mpi_bcastc1, &
63  oasis_mpi_bcastr0, &
64  oasis_mpi_bcasti1, &
65  oasis_mpi_bcastl1, &
66  oasis_mpi_bcastr1, &
67  oasis_mpi_bcastr2, &
68  oasis_mpi_bcasti2, &
69  oasis_mpi_bcastr3
70  end interface
71 
72  !> Generic interface to oasis_mpi_gathScatVInit
73  interface oasis_mpi_gathscatvinit ; module procedure &
74  oasis_mpi_gathscatvinitr1
75  end interface
76 
77  !> Generic interfaces into an MPI vector gather
78  interface oasis_mpi_gatherv ; module procedure &
79  oasis_mpi_gathervr1
80  end interface
81 
82  !> Generic interfaces into an MPI vector scatter
83  interface oasis_mpi_scatterv ; module procedure &
84  oasis_mpi_scattervr1
85  end interface
86 
87  !> Generic overloaded interface into MPI sum reduction
88  interface oasis_mpi_sum ; module procedure &
89  oasis_mpi_sumi0, &
90  oasis_mpi_sumi1, &
91  oasis_mpi_sumb0, &
92  oasis_mpi_sumb1, &
93 #ifndef __NO_16BYTE_REALS
94  oasis_mpi_sumq0, &
95  oasis_mpi_sumq1, &
96  oasis_mpi_sumq2, &
97  oasis_mpi_sumq3, &
98 #endif
99  oasis_mpi_sumr0, &
100  oasis_mpi_sumr1, &
101  oasis_mpi_sumr2, &
102  oasis_mpi_sumr3
103  end interface
104 
105  !> Generic overloaded interface into MPI min reduction
106  interface oasis_mpi_min ; module procedure &
107  oasis_mpi_mini0, &
108  oasis_mpi_mini1, &
109  oasis_mpi_minr0, &
110  oasis_mpi_minr1
111  end interface
112 
113  !> Generic overloaded interface into MPI max reduction
114  interface oasis_mpi_max ; module procedure &
115  oasis_mpi_maxi0, &
116  oasis_mpi_maxi1, &
117  oasis_mpi_maxr0, &
118  oasis_mpi_maxr1
119  end interface
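
! The generic names above dispatch on the type and rank of their actual
! arguments. A minimal, illustrative sketch (the variables, tag, target rank
! and communicator below are assumptions, not part of this module):
!
!    integer(ip_i4_p)  :: iscalar, ivec(10), itag, ipid
!    real(ip_double_p) :: rfield(5,5,5)
!
!    call oasis_mpi_send(iscalar, ipid, itag, MPI_COMM_WORLD)  ! -> oasis_mpi_sendi0
!    call oasis_mpi_send(ivec,    ipid, itag, MPI_COMM_WORLD)  ! -> oasis_mpi_sendi1
!    call oasis_mpi_send(rfield,  ipid, itag, MPI_COMM_WORLD)  ! -> oasis_mpi_sendr3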
120 
121 ! mpi library include file
122 #include <mpif.h>
123 
124 !===============================================================================
125 CONTAINS
126 !===============================================================================
127 
128 !> Checks MPI error codes and aborts
129 
130 !> This method compares rcode to MPI_SUCCESS. If rcode indicates an error,
131 !> it queries MPI_ERROR_STRING for the error message associated with rcode
132 !> and aborts, combining the caller-supplied string with that message.
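!>
!> A minimal usage sketch (the MPI call and the variables below are
!> illustrative assumptions, not part of this routine):
!> @code
!>   integer(ip_i4_p) :: isize, ierror
!>   call MPI_COMM_SIZE(MPI_COMM_WORLD, isize, ierror)
!>   call oasis_mpi_chkerr(ierror, 'my_caller: MPI_COMM_SIZE')
!> @endcode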
133 
134 SUBROUTINE oasis_mpi_chkerr(rcode,string)
136  IMPLICIT none
137 
138  !----- arguments ---
139  integer(ip_i4_p), intent(in) :: rcode !< MPI error code
140  character(*), intent(in) :: string !< abort message
141 
142  !----- local ---
143  character(*),parameter :: subname = '(oasis_mpi_chkerr)'
144  character(MPI_MAX_ERROR_STRING) :: lstring
145  integer(ip_i4_p) :: len
146  integer(ip_i4_p) :: ierr
147 
148 !-------------------------------------------------------------------------------
149 ! PURPOSE: layer on MPI error checking
150 !-------------------------------------------------------------------------------
151 
152  call oasis_debug_enter(subname)
153 
154  lstring = ' '
155  if (rcode /= mpi_success) then
156  call mpi_error_string(rcode,lstring,len,ierr)
157  call oasis_mpi_abort(subname//trim(string)//':'//trim(lstring),rcode)
158  endif
159 
160  call oasis_debug_exit(subname)
161 
162 END SUBROUTINE oasis_mpi_chkerr
163 
164 !===============================================================================
165 !===============================================================================
166 
167 !> Send a scalar integer
168 
169 SUBROUTINE oasis_mpi_sendi0(lvec,pid,tag,comm,string)
171  IMPLICIT none
172 
173  !----- arguments ---
174  integer(ip_i4_p), intent(in) :: lvec !< send value
175  integer(ip_i4_p), intent(in) :: pid !< pid to send to
176  integer(ip_i4_p), intent(in) :: tag !< mpi tag
177  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
178  character(*),optional,intent(in) :: string !< to identify caller
179 
180  !----- local ---
181  character(*),parameter :: subname = '(oasis_mpi_sendi0)'
182  integer(ip_i4_p) :: lsize
183  integer(ip_i4_p) :: ierr
184 
185 !-------------------------------------------------------------------------------
186 ! PURPOSE: Send a single integer
187 !-------------------------------------------------------------------------------
188 
189  call oasis_debug_enter(subname)
190 
191  lsize = 1
192 
193  call mpi_send(lvec,lsize,mpi_integer,pid,tag,comm,ierr)
194  if (present(string)) then
195  call oasis_mpi_chkerr(ierr,subname//trim(string))
196  else
197  call oasis_mpi_chkerr(ierr,subname)
198  endif
199 
200  call oasis_debug_exit(subname)
201 
202 END SUBROUTINE oasis_mpi_sendi0
203 
204 !===============================================================================
205 !===============================================================================
206 
207 !> Send an array of 1D integers
208 
209 SUBROUTINE oasis_mpi_sendi1(lvec,pid,tag,comm,string)
211  IMPLICIT none
212 
213  !----- arguments ---
214  integer(ip_i4_p), intent(in) :: lvec(:) !< send values
215  integer(ip_i4_p), intent(in) :: pid !< pid to send to
216  integer(ip_i4_p), intent(in) :: tag !< mpi tag
217  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
218  character(*),optional,intent(in) :: string !< to identify caller
219 
220  !----- local ---
221  character(*),parameter :: subname = '(oasis_mpi_sendi1)'
222  integer(ip_i4_p) :: lsize
223  integer(ip_i4_p) :: ierr
224 
225 !-------------------------------------------------------------------------------
226 ! PURPOSE: Send a vector of integers
227 !-------------------------------------------------------------------------------
228 
229  call oasis_debug_enter(subname)
230 
231  lsize = size(lvec)
232 
233  call mpi_send(lvec,lsize,mpi_integer,pid,tag,comm,ierr)
234  if (present(string)) then
235  call oasis_mpi_chkerr(ierr,subname//trim(string))
236  else
237  call oasis_mpi_chkerr(ierr,subname)
238  endif
239 
240  call oasis_debug_exit(subname)
241 
242 END SUBROUTINE oasis_mpi_sendi1
243 
244 !===============================================================================
245 !===============================================================================
246 
247 !> Send a scalar double
248 
249 SUBROUTINE oasis_mpi_sendr0(lvec,pid,tag,comm,string)
251  IMPLICIT none
252 
253  !----- arguments ---
254  real(ip_double_p),intent(in) :: lvec !< send values
255  integer(ip_i4_p), intent(in) :: pid !< pid to send to
256  integer(ip_i4_p), intent(in) :: tag !< mpi tag
257  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
258  character(*),optional,intent(in) :: string !< to identify caller
259 
260  !----- local ---
261  character(*),parameter :: subname = '(oasis_mpi_sendr0)'
262  integer(ip_i4_p) :: lsize
263  integer(ip_i4_p) :: ierr
264 
265 !-------------------------------------------------------------------------------
266 ! PURPOSE: Send a real scalar
267 !-------------------------------------------------------------------------------
268 
269  call oasis_debug_enter(subname)
270 
271  lsize = 1
272 
273  call mpi_send(lvec,lsize,mpi_real8,pid,tag,comm,ierr)
274  if (present(string)) then
275  call oasis_mpi_chkerr(ierr,subname//trim(string))
276  else
277  call oasis_mpi_chkerr(ierr,subname)
278  endif
279 
280  call oasis_debug_exit(subname)
281 
282 END SUBROUTINE oasis_mpi_sendr0
283 
284 !===============================================================================
285 !===============================================================================
286 
287 !> Send an array of 1D doubles
288 
289 SUBROUTINE oasis_mpi_sendr1(lvec,pid,tag,comm,string)
291  IMPLICIT none
292 
293  !----- arguments ---
294  real(ip_double_p),intent(in) :: lvec(:) !< send values
295  integer(ip_i4_p), intent(in) :: pid !< pid to send to
296  integer(ip_i4_p), intent(in) :: tag !< mpi tag
297  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
298  character(*),optional,intent(in) :: string !< to identify caller
299 
300  !----- local ---
301  character(*),parameter :: subname = '(oasis_mpi_sendr1)'
302  integer(ip_i4_p) :: lsize
303  integer(ip_i4_p) :: ierr
304 
305 !-------------------------------------------------------------------------------
306 ! PURPOSE: Send a vector of reals
307 !-------------------------------------------------------------------------------
308 
309  call oasis_debug_enter(subname)
310 
311  lsize = size(lvec)
312 
313  call mpi_send(lvec,lsize,mpi_real8,pid,tag,comm,ierr)
314  if (present(string)) then
315  call oasis_mpi_chkerr(ierr,subname//trim(string))
316  else
317  call oasis_mpi_chkerr(ierr,subname)
318  endif
319 
320  call oasis_debug_exit(subname)
321 
322 END SUBROUTINE oasis_mpi_sendr1
323 
324 !===============================================================================
325 !===============================================================================
326 
327 !> Send an array of 3D doubles
328 
329 SUBROUTINE oasis_mpi_sendr3(array,pid,tag,comm,string)
331  IMPLICIT none
332 
333  !----- arguments ---
334  real(ip_double_p),intent(in) :: array(:,:,:) !< send values
335  integer(ip_i4_p), intent(in) :: pid !< pid to send to
336  integer(ip_i4_p), intent(in) :: tag !< mpi tag
337  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
338  character(*),optional,intent(in) :: string !< to identify caller
339 
340  !----- local ---
341  character(*),parameter :: subname = '(oasis_mpi_sendr3)'
342  integer(ip_i4_p) :: lsize
343  integer(ip_i4_p) :: ierr
344 
345 !-------------------------------------------------------------------------------
346 ! PURPOSE: Send a 3D array of reals
347 !-------------------------------------------------------------------------------
348 
349  call oasis_debug_enter(subname)
350 
351  lsize = size(array)
352 
353  call mpi_send(array,lsize,mpi_real8,pid,tag,comm,ierr)
354  if (present(string)) then
355  call oasis_mpi_chkerr(ierr,subname//trim(string))
356  else
357  call oasis_mpi_chkerr(ierr,subname)
358  endif
359 
360  call oasis_debug_exit(subname)
361 
362 END SUBROUTINE oasis_mpi_sendr3
363 
364 !===============================================================================
365 !===============================================================================
366 
367 !> Receive a scalar integer
368 
369 SUBROUTINE oasis_mpi_recvi0(lvec,pid,tag,comm,string)
371  IMPLICIT none
372 
373  !----- arguments ---
374  integer(ip_i4_p), intent(out):: lvec !< receive values
375  integer(ip_i4_p), intent(in) :: pid !< pid to recv from
376  integer(ip_i4_p), intent(in) :: tag !< mpi tag
377  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
378  character(*),optional,intent(in) :: string !< to identify caller
379 
380  !----- local ---
381  character(*),parameter :: subname = '(oasis_mpi_recvi0)'
382  integer(ip_i4_p) :: lsize
383  integer(ip_i4_p) :: status(mpi_status_size) ! mpi status info
384  integer(ip_i4_p) :: ierr
385 
386 !-------------------------------------------------------------------------------
387 ! PURPOSE: Recv a scalar integer
388 !-------------------------------------------------------------------------------
389 
390  call oasis_debug_enter(subname)
391 
392  lsize = 1
393 
394  call mpi_recv(lvec,lsize,mpi_integer,pid,tag,comm,status,ierr)
395  if (present(string)) then
396  call oasis_mpi_chkerr(ierr,subname//trim(string))
397  else
398  call oasis_mpi_chkerr(ierr,subname)
399  endif
400 
401  call oasis_debug_exit(subname)
402 
403 END SUBROUTINE oasis_mpi_recvi0
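
! A minimal sketch pairing the send and receive wrappers across two tasks
! (the tag, value and task numbers below are illustrative assumptions):
!
!    integer(ip_i4_p) :: ival, mype, ierror
!    call MPI_COMM_RANK(MPI_COMM_WORLD, mype, ierror)
!    if (mype == 0) then
!       ival = 42
!       call oasis_mpi_send(ival, 1, 100, MPI_COMM_WORLD, 'rank0 send')
!    elseif (mype == 1) then
!       call oasis_mpi_recv(ival, 0, 100, MPI_COMM_WORLD, 'rank1 recv')
!    endif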
404 
405 !===============================================================================
406 !===============================================================================
407 
408 !> Receive an array of 1D integers
409 
410 SUBROUTINE oasis_mpi_recvi1(lvec,pid,tag,comm,string)
412  IMPLICIT none
413 
414  !----- arguments ---
415  integer(ip_i4_p), intent(out):: lvec(:) !< receive values
416  integer(ip_i4_p), intent(in) :: pid !< pid to recv from
417  integer(ip_i4_p), intent(in) :: tag !< mpi tag
418  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
419  character(*),optional,intent(in) :: string !< to identify caller
420 
421  !----- local ---
422  character(*),parameter :: subname = '(oasis_mpi_recvi1)'
423  integer(ip_i4_p) :: lsize
424  integer(ip_i4_p) :: status(mpi_status_size) ! mpi status info
425  integer(ip_i4_p) :: ierr
426 
427 !-------------------------------------------------------------------------------
428 ! PURPOSE: Recv a vector of integers
429 !-------------------------------------------------------------------------------
430 
431  call oasis_debug_enter(subname)
432 
433  lsize = size(lvec)
434 
435  call mpi_recv(lvec,lsize,mpi_integer,pid,tag,comm,status,ierr)
436  if (present(string)) then
437  call oasis_mpi_chkerr(ierr,subname//trim(string))
438  else
439  call oasis_mpi_chkerr(ierr,subname)
440  endif
441 
442  call oasis_debug_exit(subname)
443 
444 END SUBROUTINE oasis_mpi_recvi1
445 
446 !===============================================================================
447 !===============================================================================
448 
449 !> Receive a scalar double
450 
451 SUBROUTINE oasis_mpi_recvr0(lvec,pid,tag,comm,string)
453  IMPLICIT none
454 
455  !----- arguments ---
456  real(ip_double_p),intent(out):: lvec !< receive values
457  integer(ip_i4_p), intent(in) :: pid !< pid to recv from
458  integer(ip_i4_p), intent(in) :: tag !< mpi tag
459  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
460  character(*),optional,intent(in) :: string !< to identify caller
461 
462  !----- local ---
463  character(*),parameter :: subname = '(oasis_mpi_recvr0)'
464  integer(ip_i4_p) :: lsize
465  integer(ip_i4_p) :: status(mpi_status_size) ! mpi status info
466  integer(ip_i4_p) :: ierr
467 
468 !-------------------------------------------------------------------------------
469 ! PURPOSE: Recv a real scalar
470 !-------------------------------------------------------------------------------
471 
472  call oasis_debug_enter(subname)
473 
474  lsize = 1
475 
476  call mpi_recv(lvec,lsize,mpi_real8,pid,tag,comm,status,ierr)
477  if (present(string)) then
478  call oasis_mpi_chkerr(ierr,subname//trim(string))
479  else
480  call oasis_mpi_chkerr(ierr,subname)
481  endif
482 
483  call oasis_debug_exit(subname)
484 
485 END SUBROUTINE oasis_mpi_recvr0
486 
487 !===============================================================================
488 !===============================================================================
489 
490 !> Receive an array of 1D doubles
491 
492 SUBROUTINE oasis_mpi_recvr1(lvec,pid,tag,comm,string)
494  IMPLICIT none
495 
496  !----- arguments ---
497  real(ip_double_p),intent(out):: lvec(:) !< receive values
498  integer(ip_i4_p), intent(in) :: pid !< pid to recv from
499  integer(ip_i4_p), intent(in) :: tag !< mpi tag
500  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
501  character(*),optional,intent(in) :: string !< to identify caller
502 
503  !----- local ---
504  character(*),parameter :: subname = '(oasis_mpi_recvr1)'
505  integer(ip_i4_p) :: lsize
506  integer(ip_i4_p) :: status(mpi_status_size) ! mpi status info
507  integer(ip_i4_p) :: ierr
508 
509 !-------------------------------------------------------------------------------
510 ! PURPOSE: Recv a vector of reals
511 !-------------------------------------------------------------------------------
512 
513  call oasis_debug_enter(subname)
514 
515  lsize = size(lvec)
516 
517  call mpi_recv(lvec,lsize,mpi_real8,pid,tag,comm,status,ierr)
518  if (present(string)) then
519  call oasis_mpi_chkerr(ierr,subname//trim(string))
520  else
521  call oasis_mpi_chkerr(ierr,subname)
522  endif
523 
524  call oasis_debug_exit(subname)
525 
526 END SUBROUTINE oasis_mpi_recvr1
527 
528 !===============================================================================
529 !===============================================================================
530 
531 !> Receive an array of 3D doubles
532 
533 SUBROUTINE oasis_mpi_recvr3(array,pid,tag,comm,string)
535  IMPLICIT none
536 
537  !----- arguments ---
538  real(ip_double_p),intent(out):: array(:,:,:) !< receive values
539  integer(ip_i4_p), intent(in) :: pid !< pid to recv from
540  integer(ip_i4_p), intent(in) :: tag !< mpi tag
541  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
542  character(*),optional,intent(in) :: string !< to identify caller
543 
544  !----- local ---
545  character(*),parameter :: subname = '(oasis_mpi_recvr3)'
546  integer(ip_i4_p) :: lsize
547  integer(ip_i4_p) :: status(mpi_status_size) ! mpi status info
548  integer(ip_i4_p) :: ierr
549 
550 !-------------------------------------------------------------------------------
551 ! PURPOSE: Recv a 3D array of reals
552 !-------------------------------------------------------------------------------
553 
554  call oasis_debug_enter(subname)
555 
556  lsize = size(array)
557 
558  call mpi_recv(array,lsize,mpi_real8,pid,tag,comm,status,ierr)
559  if (present(string)) then
560  call oasis_mpi_chkerr(ierr,subname//trim(string))
561  else
562  call oasis_mpi_chkerr(ierr,subname)
563  endif
564 
565  call oasis_debug_exit(subname)
566 
567 END SUBROUTINE oasis_mpi_recvr3
568 
569 !===============================================================================
570 !===============================================================================
571 
572 !> Broadcast a scalar integer
573 
574 SUBROUTINE oasis_mpi_bcasti0(vec,comm,string,pebcast)
576  IMPLICIT none
577 
578  !----- arguments ---
579  integer(ip_i4_p), intent(inout):: vec !< values to broadcast
580  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
581  character(*),optional,intent(in) :: string !< to identify caller
582  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
583 
584  !----- local ---
585  character(*),parameter :: subname = '(oasis_mpi_bcasti0)'
586  integer(ip_i4_p) :: ierr
587  integer(ip_i4_p) :: lsize
588  integer(ip_i4_p) :: lpebcast
589 
590 !-------------------------------------------------------------------------------
591 ! PURPOSE: Broadcast an integer
592 !-------------------------------------------------------------------------------
593 
594  call oasis_debug_enter(subname)
595 
596  lsize = 1
597  lpebcast = 0
598  if (present(pebcast)) lpebcast = pebcast
599 
600  call mpi_bcast(vec,lsize,mpi_integer,lpebcast,comm,ierr)
601  if (present(string)) then
602  call oasis_mpi_chkerr(ierr,subname//trim(string))
603  else
604  call oasis_mpi_chkerr(ierr,subname)
605  endif
606 
607  call oasis_debug_exit(subname)
608 
609 END SUBROUTINE oasis_mpi_bcasti0
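
! A minimal sketch of the broadcast wrapper and its optional arguments
! (the variable and communicator below are illustrative assumptions):
!
!    integer(ip_i4_p) :: nsteps
!    call oasis_mpi_bcast(nsteps, MPI_COMM_WORLD, 'bcast nsteps')             ! root is task 0
!    call oasis_mpi_bcast(nsteps, MPI_COMM_WORLD, 'bcast nsteps', pebcast=1)  ! root is task 1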
610 
611 !===============================================================================
612 !===============================================================================
613 
614 !> Broadcast a scalar logical
615 
616 SUBROUTINE oasis_mpi_bcastl0(vec,comm,string,pebcast)
618  IMPLICIT none
619 
620  !----- arguments ---
621  logical, intent(inout):: vec !< values to broadcast
622  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
623  character(*),optional,intent(in) :: string !< to identify caller
624  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
625 
626  !----- local ---
627  character(*),parameter :: subname = '(oasis_mpi_bcastl0)'
628  integer(ip_i4_p) :: ierr
629  integer(ip_i4_p) :: lsize
630  integer(ip_i4_p) :: lpebcast
631 
632 !-------------------------------------------------------------------------------
633 ! PURPOSE: Broadcast a logical
634 !-------------------------------------------------------------------------------
635 
636  call oasis_debug_enter(subname)
637 
638  lsize = 1
639  lpebcast = 0
640  if (present(pebcast)) lpebcast = pebcast
641 
642  call mpi_bcast(vec,lsize,mpi_logical,lpebcast,comm,ierr)
643  if (present(string)) then
644  call oasis_mpi_chkerr(ierr,subname//trim(string))
645  else
646  call oasis_mpi_chkerr(ierr,subname)
647  endif
648 
649  call oasis_debug_exit(subname)
650 
651 END SUBROUTINE oasis_mpi_bcastl0
652 
653 !===============================================================================
654 !===============================================================================
655 
656 !> Broadcast a character string
657 
658 SUBROUTINE oasis_mpi_bcastc0(vec,comm,string,pebcast)
660  IMPLICIT none
661 
662  !----- arguments ---
663  character(len=*), intent(inout):: vec !< values to broadcast
664  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
665  character(*),optional,intent(in) :: string !< to identify caller
666  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
667 
668  !----- local ---
669  character(*),parameter :: subname = '(oasis_mpi_bcastc0)'
670  integer(ip_i4_p) :: ierr
671  integer(ip_i4_p) :: lsize
672  integer(ip_i4_p) :: lpebcast
673 
674 !-------------------------------------------------------------------------------
675 ! PURPOSE: Broadcast a character string
676 !-------------------------------------------------------------------------------
677 
678  call oasis_debug_enter(subname)
679 
680  lsize = len(vec)
681  lpebcast = 0
682  if (present(pebcast)) lpebcast = pebcast
683 
684  call mpi_bcast(vec,lsize,mpi_character,lpebcast,comm,ierr)
685  if (present(string)) then
686  call oasis_mpi_chkerr(ierr,subname//trim(string))
687  else
688  call oasis_mpi_chkerr(ierr,subname)
689  endif
690 
691  call oasis_debug_exit(subname)
692 
693 END SUBROUTINE oasis_mpi_bcastc0
694 
695 !===============================================================================
696 !===============================================================================
697 
698 !> Broadcast an array of 1D character strings
699 
700 SUBROUTINE oasis_mpi_bcastc1(vec,comm,string,pebcast)
702  IMPLICIT none
703 
704  !----- arguments ---
705  character(len=*), intent(inout):: vec(:) !< values to broadcast
706  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
707  character(*),optional,intent(in) :: string !< to identify caller
708  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
709 
710  !----- local ---
711  character(*),parameter :: subname = '(oasis_mpi_bcastc1)'
712  integer(ip_i4_p) :: ierr
713  integer(ip_i4_p) :: lsize
714  integer(ip_i4_p) :: lpebcast
715 
716 !-------------------------------------------------------------------------------
717 ! PURPOSE: Broadcast a vector of character strings
718 !-------------------------------------------------------------------------------
719 
720  call oasis_debug_enter(subname)
721 
722  lsize = size(vec)*len(vec)
723  lpebcast = 0
724  if (present(pebcast)) lpebcast = pebcast
725 
726  call mpi_bcast(vec,lsize,mpi_character,lpebcast,comm,ierr)
727  if (present(string)) then
728  call oasis_mpi_chkerr(ierr,subname//trim(string))
729  else
730  call oasis_mpi_chkerr(ierr,subname)
731  endif
732 
733  call oasis_debug_exit(subname)
734 
735 END SUBROUTINE oasis_mpi_bcastc1
736 
737 !===============================================================================
738 !===============================================================================
739 
740 !> Broadcast a scalar double
741 
742 SUBROUTINE oasis_mpi_bcastr0(vec,comm,string,pebcast)
744  IMPLICIT none
745 
746  !----- arguments ---
747  real(ip_double_p), intent(inout):: vec !< values to broadcast
748  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
749  character(*),optional,intent(in) :: string !< to identify caller
750  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
751 
752  !----- local ---
753  character(*),parameter :: subname = '(oasis_mpi_bcastr0)'
754  integer(ip_i4_p) :: ierr
755  integer(ip_i4_p) :: lsize
756  integer(ip_i4_p) :: lpebcast
757 
758 !-------------------------------------------------------------------------------
759 ! PURPOSE: Broadcast a real
760 !-------------------------------------------------------------------------------
761 
762  call oasis_debug_enter(subname)
763 
764  lsize = 1
765  lpebcast = 0
766  if (present(pebcast)) lpebcast = pebcast
767 
768  call mpi_bcast(vec,lsize,mpi_real8,lpebcast,comm,ierr)
769  if (present(string)) then
770  call oasis_mpi_chkerr(ierr,subname//trim(string))
771  else
772  call oasis_mpi_chkerr(ierr,subname)
773  endif
774 
775  call oasis_debug_exit(subname)
776 
777 END SUBROUTINE oasis_mpi_bcastr0
778 
779 !===============================================================================
780 !===============================================================================
781 
782 !> Broadcast an array of 1D integers
783 
784 SUBROUTINE oasis_mpi_bcasti1(vec,comm,string,pebcast)
786  IMPLICIT none
787 
788  !----- arguments ---
789  integer(ip_i4_p), intent(inout):: vec(:) !< values to broadcast
790  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
791  character(*),optional,intent(in) :: string !< to identify caller
792  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
793 
794  !----- local ---
795  character(*),parameter :: subname = '(oasis_mpi_bcasti1)'
796  integer(ip_i4_p) :: ierr
797  integer(ip_i4_p) :: lsize
798  integer(ip_i4_p) :: lpebcast
799 
800 !-------------------------------------------------------------------------------
801 ! PURPOSE: Broadcast a vector of integers
802 !-------------------------------------------------------------------------------
803 
804  call oasis_debug_enter(subname)
805 
806  lsize = size(vec)
807  lpebcast = 0
808  if (present(pebcast)) lpebcast = pebcast
809 
810  call mpi_bcast(vec,lsize,mpi_integer,lpebcast,comm,ierr)
811  if (present(string)) then
812  call oasis_mpi_chkerr(ierr,subname//trim(string))
813  else
814  call oasis_mpi_chkerr(ierr,subname)
815  endif
816 
817  call oasis_debug_exit(subname)
818 
819 END SUBROUTINE oasis_mpi_bcasti1
820 
821 !===============================================================================
822 !===============================================================================
823 
824 !> Broadcast an array of 1D logicals
825 
826 SUBROUTINE oasis_mpi_bcastl1(vec,comm,string,pebcast)
828  IMPLICIT none
829 
830  !----- arguments ---
831  logical, intent(inout):: vec(:) !< values to broadcast
832  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
833  character(*),optional,intent(in) :: string !< to identify caller
834  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
835 
836  !----- local ---
837  character(*),parameter :: subname = '(oasis_mpi_bcastl1)'
838  integer(ip_i4_p) :: ierr
839  integer(ip_i4_p) :: lsize
840  integer(ip_i4_p) :: lpebcast
841 
842 !-------------------------------------------------------------------------------
843 ! PURPOSE: Broadcast a vector of logicals
844 !-------------------------------------------------------------------------------
845 
846  call oasis_debug_enter(subname)
847 
848  lsize = size(vec)
849  lpebcast = 0
850  if (present(pebcast)) lpebcast = pebcast
851 
852  call mpi_bcast(vec,lsize,mpi_logical,lpebcast,comm,ierr)
853  if (present(string)) then
854  call oasis_mpi_chkerr(ierr,subname//trim(string))
855  else
856  call oasis_mpi_chkerr(ierr,subname)
857  endif
858 
859  call oasis_debug_exit(subname)
860 
861 END SUBROUTINE oasis_mpi_bcastl1
862 
863 !===============================================================================
864 !===============================================================================
865 
866 !> Broadcast an array of 1D doubles
867 
868 SUBROUTINE oasis_mpi_bcastr1(vec,comm,string,pebcast)
870  IMPLICIT none
871 
872  !----- arguments ---
873  real(ip_double_p), intent(inout):: vec(:) !< values to broadcast
874  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
875  character(*),optional,intent(in) :: string !< to identify caller
876  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
877 
878  !----- local ---
879  character(*),parameter :: subname = '(oasis_mpi_bcastr1)'
880  integer(ip_i4_p) :: ierr
881  integer(ip_i4_p) :: lsize
882  integer(ip_i4_p) :: lpebcast
883 
884 !-------------------------------------------------------------------------------
885 ! PURPOSE: Broadcast a vector of reals
886 !-------------------------------------------------------------------------------
887 
888  call oasis_debug_enter(subname)
889 
890  lsize = size(vec)
891  lpebcast = 0
892  if (present(pebcast)) lpebcast = pebcast
893 
894  call mpi_bcast(vec,lsize,mpi_real8,lpebcast,comm,ierr)
895  if (present(string)) then
896  call oasis_mpi_chkerr(ierr,subname//trim(string))
897  else
898  call oasis_mpi_chkerr(ierr,subname)
899  endif
900 
901  call oasis_debug_exit(subname)
902 
903 END SUBROUTINE oasis_mpi_bcastr1
904 
905 !===============================================================================
906 !===============================================================================
907 
908 !> Broadcast an array of 2D doubles
909 
910 SUBROUTINE oasis_mpi_bcastr2(arr,comm,string,pebcast)
912  IMPLICIT none
913 
914  !----- arguments -----
915  real(ip_double_p), intent(inout):: arr(:,:) !< values to broadcast
916  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
917  character(*),optional,intent(in) :: string !< to identify caller
918  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
919 
920  !----- local -----
921  integer(ip_i4_p) :: ierr
922  integer(ip_i4_p) :: lsize
923  integer(ip_i4_p) :: lpebcast
924 
925  !----- formats -----
926  character(*),parameter :: subname = '(oasis_mpi_bcastr2)'
927 
928 !-------------------------------------------------------------------------------
929 ! PURPOSE: Broadcast a 2d array of reals
930 !-------------------------------------------------------------------------------
931 
932  call oasis_debug_enter(subname)
933 
934  lsize = size(arr)
935  lpebcast = 0
936  if (present(pebcast)) lpebcast = pebcast
937 
938  call mpi_bcast(arr,lsize,mpi_real8,lpebcast,comm,ierr)
939  if (present(string)) then
940  call oasis_mpi_chkerr(ierr,subname//trim(string))
941  else
942  call oasis_mpi_chkerr(ierr,subname)
943  endif
944 
945  call oasis_debug_exit(subname)
946 
947 END SUBROUTINE oasis_mpi_bcastr2
948 
949 !===============================================================================
950 !===============================================================================
951 
952 !> Broadcast an array of 2D integers
953 
954 SUBROUTINE oasis_mpi_bcasti2(arr,comm,string,pebcast)
956  IMPLICIT none
957 
958  !----- arguments -----
959  integer, intent(inout):: arr(:,:) !< values to broadcast
960  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
961  character(*),optional,intent(in) :: string !< to identify caller
962  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
963 
964  !----- local -----
965  integer(ip_i4_p) :: ierr
966  integer(ip_i4_p) :: lsize
967  integer(ip_i4_p) :: lpebcast
968 
969  !----- formats -----
970  character(*),parameter :: subname = '(oasis_mpi_bcasti2)'
971 
972 !-------------------------------------------------------------------------------
973 ! PURPOSE: Broadcast a 2d array of integers
974 !-------------------------------------------------------------------------------
975 
976  call oasis_debug_enter(subname)
977 
978  lsize = size(arr)
979  lpebcast = 0
980  if (present(pebcast)) lpebcast = pebcast
981 
982  call mpi_bcast(arr,lsize,mpi_integer,lpebcast,comm,ierr)
983  if (present(string)) then
984  call oasis_mpi_chkerr(ierr,subname//trim(string))
985  else
986  call oasis_mpi_chkerr(ierr,subname)
987  endif
988 
989  call oasis_debug_exit(subname)
990 
991 END SUBROUTINE oasis_mpi_bcasti2
992 
993 !===============================================================================
994 !===============================================================================
995 
996 !> Broadcast an array of 3D doubles
997 
998 SUBROUTINE oasis_mpi_bcastr3(arr,comm,string,pebcast)
1000  IMPLICIT none
1001 
1002  !----- arguments -----
1003  real(ip_double_p), intent(inout):: arr(:,:,:) !< values to broadcast
1004  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1005  character(*),optional,intent(in) :: string !< to identify caller
1006  integer(ip_i4_p), optional, intent(in) :: pebcast !< bcast pe, default is task 0
1007 
1008  !----- local -----
1009  integer(ip_i4_p) :: ierr
1010  integer(ip_i4_p) :: lsize
1011  integer(ip_i4_p) :: lpebcast
1012 
1013  !----- formats -----
1014  character(*),parameter :: subname = '(oasis_mpi_bcastr3)'
1015 
1016 !-------------------------------------------------------------------------------
1017 ! PURPOSE: Broadcast a 3d array of reals
1018 !-------------------------------------------------------------------------------
1019 
1020  call oasis_debug_enter(subname)
1021 
1022  lsize = size(arr)
1023  lpebcast = 0
1024  if (present(pebcast)) lpebcast = pebcast
1025 
1026  call mpi_bcast(arr,lsize,mpi_real8,lpebcast,comm,ierr)
1027  if (present(string)) then
1028  call oasis_mpi_chkerr(ierr,subname//trim(string))
1029  else
1030  call oasis_mpi_chkerr(ierr,subname)
1031  endif
1032 
1033  call oasis_debug_exit(subname)
1034 
1035 END SUBROUTINE oasis_mpi_bcastr3
1036 
1037 !===============================================================================
1038 !===============================================================================
1039 
1040 !> Initialize variables for oasis_mpi_gatherv and oasis_mpi_scatterv
1041 
1042 !> This method initializes glob1DArr, globSize, and displs for use
1043 !> in the oasis_mpi_gatherv and oasis_mpi_scatterv routines. locArr is the
1044 !> distributed array to gather from or scatter to.
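!>
!> A minimal calling sketch (the local array, root task and communicator are
!> illustrative assumptions; glob1DArr, globSize and displs are allocated
!> inside this routine):
!> @code
!>   real(ip_double_p)          :: locarr(100)
!>   real(ip_double_p), pointer :: glob1darr(:)
!>   integer(ip_i4_p),  pointer :: globsize(:), displs(:)
!>   call oasis_mpi_gathscatvinit(MPI_COMM_WORLD, 0, locarr, glob1darr, &
!>                                globsize, displs, 'init gath/scat')
!> @endcode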
1045 
1046 SUBROUTINE oasis_mpi_gathscatvinitr1(comm, rootid, locArr, glob1DArr, globSize, &
1047  displs, string )
1049  IMPLICIT none
1050 
1051  !----- arguments -----
1052  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1053  integer(ip_i4_p), intent(in) :: rootid !< MPI task to gather/scatter on
1054  real(ip_double_p),intent(in) :: locArr(:) !< Local array of distributed data
1055  real(ip_double_p),pointer :: glob1DArr(:) !< Global 1D array of gathered data
1056  integer(ip_i4_p), pointer :: globSize(:) !< Size of each distributed piece
1057  integer(ip_i4_p), pointer :: displs(:) !< Displacements for receive
1058  character(*),optional,intent(in):: string !< to identify caller
1059 
1060  !----- local -----
1061  integer(ip_i4_p) :: npes ! Number of MPI tasks
1062  integer(ip_i4_p) :: locSize ! Size of local distributed data
1063  integer(ip_i4_p), pointer :: sendSize(:) ! Size to send for initial gather
1064  integer(ip_i4_p) :: i ! Index
1065  integer(ip_i4_p) :: rank ! Rank of this MPI task
1066  integer(ip_i4_p) :: nSize ! Maximum size to send
1067  integer(ip_i4_p) :: ierr ! Error code
1068  integer(ip_i4_p) :: nSiz1D ! Size of 1D global array
1069  integer(ip_i4_p) :: maxSize ! Maximum size
1070 
1071  !----- formats -----
1072  character(*),parameter :: subname = '(oasis_mpi_gathScatvInitr1)'
1073 
1074 !-------------------------------------------------------------------------------
1075 ! PURPOSE: Setup arrays for a gatherv/scatterv operation
1076 !-------------------------------------------------------------------------------
1077 
1078  call oasis_debug_enter(subname)
1079 
1080  locsize = size(locarr)
1081  call oasis_mpi_commsize( comm, npes )
1082  call oasis_mpi_commrank( comm, rank )
1083  allocate( globsize(npes) )
1084  !
1085  ! --- Gather the send global sizes from each MPI task -----------------------
1086  !
1087  allocate( sendsize(npes) )
1088  sendsize(:) = 1
1089  globsize(:) = 1
1090  call mpi_gather( locsize, 1, mpi_integer, globsize, sendsize, &
1091  mpi_integer, rootid, comm, ierr )
1092  if (present(string)) then
1093  call oasis_mpi_chkerr(ierr,subname//trim(string))
1094  else
1095  call oasis_mpi_chkerr(ierr,subname)
1096  endif
1097  deallocate( sendsize )
1098  !
1099  ! --- Prepare the displacement and allocate arrays -------------------------
1100  !
1101  allocate( displs(npes) )
1102  displs(1) = 0
1103  if ( rootid /= rank )then
1104  maxsize = 1
1105  globsize = 1
1106  else
1107  maxsize = maxval(globsize)
1108  end if
1109  nsiz1d = min(maxsize,globsize(1))
1110  do i = 2, npes
1111  nsize = min(maxsize,globsize(i-1))
1112  displs(i) = displs(i-1) + nsize
1113  nsiz1d = nsiz1d + min(maxsize,globsize(i))
1114  end do
1115  allocate( glob1darr(nsiz1d) )
1116  !----- Do some error checking for the root task arrays computed ----
1117  if ( rootid == rank )then
1118  if ( nsiz1d /= sum(globsize) ) &
1119  call oasis_mpi_abort( subname//" : Error, size of global array not right" )
1120  if ( any(displs < 0) .or. any(displs >= nsiz1d) ) &
1121  call oasis_mpi_abort( subname//" : Error, displacement array not right" )
1122  if ( (displs(npes)+globsize(npes)) /= nsiz1d ) &
1123  call oasis_mpi_abort( subname//" : Error, displacement array values too big" )
1124  end if
1125 
1126  call oasis_debug_exit(subname)
1127 
1128 END SUBROUTINE oasis_mpi_gathscatvinitr1
1129 
1130 !===============================================================================
1131 !===============================================================================
1132 
1133 !> Gather a vector of distributed data to a rootid
1134 
1135 !> This method passes in glob1DArr, globSize, and displs computed
1136 !> in the oasis_mpi_gathscatvinit routine and uses that information to
1137 !> gather the locArr into the glob1Darr on processor rootid in communicator
1138 !> comm.
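!>
!> A minimal calling sketch, assuming glob1DArr, globSize and displs were
!> prepared by oasis_mpi_gathscatvinit (variable names are illustrative):
!> @code
!>   call oasis_mpi_gatherv(locarr, size(locarr), glob1darr, globsize, &
!>                          displs, 0, MPI_COMM_WORLD, 'gather locarr')
!> @endcode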
1139 
1140 SUBROUTINE oasis_mpi_gathervr1(locarr, locSize, glob1DArr, globSize, displs, rootid, &
1141  comm, string )
1143  IMPLICIT none
1144 
1145  !----- arguments -----
1146  real(ip_double_p),intent(in) :: locArr(:) !< Local array
1147  real(ip_double_p),intent(inout) :: glob1DArr(:) !< Global 1D array to receive in on
1148  integer(ip_i4_p), intent(in) :: locSize !< Number to send from this PE
1149  integer(ip_i4_p), intent(in) :: globSize(:) !< Number to receive from each PE
1150  integer(ip_i4_p), intent(in) :: displs(:) !< Displacements for receives
1151  integer(ip_i4_p), intent(in) :: rootid !< MPI task to gather on
1152  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1153  character(*),optional,intent(in):: string !< to identify caller
1154 
1155  !----- local -----
1156  integer(ip_i4_p) :: ierr ! Error code
1157 
1158  !----- formats -----
1159  character(*),parameter :: subname = '(oasis_mpi_gathervr1)'
1160 
1161 !-------------------------------------------------------------------------------
1162 ! PURPOSE: Gather a 1D array of reals
1163 !-------------------------------------------------------------------------------
1164 
1165  call oasis_debug_enter(subname)
1166 
1167  call mpi_gatherv( locarr, locsize, mpi_real8, glob1darr, globsize, displs, &
1168  mpi_real8, rootid, comm, ierr )
1169  if (present(string)) then
1170  call oasis_mpi_chkerr(ierr,subname//trim(string))
1171  else
1172  call oasis_mpi_chkerr(ierr,subname)
1173  endif
1174 
1175  call oasis_debug_exit(subname)
1176 
1177 END SUBROUTINE oasis_mpi_gathervr1
1178 
1179 !===============================================================================
1180 !===============================================================================
1181 
1182 !> Scatter a vector of global data from a rootid
1183 
1184 !> This method passes in glob1DArr, globSize, and displs computed
1185 !> in the oasis_mpi_gathscatvinit routine and uses that information to
1186 !> scatter glob1Darr on processor rootid in communicator comm to locarr
1187 !> on other processors.
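!>
!> A minimal calling sketch, assuming the same glob1DArr, globSize and displs
!> prepared by oasis_mpi_gathscatvinit (variable names are illustrative):
!> @code
!>   call oasis_mpi_scatterv(locarr, size(locarr), glob1darr, globsize, &
!>                           displs, 0, MPI_COMM_WORLD, 'scatter to locarr')
!> @endcode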
1188 
1189 SUBROUTINE oasis_mpi_scattervr1(locarr, locSize, glob1Darr, globSize, displs, rootid, &
1190  comm, string )
1192  IMPLICIT none
1193 
1194  !----- arguments -----
1195  real(ip_double_p),intent(out) :: locarr(:) !< Local array
1196  real(ip_double_p),intent(in) :: glob1Darr(:) !< Global 1D array to send from
1197  integer(ip_i4_p), intent(in) :: locSize !< Number to receive this PE
1198  integer(ip_i4_p), intent(in) :: globSize(:) !< Number to send to each PE
1199  integer(ip_i4_p), intent(in) :: displs(:) !< Displacements for send
1200  integer(ip_i4_p), intent(in) :: rootid !< MPI task to scatter on
1201  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1202  character(*),optional,intent(in):: string !< to identify caller
1203 
1204  !----- local -----
1205  integer(ip_i4_p) :: ierr ! Error code
1206 
1207  !----- formats -----
1208  character(*),parameter :: subname = '(oasis_mpi_scattervr1)'
1209 
1210 !-------------------------------------------------------------------------------
1211 ! PURPOSE: Scatter a 1D array of reals
1212 !-------------------------------------------------------------------------------
1213 
1214  call oasis_debug_enter(subname)
1215 
1216  call mpi_scatterv( glob1darr, globsize, displs, mpi_real8, locarr, locsize, &
1217  mpi_real8, rootid, comm, ierr )
1218  if (present(string)) then
1219  call oasis_mpi_chkerr(ierr,subname//trim(string))
1220  else
1221  call oasis_mpi_chkerr(ierr,subname)
1222  endif
1223 
1224  call oasis_debug_exit(subname)
1225 
1226 END SUBROUTINE oasis_mpi_scattervr1
1227 
1228 
1229 !===============================================================================
1230 !===============================================================================
1231 
1232 !> Compute a global Sum for a scalar integer
1233 
1234 SUBROUTINE oasis_mpi_sumi0(lvec,gvec,comm,string,all)
1236  IMPLICIT none
1237 
1238  !----- arguments ---
1239  integer(ip_i4_p), intent(in) :: lvec !< local values
1240  integer(ip_i4_p), intent(out):: gvec !< global values
1241  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1242  character(*),optional,intent(in) :: string !< to identify caller
1243  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1244 
1245  !----- local ---
1246  character(*),parameter :: subname = '(oasis_mpi_sumi0)'
1247  logical :: lall
1248  character(len=256) :: lstring
1249  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1250  integer(ip_i4_p) :: lsize
1251  integer(ip_i4_p) :: gsize
1252  integer(ip_i4_p) :: ierr
1253 
1254 !-------------------------------------------------------------------------------
1255 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1256 ! already computed
1257 !-------------------------------------------------------------------------------
1258 
1259  call oasis_debug_enter(subname)
1260 
1261  reduce_type = mpi_sum
1262  if (present(all)) then
1263  lall = all
1264  else
1265  lall = .false.
1266  endif
1267  if (present(string)) then
1268  lstring = trim(subname)//":"//trim(string)
1269  else
1270  lstring = trim(subname)
1271  endif
1272 
1273  lsize = 1
1274  gsize = 1
1275 
1276  if (lsize /= gsize) then
1277  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible")
1278  endif
1279 
1280  if (lall) then
1281  call mpi_allreduce(lvec,gvec,gsize,mpi_integer,reduce_type,comm,ierr)
1282  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1283  else
1284  call mpi_reduce(lvec,gvec,gsize,mpi_integer,reduce_type,0,comm,ierr)
1285  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1286  endif
1287 
1288  call oasis_debug_exit(subname)
1289 
1290 END SUBROUTINE oasis_mpi_sumi0
1291 
1292 !===============================================================================
1293 !===============================================================================
1294 
1295 !> Compute a 1D array of global sums for an array of 1D integers
1296 
1297 !> This sums an array of local integers to an array of summed integers.
1298 !> This does not reduce the array to a scalar.
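!>
!> A minimal calling sketch (the arrays and communicator are illustrative
!> assumptions); by default the element-wise sums land on task 0 only:
!> @code
!>   integer(ip_i4_p) :: lcounts(3), gcounts(3)
!>   call oasis_mpi_sum(lcounts, gcounts, MPI_COMM_WORLD, 'sum counts')
!> @endcode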
1299 
1300 SUBROUTINE oasis_mpi_sumi1(lvec,gvec,comm,string,all)
1302  IMPLICIT none
1303 
1304  !----- arguments ---
1305  integer(ip_i4_p), intent(in) :: lvec(:) !< local values
1306  integer(ip_i4_p), intent(out):: gvec(:) !< global values
1307  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1308  character(*),optional,intent(in) :: string !< to identify caller
1309  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1310 
1311  !----- local ---
1312  character(*),parameter :: subname = '(oasis_mpi_sumi1)'
1313  logical :: lall
1314  character(len=256) :: lstring
1315  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1316  integer(ip_i4_p) :: lsize
1317  integer(ip_i4_p) :: gsize
1318  integer(ip_i4_p) :: ierr
1319 
1320 !-------------------------------------------------------------------------------
1321 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1322 ! already computed
1323 !-------------------------------------------------------------------------------
1324 
1325  call oasis_debug_enter(subname)
1326 
1327  reduce_type = mpi_sum
1328  if (present(all)) then
1329  lall = all
1330  else
1331  lall = .false.
1332  endif
1333  if (present(string)) then
1334  lstring = trim(subname)//":"//trim(string)
1335  else
1336  lstring = trim(subname)
1337  endif
1338 
1339  lsize = size(lvec)
1340  gsize = size(gvec)
1341 
1342  if (lsize /= gsize) then
1343  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible")
1344  endif
1345 
1346  if (lall) then
1347  call mpi_allreduce(lvec,gvec,gsize,mpi_integer,reduce_type,comm,ierr)
1348  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1349  else
1350  call mpi_reduce(lvec,gvec,gsize,mpi_integer,reduce_type,0,comm,ierr)
1351  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1352  endif
1353 
1354  call oasis_debug_exit(subname)
1355 
1356 END SUBROUTINE oasis_mpi_sumi1
1357 
1358 !===============================================================================
1359 !===============================================================================
1360 
1361 !> Compute a global sum for a scalar 8 byte integer
1362 
1363 SUBROUTINE oasis_mpi_sumb0(lvec,gvec,comm,string,all)
1365  IMPLICIT none
1366 
1367  !----- arguments ---
1368  integer(ip_i8_p), intent(in) :: lvec !< local values
1369  integer(ip_i8_p), intent(out):: gvec !< global values
1370  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1371  character(*),optional,intent(in) :: string !< to identify caller
1372  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1373 
1374  !----- local ---
1375  character(*),parameter :: subname = '(oasis_mpi_sumb0)'
1376  logical :: lall
1377  character(len=256) :: lstring
1378  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1379  integer(ip_i4_p) :: lsize
1380  integer(ip_i4_p) :: gsize
1381  integer(ip_i4_p) :: ierr
1382 
1383 !-------------------------------------------------------------------------------
1384 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1385 ! already computed
1386 !-------------------------------------------------------------------------------
1387 
1388  call oasis_debug_enter(subname)
1389 
1390  reduce_type = mpi_sum
1391  if (present(all)) then
1392  lall = all
1393  else
1394  lall = .false.
1395  endif
1396  if (present(string)) then
1397  lstring = trim(subname)//":"//trim(string)
1398  else
1399  lstring = trim(subname)
1400  endif
1401 
1402  lsize = 1
1403  gsize = 1
1404 
1405  if (lsize /= gsize) then
1406  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible")
1407  endif
1408 
1409  if (lall) then
1410  call mpi_allreduce(lvec,gvec,gsize,mpi_integer8,reduce_type,comm,ierr)
1411  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1412  else
1413  call mpi_reduce(lvec,gvec,gsize,mpi_integer8,reduce_type,0,comm,ierr)
1414  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1415  endif
1416 
1417  call oasis_debug_exit(subname)
1418 
1419 END SUBROUTINE oasis_mpi_sumb0
1420 
1421 !===============================================================================
1422 !===============================================================================
1423 
1424 !> Compute a 1D array of global sums for an array of 1D 8 byte integers
1425 
1426 !> This sums an array of local integers to an array of summed integers.
1427 !> This does not reduce the array to a scalar.
1428 
1429 SUBROUTINE oasis_mpi_sumb1(lvec,gvec,comm,string,all)
1431  IMPLICIT none
1432 
1433  !----- arguments ---
1434  integer(ip_i8_p), intent(in) :: lvec(:) !< local values
1435  integer(ip_i8_p), intent(out):: gvec(:) !< global values
1436  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1437  character(*),optional,intent(in) :: string !< to identify caller
1438  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1439 
1440  !----- local ---
1441  character(*),parameter :: subname = '(oasis_mpi_sumb1)'
1442  logical :: lall
1443  character(len=256) :: lstring
1444  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1445  integer(ip_i4_p) :: lsize
1446  integer(ip_i4_p) :: gsize
1447  integer(ip_i4_p) :: ierr
1448 
1449 !-------------------------------------------------------------------------------
1450 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1451 ! already computed
1452 !-------------------------------------------------------------------------------
1453 
1454  call oasis_debug_enter(subname)
1455 
1456  reduce_type = mpi_sum
1457  if (present(all)) then
1458  lall = all
1459  else
1460  lall = .false.
1461  endif
1462  if (present(string)) then
1463  lstring = trim(subname)//":"//trim(string)
1464  else
1465  lstring = trim(subname)
1466  endif
1467 
1468  lsize = size(lvec)
1469  gsize = size(gvec)
1470 
1471  if (lsize /= gsize) then
1472  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible")
1473  endif
1474 
1475  if (lall) then
1476  call mpi_allreduce(lvec,gvec,gsize,mpi_integer8,reduce_type,comm,ierr)
1477  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1478  else
1479  call mpi_reduce(lvec,gvec,gsize,mpi_integer8,reduce_type,0,comm,ierr)
1480  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1481  endif
1482 
1483  call oasis_debug_exit(subname)
1484 
1485 END SUBROUTINE oasis_mpi_sumb1
1486 
1487 !===============================================================================
1488 !===============================================================================
1489 
1490 !> Compute a global sum for a scalar double
1491 
1492 SUBROUTINE oasis_mpi_sumr0(lvec,gvec,comm,string,all)
1494  IMPLICIT none
1495 
1496  !----- arguments ---
1497  real(ip_double_p), intent(in) :: lvec !< local values
1498  real(ip_double_p), intent(out):: gvec !< global values
1499  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1500  character(*),optional,intent(in) :: string !< to identify caller
1501  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1502 
1503  !----- local ---
1504  character(*),parameter :: subname = '(oasis_mpi_sumr0)'
1505  logical :: lall
1506  character(len=256) :: lstring
1507  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1508  integer(ip_i4_p) :: lsize
1509  integer(ip_i4_p) :: gsize
1510  integer(ip_i4_p) :: ierr
1511 
1512 !-------------------------------------------------------------------------------
1513 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1514 ! already computed
1515 !-------------------------------------------------------------------------------
1516 
1517  call oasis_debug_enter(subname)
1518 
1519  reduce_type = mpi_sum
1520  if (present(all)) then
1521  lall = all
1522  else
1523  lall = .false.
1524  endif
1525  if (present(string)) then
1526  lstring = trim(subname)//":"//trim(string)
1527  else
1528  lstring = trim(subname)
1529  endif
1530 
1531  lsize = 1
1532  gsize = 1
1533 
1534  if (lsize /= gsize) then
1535  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible")
1536  endif
1537 
1538  if (lall) then
1539  call mpi_allreduce(lvec,gvec,gsize,mpi_real8,reduce_type,comm,ierr)
1540  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1541  else
1542  call mpi_reduce(lvec,gvec,gsize,mpi_real8,reduce_type,0,comm,ierr)
1543  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1544  endif
1545 
1546  call oasis_debug_exit(subname)
1547 
1548 END SUBROUTINE oasis_mpi_sumr0
1549 
1550 !===============================================================================
1551 !===============================================================================
1552 
1553 !> Compute a 1D array of global sums for an array of 1D doubles
1554 
1555 !> This sums an array of local doubles to an array of summed doubles.
1556 !> This does not reduce the array to a scalar.
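!>
!> A minimal calling sketch of the optional all argument, which switches the
!> reduce onto task 0 into an allreduce (names are illustrative assumptions):
!> @code
!>   real(ip_double_p) :: lnorm(2), gnorm(2)
!>   call oasis_mpi_sum(lnorm, gnorm, MPI_COMM_WORLD, 'sum norms', all=.true.)
!> @endcode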
1557 
1558 SUBROUTINE oasis_mpi_sumr1(lvec,gvec,comm,string,all)
1560  IMPLICIT none
1561 
1562  !----- arguments ---
1563  real(ip_double_p), intent(in) :: lvec(:) !< local values
1564  real(ip_double_p), intent(out):: gvec(:) !< global values
1565  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1566  character(*),optional,intent(in) :: string !< to identify caller
1567  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1568 
1569  !----- local ---
1570  character(*),parameter :: subname = '(oasis_mpi_sumr1)'
1571  logical :: lall
1572  character(len=256) :: lstring
1573  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1574  integer(ip_i4_p) :: lsize
1575  integer(ip_i4_p) :: gsize
1576  integer(ip_i4_p) :: ierr
1577 
1578 !-------------------------------------------------------------------------------
1579 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1580 ! already computed
1581 !-------------------------------------------------------------------------------
1582 
1583  call oasis_debug_enter(subname)
1584 
1585  reduce_type = mpi_sum
1586  if (present(all)) then
1587  lall = all
1588  else
1589  lall = .false.
1590  endif
1591  if (present(string)) then
1592  lstring = trim(subname)//":"//trim(string)
1593  else
1594  lstring = trim(subname)
1595  endif
1596 
1597  lsize = size(lvec)
1598  gsize = size(gvec)
1599 
1600  if (lsize /= gsize) then
1601  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible")
1602  endif
1603 
1604  if (lall) then
1605  call mpi_allreduce(lvec,gvec,gsize,mpi_real8,reduce_type,comm,ierr)
1606  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1607  else
1608  call mpi_reduce(lvec,gvec,gsize,mpi_real8,reduce_type,0,comm,ierr)
1609  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1610  endif
1611 
1612  call oasis_debug_exit(subname)
1613 
1614 END SUBROUTINE oasis_mpi_sumr1
1615 
1616 !===============================================================================
1617 !===============================================================================
1618 
1619 !> Compute a 2D array of global sums for an array of 2D doubles
1620 
1621 !> This sums an array of local doubles to an array of summed doubles.
1622 !> This does not reduce the array to a scalar.
1623 
1624 SUBROUTINE oasis_mpi_sumr2(lvec,gvec,comm,string,all)
1626  IMPLICIT none
1627 
1628  !----- arguments ---
1629  real(ip_double_p), intent(in) :: lvec(:,:)!< local values
1630  real(ip_double_p), intent(out):: gvec(:,:)!< global values
1631  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1632  character(*),optional,intent(in) :: string !< to identify caller
1633  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1634 
1635  !----- local ---
1636  character(*),parameter :: subname = '(oasis_mpi_sumr2)'
1637  logical :: lall
1638  character(len=256) :: lstring
1639  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1640  integer(ip_i4_p) :: lsize
1641  integer(ip_i4_p) :: gsize
1642  integer(ip_i4_p) :: ierr
1643 
1644 !-------------------------------------------------------------------------------
1645 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1646 ! already computed
1647 !-------------------------------------------------------------------------------
1648 
1649  call oasis_debug_enter(subname)
1650 
1651  reduce_type = mpi_sum
1652  if (present(all)) then
1653  lall = all
1654  else
1655  lall = .false.
1656  endif
1657  if (present(string)) then
1658  lstring = trim(subname)//":"//trim(string)
1659  else
1660  lstring = trim(subname)
1661  endif
1662 
1663  lsize = size(lvec)
1664  gsize = size(gvec)
1665 
1666  if (lsize /= gsize) then
1667  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
1668  endif
1669 
1670  if (lall) then
1671  call mpi_allreduce(lvec,gvec,gsize,mpi_real8,reduce_type,comm,ierr)
1672  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1673  else
1674  call mpi_reduce(lvec,gvec,gsize,mpi_real8,reduce_type,0,comm,ierr)
1675  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1676  endif
1677 
1678  call oasis_debug_exit(subname)
1679 
1680 END SUBROUTINE oasis_mpi_sumr2
1681 
1682 !===============================================================================
1683 !===============================================================================
1684 
1685 !> Compute a 3D array of global sums for an array of 3D doubles
1686 
1687 !> This sums an array of local doubles to an array of summed doubles.
1688 !> This does not reduce the array to a scalar.
1689 
1690 SUBROUTINE oasis_mpi_sumr3(lvec,gvec,comm,string,all)
1692  IMPLICIT none
1693 
1694  !----- arguments ---
1695  real(ip_double_p), intent(in) :: lvec(:,:,:) !< local values
1696  real(ip_double_p), intent(out):: gvec(:,:,:) !< global values
1697  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1698  character(*),optional,intent(in) :: string !< to identify caller
1699  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1700 
1701  !----- local ---
1702  character(*),parameter :: subname = '(oasis_mpi_sumr3)'
1703  logical :: lall
1704  character(len=256) :: lstring
1705  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1706  integer(ip_i4_p) :: lsize
1707  integer(ip_i4_p) :: gsize
1708  integer(ip_i4_p) :: ierr
1709 
1710 !-------------------------------------------------------------------------------
1711 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1712 ! already computed
1713 !-------------------------------------------------------------------------------
1714 
1715  call oasis_debug_enter(subname)
1716 
1717  reduce_type = mpi_sum
1718  if (present(all)) then
1719  lall = all
1720  else
1721  lall = .false.
1722  endif
1723  if (present(string)) then
1724  lstring = trim(subname)//":"//trim(string)
1725  else
1726  lstring = trim(subname)
1727  endif
1728 
1729  lsize = size(lvec)
1730  gsize = size(gvec)
1731 
1732  if (lsize /= gsize) then
1733  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
1734  endif
1735 
1736  if (lall) then
1737  call mpi_allreduce(lvec,gvec,gsize,mpi_real8,reduce_type,comm,ierr)
1738  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1739  else
1740  call mpi_reduce(lvec,gvec,gsize,mpi_real8,reduce_type,0,comm,ierr)
1741  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1742  endif
1743 
1744  call oasis_debug_exit(subname)
1745 
1746 END SUBROUTINE oasis_mpi_sumr3
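! --- Usage sketch (illustrative only) ---
! The generic oasis_mpi_sum interface dispatches on array rank, so 2D and
! 3D fields are summed with the same call; "localcomm" and the extents
! nx, ny, nz below are placeholders:
!
!   real(ip_double_p) :: lfld(nx,ny,nz), gfld(nx,ny,nz)
!   call oasis_mpi_sum(lfld, gfld, localcomm, string='example_sum3d')
!
! Without all=.true. only task 0 holds the summed field on return.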
1747 
1748 !===============================================================================
1749 
1750 #ifndef __NO_16BYTE_REALS
1751 !===============================================================================
1752 
1753 !> Compute a global sum for a scalar quad
1754 
1755 SUBROUTINE oasis_mpi_sumq0(lvec,gvec,comm,string,all)
1757  IMPLICIT none
1758 
1759  !----- arguments ---
1760  real(ip_quad_p), intent(in) :: lvec !< local values
1761  real(ip_quad_p), intent(out):: gvec !< global values
1762  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1763  character(*),optional,intent(in) :: string !< to identify caller
1764  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1765 
1766  !----- local ---
1767  character(*),parameter :: subname = '(oasis_mpi_sumq0)'
1768  logical :: lall
1769  character(len=256) :: lstring
1770  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1771  integer(ip_i4_p) :: lsize
1772  integer(ip_i4_p) :: gsize
1773  integer(ip_i4_p) :: ierr
1774 
1775 !-------------------------------------------------------------------------------
1776 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1777 ! already computed
1778 !-------------------------------------------------------------------------------
1779 
1780  call oasis_debug_enter(subname)
1781 
1782  reduce_type = mpi_sum
1783  if (present(all)) then
1784  lall = all
1785  else
1786  lall = .false.
1787  endif
1788  if (present(string)) then
1789  lstring = trim(subname)//":"//trim(string)
1790  else
1791  lstring = trim(subname)
1792  endif
1793 
1794  lsize = 1
1795  gsize = 1
1796 
1797  if (lsize /= gsize) then
1798  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
1799  endif
1800 
1801  if (lall) then
1802  call mpi_allreduce(lvec,gvec,gsize,mpi_real16,reduce_type,comm,ierr)
1803  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1804  else
1805  call mpi_reduce(lvec,gvec,gsize,mpi_real16,reduce_type,0,comm,ierr)
1806  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1807  endif
1808 
1809  call oasis_debug_exit(subname)
1810 
1811 END SUBROUTINE oasis_mpi_sumq0
1812 
1813 !===============================================================================
1814 !===============================================================================
1815 
1816 !> Compute a 1D array of global sums for an array of 1D quads
1817 
1818 !> This sums an array of local quads to an array of summed quads.
1819 !> This does not reduce the array to a scalar.
1820 
1821 SUBROUTINE oasis_mpi_sumq1(lvec,gvec,comm,string,all)
1823  IMPLICIT none
1824 
1825  !----- arguments ---
1826  real(ip_quad_p), intent(in) :: lvec(:) !< local values
1827  real(ip_quad_p), intent(out):: gvec(:) !< global values
1828  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1829  character(*),optional,intent(in) :: string !< to identify caller
1830  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1831 
1832  !----- local ---
1833  character(*),parameter :: subname = '(oasis_mpi_sumq1)'
1834  logical :: lall
1835  character(len=256) :: lstring
1836  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1837  integer(ip_i4_p) :: lsize
1838  integer(ip_i4_p) :: gsize
1839  integer(ip_i4_p) :: ierr
1840 
1841 !-------------------------------------------------------------------------------
1842 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1843 ! already computed
1844 !-------------------------------------------------------------------------------
1845 
1846  call oasis_debug_enter(subname)
1847 
1848  reduce_type = mpi_sum
1849  if (present(all)) then
1850  lall = all
1851  else
1852  lall = .false.
1853  endif
1854  if (present(string)) then
1855  lstring = trim(subname)//":"//trim(string)
1856  else
1857  lstring = trim(subname)
1858  endif
1859 
1860  lsize = size(lvec)
1861  gsize = size(gvec)
1862 
1863  if (lsize /= gsize) then
1864  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
1865  endif
1866 
1867  if (lall) then
1868  call mpi_allreduce(lvec,gvec,gsize,mpi_real16,reduce_type,comm,ierr)
1869  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1870  else
1871  call mpi_reduce(lvec,gvec,gsize,mpi_real16,reduce_type,0,comm,ierr)
1872  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1873  endif
1874 
1875  call oasis_debug_exit(subname)
1876 
1877 END SUBROUTINE oasis_mpi_sumq1
1878 
1879 !===============================================================================
1880 !===============================================================================
1881 
1882 !> Compute a 2D array of global sums for an array of 2D quads
1883 
1884 !> This sums an array of local quads to an array of summed quads.
1885 !> This does not reduce the array to a scalar.
1886 
1887 SUBROUTINE oasis_mpi_sumq2(lvec,gvec,comm,string,all)
1889  IMPLICIT none
1890 
1891  !----- arguments ---
1892  real(ip_quad_p), intent(in) :: lvec(:,:)!< local values
1893  real(ip_quad_p), intent(out):: gvec(:,:)!< global values
1894  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1895  character(*),optional,intent(in) :: string !< to identify caller
1896  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1897 
1898  !----- local ---
1899  character(*),parameter :: subname = '(oasis_mpi_sumq2)'
1900  logical :: lall
1901  character(len=256) :: lstring
1902  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1903  integer(ip_i4_p) :: lsize
1904  integer(ip_i4_p) :: gsize
1905  integer(ip_i4_p) :: ierr
1906 
1907 !-------------------------------------------------------------------------------
1908 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1909 ! already computed
1910 !-------------------------------------------------------------------------------
1911 
1912  call oasis_debug_enter(subname)
1913 
1914  reduce_type = mpi_sum
1915  if (present(all)) then
1916  lall = all
1917  else
1918  lall = .false.
1919  endif
1920  if (present(string)) then
1921  lstring = trim(subname)//":"//trim(string)
1922  else
1923  lstring = trim(subname)
1924  endif
1925 
1926  lsize = size(lvec)
1927  gsize = size(gvec)
1928 
1929  if (lsize /= gsize) then
1930  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
1931  endif
1932 
1933  if (lall) then
1934  call mpi_allreduce(lvec,gvec,gsize,mpi_real16,reduce_type,comm,ierr)
1935  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
1936  else
1937  call mpi_reduce(lvec,gvec,gsize,mpi_real16,reduce_type,0,comm,ierr)
1938  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
1939  endif
1940 
1941  call oasis_debug_exit(subname)
1942 
1943 END SUBROUTINE oasis_mpi_sumq2
1944 
1945 !===============================================================================
1946 !===============================================================================
1947 
1948 !> Compute a 3D array of global sums for an array of 3D quads
1949 
1950 !> This sums an array of local quads to an array of summed quads.
1951 !> This does not reduce the array to a scalar.
1952 
1953 SUBROUTINE oasis_mpi_sumq3(lvec,gvec,comm,string,all)
1955  IMPLICIT none
1956 
1957  !----- arguments ---
1958  real(ip_quad_p), intent(in) :: lvec(:,:,:) !< local values
1959  real(ip_quad_p), intent(out):: gvec(:,:,:) !< global values
1960  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
1961  character(*),optional,intent(in) :: string !< to identify caller
1962  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
1963 
1964  !----- local ---
1965  character(*),parameter :: subname = '(oasis_mpi_sumq3)'
1966  logical :: lall
1967  character(len=256) :: lstring
1968  integer(ip_i4_p) :: reduce_type ! mpi reduction type
1969  integer(ip_i4_p) :: lsize
1970  integer(ip_i4_p) :: gsize
1971  integer(ip_i4_p) :: ierr
1972 
1973 !-------------------------------------------------------------------------------
1974 ! PURPOSE: Finds sum of a distributed vector of values, assume local sum
1975 ! already computed
1976 !-------------------------------------------------------------------------------
1977 
1978  call oasis_debug_enter(subname)
1979 
1980  reduce_type = mpi_sum
1981  if (present(all)) then
1982  lall = all
1983  else
1984  lall = .false.
1985  endif
1986  if (present(string)) then
1987  lstring = trim(subname)//":"//trim(string)
1988  else
1989  lstring = trim(subname)
1990  endif
1991 
1992  lsize = size(lvec)
1993  gsize = size(gvec)
1994 
1995  if (lsize /= gsize) then
1996  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
1997  endif
1998 
1999  if (lall) then
2000  call mpi_allreduce(lvec,gvec,gsize,mpi_real16,reduce_type,comm,ierr)
2001  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2002  else
2003  call mpi_reduce(lvec,gvec,gsize,mpi_real16,reduce_type,0,comm,ierr)
2004  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2005  endif
2006 
2007  call oasis_debug_exit(subname)
2008 
2009 END SUBROUTINE oasis_mpi_sumq3
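! --- Usage sketch (illustrative only) ---
! When the library is built without __NO_16BYTE_REALS, quad-precision sums
! go through the same generic interface; "localcomm" is assumed and
! ip_quad_p is the quad kind from the OASIS kinds module:
!
!   real(ip_quad_p) :: lq(5), gq(5)
!   call oasis_mpi_sum(lq, gq, localcomm, string='example_qsum', all=.true.)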
2010 
2011 !===============================================================================
2012 ! __NO_16BYTE_REALS
2013 #endif
2014 !===============================================================================
2015 
2016 !> Compute a global minimum for a scalar integer
2017 
2018 SUBROUTINE oasis_mpi_mini0(lvec,gvec,comm,string,all)
2020  IMPLICIT none
2021 
2022  !----- arguments ---
2023  integer(ip_i4_p), intent(in) :: lvec !< local values
2024  integer(ip_i4_p), intent(out):: gvec !< global values
2025  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
2026  character(*),optional,intent(in) :: string !< to identify caller
2027  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
2028 
2029  !----- local ---
2030  character(*),parameter :: subname = '(oasis_mpi_mini0)'
2031  logical :: lall
2032  character(len=256) :: lstring
2033  integer(ip_i4_p) :: reduce_type ! mpi reduction type
2034  integer(ip_i4_p) :: lsize
2035  integer(ip_i4_p) :: gsize
2036  integer(ip_i4_p) :: ierr
2037 
2038 !-------------------------------------------------------------------------------
2039 ! PURPOSE: Finds min of a distributed vector of values, assume local min
2040 ! already computed
2041 !-------------------------------------------------------------------------------
2042 
2043  call oasis_debug_enter(subname)
2044 
2045  reduce_type = mpi_min
2046  if (present(all)) then
2047  lall = all
2048  else
2049  lall = .false.
2050  endif
2051  if (present(string)) then
2052  lstring = trim(subname)//":"//trim(string)
2053  else
2054  lstring = trim(subname)
2055  endif
2056 
2057  lsize = 1
2058  gsize = 1
2059 
2060  if (lsize /= gsize) then
2061  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
2062  endif
2063 
2064  if (lall) then
2065  call mpi_allreduce(lvec,gvec,gsize,mpi_integer,reduce_type,comm,ierr)
2066  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2067  else
2068  call mpi_reduce(lvec,gvec,gsize,mpi_integer,reduce_type,0,comm,ierr)
2069  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2070  endif
2071 
2072  call oasis_debug_exit(subname)
2073 
2074 END SUBROUTINE oasis_mpi_mini0
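! --- Usage sketch (illustrative only) ---
! A scalar integer minimum across an assumed communicator "localcomm";
! "my_local_value" is a hypothetical local quantity:
!
!   integer(ip_i4_p) :: lmin, gmin
!   lmin = my_local_value
!   call oasis_mpi_min(lmin, gmin, localcomm, string='example_min', all=.true.)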
2075 
2076 !===============================================================================
2077 !===============================================================================
2078 
2079 !> Compute an array of global minimums for an array of 1D integers
2080 
2081 SUBROUTINE oasis_mpi_mini1(lvec,gvec,comm,string,all)
2083  IMPLICIT none
2084 
2085  !----- arguments ---
2086  integer(ip_i4_p), intent(in) :: lvec(:) !< local values
2087  integer(ip_i4_p), intent(out):: gvec(:) !< global values
2088  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
2089  character(*),optional,intent(in) :: string !< to identify caller
2090  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
2091 
2092  !----- local ---
2093  character(*),parameter :: subname = '(oasis_mpi_mini1)'
2094  logical :: lall
2095  character(len=256) :: lstring
2096  integer(ip_i4_p) :: reduce_type ! mpi reduction type
2097  integer(ip_i4_p) :: lsize
2098  integer(ip_i4_p) :: gsize
2099  integer(ip_i4_p) :: ierr
2100 
2101 !-------------------------------------------------------------------------------
2102 ! PURPOSE: Finds min of a distributed vector of values, assume local min
2103 ! already computed
2104 !-------------------------------------------------------------------------------
2105 
2106  call oasis_debug_enter(subname)
2107 
2108  reduce_type = mpi_min
2109  if (present(all)) then
2110  lall = all
2111  else
2112  lall = .false.
2113  endif
2114  if (present(string)) then
2115  lstring = trim(subname)//":"//trim(string)
2116  else
2117  lstring = trim(subname)
2118  endif
2119 
2120  lsize = size(lvec)
2121  gsize = size(gvec)
2122 
2123  if (lsize /= gsize) then
2124  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
2125  endif
2126 
2127  if (lall) then
2128  call mpi_allreduce(lvec,gvec,gsize,mpi_integer,reduce_type,comm,ierr)
2129  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2130  else
2131  call mpi_reduce(lvec,gvec,gsize,mpi_integer,reduce_type,0,comm,ierr)
2132  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2133  endif
2134 
2135  call oasis_debug_exit(subname)
2136 
2137 END SUBROUTINE oasis_mpi_mini1
2138 
2139 !===============================================================================
2140 !===============================================================================
2141 
2142 !> Compute a global minimum for a scalar double
2143 
2144 SUBROUTINE oasis_mpi_minr0(lvec,gvec,comm,string,all)
2146  IMPLICIT none
2147 
2148  !----- arguments ---
2149  real(ip_double_p), intent(in) :: lvec !< local values
2150  real(ip_double_p), intent(out):: gvec !< global values
2151  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
2152  character(*),optional,intent(in) :: string !< to identify caller
2153  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
2154 
2155  !----- local ---
2156  character(*),parameter :: subname = '(oasis_mpi_minr0)'
2157  logical :: lall
2158  character(len=256) :: lstring
2159  integer(ip_i4_p) :: reduce_type ! mpi reduction type
2160  integer(ip_i4_p) :: lsize
2161  integer(ip_i4_p) :: gsize
2162  integer(ip_i4_p) :: ierr
2163 
2164 !-------------------------------------------------------------------------------
2165 ! PURPOSE: Finds min of a distributed vector of values, assume local min
2166 ! already computed
2167 !-------------------------------------------------------------------------------
2168 
2169  call oasis_debug_enter(subname)
2170 
2171  reduce_type = mpi_min
2172  if (present(all)) then
2173  lall = all
2174  else
2175  lall = .false.
2176  endif
2177  if (present(string)) then
2178  lstring = trim(subname)//":"//trim(string)
2179  else
2180  lstring = trim(subname)
2181  endif
2182 
2183  lsize = 1
2184  gsize = 1
2185 
2186  if (lsize /= gsize) then
2187  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
2188  endif
2189 
2190  if (lall) then
2191  call mpi_allreduce(lvec,gvec,gsize,mpi_real8,reduce_type,comm,ierr)
2192  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2193  else
2194  call mpi_reduce(lvec,gvec,gsize,mpi_real8,reduce_type,0,comm,ierr)
2195  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2196  endif
2197 
2198  call oasis_debug_exit(subname)
2199 
2200 END SUBROUTINE oasis_mpi_minr0
2201 
2202 !===============================================================================
2203 !===============================================================================
2204 
2205 !> Compute an array of global minimums for an array of 1D doubles
2206 
2207 SUBROUTINE oasis_mpi_minr1(lvec,gvec,comm,string,all)
2209  IMPLICIT none
2210 
2211  !----- arguments ---
2212  real(ip_double_p), intent(in) :: lvec(:) !< local values
2213  real(ip_double_p), intent(out):: gvec(:) !< global values
2214  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
2215  character(*),optional,intent(in) :: string !< to identify caller
2216  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
2217 
2218  !----- local ---
2219  character(*),parameter :: subname = '(oasis_mpi_minr1)'
2220  logical :: lall
2221  character(len=256) :: lstring
2222  integer(ip_i4_p) :: reduce_type ! mpi reduction type
2223  integer(ip_i4_p) :: lsize
2224  integer(ip_i4_p) :: gsize
2225  integer(ip_i4_p) :: ierr
2226 
2227 !-------------------------------------------------------------------------------
2228 ! PURPOSE: Finds min of a distributed vector of values, assume local min
2229 ! already computed
2230 !-------------------------------------------------------------------------------
2231 
2232  call oasis_debug_enter(subname)
2233 
2234  reduce_type = mpi_min
2235  if (present(all)) then
2236  lall = all
2237  else
2238  lall = .false.
2239  endif
2240  if (present(string)) then
2241  lstring = trim(subname)//":"//trim(string)
2242  else
2243  lstring = trim(subname)
2244  endif
2245 
2246  lsize = size(lvec)
2247  gsize = size(gvec)
2248 
2249  if (lsize /= gsize) then
2250  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
2251  endif
2252 
2253  if (lall) then
2254  call mpi_allreduce(lvec,gvec,gsize,mpi_real8,reduce_type,comm,ierr)
2255  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2256  else
2257  call mpi_reduce(lvec,gvec,gsize,mpi_real8,reduce_type,0,comm,ierr)
2258  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2259  endif
2260 
2261  call oasis_debug_exit(subname)
2262 
2263 END SUBROUTINE oasis_mpi_minr1
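! --- Usage sketch (illustrative only) ---
! Element-wise minima of a 1D double array; "localcomm" is assumed:
!
!   real(ip_double_p) :: lvals(8), gvals(8)
!   call oasis_mpi_min(lvals, gvals, localcomm, string='example_minr1')
!
! Only task 0 holds gvals on return unless all=.true. is passed.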
2264 
2265 !===============================================================================
2266 !===============================================================================
2267 
2268 !> Compute a global maximum for a scalar integer
2269 
2270 SUBROUTINE oasis_mpi_maxi0(lvec,gvec,comm,string,all)
2272  IMPLICIT none
2273 
2274  !----- arguments ---
2275  integer(ip_i4_p), intent(in) :: lvec !< local values
2276  integer(ip_i4_p), intent(out):: gvec !< global values
2277  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
2278  character(*),optional,intent(in) :: string !< to identify caller
2279  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
2280 
2281  !----- local ---
2282  character(*),parameter :: subname = '(oasis_mpi_maxi0)'
2283  logical :: lall
2284  character(len=256) :: lstring
2285  integer(ip_i4_p) :: reduce_type ! mpi reduction type
2286  integer(ip_i4_p) :: lsize
2287  integer(ip_i4_p) :: gsize
2288  integer(ip_i4_p) :: ierr
2289 
2290 !-------------------------------------------------------------------------------
2291 ! PURPOSE: Finds max of a distributed vector of values, assume local max
2292 ! already computed
2293 !-------------------------------------------------------------------------------
2294 
2295  call oasis_debug_enter(subname)
2296 
2297  reduce_type = mpi_max
2298  if (present(all)) then
2299  lall = all
2300  else
2301  lall = .false.
2302  endif
2303  if (present(string)) then
2304  lstring = trim(subname)//":"//trim(string)
2305  else
2306  lstring = trim(subname)
2307  endif
2308 
2309  lsize = 1
2310  gsize = 1
2311 
2312  if (lsize /= gsize) then
2313  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
2314  endif
2315 
2316  if (lall) then
2317  call mpi_allreduce(lvec,gvec,gsize,mpi_integer,reduce_type,comm,ierr)
2318  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2319  else
2320  call mpi_reduce(lvec,gvec,gsize,mpi_integer,reduce_type,0,comm,ierr)
2321  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2322  endif
2323 
2324  call oasis_debug_exit(subname)
2325 
2326 END SUBROUTINE oasis_mpi_maxi0
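! --- Usage sketch (illustrative only) ---
! A scalar integer maximum, e.g. to find the largest local count across
! the tasks of an assumed communicator "localcomm":
!
!   integer(ip_i4_p) :: lcnt, gcnt
!   call oasis_mpi_max(lcnt, gcnt, localcomm, string='example_max', all=.true.)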
2327 
2328 !===============================================================================
2329 !===============================================================================
2330 
2331 !> Compute an array of global maximums for an array of 1D integers
2332 
2333 SUBROUTINE oasis_mpi_maxi1(lvec,gvec,comm,string,all)
2335  IMPLICIT none
2336 
2337  !----- arguments ---
2338  integer(ip_i4_p), intent(in) :: lvec(:) !< local values
2339  integer(ip_i4_p), intent(out):: gvec(:) !< global values
2340  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
2341  character(*),optional,intent(in) :: string !< to identify caller
2342  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
2343 
2344  !----- local ---
2345  character(*),parameter :: subname = '(oasis_mpi_maxi1)'
2346  logical :: lall
2347  character(len=256) :: lstring
2348  integer(ip_i4_p) :: reduce_type ! mpi reduction type
2349  integer(ip_i4_p) :: lsize
2350  integer(ip_i4_p) :: gsize
2351  integer(ip_i4_p) :: ierr
2352 
2353 !-------------------------------------------------------------------------------
2354 ! PURPOSE: Finds max of a distributed vector of values, assume local max
2355 ! already computed
2356 !-------------------------------------------------------------------------------
2357 
2358  call oasis_debug_enter(subname)
2359 
2360  reduce_type = mpi_max
2361  if (present(all)) then
2362  lall = all
2363  else
2364  lall = .false.
2365  endif
2366  if (present(string)) then
2367  lstring = trim(subname)//":"//trim(string)
2368  else
2369  lstring = trim(subname)
2370  endif
2371 
2372  lsize = size(lvec)
2373  gsize = size(gvec)
2374 
2375  if (lsize /= gsize) then
2376  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
2377  endif
2378 
2379  if (lall) then
2380  call mpi_allreduce(lvec,gvec,gsize,mpi_integer,reduce_type,comm,ierr)
2381  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2382  else
2383  call mpi_reduce(lvec,gvec,gsize,mpi_integer,reduce_type,0,comm,ierr)
2384  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2385  endif
2386 
2387  call oasis_debug_exit(subname)
2388 
2389 END SUBROUTINE oasis_mpi_maxi1
2390 
2391 !===============================================================================
2392 !===============================================================================
2393 
2394 !> Compute a global maximum for a scalar double
2395 
2396 SUBROUTINE oasis_mpi_maxr0(lvec,gvec,comm,string,all)
2398  IMPLICIT none
2399 
2400  !----- arguments ---
2401  real(ip_double_p), intent(in) :: lvec !< local values
2402  real(ip_double_p), intent(out):: gvec !< global values
2403  integer(ip_i4_p), intent(in) :: comm !< mpi communicator
2404  character(*),optional,intent(in) :: string !< to identify caller
2405  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
2406 
2407  !----- local ---
2408  character(*),parameter :: subname = '(oasis_mpi_maxr0)'
2409  logical :: lall
2410  character(len=256) :: lstring
2411  integer(ip_i4_p) :: reduce_type ! mpi reduction type
2412  integer(ip_i4_p) :: lsize
2413  integer(ip_i4_p) :: gsize
2414  integer(ip_i4_p) :: ierr
2415 
2416 !-------------------------------------------------------------------------------
2417 ! PURPOSE: Finds max of a distributed vector of values, assume local max
2418 ! already computed
2419 !-------------------------------------------------------------------------------
2420 
2421  call oasis_debug_enter(subname)
2422 
2423  reduce_type = mpi_max
2424  if (present(all)) then
2425  lall = all
2426  else
2427  lall = .false.
2428  endif
2429  if (present(string)) then
2430  lstring = trim(subname)//":"//trim(string)
2431  else
2432  lstring = trim(subname)
2433  endif
2434 
2435  lsize = 1
2436  gsize = 1
2437 
2438  if (lsize /= gsize) then
2439  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
2440  endif
2441 
2442  if (lall) then
2443  call mpi_allreduce(lvec,gvec,gsize,mpi_real8,reduce_type,comm,ierr)
2444  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2445  else
2446  call mpi_reduce(lvec,gvec,gsize,mpi_real8,reduce_type,0,comm,ierr)
2447  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2448  endif
2449 
2450  call oasis_debug_exit(subname)
2451 
2452 END SUBROUTINE oasis_mpi_maxr0
2453 
2454 !===============================================================================
2455 !===============================================================================
2456 
2457 !> Compute an array of global maximums for an array of 1D doubles
2458 
2459 SUBROUTINE oasis_mpi_maxr1(lvec,gvec,comm,string,all)
2461  IMPLICIT none
2462 
2463  !----- arguments ---
2464  real(ip_double_p), intent(in) :: lvec(:) !< local values
2465  real(ip_double_p), intent(out):: gvec(:) !< global values
2466  integer(ip_i4_p) , intent(in) :: comm !< mpi communicator
2467  character(*),optional,intent(in) :: string !< to identify caller
2468  logical, optional,intent(in) :: all !< if true call allreduce, otherwise reduce to task 0
2469 
2470  !----- local ---
2471  character(*),parameter :: subname = '(oasis_mpi_maxr1)'
2472  logical :: lall
2473  character(len=256) :: lstring
2474  integer(ip_i4_p) :: reduce_type ! mpi reduction type
2475  integer(ip_i4_p) :: lsize
2476  integer(ip_i4_p) :: gsize
2477  integer(ip_i4_p) :: ierr
2478 
2479 !-------------------------------------------------------------------------------
2480 ! PURPOSE: Finds max of a distributed vector of values, assume local max
2481 ! already computed
2482 !-------------------------------------------------------------------------------
2483 
2484  call oasis_debug_enter(subname)
2485 
2486  reduce_type = mpi_max
2487  if (present(all)) then
2488  lall = all
2489  else
2490  lall = .false.
2491  endif
2492  if (present(string)) then
2493  lstring = trim(subname)//":"//trim(string)
2494  else
2495  lstring = trim(subname)
2496  endif
2497 
2498  lsize = size(lvec)
2499  gsize = size(gvec)
2500 
2501  if (lsize /= gsize) then
2502  call oasis_mpi_abort(trim(lstring)//" lsize,gsize incompatible ")
2503  endif
2504 
2505  if (lall) then
2506  call mpi_allreduce(lvec,gvec,gsize,mpi_real8,reduce_type,comm,ierr)
2507  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_ALLREDUCE")
2508  else
2509  call mpi_reduce(lvec,gvec,gsize,mpi_real8,reduce_type,0,comm,ierr)
2510  call oasis_mpi_chkerr(ierr,trim(lstring)//" MPI_REDUCE")
2511  endif
2512 
2513  call oasis_debug_exit(subname)
2514 
2515 END SUBROUTINE oasis_mpi_maxr1
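! --- Usage sketch (illustrative only) ---
! Element-wise maxima of a 1D double array on every task; "localcomm" assumed:
!
!   real(ip_double_p) :: lvals(4), gvals(4)
!   call oasis_mpi_max(lvals, gvals, localcomm, string='example_maxr1', all=.true.)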
2516 
2517 !===============================================================================
2518 !===============================================================================
2519 
2520 !> Get the total number of tasks associated with a communicator
2521 
2522 SUBROUTINE oasis_mpi_commsize(comm,size,string)
2524  IMPLICIT none
2525 
2526  !----- arguments ---
2527  integer,intent(in) :: comm !< mpi communicator
2528  integer,intent(out) :: size !< output comm size
2529  character(*),optional,intent(in) :: string !< to identify caller
2530 
2531  !----- local ---
2532  character(*),parameter :: subname = '(oasis_mpi_commsize)'
2533  integer(ip_i4_p) :: ierr
2534 
2535 !-------------------------------------------------------------------------------
2536 ! PURPOSE: MPI commsize
2537 !-------------------------------------------------------------------------------
2538 
2539  call oasis_debug_enter(subname)
2540 
2541  call mpi_comm_size(comm,size,ierr)
2542  if (present(string)) then
2543  call oasis_mpi_chkerr(ierr,subname//trim(string))
2544  else
2545  call oasis_mpi_chkerr(ierr,subname)
2546  endif
2547 
2548  call oasis_debug_exit(subname)
2549 
2550 END SUBROUTINE oasis_mpi_commsize
2551 
2552 !===============================================================================
2553 !===============================================================================
2554 
2555 !> Get the rank (task ID) for a task in a communicator
2556 
2557 SUBROUTINE oasis_mpi_commrank(comm,rank,string)
2559  IMPLICIT none
2560 
2561  !----- arguments ---
2562  integer,intent(in) :: comm !< mpi communicator
2563  integer,intent(out) :: rank !< output task ID
2564  character(*),optional,intent(in) :: string !< to identify caller
2565 
2566  !----- local ---
2567  character(*),parameter :: subname = '(oasis_mpi_commrank)'
2568  integer(ip_i4_p) :: ierr
2569 
2570 !-------------------------------------------------------------------------------
2571 ! PURPOSE: MPI commrank
2572 !-------------------------------------------------------------------------------
2573 
2574  call oasis_debug_enter(subname)
2575 
2576  call mpi_comm_rank(comm,rank,ierr)
2577  if (present(string)) then
2578  call oasis_mpi_chkerr(ierr,subname//trim(string))
2579  else
2580  call oasis_mpi_chkerr(ierr,subname)
2581  endif
2582 
2583  call oasis_debug_exit(subname)
2584 
2585 END SUBROUTINE oasis_mpi_commrank
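! --- Usage sketch (illustrative only) ---
! Typical companion calls to query an assumed communicator "localcomm":
!
!   integer :: npes, mype
!   call oasis_mpi_commsize(localcomm, npes, string='example_size')
!   call oasis_mpi_commrank(localcomm, mype, string='example_rank')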
2586 
2587 !===============================================================================
2588 !===============================================================================
2589 
2590 !> Check whether MPI has been initialized
2591 
2592 SUBROUTINE oasis_mpi_initialized(flag,string)
2594  IMPLICIT none
2595 
2596  !----- arguments ---
2597  logical,intent(out) :: flag !< true if MPI_INIT has been called
2598  character(*),optional,intent(in) :: string !< to identify caller
2599 
2600  !----- local ---
2601  character(*),parameter :: subName = '(oasis_mpi_initialized)'
2602  integer(ip_i4_p) :: ierr
2603 
2604 !-------------------------------------------------------------------------------
2605 ! PURPOSE: MPI initialized
2606 !-------------------------------------------------------------------------------
2607 
2608  call oasis_debug_enter(subname)
2609 
2610  call mpi_initialized(flag,ierr)
2611  if (present(string)) then
2612  call oasis_mpi_chkerr(ierr,subname//trim(string))
2613  else
2614  call oasis_mpi_chkerr(ierr,subname)
2615  endif
2616 
2617  call oasis_debug_exit(subname)
2618 
2619 END SUBROUTINE oasis_mpi_initialized
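! --- Usage sketch (illustrative only) ---
! Guarding an MPI_INIT call when the caller may already have initialized MPI:
!
!   logical :: mpi_up
!   call oasis_mpi_initialized(mpi_up, string='example_init_check')
!   if (.not. mpi_up) call oasis_mpi_init(string='example_init')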
2620 
2621 !===============================================================================
2622 !===============================================================================
2623 
2624 !> Return a timestamp from MPI_WTIME
2625 
2626 SUBROUTINE oasis_mpi_wtime(wtime)
2628  IMPLICIT none
2629 
2630  !----- arguments ---
2631  real(ip_r8_p), intent(out) :: wtime !< wall-clock time in seconds from MPI_WTIME
2632 
2633  !----- local ---
2634  character(*),parameter :: subName = '(oasis_mpi_wtime)'
2635 
2636 !-------------------------------------------------------------------------------
2637 ! PURPOSE: MPI wtime
2638 !-------------------------------------------------------------------------------
2639 
2640  call oasis_debug_enter(subname)
2641 
2642  wtime = mpi_wtime()
2643 
2644  call oasis_debug_exit(subname)
2645 
2646 END SUBROUTINE oasis_mpi_wtime
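! --- Usage sketch (illustrative only) ---
! Timing a code region with two MPI_WTIME samples (seconds):
!
!   real(ip_r8_p) :: t0, t1
!   call oasis_mpi_wtime(t0)
!   ! ... work to be timed ...
!   call oasis_mpi_wtime(t1)
!   write(*,*) 'elapsed seconds = ', t1 - t0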
2647 
2648 !===============================================================================
2649 !===============================================================================
2650 
2651 !> Write an error message and call MPI_ABORT
2652 
2653 SUBROUTINE oasis_mpi_abort(string,rcode)
2655  IMPLICIT none
2656 
2657  !----- arguments ---
2658  character(*),optional,intent(in) :: string !< to identify caller
2659  integer,optional,intent(in) :: rcode !< optional code
2660 
2661  !----- local ---
2662  character(*),parameter :: subName = '(oasis_mpi_abort)'
2663  character(len=256) :: lstr
2664  integer(ip_i4_p) :: ierr
2665  integer :: rc ! return code
2666 
2667 !-------------------------------------------------------------------------------
2668 ! PURPOSE: MPI abort
2669 !-------------------------------------------------------------------------------
2670 
2671  call oasis_debug_enter(subname)
2672 
2673  if ( present(string) .and. present(rcode)) then
2674  write(lstr,'(a,i6.6)') trim(string)//' rcode = ',rcode
2675  elseif (present(string)) then
2676  lstr = trim(string)
2677  else
2678  lstr = ' '
2679  endif
2680 
2681  IF ( PRESENT(rcode)) THEN
2682  CALL oasis_abort(cd_routine=subname,cd_message=trim(lstr),file=__file__,line=__line__,rcode=rcode)
2683  ELSE
2684  CALL oasis_abort(cd_routine=subname,cd_message=trim(lstr),file=__file__,line=__line__)
2685  ENDIF
2686 
2687  call oasis_debug_exit(subname)
2688 
2689 END SUBROUTINE oasis_mpi_abort
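! --- Usage sketch (illustrative only) ---
! Aborting with a caller-supplied message and a hypothetical return code
! "ierror"; both arguments are optional:
!
!   if (ierror /= 0) call oasis_mpi_abort('example_caller failed', rcode=ierror)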
2690 
2691 !===============================================================================
2692 !===============================================================================
2693 
2694 !> Call MPI_BARRIER for a particular communicator
2695 
2696 SUBROUTINE oasis_mpi_barrier(comm,string)
2698  IMPLICIT none
2699 
2700  !----- arguments ---
2701  integer,intent(in) :: comm !< mpi communicator
2702  character(*),optional,intent(in) :: string !< to identify caller
2703 
2704  !----- local ---
2705  character(*),parameter :: subname = '(oasis_mpi_barrier)'
2706  integer(ip_i4_p) :: ierr
2707 
2708 !-------------------------------------------------------------------------------
2709 ! PURPOSE: MPI barrier
2710 !-------------------------------------------------------------------------------
2711 
2712  call oasis_debug_enter(subname)
2713 
2714  call mpi_barrier(comm,ierr)
2715  if (present(string)) then
2716  call oasis_mpi_chkerr(ierr,subname//trim(string))
2717  else
2718  call oasis_mpi_chkerr(ierr,subname)
2719  endif
2720 
2721  call oasis_debug_exit(subname)
2722 
2723 END SUBROUTINE oasis_mpi_barrier
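! --- Usage sketch (illustrative only) ---
! Synchronizing all tasks of an assumed communicator "localcomm", e.g.
! before a timed section:
!
!   call oasis_mpi_barrier(localcomm, string='example_barrier')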
2724 
2725 !===============================================================================
2726 !===============================================================================
2727 
2728 !> Call MPI_INIT
2729 
2730 SUBROUTINE oasis_mpi_init(string)
2732  IMPLICIT none
2733 
2734  !----- arguments ---
2735  character(*),optional,intent(in) :: string !< to identify caller
2736 
2737  !----- local ---
2738  character(*),parameter :: subname = '(oasis_mpi_init)'
2739  integer(ip_i4_p) :: ierr
2740 
2741 !-------------------------------------------------------------------------------
2742 ! PURPOSE: MPI init
2743 !-------------------------------------------------------------------------------
2744 
2745  call oasis_debug_enter(subname)
2746 
2747  call mpi_init(ierr)
2748  if (present(string)) then
2749  call oasis_mpi_chkerr(ierr,subname//trim(string))
2750  else
2751  call oasis_mpi_chkerr(ierr,subname)
2752  endif
2753 
2754  call oasis_debug_exit(subname)
2755 
2756 END SUBROUTINE oasis_mpi_init
2757 
2758 !===============================================================================
2759 !===============================================================================
2760 
2761 !> Call MPI_FINALIZE
2762 
2763 SUBROUTINE oasis_mpi_finalize(string)
2765  IMPLICIT none
2766 
2767  !----- arguments ---
2768  character(*),optional,intent(in) :: string !< to identify caller
2769 
2770  !----- local ---
2771  character(*),parameter :: subname = '(oasis_mpi_finalize)'
2772  integer(ip_i4_p) :: ierr
2773 
2774 !-------------------------------------------------------------------------------
2775 ! PURPOSE: MPI finalize
2776 !-------------------------------------------------------------------------------
2777 
2778  call oasis_debug_enter(subname)
2779 
2780  call mpi_finalize(ierr)
2781  if (present(string)) then
2782  call oasis_mpi_chkerr(ierr,subname//trim(string))
2783  else
2784  call oasis_mpi_chkerr(ierr,subname)
2785  endif
2786 
2787  call oasis_debug_exit(subname)
2788 
2789 END SUBROUTINE oasis_mpi_finalize
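! --- Usage sketch (illustrative only) ---
! A minimal MPI lifecycle through this layer, assuming no other component
! owns MPI initialization or finalization:
!
!   call oasis_mpi_init(string='example_init')
!   ! ... application work ...
!   call oasis_mpi_finalize(string='example_finalize')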
2790 
2791 !===============================================================================
2792 !===============================================================================
2793 
2794 !> Custom method for reducing MPI lists across pes for OASIS
2795 
2796 SUBROUTINE oasis_mpi_reducelists(linp1,comm,cntout,lout1,callstr,fastcheck,fastcheckout, &
2797  linp2,lout2,spval2,linp3,lout3,spval3,linp4,lout4,spval4)
2799  IMPLICIT none
2800 
2801  !----- arguments ---
2802  character(*),pointer,intent(in) :: linp1(:) !< input list on each task
2803  integer ,intent(in) :: comm !< mpi communicator
2804  integer ,intent(out) :: cntout !< size of lout1 list
2805  character(*),pointer,intent(inout) :: lout1(:) !< reduced output list, same on all tasks
2806  character(*) ,intent(in) :: callstr !< to identify caller
2807  logical ,intent(in) ,optional :: fastcheck !< run a fastcheck first
2808  logical ,intent(out) ,optional :: fastcheckout !< true if fastcheck worked
2809  character(*),pointer,intent(in) ,optional :: linp2(:) !< input list on each task
2810  character(*),pointer,intent(inout),optional :: lout2(:) !< reduced output list, same on all tasks
2811  character(*) ,intent(in) ,optional :: spval2 !< unset value for linp2
2812  integer ,pointer,intent(in) ,optional :: linp3(:) !< input list on each task
2813  integer ,pointer,intent(inout),optional :: lout3(:) !< reduced output list, same on all tasks
2814  integer ,intent(in) ,optional :: spval3 !< unset value for linp3
2815  integer ,pointer,intent(in) ,optional :: linp4(:) !< input list on each task
2816  integer ,pointer,intent(inout),optional :: lout4(:) !< reduced output list, same on all tasks
2817  integer ,intent(in) ,optional :: spval4 !< unset value for linp4
2818 
2819  !----- local ---
2820  integer(kind=ip_i4_p) :: m,n,k,p
2821  integer(kind=ip_i4_p) :: llen,lsize
2822  integer(kind=ip_i4_p) :: cnt, cntr
2823  integer(kind=ip_i4_p) :: commrank, commsize
2824  integer(kind=ip_i4_p) :: listcheck, listcheckall
2825  integer(kind=ip_i4_p) :: maxloops, sendid, recvid, kfac
2826  logical :: found, present2, present3, present4
2827  integer(kind=ip_i4_p) :: status(mpi_status_size) ! mpi status info
2828  character(len=ic_lvar2),pointer :: recv_varf1(:),varf1a(:),varf1b(:)
2829  character(len=ic_lvar2),pointer :: recv_varf2(:),varf2a(:),varf2b(:)
2830  integer(kind=ip_i4_p) ,pointer :: recv_varf3(:),varf3a(:),varf3b(:)
2831  integer(kind=ip_i4_p) ,pointer :: recv_varf4(:),varf4a(:),varf4b(:)
2832  character(len=ic_lvar2) :: string
2833  logical, parameter :: local_timers_on = .false.
2834  integer(ip_i4_p) :: ierr
2835  character(*),parameter :: subname = '(oasis_mpi_reducelists)'
2836 
2837 !-------------------------------------------------------------------------------
2838 ! PURPOSE: Custom method for reducing MPI lists for OASIS using a log2
2839 ! algorithm. This generates a list on all tasks that consists of the union
2840 ! of all the values on all the tasks, with each value listed once. linp1
2841 ! is the input list, possibly different on each task. lout1
2842 ! is the resulting list, the same on each task, consisting of all unique
2843 ! values of linp1 from all tasks. The list is ultimately reduced onto
2844 ! the root task and then broadcast. The reduction proceeds via a
2845 ! binary-tree style pairwise exchange between tasks.
2846 !-------------------------------------------------------------------------------
2847 
2848  call oasis_debug_enter(subname)
2849 
2850  string = trim(callstr)
2851  if (present(fastcheckout)) fastcheckout = .false. ! by default
2852  call oasis_mpi_commrank(comm,commrank,string=subname//trim(string))
2853  call oasis_mpi_commsize(comm,commsize,string=subname//trim(string))
2854 
2855  !-----------------------------------------------
2856  !> * Check argument consistency
2857  !-----------------------------------------------
2858 
2859  if ((present(linp2) .and. .not.present(lout2)) .or. &
2860  (present(lout2) .and. .not.present(linp2))) then
2861  call oasis_mpi_abort(subname//trim(string)//" linp2 lout2 both must be present ")
2862  endif
2863  present2 = present(linp2)
2864 
2865  if ((present(linp3) .and. .not.present(lout3)) .or. &
2866  (present(lout3) .and. .not.present(linp3))) then
2867  call oasis_mpi_abort(subname//trim(string)//" linp3 lout3 both must be present ")
2868  endif
2869  present3 = present(linp3)
2870 
2871  if ((present(linp4) .and. .not.present(lout4)) .or. &
2872  (present(lout4) .and. .not.present(linp4))) then
2873  call oasis_mpi_abort(subname//trim(string)//" linp4 lout4 both must be present ")
2874  endif
2875  present4 = present(linp4)
2876 
2877  if (len(linp1) > len(varf1a)) then
2878  call oasis_mpi_abort(subname//trim(string)//" linp1 too long ")
2879  endif
2880 
2881  if (present(linp2)) then
2882  if (size(linp2) /= size(linp1)) then
2883  call oasis_mpi_abort(subname//trim(string)//" linp1 linp2 not same size ")
2884  endif
2885  if (len(linp2) > len(varf2a)) then
2886  call oasis_mpi_abort(subname//trim(string)//" linp2 too long ")
2887  endif
2888  if (len(varf1a) /= len(varf2a)) then
2889  call oasis_mpi_abort(subname//trim(string)//" varf1a varf2a not same len ")
2890  endif
2891  endif
2892 
2893  if (present(linp3)) then
2894  if (size(linp3) /= size(linp1)) then
2895  call oasis_mpi_abort(subname//trim(string)//" linp1 linp3 not same size ")
2896  endif
2897  endif
2898 
2899  if (present(linp4)) then
2900  if (size(linp4) /= size(linp1)) then
2901  call oasis_mpi_abort(subname//trim(string)//" linp1 linp4 not same size ")
2902  endif
2903  endif
2904 
2905  !-----------------------------------------------
2906  !> * Fast compare on all tasks
2907  ! If all tasks have same list, just skip the reduction
2908  !-----------------------------------------------
2909 
2910  if (present(fastcheck)) then
2911  if (fastcheck) then
2912 
2913  if (local_timers_on) call oasis_timer_start(trim(string)//'_rl_fastcheck')
2914 
2915  lsize = -1
2916  if (commrank == 0) then
2917  lsize = size(linp1)
2918  endif
2919  call oasis_mpi_bcast(lsize, comm, subname//trim(string)//' lsize check')
2920 
2921  ! varf1a holds linp1 from root on all tasks
2922  allocate(varf1a(lsize))
2923  varf1a = ' '
2924  if (commrank == 0) then
2925  varf1a(1:lsize) = linp1(1:lsize)
2926  endif
2927  call oasis_mpi_bcast(varf1a, comm, subname//trim(string)//' varf1a check')
2928 
2929  listcheck = 1
2930  if (oasis_debug >= 20) then
2931  write(nulprt,*) subname//trim(string),' sizes ',lsize,size(linp1)
2932  endif
2933  if (lsize /= size(linp1)) listcheck = 0
2934  n = 0
2935  do while (listcheck == 1 .and. n < lsize)
2936  n = n + 1
2937  if (varf1a(n) /= linp1(n)) listcheck = 0
2938  if (oasis_debug >= 20) then
2939  write(nulprt,*) subname//trim(string),' fcheck varf1a ',n,trim(varf1a(n)),' ',trim(linp1(n)),listcheck
2940  endif
2941  enddo
2942  deallocate(varf1a)
2943  call oasis_mpi_min(listcheck,listcheckall,comm, subname//trim(string)//' listcheck',all=.true.)
2944 
2945  if (oasis_debug >= 15) then
2946  write(nulprt,*) subname//trim(string),' listcheck = ',listcheck,listcheckall
2947  endif
2948  if (local_timers_on) call oasis_timer_stop(trim(string)//'_rl_fastcheck')
2949 
2950  !-------------------------------------------------
2951  ! linp1 same on all tasks, update lout1, lout2, lout3, lout4 and return
2952  !-------------------------------------------------
2953 
2954  if (listcheckall == 1) then
2955  cntout = lsize
2956  allocate(lout1(lsize))
2957  lout1(1:lsize) = linp1(1:lsize)
2958  if (present2) then
2959  allocate(lout2(lsize))
2960  lout2(1:lsize) = linp2(1:lsize)
2961  endif
2962  if (present3) then
2963  allocate(lout3(lsize))
2964  lout3(1:lsize) = linp3(1:lsize)
2965  endif
2966  if (present4) then
2967  allocate(lout4(lsize))
2968  lout4(1:lsize) = linp4(1:lsize)
2969  endif
2970  call oasis_debug_exit(subname)
2971  if (present(fastcheckout)) fastcheckout = .true.
2972  return
2973  endif
2974 
2975  endif ! fastcheck
2976  endif ! present fastcheck
2977 
2978  !-----------------------------------------------
2979  !> * Generate initial unique local name list
2980  !-----------------------------------------------
2981 
2982  llen = len(linp1)
2983  lsize = size(linp1)
2984  if (oasis_debug >= 15) then
2985  write(nulprt,*) subname//trim(string),' len, size = ',llen,lsize
2986  call oasis_flush(nulprt)
2987  endif
2988 
2989  allocate(varf1a(max(lsize,20))) ! 20 is arbitrary starting number
2990  if (present2) allocate(varf2a(max(lsize,20))) ! 20 is arbitrary starting number
2991  if (present3) allocate(varf3a(max(lsize,20))) ! 20 is arbitrary starting number
2992  if (present4) allocate(varf4a(max(lsize,20))) ! 20 is arbitrary starting number
2993  cnt = 0
2994  do n = 1,lsize
2995  p = 0
2996  found = .false.
2997  do while (p < cnt .and. .not.found)
2998  p = p + 1
2999  if (linp1(n) == varf1a(p)) found = .true.
3000  enddo
3001  if (.not.found) then
3002  cnt = cnt + 1
3003  varf1a(cnt) = linp1(n)
3004  if (present2) varf2a(cnt) = linp2(n)
3005  if (present3) varf3a(cnt) = linp3(n)
3006  if (present4) varf4a(cnt) = linp4(n)
3007  endif
3008  enddo
3009 
3010  !-----------------------------------------------
3011  !> * Log2 reduction of linp over tasks to root
3012  !-----------------------------------------------
3013 
3014  maxloops = int(sqrt(float(commsize+1)))+1
3015  do m = 1,maxloops
3016 
3017  kfac = 2**m
3018 
3019  recvid = commrank + kfac/2 ! task to recv from
3020  if (mod(commrank,kfac) /= 0 .or. &
3021  recvid < 0 .or. recvid > commsize-1) &
3022  recvid = -1
3023 
3024  sendid = commrank - kfac/2 ! task to send to
3025  if (mod(commrank+kfac/2,kfac) /= 0 .or. &
3026  sendid < 0 .or. sendid > commsize-1) &
3027  sendid = -1
3028 
3029  if (oasis_debug >= 15) then
3030  write(nulprt,*) subname//trim(string),' send/recv ids ',m,commrank,sendid,recvid
3031  call oasis_flush(nulprt)
3032  endif
3033 
3034  !-----------------------------------------------
3035  !> * Send list
3036  !-----------------------------------------------
3037 
3038  if (sendid >= 0) then
3039  if (local_timers_on) call oasis_timer_start(trim(string)//'_rl_send')
3040  call mpi_send(cnt, 1, mpi_integer, sendid, 5900+m, comm, ierr)
3041  call oasis_mpi_chkerr(ierr,subname//trim(string)//':send cnt')
3042  if (cnt > 0) then
3043  if (oasis_debug >= 15) then
3044  write(nulprt,*) subname//trim(string),' send size ',commrank,m,cnt,ic_lvar2
3045  call oasis_flush(nulprt)
3046  endif
3047  call mpi_send(varf1a(1:cnt), cnt*ic_lvar2, mpi_character, sendid, 6900+m, comm, ierr)
3048  call oasis_mpi_chkerr(ierr,subname//trim(string)//':send varf1a')
3049  if (present2) then
3050  call mpi_send(varf2a(1:cnt), cnt*ic_lvar2, mpi_character, sendid, 7900+m, comm, ierr)
3051  call oasis_mpi_chkerr(ierr,subname//trim(string)//':send varf2a')
3052  endif
3053  if (present3) then
3054  call mpi_send(varf3a(1:cnt), cnt, mpi_integer, sendid, 8900+m, comm, ierr)
3055  call oasis_mpi_chkerr(ierr,subname//trim(string)//':send varf3a')
3056  endif
3057  if (present4) then
3058  call mpi_send(varf4a(1:cnt), cnt, mpi_integer, sendid, 9900+m, comm, ierr)
3059  call oasis_mpi_chkerr(ierr,subname//trim(string)//':send varf4a')
3060  endif
3061  endif ! cnt > 0
3062  if (local_timers_on) call oasis_timer_stop (trim(string)//'_rl_send')
3063  endif ! sendid >= 0
3064 
3065  !-----------------------------------------------
3066  !> * Recv list
3067  !> * Determine the unique list
3068  !-----------------------------------------------
3069 
3070  if (recvid >= 0) then
3071  if (local_timers_on) call oasis_timer_start (trim(string)//'_rl_recv')
3072  call mpi_recv(cntr, 1, mpi_integer, recvid, 5900+m, comm, status, ierr)
3073  call oasis_mpi_chkerr(ierr,subname//trim(string)//':recv cntr')
3074  if (cntr > 0) then
3075  if (oasis_debug >= 15) then
3076  write(nulprt,*) subname//trim(string),' recv size ',commrank,m,cntr,ic_lvar2
3077  call oasis_flush(nulprt)
3078  endif
3079  allocate(recv_varf1(cntr))
3080  call mpi_recv(recv_varf1, cntr*ic_lvar2, mpi_character, recvid, 6900+m, comm, status, ierr)
3081  call oasis_mpi_chkerr(ierr,subname//trim(string)//':recv varf1')
3082  if (present2) then
3083  allocate(recv_varf2(cntr))
3084  call mpi_recv(recv_varf2, cntr*ic_lvar2, mpi_character, recvid, 7900+m, comm, status, ierr)
3085  call oasis_mpi_chkerr(ierr,subname//trim(string)//':recv varf2')
3086  endif
3087  if (present3) then
3088  allocate(recv_varf3(cntr))
3089  call mpi_recv(recv_varf3, cntr, mpi_integer, recvid, 8900+m, comm, status, ierr)
3090  call oasis_mpi_chkerr(ierr,subname//trim(string)//':recv varf3')
3091  endif
3092  if (present4) then
3093  allocate(recv_varf4(cntr))
3094  call mpi_recv(recv_varf4, cntr, mpi_integer, recvid, 9900+m, comm, status, ierr)
3095  call oasis_mpi_chkerr(ierr,subname//trim(string)//':recv varf4')
3096  endif
3097  endif ! cntr > 0
3098  if (local_timers_on) call oasis_timer_stop (trim(string)//'_rl_recv')
3099 
3100  if (local_timers_on) call oasis_timer_start(trim(string)//'_rl_rootsrch')
3101  do n = 1,cntr
3102  if (oasis_debug >= 15) write(nulprt,*) subname//trim(string),' check recv_varf1 ',m,n,trim(recv_varf1(n))
3103 
3104  p = 0
3105  found = .false.
3106  do while (p < cnt .and. .not.found)
3107  p = p + 1
3108  if (recv_varf1(n) == varf1a(p)) then
3109  found = .true.
3110  if (present2) then
3111  if (present(spval2)) then
3112  !--- use something other than spval2 if it exists and check consistency
3113  if (varf2a(p) == spval2) then
3114  varf2a(p) = recv_varf2(n)
3115  elseif (recv_varf2(n) /= spval2 .and. varf2a(p) /= recv_varf2(n)) then
3116  call oasis_abort(cd_routine=subname//trim(string),cd_message= &
3117  'inconsistent linp2 value: '//trim(recv_varf2(n))//':'//trim(varf1a(p))//':'//trim(varf2a(p)), &
3118  file=__file__,line=__line__)
3119  endif
3120  else
3121  if (varf2a(p) /= recv_varf2(n)) then
3122  call oasis_abort(cd_routine=subname//trim(string),cd_message= &
3123  'inconsistent linp2 value: '//trim(recv_varf2(n))//':'//trim(varf1a(p))//':'//trim(varf2a(p)), &
3124  file=__file__,line=__line__)
3125  endif
3126  endif
3127  endif
3128  if (present3) then
3129  if (present(spval3)) then
3130  !--- use something other than spval3 if it exists and check consistency
3131  if (varf3a(p) == spval3) then
3132  varf3a(p) = recv_varf3(n)
3133  elseif (recv_varf3(n) /= spval3 .and. varf3a(p) /= recv_varf3(n)) then
3134  write(nulprt,*) subname//trim(string),astr,'inconsistent linp3 var: ',&
3135  recv_varf3(n),':',trim(varf1a(p)),':',varf3a(p)
3136  call oasis_abort(cd_routine=subname//trim(string),cd_message= &
3137  'inconsistent linp3 value: '//trim(varf1a(p)), &
3138  file=__file__,line=__line__)
3139  endif
3140  else
3141  if (varf3a(p) /= recv_varf3(n)) then
3142  write(nulprt,*) subname//trim(string),astr,'inconsistent linp3 var: ',&
3143  recv_varf3(n),':',trim(varf1a(p)),':',varf3a(p)
3144  call oasis_abort(cd_routine=subname//trim(string),cd_message= &
3145  'inconsistent linp3 value: '//trim(varf1a(p)), &
3146  file=__file__,line=__line__)
3147  endif
3148  endif
3149  endif
3150  if (present4) then
3151  if (present(spval4)) then
3152  !--- use something other than spval4 if it exists and check consistency
3153  if (varf4a(p) == spval4) then
3154  varf4a(p) = recv_varf4(n)
3155  elseif (recv_varf4(n) /= spval4 .and. varf4a(p) /= recv_varf4(n)) then
3156  write(nulprt,*) subname//trim(string),astr,'inconsistent linp4 var: ',&
3157  recv_varf4(n),':',trim(varf1a(p)),':',varf4a(p)
3158  call oasis_abort(cd_routine=subname//trim(string),cd_message= &
3159  'inconsistent linp4 value: '//trim(varf1a(p)), &
3160  file=__file__,line=__line__)
3161  endif
3162  else
3163  if (varf4a(p) /= recv_varf4(n)) then
3164  write(nulprt,*) subname//trim(string),astr,'inconsistent linp4 var: ',&
3165  recv_varf4(n),':',trim(varf1a(p)),':',varf4a(p)
3166  call oasis_abort(cd_routine=subname//trim(string),cd_message= &
3167  'inconsistent linp4 value: '//trim(varf1a(p)), &
3168  file=__file__,line=__line__)
3169  endif
3170  endif
3171  endif
3172  endif
3173  enddo
3174  if (.not.found) then
3175  cnt = cnt + 1
3176  if (cnt > size(varf1a)) then
3177  allocate(varf1b(size(varf1a)))
3178  varf1b = varf1a
3179  deallocate(varf1a)
3180  if (oasis_debug >= 15) then
3181  write(nulprt,*) subname//trim(string),' resize varf1a ',size(varf1b),cnt+cntr
3182  call oasis_flush(nulprt)
3183  endif
3184  allocate(varf1a(cnt+cntr))
3185  varf1a(1:size(varf1b)) = varf1b(1:size(varf1b))
3186  deallocate(varf1b)
3187  if (present2) then
3188  allocate(varf2b(size(varf2a)))
3189  varf2b = varf2a
3190  deallocate(varf2a)
3191  if (oasis_debug >= 15) then
3192  write(nulprt,*) subname//trim(string),' resize varf2a ',size(varf2b),cnt+cntr
3193  call oasis_flush(nulprt)
3194  endif
3195  allocate(varf2a(cnt+cntr))
3196  varf2a(1:size(varf2b)) = varf2b(1:size(varf2b))
3197  deallocate(varf2b)
3198  endif
3199  if (present3) then
3200  allocate(varf3b(size(varf3a)))
3201  varf3b = varf3a
3202  deallocate(varf3a)
3203  if (oasis_debug >= 15) then
3204  write(nulprt,*) subname//trim(string),' resize varf3a ',size(varf3b),cnt+cntr
3205  call oasis_flush(nulprt)
3206  endif
3207  allocate(varf3a(cnt+cntr))
3208  varf3a(1:size(varf3b)) = varf3b(1:size(varf3b))
3209  deallocate(varf3b)
3210  endif
3211  if (present4) then
3212  allocate(varf4b(size(varf4a)))
3213  varf4b = varf4a
3214  deallocate(varf4a)
3215  if (oasis_debug >= 15) then
3216  write(nulprt,*) subname//trim(string),' resize varf4a ',size(varf4b),cnt+cntr
3217  call oasis_flush(nulprt)
3218  endif
3219  allocate(varf4a(cnt+cntr))
3220  varf4a(1:size(varf4b)) = varf4b(1:size(varf4b))
3221  deallocate(varf4b)
3222  endif
3223  endif
3224  varf1a(cnt) = recv_varf1(n)
3225  if (present2) varf2a(cnt) = recv_varf2(n)
3226  if (present3) varf3a(cnt) = recv_varf3(n)
3227  if (present4) varf4a(cnt) = recv_varf4(n)
3228  endif
3229  enddo ! cntr
3230  if (local_timers_on) call oasis_timer_stop(trim(string)//'_rl_rootsrch')
3231  if (cntr > 0) then
3232  deallocate(recv_varf1)
3233  if (present2) deallocate(recv_varf2)
3234  if (present3) deallocate(recv_varf3)
3235  if (present4) deallocate(recv_varf4)
3236  endif
3237 
3238  endif ! recvid >= 0
3239 
3240  enddo ! maxloops
3241 
3242  !-------------------------------------------------
3243  !> * Broadcast the list information to all tasks from root
3244  !-------------------------------------------------
3245 
3246  if (local_timers_on) then
3247  call oasis_timer_start(trim(string)//'_rl_bcast_barrier')
3248  if (comm /= mpi_comm_null) &
3249  call mpi_barrier(comm, ierr)
3250  call oasis_timer_stop(trim(string)//'_rl_bcast_barrier')
3251  endif
3252  if (local_timers_on) call oasis_timer_start(trim(string)//'_rl_bcast')
3253  call oasis_mpi_bcast(cnt,comm,subname//trim(string)//' cnt')
3254  cntout = cnt
3255  allocate(lout1(cntout))
3256  if (commrank == 0) then
3257  do n = 1,cntout
3258  lout1(n) = trim(varf1a(n))
3259  enddo
3260  endif
3261  deallocate(varf1a)
3262  call oasis_mpi_bcast(lout1,comm,subname//trim(string)//' lout1')
3263 
3264  if (present2) then
3265  allocate(lout2(cntout))
3266  if (commrank == 0) then
3267  do n = 1,cntout
3268  lout2(n) = trim(varf2a(n))
3269  enddo
3270  endif
3271  deallocate(varf2a)
3272  call oasis_mpi_bcast(lout2,comm,subname//trim(string)//' lout2')
3273  endif
3274 
3275  if (present3) then
3276  allocate(lout3(cntout))
3277  if (commrank == 0) then
3278  do n = 1,cntout
3279  lout3(n) = varf3a(n)
3280  enddo
3281  endif
3282  deallocate(varf3a)
3283  call oasis_mpi_bcast(lout3,comm,subname//trim(string)//' lout3')
3284  endif
3285 
3286  if (present4) then
3287  allocate(lout4(cntout))
3288  if (commrank == 0) then
3289  do n = 1,cntout
3290  lout4(n) = varf4a(n)
3291  enddo
3292  endif
3293  deallocate(varf4a)
3294  call oasis_mpi_bcast(lout4,comm,subname//trim(string)//' lout4')
3295  endif
3296 
3297  !--- document
3298 
3299  if (oasis_debug >= 15) then
3300  do n = 1,cnt
3301  if (present2 .and. present3 .and. present4) then
3302  write(nulprt,*) subname,trim(string),' list: ',n,trim(lout1(n)),' ',trim(lout2(n)),lout3(n),lout4(n)
3303  elseif (present2 .and. present3) then
3304  write(nulprt,*) subname,trim(string),' list: ',n,trim(lout1(n)),' ',trim(lout2(n)),lout3(n)
3305  elseif (present2 .and. present4) then
3306  write(nulprt,*) subname,trim(string),' list: ',n,trim(lout1(n)),' ',trim(lout2(n)),lout4(n)
3307  elseif (present3 .and. present4) then
3308  write(nulprt,*) subname,trim(string),' list: ',n,trim(lout1(n)),' ',lout3(n),lout4(n)
3309  elseif (present2) then
3310  write(nulprt,*) subname,trim(string),' list: ',n,trim(lout1(n)),' ',trim(lout2(n))
3311  elseif (present3) then
3312  write(nulprt,*) subname,trim(string),' list: ',n,trim(lout1(n)),lout3(n)
3313  elseif (present4) then
3314  write(nulprt,*) subname,trim(string),' list: ',n,trim(lout1(n)),lout4(n)
3315  else
3316  write(nulprt,*) subname,trim(string),' list: ',n,trim(lout1(n))
3317  endif
3318  enddo
3319  call oasis_flush(nulprt)
3320  endif
3321  if (local_timers_on) call oasis_timer_stop (trim(string)//'_rl_bcast')
3322 
3323  call oasis_debug_exit(subname)
3324 
3325 END SUBROUTINE oasis_mpi_reducelists
3326 
3327 !===============================================================================
3328 !===============================================================================
3329 
3330 END MODULE mod_oasis_mpi
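Usage sketch (hypothetical; not part of mod_oasis_mpi.F90): one plausible way a caller could use oasis_mpi_reducelists, based on the interface shown in this listing. The call is collective over comm; the per-task input lists are merged into a single list of unique entries on the root task and then broadcast, so every task returns the same cntout and lout1 (a character pointer array allocated inside the routine). The subroutine name, the 64-character string length, and the list entries below are illustrative assumptions.

 subroutine example_reduce_names(comm)
    use mod_oasis_mpi, only: oasis_mpi_reducelists
    implicit none
    integer, intent(in)        :: comm            ! MPI communicator shared by all callers
    character(len=64)          :: mynames(2)      ! local list on this task
    character(len=64), pointer :: allnames(:)     ! allocated inside oasis_mpi_reducelists
    integer :: cnt, n

    nullify(allnames)
    mynames(1) = 'taux'                           ! hypothetical entries
    mynames(2) = 'tauy'

    ! Collective call: merge the local lists and broadcast the reduced list to all tasks.
    call oasis_mpi_reducelists(mynames, comm, cnt, allnames, 'example_reduce_names')

    do n = 1,cnt
       write(*,*) 'reduced list entry ',n,': ',trim(allnames(n))
    enddo

    deallocate(allnames)                          ! caller owns the returned pointer
 end subroutine example_reduce_names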