distribute.F
1#undef D_DEBUG
2#include "cppdefs.h"
4#ifdef DISTRIBUTE
5!
6!git $Id$
7!================================================== Hernan G. Arango ===
8! Copyright (c) 2002-2025 The ROMS Group !
9! Licensed under a MIT/X style license !
10! See License_ROMS.md !
11!=======================================================================
12! !
13! These routines are used for distributed-memory communications !
14! between parallel nodes: !
15! !
16! mp_aggregate2d aggregates 2D tiled data into a 2D global array !
17! mp_aggregate3d aggregates 3D tiled data into a 3D global array !
18! mp_barrier barrier synchronization !
19! mp_bcastf broadcasts floating point variables !
20! mp_bcasti broadcasts integer variables !
21! mp_bcastl broadcasts logical variables !
22! mp_bcasts broadcasts character variables !
23! mp_bcast_struc broadcasts NetCDF IDs of an IO_TYPE structure !
24! mp_boundary exchanges boundary data between tiles !
25! mp_assemblef_1d assembles 1D floating point array from tiles !
26! mp_assemblef_2d assembles 2D floating point array from tiles !
27! mp_assemblef_3d assembles 3D floating point array from tiles !
28! mp_assemblei_1d assembles 1D integer array from tiles !
29! mp_assemblei_2d assembles 2D integer array from tiles !
30! mp_collect_f collects 1D floating point array from tiles !
31! mp_collect_i collects 1D integer array from tiles !
32! mp_dump writes 2D and 3D tiled arrays for debugging !
33! mp_gather2d collects a 2D tiled array for output purposes !
34# ifdef GRID_EXTRACT
35! mp_gather2d_xtr collects a 2D tiled extract array for output !
36# endif
37! mp_gather3d collects a 3D tiled array for output purposes !
38! mp_gather_state collects state vector for unpacking of variables !
39! mp_ncread1d reads in 1D state array from NetCDF file !
40! mp_ncread2d reads in 2D state array from NetCDF file !
41! mp_ncwrite1d writes out 1D state array into NetCDF file !
42! mp_ncwrite2d writes out 2D state array into NetCDF file !
43! mp_reduce global reduction operations !
44! mp_reduce2 global reduction operations (MINLOC, MAXLOC) !
45! mp_scatter2d scatters input data to a 2D tiled array !
46# ifdef GRID_EXTRACT
47! mp_scatter2d_xtr scatters input data to a 2D tiled extract array !
48# endif
49! mp_scatter3d scatters input data to a 3D tiled array !
50! mp_scatter_state scatters global data for packing of state vector !
51! !
52! Notice that the tile halo exchange can be found in "mp_exchange.F" !
53! !
54!=======================================================================
55!
56 USE mod_param
57 USE mod_parallel
58 USE mod_iounits
59 USE mod_scalars
60!
61 USE mp_exchange_mod, ONLY : mp_exchange2d, &
62# ifdef GRID_EXTRACT
63 & mp_exchange2d_xtr, &
64# endif
65 & mp_exchange3d
66!
67 INTERFACE mp_assemble
68 MODULE PROCEDURE mp_assemblef_1d
69 MODULE PROCEDURE mp_assemblef_2d
70 MODULE PROCEDURE mp_assemblef_3d
71 MODULE PROCEDURE mp_assemblei_1d
72 MODULE PROCEDURE mp_assemblei_2d
73 END INTERFACE mp_assemble
74!
75 INTERFACE mp_bcastf
76# ifdef SINGLE_PRECISION
77 MODULE PROCEDURE mp_bcastf_0dp ! double-precision broadcast
78 MODULE PROCEDURE mp_bcastf_1dp ! double-precision broadcast
79 MODULE PROCEDURE mp_bcastf_2dp ! double-precision broadcast
80 MODULE PROCEDURE mp_bcastf_3dp ! double-precision broadcast
81# endif
82 MODULE PROCEDURE mp_bcastf_0d
83 MODULE PROCEDURE mp_bcastf_1d
84 MODULE PROCEDURE mp_bcastf_2d
85 MODULE PROCEDURE mp_bcastf_3d
86 MODULE PROCEDURE mp_bcastf_4d
87 END INTERFACE mp_bcastf
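!
!  Usage sketch (illustrative): the generic name "mp_bcastf" resolves
!  to the rank- and kind-specific procedure from the actual argument.
!  Here "iNLM" is assumed to be the nonlinear-model identifier from
!  "mod_param", and "NewComm" a caller-supplied communicator handle.
!
!      real(r8) :: scalar
!      real(r8) :: matrix(10,20)
!
!      CALL mp_bcastf (ng, iNLM, scalar)           ! mp_bcastf_0d
!      CALL mp_bcastf (ng, iNLM, matrix)           ! mp_bcastf_2d
!      CALL mp_bcastf (ng, iNLM, matrix, NewComm)  ! explicit communicator
!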
88!
89 INTERFACE mp_bcastl
90 MODULE PROCEDURE mp_bcastl_0d
91 MODULE PROCEDURE mp_bcastl_1d
92 MODULE PROCEDURE mp_bcastl_2d
93 END INTERFACE mp_bcastl
94!
95 INTERFACE mp_bcasti
96 MODULE PROCEDURE mp_bcasti_0d
97 MODULE PROCEDURE mp_bcasti_1d
98 MODULE PROCEDURE mp_bcasti_2d
99 END INTERFACE mp_bcasti
100!
101 INTERFACE mp_bcasts
102 MODULE PROCEDURE mp_bcasts_0d
103 MODULE PROCEDURE mp_bcasts_1d
104 MODULE PROCEDURE mp_bcasts_2d
105 MODULE PROCEDURE mp_bcasts_3d
106 END INTERFACE mp_bcasts
107!
108 INTERFACE mp_collect
109 MODULE PROCEDURE mp_collect_f
110 MODULE PROCEDURE mp_collect_i
111 END INTERFACE mp_collect
112!
113 INTERFACE mp_reduce
114 MODULE PROCEDURE mp_reduce_i8 ! integer reduction
115# ifdef SINGLE_PRECISION
116 MODULE PROCEDURE mp_reduce_0dp ! double-precision reduction
117 MODULE PROCEDURE mp_reduce_1dp ! double-precision reduction
118# endif
119 MODULE PROCEDURE mp_reduce_0d
120 MODULE PROCEDURE mp_reduce_1d
121 END INTERFACE mp_reduce
122!
123 CONTAINS
124!
125 SUBROUTINE mp_barrier (ng, model, InpComm)
126!
127!***********************************************************************
128! !
129! This routine blocks the caller until all group members have called !
130! it. !
131! !
132! On Input: !
133! !
134! ng Nested grid number. !
135! model Calling model identifier. !
136! InpComm Communicator handle (integer, OPTIONAL). !
137! !
138!***********************************************************************
139!
140! Imported variable declarations.
141!
142 integer, intent(in) :: ng, model
143
144 integer, intent(in), optional :: InpComm
145!
146! Local variable declarations.
147!
148 integer :: MyCOMM, MyError
149!
150 character (len=*), parameter :: MyFile = &
151 & __FILE__//", mp_barrier"
152
153# ifdef PROFILE
154!
155!-----------------------------------------------------------------------
156! Turn on time clocks.
157!-----------------------------------------------------------------------
158!
159 CALL wclock_on (ng, model, 72, __line__, myfile)
160# endif
161# ifdef MPI
162!
163!-----------------------------------------------------------------------
164! Set distributed-memory communicator handle (context ID).
165!-----------------------------------------------------------------------
166!
167 IF (PRESENT(inpcomm)) THEN
168 mycomm=inpcomm
169 ELSE
170 mycomm=ocn_comm_world
171 END IF
172# endif
173!
174!-----------------------------------------------------------------------
175! Synchronize all distributed-memory nodes in the group.
176!-----------------------------------------------------------------------
177!
178# ifdef MPI
179 CALL mpi_barrier (mycomm, myerror)
180# endif
181# ifdef PROFILE
182!
183!-----------------------------------------------------------------------
184! Turn off time clocks.
185!-----------------------------------------------------------------------
186!
187 CALL wclock_off (ng, model, 72, __line__, myfile)
188# endif
189!
190 RETURN
191 END SUBROUTINE mp_barrier
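!
!  Usage sketch (illustrative): block until every task in the group
!  has reached the barrier, either over the default OCN_COMM_WORLD
!  communicator or over a caller-supplied handle.  "iNLM" is assumed
!  to be the nonlinear-model identifier from "mod_param".
!
!      CALL mp_barrier (ng, iNLM)                  ! default communicator
!      CALL mp_barrier (ng, iNLM, NewComm)         ! explicit handle
!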
192
193# ifdef SINGLE_PRECISION
194!
195 SUBROUTINE mp_bcastf_0dp (ng, model, A, InpComm)
196!
197!***********************************************************************
198! !
199! This routine broadcasts a double-precision scalar variable to all !
200! processors in the communicator. It is called by all the members in !
201! the group. !
202! !
203! On Input: !
204! !
205! ng Nested grid number. !
206! model Calling model identifier. !
207! A Variable to broadcast (real). !
208! InpComm Communicator handle (integer, OPTIONAL). !
209! !
210! On Output: !
211! !
212! A Broadcasted variable. !
213! !
214!***********************************************************************
215!
216! Imported variable declarations.
217!
218 integer, intent(in) :: ng, model
219
220 integer, intent(in), optional :: InpComm
221
222 real(dp), intent(inout) :: A
223!
224! Local variable declarations
225!
226 integer :: Lstr, MyCOMM, MyError, Npts, Serror
227!
228 character (len=MPI_MAX_ERROR_STRING) :: string
229
230 character (len=*), parameter :: MyFile = &
231 & __FILE__//", mp_bcastf_0dp"
232
233# ifdef PROFILE
234!
235!-----------------------------------------------------------------------
236! Turn on time clocks.
237!-----------------------------------------------------------------------
238!
239 CALL wclock_on (ng, model, 64, __line__, myfile)
240# endif
241# ifdef MPI
242!
243!-----------------------------------------------------------------------
244! Set distributed-memory communicator handle (context ID).
245!-----------------------------------------------------------------------
246!
247 IF (PRESENT(inpcomm)) THEN
248 mycomm=inpcomm
249 ELSE
250 mycomm=ocn_comm_world
251 END IF
252# endif
253!
254!-----------------------------------------------------------------------
255! Broadcast requested variable.
256!-----------------------------------------------------------------------
257!
258 npts=1
259# ifdef MPI
260 CALL mpi_bcast (a, npts, mp_double, mymaster, mycomm, myerror)
261 IF (myerror.ne.mpi_success) THEN
262 CALL mpi_error_string (myerror, string, lstr, serror)
263 lstr=len_trim(string)
264 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
265 10 FORMAT (/,' MP_BCASTF_0DP - error during ',a,' call, Task = ', &
266 & i3.3,' Error = ',i3,/,13x,a)
267 exit_flag=2
268 RETURN
269 END IF
270# endif
271# ifdef PROFILE
272!
273!-----------------------------------------------------------------------
274! Turn off time clocks.
275!-----------------------------------------------------------------------
276!
277 CALL wclock_off (ng, model, 64, __line__, myfile)
278# endif
279!
280 RETURN
281 END SUBROUTINE mp_bcastf_0dp
282!
283 SUBROUTINE mp_bcastf_1dp (ng, model, A, InpComm)
284!
285!***********************************************************************
286! !
287! This routine broadcasts a 1D double-precision, non-tiled, array !
288! to all processors in the communicator. It is called by all the !
289! members in the group. !
290! !
291! On Input: !
292! !
293! ng Nested grid number. !
294! model Calling model identifier. !
295! A 1D array to broadcast (real). !
296! InpComm Communicator handle (integer, OPTIONAL). !
297! !
298! On Output: !
299! !
300! A Broadcasted 1D array. !
301! !
302!***********************************************************************
303!
304! Imported variable declarations.
305!
306 integer, intent(in) :: ng, model
307
308 integer, intent(in), optional :: InpComm
309!
310 real(dp), intent(inout) :: A(:)
311!
312! Local variable declarations
313!
314 integer :: Lstr, MyCOMM, MyError, Npts, Serror
315!
316 character (len=MPI_MAX_ERROR_STRING) :: string
317
318 character (len=*), parameter :: MyFile = &
319 & __FILE__//", mp_bcastf_1dp"
320
321# ifdef PROFILE
322!
323!-----------------------------------------------------------------------
324! Turn on time clocks.
325!-----------------------------------------------------------------------
326!
327 CALL wclock_on (ng, model, 64, __line__, myfile)
328# endif
329# ifdef MPI
330!
331!-----------------------------------------------------------------------
332! Set distributed-memory communicator handle (context ID).
333!-----------------------------------------------------------------------
334!
335 IF (PRESENT(inpcomm)) THEN
336 mycomm=inpcomm
337 ELSE
338 mycomm=ocn_comm_world
339 END IF
340# endif
341!
342!-----------------------------------------------------------------------
343! Broadcast requested variable.
344!-----------------------------------------------------------------------
345!
346 npts=ubound(a, dim=1)
347
348# ifdef MPI
349 CALL mpi_bcast (a, npts, mp_double, mymaster, mycomm, myerror)
350 IF (myerror.ne.mpi_success) THEN
351 CALL mpi_error_string (myerror, string, lstr, serror)
352 lstr=len_trim(string)
353 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
354 10 FORMAT (/,' MP_BCASTF_1DP - error during ',a,' call, Task = ', &
355 & i3.3,' Error = ',i3,/,13x,a)
356 exit_flag=2
357 RETURN
358 END IF
359# endif
360# ifdef PROFILE
361!
362!-----------------------------------------------------------------------
363! Turn off time clocks.
364!-----------------------------------------------------------------------
365!
366 CALL wclock_off (ng, model, 64, __line__, myfile)
367# endif
368!
369 RETURN
370 END SUBROUTINE mp_bcastf_1dp
371!
372 SUBROUTINE mp_bcastf_2dp (ng, model, A, InpComm)
373!
374!***********************************************************************
375! !
376! This routine broadcasts a 2D double-precision, non-tiled, array !
377! to all processors in the communicator. It is called by all the !
378! members in the group. !
379! !
380! On Input: !
381! !
382! ng Nested grid number. !
383! model Calling model identifier. !
384! A 2D array to broadcast (real). !
385! InpComm Communicator handle (integer, OPTIONAL). !
386! !
387! On Output: !
388! !
389! A Broadcasted 2D array. !
390! !
391!***********************************************************************
392!
393! Imported variable declarations.
394!
395 integer, intent(in) :: ng, model
396
397 integer, intent(in), optional :: InpComm
398!
399 real(dp), intent(inout) :: A(:,:)
400!
401! Local variable declarations
402!
403 integer :: Lstr, MyCOMM, MyError, Npts, Serror
404
405 integer :: Asize(2)
406!
407 character (len=MPI_MAX_ERROR_STRING) :: string
408
409 character (len=*), parameter :: MyFile = &
410 & __FILE__//", mp_bcastf_2dp"
411
412# ifdef PROFILE
413!
414!-----------------------------------------------------------------------
415! Turn on time clocks.
416!-----------------------------------------------------------------------
417!
418 CALL wclock_on (ng, model, 64, __line__, myfile)
419# endif
420# ifdef MPI
421!
422!-----------------------------------------------------------------------
423! Set distributed-memory communicator handle (context ID).
424!-----------------------------------------------------------------------
425!
426 IF (PRESENT(inpcomm)) THEN
427 mycomm=inpcomm
428 ELSE
429 mycomm=ocn_comm_world
430 END IF
431# endif
432!
433!-----------------------------------------------------------------------
434! Broadcast requested variable.
435!-----------------------------------------------------------------------
436!
437 asize(1)=ubound(a, dim=1)
438 asize(2)=ubound(a, dim=2)
439 npts=asize(1)*asize(2)
440
441# ifdef MPI
442 CALL mpi_bcast (a, npts, mp_double, mymaster, mycomm, myerror)
443 IF (myerror.ne.mpi_success) THEN
444 CALL mpi_error_string (myerror, string, lstr, serror)
445 lstr=len_trim(string)
446 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
447 10 FORMAT (/,' MP_BCASTF_2DP - error during ',a,' call, Task = ', &
448 & i3.3,' Error = ',i3,/,13x,a)
449 exit_flag=2
450 RETURN
451 END IF
452# endif
453# ifdef PROFILE
454!
455!-----------------------------------------------------------------------
456! Turn off time clocks.
457!-----------------------------------------------------------------------
458!
459 CALL wclock_off (ng, model, 64, __line__, myfile)
460# endif
461!
462 RETURN
463 END SUBROUTINE mp_bcastf_2dp
464!
465 SUBROUTINE mp_bcastf_3dp (ng, model, A, InpComm)
466!
467!***********************************************************************
468! !
469! This routine broadcasts a 3D double-precision, non-tiled, array !
470! to all processors in the communicator. It is called by all the !
471! members in the group. !
472! !
473! On Input: !
474! !
475! ng Nested grid number. !
476! model Calling model identifier. !
477! A 3D array to broadcast (real). !
478! InpComm Communicator handle (integer, OPTIONAL). !
479! !
480! On Output: !
481! !
482! A Broadcasted 3D array. !
483! !
484!***********************************************************************
485!
486! Imported variable declarations.
487!
488 integer, intent(in) :: ng, model
489
490 integer, intent(in), optional :: InpComm
491!
492 real(dp), intent(inout) :: A(:,:,:)
493!
494! Local variable declarations
495!
496 integer :: Lstr, MyCOMM, MyError, Npts, Serror
497
498 integer :: Asize(3)
499!
500 character (len=MPI_MAX_ERROR_STRING) :: string
501
502 character (len=*), parameter :: MyFile = &
503 & __FILE__//", mp_bcastf_3dp"
504
505# ifdef PROFILE
506!
507!-----------------------------------------------------------------------
508! Turn on time clocks.
509!-----------------------------------------------------------------------
510!
511 CALL wclock_on (ng, model, 64, __line__, myfile)
512# endif
513# ifdef MPI
514!
515!-----------------------------------------------------------------------
516! Set distributed-memory communicator handle (context ID).
517!-----------------------------------------------------------------------
518!
519 IF (PRESENT(inpcomm)) THEN
520 mycomm=inpcomm
521 ELSE
522 mycomm=ocn_comm_world
523 END IF
524# endif
525!
526!-----------------------------------------------------------------------
527! Broadcast requested variable.
528!-----------------------------------------------------------------------
529!
530 asize(1)=ubound(a, dim=1)
531 asize(2)=ubound(a, dim=2)
532 asize(3)=ubound(a, dim=3)
533 npts=asize(1)*asize(2)*asize(3)
534
535# ifdef MPI
536 CALL mpi_bcast (a, npts, mp_double, mymaster, mycomm, myerror)
537 IF (myerror.ne.mpi_success) THEN
538 CALL mpi_error_string (myerror, string, lstr, serror)
539 lstr=len_trim(string)
540 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
541 10 FORMAT (/,' MP_BCASTF_3DP - error during ',a,' call, Task = ', &
542 & i3.3,' Error = ',i3,/,13x,a)
543 exit_flag=2
544 RETURN
545 END IF
546# endif
547# ifdef PROFILE
548!
549!-----------------------------------------------------------------------
550! Turn off time clocks.
551!-----------------------------------------------------------------------
552!
553 CALL wclock_off (ng, model, 64, __line__, myfile)
554# endif
555!
556 RETURN
557 END SUBROUTINE mp_bcastf_3dp
558# endif
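!
!  The *_dp broadcasts above are compiled only when SINGLE_PRECISION is
!  activated, so that real(dp) quantities keep their 64-bit precision
!  while the default "r8" kind is 32-bit.  A minimal sketch, assuming
!  "dstart" is a real(dp) scalar read by the master task:
!
!      CALL mp_bcastf (ng, iNLM, dstart)           ! mp_bcastf_0dp
!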
559!
560 SUBROUTINE mp_bcastf_0d (ng, model, A, InpComm)
561!
562!***********************************************************************
563! !
564! This routine broadcasts a floating-point scalar variable to all !
565! processors in the communicator. It is called by all the members !
566! in the group. !
567! !
568! On Input: !
569! !
570! ng Nested grid number. !
571! model Calling model identifier. !
572! A Variable to broadcast (real). !
573! InpComm Communicator handle (integer, OPTIONAL). !
574! !
575! On Output: !
576! !
577! A Broadcasted variable. !
578! !
579!***********************************************************************
580!
581! Imported variable declarations.
582!
583 integer, intent(in) :: ng, model
584
585 integer, intent(in), optional :: InpComm
586!
587 real(r8), intent(inout) :: A
588!
589! Local variable declarations
590!
591 integer :: Lstr, MyCOMM, MyError, Npts, Serror
592!
593 character (len=MPI_MAX_ERROR_STRING) :: string
594
595 character (len=*), parameter :: MyFile = &
596 & __FILE__//", mp_bcastf_0d"
597
598# ifdef PROFILE
599!
600!-----------------------------------------------------------------------
601! Turn on time clocks.
602!-----------------------------------------------------------------------
603!
604 CALL wclock_on (ng, model, 64, __line__, myfile)
605# endif
606# ifdef MPI
607!
608!-----------------------------------------------------------------------
609! Set distributed-memory communicator handle (context ID).
610!-----------------------------------------------------------------------
611!
612 IF (PRESENT(inpcomm)) THEN
613 mycomm=inpcomm
614 ELSE
615 mycomm=ocn_comm_world
616 END IF
617# endif
618!
619!-----------------------------------------------------------------------
620! Broadcast requested variable.
621!-----------------------------------------------------------------------
622!
623 npts=1
624# ifdef MPI
625 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
626 IF (myerror.ne.mpi_success) THEN
627 CALL mpi_error_string (myerror, string, lstr, serror)
628 lstr=len_trim(string)
629 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
630 10 FORMAT (/,' MP_BCASTF_0D - error during ',a,' call, Task = ', &
631 & i3.3,' Error = ',i3,/,13x,a)
632 exit_flag=2
633 RETURN
634 END IF
635# endif
636# ifdef PROFILE
637!
638!-----------------------------------------------------------------------
639! Turn off time clocks.
640!-----------------------------------------------------------------------
641!
642 CALL wclock_off (ng, model, 64, __line__, myfile)
643# endif
644!
645 RETURN
646 END SUBROUTINE mp_bcastf_0d
647!
648 SUBROUTINE mp_bcastf_1d (ng, model, A, InpComm)
649!
650!***********************************************************************
651! !
652! This routine broadcasts a 1D floating-point, non-tiled, array !
653! to all processors in the communicator. It is called by all the !
654! members in the group. !
655! !
656! On Input: !
657! !
658! ng Nested grid number. !
659! model Calling model identifier. !
660! A 1D array to broadcast (real). !
661! InpComm Communicator handle (integer, OPTIONAL). !
662! !
663! On Output: !
664! !
665! A Broadcasted 1D array. !
666! !
667!***********************************************************************
668!
669! Imported variable declarations.
670!
671 integer, intent(in) :: ng, model
672
673 integer, intent(in), optional :: InpComm
674!
675 real(r8), intent(inout) :: A(:)
676!
677! Local variable declarations
678!
679 integer :: Lstr, MyCOMM, MyError, Npts, Serror
680!
681 character (len=MPI_MAX_ERROR_STRING) :: string
682
683 character (len=*), parameter :: MyFile = &
684 & __FILE__//", mp_bcastf_1d"
685
686# ifdef PROFILE
687!
688!-----------------------------------------------------------------------
689! Turn on time clocks.
690!-----------------------------------------------------------------------
691!
692 CALL wclock_on (ng, model, 64, __line__, myfile)
693# endif
694# ifdef MPI
695!
696!-----------------------------------------------------------------------
697! Set distributed-memory communicator handle (context ID).
698!-----------------------------------------------------------------------
699!
700 IF (PRESENT(inpcomm)) THEN
701 mycomm=inpcomm
702 ELSE
703 mycomm=ocn_comm_world
704 END IF
705# endif
706!
707!-----------------------------------------------------------------------
708! Broadcast requested variable.
709!-----------------------------------------------------------------------
710!
711 npts=ubound(a, dim=1)
712
713# ifdef MPI
714 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
715 IF (myerror.ne.mpi_success) THEN
716 CALL mpi_error_string (myerror, string, lstr, serror)
717 lstr=len_trim(string)
718 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
719 10 FORMAT (/,' MP_BCASTF_1D - error during ',a,' call, Task = ', &
720 & i3.3,' Error = ',i3,/,13x,a)
721 exit_flag=2
722 RETURN
723 END IF
724# endif
725# ifdef PROFILE
726!
727!-----------------------------------------------------------------------
728! Turn off time clocks.
729!-----------------------------------------------------------------------
730!
731 CALL wclock_off (ng, model, 64, __line__, myfile)
732# endif
733!
734 RETURN
735 END SUBROUTINE mp_bcastf_1d
736!
737 SUBROUTINE mp_bcastf_2d (ng, model, A, InpComm)
738!
739!***********************************************************************
740! !
741! This routine broadcasts a 2D floating-point, non-tiled, array !
742! to all processors in the communicator. It is called by all the !
743! members in the group. !
744! !
745! On Input: !
746! !
747! ng Nested grid number. !
748! model Calling model identifier. !
749! A 2D array to broadcast (real). !
750! InpComm Communicator handle (integer, OPTIONAL). !
751! !
752! On Output: !
753! !
754! A Broadcasted 2D array. !
755! !
756!***********************************************************************
757!
758! Imported variable declarations.
759!
760 integer, intent(in) :: ng, model
761
762 integer, intent(in), optional :: InpComm
763!
764 real(r8), intent(inout) :: A(:,:)
765!
766! Local variable declarations
767!
768 integer :: Lstr, MyCOMM, MyError, Npts, Serror
769
770 integer :: Asize(2)
771!
772 character (len=MPI_MAX_ERROR_STRING) :: string
773
774 character (len=*), parameter :: MyFile = &
775 & __FILE__//", mp_bcastf_2d"
776
777# ifdef PROFILE
778!
779!-----------------------------------------------------------------------
780! Turn on time clocks.
781!-----------------------------------------------------------------------
782!
783 CALL wclock_on (ng, model, 64, __line__, myfile)
784# endif
785# ifdef MPI
786!
787!-----------------------------------------------------------------------
788! Set distributed-memory communicator handle (context ID).
789!-----------------------------------------------------------------------
790!
791 IF (PRESENT(inpcomm)) THEN
792 mycomm=inpcomm
793 ELSE
794 mycomm=ocn_comm_world
795 END IF
796# endif
797!
798!-----------------------------------------------------------------------
799! Broadcast requested variable.
800!-----------------------------------------------------------------------
801!
802 asize(1)=ubound(a, dim=1)
803 asize(2)=ubound(a, dim=2)
804 npts=asize(1)*asize(2)
805
806# ifdef MPI
807 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
808 IF (myerror.ne.mpi_success) THEN
809 CALL mpi_error_string (myerror, string, lstr, serror)
810 lstr=len_trim(string)
811 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
812 10 FORMAT (/,' MP_BCASTF_2D - error during ',a,' call, Task = ', &
813 & i3.3,' Error = ',i3,/,13x,a)
814 exit_flag=2
815 RETURN
816 END IF
817# endif
818# ifdef PROFILE
819!
820!-----------------------------------------------------------------------
821! Turn off time clocks.
822!-----------------------------------------------------------------------
823!
824 CALL wclock_off (ng, model, 64, __line__, myfile)
825# endif
826!
827 RETURN
828 END SUBROUTINE mp_bcastf_2d
829!
830 SUBROUTINE mp_bcastf_3d (ng, model, A, InpComm)
831!
832!***********************************************************************
833! !
834! This routine broadcasts a 3D floating-point, non-tiled, array !
835! to all processors in the communicator. It is called by all the !
836! members in the group. !
837! !
838! On Input: !
839! !
840! ng Nested grid number. !
841! model Calling model identifier. !
842! A 3D array to broadcast (real). !
843! InpComm Communicator handle (integer, OPTIONAL). !
844! !
845! On Output: !
846! !
847! A Broadcasted 3D array. !
848! !
849!***********************************************************************
850!
851! Imported variable declarations.
852!
853 integer, intent(in) :: ng, model
854
855 integer, intent(in), optional :: InpComm
856!
857 real(r8), intent(inout) :: A(:,:,:)
858!
859! Local variable declarations
860!
861 integer :: Lstr, MyCOMM, MyError, Npts, Serror
862
863 integer :: Asize(3)
864!
865 character (len=MPI_MAX_ERROR_STRING) :: string
866
867 character (len=*), parameter :: MyFile = &
868 & __FILE__//", mp_bcastf_3d"
869
870# ifdef PROFILE
871!
872!-----------------------------------------------------------------------
873! Turn on time clocks.
874!-----------------------------------------------------------------------
875!
876 CALL wclock_on (ng, model, 64, __line__, myfile)
877# endif
878# ifdef MPI
879!
880!-----------------------------------------------------------------------
881! Set distributed-memory communicator handle (context ID).
882!-----------------------------------------------------------------------
883!
884 IF (PRESENT(inpcomm)) THEN
885 mycomm=inpcomm
886 ELSE
887 mycomm=ocn_comm_world
888 END IF
889# endif
890!
891!-----------------------------------------------------------------------
892! Broadcast requested variable.
893!-----------------------------------------------------------------------
894!
895 asize(1)=ubound(a, dim=1)
896 asize(2)=ubound(a, dim=2)
897 asize(3)=ubound(a, dim=3)
898 npts=asize(1)*asize(2)*asize(3)
899
900# ifdef MPI
901 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
902 IF (myerror.ne.mpi_success) THEN
903 CALL mpi_error_string (myerror, string, lstr, serror)
904 lstr=len_trim(string)
905 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
906 10 FORMAT (/,' MP_BCASTF_3D - error during ',a,' call, Task = ', &
907 & i3.3,' Error = ',i3,/,13x,a)
908 exit_flag=2
909 RETURN
910 END IF
911# endif
912# ifdef PROFILE
913!
914!-----------------------------------------------------------------------
915! Turn off time clocks.
916!-----------------------------------------------------------------------
917!
918 CALL wclock_off (ng, model, 64, __line__, myfile)
919# endif
920!
921 RETURN
922 END SUBROUTINE mp_bcastf_3d
923!
924 SUBROUTINE mp_bcastf_4d (ng, model, A, InpComm)
925!
926!***********************************************************************
927! !
928! This routine broadcasts a 4D floating-point, non-tiled, array !
929! to all processors in the communicator. It is called by all the !
930! members in the group. !
931! !
932! On Input: !
933! !
934! ng Nested grid number. !
935! model Calling model identifier. !
936! A 4D array to broadcast (real). !
937! !
938! On Output: !
939! !
940! A Broadcasted 4D array. !
941! !
942!***********************************************************************
943!
944! Imported variable declarations.
945!
946 integer, intent(in) :: ng, model
947
948 integer, intent(in), optional :: InpComm
949!
950 real(r8), intent(inout) :: A(:,:,:,:)
951!
952! Local variable declarations
953!
954 integer :: Lstr, MyCOMM, MyError, Npts, Serror
955
956 integer :: Asize(4)
957!
958 character (len=MPI_MAX_ERROR_STRING) :: string
959
960 character (len=*), parameter :: MyFile = &
961 & __FILE__//", mp_bcastf_4d"
962
963# ifdef PROFILE
964!
965!-----------------------------------------------------------------------
966! Turn on time clocks.
967!-----------------------------------------------------------------------
968!
969 CALL wclock_on (ng, model, 64, __line__, myfile)
970# endif
971# ifdef MPI
972!
973!-----------------------------------------------------------------------
974! Set distributed-memory communicator handle (context ID).
975!-----------------------------------------------------------------------
976!
977 IF (PRESENT(inpcomm)) THEN
978 mycomm=inpcomm
979 ELSE
980 mycomm=ocn_comm_world
981 END IF
982# endif
983!
984!-----------------------------------------------------------------------
985! Broadcast requested variable.
986!-----------------------------------------------------------------------
987!
988 asize(1)=ubound(a, dim=1)
989 asize(2)=ubound(a, dim=2)
990 asize(3)=ubound(a, dim=3)
991 asize(4)=ubound(a, dim=4)
992 npts=asize(1)*asize(2)*asize(3)*asize(4)
993
994# ifdef MPI
995 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
996 IF (myerror.ne.mpi_success) THEN
997 CALL mpi_error_string (myerror, string, lstr, serror)
998 lstr=len_trim(string)
999 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1000 10 FORMAT (/,' MP_BCASTF_4D - error during ',a,' call, Task = ', &
1001 & i3.3,' Error = ',i3,/,13x,a)
1002 exit_flag=2
1003 RETURN
1004 END IF
1005# endif
1006# ifdef PROFILE
1007!
1008!-----------------------------------------------------------------------
1009! Turn off time clocks.
1010!-----------------------------------------------------------------------
1011!
1012 CALL wclock_off (ng, model, 64, __line__, myfile)
1013# endif
1014!
1015 RETURN
1016 END SUBROUTINE mp_bcastf_4d
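!
!  In the array broadcasts above, the message length "Npts" is taken
!  from UBOUND of the dummy argument, so every task must pass an array
!  of the same shape; the values come from the master task and are
!  overwritten everywhere else.  A minimal sketch with a hypothetical
!  4D work array:
!
!      real(r8) :: work(Im,Jm,N,NT)                ! same shape on all tasks
!
!      IF (MyRank.eq.MyMaster) THEN
!        work=...                                  ! master fills the array
!      END IF
!      CALL mp_bcastf (ng, iNLM, work)             ! mp_bcastf_4d
!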
1017!
1018 SUBROUTINE mp_bcasti_0d (ng, model, A, InpComm)
1019!
1020!***********************************************************************
1021! !
1022! This routine broadcasts an integer scalar variable to all !
1023! processors in the communicator. It is called by all the !
1024! members in the group. !
1025! !
1026! On Input: !
1027! !
1028! ng Nested grid number. !
1029! model Calling model identifier. !
1030! A Variable to broadcast (integer). !
1031! InpComm Communicator handle (integer, OPTIONAL). !
1032! !
1033! On Output: !
1034! !
1035! A Broadcasted variable. !
1036! !
1037!***********************************************************************
1038!
1039! Imported variable declarations.
1040!
1041 integer, intent(in) :: ng, model
1042
1043 integer, intent(in), optional :: InpComm
1044
1045 integer, intent(inout) :: A
1046!
1047! Local variable declarations
1048!
1049 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1050!
1051 character (len=MPI_MAX_ERROR_STRING) :: string
1052
1053 character (len=*), parameter :: MyFile = &
1054 & __FILE__//", mp_bcasti_0d"
1055
1056# ifdef PROFILE
1057!
1058!-----------------------------------------------------------------------
1059! Turn on time clocks.
1060!-----------------------------------------------------------------------
1061!
1062 IF (lwclock) THEN
1063 CALL wclock_on (ng, model, 64, __line__, myfile)
1064 END IF
1065# endif
1066# ifdef MPI
1067!
1068!-----------------------------------------------------------------------
1069! Set distributed-memory communicator handle (context ID).
1070!-----------------------------------------------------------------------
1071!
1072 IF (PRESENT(inpcomm)) THEN
1073 mycomm=inpcomm
1074 ELSE
1075 mycomm=ocn_comm_world
1076 END IF
1077# endif
1078!
1079!-----------------------------------------------------------------------
1080! Broadcast requested variable.
1081!-----------------------------------------------------------------------
1082!
1083 npts=1
1084# ifdef MPI
1085 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, &
1086 & myerror)
1087 IF (myerror.ne.mpi_success) THEN
1088 CALL mpi_error_string (myerror, string, lstr, serror)
1089 lstr=len_trim(string)
1090 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1091 10 FORMAT (/,' MP_BCASTI_0D - error during ',a,' call, Task = ', &
1092 & i3.3,' Error = ',i3,/,13x,a)
1093 RETURN
1094 END IF
1095# endif
1096# ifdef PROFILE
1097!
1098!-----------------------------------------------------------------------
1099! Turn off time clocks.
1100!-----------------------------------------------------------------------
1101!
1102 IF (lwclock) THEN
1103 CALL wclock_off (ng, model, 64, __line__, myfile)
1104 END IF
1105# endif
1106!
1107 RETURN
1108 END SUBROUTINE mp_bcasti_0d
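!
!  Usage sketch (illustrative): a typical idiom is to broadcast the
!  master's error status so that every task takes the same exit path.
!  Here "exit_flag" and "NoError" are the shared error variables from
!  "mod_scalars".
!
!      IF (MyRank.eq.MyMaster) THEN
!        ...                                       ! master reads input
!      END IF
!      CALL mp_bcasti (ng, iNLM, exit_flag)        ! mp_bcasti_0d
!      IF (exit_flag.ne.NoError) RETURN
!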
1109!
1110 SUBROUTINE mp_bcasti_1d (ng, model, A, InpComm)
1111!
1112!***********************************************************************
1113! !
1114! This routine broadcasts a 1D non-tiled, integer array to all !
1115! processors in the communicator. It is called by all the !
1116! members in the group. !
1117! !
1118! On Input: !
1119! !
1120! ng Nested grid number. !
1121! model Calling model identifier. !
1122! A 1D array to broadcast (integer). !
1123! InpComm Communicator handle (integer, OPTIONAL). !
1124! !
1125! On Output: !
1126! !
1127! A Broadcasted 1D array. !
1128! !
1129!***********************************************************************
1130!
1131! Imported variable declarations.
1132!
1133 integer, intent(in) :: ng, model
1134
1135 integer, intent(in), optional :: InpComm
1136
1137 integer, intent(inout) :: A(:)
1138!
1139! Local variable declarations
1140!
1141 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1142!
1143 character (len=MPI_MAX_ERROR_STRING) :: string
1144
1145 character (len=*), parameter :: MyFile = &
1146 & __FILE__//", mp_bcasti_1d"
1147
1148# ifdef PROFILE
1149!
1150!-----------------------------------------------------------------------
1151! Turn on time clocks.
1152!-----------------------------------------------------------------------
1153!
1154 CALL wclock_on (ng, model, 64, __line__, myfile)
1155# endif
1156# ifdef MPI
1157!
1158!-----------------------------------------------------------------------
1159! Set distributed-memory communicator handle (context ID).
1160!-----------------------------------------------------------------------
1161!
1162 IF (PRESENT(inpcomm)) THEN
1163 mycomm=inpcomm
1164 ELSE
1165 mycomm=ocn_comm_world
1166 END IF
1167# endif
1168!
1169!-----------------------------------------------------------------------
1170! Broadcast requested variable.
1171!-----------------------------------------------------------------------
1172!
1173 npts=ubound(a, dim=1)
1174
1175# ifdef MPI
1176 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
1177 IF (myerror.ne.mpi_success) THEN
1178 CALL mpi_error_string (myerror, string, lstr, serror)
1179 lstr=len_trim(string)
1180 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1181 10 FORMAT (/,' MP_BCASTI_1D - error during ',a,' call, Task = ', &
1182 & i3.3,' Error = ',i3,/,13x,a)
1183 exit_flag=2
1184 RETURN
1185 END IF
1186# endif
1187# ifdef PROFILE
1188!
1189!-----------------------------------------------------------------------
1190! Turn off time clocks.
1191!-----------------------------------------------------------------------
1192!
1193 CALL wclock_off (ng, model, 64, __line__, myfile)
1194# endif
1195!
1196 RETURN
1197 END SUBROUTINE mp_bcasti_1d
1198!
1199 SUBROUTINE mp_bcasti_2d (ng, model, A, InpComm)
1200!
1201!***********************************************************************
1202! !
1203! This routine broadcasts a 2D non-tiled, integer array to all !
1204! processors in the communicator. It is called by all the !
1205! members in the group. !
1206! !
1207! On Input: !
1208! !
1209! ng Nested grid number. !
1210! model Calling model identifier. !
1211! A 2D array to broadcast (integer). !
1212! InpComm Communicator handle (integer, OPTIONAL). !
1213! !
1214! On Output: !
1215! !
1216! A Broadcasted 2D array. !
1217! !
1218!***********************************************************************
1219!
1220! Imported variable declarations.
1221!
1222 integer, intent(in) :: ng, model
1223
1224 integer, intent(in), optional :: InpComm
1225
1226 integer, intent(inout) :: A(:,:)
1227!
1228! Local variable declarations
1229!
1230 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1231 integer :: Asize(2)
1232!
1233 character (len=MPI_MAX_ERROR_STRING) :: string
1234
1235 character (len=*), parameter :: MyFile = &
1236 & __FILE__//", mp_bcasti_2d"
1237
1238# ifdef PROFILE
1239!
1240!-----------------------------------------------------------------------
1241! Turn on time clocks.
1242!-----------------------------------------------------------------------
1243!
1244 CALL wclock_on (ng, model, 64, __line__, myfile)
1245# endif
1246# ifdef MPI
1247!
1248!-----------------------------------------------------------------------
1249! Set distributed-memory communicator handle (context ID).
1250!-----------------------------------------------------------------------
1251!
1252 IF (PRESENT(inpcomm)) THEN
1253 mycomm=inpcomm
1254 ELSE
1255 mycomm=ocn_comm_world
1256 END IF
1257# endif
1258!
1259!-----------------------------------------------------------------------
1260! Broadcast requested variable.
1261!-----------------------------------------------------------------------
1262!
1263 asize(1)=ubound(a, dim=1)
1264 asize(2)=ubound(a, dim=2)
1265 npts=asize(1)*asize(2)
1266
1267# ifdef MPI
1268 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
1269 IF (myerror.ne.mpi_success) THEN
1270 CALL mpi_error_string (myerror, string, lstr, serror)
1271 lstr=len_trim(string)
1272 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1273 10 FORMAT (/,' MP_BCASTI_2D - error during ',a,' call, Task = ', &
1274 & i3.3,' Error = ',i3,/,13x,a)
1275 exit_flag=2
1276 RETURN
1277 END IF
1278# endif
1279# ifdef PROFILE
1280!
1281!-----------------------------------------------------------------------
1282! Turn off time clocks.
1283!-----------------------------------------------------------------------
1284!
1285 CALL wclock_off (ng, model, 64, __line__, myfile)
1286# endif
1287!
1288 RETURN
1289 END SUBROUTINE mp_bcasti_2d
1290!
1291 SUBROUTINE mp_bcastl_0d (ng, model, A, InpComm)
1292!
1293!***********************************************************************
1294! !
1295! This routine broadcasts a logical scalar variable to all !
1296! processors in the communicator. It is called by all the !
1297! members in the group. !
1298! !
1299! On Input: !
1300! !
1301! ng Nested grid number. !
1302! model Calling model identifier. !
1303! A Variable to broadcast (logical). !
1304! InpComm Communicator handle (integer, OPTIONAL). !
1305! !
1306! On Output: !
1307! !
1308! A Broadcasted variable. !
1309! !
1310!***********************************************************************
1311!
1312! Imported variable declarations.
1313!
1314 integer, intent(in) :: ng, model
1315
1316 integer, intent(in), optional :: InpComm
1317!
1318 logical, intent(inout) :: A
1319!
1320! Local variable declarations
1321!
1322 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1323!
1324 character (len=MPI_MAX_ERROR_STRING) :: string
1325
1326 character (len=*), parameter :: MyFile = &
1327 & __FILE__//", mp_bcastl_0d"
1328
1329# ifdef PROFILE
1330!
1331!-----------------------------------------------------------------------
1332! Turn on time clocks.
1333!-----------------------------------------------------------------------
1334!
1335 CALL wclock_on (ng, model, 64, __line__, myfile)
1336# endif
1337# ifdef MPI
1338!
1339!-----------------------------------------------------------------------
1340! Set distributed-memory communicator handle (context ID).
1341!-----------------------------------------------------------------------
1342!
1343 IF (PRESENT(inpcomm)) THEN
1344 mycomm=inpcomm
1345 ELSE
1346 mycomm=ocn_comm_world
1347 END IF
1348# endif
1349!
1350!-----------------------------------------------------------------------
1351! Broadcast requested variable.
1352!-----------------------------------------------------------------------
1353!
1354 npts=1
1355# ifdef MPI
1356 CALL mpi_bcast (a, npts, mpi_logical, mymaster, mycomm, myerror)
1357 IF (myerror.ne.mpi_success) THEN
1358 CALL mpi_error_string (myerror, string, lstr, serror)
1359 lstr=len_trim(string)
1360 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1361 10 FORMAT (/,' MP_BCASTL_0D - error during ',a,' call, Task = ', &
1362 & i3.3,' Error = ',i3,/,13x,a)
1363 exit_flag=2
1364 RETURN
1365 END IF
1366# endif
1367# ifdef PROFILE
1368!
1369!-----------------------------------------------------------------------
1370! Turn off time clocks.
1371!-----------------------------------------------------------------------
1372!
1373 CALL wclock_off (ng, model, 64, __line__, myfile)
1374# endif
1375!
1376 RETURN
1377 END SUBROUTINE mp_bcastl_0d
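!
!  Usage sketch (illustrative): logical switches set by the master task
!  can be propagated with the generic "mp_bcastl".  "Lswitch" below is
!  a hypothetical flag.
!
!      logical :: Lswitch
!
!      IF (MyRank.eq.MyMaster) Lswitch=.TRUE.
!      CALL mp_bcastl (ng, iNLM, Lswitch)          ! mp_bcastl_0d
!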
1378!
1379 SUBROUTINE mp_bcastl_1d (ng, model, A, InpComm)
1380!
1381!***********************************************************************
1382! !
1383! This routine broadcasts a 1D nontiled, logical array to all !
1384! processors in the communicator. It is called by all the !
1385! members in the group. !
1386! !
1387! On Input: !
1388! !
1389! ng Nested grid number. !
1390! model Calling model identifier. !
1391! A 1D array to broadcast (logical). !
1392! InpComm Communicator handle (integer, OPTIONAL). !
1393! !
1394! On Output: !
1395! !
1396! A Broadcasted 1D array. !
1397! !
1398!***********************************************************************
1399!
1400! Imported variable declarations.
1401!
1402 integer, intent(in) :: ng, model
1403
1404 integer, intent(in), optional :: InpComm
1405!
1406 logical, intent(inout) :: A(:)
1407!
1408! Local variable declarations
1409!
1410 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1411!
1412 character (len=MPI_MAX_ERROR_STRING) :: string
1413
1414 character (len=*), parameter :: MyFile = &
1415 & __FILE__//", mp_bcastl_1d"
1416
1417# ifdef PROFILE
1418!
1419!-----------------------------------------------------------------------
1420! Turn on time clocks.
1421!-----------------------------------------------------------------------
1422!
1423 CALL wclock_on (ng, model, 64, __line__, myfile)
1424# endif
1425# ifdef MPI
1426!
1427!-----------------------------------------------------------------------
1428! Set distributed-memory communicator handle (context ID).
1429!-----------------------------------------------------------------------
1430!
1431 IF (PRESENT(inpcomm)) THEN
1432 mycomm=inpcomm
1433 ELSE
1434 mycomm=ocn_comm_world
1435 END IF
1436# endif
1437!
1438!-----------------------------------------------------------------------
1439! Broadcast requested variable.
1440!-----------------------------------------------------------------------
1441!
1442 npts=ubound(a, dim=1)
1443
1444# ifdef MPI
1445 CALL mpi_bcast (a, npts, mpi_logical, mymaster, mycomm, myerror)
1446 IF (myerror.ne.mpi_success) THEN
1447 CALL mpi_error_string (myerror, string, lstr, serror)
1448 lstr=len_trim(string)
1449 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1450 10 FORMAT (/,' MP_BCASTL_1D - error during ',a,' call, Task = ', &
1451 & i3.3,' Error = ',i3,/,13x,a)
1452 exit_flag=2
1453 RETURN
1454 END IF
1455# endif
1456# ifdef PROFILE
1457!
1458!-----------------------------------------------------------------------
1459! Turn off time clocks.
1460!-----------------------------------------------------------------------
1461!
1462 CALL wclock_off (ng, model, 64, __line__, myfile)
1463# endif
1464!
1465 RETURN
1466 END SUBROUTINE mp_bcastl_1d
1467!
1468 SUBROUTINE mp_bcastl_2d (ng, model, A, InpComm)
1469!
1470!***********************************************************************
1471! !
1472! This routine broadcasts a 2D non-tiled, logical array to all !
1473! processors in the communicator. It is called by all the !
1474! members in the group. !
1475! !
1476! On Input: !
1477! !
1478! ng Nested grid number. !
1479! model Calling model identifier. !
1480! A 2D array to broadcast (logical). !
1481! InpComm Communicator handle (integer, OPTIONAL). !
1482! !
1483! On Output: !
1484! !
1485! A Broadcasted 2D array. !
1486! !
1487!***********************************************************************
1488!
1489! Imported variable declarations.
1490!
1491 integer, intent(in) :: ng, model
1492
1493 integer, intent(in), optional :: InpComm
1494!
1495 logical, intent(inout) :: A(:,:)
1496!
1497! Local variable declarations
1498!
1499 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1500 integer :: Asize(2)
1501!
1502 character (len=MPI_MAX_ERROR_STRING) :: string
1503
1504 character (len=*), parameter :: MyFile = &
1505 & __FILE__//", mp_bcastl_2d"
1506
1507# ifdef PROFILE
1508!
1509!-----------------------------------------------------------------------
1510! Turn on time clocks.
1511!-----------------------------------------------------------------------
1512!
1513 CALL wclock_on (ng, model, 64, __line__, myfile)
1514# endif
1515# ifdef MPI
1516!
1517!-----------------------------------------------------------------------
1518! Set distributed-memory communicator handle (context ID).
1519!-----------------------------------------------------------------------
1520!
1521 IF (PRESENT(inpcomm)) THEN
1522 mycomm=inpcomm
1523 ELSE
1524 mycomm=ocn_comm_world
1525 END IF
1526# endif
1527!
1528!-----------------------------------------------------------------------
1529! Broadcast requested variable.
1530!-----------------------------------------------------------------------
1531!
1532 asize(1)=ubound(a, dim=1)
1533 asize(2)=ubound(a, dim=2)
1534 npts=asize(1)*asize(2)
1535
1536# ifdef MPI
1537 CALL mpi_bcast (a, npts, mpi_logical, mymaster, mycomm, myerror)
1538 IF (myerror.ne.mpi_success) THEN
1539 CALL mpi_error_string (myerror, string, lstr, serror)
1540 lstr=len_trim(string)
1541 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1542 10 FORMAT (/,' MP_BCASTL_2D - error during ',a,' call, Task = ', &
1543 & i3.3,' Error = ',i3,/,13x,a)
1544 exit_flag=2
1545 RETURN
1546 END IF
1547# endif
1548# ifdef PROFILE
1549!
1550!-----------------------------------------------------------------------
1551! Turn off time clocks.
1552!-----------------------------------------------------------------------
1553!
1554 CALL wclock_off (ng, model, 64, __line__, myfile)
1555# endif
1556!
1557 RETURN
1558 END SUBROUTINE mp_bcastl_2d
1559!
1560 SUBROUTINE mp_bcasts_0d (ng, model, A, InpComm)
1561!
1562!***********************************************************************
1563! !
1564! This routine broadcasts a string scalar variable to all processors !
1565! in the communicator. It is called by all the members in the group. !
1566! !
1567! On Input: !
1568! !
1569! ng Nested grid number. !
1570! model Calling model identifier. !
1571! A Variable to broadcast (string). !
1572! InpComm Communicator handle (integer, OPTIONAL). !
1573! !
1574! On Output: !
1575! !
1576! A Broadcasted variable. !
1577! !
1578!***********************************************************************
1579!
1580! Imported variable declarations.
1581!
1582 integer, intent(in) :: ng, model
1583
1584 integer, intent(in), optional :: InpComm
1585!
1586 character (len=*), intent(inout) :: A
1587!
1588! Local variable declarations
1589!
1590 integer :: Lstr, MyCOMM, MyError, Nchars, Serror
1591!
1592 character (len=MPI_MAX_ERROR_STRING) :: string
1593
1594 character (len=*), parameter :: MyFile = &
1595 & __FILE__//", mp_bcasts_0d"
1596
1597# ifdef PROFILE
1598!
1599!-----------------------------------------------------------------------
1600! Turn on time clocks.
1601!-----------------------------------------------------------------------
1602!
1603 IF (lwclock) THEN
1604 CALL wclock_on (ng, model, 64, __line__, myfile)
1605 END IF
1606# endif
1607# ifdef MPI
1608!
1609!-----------------------------------------------------------------------
1610! Set distributed-memory communicator handle (context ID).
1611!-----------------------------------------------------------------------
1612!
1613 IF (PRESENT(inpcomm)) THEN
1614 mycomm=inpcomm
1615 ELSE
1616 mycomm=ocn_comm_world
1617 END IF
1618# endif
1619!
1620!-----------------------------------------------------------------------
1621! Broadcast requested variable.
1622!-----------------------------------------------------------------------
1623!
1624 nchars=len(a)
1625# ifdef MPI
1626 CALL mpi_bcast (a, nchars, mpi_byte, mymaster, mycomm, myerror)
1627 IF (myerror.ne.mpi_success) THEN
1628 CALL mpi_error_string (myerror, string, lstr, serror)
1629 lstr=len_trim(string)
1630 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1631 10 FORMAT (/,' MP_BCASTS_0D - error during ',a,' call, Task = ', &
1632 & i3.3,' Error = ',i3,/,13x,a)
1633 exit_flag=2
1634 RETURN
1635 END IF
1636# endif
1637# ifdef PROFILE
1638!
1639!-----------------------------------------------------------------------
1640! Turn off time clocks.
1641!-----------------------------------------------------------------------
1642!
1643 IF (lwclock) THEN
1644 CALL wclock_off (ng, model, 64, __line__, myfile)
1645 END IF
1646# endif
1647!
1648 RETURN
1649 END SUBROUTINE mp_bcasts_0d
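!
!  Usage sketch (illustrative): strings are broadcast as "mpi_byte"
!  with a count of LEN(A), so the character variable should have the
!  same declared length on every task (trailing blanks are sent too).
!  "ncname" below is a hypothetical file name.
!
!      character (len=256) :: ncname
!
!      IF (MyRank.eq.MyMaster) ncname='roms_grd.nc'
!      CALL mp_bcasts (ng, iNLM, ncname)           ! mp_bcasts_0d
!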
1650!
1651 SUBROUTINE mp_bcasts_1d (ng, model, A, InpComm)
1652!
1653!***********************************************************************
1654! !
1655! This routine broadcasts a 1D string array to all processors in the !
1656! communicator. It is called by all the members in the group. !
1657! !
1658! On Input: !
1659! !
1660! ng Nested grid number. !
1661! model Calling model identifier. !
1662! A 1D array to broadcast (string). !
1663! InpComm Communicator handle (integer, OPTIONAL). !
1664! !
1665! On Output: !
1666! !
1667! A Broadcasted 1D array. !
1668! !
1669!***********************************************************************
1670!
1671! Imported variable declarations.
1672!
1673 integer, intent(in) :: ng, model
1674
1675 integer, intent(in), optional :: InpComm
1676!
1677 character (len=*), intent(inout) :: A(:)
1678!
1679! Local variable declarations
1680!
1681 integer :: Asize, Lstr, MyCOMM, MyError, Nchars, Serror
1682!
1683 character (len=MPI_MAX_ERROR_STRING) :: string
1684
1685 character (len=*), parameter :: MyFile = &
1686 & __FILE__//", mp_bcasts_1d"
1687
1688# ifdef PROFILE
1689!
1690!-----------------------------------------------------------------------
1691! Turn on time clocks.
1692!-----------------------------------------------------------------------
1693!
1694 CALL wclock_on (ng, model, 64, __line__, myfile)
1695# endif
1696# ifdef MPI
1697!
1698!-----------------------------------------------------------------------
1699! Set distributed-memory communicator handle (context ID).
1700!-----------------------------------------------------------------------
1701!
1702 IF (PRESENT(inpcomm)) THEN
1703 mycomm=inpcomm
1704 ELSE
1705 mycomm=ocn_comm_world
1706 END IF
1707# endif
1708!
1709!-----------------------------------------------------------------------
1710! Broadcast requested variable.
1711!-----------------------------------------------------------------------
1712!
1713 asize=ubound(a, dim=1)
1714 nchars=len(a(1))*asize
1715
1716# ifdef MPI
1717 CALL mpi_bcast (a, nchars, mpi_byte, mymaster, mycomm, myerror)
1718 IF (myerror.ne.mpi_success) THEN
1719 CALL mpi_error_string (myerror, string, lstr, serror)
1720 lstr=len_trim(string)
1721 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1722 10 FORMAT (/,' MP_BCASTS_1D - error during ',a,' call, Task = ', &
1723 & i3.3,' Error = ',i3,/,13x,a)
1724 exit_flag=2
1725 RETURN
1726 END IF
1727# endif
1728# ifdef PROFILE
1729!
1730!-----------------------------------------------------------------------
1731! Turn off time clocks.
1732!-----------------------------------------------------------------------
1733!
1734 CALL wclock_off (ng, model, 64, __line__, myfile)
1735# endif
1736!
1737 RETURN
1738 END SUBROUTINE mp_bcasts_1d
1739!
1740 SUBROUTINE mp_bcasts_2d (ng, model, A, InpComm)
1741!
1742!***********************************************************************
1743! !
1744! This routine broadcasts a 2D string array to all processors in the !
1745! communicator. It is called by all the members in the group. !
1746! !
1747! On Input: !
1748! !
1749! ng Nested grid number. !
1750! model Calling model identifier. !
1751! A 2D array to broadcast (string). !
1752! InpComm Communicator handle (integer, OPTIONAL). !
1753! !
1754! On Output: !
1755! !
1756! A Broadcasted 2D array. !
1757! !
1758!***********************************************************************
1759!
1760! Imported variable declarations.
1761!
1762 integer, intent(in) :: ng, model
1763
1764 integer, intent(in), optional :: InpComm
1765!
1766 character (len=*), intent(inout) :: A(:,:)
1767!
1768! Local variable declarations
1769!
1770 integer :: Lstr, MyCOMM, MyError, Nchars, Serror
1771 integer :: Asize(2)
1772!
1773 character (len=MPI_MAX_ERROR_STRING) :: string
1774
1775 character (len=*), parameter :: MyFile = &
1776 & __FILE__//", mp_bcasts_2d"
1777
1778# ifdef PROFILE
1779!
1780!-----------------------------------------------------------------------
1781! Turn on time clocks.
1782!-----------------------------------------------------------------------
1783!
1784 CALL wclock_on (ng, model, 64, __line__, myfile)
1785# endif
1786# ifdef MPI
1787!
1788!-----------------------------------------------------------------------
1789! Set distributed-memory communicator handle (context ID).
1790!-----------------------------------------------------------------------
1791!
1792 IF (PRESENT(inpcomm)) THEN
1793 mycomm=inpcomm
1794 ELSE
1795 mycomm=ocn_comm_world
1796 END IF
1797# endif
1798!
1799!-----------------------------------------------------------------------
1800! Broadcast requested variable.
1801!-----------------------------------------------------------------------
1802!
1803 asize(1)=ubound(a, dim=1)
1804 asize(2)=ubound(a, dim=2)
1805 nchars=len(a(1,1))*asize(1)*asize(2)
1806
1807# ifdef MPI
1808 CALL mpi_bcast (a, nchars, mpi_byte, mymaster, mycomm, myerror)
1809 IF (myerror.ne.mpi_success) THEN
1810 CALL mpi_error_string (myerror, string, lstr, serror)
1811 lstr=len_trim(string)
1812 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1813 10 FORMAT (/,' MP_BCASTS_2D - error during ',a,' call, Task = ', &
1814 & i3.3,' Error = ',i3,/,13x,a)
1815 exit_flag=2
1816 RETURN
1817 END IF
1818# endif
1819# ifdef PROFILE
1820!
1821!-----------------------------------------------------------------------
1822! Turn off time clocks.
1823!-----------------------------------------------------------------------
1824!
1825 CALL wclock_off (ng, model, 64, __line__, myfile)
1826# endif
1827!
1828 RETURN
1829 END SUBROUTINE mp_bcasts_2d
1830!
1831 SUBROUTINE mp_bcasts_3d (ng, model, A, InpComm)
1832!
1833!***********************************************************************
1834! !
1835! This routine broadcasts a 3D string array to all processors in the !
1836! communicator. It is called by all the members in the group. !
1837! !
1838! On Input: !
1839! !
1840! ng Nested grid number. !
1841! model Calling model identifier. !
1842! A 3D array to broadcast (string). !
1843! InpComm Communicator handle (integer, OPTIONAL). !
1844! !
1845! On Output: !
1846! !
1847! A Broadcasted 3D array. !
1848! !
1849!***********************************************************************
1850!
1851! Imported variable declarations.
1852!
1853 integer, intent(in) :: ng, model
1854
1855 integer, intent(in), optional :: InpComm
1856!
1857 character (len=*), intent(inout) :: A(:,:,:)
1858!
1859! Local variable declarations
1860!
1861 integer :: Lstr, MyCOMM, MyError, Nchars, Serror
1862 integer :: Asize(3)
1863!
1864 character (len=MPI_MAX_ERROR_STRING) :: string
1865
1866 character (len=*), parameter :: MyFile = &
1867 & __FILE__//", mp_bcasts_3d"
1868
1869# ifdef PROFILE
1870!
1871!-----------------------------------------------------------------------
1872! Turn on time clocks.
1873!-----------------------------------------------------------------------
1874!
1875 CALL wclock_on (ng, model, 64, __line__, myfile)
1876# endif
1877# ifdef MPI
1878!
1879!-----------------------------------------------------------------------
1880! Set distributed-memory communicator handle (context ID).
1881!-----------------------------------------------------------------------
1882!
1883 IF (PRESENT(inpcomm)) THEN
1884 mycomm=inpcomm
1885 ELSE
1886 mycomm=ocn_comm_world
1887 END IF
1888# endif
1889!
1890!-----------------------------------------------------------------------
1891! Broadcast requested variable.
1892!-----------------------------------------------------------------------
1893!
1894 asize(1)=ubound(a, dim=1)
1895 asize(2)=ubound(a, dim=2)
1896 asize(3)=ubound(a, dim=3)
1897 nchars=len(a(1,1,1))*asize(1)*asize(2)*asize(3)
1898
1899# ifdef MPI
1900 CALL mpi_bcast (a, nchars, mpi_byte, mymaster, mycomm, myerror)
1901 IF (myerror.ne.mpi_success) THEN
1902 CALL mpi_error_string (myerror, string, lstr, serror)
1903 lstr=len_trim(string)
1904 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1905 10 FORMAT (/,' MP_BCASTS_3D - error during ',a,' call, Task = ', &
1906 & i3.3,' Error = ',i3,/,13x,a)
1907 exit_flag=2
1908 RETURN
1909 END IF
1910# endif
1911# ifdef PROFILE
1912!
1913!-----------------------------------------------------------------------
1914! Turn off time clocks.
1915!-----------------------------------------------------------------------
1916!
1917 CALL wclock_off (ng, model, 64, __line__, myfile)
1918# endif
1919!
1920 RETURN
1921 END SUBROUTINE mp_bcasts_3d
1922!
1923 SUBROUTINE mp_bcast_struc (ng, model, S, InpComm)
1924!
1925!***********************************************************************
1926! !
1927! This routine broadcasts the NetCDF IDs of a T_IO structure to !
1928! all processors in the communicator. It is called by all the !
1929! members in the group. !
1930! !
1931! On Input: !
1932! !
1933! ng Nested grid number. !
1934! model Calling model identifier. !
1935! S ROMS I/O structure, TYPE(T_IO). !
1936! InpComm Communicator handle (integer, OPTIONAL). !
1937! !
1938! On Output: !
1939! !
1940! S Broadcasted ROMS I/O structure. !
1941! !
1942!***********************************************************************
1943!
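! A usage sketch (illustrative; "HIS" stands for any TYPE(T_IO)
! structure array, such as those declared in "mod_iounits"):
!
!   IF (Master) THEN
!     ...                                  ! open file, fill HIS(ng)%ncid,
!   END IF                                 ! HIS(ng)%Vid, HIS(ng)%Tid, etc.
!   CALL mp_bcast_struc (ng, iNLM, HIS)    ! replicate the IDs on all ranks
!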
1944! Imported variable declarations.
1945!
1946 integer, intent(in) :: ng, model
1947
1948 integer, intent(in), optional :: InpComm
1949!
1950 TYPE(t_io), intent(inout) :: S(:)
1951!
1952! Local variable declarations
1953!
1954 integer :: Lstr, MyCOMM, MyError, Nchars, Npts, Serror
1955 integer :: ibuffer(5)
1956!
1957 character (len=MPI_MAX_ERROR_STRING) :: string
1958
1959 character (len=*), parameter :: MyFile = &
1960 & __FILE__//", mp_bcast_struc"
1961
1962# ifdef PROFILE
1963!
1964!-----------------------------------------------------------------------
1965! Turn on time clocks.
1966!-----------------------------------------------------------------------
1967!
1968 CALL wclock_on (ng, model, 64, __line__, myfile)
1969# endif
1970# ifdef MPI
1971!
1972!-----------------------------------------------------------------------
1973! Set distributed-memory communicator handle (context ID).
1974!-----------------------------------------------------------------------
1975!
1976 IF (PRESENT(inpcomm)) THEN
1977 mycomm=inpcomm
1978 ELSE
1979 mycomm=ocn_comm_world
1980 END IF
1981# endif
1982!
1983!-----------------------------------------------------------------------
1984! Broadcast variables in structure.
1985!-----------------------------------------------------------------------
1986
1987# ifdef MPI
1988!
1989! Structure scalar integer variables.
1990!
1991 ibuffer(1)=s(ng)%Nfiles
1992 ibuffer(2)=s(ng)%Fcount
1993 ibuffer(3)=s(ng)%load
1994 ibuffer(4)=s(ng)%Rindex
1995 ibuffer(5)=s(ng)%ncid
1996!
1997 npts=5
1998 CALL mpi_bcast (ibuffer, npts, mpi_integer, mymaster, &
1999 & mycomm, myerror)
2000 IF (myerror.ne.mpi_success) THEN
2001 CALL mpi_error_string (myerror, string, lstr, serror)
2002 lstr=len_trim(string)
2003 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2004 10 FORMAT (/,' MP_BCAST_STRUC - error during ',a,' call, Task = ', &
2005 & i3.3,' Error = ',i3,/,13x,a)
2006 exit_flag=2
2007 RETURN
2008 ELSE
2009 s(ng)%Nfiles=ibuffer(1)
2010 s(ng)%Fcount=ibuffer(2)
2011 s(ng)%load =ibuffer(3)
2012 s(ng)%Rindex=ibuffer(4)
2013 s(ng)%ncid =ibuffer(5)
2014 END IF
2015!
2016! Variables IDs.
2017!
2018 npts=ubound(s(ng)%Vid, dim=1)
2019 CALL mpi_bcast (s(ng)%Vid, npts, mpi_integer, mymaster, &
2020 & mycomm, myerror)
2021 IF (myerror.ne.mpi_success) THEN
2022 CALL mpi_error_string (myerror, string, lstr, serror)
2023 lstr=len_trim(string)
2024 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2025 exit_flag=2
2026 RETURN
2027 END IF
2028
2029# ifdef SOLVE3D
2030!
2031! Tracer variables IDs.
2032!
2033 npts=ubound(s(ng)%Tid, dim=1)
2034 CALL mpi_bcast (s(ng)%Tid, npts, mpi_integer, mymaster, &
2035 & mycomm, myerror)
2036 IF (myerror.ne.mpi_success) THEN
2037 CALL mpi_error_string (myerror, string, lstr, serror)
2038 lstr=len_trim(string)
2039 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2040 exit_flag=2
2041 RETURN
2042 END IF
2043# endif
2044!
2045! Structure Filenames.
2046!
2047 nchars=len(s(ng)%head)
2048 CALL mpi_bcast (s(ng)%head, nchars, mpi_byte, mymaster, &
2049 & mycomm, myerror)
2050 IF (myerror.ne.mpi_success) THEN
2051 CALL mpi_error_string (myerror, string, lstr, serror)
2052 lstr=len_trim(string)
2053 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2054 exit_flag=2
2055 RETURN
2056 END IF
2057!
2058 nchars=len(s(ng)%base)
2059 CALL mpi_bcast (s(ng)%base, nchars, mpi_byte, mymaster, &
2060 & mycomm, myerror)
2061 IF (myerror.ne.mpi_success) THEN
2062 CALL mpi_error_string (myerror, string, lstr, serror)
2063 lstr=len_trim(string)
2064 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2065 exit_flag=2
2066 RETURN
2067 END IF
2068!
2069 nchars=len(s(ng)%name)
2070 CALL mpi_bcast (s(ng)%name, nchars, mpi_byte, mymaster, &
2071 & mycomm, myerror)
2072 IF (myerror.ne.mpi_success) THEN
2073 CALL mpi_error_string (myerror, string, lstr, serror)
2074 lstr=len_trim(string)
2075 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2076 exit_flag=2
2077 RETURN
2078 END IF
2079!
2080 nchars=len(s(ng)%files(1))*s(ng)%Nfiles
2081 CALL mpi_bcast (s(ng)%files, nchars, mpi_byte, mymaster, &
2082 & mycomm, myerror)
2083 IF (myerror.ne.mpi_success) THEN
2084 CALL mpi_error_string (myerror, string, lstr, serror)
2085 lstr=len_trim(string)
2086 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2087 exit_flag=2
2088 RETURN
2089 END IF
2090# endif
2091# ifdef PROFILE
2092!
2093!-----------------------------------------------------------------------
2094! Turn off time clocks.
2095!-----------------------------------------------------------------------
2096!
2097 CALL wclock_off (ng, model, 64, __line__, myfile)
2098# endif
2099!
2100 RETURN
2101 END SUBROUTINE mp_bcast_struc
2102!
2103 SUBROUTINE mp_boundary (ng, model, Imin, Imax, &
2104 & LBi, UBi, LBk, UBk, &
2105 & update, A)
2106!
2107!***********************************************************************
2108! !
2109! This routine exchanges boundary arrays between tiles. !
2110! !
2111! On Input: !
2112! !
2113! ng Nested grid number. !
2114! model Calling model identifier. !
2115! Imin Starting boundary index to process. !
2116! Imax Ending boundary index to process. !
2119! LBi I-dimension Lower bound. !
2120! UBi I-dimension Upper bound. !
2121! LBk K-dimension Lower bound, if any. Otherwise, a value !
2122! of one is expected. !
2123! UBk K-dimension Upper bound, if any. Otherwise, a value !
2124! of one is expected. !
2126! update Switch activated by the node that updated the !
2127! boundary data. !
2128! A Boundary array (1D or 2D) to process. !
2129! !
2130! On Output: !
2131! !
2132! A Updated boundary array (1D or 2D). !
2133! !
2134!***********************************************************************
2135!
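! A usage sketch (illustrative; all names below are hypothetical
! stand-ins for a 1D western-boundary array and its update switch,
! so the K-range collapses to 1:1):
!
!   logical :: Wupdate                     ! .TRUE. on tiles that set it
!   real(r8) :: Bry(0:Mm(ng)+1)            ! 1D boundary array, LBk=UBk=1
!   ...
!   CALL mp_boundary (ng, iNLM, Jstr, Jend, 0, Mm(ng)+1, 1, 1, &
!  &                  Wupdate, Bry)
!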
2136! Imported variable declarations.
2137!
2138 logical, intent(in) :: update
2139!
2140 integer, intent(in) :: ng, model, Imin, Imax
2141 integer, intent(in) :: LBi, UBi, LBk, UBk
2142!
2143 real(r8), intent(inout) :: A(LBi:UBi,LBk:UBk)
2144!
2145! Local variable declarations.
2146!
2147 integer :: Ilen, Ioff, Lstr, MyError, Nnodes, Npts, Serror
2148 integer :: i, ik, k, kc, rank
2149!
2150 real(r8), dimension((UBi-LBi+1)*(UBk-LBk+1)) :: Asend
2151
2152# if defined BOUNDARY_ALLGATHER
2153 real(r8), dimension((UBi-LBi+1)*(UBk-LBk+1), 0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
2154# elif defined BOUNDARY_ALLREDUCE
2155 real(r8), dimension((UBi-LBi+1)*(UBk-LBk+1)) :: Arecv
2156# endif
2157!
2158 character (len=MPI_MAX_ERROR_STRING) :: string
2159
2160 character (len=*), parameter :: MyFile = &
2161 & __FILE__//", mp_boundary"
2162
2163# ifdef PROFILE
2164!
2165!-----------------------------------------------------------------------
2166! Turn on time clocks.
2167!-----------------------------------------------------------------------
2168!
2169 CALL wclock_on (ng, model, 68, __line__, myfile)
2170# endif
2171!
2172!-----------------------------------------------------------------------
2173! Pack boundary data. Zero-out boundary array except points updated
2174! by the appropriate node, so sum reduction can be performed during
2175! unpacking.
2176!-----------------------------------------------------------------------
2177!
2178! Maximum automatic buffer memory size in bytes.
2179!
2180 bmemmax(ng)=max(bmemmax(ng), real((SIZE(asend)+ &
2181 & SIZE(arecv))*kind(a),r8))
2182!
2183! Initialize buffer to the full range so unpacking is correct with
2184! summation. This also allows even exchange of segments with
2185! communication routine "mpi_allgather".
2186!
2187 ilen=ubi-lbi+1
2188 ioff=1-lbi
2189 npts=ilen*(ubk-lbk+1)
2190 DO i=1,npts
2191 asend(i)=0.0_r8
2192 END DO
2193!
2194! If a boundary tile, load boundary data.
2195!
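! Each k-level occupies a contiguous block of Ilen entries in Asend,
! so the packed index below is ik = (i+Ioff) + (k-LBk)*Ilen.
!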
2196 IF (update) THEN
2197 DO k=lbk,ubk
2198 kc=(k-lbk)*ilen
2199 DO i=imin,imax
2200 ik=i+ioff+kc
2201 asend(ik)=a(i,k)
2202 END DO
2203 END DO
2204 END IF
2205!
2206!-----------------------------------------------------------------------
2207! Collect data from all nodes.
2208!-----------------------------------------------------------------------
2209!
2210# ifdef MPI
2211# if defined BOUNDARY_ALLGATHER
2212 CALL mpi_allgather (asend, npts, mp_float, arecv, npts, mp_float, &
2213 & ocn_comm_world, myerror)
2214 IF (myerror.ne.mpi_success) THEN
2215 CALL mpi_error_string (myerror, string, lstr, serror)
2216 lstr=len_trim(string)
2217 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
2218 & string(1:lstr)
2219 10 FORMAT (/,' MP_BOUNDARY - error during ',a,' call, Task = ', &
2220 & i3.3,' Error = ',i3,/,15x,a)
2221 exit_flag=2
2222 RETURN
2223 END IF
2224# elif defined BOUNDARY_ALLREDUCE
2225 CALL mpi_allreduce (asend, arecv, npts, mp_float, mpi_sum, &
2226 & ocn_comm_world, myerror)
2227 IF (myerror.ne.mpi_success) THEN
2228 CALL mpi_error_string (myerror, string, lstr, serror)
2229 lstr=len_trim(string)
2230 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
2231 & string(1:lstr)
2232 10 FORMAT (/,' MP_BOUNDARY - error during ',a,' call, Task = ', &
2233 & i3.3,' Error = ',i3,/,15x,a)
2234 exit_flag=2
2235 RETURN
2236 END IF
2237# endif
2238# endif
2239!
2240!-----------------------------------------------------------------------
2241! Unpack data: reduction sum.
2242!-----------------------------------------------------------------------
2243!
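! With BOUNDARY_ALLGATHER every rank's packed segment is summed below;
! because non-updating ranks sent zeros, this gives the same result as
! the BOUNDARY_ALLREDUCE sum.
!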
2244# if defined BOUNDARY_ALLGATHER
2245 nnodes=ntilei(ng)*ntilej(ng)-1
2246 ik=0
2247 DO k=lbk,ubk
2248 DO i=lbi,ubi
2249 a(i,k)=0.0_r8
2250 ik=ik+1
2251 DO rank=0,nnodes
2252 a(i,k)=a(i,k)+arecv(ik,rank)
2253 END DO
2254 END DO
2255 END DO
2256# elif defined BOUNDARY_ALLREDUCE
2257 ik=0
2258 DO k=lbk,ubk
2259 DO i=lbi,ubi
2260 ik=ik+1
2261 a(i,k)=arecv(ik)
2262 END DO
2263 END DO
2264# endif
2265# ifdef PROFILE
2266!
2267!-----------------------------------------------------------------------
2268! Turn off time clocks.
2269!-----------------------------------------------------------------------
2270!
2271 CALL wclock_off (ng, model, 68, __line__, myfile)
2272# endif
2273!
2274 RETURN
2275 END SUBROUTINE mp_boundary
2276!
2277 SUBROUTINE mp_assemblef_1d (ng, model, Npts, Aspv, A, InpComm)
2279!***********************************************************************
2280! !
2281! This routine assembles a 1D floating-point array from all members !
2282! in the group. The collection of data from all nodes is achieved !
2283! as a reduction sum. !
2284! !
2285! On Input: !
2286! !
2287! ng Nested grid number. !
2288! model Calling model identifier. !
2289! Npts Number of collected data points, PROD(SIZE(A)). !
2290! Aspv Special value indicating that an array element is !
2291! not operated by the current parallel node. It must !
2292! be zero to collect data by a global reduction sum. !
2293! A 1D array to collect. !
2294! InpComm Communicator handle (integer, OPTIONAL). !
2295! !
2296! On Output: !
2297! !
2298! A Assembled 1D array. !
2299! !
2300!***********************************************************************
2301!
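! A usage sketch (illustrative; "work", "Nwrk", and the tile range are
! hypothetical, and the generic "mp_assemble" interface of this module
! resolves here for 1D floating-point arrays):
!
!   real(r8) :: work(Nwrk)
!   work=0.0_r8                            ! Aspv must be zero
!   work(my_first:my_last)=...             ! entries owned by this tile
!   CALL mp_assemble (ng, iNLM, Nwrk, 0.0_r8, work)
!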
2302! Imported variable declarations.
2303!
2304 integer, intent(in) :: ng, model, Npts
2305
2306 integer, intent(in), optional :: InpComm
2307!
2308 real(r8), intent(in) :: Aspv
2309
2310 real(r8), intent(inout) :: A(:)
2311!
2312! Local variable declarations.
2313!
2314 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
2315 integer :: i, rank, request
2316
2317 integer, dimension(MPI_STATUS_SIZE) :: status
2318!
2319# if defined ASSEMBLE_ALLGATHER
2320 real(r8), dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
2321# elif defined ASSEMBLE_ALLREDUCE
2322 real(r8), dimension(Npts) :: Asend
2323# elif defined ASSEMBLE_SENDRECV
2324 real(r8), allocatable :: Arecv(:)
2325# endif
2326!
2327 character (len=MPI_MAX_ERROR_STRING) :: string
2328
2329 character (len=*), parameter :: MyFile = &
2330 & __FILE__//", mp_assemblef_1d"
2331
2332# ifdef PROFILE
2333!
2334!-----------------------------------------------------------------------
2335! Turn on time clocks.
2336!-----------------------------------------------------------------------
2337!
2338 CALL wclock_on (ng, model, 70, __line__, myfile)
2339# endif
2340# ifdef MPI
2341!
2342!-----------------------------------------------------------------------
2343! Set distributed-memory communicator handle (context ID).
2344!-----------------------------------------------------------------------
2345!
2346 IF (PRESENT(inpcomm)) THEN
2347 mycomm=inpcomm
2348 ELSE
2349 mycomm=ocn_comm_world
2350 END IF
2351# endif
2352!
2353!-----------------------------------------------------------------------
2354! Check input parameters.
2355!-----------------------------------------------------------------------
2356!
2357! Maximum automatic buffer memory size in bytes.
2358!
2359# if defined ASSEMBLE_ALLGATHER
2360 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
2361# else
2362 bmemmax(ng)=max(bmemmax(ng), real(npts*kind(a),r8))
2363# endif
2364!
2365 mynpts=ubound(a, dim=1)
2366 IF (npts.ne.mynpts) THEN
2367 IF (master) THEN
2368 WRITE (stdout,10) npts, mynpts
2369 END IF
2370 exit_flag=7
2371 END IF
2372!
2373 IF (aspv.ne.0.0_r8) THEN
2374 IF (master) THEN
2375 WRITE (stdout,20) aspv
2376 END IF
2377 exit_flag=7
2378 END IF
2379!
2380!-----------------------------------------------------------------------
2381! Collect data from all nodes.
2382!-----------------------------------------------------------------------
2383!
2384# if defined ASSEMBLE_ALLGATHER
2385 CALL mpi_allgather (a, npts, mp_float, arecv, npts, mp_float, &
2386 & mycomm, myerror)
2387 IF (myerror.ne.mpi_success) THEN
2388 CALL mpi_error_string (myerror, string, lstr, serror)
2389 lstr=len_trim(string)
2390 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
2391 & string(1:lstr)
2392 exit_flag=2
2393 RETURN
2394 END IF
2395!
2396! Pack data according to special values: sum or ignore.
2397!
2398 nnodes=ntilei(ng)*ntilej(ng)-1
2399 IF (aspv.eq.0.0_r8) THEN
2400 DO i=1,npts
2401 a(i)=0.0_r8
2402 DO rank=0,nnodes
2403 a(i)=a(i)+arecv(i,rank)
2404 END DO
2405 END DO
2406 ELSE
2407 DO i=1,npts
2408 DO rank=0,nnodes
2409 IF (arecv(i,rank).ne.aspv) THEN
2410 a(i)=arecv(i,rank)
2411 END IF
2412 END DO
2413 END DO
2414 END IF
2415
2416# elif defined ASSEMBLE_ALLREDUCE
2417!
2418! Copy data to send.
2419!
2420 DO i=1,npts
2421 asend(i)=a(i)
2422 END DO
2423!
2424! Collect data from all nodes as a reduced sum.
2425!
2426 CALL mpi_allreduce (asend, a, npts, mp_float, mpi_sum, &
2427 & mycomm, myerror)
2428 IF (myerror.ne.mpi_success) THEN
2429 CALL mpi_error_string (myerror, string, lstr, serror)
2430 lstr=len_trim(string)
2431 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
2432 & string(1:lstr)
2433 exit_flag=2
2434 RETURN
2435 END IF
2436
2437# elif defined ASSEMBLE_SENDRECV
2438
2439 IF (myrank.eq.mymaster) THEN
2440!
2441! If master node, allocate and receive buffer.
2442!
2443 IF (.not.allocated(arecv)) THEN
2444 allocate (arecv(npts))
2445 END IF
2446!
2447! If master node, loop over other nodes to receive and accumulate the
2448! data.
2449!
2450 DO rank=1,ntilei(ng)*ntilej(ng)-1
2451 CALL mpi_irecv (arecv, npts, mp_float, rank, rank+5, &
2452 & mycomm, request, myerror)
2453 CALL mpi_wait (request, status, myerror)
2454 IF (myerror.ne.mpi_success) THEN
2455 CALL mpi_error_string (myerror, string, lstr, serror)
2456 lstr=len_trim(string)
2457 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
2458 exit_flag=2
2459 RETURN
2460 END IF
2461 DO i=1,npts
2462 a(i)=a(i)+arecv(i)
2463 END DO
2464 END DO
2465 deallocate (arecv)
2466!
2467! Otherwise, send data to master node.
2468!
2469 ELSE
2470 CALL mpi_isend (a, npts, mp_float, mymaster, myrank+5, &
2471 & mycomm, request, myerror)
2472 CALL mpi_wait (request, status, myerror)
2473 IF (myerror.ne.mpi_success) THEN
2474 CALL mpi_error_string (myerror, string, lstr, serror)
2475 lstr=len_trim(string)
2476 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
2477 exit_flag=2
2478 RETURN
2479 END IF
2480 END IF
2481!
2482! Broadcast accumulated (full) data to all nodes.
2483!
2484 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
2485 IF (myerror.ne.mpi_success) THEN
2486 CALL mpi_error_string (myerror, string, lstr, serror)
2487 lstr=len_trim(string)
2488 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2489 exit_flag=2
2490 RETURN
2491 END IF
2492# endif
2493
2494# ifdef PROFILE
2495!
2496!-----------------------------------------------------------------------
2497! Turn off time clocks.
2498!-----------------------------------------------------------------------
2499!
2500 CALL wclock_off (ng, model, 70, __line__, myfile)
2501# endif
2502!
2503 10 FORMAT (/,' MP_ASSEMBLEF_1D - inconsistent array size, Npts = ', &
2504 & i10,2x,i10,/,19x,'number of addressed array elements ', &
2505 & 'is incorrect.')
2506 20 FORMAT (/,' MP_ASSEMBLEF_1D - illegal special value, Aspv = ', &
2507 & 1p,e17.10,/,19x,'a zero value is needed for global ', &
2508 & 'reduction.')
2509 30 FORMAT (/,' MP_ASSEMBLEF_1D - error during ',a,' call, Task = ', &
2510 & i3.3,' Error = ',i3,/,19x,a)
2511!
2512 RETURN
2513 END SUBROUTINE mp_assemblef_1d
2514!
2515 SUBROUTINE mp_assemblef_2d (ng, model, Npts, Aspv, A, InpComm)
2517!***********************************************************************
2518! !
2519! This routine assembles a 2D floating-point array from all members !
2520! in the group. The collection of data from all nodes is achieved !
2521! as a reduction sum. !
2522! !
2523! On Input: !
2524! !
2525! ng Nested grid number. !
2526! model Calling model identifier. !
2527! Npts Number of collected data points, PROD(SIZE(A)). !
2528! Aspv Special value indicating that an array element is !
2529! not operated by the current parallel node. It must !
2530! be zero to collect data by a global reduction sum. !
2531! A 2D array to collect. !
2532! InpComm Communicator handle (integer, OPTIONAL). !
2533! !
2534! On Output: !
2535! !
2536! A Assembled 2D array. !
2537! !
2538!***********************************************************************
2539!
2540! Imported variable declarations.
2541!
2542 integer, intent(in) :: ng, model, Npts
2543
2544 integer, intent(in), optional :: InpComm
2545!
2546 real(r8), intent(in) :: Aspv
2547
2548 real(r8), intent(inout) :: A(:,:)
2549!
2550! Local variable declarations.
2551!
2552 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
2553 integer :: i, rank, request
2554
2555 integer :: Asize(2)
2556
2557 integer, dimension(MPI_STATUS_SIZE) :: status
2558!
2559# if defined ASSEMBLE_ALLGATHER
2560 real(r8), dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
2561 real(r8), dimension(Npts) :: Asend
2562# elif defined ASSEMBLE_ALLREDUCE
2563 real(r8), dimension(Npts) :: Arecv, Asend
2564# elif defined ASSEMBLE_SENDRECV
2565 real(r8), allocatable :: Arecv(:)
2566 real(r8), dimension(Npts) :: Asend
2567# endif
2568!
2569 character (len=MPI_MAX_ERROR_STRING) :: string
2570
2571 character (len=*), parameter :: MyFile = &
2572 & __FILE__//", mp_assemblef_2d"
2573
2574# ifdef PROFILE
2575!
2576!-----------------------------------------------------------------------
2577! Turn on time clocks.
2578!-----------------------------------------------------------------------
2579!
2580 CALL wclock_on (ng, model, 70, __line__, myfile)
2581# endif
2582# ifdef MPI
2583!
2584!-----------------------------------------------------------------------
2585! Set distributed-memory communicator handle (context ID).
2586!-----------------------------------------------------------------------
2587!
2588 IF (PRESENT(inpcomm)) THEN
2589 mycomm=inpcomm
2590 ELSE
2591 mycomm=ocn_comm_world
2592 END IF
2593# endif
2594!
2595!-----------------------------------------------------------------------
2596! Check input parameters.
2597!-----------------------------------------------------------------------
2598!
2599! Maximum automatic buffer memory size in bytes.
2600!
2601# if defined ASSEMBLE_ALLGATHER
2602 bmemmax(ng)=max(bmemmax(ng), real((npts+SIZE(arecv))*kind(a),r8))
2603# else
2604 bmemmax(ng)=max(bmemmax(ng), real(2*npts*kind(a),r8))
2605# endif
2606!
2607 asize(1)=ubound(a, dim=1)
2608 asize(2)=ubound(a, dim=2)
2609 mynpts=asize(1)*asize(2)
2610 IF (npts.ne.mynpts) THEN
2611 IF (master) THEN
2612 WRITE (stdout,10) npts, mynpts
2613 END IF
2614 exit_flag=7
2615 END IF
2616!
2617 IF (aspv.ne.0.0_r8) THEN
2618 IF (master) THEN
2619 WRITE (stdout,20) aspv
2620 END IF
2621 exit_flag=7
2622 END IF
2623!
2624!-----------------------------------------------------------------------
2625! Collect data from all nodes.
2626!-----------------------------------------------------------------------
2627!
2628! Reshape input 2D data into 1D array to facilitate communications.
2629!
2630 asend=reshape(a, (/npts/))
2631
2632# if defined ASSEMBLE_ALLGATHER
2633!
2634! Collect data from all nodes.
2635!
2636 CALL mpi_allgather (asend, npts, mp_float, arecv, npts, mp_float, &
2637 & mycomm, myerror)
2638 IF (myerror.ne.mpi_success) THEN
2639 CALL mpi_error_string (myerror, string, lstr, serror)
2640 lstr=len_trim(string)
2641 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
2642 & string(1:lstr)
2643 exit_flag=2
2644 RETURN
2645 END IF
2646!
2647! Pack data according to special values: sum or ignore.
2648!
2649 nnodes=ntilei(ng)*ntilej(ng)-1
2650 IF (aspv.eq.0.0_r8) THEN
2651 DO i=1,npts
2652 asend(i)=0.0_r8
2653 DO rank=0,nnodes
2654 asend(i)=asend(i)+arecv(i,rank)
2655 END DO
2656 END DO
2657 ELSE
2658 DO i=1,npts
2659 DO rank=0,nnodes
2660 IF (arecv(i,rank).ne.aspv) THEN
2661 asend(i)=arecv(i,rank)
2662 END IF
2663 END DO
2664 END DO
2665 END IF
2666!
2667! Load collected data in output 2D array.
2668!
2669 a=reshape(asend, asize)
2670
2671# elif defined ASSEMBLE_ALLREDUCE
2672!
2673! Collect data from all nodes as a reduced sum.
2674!
2675 CALL mpi_allreduce (asend, arecv, npts, mp_float, mpi_sum, &
2676 & mycomm, myerror)
2677 IF (myerror.ne.mpi_success) THEN
2678 CALL mpi_error_string (myerror, string, lstr, serror)
2679 lstr=len_trim(string)
2680 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
2681 & string(1:lstr)
2682 exit_flag=2
2683 RETURN
2684 END IF
2685!
2686! Load collected data into output 2D array.
2687!
2688 a=reshape(arecv, asize)
2689
2690# elif defined ASSEMBLE_SENDRECV
2691!
2692 IF (myrank.eq.mymaster) THEN
2693!
2694! If master node, allocate and receive buffer.
2695!
2696 IF (.not.allocated(arecv)) THEN
2697 allocate (arecv(npts))
2698 END IF
2699!
2700! If master node, loop over other nodes to receive and accumulate the
2701! data.
2702!
2703 DO rank=1,ntilei(ng)*ntilej(ng)-1
2704 CALL mpi_irecv (arecv, npts, mp_float, rank, rank+5, &
2705 & mycomm, request, myerror)
2706 CALL mpi_wait (request, status, myerror)
2707 IF (myerror.ne.mpi_success) THEN
2708 CALL mpi_error_string (myerror, string, lstr, serror)
2709 lstr=len_trim(string)
2710 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
2711 exit_flag=2
2712 RETURN
2713 END IF
2714 DO i=1,npts
2715 asend(i)=asend(i)+arecv(i)
2716 END DO
2717 END DO
2718 deallocate (arecv)
2719!
2720! Load collected data in output 2D array.
2721!
2722 a=reshape(asend, asize)
2723!
2724! Otherwise, send data to master node.
2725!
2726 ELSE
2727 CALL mpi_isend (asend, npts, mp_float, mymaster, myrank+5, &
2728 & mycomm, request, myerror)
2729 CALL mpi_wait (request, status, myerror)
2730 IF (myerror.ne.mpi_success) THEN
2731 CALL mpi_error_string (myerror, string, lstr, serror)
2732 lstr=len_trim(string)
2733 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
2734 exit_flag=2
2735 RETURN
2736 END IF
2737 END IF
2738!
2739! Broadcast accumulated (full) data to all nodes.
2740!
2741 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
2742 IF (myerror.ne.mpi_success) THEN
2743 CALL mpi_error_string (myerror, string, lstr, serror)
2744 lstr=len_trim(string)
2745 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2746 exit_flag=2
2747 RETURN
2748 END IF
2749# endif
2750# ifdef PROFILE
2751!
2752!-----------------------------------------------------------------------
2753! Turn off time clocks.
2754!-----------------------------------------------------------------------
2755!
2756 CALL wclock_off (ng, model, 70, __line__, myfile)
2757# endif
2758!
2759 10 FORMAT (/,' MP_ASSEMBLEF_2D - inconsistent array size, Npts = ', &
2760 & i10,2x,i10,/,19x,'number of addressed array elements ', &
2761 & 'is incorrect.')
2762 20 FORMAT (/,' MP_ASSEMBLEF_2D - illegal special value, Aspv = ', &
2763 & 1p,e17.10,/,19x,'a zero value is needed for global ', &
2764 & 'reduction.')
2765 30 FORMAT (/,' MP_ASSEMBLEF_2D - error during ',a,' call, Task = ', &
2766 & i3.3,' Error = ',i3,/,19x,a)
2767!
2768 RETURN
2769 END SUBROUTINE mp_assemblef_2d
2770!
2771 SUBROUTINE mp_assemblef_3d (ng, model, Npts, Aspv, A, InpComm)
2773!***********************************************************************
2774! !
2775! This routine assembles a 3D floating-point array from all members !
2776! in the group. The collection of data from all nodes is achieved !
2777! as a reduction sum. !
2778! !
2779! On Input: !
2780! !
2781! ng Nested grid number. !
2782! model Calling model identifier. !
2783! Npts Number of collected data points, PROD(SIZE(A)). !
2784! Aspv Special value indicating that an array element is !
2785! not operated by the current parallel node. It must !
2786! be zero to collect data by a global reduction sum. !
2787! A 3D array to collect. !
2788! InpComm Communicator handle (integer, OPTIONAL). !
2789! !
2790! On Output: !
2791! !
2792! A Assembled 3D array. !
2793! !
2794!***********************************************************************
2795!
2796! Imported variable declarations.
2797!
2798 integer, intent(in) :: ng, model, Npts
2799
2800 integer, intent(in), optional :: InpComm
2801
2802 real(r8), intent(in) :: Aspv
2803
2804 real(r8), intent(inout) :: A(:,:,:)
2805!
2806! Local variable declarations.
2807!
2808 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
2809 integer :: i, rank, request
2810
2811 integer :: Asize(3)
2812
2813 integer, dimension(MPI_STATUS_SIZE) :: status
2814!
2815# if defined ASSEMBLE_ALLGATHER
2816 real(r8), dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
2817 real(r8), dimension(Npts) :: Asend
2818# elif defined ASSEMBLE_ALLREDUCE
2819 real(r8), dimension(Npts) :: Arecv, Asend
2820# elif defined ASSEMBLE_SENDRECV
2821 real(r8), allocatable :: Arecv(:)
2822 real(r8), dimension(Npts) :: Asend
2823# endif
2824!
2825 character (len=MPI_MAX_ERROR_STRING) :: string
2826
2827 character (len=*), parameter :: MyFile = &
2828 & __FILE__//", mp_assemblef_3d"
2829
2830# ifdef PROFILE
2831!
2832!-----------------------------------------------------------------------
2833! Turn on time clocks.
2834!-----------------------------------------------------------------------
2835!
2836 CALL wclock_on (ng, model, 70, __line__, myfile)
2837# endif
2838# ifdef MPI
2839!
2840!-----------------------------------------------------------------------
2841! Set distributed-memory communicator handle (context ID).
2842!-----------------------------------------------------------------------
2843!
2844 IF (PRESENT(inpcomm)) THEN
2845 mycomm=inpcomm
2846 ELSE
2847 mycomm=ocn_comm_world
2848 END IF
2849# endif
2850!
2851!-----------------------------------------------------------------------
2852! Check input parameters.
2853!-----------------------------------------------------------------------
2854!
2855! Maximum automatic buffer memory size in bytes.
2856!
2857# if defined ASSEMBLE_ALLGATHER
2858 bmemmax(ng)=max(bmemmax(ng), real((npts+SIZE(arecv))*kind(a),r8))
2859# else
2860 bmemmax(ng)=max(bmemmax(ng), real(2*npts*kind(a),r8))
2861# endif
2862!
2863 asize(1)=ubound(a, dim=1)
2864 asize(2)=ubound(a, dim=2)
2865 asize(3)=ubound(a, dim=3)
2866 mynpts=asize(1)*asize(2)*asize(3)
2867 IF (npts.ne.mynpts) THEN
2868 IF (master) THEN
2869 WRITE (stdout,10) npts, mynpts
2870 END IF
2871 exit_flag=7
2872 END IF
2873!
2874 IF (aspv.ne.0.0_r8) THEN
2875 IF (master) THEN
2876 WRITE (stdout,20) aspv
2877 END IF
2878 exit_flag=7
2879 END IF
2880!
2881!-----------------------------------------------------------------------
2882! Collect data from all nodes.
2883!-----------------------------------------------------------------------
2884!
2885! Reshape input 3D data into 1D array to facilitate communications.
2886!
2887 asend=reshape(a, (/npts/))
2888
2889# if defined ASSEMBLE_ALLGATHER
2890!
2891! Collect data from all nodes.
2892!
2893 CALL mpi_allgather (asend, npts, mp_float, arecv, npts, mp_float, &
2894 & mycomm, myerror)
2895 IF (myerror.ne.mpi_success) THEN
2896 CALL mpi_error_string (myerror, string, lstr, serror)
2897 lstr=len_trim(string)
2898 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
2899 & string(1:lstr)
2900 exit_flag=2
2901 RETURN
2902 END IF
2903!
2904! Pack data according to special values: sum or ignore.
2905!
2906 nnodes=ntilei(ng)*ntilej(ng)-1
2907 IF (aspv.eq.0.0_r8) THEN
2908 DO i=1,npts
2909 asend(i)=0.0_r8
2910 DO rank=0,nnodes
2911 asend(i)=asend(i)+arecv(i,rank)
2912 END DO
2913 END DO
2914 ELSE
2915 DO i=1,npts
2916 DO rank=0,nnodes
2917 IF (arecv(i,rank).ne.aspv) THEN
2918 asend(i)=arecv(i,rank)
2919 END IF
2920 END DO
2921 END DO
2922 END IF
2923!
2924! Load collected data into output 3D array.
2925!
2926 a=reshape(asend, asize)
2927
2928# elif defined ASSEMBLE_ALLREDUCE
2929!
2930! Collect data from all nodes as a reduced sum.
2931!
2932 CALL mpi_allreduce (asend, arecv, npts, mp_float, mpi_sum, &
2933 & mycomm, myerror)
2934 IF (myerror.ne.mpi_success) THEN
2935 CALL mpi_error_string (myerror, string, lstr, serror)
2936 lstr=len_trim(string)
2937 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
2938 & string(1:lstr)
2939 exit_flag=2
2940 RETURN
2941 END IF
2942!
2943! Load collected data into output 3D array.
2944!
2945 a=reshape(arecv, asize)
2946
2947# elif defined ASSEMBLE_SENDRECV
2948!
2949 IF (myrank.eq.mymaster) THEN
2950!
2951! If master node, allocate and receive buffer.
2952!
2953 IF (.not.allocated(arecv)) THEN
2954 allocate (arecv(npts))
2955 END IF
2956!
2957! If master node, loop over other nodes to receive and accumulate the
2958! data.
2959!
2960 DO rank=1,ntilei(ng)*ntilej(ng)-1
2961 CALL mpi_irecv (arecv, npts, mp_float, rank, rank+5, &
2962 & mycomm, request, myerror)
2963 CALL mpi_wait (request, status, myerror)
2964 IF (myerror.ne.mpi_success) THEN
2965 CALL mpi_error_string (myerror, string, lstr, serror)
2966 lstr=len_trim(string)
2967 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
2968 exit_flag=2
2969 RETURN
2970 END IF
2971 DO i=1,npts
2972 asend(i)=asend(i)+arecv(i)
2973 END DO
2974 END DO
2975 deallocate (arecv)
2976!
2977! Load collected data into output 3D array.
2978!
2979 a=reshape(asend, asize)
2980!
2981! Otherwise, send data to master node.
2982!
2983 ELSE
2984 CALL mpi_isend (asend, npts, mp_float, mymaster, myrank+5, &
2985 & mycomm, request, myerror)
2986 CALL mpi_wait (request, status, myerror)
2987 IF (myerror.ne.mpi_success) THEN
2988 CALL mpi_error_string (myerror, string, lstr, serror)
2989 lstr=len_trim(string)
2990 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
2991 exit_flag=2
2992 RETURN
2993 END IF
2994 END IF
2995!
2996! Broadcast accumulated (full) data to all nodes.
2997!
2998 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
2999 IF (myerror.ne.mpi_success) THEN
3000 CALL mpi_error_string (myerror, string, lstr, serror)
3001 lstr=len_trim(string)
3002 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3003 exit_flag=2
3004 RETURN
3005 END IF
3006# endif
3007# ifdef PROFILE
3008!
3009!-----------------------------------------------------------------------
3010! Turn off time clocks.
3011!-----------------------------------------------------------------------
3012!
3013 CALL wclock_off (ng, model, 70, __line__, myfile)
3014# endif
3015!
3016 10 FORMAT (/,' MP_ASSEMBLEF_3D - inconsistent array size, Npts = ', &
3017 & i10,2x,i10,/,19x,'number of addressed array elements ', &
3018 & 'is incorrect.')
3019 20 FORMAT (/,' MP_ASSEMBLEF_3D - illegal special value, Aspv = ', &
3020 & 1p,e17.10,/,19x,'a zero value is needed for global ', &
3021 & 'reduction.')
3022 30 FORMAT (/,' MP_ASSEMBLEF_3D - error during ',a,' call, Task = ', &
3023 & i3.3,' Error = ',i3,/,19x,a)
3024!
3025 RETURN
3026 END SUBROUTINE mp_assemblef_3d
3027!
3028 SUBROUTINE mp_assemblei_1d (ng, model, Npts, Aspv, A, InpComm)
3030!***********************************************************************
3031! !
3032! This routine assembles a 1D integer array from all members in the !
3033! group. The collection of data from all nodes is achieved as a !
3034! reduction sum. !
3035! !
3036! On Input: !
3037! !
3038! ng Nested grid number. !
3039! model Calling model identifier. !
3040! Npts Number of collected data points, PROD(SIZE(A)). !
3041! Aspv Special value indicating that an array element is !
3042! not operated by the current parallel node. It must !
3043! be zero to collect data by a global reduction sum. !
3044! A 1D array to collect. !
3045! InpComm Communicator handle (integer, OPTIONAL). !
3046! !
3047! On Output: !
3048! !
3049! A Assembled 1D array. !
3050! !
3051!***********************************************************************
3052!
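! A usage sketch (illustrative; same pattern as the floating-point
! version, but with an integer zero special value and hypothetical
! names "ibuf" and "Nval"):
!
!   integer :: ibuf(Nval)
!   ibuf=0                                 ! Aspv must be zero
!   ibuf(my_first:my_last)=...             ! entries owned by this tile
!   CALL mp_assemble (ng, iNLM, Nval, 0, ibuf)
!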
3053! Imported variable declarations.
3054!
3055 integer, intent(in) :: ng, model, Npts
3056
3057 integer, intent(in), optional :: InpComm
3058
3059 integer, intent(in) :: Aspv
3060
3061 integer, intent(inout) :: A(:)
3062!
3063! Local variable declarations.
3064!
3065 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
3066 integer :: i, rank, request
3067
3068 integer, dimension(MPI_STATUS_SIZE) :: status
3069
3070# if defined ASSEMBLE_ALLGATHER
3071 integer, dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
3072# elif defined ASSEMBLE_ALLREDUCE
3073 integer, dimension(Npts) :: Asend
3074# elif defined ASSEMBLE_SENDRECV
3075 integer, allocatable :: Arecv(:)
3076# endif
3077!
3078 character (len=MPI_MAX_ERROR_STRING) :: string
3079
3080 character (len=*), parameter :: MyFile = &
3081 & __FILE__//", mp_assemblei_1d"
3082
3083# ifdef PROFILE
3084!
3085!-----------------------------------------------------------------------
3086! Turn on time clocks.
3087!-----------------------------------------------------------------------
3088!
3089 CALL wclock_on (ng, model, 70, __line__, myfile)
3090# endif
3091# ifdef MPI
3092!
3093!-----------------------------------------------------------------------
3094! Set distributed-memory communicator handle (context ID).
3095!-----------------------------------------------------------------------
3096!
3097 IF (PRESENT(inpcomm)) THEN
3098 mycomm=inpcomm
3099 ELSE
3100 mycomm=ocn_comm_world
3101 END IF
3102# endif
3103!
3104!-----------------------------------------------------------------------
3105! Check input parameters.
3106!-----------------------------------------------------------------------
3107!
3108! Maximum automatic buffer memory size in bytes.
3109!
3110# if defined ASSEMBLE_ALLGATHER
3111 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
3112# else
3113 bmemmax(ng)=max(bmemmax(ng), real(npts*kind(a),r8))
3114# endif
3115!
3116 mynpts=ubound(a, dim=1)
3117 IF (npts.ne.mynpts) THEN
3118 IF (master) THEN
3119 WRITE (stdout,10) npts, mynpts
3120 END IF
3121 exit_flag=7
3122 END IF
3123!
3124 IF (aspv.ne.0) THEN
3125 IF (master) THEN
3126 WRITE (stdout,20) aspv
3127 END IF
3128 exit_flag=7
3129 END IF
3130!
3131!-----------------------------------------------------------------------
3132! Collect data from all nodes.
3133!-----------------------------------------------------------------------
3134!
3135# if defined ASSEMBLE_ALLGATHER
3136 CALL mpi_allgather (a, npts, mpi_integer, &
3137 & arecv, npts, mpi_integer, &
3138 & mycomm, myerror)
3139 IF (myerror.ne.mpi_success) THEN
3140 CALL mpi_error_string (myerror, string, lstr, serror)
3141 lstr=len_trim(string)
3142 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
3143 & string(1:lstr)
3144 exit_flag=2
3145 RETURN
3146 END IF
3147!
3148! Pack data according to special values: sum or ignore.
3149!
3150 nnodes=ntilei(ng)*ntilej(ng)-1
3151 IF (aspv.eq.0) THEN
3152 DO i=1,npts
3153 a(i)=0
3154 DO rank=0,nnodes
3155 a(i)=a(i)+arecv(i,rank)
3156 END DO
3157 END DO
3158 ELSE
3159 DO i=1,npts
3160 DO rank=0,nnodes
3161 IF (arecv(i,rank).ne.aspv) THEN
3162 a(i)=arecv(i,rank)
3163 END IF
3164 END DO
3165 END DO
3166 END IF
3167
3168# elif defined ASSEMBLE_ALLREDUCE
3169!
3170! Copy data to send.
3171!
3172 DO i=1,npts
3173 asend(i)=a(i)
3174 END DO
3175!
3176! Collect data from all nodes as a reduced sum.
3177!
3178 CALL mpi_allreduce (asend, a, npts, mpi_integer, mpi_sum, &
3179 & mycomm, myerror)
3180 IF (myerror.ne.mpi_success) THEN
3181 CALL mpi_error_string (myerror, string, lstr, serror)
3182 lstr=len_trim(string)
3183 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
3184 & string(1:lstr)
3185 exit_flag=2
3186 RETURN
3187 END IF
3188
3189# elif defined ASSEMBLE_SENDRECV
3190
3191 IF (myrank.eq.mymaster) THEN
3192!
3193! If master node, allocate and receive buffer.
3194!
3195 IF (.not.allocated(arecv)) THEN
3196 allocate (arecv(npts))
3197 END IF
3198!
3199! If master node, loop over other nodes to receive and accumulate the
3200! data.
3201!
3202 DO rank=1,ntilei(ng)*ntilej(ng)-1
3203 CALL mpi_irecv (arecv, npts, mpi_integer, rank, rank+5, &
3204 & mycomm, request, myerror)
3205 CALL mpi_wait (request, status, myerror)
3206 IF (myerror.ne.mpi_success) THEN
3207 CALL mpi_error_string (myerror, string, lstr, serror)
3208 lstr=len_trim(string)
3209 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
3210 exit_flag=2
3211 RETURN
3212 END IF
3213 DO i=1,npts
3214 a(i)=a(i)+arecv(i)
3215 END DO
3216 END DO
3217 deallocate (arecv)
3218!
3219! Otherwise, send data to master node.
3220!
3221 ELSE
3222 CALL mpi_isend (a, npts, mpi_integer, mymaster, myrank+5, &
3223 & mycomm, request, myerror)
3224 CALL mpi_wait (request, status, myerror)
3225 IF (myerror.ne.mpi_success) THEN
3226 CALL mpi_error_string (myerror, string, lstr, serror)
3227 lstr=len_trim(string)
3228 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
3229 exit_flag=2
3230 RETURN
3231 END IF
3232 END IF
3233!
3234! Broadcast accumulated (full) data to all nodes.
3235!
3236 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
3237 IF (myerror.ne.mpi_success) THEN
3238 CALL mpi_error_string (myerror, string, lstr, serror)
3239 lstr=len_trim(string)
3240 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3241 exit_flag=2
3242 RETURN
3243 END IF
3244# endif
3245# ifdef PROFILE
3246!
3247!-----------------------------------------------------------------------
3248! Turn off time clocks.
3249!-----------------------------------------------------------------------
3250!
3251 CALL wclock_off (ng, model, 70, __line__, myfile)
3252# endif
3253!
3254 10 FORMAT (/,' MP_ASSEMBLEI_1D - inconsistent array size, Npts = ', &
3255 & i10,2x,i10,/,19x,'number of addressed array elements ', &
3256 & 'is incorrect.')
3257 20 FORMAT (/,' MP_ASSEMBLEI_1D - illegal special value, Aspv = ',i4, &
3258 & /,19x,'a zero value is needed for global reduction.')
3259 30 FORMAT (/,' MP_ASSEMBLEI_1D - error during ',a,' call, Task = ', &
3260 & i3.3,' Error = ',i3,/,19x,a)
3261!
3262 RETURN
3263 END SUBROUTINE mp_assemblei_1d
3264!
3265 SUBROUTINE mp_assemblei_2d (ng, model, Npts, Aspv, A, InpComm)
3267!***********************************************************************
3268! !
3269! This routine assembles a 2D integer array from all members in the !
3270! group. The collection of data from all nodes is achieved as a !
3271! reduction sum. !
3272! !
3273! On Input: !
3274! !
3275! ng Nested grid number. !
3276! model Calling model identifier. !
3277! Npts Number of collected data points, PROD(SIZE(A)). !
3278! Aspv Special value indicating that an array element is !
3279! not operated by the current parallel node. It must !
3280! be zero to collect data by a global reduction sum. !
3281! A 2D array to collect. !
3282! InpComm Communicator handle (integer, OPTIONAL). !
3283! !
3284! On Output: !
3285! !
3286! A Assembled 2D array. !
3287! !
3288!***********************************************************************
3289!
3290! Imported variable declarations.
3291!
3292 integer, intent(in) :: ng, model, Npts
3293
3294 integer, intent(in), optional :: InpComm
3295
3296 integer, intent(in) :: Aspv
3297
3298 integer, intent(inout) :: A(:,:)
3299!
3300! Local variable declarations.
3301!
3302 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
3303 integer :: i, rank, request
3304
3305 integer :: Asize(2)
3306
3307 integer, dimension(MPI_STATUS_SIZE) :: status
3308
3309# if defined ASSEMBLE_ALLGATHER
3310 integer, dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
3311 integer, dimension(Npts) :: Asend
3312# elif defined ASSEMBLE_ALLREDUCE
3313 integer, dimension(Npts) :: Arecv, Asend
3314# elif defined ASSEMBLE_SENDRECV
3315 integer, allocatable :: Arecv(:)
3316 integer, dimension(Npts) :: Asend
3317# endif
3318!
3319 character (len=MPI_MAX_ERROR_STRING) :: string
3320
3321 character (len=*), parameter :: MyFile = &
3322 & __FILE__//", mp_assemblei_2d"
3323
3324# ifdef PROFILE
3325!
3326!-----------------------------------------------------------------------
3327! Turn on time clocks.
3328!-----------------------------------------------------------------------
3329!
3330 CALL wclock_on (ng, model, 70, __line__, myfile)
3331# endif
3332# ifdef MPI
3333!
3334!-----------------------------------------------------------------------
3335! Set distributed-memory communicator handle (context ID).
3336!-----------------------------------------------------------------------
3337!
3338 IF (PRESENT(inpcomm)) THEN
3339 mycomm=inpcomm
3340 ELSE
3341 mycomm=ocn_comm_world
3342 END IF
3343# endif
3344!
3345!-----------------------------------------------------------------------
3346! Check input parameters.
3347!-----------------------------------------------------------------------
3348!
3349! Maximum automatic buffer memory size in bytes.
3350!
3351# if defined ASSEMBLE_ALLGATHER
3352 bmemmax(ng)=max(bmemmax(ng), real((npts+SIZE(arecv))*kind(a),r8))
3353# else
3354 bmemmax(ng)=max(bmemmax(ng), real(2*npts*kind(a),r8))
3355# endif
3356!
3357 asize(1)=ubound(a, dim=1)
3358 asize(2)=ubound(a, dim=2)
3359 mynpts=asize(1)*asize(2)
3360 IF (npts.ne.mynpts) THEN
3361 IF (master) THEN
3362 WRITE (stdout,10) npts, mynpts
3363 END IF
3364 exit_flag=7
3365 END IF
3366!
3367 IF (aspv.ne.0) THEN
3368 IF (master) THEN
3369 WRITE (stdout,20) aspv
3370 END IF
3371 exit_flag=7
3372 END IF
3373!
3374!-----------------------------------------------------------------------
3375! Collect data from all nodes.
3376!-----------------------------------------------------------------------
3377!
3378! Reshape input 2D data into 1D array to facilitate communications.
3379!
3380 asend=reshape(a, (/npts/))
3381
3382# if defined ASSEMBLE_ALLGATHER
3383!
3384! Collect data from all nodes.
3385!
3386 CALL mpi_allgather (asend, npts, mpi_integer, &
3387 & arecv, npts, mpi_integer, &
3388 & mycomm, myerror)
3389 IF (myerror.ne.mpi_success) THEN
3390 CALL mpi_error_string (myerror, string, lstr, serror)
3391 lstr=len_trim(string)
3392 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
3393 & string(1:lstr)
3394 exit_flag=2
3395 RETURN
3396 END IF
3397!
3398! Pack data according to special values: sum or ignore.
3399!
3400 nnodes=ntilei(ng)*ntilej(ng)-1
3401 IF (aspv.eq.0) THEN
3402 DO i=1,npts
3403 asend(i)=0
3404 DO rank=0,nnodes
3405 asend(i)=asend(i)+arecv(i,rank)
3406 END DO
3407 END DO
3408 ELSE
3409 DO i=1,npts
3410 DO rank=0,nnodes
3411 IF (arecv(i,rank).ne.aspv) THEN
3412 asend(i)=arecv(i,rank)
3413 END IF
3414 END DO
3415 END DO
3416 END IF
3417!
3418! Load collected data in output 2D array.
3419!
3420 a=reshape(asend, asize)
3421
3422# elif defined ASSEMBLE_ALLREDUCE
3423!
3424! Collect data from all nodes as a reduced sum.
3425!
3426 CALL mpi_allreduce (asend, arecv, npts, mpi_integer, mpi_sum, &
3427 & mycomm, myerror)
3428 IF (myerror.ne.mpi_success) THEN
3429 CALL mpi_error_string (myerror, string, lstr, serror)
3430 lstr=len_trim(string)
3431 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
3432 & string(1:lstr)
3433 exit_flag=2
3434 RETURN
3435 END IF
3436!
3437! Load collected data.
3438!
3439 a=reshape(arecv, asize)
3440
3441# elif defined ASSEMBLE_SENDRECV
3442!
3443 IF (myrank.eq.mymaster) THEN
3444!
3445! If master node, allocate and receive buffer.
3446!
3447 IF (.not.allocated(arecv)) THEN
3448 allocate (arecv(npts))
3449 END IF
3450!
3451! If master node, loop over other nodes to receive and accumulate the
3452! data.
3453!
3454 DO rank=1,ntilei(ng)*ntilej(ng)-1
3455 CALL mpi_irecv (arecv, npts, mpi_integer, rank, rank+5, &
3456 & mycomm, request, myerror)
3457 CALL mpi_wait (request, status, myerror)
3458 IF (myerror.ne.mpi_success) THEN
3459 CALL mpi_error_string (myerror, string, lstr, serror)
3460 lstr=len_trim(string)
3461 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
3462 exit_flag=2
3463 RETURN
3464 END IF
3465 DO i=1,npts
3466 asend(i)=asend(i)+arecv(i)
3467 END DO
3468 END DO
3469 deallocate (arecv)
3470!
3471! Load collected data in output 2D array.
3472!
3473 a=reshape(asend, asize)
3474!
3475! Otherwise, send data to master node.
3476!
3477 ELSE
3478 CALL mpi_isend (asend, npts, mpi_integer, mymaster, myrank+5, &
3479 & mycomm, request, myerror)
3480 CALL mpi_wait (request, status, myerror)
3481 IF (myerror.ne.mpi_success) THEN
3482 CALL mpi_error_string (myerror, string, lstr, serror)
3483 lstr=len_trim(string)
3484 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
3485 exit_flag=2
3486 RETURN
3487 END IF
3488 END IF
3489!
3490! Broadcast accumulated (full) data to all nodes.
3491!
3492 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
3493 IF (myerror.ne.mpi_success) THEN
3494 CALL mpi_error_string (myerror, string, lstr, serror)
3495 lstr=len_trim(string)
3496 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3497 exit_flag=2
3498 RETURN
3499 END IF
3500# endif
3501# ifdef PROFILE
3502!
3503!-----------------------------------------------------------------------
3504! Turn off time clocks.
3505!-----------------------------------------------------------------------
3506!
3507 CALL wclock_off (ng, model, 70, __line__, myfile)
3508# endif
3509!
3510 10 FORMAT (/,' MP_ASSEMBLEI_2D - inconsistent array size, Npts = ', &
3511 & i10,2x,i10,/,19x,'number of addressed array elements ', &
3512 & 'is incorrect.')
3513 20 FORMAT (/,' MP_ASSEMBLEI_2D - illegal special value, Aspv = ',i4, &
3514 & /,19x,'a zero value is needed for global reduction.')
3515 30 FORMAT (/,' MP_ASSEMBLEI_2D - error during ',a,' call, Task = ', &
3516 & i3.3,' Error = ',i3,/,19x,a)
3517!
3518 RETURN
3519 END SUBROUTINE mp_assemblei_2d
3520!
3521 SUBROUTINE mp_collect_f (ng, model, Npts, Aspv, A, InpComm)
3523!***********************************************************************
3524! !
3525! This routine collects a 1D floating-point array from all members !
3526! in the group. Then, it packs distributed data by removing the !
3527! special values. This routine is used when extracting station !
3528! data from tiled arrays. !
3529! !
3530! On Input: !
3531! !
3532! ng Nested grid number. !
3533! model Calling model identifier. !
3534! Npts Number of collected data points. !
3535! Aspv Special value indicating no data. This implies that !
3536! desired data is not bounded to a single tile. !
3537! A Distributed data to collect. !
3538! InpComm Communicator handle (integer, OPTIONAL). !
3539! !
3540! On Output: !
3541! !
3542! A Collected data. !
3543! !
3544!***********************************************************************
3545!
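! A usage sketch (illustrative; "Swrk" and "Nsta" are hypothetical, and
! a zero special value is used, which is valid for all of the collection
! methods used below):
!
!   real(r8) :: Swrk(Nsta)
!   Swrk=0.0_r8                            ! special value for unowned data
!   Swrk(owned)=...                        ! stations inside this tile
!   CALL mp_collect_f (ng, iNLM, Nsta, 0.0_r8, Swrk)
!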
3546! Imported variable declarations.
3547!
3548 integer, intent(in) :: ng, model, Npts
3549
3550 integer, intent(in), optional :: InpComm
3551!
3552 real(r8), intent(in) :: Aspv
3553
3554 real(r8), intent(inout) :: A(Npts)
3555!
3556! Local variable declarations.
3557!
3558 integer :: Lstr, MyCOMM, MyError, Nnodes, Serror
3559 integer :: i, rank, request
3560
3561 integer, dimension(MPI_STATUS_SIZE) :: status
3562!
3563# if defined COLLECT_ALLGATHER
3564 real(r8), dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
3565# elif defined COLLECT_ALLREDUCE
3566 real(r8), dimension(Npts) :: Asend
3567# else
3568 real(r8), allocatable :: Arecv(:)
3569# endif
3570!
3571 character (len=MPI_MAX_ERROR_STRING) :: string
3572
3573 character (len=*), parameter :: MyFile = &
3574 & __FILE__//", mp_collect_f"
3575
3576# ifdef PROFILE
3577!
3578!-----------------------------------------------------------------------
3579! Turn on time clocks.
3580!-----------------------------------------------------------------------
3581!
3582 CALL wclock_on (ng, model, 69, __line__, myfile)
3583# endif
3584# ifdef MPI
3585!
3586!-----------------------------------------------------------------------
3587! Set distributed-memory communicator handle (context ID).
3588!-----------------------------------------------------------------------
3589!
3590 IF (PRESENT(inpcomm)) THEN
3591 mycomm=inpcomm
3592 ELSE
3593 mycomm=ocn_comm_world
3594 END IF
3595# endif
3596!
3597!-----------------------------------------------------------------------
3598! Collect data from all nodes.
3599!-----------------------------------------------------------------------
3600!
3601! Maximum automatic buffer memory size in bytes.
3602!
3603# if defined COLLECT_ALLGATHER
3604 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
3605# else
3606 bmemmax(ng)=max(bmemmax(ng), real(npts*kind(a),r8))
3607# endif
3608
3609# if defined COLLECT_ALLGATHER
3610!
3611 CALL mpi_allgather (a, npts, mp_float, arecv, npts, mp_float, &
3612 & mycomm, myerror)
3613 IF (myerror.ne.mpi_success) THEN
3614 CALL mpi_error_string (myerror, string, lstr, serror)
3615 lstr=len_trim(string)
3616 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
3617 & string(1:lstr)
3618 exit_flag=2
3619 RETURN
3620 END IF
3621!
3622! Pack data according to special values: sum or ignore.
3623!
3624 nnodes=ntilei(ng)*ntilej(ng)-1
3625 IF (aspv.eq.0.0_r8) THEN
3626 DO i=1,npts
3627 a(i)=0.0_r8
3628 DO rank=0,nnodes
3629 a(i)=a(i)+arecv(i,rank)
3630 END DO
3631 END DO
3632 ELSE
3633 DO i=1,npts
3634 DO rank=0,nnodes
3635 IF (arecv(i,rank).ne.aspv) THEN
3636 a(i)=arecv(i,rank)
3637 END IF
3638 END DO
3639 END DO
3640 END IF
3641# elif defined COLLECT_ALLREDUCE
3642!
3643! Copy data to send.
3644!
3645 DO i=1,npts
3646 asend(i)=a(i)
3647 END DO
3648!
3649! Collect data from all nodes as a reduced sum.
3650!
3651 CALL mpi_allreduce (asend, a, npts, mp_float, mpi_sum, &
3652 & mycomm, myerror)
3653 IF (myerror.ne.mpi_success) THEN
3654 CALL mpi_error_string (myerror, string, lstr, serror)
3655 lstr=len_trim(string)
3656 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
3657 & string(1:lstr)
3658 exit_flag=2
3659 RETURN
3660 END IF
3661# else
3662!
3663 IF (myrank.eq.mymaster) THEN
3664!
3665! If master node, allocate and receive buffer.
3666!
3667 IF (.not.allocated(arecv)) THEN
3668 allocate (arecv(npts))
3669 END IF
3670!
3671! If master node, loop over other nodes to receive and accumulate the
3672! data.
3673!
3674 DO rank=1,ntilei(ng)*ntilej(ng)-1
3675 CALL mpi_irecv (arecv, npts, mp_float, rank, rank+5, &
3676 & mycomm, request, myerror)
3677 CALL mpi_wait (request, status, myerror)
3678 IF (myerror.ne.mpi_success) THEN
3679 CALL mpi_error_string (myerror, string, lstr, serror)
3680 lstr=len_trim(string)
3681 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
3682 exit_flag=2
3683 RETURN
3684 END IF
3685 DO i=1,npts
3686 a(i)=a(i)+arecv(i)
3687 END DO
3688 END DO
3689 deallocate (arecv)
3690!
3691! Otherwise, send data to master node.
3692!
3693 ELSE
3694 CALL mpi_isend (a, npts, mp_float, mymaster, myrank+5, &
3695 & mycomm, request, myerror)
3696 CALL mpi_wait (request, status, myerror)
3697 IF (myerror.ne.mpi_success) THEN
3698 CALL mpi_error_string (myerror, string, lstr, serror)
3699 lstr=len_trim(string)
3700 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
3701 exit_flag=2
3702 RETURN
3703 END IF
3704 END IF
3705!
3706! Broadcast accumulated (full) data to all nodes.
3707!
3708 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
3709 IF (myerror.ne.mpi_success) THEN
3710 CALL mpi_error_string (myerror, string, lstr, serror)
3711 lstr=len_trim(string)
3712 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3713 exit_flag=2
3714 RETURN
3715 END IF
3716# endif
3717 10 FORMAT (/,' MP_COLLECT_F - error during ',a,' call, Task = ', &
3718 & i3.3,' Error = ',i3,/,14x,a)
3719
3720# ifdef PROFILE
3721!
3722!-----------------------------------------------------------------------
3723! Turn off time clocks.
3724!-----------------------------------------------------------------------
3725!
3726 CALL wclock_off (ng, model, 69, __line__, myfile)
3727# endif
3728!
3729 RETURN
3730 END SUBROUTINE mp_collect_f
3731!
3732 SUBROUTINE mp_collect_i (ng, model, Npts, Aspv, A, InpComm)
3734!***********************************************************************
3735! !
3736! This routine collects a 1D integer array from all members in !
3737! the group. Then, it packs distributed data by removing the !
3738! special values. This routine is used when extracting station !
3739! data from tiled arrays. !
3740! !
3741! On Input: !
3742! !
3743! ng Nested grid number. !
3744! model Calling model identifier. !
3745! Npts Number of collected data points. !
3746! Aspv Special value indicating no data. This implies that !
3747! desired data is not bounded to a single tile. !
3748! A Distributed data to collect. !
3749! InpComm Communicator handle (integer, OPTIONAL). !
3750! !
3751! On Output: !
3752! !
3753! A Collected data. !
3754! !
3755!***********************************************************************
3756!
3757! Imported variable declarations.
3758!
3759 integer, intent(in) :: ng, model, Npts
3760
3761 integer, intent(in) :: Aspv
3762
3763 integer, intent(in), optional :: InpComm
3764
3765 integer, intent(inout) :: A(Npts)
3766!
3767! Local variable declarations.
3768!
3769 integer :: Lstr, MyCOMM, MyError, Nnodes, Serror
3770 integer :: i, rank, request
3771
3772 integer, dimension(MPI_STATUS_SIZE) :: status
3773
3774# if defined COLLECT_ALLGATHER
3775 integer, dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
3776# elif defined COLLECT_ALLREDUCE
3777 integer, dimension(Npts) :: Asend
3778# else
3779 integer, allocatable :: Arecv(:)
3780# endif
3781!
3782 character (len=MPI_MAX_ERROR_STRING) :: string
3783
3784 character (len=*), parameter :: MyFile = &
3785 & __FILE__//", mp_collect_i"
3786
3787# ifdef PROFILE
3788!
3789!-----------------------------------------------------------------------
3790! Turn on time clocks.
3791!-----------------------------------------------------------------------
3792!
3793 CALL wclock_on (ng, model, 69, __line__, myfile)
3794# endif
3795# ifdef MPI
3796!
3797!-----------------------------------------------------------------------
3798! Set distributed-memory communicator handle (context ID).
3799!-----------------------------------------------------------------------
3800!
3801 IF (PRESENT(inpcomm)) THEN
3802 mycomm=inpcomm
3803 ELSE
3804 mycomm=ocn_comm_world
3805 END IF
3806# endif
3807!
3808!-----------------------------------------------------------------------
3809! Collect data from all nodes.
3810!-----------------------------------------------------------------------
3811!
3812! Maximum automatic buffer memory size in bytes.
3813!
3814# if defined COLLECT_ALLGATHER
3815 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
3816# else
3817 bmemmax(ng)=max(bmemmax(ng), real(npts*kind(a),r8))
3818# endif
3819
3820# if defined COLLECT_ALLGATHER
3821!
3822 CALL mpi_allgather (a, npts, mpi_integer, arecv, npts, &
3823 & mpi_integer, mycomm, myerror)
3824 IF (myerror.ne.mpi_success) THEN
3825 CALL mpi_error_string (myerror, string, lstr, serror)
3826 lstr=len_trim(string)
3827 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
3828 & string(1:lstr)
3829 exit_flag=2
3830 RETURN
3831 END IF
3832!
3833! Pack data according to special values: sum or ignore.
3834!
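! For example, if Aspv=0 the collected A(i) is the sum over all nodes
! (0+0+7+0 gives 7), whereas a nonzero Aspv keeps whichever received
! entry differs from the special value.
!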
3835 nnodes=ntilei(ng)*ntilej(ng)-1
3836 IF (aspv.eq.0) THEN
3837 DO i=1,npts
3838 a(i)=0
3839 DO rank=0,nnodes
3840 a(i)=a(i)+arecv(i,rank)
3841 END DO
3842 END DO
3843 ELSE
3844 DO i=1,npts
3845 DO rank=0,nnodes
3846 IF (arecv(i,rank).ne.aspv) THEN
3847 a(i)=arecv(i,rank)
3848 END IF
3849 END DO
3850 END DO
3851 END IF
3852# elif defined COLLECT_ALLREDUCE
3853!
3854! Copy data to send.
3855!
3856 DO i=1,npts
3857 asend(i)=a(i)
3858 END DO
3859!
3860! Collect data from all nodes as a reduced sum.
3861!
3862 CALL mpi_allreduce (asend, a, npts, mpi_integer, mpi_sum, &
3863 & mycomm, myerror)
3864 IF (myerror.ne.mpi_success) THEN
3865 CALL mpi_error_string (myerror, string, lstr, serror)
3866 lstr=len_trim(string)
3867 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
3868 & string(1:lstr)
3869 exit_flag=2
3870 RETURN
3871 END IF
3872# else
3873!
3874 IF (myrank.eq.mymaster) THEN
3875!
3876! If master node, allocate the receive buffer.
3877!
3878 IF (.not.allocated(arecv)) THEN
3879 allocate (arecv(npts))
3880 END IF
3881!
3882! If master node, loop over other nodes to receive and accumulate the
3883! data.
3884!
3885 DO rank=1,ntilei(ng)*ntilej(ng)-1
3886 CALL mpi_irecv (arecv, npts, mpi_integer, rank, rank+5, &
3887 & mycomm, request, myerror)
3888 CALL mpi_wait (request, status, myerror)
3889 IF (myerror.ne.mpi_success) THEN
3890 CALL mpi_error_string (myerror, string, lstr, serror)
3891 lstr=len_trim(string)
3892 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
3893 exit_flag=2
3894 RETURN
3895 END IF
3896 DO i=1,npts
3897 a(i)=a(i)+arecv(i)
3898 END DO
3899 END DO
3900 deallocate (arecv)
3901!
3902! Otherwise, send data to master node.
3903!
3904 ELSE
3905 CALL mpi_isend (a, npts, mpi_integer, mymaster, myrank+5, &
3906 & mycomm, request, myerror)
3907 CALL mpi_wait (request, status, myerror)
3908 IF (myerror.ne.mpi_success) THEN
3909 CALL mpi_error_string (myerror, string, lstr, serror)
3910 lstr=len_trim(string)
3911 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
3912 exit_flag=2
3913 RETURN
3914 END IF
3915 END IF
3916!
3917! Broadcast accumulated (full) data to all nodes.
3918!
3919 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
3920 IF (myerror.ne.mpi_success) THEN
3921 CALL mpi_error_string (myerror, string, lstr, serror)
3922 lstr=len_trim(string)
3923 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3924 exit_flag=2
3925 RETURN
3926 END IF
3927# endif
3928 10 FORMAT (/,' MP_COLLECT_I - error during ',a,' call, Task = ', &
3929 & i3.3,' Error = ',i3,/,14x,a)
3930
3931# ifdef PROFILE
3932!
3933!-----------------------------------------------------------------------
3934! Turn off time clocks.
3935!-----------------------------------------------------------------------
3936!
3937 CALL wclock_off (ng, model, 69, __line__, myfile)
3938# endif
3939!
3940 RETURN
3941 END SUBROUTINE mp_collect_i
3942!
3943 SUBROUTINE mp_gather2d (ng, model, LBi, UBi, LBj, UBj, &
3944 & tindex, gtype, Ascl, &
3945# ifdef MASKING
3946 & Amask, &
3947# endif
3948 & A, Npts, Awrk, SetFillVal)
3949!
3950!***********************************************************************
3951! !
3952! This routine collects a 2D tiled, floating-point array from each !
3953! spawned node and stores it into a one-dimensional global array. It !
3954! is used to collect and pack output data. !
3955! !
3956! On Input: !
3957! !
3958! ng Nested grid number. !
3959! model Calling model identifier. !
3960! LBi I-dimension Lower bound. !
3961! UBi I-dimension Upper bound. !
3962! LBj J-dimension Lower bound. !
3963! UBj J-dimension Upper bound. !
3964! tindex Time record index to process. !
3965! gtype C-grid type. If negative and the Land/Sea mask is !
3966! available, only water-points are processed. !
3967! Ascl Factor to scale field before writing. !
3968! Amask Land/Sea mask, if any. !
3969! A 2D tiled, floating-point array to process. !
3970! SetFillVal Logical switch to set fill value in land areas !
3971! (optional). !
3972! !
3973! On Output: !
3974! !
3975! Npts Number of points processed in Awrk. !
3976! Awrk Collected data from each node packed into 1D array !
3977! in column-major order. That is, in the same way !
3978! that Fortran multi-dimensional arrays are stored !
3979! in memory. !
3980! !
3981!***********************************************************************
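!
!  Usage sketch (illustrative only; "Fout" is a hypothetical output
!  buffer, GRID(ng)%h and GRID(ng)%rmask are assumed available from
!  "mod_grid" in the caller, and the mask argument is present only
!  when MASKING is defined): gather the scaled bathymetry at
!  RHO-points into a 1D vector for writing.
!
!      CALL mp_gather2d (ng, model, LBi, UBi, LBj, UBj, &
!     &                  0, r2dvar, 1.0_dp, &
!     &                  GRID(ng)%rmask, &
!     &                  GRID(ng)%h, Npts, Fout)
!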
3982!
3983! Imported variable declarations.
3984!
3985 logical, intent(in), optional :: SetFillVal
3986!
3987 integer, intent(in) :: ng, model, tindex, gtype
3988 integer, intent(in) :: LBi, UBi, LBj, UBj
3989 integer, intent(out) :: Npts
3990!
3991 real(dp), intent(in) :: Ascl
3992
3993# ifdef MASKING
3994 real(r8), intent(in) :: Amask(LBi:UBi,LBj:UBj)
3995# endif
3996 real(r8), intent(in) :: A(LBi:UBi,LBj:UBj)
3997 real(r8), intent(out) :: Awrk(:)
3998!
3999! Local variable declarations.
4000!
4001# ifdef MASKING
4002 logical :: LandFill
4003# endif
4004 integer :: Cgrid, Ntasks, ghost, rank
4005 integer :: Io, Ie, Jo, Je, Ioff, Joff
4006 integer :: Imin, Imax, Jmin, Jmax
4007 integer :: iLB, iUB, jLB, jUB
4008 integer :: Asize, Isize, Jsize, IJsize
4009 integer :: Lstr, MyError, MyType, Serror, Srequest
4010 integer :: i, ic, ij, j, jc, nc
4011!
4012 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
4013# ifdef GATHER_SENDRECV
4014 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: MySize
4015 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
4016
4017 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
4018 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
4019!
4020 real(r8), allocatable :: Arecv(:,:)
4021# else
4022 real(r8), allocatable :: Arecv(:)
4023# endif
4024 real(r8), allocatable :: Asend(:)
4025!
4026 character (len=MPI_MAX_ERROR_STRING) :: string
4027
4028 character (len=*), parameter :: MyFile = &
4029 & __FILE__//", mp_gather2d"
4030
4031# ifdef PROFILE
4032!
4033!-----------------------------------------------------------------------
4034! Turn on time clocks.
4035!-----------------------------------------------------------------------
4036!
4037 CALL wclock_on (ng, model, 66, __line__, myfile)
4038# endif
4039!
4040!-----------------------------------------------------------------------
4041! Set horizontal starting and ending indices for parallel domain
4042! partitions in the XI- and ETA-directions.
4043!-----------------------------------------------------------------------
4044!
4045! Maximum automatic buffer memory size in bytes.
4046!
4047 bmemmax(ng)=max(bmemmax(ng), real(tilesize(ng)*kind(a),r8))
4048!
4049! Set full grid first and last point according to staggered C-grid
4050! classification. Notice that the offsets are for the private array
4051! counter.
4052!
4053 mytype=abs(gtype)
4054
4055 SELECT CASE (mytype)
4056 CASE (p2dvar, p3dvar)
4057 io=iobounds(ng) % ILB_psi
4058 ie=iobounds(ng) % IUB_psi
4059 jo=iobounds(ng) % JLB_psi
4060 je=iobounds(ng) % JUB_psi
4061 ioff=0
4062 joff=1
4063 CASE (r2dvar, r3dvar)
4064 io=iobounds(ng) % ILB_rho
4065 ie=iobounds(ng) % IUB_rho
4066 jo=iobounds(ng) % JLB_rho
4067 je=iobounds(ng) % JUB_rho
4068 ioff=1
4069 joff=0
4070 CASE (u2dvar, u3dvar)
4071 io=iobounds(ng) % ILB_u
4072 ie=iobounds(ng) % IUB_u
4073 jo=iobounds(ng) % JLB_u
4074 je=iobounds(ng) % JUB_u
4075 ioff=0
4076 joff=0
4077 CASE (v2dvar, v3dvar)
4078 io=iobounds(ng) % ILB_v
4079 ie=iobounds(ng) % IUB_v
4080 jo=iobounds(ng) % JLB_v
4081 je=iobounds(ng) % JUB_v
4082 ioff=1
4083 joff=1
4084 CASE DEFAULT ! RHO-points
4085 io=iobounds(ng) % ILB_rho
4086 ie=iobounds(ng) % IUB_rho
4087 jo=iobounds(ng) % JLB_rho
4088 je=iobounds(ng) % JUB_rho
4089 ioff=1
4090 joff=0
4091 END SELECT
4092!
4093 isize=ie-io+1
4094 jsize=je-jo+1
4095 ijsize=isize*jsize
4096 npts=isize*jsize
4097!
4098! Set "GATHERV" counts and displacement vectors. Use non-overlapping
4099! (ghost=0) ranges according to tile rank.
4100!
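! For instance, with a 2x2 tile partition in which every tile owns a
! 10x8 block of points, counts(0:3)=80 and displs(0:3)=0,80,160,240,
! so MPI_GATHERV places each tile's packed segment back to back in
! the receive buffer.
!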
4101 ghost=0
4102!
4103 SELECT CASE (mytype)
4104 CASE (p2dvar, p3dvar)
4105 cgrid=1
4106 CASE (r2dvar, r3dvar)
4107 cgrid=2
4108 CASE (u2dvar, u3dvar)
4109 cgrid=3
4110 CASE (v2dvar, v3dvar)
4111 cgrid=4
4112 CASE DEFAULT ! RHO-points
4113 cgrid=2
4114 END SELECT
4115!
4116 ntasks=ntilei(ng)*ntilej(ng)
4117 nc=0
4118 DO rank=0,ntasks-1
4119 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
4120 iub=bounds(ng) % Imax(cgrid,ghost,rank)
4121 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
4122 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
4123# ifdef GATHER_SENDRECV
4124 mysize(rank)=(iub-ilb+1)*(jub-jlb+1)
4125# endif
4126 displs(rank)=nc
4127 DO j=jlb,jub
4128 DO i=ilb,iub
4129 nc=nc+1
4130 END DO
4131 END DO
4132 counts(rank)=nc-displs(rank)
4133 END DO
4134!
4135!-----------------------------------------------------------------------
4136! Pack and scale input tiled data.
4137!-----------------------------------------------------------------------
4138!
4139 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
4140 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
4141 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
4142 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
4143!
4144 asize=(imax-imin+1)*(jmax-jmin+1)
4145 allocate ( asend(asize) )
4146 asend=0.0_r8
4147!
4148 nc=0
4149 DO j=jmin,jmax
4150 DO i=imin,imax
4151 nc=nc+1
4152 asend(nc)=a(i,j)*ascl
4153 END DO
4154 END DO
4155
4156# ifdef MASKING
4157!
4158! If overwriting Land/Sea mask or processing water-points only, flag
4159! land-points with special value.
4160!
4161 IF (PRESENT(setfillval)) THEN
4162 landfill=setfillval
4163 ELSE
4164 landfill=tindex.gt.0
4165 END IF
4166 IF (gtype.lt.0) THEN
4167 nc=0
4168 DO j=jmin,jmax
4169 DO i=imin,imax
4170 nc=nc+1
4171 IF (amask(i,j).eq.0.0_r8) THEN
4172 asend(nc)=spval
4173 END IF
4174 END DO
4175 END DO
4176 ELSE IF (landfill) THEN
4177 nc=0
4178 DO j=jmin,jmax
4179 DO i=imin,imax
4180 nc=nc+1
4181 IF (amask(i,j).eq.0.0_r8) THEN
4182 asend(nc)=spval
4183 END IF
4184 END DO
4185 END DO
4186 END IF
4187# endif
4188!
4189!-----------------------------------------------------------------------
4190! Gather requested global data from tiled arrays.
4191!-----------------------------------------------------------------------
4192
4193# ifdef GATHER_SENDRECV
4194!
4195 allocate ( arecv(ijsize, ntasks-1) )
4196 arecv=0.0_r8
4197!
4198! If master processor, unpack the send buffer since there is no
4199! need to distribute.
4200!
4201 IF (myrank.eq.mymaster) THEN
4202 nc=0
4203 DO j=jmin,jmax
4204 jc=(j-joff)*isize
4205 DO i=imin,imax
4206 nc=nc+1
4207 ic=i+ioff+jc
4208 awrk(ic)=asend(nc)
4209 END DO
4210 END DO
4211 END IF
4212!
4213! Send, receive, and unpack data.
4214!
4215 IF (myrank.eq.mymaster) THEN
4216 DO rank=1,ntasks-1
4217 CALL mpi_irecv (arecv(1,rank), mysize(rank), mp_float, rank, &
4218 & rank+5, ocn_comm_world, rrequest(rank), &
4219 & myerror)
4220 END DO
4221 DO rank=1,ntasks-1
4222 CALL mpi_wait (rrequest(rank), rstatus, myerror)
4223 IF (myerror.ne.mpi_success) THEN
4224 CALL mpi_error_string (myerror, string, lstr, serror)
4225 lstr=len_trim(string)
4226 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
4227 10 FORMAT (/,' MP_GATHER2D - error during ',a, &
4228 & ' call, Task = ',i3.3,' Error = ',i3,/,13x,a)
4229 exit_flag=2
4230 RETURN
4231 END IF
4232!
4233 imin=bounds(ng) % Imin(cgrid,ghost,rank)
4234 imax=bounds(ng) % Imax(cgrid,ghost,rank)
4235 jmin=bounds(ng) % Jmin(cgrid,ghost,rank)
4236 jmax=bounds(ng) % Jmax(cgrid,ghost,rank)
4237!
4238 nc=0
4239 DO j=jmin,jmax
4240 jc=(j-joff)*isize
4241 DO i=imin,imax
4242 nc=nc+1
4243 ic=i+ioff+jc
4244 awrk(ic)=arecv(nc,rank)
4245 END DO
4246 END DO
4247 END DO
4248 ELSE
4249 CALL mpi_isend (asend, asize, mp_float, mymaster, &
4250 & myrank+5, ocn_comm_world, srequest, myerror)
4251 CALL mpi_wait (srequest, sstatus, myerror)
4252 IF (myerror.ne.mpi_success) THEN
4253 CALL mpi_error_string (myerror, string, lstr, serror)
4254 lstr=len_trim(string)
4255 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
4256 exit_flag=2
4257 RETURN
4258 END IF
4259 END IF
4260
4261# else
4262!
4263! Gather local tiled data into a global array.
4264!
4265 allocate ( arecv(ijsize) )
4266 arecv=0.0_r8
4267!
4268 CALL mpi_gatherv (asend, asize, mp_float, &
4269 & arecv, counts, displs, mp_float, &
4270 & mymaster, ocn_comm_world, myerror)
4271 IF (myerror.ne.mpi_success) THEN
4272 CALL mpi_error_string (myerror, string, lstr, serror)
4273 WRITE (stdout,10) 'MPI_GATHERV', myrank, myerror, trim(string)
4274 10 FORMAT (/,' MP_GATHER2D - error during ',a,' call, Task = ', &
4275 & i3.3, ' Error = ',i3,/,15x,a)
4276 exit_flag=2
4277 RETURN
4278 END IF
4279!
4280! Unpack gathered data in a contiguous memory order and remap every
4281! task segment.
4282!
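! The remapping below stores tile element (i,j) at the column-major
! position ij=(i+Ioff)+(j-Joff)*Isize of the global vector; for
! example, at RHO-points (Ioff=1, Joff=0) with Isize=102, point
! (i=0,j=0) lands at ij=1 and point (i=0,j=1) at ij=103.
!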
4283 nc=0
4284 DO rank=0,ntasks-1
4285 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
4286 iub=bounds(ng) % Imax(cgrid,ghost,rank)
4287 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
4288 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
4289 DO j=jlb,jub
4290 jc=(j-joff)*isize
4291 DO i=ilb,iub
4292 ij=i+ioff+jc
4293 nc=nc+1
4294 awrk(ij)=arecv(nc)
4295 END DO
4296 END DO
4297 END DO
4298# endif
4299# ifdef MASKING
4300!
4301! If processing only water-points, remove land points and repack.
4302!
4303 IF ((myrank.eq.mymaster).and.(gtype.lt.0)) THEN
4304 nc=0
4305 DO i=1,ijsize
4306 IF (awrk(i).lt.spval) THEN
4307 nc=nc+1
4308 awrk(nc)=awrk(i)
4309 END IF
4310 END DO
4311 npts=nc
4312 END IF
4313# endif
4314!
4315! Deallocate local arrays.
4316!
4317 deallocate (arecv)
4318 deallocate (asend)
4319
4320# ifdef PROFILE
4321!
4322!-----------------------------------------------------------------------
4323! Turn off time clocks.
4324!-----------------------------------------------------------------------
4325!
4326 CALL wclock_off (ng, model, 66, __line__, myfile)
4327# endif
4328!
4329 RETURN
4330 END SUBROUTINE mp_gather2d
4331
4332# ifdef GRID_EXTRACT
4333!
4334 SUBROUTINE mp_gather2d_xtr (ng, model, LBi, UBi, LBj, UBj, &
4335 & tindex, gtype, Ascl, &
4336# ifdef MASKING
4337 & Amask, &
4338# endif
4339 & A, Npts, Awrk, SetFillVal)
4340!
4341!***********************************************************************
4342! !
4343! This routine collects a 2D tiled, floating-point array from each !
4344! spawned node and stores it into a one-dimensional global array. It !
4345! is used to collect and pack output data. !
4346! !
4347! On Input: !
4348! !
4349! ng Nested grid number. !
4350! model Calling model identifier. !
4351! LBi I-dimension Lower bound. !
4352! UBi I-dimension Upper bound. !
4353! LBj J-dimension Lower bound. !
4354! UBj J-dimension Upper bound. !
4355! tindex Time record index to process. !
4356! gtype C-grid type. If negative and the Land/Sea mask is !
4357! available, only water-points are processed. !
4358! Ascl Factor to scale field before writing. !
4359! Amask Land/Sea mask, if any. !
4360! A 2D tiled, floating-point array to process. !
4361! SetFillVal Logical switch to set fill value in land areas !
4362! (optional). !
4363! !
4364! On Output: !
4365! !
4366! Npts Number of points processed in Awrk. !
4367! Awrk Collected data from each node packed into 1D array !
4368! in column-major order. That is, in the same way !
4369! that Fortran multi-dimensional arrays are stored !
4370! in memory. !
4371! !
4372!***********************************************************************
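!
!  Usage sketch (illustrative only; "xtr_LBi", "xtr_UBi", "xtr_LBj",
!  "xtr_UBj", "Xmask", "Xfield", and "Fxtr" are hypothetical names):
!  the argument list matches mp_gather2d, but the tile bounds must be
!  those of the GRID_EXTRACT output grid so that Awrk is sized from
!  "xtr_iobounds".
!
!      CALL mp_gather2d_xtr (ng, model, xtr_LBi, xtr_UBi, &
!     &                      xtr_LBj, xtr_UBj, 0, r2dvar, 1.0_dp, &
!     &                      Xmask, Xfield, Npts, Fxtr)
!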
4373!
4374! Imported variable declarations.
4375!
4376 logical, intent(in), optional :: SetFillVal
4377!
4378 integer, intent(in) :: ng, model, tindex, gtype
4379 integer, intent(in) :: LBi, UBi, LBj, UBj
4380 integer, intent(out) :: Npts
4381!
4382 real(dp), intent(in) :: Ascl
4383
4384# ifdef MASKING
4385 real(r8), intent(in) :: Amask(LBi:UBi,LBj:UBj)
4386# endif
4387 real(r8), intent(in) :: A(LBi:UBi,LBj:UBj)
4388 real(r8), intent(out) :: Awrk(:)
4389!
4390! Local variable declarations.
4391!
4392# ifdef MASKING
4393 logical :: LandFill
4394# endif
4395 integer :: Cgrid, Ntasks, ghost, rank
4396 integer :: Io, Ie, Jo, Je, Ioff, Joff
4397 integer :: Imin, Imax, Jmin, Jmax
4398 integer :: iLB, iUB, jLB, jUB
4399 integer :: Asize, Isize, Jsize, IJsize
4400 integer :: Lstr, MyError, MyType, Serror, Srequest
4401 integer :: i, ic, ij, j, jc, nc
4402!
4403 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
4404# ifdef GATHER_SENDRECV
4405 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: MySize
4406 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
4407
4408 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
4409 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
4410!
4411 real(r8), allocatable :: Arecv(:,:)
4412# else
4413 real(r8), allocatable :: Arecv(:)
4414# endif
4415 real(r8), allocatable :: Asend(:)
4416!
4417 character (len=MPI_MAX_ERROR_STRING) :: string
4418
4419 character (len=*), parameter :: MyFile = &
4420 & __FILE__//", mp_gather2d_xtr"
4421
4422# ifdef PROFILE
4423!
4424!-----------------------------------------------------------------------
4425! Turn on time clocks.
4426!-----------------------------------------------------------------------
4427!
4428 CALL wclock_on (ng, model, 66, __line__, myfile)
4429# endif
4430!
4431!-----------------------------------------------------------------------
4432! Set horizontal starting and ending indices for parallel domain
4433! partitions in the XI- and ETA-directions.
4434!-----------------------------------------------------------------------
4435!
4436! Maximum automatic buffer memory size in bytes.
4437!
4438 bmemmax(ng)=max(bmemmax(ng), real(tilesize(ng)*kind(a),r8))
4439!
4440! Set full grid first and last point according to staggered C-grid
4441! classification. Notice that the offsets are for the private array
4442! counter.
4443!
4444 mytype=abs(gtype)
4445
4446 SELECT CASE (mytype)
4447 CASE (p2dvar, p3dvar)
4448 io=xtr_iobounds(ng) % ILB_psi
4449 ie=xtr_iobounds(ng) % IUB_psi
4450 jo=xtr_iobounds(ng) % JLB_psi
4451 je=xtr_iobounds(ng) % JUB_psi
4452 ioff=0
4453 joff=1
4454 CASE (r2dvar, r3dvar)
4455 io=xtr_iobounds(ng) % ILB_rho
4456 ie=xtr_iobounds(ng) % IUB_rho
4457 jo=xtr_iobounds(ng) % JLB_rho
4458 je=xtr_iobounds(ng) % JUB_rho
4459 ioff=1
4460 joff=0
4461 CASE (u2dvar, u3dvar)
4462 io=xtr_iobounds(ng) % ILB_u
4463 ie=xtr_iobounds(ng) % IUB_u
4464 jo=xtr_iobounds(ng) % JLB_u
4465 je=xtr_iobounds(ng) % JUB_u
4466 ioff=0
4467 joff=0
4468 CASE (v2dvar, v3dvar)
4469 io=xtr_iobounds(ng) % ILB_v
4470 ie=xtr_iobounds(ng) % IUB_v
4471 jo=xtr_iobounds(ng) % JLB_v
4472 je=xtr_iobounds(ng) % JUB_v
4473 ioff=1
4474 joff=1
4475 CASE DEFAULT ! RHO-points
4476 io=xtr_iobounds(ng) % ILB_rho
4477 ie=xtr_iobounds(ng) % IUB_rho
4478 jo=xtr_iobounds(ng) % JLB_rho
4479 je=xtr_iobounds(ng) % JUB_rho
4480 ioff=1
4481 joff=0
4482 END SELECT
4483!
4484 isize=ie-io+1
4485 jsize=je-jo+1
4486 ijsize=isize*jsize
4487 npts=isize*jsize
4488!
4489! Set 'GATHERV' counts and displacement vectors. Use non-overlapping
4490! (ghost=0) ranges according to tile rank.
4491!
4492 ghost=0
4493!
4494 SELECT CASE (mytype)
4495 CASE (p2dvar, p3dvar)
4496 cgrid=1
4497 CASE (r2dvar, r3dvar)
4498 cgrid=2
4499 CASE (u2dvar, u3dvar)
4500 cgrid=3
4501 CASE (v2dvar, v3dvar)
4502 cgrid=4
4503 CASE DEFAULT ! RHO-points
4504 cgrid=2
4505 END SELECT
4506!
4507 ntasks=ntilei(ng)*ntilej(ng)
4508 nc=0
4509 DO rank=0,ntasks-1
4510 ilb=xtr_bounds(ng) % Imin(cgrid,ghost,rank)
4511 iub=xtr_bounds(ng) % Imax(cgrid,ghost,rank)
4512 jlb=xtr_bounds(ng) % Jmin(cgrid,ghost,rank)
4513 jub=xtr_bounds(ng) % Jmax(cgrid,ghost,rank)
4514# ifdef GATHER_SENDRECV
4515 mysize(rank)=(iub-ilb+1)*(jub-jlb+1)
4516# endif
4517 displs(rank)=nc
4518 DO j=jlb,jub
4519 DO i=ilb,iub
4520 nc=nc+1
4521 END DO
4522 END DO
4523 counts(rank)=nc-displs(rank)
4524 END DO
4525!
4526!-----------------------------------------------------------------------
4527! Pack and scale input tiled data.
4528!-----------------------------------------------------------------------
4529!
4530 imin=xtr_bounds(ng) % Imin(cgrid,ghost,myrank)
4531 imax=xtr_bounds(ng) % Imax(cgrid,ghost,myrank)
4532 jmin=xtr_bounds(ng) % Jmin(cgrid,ghost,myrank)
4533 jmax=xtr_bounds(ng) % Jmax(cgrid,ghost,myrank)
4534!
4535 asize=(imax-imin+1)*(jmax-jmin+1)
4536 allocate ( asend(asize) )
4537 asend=0.0_r8
4538!
4539 nc=0
4540 DO j=jmin,jmax
4541 DO i=imin,imax
4542 nc=nc+1
4543 asend(nc)=a(i,j)*ascl
4544 END DO
4545 END DO
4546
4547# ifdef MASKING
4548!
4549! If overwriting Land/Sea mask or processing water-points only, flag
4550! land-points with special value.
4551!
4552 IF (PRESENT(setfillval)) THEN
4553 landfill=setfillval
4554 ELSE
4555 landfill=tindex.gt.0
4556 END IF
4557 IF (gtype.lt.0) THEN
4558 nc=0
4559 DO j=jmin,jmax
4560 DO i=imin,imax
4561 nc=nc+1
4562 IF (amask(i,j).eq.0.0_r8) THEN
4563 asend(nc)=spval
4564 END IF
4565 END DO
4566 END DO
4567 ELSE IF (landfill) THEN
4568 nc=0
4569 DO j=jmin,jmax
4570 DO i=imin,imax
4571 nc=nc+1
4572 IF (amask(i,j).eq.0.0_r8) THEN
4573 asend(nc)=spval
4574 END IF
4575 END DO
4576 END DO
4577 END IF
4578# endif
4579!
4580!-----------------------------------------------------------------------
4581! Gather requested global data from tiled arrays.
4582!-----------------------------------------------------------------------
4583
4584# ifdef GATHER_SENDRECV
4585!
4586 allocate ( arecv(ijsize, ntasks-1) )
4587 arecv=0.0_r8
4588!
4589! If master processor, unpack the send buffer since there is no
4590! need to distribute.
4591!
4592 IF (myrank.eq.mymaster) THEN
4593 nc=0
4594 DO j=jmin,jmax
4595 jc=(j-joff)*isize
4596 DO i=imin,imax
4597 nc=nc+1
4598 ic=i+ioff+jc
4599 awrk(ic)=asend(nc)
4600 END DO
4601 END DO
4602 END IF
4603!
4604! Send, receive, and unpack data.
4605!
4606 IF (myrank.eq.mymaster) THEN
4607 DO rank=1,ntasks-1
4608 CALL mpi_irecv (arecv(1,rank), mysize(rank), mp_float, rank, &
4609 & rank+5, ocn_comm_world, rrequest(rank), &
4610 & myerror)
4611 END DO
4612 DO rank=1,ntasks-1
4613 CALL mpi_wait (rrequest(rank), rstatus, myerror)
4614 IF (myerror.ne.mpi_success) THEN
4615 CALL mpi_error_string (myerror, string, lstr, serror)
4616 lstr=len_trim(string)
4617 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
4618 10 FORMAT (/,' MP_GATHER2D_XTR - error during ',a, &
4619 & ' call, Task = ',i3.3,' Error = ',i3,/,13x,a)
4620 exit_flag=2
4621 RETURN
4622 END IF
4623!
4624 imin=xtr_bounds(ng) % Imin(cgrid,ghost,rank)
4625 imax=xtr_bounds(ng) % Imax(cgrid,ghost,rank)
4626 jmin=xtr_bounds(ng) % Jmin(cgrid,ghost,rank)
4627 jmax=xtr_bounds(ng) % Jmax(cgrid,ghost,rank)
4628!
4629 nc=0
4630 DO j=jmin,jmax
4631 jc=(j-joff)*isize
4632 DO i=imin,imax
4633 nc=nc+1
4634 ic=i+ioff+jc
4635 awrk(ic)=arecv(nc,rank)
4636 END DO
4637 END DO
4638 END DO
4639 ELSE
4640 CALL mpi_isend (asend, mysize(myrank), mp_float, mymaster, &
4641 & myrank+5, ocn_comm_world, srequest, myerror)
4642 CALL mpi_wait (srequest, sstatus, myerror)
4643 IF (myerror.ne.mpi_success) THEN
4644 CALL mpi_error_string (myerror, string, lstr, serror)
4645 lstr=len_trim(string)
4646 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
4647 exit_flag=2
4648 RETURN
4649 END IF
4650 END IF
4651
4652# else
4653!
4654! Gather local tiled data into a global array.
4655!
4656 allocate ( arecv(ijsize) )
4657 arecv=0.0_r8
4658!
4659 CALL mpi_gatherv (asend, asize, mp_float, &
4660 & arecv, counts, displs, mp_float, &
4661 & mymaster, ocn_comm_world, myerror)
4662 IF (myerror.ne.mpi_success) THEN
4663 CALL mpi_error_string (myerror, string, lstr, serror)
4664 WRITE (stdout,10) 'MPI_GATHERV', myrank, myerror, trim(string)
4665 10 FORMAT (/,' MP_GATHER2D_XTR - error during ',a,' call, Task = ',&
4666 & i3.3, ' Error = ',i3,/,15x,a)
4667 exit_flag=2
4668 RETURN
4669 END IF
4670!
4671! Unpack gathered data in a contiguous memory order and remap every
4672! task segment.
4673!
4674 nc=0
4675 DO rank=0,ntasks-1
4676 ilb=xtr_bounds(ng) % Imin(cgrid,ghost,rank)
4677 iub=xtr_bounds(ng) % Imax(cgrid,ghost,rank)
4678 jlb=xtr_bounds(ng) % Jmin(cgrid,ghost,rank)
4679 jub=xtr_bounds(ng) % Jmax(cgrid,ghost,rank)
4680 DO j=jlb,jub
4681 jc=(j-joff)*isize
4682 DO i=ilb,iub
4683 ij=i+ioff+jc
4684 nc=nc+1
4685 awrk(ij)=arecv(nc)
4686 END DO
4687 END DO
4688 END DO
4689# endif
4690# ifdef MASKING
4691!
4692! If processing only water-points, remove land points and repack.
4693!
4694 IF ((myrank.eq.mymaster).and.(gtype.lt.0)) THEN
4695 nc=0
4696 DO i=1,ijsize
4697 IF (awrk(i).lt.spval) THEN
4698 nc=nc+1
4699 awrk(nc)=awrk(i)
4700 END IF
4701 END DO
4702 npts=nc
4703 END IF
4704# endif
4705!
4706! Deallocate local arrays.
4707!
4708 deallocate (arecv)
4709 deallocate (asend)
4710
4711# ifdef PROFILE
4712!
4713!-----------------------------------------------------------------------
4714! Turn off time clocks.
4715!-----------------------------------------------------------------------
4716!
4717 CALL wclock_off (ng, model, 66, __line__, myfile)
4718# endif
4719!
4720 RETURN
4721 END SUBROUTINE mp_gather2d_xtr
4722# endif
4723!
4724 SUBROUTINE mp_gather3d (ng, model, LBi, UBi, LBj, UBj, LBk, UBk, &
4725 & tindex, gtype, Ascl, &
4726# ifdef MASKING
4727 & Amask, &
4728# endif
4729 & A, Npts, Awrk, SetFillVal)
4730!
4731!***********************************************************************
4732! !
4733! This routine collects a 3D tiled, floating-point array from each !
4734! spawned node and stores it into a one-dimensional global array. It !
4735! is used to collect and pack output data. !
4736! !
4737! On Input: !
4738! !
4739! ng Nested grid number. !
4740! model Calling model identifier. !
4741! LBi I-dimension Lower bound. !
4742! UBi I-dimension Upper bound. !
4743! LBj J-dimension Lower bound. !
4744! UBj J-dimension Upper bound. !
4745! LBk K-dimension Lower bound. !
4746! UBk K-dimension Upper bound. !
4747! tindex Time record index to process. !
4748! gtype C-grid type. If negative and the Land/Sea mask is !
4749! available, only water-points are processed. !
4750! Ascl Factor to scale field before writing. !
4751! Amask Land/Sea mask, if any. !
4752! A 3D tiled, floating-point array to process. !
4753! SetFillVal Logical switch to set fill value in land areas !
4754! (optional). !
4755! !
4756! On Output: !
4757! !
4758! Npts Number of points processed in Awrk. !
4759! Awrk Collected data from each node packed into 1D array !
4760! in column-major order. That is, in the same way !
4761! that Fortran multi-dimensional arrays are stored !
4762! in memory. !
4763! !
4764!***********************************************************************
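!
!  Usage sketch (illustrative only; "Afield" and "Fout3d" are
!  hypothetical names, and the mask argument is present only when
!  MASKING is defined): gather a scaled 3D RHO-point field over the
!  vertical levels 1:N(ng).
!
!      CALL mp_gather3d (ng, model, LBi, UBi, LBj, UBj, 1, N(ng), &
!     &                  tindex, r3dvar, 1.0_dp, &
!     &                  GRID(ng)%rmask, &
!     &                  Afield, Npts, Fout3d)
!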
4765!
4766! Imported variable declarations.
4767!
4768 logical, intent(in), optional :: SetFillVal
4769!
4770 integer, intent(in) :: ng, model, tindex, gtype
4771 integer, intent(in) :: LBi, UBi, LBj, UBj, LBk, UBk
4772 integer, intent(out) :: Npts
4773!
4774 real(dp), intent(in) :: Ascl
4775
4776# ifdef MASKING
4777 real(r8), intent(in) :: Amask(LBi:UBi,LBj:UBj)
4778# endif
4779 real(r8), intent(in) :: A(LBi:UBi,LBj:UBj,LBk:UBk)
4780 real(r8), intent(out) :: Awrk(:)
4781!
4782! Local variable declarations.
4783!
4784# ifdef MASKING
4785 logical :: LandFill
4786!
4787# endif
4788 integer :: Cgrid, Ntasks, ghost, rank
4789 integer :: Io, Ie, Jo, Je, Ioff, Joff, Koff
4790 integer :: Imin, Imax, Jmin, Jmax
4791 integer :: iLB, iUB, jLB, jUB
4792 integer :: Asize, Isize, Jsize, Ksize, IJsize
4793 integer :: Lstr, MyError, MyType, Serror, Srequest
4794 integer :: i, ic, ijk, j, jc, k, kc, nc
4795!
4796 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
4797# ifdef GATHER_SENDRECV
4798 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: MySize
4799 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
4800
4801 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
4802 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
4803!
4804 real(r8), allocatable :: Arecv(:,:)
4805# else
4806 real(r8), allocatable :: Arecv(:)
4807# endif
4808 real(r8), allocatable :: Asend(:)
4809!
4810 character (len=MPI_MAX_ERROR_STRING) :: string
4811
4812 character (len=*), parameter :: MyFile = &
4813 & __FILE__//", mp_gather3d"
4814
4815# ifdef PROFILE
4816!
4817!-----------------------------------------------------------------------
4818! Turn on time clocks.
4819!-----------------------------------------------------------------------
4820!
4821 CALL wclock_on (ng, model, 66, __line__, myfile)
4822# endif
4823!
4824!-----------------------------------------------------------------------
4825! Set horizontal starting and ending indices for parallel domain
4826! partitions in the XI- and ETA-directions.
4827!-----------------------------------------------------------------------
4828!
4829! Maximum automatic buffer memory size in bytes.
4830!
4831 bmemmax(ng)=max(bmemmax(ng), real(2*SIZE(awrk)*kind(a),r8))
4832!
4833! Set full grid first and last point according to staggered C-grid
4834! classification. Notice that the offsets are for the private array
4835! counter.
4836!
4837 mytype=abs(gtype)
4838
4839 SELECT CASE (mytype)
4840 CASE (p2dvar, p3dvar)
4841 io=iobounds(ng) % ILB_psi
4842 ie=iobounds(ng) % IUB_psi
4843 jo=iobounds(ng) % JLB_psi
4844 je=iobounds(ng) % JUB_psi
4845 ioff=0
4846 joff=1
4847 CASE (r2dvar, r3dvar)
4848 io=iobounds(ng) % ILB_rho
4849 ie=iobounds(ng) % IUB_rho
4850 jo=iobounds(ng) % JLB_rho
4851 je=iobounds(ng) % JUB_rho
4852 ioff=1
4853 joff=0
4854 CASE (u2dvar, u3dvar)
4855 io=iobounds(ng) % ILB_u
4856 ie=iobounds(ng) % IUB_u
4857 jo=iobounds(ng) % JLB_u
4858 je=iobounds(ng) % JUB_u
4859 ioff=0
4860 joff=0
4861 CASE (v2dvar, v3dvar)
4862 io=iobounds(ng) % ILB_v
4863 ie=iobounds(ng) % IUB_v
4864 jo=iobounds(ng) % JLB_v
4865 je=iobounds(ng) % JUB_v
4866 ioff=1
4867 joff=1
4868 CASE DEFAULT ! RHO-points
4869 io=iobounds(ng) % ILB_rho
4870 ie=iobounds(ng) % IUB_rho
4871 jo=iobounds(ng) % JLB_rho
4872 je=iobounds(ng) % JUB_rho
4873 ioff=1
4874 joff=0
4875 END SELECT
4876!
4877 IF (lbk.eq.0) THEN
4878 koff=0
4879 ELSE
4880 koff=1
4881 END IF
4882!
4883 isize=ie-io+1
4884 jsize=je-jo+1
4885 ksize=ubk-lbk+1
4886 ijsize=isize*jsize
4887 npts=ijsize*ksize
4888!
4889! Set "GATHERV" counts and displacement vectors. Use non-overlapping
4890! (ghost=0) ranges according to tile rank.
4891!
4892 ghost=0
4893!
4894 SELECT CASE (mytype)
4895 CASE (p2dvar, p3dvar)
4896 cgrid=1
4897 CASE (r2dvar, r3dvar)
4898 cgrid=2
4899 CASE (u2dvar, u3dvar)
4900 cgrid=3
4901 CASE (v2dvar, v3dvar)
4902 cgrid=4
4903 CASE DEFAULT ! RHO-points
4904 cgrid=2
4905 END SELECT
4906!
4907 ntasks=ntilei(ng)*ntilej(ng)
4908 nc=0
4909 DO rank=0,ntasks-1
4910 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
4911 iub=bounds(ng) % Imax(cgrid,ghost,rank)
4912 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
4913 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
4914# ifdef GATHER_SENDRECV
4915 mysize(rank)=(iub-ilb+1)*(jub-jlb+1)*(ubk-lbk+1)
4916# endif
4917 displs(rank)=nc
4918 DO k=lbk,ubk
4919 DO j=jlb,jub
4920 DO i=ilb,iub
4921 nc=nc+1
4922 END DO
4923 END DO
4924 END DO
4925 counts(rank)=nc-displs(rank)
4926 END DO
4927!
4928!-----------------------------------------------------------------------
4929! Pack and scale input tiled data.
4930!-----------------------------------------------------------------------
4931!
4932 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
4933 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
4934 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
4935 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
4936!
4937 asize=(imax-imin+1)*(jmax-jmin+1)*(ubk-lbk+1)
4938 allocate ( asend(asize) )
4939 asend=0.0_r8
4940!
4941 nc=0
4942 DO k=lbk,ubk
4943 DO j=jmin,jmax
4944 DO i=imin,imax
4945 nc=nc+1
4946 asend(nc)=a(i,j,k)*ascl
4947 END DO
4948 END DO
4949 END DO
4950
4951# ifdef MASKING
4952!
4953! If overwriting Land/Sea mask or processing water-points only, flag
4954! land-points with special value.
4955!
4956 IF (PRESENT(setfillval)) THEN
4957 landfill=setfillval
4958 ELSE
4959 landfill=tindex.gt.0
4960 END IF
4961 IF (gtype.lt.0) THEN
4962 nc=0
4963 DO k=lbk,ubk
4964 DO j=jmin,jmax
4965 DO i=imin,imax
4966 nc=nc+1
4967 IF (amask(i,j).eq.0.0_r8) THEN
4968 asend(nc)=spval
4969 END IF
4970 END DO
4971 END DO
4972 END DO
4973 ELSE IF (landfill) THEN
4974 nc=0
4975 DO k=lbk,ubk
4976 DO j=jmin,jmax
4977 DO i=imin,imax
4978 nc=nc+1
4979 IF (amask(i,j).eq.0.0_r8) THEN
4980 asend(nc)=spval
4981 END IF
4982 END DO
4983 END DO
4984 END DO
4985 END IF
4986# endif
4987!
4988!-----------------------------------------------------------------------
4989! Gather requested global data from tiled arrays.
4990!-----------------------------------------------------------------------
4991
4992# ifdef GATHER_SENDRECV
4993!
4994 allocate ( arecv(ijsize*ksize, ntasks-1) )
4995 arecv=0.0_r8
4996!
4997! If master processor, unpack the send buffer since there is no
4998! need to distribute.
4999!
5000 IF (myrank.eq.mymaster) THEN
5001 nc=0
5002 DO k=lbk,ubk
5003 kc=(k-koff)*ijsize
5004 DO j=jmin,jmax
5005 jc=(j-joff)*isize
5006 DO i=imin,imax
5007 nc=nc+1
5008 ic=i+ioff+jc+kc
5009 awrk(ic)=asend(nc)
5010 END DO
5011 END DO
5012 END DO
5013 END IF
5014!
5015! Send, receive, and unpack data.
5016!
5017 IF (myrank.eq.mymaster) THEN
5018 DO rank=1,ntasks-1
5019 CALL mpi_irecv (arecv(1,rank), mysize(rank), mp_float, rank, &
5020 & rank+5, ocn_comm_world, rrequest(rank), &
5021 & myerror)
5022 END DO
5023 DO rank=1,ntasks-1
5024 CALL mpi_wait (rrequest(rank), rstatus, myerror)
5025 IF (myerror.ne.mpi_success) THEN
5026 CALL mpi_error_string (myerror, string, lstr, serror)
5027 lstr=len_trim(string)
5028 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
5029 10 FORMAT (/,' MP_GATHER3D - error during ',a,' call, Task = ',&
5030 & i3.3,' Error = ',i3,/,13x,a)
5031 exit_flag=2
5032 RETURN
5033 END IF
5034!
5035 imin=bounds(ng) % Imin(cgrid,ghost,rank)
5036 imax=bounds(ng) % Imax(cgrid,ghost,rank)
5037 jmin=bounds(ng) % Jmin(cgrid,ghost,rank)
5038 jmax=bounds(ng) % Jmax(cgrid,ghost,rank)
5039!
5040 nc=0
5041 DO k=lbk,ubk
5042 kc=(k-koff)*ijsize
5043 DO j=jmin,jmax
5044 jc=(j-joff)*isize
5045 DO i=imin,imax
5046 nc=nc+1
5047 ic=i+ioff+jc+kc
5048 awrk(ic)=arecv(nc,rank)
5049 END DO
5050 END DO
5051 END DO
5052 END DO
5053 ELSE
5054 CALL mpi_isend (asend, mysize(myrank), mp_float, mymaster, &
5055 & myrank+5, ocn_comm_world, srequest, myerror)
5056 CALL mpi_wait (srequest, sstatus, myerror)
5057 IF (myerror.ne.mpi_success) THEN
5058 CALL mpi_error_string (myerror, string, lstr, serror)
5059 lstr=len_trim(string)
5060 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
5061 exit_flag=2
5062 RETURN
5063 END IF
5064 END IF
5065
5066# else
5067!
5068! Gather local tiled data into a global array.
5069!
5070 allocate ( arecv(ijsize*ksize) )
5071 arecv=0.0_r8
5072!
5073 CALL mpi_gatherv (asend, asize, mp_float, &
5074 & arecv, counts, displs, mp_float, &
5075 & mymaster, ocn_comm_world, myerror)
5076 IF (myerror.ne.mpi_success) THEN
5077 CALL mpi_error_string (myerror, string, lstr, serror)
5078 WRITE (stdout,10) 'MPI_GATHERV', myrank, myerror, trim(string)
5079 10 FORMAT (/,' MP_GATHER3D - error during ',a,' call, Task = ', &
5080 & i3.3, ' Error = ',i3,/,15x,a)
5081 exit_flag=2
5082 RETURN
5083 END IF
5084!
5085! Unpack gathered data in a contiguous memory order and remap every
5086! task segment.
5087!
5088 nc=0
5089 DO rank=0,ntasks-1
5090 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
5091 iub=bounds(ng) % Imax(cgrid,ghost,rank)
5092 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
5093 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
5094 DO k=lbk,ubk
5095 kc=(k-koff)*ijsize
5096 DO j=jlb,jub
5097 jc=(j-joff)*isize
5098 DO i=ilb,iub
5099 ijk=i+ioff+jc+kc
5100 nc=nc+1
5101 awrk(ijk)=arecv(nc)
5102 END DO
5103 END DO
5104 END DO
5105 END DO
5106# endif
5107# ifdef MASKING
5108!
5109! If processing only water-points, remove land points and repack.
5110!
5111 IF ((myrank.eq.mymaster).and.(gtype.lt.0)) THEN
5112 nc=0
5113 DO i=1,ijsize*ksize
5114 IF (awrk(i).lt.spval) THEN
5115 nc=nc+1
5116 awrk(nc)=awrk(i)
5117 END IF
5118 END DO
5119 npts=nc
5120 END IF
5121# endif
5122!
5123! Deallocate local arrays.
5124!
5125 deallocate (arecv)
5126 deallocate (asend)
5127
5128# ifdef PROFILE
5129!
5130!-----------------------------------------------------------------------
5131! Turn off time clocks.
5132!-----------------------------------------------------------------------
5133!
5134 CALL wclock_off (ng, model, 66, __line__, myfile)
5135# endif
5136!
5137 RETURN
5138 END SUBROUTINE mp_gather3d
5139!
5140 SUBROUTINE mp_gather_state (ng, model, Mstr, Mend, Asize, &
5141 & A, Awrk)
5142!
5143!***********************************************************************
5144! !
5145! This routine gathers (threaded to global) state data to all nodes !
5146! in the group. This routine is used to unpack the state data for !
5147! the GST analysis propagators. !
5148! !
5149! On Input: !
5150! !
5151! ng Nested grid number. !
5152! model Calling model identifier. !
5153! Mstr Threaded array lower bound. !
5154! Mend Threaded array upper bound. !
5155! Asize Size of the full state. !
5156! A Threaded 1D array to process. !
5157! !
5158! On Output: !
5159! !
5160! Awrk Collected data from each node packed into 1D full !
5161! state array. !
5162! !
5163!***********************************************************************
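!
!  Usage sketch (illustrative only; "Swrk" and "Mstate" are
!  hypothetical names): every node passes its own threaded segment
!  A(Mstr:Mend) and all nodes return with the assembled full state.
!
!      CALL mp_gather_state (ng, model, Mstr, Mend, Mstate, &
!     &                      A, Swrk)
!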
5164!
5165! Imported variable declarations.
5166!
5167 integer, intent(in) :: ng, model
5168 integer, intent(in) :: Mstr, Mend, Asize
5169!
5170 real(r8), intent(in) :: A(Mstr:Mend)
5171 real(r8), intent(out) :: Awrk(Asize)
5172!
5173! Local variable declarations.
5174!
5175 integer :: LB, Lstr, MyError, Serror
5176 integer :: i, np, rank, request
5177
5178 integer :: my_bounds(2)
5179 integer, dimension(MPI_STATUS_SIZE) :: status
5180 integer, dimension(2,0:NtileI(ng)*NtileJ(ng)-1) :: Abounds
5181!
5182 character (len=MPI_MAX_ERROR_STRING) :: string
5183
5184 character (len=*), parameter :: MyFile = &
5185 & __FILE__//", mp_gather_state"
5186
5187# ifdef PROFILE
5188!
5189!-----------------------------------------------------------------------
5190! Turn on time clocks.
5191!-----------------------------------------------------------------------
5192!
5193 CALL wclock_on (ng, model, 66, __line__, myfile)
5194# endif
5195!
5196!-----------------------------------------------------------------------
5197! Collect data from all nodes.
5198!-----------------------------------------------------------------------
5199!
5200! Collect data lower and upper bound dimensions.
5201!
5202 np=2
5203 my_bounds(1)=mstr
5204 my_bounds(2)=mend
5205 CALL mpi_allgather (my_bounds, np, mpi_integer, abounds, np, &
5206 & mpi_integer, ocn_comm_world, myerror)
5207 IF (myerror.ne.mpi_success) THEN
5208 CALL mpi_error_string (myerror, string, lstr, serror)
5209 lstr=len_trim(string)
5210 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
5211 & string(1:lstr)
5212 10 FORMAT (/,' MP_GATHER_STATE - error during ',a, &
5213 & ' call, Task = ',i3.3,' Error = ',i3,/,13x,a)
5214 exit_flag=2
5215 RETURN
5216 END IF
5217!
5218! If master node, loop over other nodes and receive the data.
5219!
5220 IF (myrank.eq.mymaster) THEN
5221 DO rank=1,ntilei(ng)*ntilej(ng)-1
5222 np=abounds(2,rank)-abounds(1,rank)+1
5223 lb=abounds(1,rank)
5224 CALL mpi_irecv (awrk(lb:), np, mp_float, rank, rank+5, &
5225 & ocn_comm_world, request, myerror)
5226 CALL mpi_wait (request, status, myerror)
5227 IF (myerror.ne.mpi_success) THEN
5228 CALL mpi_error_string (myerror, string, lstr, serror)
5229 lstr=len_trim(string)
5230 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
5231 exit_flag=2
5232 RETURN
5233 END IF
5234 END DO
5235!
5236! Load master node contribution.
5237!
5238 DO i=mstr,mend
5239 awrk(i)=a(i)
5240 END DO
5241!
5242! Otherwise, send data to master node.
5243!
5244 ELSE
5245 np=mend-mstr+1
5246 CALL mpi_isend (a(mstr:), np, mp_float, mymaster, myrank+5, &
5247 & ocn_comm_world, request, myerror)
5248 CALL mpi_wait (request, status, myerror)
5249 IF (myerror.ne.mpi_success) THEN
5250 CALL mpi_error_string (myerror, string, lstr, serror)
5251 lstr=len_trim(string)
5252 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
5253 exit_flag=2
5254 RETURN
5255 END IF
5256 END IF
5257!
5258! Broadcast collected data to all nodes.
5259!
5260 CALL mpi_bcast (awrk, asize, mp_float, mymaster, ocn_comm_world, &
5261 & myerror)
5262 IF (myerror.ne.mpi_success) THEN
5263 CALL mpi_error_string (myerror, string, lstr, serror)
5264 lstr=len_trim(string)
5265 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
5266 exit_flag=2
5267 RETURN
5268 END IF
5269
5270# ifdef PROFILE
5271!
5272!-----------------------------------------------------------------------
5273! Turn off time clocks.
5274!-----------------------------------------------------------------------
5275!
5276 CALL wclock_off (ng, model, 66, __line__, myfile)
5277# endif
5278!
5279 RETURN
5280 END SUBROUTINE mp_gather_state
5281!
5282 FUNCTION mp_ncread1d (ng, model, ncid, ncvname, ncname, &
5283 & ncrec, LB1, UB1, Ascale, A) &
5284 & result(io_error)
5285!
5286!***********************************************************************
5287! !
5288! This function reads a floating-point 1D state array from the !
5289! specified NetCDF file and scatters it to the other nodes. !
5290! !
5291! On Input: !
5292! !
5293! ng Nested grid number. !
5294! model Calling model identifier. !
5295! ncid NetCDF file ID. !
5296! ncvname NetCDF variable name. !
5297! ncname NetCDF file name. !
5298! ncrec NetCDF record index to read. If negative, it !
5299! assumes that the variable is recordless. !
5300! LB1 First-dimension Lower bound. !
5301! UB1 First-dimension Upper bound. !
5302! Ascale Factor to scale field after reading (real). !
5303! !
5304! On Output: !
5305! !
5306! A Field to read in (real). !
5307! io_error Error flag (integer). !
5308! !
5309! Note: We cannot include "USE mod_netcdf" here because of cyclic !
5310! dependency. Instead we need original NetCDF library module !
5311! "USE netcdf". !
5312! !
5313!***********************************************************************
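!
!  Usage sketch (illustrative only; "io_err", "my_vector", and the
!  bounds are hypothetical, and "nf90_noerr" comes from the netcdf
!  module): since mp_ncread1d is a function, the caller tests the
!  returned NetCDF error code.
!
!      io_err=mp_ncread1d(ng, iNLM, ncid, 'my_vector', ncname, &
!     &                   -1, Mstr, Mend, 1.0_r8, A)
!      IF (io_err.ne.nf90_noerr) exit_flag=2
!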
5314!
5315 USE netcdf
5316!
5317! Imported variable declarations.
5318!
5319 integer, intent(in) :: ng, model, ncid, ncrec
5320 integer, intent(in) :: lb1, ub1
5321!
5322 real(r8), intent(in) :: ascale
5323
5324 real(r8), intent(out) :: a(lb1:ub1)
5325!
5326 character (len=*), intent(in) :: ncvname
5327 character (len=*), intent(in) :: ncname
5328!
5329! Local variable declarations.
5330!
5331 integer :: lstr, myerror, npts, serror
5332 integer :: i, j, np, rank, request, varid
5333 integer :: io_error
5334 integer :: ibuffer(2), my_bounds(2), start(1), total(1)
5335
5336 integer, dimension(MPI_STATUS_SIZE) :: status
5337 integer, dimension(2,0:NtileI(ng)*NtileJ(ng)-1) :: asize
5338!
5339 real(r8), allocatable :: asend(:)
5340!
5341 character (len=MPI_MAX_ERROR_STRING) :: string
5342
5343 character (len=*), parameter :: myfile = &
5344 & __FILE__//", mp_ncread1d_nf90"
5345
5346# ifdef PROFILE
5347!
5348!-----------------------------------------------------------------------
5349! Turn on time clocks.
5350!-----------------------------------------------------------------------
5351!
5352 CALL wclock_on (ng, model, 67, __line__, myfile)
5353# endif
5354!
5355!-----------------------------------------------------------------------
5356! Read requested NetCDF file and scatter it to all nodes.
5357!-----------------------------------------------------------------------
5358!
5359 io_error=nf90_noerr
5360!
5361! Collect data lower and upper bounds dimensions.
5362!
5363 np=2
5364 my_bounds(1)=lb1
5365 my_bounds(2)=ub1
5366 CALL mpi_allgather (my_bounds, np, mpi_integer, &
5367 & asize, np, mpi_integer, &
5368 & ocn_comm_world, myerror)
5369 IF (myerror.ne.mpi_success) THEN
5370 CALL mpi_error_string (myerror, string, lstr, serror)
5371 lstr=len_trim(string)
5372 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
5373 & string(1:lstr)
5374 exit_flag=2
5375 RETURN
5376 END IF
5377!
5378! If not master node, receive data from master node.
5379!
5380 IF (myrank.ne.mymaster) THEN
5381 np=ub1-lb1+1
5382 CALL mpi_irecv (a(lb1:), np, mp_float, mymaster, myrank+5, &
5383 & ocn_comm_world, request, myerror)
5384 CALL mpi_wait (request, status, myerror)
5385 IF (myerror.ne.mpi_success) THEN
5386 CALL mpi_error_string (myerror, string, lstr, serror)
5387 lstr=len_trim(string)
5388 WRITE (stdout,10) 'MPI_IRECV', myrank, myerror, string(1:lstr)
5389 exit_flag=2
5390 RETURN
5391 END IF
5392!
5393! Scale received (read) data.
5394!
5395 DO i=lb1,ub1
5396 a(i)=a(i)*ascale
5397 END DO
5398!
5399! Otherwise, if master node allocate the send buffer.
5400!
5401 ELSE
5402 npts=0
5403 DO rank=0,ntilei(ng)*ntilej(ng)-1
5404 np=asize(2,rank)-asize(1,rank)+1
5405 npts=max(npts, np)
5406 END DO
5407 IF (.not.allocated(asend)) THEN
5408 allocate (asend(npts))
5409 END IF
5410!
5411! If master node, loop over all nodes and read buffers to send.
5412!
5413 io_error=nf90_inq_varid(ncid, trim(ncvname), varid)
5414 IF (io_error.ne.nf90_noerr) THEN
5415 WRITE (stdout,20) trim(ncvname), trim(ncname)
5416 exit_flag=2
5417 ioerror=io_error
5418 END IF
5419 IF (exit_flag.eq.noerror) THEN
5420 DO rank=0,ntilei(ng)*ntilej(ng)-1
5421 start(1)=asize(1,rank)
5422 total(1)=asize(2,rank)-asize(1,rank)+1
5423 io_error=nf90_get_var(ncid, varid, asend, start, total)
5424 IF (io_error.ne.nf90_noerr) THEN
5425 WRITE (stdout,30) trim(ncvname), trim(ncname)
5426 exit_flag=2
5427 ioerror=io_error
5428 EXIT
5429 END IF
5430!
5431! Send buffer to all nodes, except itself.
5432!
5433 IF (rank.eq.mymaster) THEN
5434 np=0
5435 DO i=lb1,ub1
5436 np=np+1
5437 a(i)=asend(np)*ascale
5438 END DO
5439 ELSE
5440 np=asize(2,rank)-asize(1,rank)+1
5441 CALL mpi_isend (asend, np, mp_float, rank, rank+5, &
5442 & ocn_comm_world, request, myerror)
5443 CALL mpi_wait (request, status, myerror)
5444 IF (myerror.ne.mpi_success) THEN
5445 CALL mpi_error_string (myerror, string, lstr, serror)
5446 lstr=len_trim(string)
5447 WRITE (stdout,10) 'MPI_ISEND', rank, myerror, &
5448 & string(1:lstr)
5449 exit_flag=2
5450 RETURN
5451 END IF
5452 END IF
5453 END DO
5454 END IF
5455 END IF
5456!
5457! Broadcast error flags to all nodes.
5458!
5459 ibuffer(1)=exit_flag
5460 ibuffer(2)=ioerror
5461 CALL mp_bcasti (ng, model, ibuffer)
5462 exit_flag=ibuffer(1)
5463 ioerror=ibuffer(2)
5464!
5465! Maximum automatic buffer memory size in bytes.
5466!
5467 bmemmax(ng)=max(bmemmax(ng), real(SIZE(asend)*kind(a),r8))
5468!
5469! Deallocate send buffer.
5470!
5471 IF (allocated(asend).and.(myrank.eq.mymaster)) THEN
5472 deallocate (asend)
5473 END IF
5474
5475# ifdef PROFILE
5476!
5477!-----------------------------------------------------------------------
5478! Turn off time clocks.
5479!-----------------------------------------------------------------------
5480!
5481 CALL wclock_off (ng, model, 67, __line__, myfile)
5482# endif
5483!
5484 10 FORMAT (/,' MP_NCREAD1D - error during ',a,' call, Task = ',i0, &
5485 & ' Error = ',i0,/,15x,a)
5486 20 FORMAT (/,' MP_NCREAD1D - error while inquiring ID for', &
5487 & ' variable: ',a,/,15x,'in file: ',a)
5488 30 FORMAT (/,' MP_NCREAD1D - error while reading variable: ',a, &
5489 & /,15x,'in file: ',a)
5490!
5491 RETURN
5492 END FUNCTION mp_ncread1d
5493!
5494 FUNCTION mp_ncread2d (ng, model, ncid, ncvname, ncname, &
5495 & ncrec, LB1, UB1, LB2, UB2, Ascale, A) &
5496 & result(io_error)
5497!
5498!***********************************************************************
5499! !
5500! This function reads a floating-point 2D state array from the !
5501! specified NetCDF file and scatters it to the other nodes. !
5502! !
5503! On Input: !
5504! !
5505! ng Nested grid number. !
5506! model Calling model identifier. !
5507! ncid NetCDF file ID. !
5508! ncvname NetCDF variable name. !
5509! ncname NetCDF file name. !
5510! ncrec NetCDF record index to read. If negative, it !
5511! assumes that the variable is recordless. !
5512! LB1 First-dimension Lower bound. !
5513! UB1 First-dimension Upper bound. !
5514! LB2 Second-dimension Lower bound. !
5515! UB2 Second-dimension Upper bound. !
5516! Ascale Factor to scale field after reading (real). !
5517! !
5518! On Output: !
5519! !
5520! A Field to read in (real). !
5521! io_error Error flag (integer). !
5522! !
5523! Note: We cannot include "USE mod_netcdf" here because of cyclic !
5524! dependency. Instead we need original NetCDF library module !
5525! "USE netcdf". !
5526! !
5527!***********************************************************************
5528!
5529 USE netcdf
5530!
5531! Imported variable declarations.
5532!
5533 integer, intent(in) :: ng, model, ncid, ncrec
5534 integer, intent(in) :: lb1, ub1, lb2, ub2
5535!
5536 real(r8), intent(in) :: ascale
5537
5538 real(r8), intent(out) :: a(lb1:ub1,lb2:ub2)
5539!
5540 character (len=*), intent(in) :: ncvname
5541 character (len=*), intent(in) :: ncname
5542!
5543! Local variable declarations.
5544!
5545 integer :: lstr, myerror, npts, serror
5546 integer :: i, j, np, rank, request, varid
5547 integer :: io_error
5548 integer :: ibuffer(2), my_bounds(4), start(2), total(2)
5549
5550 integer, dimension(MPI_STATUS_SIZE) :: status
5551 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: asize
5552!
5553 real(r8), allocatable :: asend(:)
5554!
5555 character (len=MPI_MAX_ERROR_STRING) :: string
5556
5557 character (len=*), parameter :: myfile = &
5558 & __FILE__//", mp_ncread2d_nf90"
5559
5560# ifdef PROFILE
5561!
5562!-----------------------------------------------------------------------
5563! Turn on time clocks.
5564!-----------------------------------------------------------------------
5565!
5566 CALL wclock_on (ng, model, 67, __line__, myfile)
5567# endif
5568!
5569!-----------------------------------------------------------------------
5570! Read requested NetCDF file and scatter it to all nodes.
5571!-----------------------------------------------------------------------
5572!
5573 io_error=nf90_noerr
5574!
5575! Collect data lower and upper bounds dimensions.
5576!
5577 np=4
5578 my_bounds(1)=lb1
5579 my_bounds(2)=ub1
5580 my_bounds(3)=lb2
5581 my_bounds(4)=ub2
5582 CALL mpi_allgather (my_bounds, np, mpi_integer, &
5583 & asize, np, mpi_integer, &
5584 & ocn_comm_world, myerror)
5585 IF (myerror.ne.mpi_success) THEN
5586 CALL mpi_error_string (myerror, string, lstr, serror)
5587 lstr=len_trim(string)
5588 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
5589 & string(1:lstr)
5590 exit_flag=2
5591 RETURN
5592 END IF
5593!
5594! If not master node, receive data from master node.
5595!
5596 IF (myrank.ne.mymaster) THEN
5597 np=(ub1-lb1+1)*(ub2-lb2+1)
5598 CALL mpi_irecv (a(lb1,lb2), np, mp_float, mymaster, myrank+5, &
5599 & ocn_comm_world, request, myerror)
5600 CALL mpi_wait (request, status, myerror)
5601 IF (myerror.ne.mpi_success) THEN
5602 CALL mpi_error_string (myerror, string, lstr, serror)
5603 lstr=len_trim(string)
5604 WRITE (stdout,10) 'MPI_IRECV', myrank, myerror, string(1:lstr)
5605 exit_flag=2
5606 RETURN
5607 END IF
5608!
5609! Scale received (read) data.
5610!
5611 DO j=lb2,ub2
5612 DO i=lb1,ub1
5613 a(i,j)=a(i,j)*ascale
5614 END DO
5615 END DO
5616!
5617! Otherwise, if master node allocate the send buffer.
5618!
5619 ELSE
5620 npts=0
5621 DO rank=0,ntilei(ng)*ntilej(ng)-1
5622 np=(asize(2,rank)-asize(1,rank)+1)* &
5623 & (asize(4,rank)-asize(3,rank)+1)
5624 npts=max(npts, np)
5625 END DO
5626 IF (.not.allocated(asend)) THEN
5627 allocate (asend(npts))
5628 END IF
5629!
5630! If master node, loop over all nodes and read buffers to send.
5631!
5632 io_error=nf90_inq_varid(ncid, trim(ncvname), varid)
5633 IF (io_error.ne.nf90_noerr) THEN
5634 WRITE (stdout,20) trim(ncvname), trim(ncname)
5635 exit_flag=2
5636 ioerror=io_error
5637 END IF
5638 IF (exit_flag.eq.noerror) THEN
5639 DO rank=0,ntilei(ng)*ntilej(ng)-1
5640 start(1)=asize(1,rank)
5641 total(1)=asize(2,rank)-asize(1,rank)+1
5642 start(2)=asize(3,rank)
5643 total(2)=asize(4,rank)-asize(3,rank)+1
5644 io_error=nf90_get_var(ncid, varid, asend, start, total)
5645 IF (io_error.ne.nf90_noerr) THEN
5646 WRITE (stdout,30) trim(ncvname), trim(ncname)
5647 exit_flag=2
5648 ioerror=io_error
5649 EXIT
5650 END IF
5651!
5652! Send buffer to all nodes, except itself.
5653!
5654 IF (rank.eq.mymaster) THEN
5655 np=0
5656 DO j=lb2,ub2
5657 DO i=lb1,ub1
5658 np=np+1
5659 a(i,j)=asend(np)*ascale
5660 END DO
5661 END DO
5662 ELSE
5663 np=(asize(2,rank)-asize(1,rank)+1)* &
5664 & (asize(4,rank)-asize(3,rank)+1)
5665 CALL mpi_isend (asend, np, mp_float, rank, rank+5, &
5666 & ocn_comm_world, request, myerror)
5667 CALL mpi_wait (request, status, myerror)
5668 IF (myerror.ne.mpi_success) THEN
5669 CALL mpi_error_string (myerror, string, lstr, serror)
5670 lstr=len_trim(string)
5671 WRITE (stdout,10) 'MPI_ISEND', rank, myerror, &
5672 & string(1:lstr)
5673 exit_flag=2
5674 RETURN
5675 END IF
5676 END IF
5677 END DO
5678 END IF
5679 END IF
5680!
5681! Broadcast error flags to all nodes.
5682!
5683 ibuffer(1)=exit_flag
5684 ibuffer(2)=ioerror
5685 CALL mp_bcasti (ng, model, ibuffer)
5686 exit_flag=ibuffer(1)
5687 ioerror=ibuffer(2)
5688!
5689! Maximum automatic buffer memory size in bytes.
5690!
5691 bmemmax(ng)=max(bmemmax(ng), real(SIZE(asend)*kind(a),r8))
5692!
5693! Deallocate send buffer.
5694!
5695 IF (allocated(asend).and.(myrank.eq.mymaster)) THEN
5696 deallocate (asend)
5697 END IF
5698
5699# ifdef PROFILE
5700!
5701!-----------------------------------------------------------------------
5702! Turn off time clocks.
5703!-----------------------------------------------------------------------
5704!
5705 CALL wclock_off (ng, model, 67, __line__, myfile)
5706# endif
5707!
5708 10 FORMAT (/,' MP_NCREAD2D - error during ',a,' call, Task = ',i0, &
5709 & ' Error = ',i0,/,15x,a)
5710 20 FORMAT (/,' MP_NCREAD2D - error while inquiring ID for', &
5711 & ' variable: ',a,/,15x,'in file: ',a)
5712 30 FORMAT (/,' MP_NCREAD2D - error while reading variable: ',a, &
5713 & /,15x,'in file: ',a)
5714!
5715 RETURN
5716 END FUNCTION mp_ncread2d
5717!
5718 FUNCTION mp_ncwrite1d (ng, model, ncid, ncvname, ncname, &
5719 & ncrec, LB1, UB1, Ascale, A) &
5720 & result(io_error)
5721!
5722!***********************************************************************
5723! !
5724! This function collects floating-point 1D state array data from !
5725! the other nodes and writes it into the specified NetCDF file. !
5726! !
5727! On Input: !
5728! !
5729! ng Nested grid number. !
5730! model Calling model identifier. !
5731! ncid NetCDF file ID. !
5732! ncvname NetCDF variable name. !
5733! ncname NetCDF file name. !
5734! ncrec NetCDF record index to write. If negative, it !
5735! assumes that the variable is recordless. !
5736! LB1 First-dimension Lower bound. !
5737! UB1 First-dimension Upper bound. !
5738! Ascale Factor to scale field before writing (real). !
5739! A Field to write out (real). !
5740! !
5741! On Output: !
5742! !
5743! io_error Error flag (integer). !
5744! !
5745! Note: We cannot include "USE mod_netcdf" here because of cyclic !
5746! dependency. Instead we need original NetCDF library module !
5747! "USE netcdf". !
5748! !
5749!***********************************************************************
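!
!  Usage sketch (illustrative only; "ncid", "ncname", "Awork", and the
!  bounds "my_LB"/"my_UB" are hypothetical caller variables, and iNLM
!  is an example model identifier).  Every distributed-memory task
!  calls the function with its own 1D section.  Note that the bounds
!  are used both as the array bounds and as the NetCDF start/count
!  indices, so they must be valid (1-based) indices of the variable;
!  a negative record index marks the variable as recordless:
!
!      integer :: status
!
!      status=mp_ncwrite1d(ng, iNLM, ncid, 'zeta_ref', TRIM(ncname),    &
!     &                    -1, my_LB, my_UB, 1.0_r8, Awork)
!      IF (status.ne.nf90_noerr) exit_flag=3      ! caller USEs netcdf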
5750!
5751 USE netcdf
5752!
5753! Imported variable declarations.
5754!
5755 integer, intent(in) :: ng, model, ncid, ncrec
5756 integer, intent(in) :: lb1, ub1
5757!
5758 real(r8), intent(in) :: ascale
5759
5760 real(r8), intent(in) :: a(lb1:ub1)
5761
5762 character (len=*), intent(in) :: ncvname
5763 character (len=*), intent(in) :: ncname
5764!
5765! Local variable declarations.
5766!
5767 integer :: lstr, myerror, npts, serror
5768 integer :: i, j, np, rank, request, varid
5769 integer :: io_error
5770 integer :: ibuffer(2), my_bounds(2), start(1), total(1)
5771
5772 integer, dimension(MPI_STATUS_SIZE) :: status
5773 integer, dimension(2,0:NtileI(ng)*NtileJ(ng)-1) :: asize
5774!
5775 real(r8), allocatable :: arecv(:)
5776!
5777 character (len=MPI_MAX_ERROR_STRING) :: string
5778
5779 character (len=*), parameter :: myfile = &
5780 & __FILE__//", mp_ncwrite1d"
5781
5782# ifdef PROFILE
5783!
5784!-----------------------------------------------------------------------
5785! Turn on time clocks.
5786!-----------------------------------------------------------------------
5787!
5788 CALL wclock_on (ng, model, 66, __line__, myfile)
5789# endif
5790!
5791!-----------------------------------------------------------------------
5792! Collect and write data into requested NetCDF file.
5793!-----------------------------------------------------------------------
5794!
5795 io_error=nf90_noerr
5796!
5797! Collect data lower and upper bounds dimensions.
5798!
5799 np=2
5800 my_bounds(1)=lb1
5801 my_bounds(2)=ub1
5802 CALL mpi_allgather (my_bounds, np, mpi_integer, &
5803 & asize, np, mpi_integer, &
5804 & ocn_comm_world, myerror)
5805 IF (myerror.ne.mpi_success) THEN
5806 CALL mpi_error_string (myerror, string, lstr, serror)
5807 lstr=len_trim(string)
5808 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
5809 & string(1:lstr)
5810 exit_flag=2
5811 RETURN
5812 END IF
5813!
5814! If master node, allocate the receive buffer.
5815!
5816 IF (myrank.eq.mymaster) THEN
5817 npts=0
5818 DO rank=0,ntilei(ng)*ntilej(ng)-1
5819 np=(asize(2,rank)-asize(1,rank)+1)
5820 npts=max(npts, np)
5821 END DO
5822 IF (.not.allocated(arecv)) THEN
5823 allocate (arecv(npts))
5824 END IF
5825!
5826! Write out master node contribution.
5827!
5828 start(1)=lb1
5829 total(1)=ub1-lb1+1
5830 np=0
5831 DO i=lb1,ub1
5832 np=np+1
5833            arecv(np)=a(i)*ascale
5834 END DO
5835 io_error=nf90_inq_varid(ncid, trim(ncvname), varid)
5836 IF (io_error.eq.nf90_noerr) THEN
5837 io_error=nf90_put_var(ncid, varid, arecv, start, total)
5838 IF (io_error.ne.nf90_noerr) THEN
5839 WRITE (stdout,20) trim(ncvname), trim(ncname)
5840 exit_flag=3
5841 ioerror=io_error
5842 END IF
5843 ELSE
5844 WRITE (stdout,30) trim(ncvname), trim(ncname)
5845 exit_flag=3
5846 ioerror=io_error
5847 END IF
5848!
5849! If master node, loop over other nodes and receive the data.
5850!
5851 IF (exit_flag.eq.noerror) THEN
5852 DO rank=1,ntilei(ng)*ntilej(ng)-1
5853 np=asize(2,rank)-asize(1,rank)+1
5854 CALL mpi_irecv (arecv, np, mp_float, rank, rank+5, &
5855 & ocn_comm_world, request, myerror)
5856 CALL mpi_wait (request, status, myerror)
5857 IF (myerror.ne.mpi_success) THEN
5858 CALL mpi_error_string (myerror, string, lstr, serror)
5859 lstr=len_trim(string)
5860 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, &
5861 & string(1:lstr)
5862 exit_flag=3
5863 RETURN
5864 END IF
5865!
5866! Write out data into NetCDF file.
5867!
5868 start(1)=asize(1,rank)
5869 total(1)=asize(2,rank)-asize(1,rank)+1
5870 DO i=1,np
5871 arecv(i)=arecv(i)*ascale
5872 END DO
5873 io_error=nf90_put_var(ncid, varid, arecv, start, total)
5874 IF (io_error.ne.nf90_noerr) THEN
5875 WRITE (stdout,20) trim(ncvname), trim(ncname)
5876 exit_flag=3
5877 ioerror=io_error
5878 EXIT
5879 END IF
5880 END DO
5881 END IF
5882!
5883! Otherwise, send data to master node.
5884!
5885 ELSE
5886 np=ub1-lb1+1
5887 CALL mpi_isend (a(lb1:), np, mp_float, mymaster, myrank+5, &
5888 & ocn_comm_world, request, myerror)
5889 CALL mpi_wait (request, status, myerror)
5890 IF (myerror.ne.mpi_success) THEN
5891 CALL mpi_error_string (myerror, string, lstr, serror)
5892 lstr=len_trim(string)
5893 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
5894 exit_flag=2
5895 RETURN
5896 END IF
5897 END IF
5898!
5899! Broadcast error flags to all nodes.
5900!
5901 ibuffer(1)=exit_flag
5902 ibuffer(2)=ioerror
5903 CALL mp_bcasti (ng, model, ibuffer)
5904 exit_flag=ibuffer(1)
5905 ioerror=ibuffer(2)
5906!
5907! Maximum automatic buffer memory size in bytes.
5908!
5909 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
5910!
5911! Deallocate receive buffer.
5912!
5913 IF (allocated(arecv).and.(myrank.eq.mymaster)) THEN
5914 deallocate (arecv)
5915 END IF
5916
5917# ifdef PROFILE
5918!
5919!-----------------------------------------------------------------------
5920! Turn off time clocks.
5921!-----------------------------------------------------------------------
5922!
5923 CALL wclock_off (ng, model, 66, __line__, myfile)
5924# endif
5925!
5926 10 FORMAT (/,' MP_NCWRITE1D - error during ',a,' call, Task = ',i0, &
5927 & ' Error = ',i0,/,21x,a)
5928 20 FORMAT (/,' MP_NCWRITE1D - error while writing variable: ',a, &
5929 & /,16x,'in file: ',a)
5930 30 FORMAT (/,' MP_NCWRITE1D - error while inquiring ID for', &
5931 & ' variable: ',a,/,16x,'in file: ',a)
5932!
5933 RETURN
5934 END FUNCTION mp_ncwrite1d
5935!
5936 FUNCTION mp_ncwrite2d (ng, model, ncid, ncvname, ncname, &
5937 & ncrec, LB1, UB1, LB2, UB2, Ascale, A) &
5938 & result(io_error)
5939!
5940!***********************************************************************
5941! !
5942! This function collects floating point 2D state array data from the !
5943! other nodes and writes it into specified NetCDF file. !
5944! !
5945! On Input: !
5946! !
5947! ng Nested grid number. !
5948! model Calling model identifier. !
5949! ncid NetCDF file ID. !
5950! ncvname NetCDF variable name. !
5951! ncname NetCDF file name. !
5952! ncrec NetCDF record index to write. If negative, it !
5953! assumes that the variable is recordless. !
5954! LB1 First-dimension Lower bound. !
5955! UB1 First-dimension Upper bound. !
5956! LB2 Second-dimension Lower bound. !
5957! UB2 Second-dimension Upper bound. !
5958! Ascale Factor to scale field before writing (real). !
5959! A Field to write out (real). !
5960! !
5961! On Output: !
5962! !
5963! io_error Error flag (integer). !
5964! !
5965! Note: We cannot include "USE mod_netcdf" here because of cyclic !
5966! dependency. Instead we need original NetCDF library module !
5967! "USE netcdf". !
5968! !
5969!***********************************************************************
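!
!  Usage sketch (illustrative only; "ncid", "ncname", "Awork", and the
!  section bounds "my_LB1"/"my_UB1"/"my_LB2"/"my_UB2" are hypothetical
!  caller variables, and iNLM is an example model identifier).  Each
!  task passes the bounds of its own 2D section; as in the 1D case,
!  those bounds double as the NetCDF start/count indices:
!
!      integer :: status
!
!      status=mp_ncwrite2d(ng, iNLM, ncid, 'h_ref', TRIM(ncname),       &
!     &                    -1, my_LB1, my_UB1, my_LB2, my_UB2,          &
!     &                    1.0_r8, Awork)
!      IF (status.ne.nf90_noerr) exit_flag=3      ! caller USEs netcdf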
5970!
5971 USE netcdf
5972!
5973! Imported variable declarations.
5974!
5975 integer, intent(in) :: ng, model, ncid, ncrec
5976 integer, intent(in) :: lb1, ub1, lb2, ub2
5977!
5978 real(r8), intent(in) :: ascale
5979
5980 real(r8), intent(in) :: a(lb1:ub1,lb2:ub2)
5981!
5982 character (len=*), intent(in) :: ncvname
5983 character (len=*), intent(in) :: ncname
5984!
5985! Local variable declarations.
5986!
5987 integer :: lstr, myerror, npts, serror
5988 integer :: i, j, np, rank, request, varid
5989 integer :: io_error
5990 integer :: ibuffer(2), my_bounds(4), start(2), total(2)
5991
5992 integer, dimension(MPI_STATUS_SIZE) :: status
5993 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: asize
5994!
5995 real(r8), allocatable :: arecv(:)
5996!
5997 character (len=MPI_MAX_ERROR_STRING) :: string
5998
5999 character (len=*), parameter :: myfile = &
6000 & __FILE__//", mp_ncwrite2d_nf90"
6001
6002# ifdef PROFILE
6003!
6004!-----------------------------------------------------------------------
6005! Turn on time clocks.
6006!-----------------------------------------------------------------------
6007!
6008 CALL wclock_on (ng, model, 66, __line__, myfile)
6009# endif
6010!
6011!-----------------------------------------------------------------------
6012! Collect and write data into requested NetCDF file.
6013!-----------------------------------------------------------------------
6014!
6015 io_error=nf90_noerr
6016!
6017! Collect data lower and upper bounds dimensions.
6018!
6019 np=4
6020 my_bounds(1)=lb1
6021 my_bounds(2)=ub1
6022 my_bounds(3)=lb2
6023 my_bounds(4)=ub2
6024 CALL mpi_allgather (my_bounds, np, mpi_integer, &
6025 & asize, np, mpi_integer, &
6026 & ocn_comm_world, myerror)
6027 IF (myerror.ne.mpi_success) THEN
6028 CALL mpi_error_string (myerror, string, lstr, serror)
6029 lstr=len_trim(string)
6030 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6031 & string(1:lstr)
6032 exit_flag=2
6033 RETURN
6034 END IF
6035!
6036! If master node, allocate the receive buffer.
6037!
6038 IF (myrank.eq.mymaster) THEN
6039 npts=0
6040 DO rank=0,ntilei(ng)*ntilej(ng)-1
6041 np=(asize(2,rank)-asize(1,rank)+1)* &
6042 & (asize(4,rank)-asize(3,rank)+1)
6043 npts=max(npts, np)
6044 END DO
6045 IF (.not.allocated(arecv)) THEN
6046 allocate (arecv(npts))
6047 END IF
6048!
6049! Write out master node contribution.
6050!
6051 start(1)=lb1
6052 total(1)=ub1-lb1+1
6053 start(2)=lb2
6054 total(2)=ub2-lb2+1
6055 np=0
6056 DO j=lb2,ub2
6057 DO i=lb1,ub1
6058 np=np+1
6059            arecv(np)=a(i,j)*ascale
6060 END DO
6061 END DO
6062 io_error=nf90_inq_varid(ncid, trim(ncvname), varid)
6063 IF (io_error.eq.nf90_noerr) THEN
6064 io_error=nf90_put_var(ncid, varid, arecv, start, total)
6065 IF (io_error.ne.nf90_noerr) THEN
6066 WRITE (stdout,20) trim(ncvname), trim(ncname)
6067 exit_flag=3
6068 ioerror=io_error
6069 END IF
6070 ELSE
6071 WRITE (stdout,30) trim(ncvname), trim(ncname)
6072 exit_flag=3
6073 ioerror=io_error
6074 END IF
6075!
6076! If master node, loop over other nodes and receive the data.
6077!
6078 IF (exit_flag.eq.noerror) THEN
6079 DO rank=1,ntilei(ng)*ntilej(ng)-1
6080 np=(asize(2,rank)-asize(1,rank)+1)* &
6081 & (asize(4,rank)-asize(3,rank)+1)
6082 CALL mpi_irecv (arecv, np, mp_float, rank, rank+5, &
6083 & ocn_comm_world, request, myerror)
6084 CALL mpi_wait (request, status, myerror)
6085 IF (myerror.ne.mpi_success) THEN
6086 CALL mpi_error_string (myerror, string, lstr, serror)
6087 lstr=len_trim(string)
6088 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, &
6089 & string(1:lstr)
6090 exit_flag=3
6091 RETURN
6092 END IF
6093!
6094! Write out data into NetCDF file.
6095!
6096 start(1)=asize(1,rank)
6097 total(1)=asize(2,rank)-asize(1,rank)+1
6098 start(2)=asize(3,rank)
6099 total(2)=asize(4,rank)-asize(3,rank)+1
6100 DO i=1,np
6101 arecv(i)=arecv(i)*ascale
6102 END DO
6103 io_error=nf90_put_var(ncid, varid, arecv, start, total)
6104 IF (io_error.ne.nf90_noerr) THEN
6105 WRITE (stdout,20) trim(ncvname), trim(ncname)
6106 exit_flag=3
6107 ioerror=io_error
6108 EXIT
6109 END IF
6110 END DO
6111 END IF
6112!
6113! Otherwise, send data to master node.
6114!
6115 ELSE
6116 np=(ub1-lb1+1)*(ub2-lb2+1)
6117 CALL mpi_isend (a(lb1:,lb2:), np, mp_float, mymaster, myrank+5, &
6118 & ocn_comm_world, request, myerror)
6119 CALL mpi_wait (request, status, myerror)
6120 IF (myerror.ne.mpi_success) THEN
6121 CALL mpi_error_string (myerror, string, lstr, serror)
6122 lstr=len_trim(string)
6123 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
6124 exit_flag=2
6125 RETURN
6126 END IF
6127 END IF
6128!
6129! Broadcast error flags to all nodes.
6130!
6131 ibuffer(1)=exit_flag
6132 ibuffer(2)=ioerror
6133 CALL mp_bcasti (ng, model, ibuffer)
6134 exit_flag=ibuffer(1)
6135 ioerror=ibuffer(2)
6136!
6137! Maximum automatic buffer memory size in bytes.
6138!
6139 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
6140!
6141! Deallocate receive buffer.
6142!
6143 IF (allocated(arecv).and.(myrank.eq.mymaster)) THEN
6144 deallocate (arecv)
6145 END IF
6146
6147# ifdef PROFILE
6148!
6149!-----------------------------------------------------------------------
6150! Turn off time clocks.
6151!-----------------------------------------------------------------------
6152!
6153 CALL wclock_off (ng, model, 66, __line__, myfile)
6154# endif
6155!
6156 10 FORMAT (/,' MP_NCWRITE2D - error during ',a,' call, Task = ',i0, &
6157 & ' Error = ',i0,/,21x,a)
6158 20 FORMAT (/,' MP_NCWRITE2D - error while writing variable: ',a, &
6159 & /,16x,'in file: ',a)
6160 30 FORMAT (/,' MP_NCWRITE2D - error while inquiring ID for', &
6161 & ' variable: ',a,/,16x,'in file: ',a)
6162!
6163 RETURN
6164 END FUNCTION mp_ncwrite2d
6165!
6166 SUBROUTINE mp_reduce_i8 (ng, model, Asize, A, handle_op, InpComm)
6168!***********************************************************************
6169! !
6170! This routine collects and reduces requested 1D integer array !
6171! variables from all nodes in the group. !
6172! !
6173! On Input: !
6174! !
6175! ng Nested grid number. !
6176! model Calling model identifier. !
6177! Asize Number of scalar variables to reduce. !
6178! A Vector of scalar variables to reduce. !
6179! handle_op Reduction operation handle (string). The following !
6180! reduction operations are supported: !
6181! 'MIN', 'MAX', 'SUM' !
6182! InpComm Communicator handle (integer, OPTIONAL). !
6183! !
6184! On Output: !
6185! !
6186! A Vector of reduced scalar variables. !
6187! !
6188!***********************************************************************
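!
!  Usage sketch (variable names are illustrative; iNLM is an example
!  model identifier).  Each task loads its local partial counts and the
!  reduced totals are returned in place on every task:
!
!      integer(i8b) :: ibuffer(2)
!      character (len=3) :: op_handle(2)
!
!      ibuffer(1)=my_wet_points            ! local wet-point count
!      ibuffer(2)=my_land_points           ! local land-point count
!      op_handle(1)='SUM'
!      op_handle(2)='SUM'
!      CALL mp_reduce_i8 (ng, iNLM, 2, ibuffer, op_handle)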
6189!
6190! Imported variable declarations.
6191!
6192 integer, intent(in) :: ng, model, Asize
6193
6194 integer, intent(in), optional :: InpComm
6195!
6196 character (len=*), intent(in) :: handle_op(Asize)
6197!
6198 integer(i8b), intent(inout) :: A(Asize)
6199!
6200! Local variable declarations.
6201!
6202 integer :: Lstr, MyCOMM, MyError, Serror
6203 integer :: handle, i, rank, request
6204
6205 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
6206
6207 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
6208 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
6209!
6210 integer(i8b), dimension(Asize,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
6211 integer(i8b), dimension(Asize) :: Areduce
6212 integer(i8b), dimension(Asize) :: Asend
6213!
6214 character (len=MPI_MAX_ERROR_STRING) :: string
6215
6216 character (len=*), parameter :: MyFile = &
6217 & __FILE__//", mp_reduce_1di"
6218
6219# ifdef PROFILE
6220!
6221!-----------------------------------------------------------------------
6222! Turn on time clocks.
6223!-----------------------------------------------------------------------
6224!
6225 CALL wclock_on (ng, model, 65, __line__, myfile)
6226# endif
6227# ifdef MPI
6228!
6229!-----------------------------------------------------------------------
6230! Set distributed-memory communicator handle (context ID).
6231!-----------------------------------------------------------------------
6232!
6233 IF (PRESENT(inpcomm)) THEN
6234 mycomm=inpcomm
6235 ELSE
6236 mycomm=ocn_comm_world
6237 END IF
6238# endif
6239!
6240!-----------------------------------------------------------------------
6241! Collect and reduce requested scalar variables.
6242!-----------------------------------------------------------------------
6243!
6244! Maximum automatic buffer memory size in bytes.
6245!
6246 bmemmax(ng)=max(bmemmax(ng), real((SIZE(arecv)+ &
6247 & 2*asize)*kind(a),r8))
6248!
6249! Pack data to reduce.
6250!
6251 DO i=1,asize
6252 asend(i)=a(i)
6253 END DO
6254!
6255! Collect and reduce.
6256!
6257# if defined REDUCE_ALLREDUCE
6258 DO i=1,asize
6259 IF (handle_op(i)(1:3).eq.'MIN') THEN
6260 handle=mpi_min
6261 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6262 handle=mpi_max
6263 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6264 handle=mpi_sum
6265 END IF
6266 CALL mpi_allreduce (asend(i), areduce(i), 1, mpi_integer, &
6267 & handle, mycomm, myerror)
6268 IF (myerror.ne.mpi_success) THEN
6269 CALL mpi_error_string (myerror, string, lstr, serror)
6270 lstr=len_trim(string)
6271 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
6272 & string(1:lstr)
6273 exit_flag=2
6274 RETURN
6275 END IF
6276 END DO
6277# elif defined REDUCE_ALLGATHER
6278 CALL mpi_allgather (asend, asize, mpi_integer, &
6279 & arecv, asize, mpi_integer, &
6280 & mycomm, myerror)
6281 IF (myerror.ne.mpi_success) THEN
6282 CALL mpi_error_string (myerror, string, lstr, serror)
6283 lstr=len_trim(string)
6284 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6285 & string(1:lstr)
6286 exit_flag=2
6287 RETURN
6288 END IF
6289 DO i=1,asize
6290 areduce(i)=arecv(i,0)
6291 DO rank=1,ntilei(ng)*ntilej(ng)-1
6292 IF (handle_op(i)(1:3).eq.'MIN') THEN
6293 areduce(i)=min(areduce(i),arecv(i,rank))
6294 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6295 areduce(i)=max(areduce(i),arecv(i,rank))
6296 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6297 areduce(i)=areduce(i)+arecv(i,rank)
6298 END IF
6299 END DO
6300 END DO
6301# elif defined REDUCE_SENDRECV
6302 IF (myrank.eq.mymaster) THEN
6303 DO rank=1,ntilei(ng)*ntilej(ng)-1
6304 CALL mpi_irecv (arecv(1,rank), asize, mpi_integer, rank, &
6305 & rank+500, mycomm, rrequest(rank), &
6306 & myerror)
6307 END DO
6308 DO i=1,asize
6309 areduce(i)=asend(i)
6310 END DO
6311 DO rank=1,ntilei(ng)*ntilej(ng)-1
6312 CALL mpi_wait (rrequest(rank), rstatus, myerror)
6313 IF (myerror.ne.mpi_success) THEN
6314 CALL mpi_error_string (myerror, string, lstr, serror)
6315 lstr=len_trim(string)
6316          WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
6317 exit_flag=2
6318 RETURN
6319 END IF
6320 DO i=1,asize
6321 IF (handle_op(i)(1:3).eq.'MIN') THEN
6322 areduce(i)=min(areduce(i),arecv(i,rank))
6323 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6324 areduce(i)=max(areduce(i),arecv(i,rank))
6325 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6326 areduce(i)=areduce(i)+arecv(i,rank)
6327 END IF
6328 END DO
6329 END DO
6330 ELSE
6331 CALL mpi_isend (asend, asize, mpi_integer, mymaster, &
6332 & myrank+500, mycomm, request, myerror)
6333 CALL mpi_wait (request, sstatus, myerror)
6334 IF (myerror.ne.mpi_success) THEN
6335 CALL mpi_error_string (myerror, string, lstr, serror)
6336 lstr=len_trim(string)
6337 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
6338 exit_flag=2
6339 RETURN
6340 END IF
6341 END IF
6342!
6343! Broadcast the reduced variables from the master process to all
6344! processes in the group.
6345!
6346 CALL mpi_bcast (areduce, asize, mpi_integer, mymaster, &
6347 & mycomm, myerror)
6348 IF (myerror.ne.mpi_success) THEN
6349 CALL mpi_error_string (myerror, string, lstr, serror)
6350 lstr=len_trim(string)
6351 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
6352 exit_flag=2
6353 RETURN
6354 END IF
6355# endif
6356 10 FORMAT (/,' MP_REDUCE_I8 - error during ',a,' call, Task = ', &
6357 & i3.3,' Error = ',i3,/,16x,a)
6358!
6359! Unpack.
6360!
6361 DO i=1,asize
6362 a(i)=areduce(i)
6363 END DO
6364# ifdef PROFILE
6365!
6366!-----------------------------------------------------------------------
6367! Turn off time clocks.
6368!-----------------------------------------------------------------------
6369!
6370 CALL wclock_off (ng, model, 65, __line__, myfile)
6371# endif
6372!
6373 RETURN
6374 END SUBROUTINE mp_reduce_i8
6375
6376# ifdef SINGLE_PRECISION
6377!
6378 SUBROUTINE mp_reduce_0dp (ng, model, Asize, A, handle_op, InpComm)
6380!***********************************************************************
6381! !
6382! This routine collects and reduces requested double precision !
6383! variables from all nodes in the group. Then, it broadcasts !
6384! reduced variables to all nodes in the group. !
6385! !
6386! On Input: !
6387! !
6388! ng Nested grid number. !
6389! model Calling model identifier. !
6390! Asize Number of scalar variables to reduce. !
6391! A Vector of scalar variables to reduce. !
6392! handle_op Reduction operation handle (string). The following !
6393! reduction operations are supported: !
6394! 'MIN', 'MAX', 'SUM' !
6395! InpComm Communicator handle (integer, OPTIONAL). !
6396! !
6397! On Output: !
6398! !
6399! A Vector of reduced scalar variables. !
6400! !
6401!***********************************************************************
6402!
6403! Imported variable declarations.
6404!
6405 integer, intent(in) :: ng, model, Asize
6406
6407 integer, intent(in), optional :: InpComm
6408!
6409 character (len=*), intent(in) :: handle_op
6410!
6411 real(dp), intent(inout) :: A
6412!
6413! Local variable declarations.
6414!
6415 integer :: Lstr, MyCOMM, MyError, Npts, Serror
6416 integer :: handle, i, rank, request
6417
6418 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
6419
6420 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
6421 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
6422!
6423 real(dp) :: Areduce
6424 real(dp) :: Asend
6425 real(dp), dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
6426!
6427 character (len=MPI_MAX_ERROR_STRING) :: string
6428
6429 character (len=*), parameter :: MyFile = &
6430 & __FILE__//", mp_reduce_0dp"
6431
6432# ifdef PROFILE
6433!
6434!-----------------------------------------------------------------------
6435! Turn on time clocks.
6436!-----------------------------------------------------------------------
6437!
6438 CALL wclock_on (ng, model, 65, __line__, myfile)
6439# endif
6440# ifdef MPI
6441!
6442!-----------------------------------------------------------------------
6443! Set distributed-memory communicator handle (context ID).
6444!-----------------------------------------------------------------------
6445!
6446 IF (PRESENT(inpcomm)) THEN
6447 mycomm=inpcomm
6448 ELSE
6449 mycomm=ocn_comm_world
6450 END IF
6451# endif
6452!
6453!-----------------------------------------------------------------------
6454! Collect and reduce requested scalar variables.
6455!-----------------------------------------------------------------------
6456!
6457! Pack data to reduce.
6458!
6459 asend=a
6460 npts=1
6461!
6462! Collect and reduce.
6463!
6464# if defined REDUCE_ALLREDUCE
6465 IF (handle_op(1:3).eq.'MIN') THEN
6466 handle=mpi_min
6467 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6468 handle=mpi_max
6469 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6470 handle=mpi_sum
6471 END IF
6472 CALL mpi_allreduce (asend, areduce, npts, mp_double, handle, &
6473 & mycomm, myerror)
6474 IF (myerror.ne.mpi_success) THEN
6475 CALL mpi_error_string (myerror, string, lstr, serror)
6476 lstr=len_trim(string)
6477 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
6478 & string(1:lstr)
6479 exit_flag=2
6480 RETURN
6481 END IF
6482# elif defined REDUCE_ALLGATHER
6483 CALL mpi_allgather (asend, npts, mp_double, &
6484 & arecv, npts, mp_double, &
6485 & mycomm, myerror)
6486 IF (myerror.ne.mpi_success) THEN
6487 CALL mpi_error_string (myerror, string, lstr, serror)
6488 lstr=len_trim(string)
6489 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6490 & string(1:lstr)
6491 exit_flag=2
6492 RETURN
6493 END IF
6494 areduce=arecv(0)
6495 DO rank=1,ntilei(ng)*ntilej(ng)-1
6496 IF (handle_op(1:3).eq.'MIN') THEN
6497 areduce=min(areduce,arecv(rank))
6498 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6499 areduce=max(areduce,arecv(rank))
6500 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6501 areduce=areduce+arecv(rank)
6502 END IF
6503 END DO
6504# elif defined REDUCE_SENDRECV
6505 IF (myrank.eq.mymaster) THEN
6506 DO rank=1,ntilei(ng)*ntilej(ng)-1
6507 CALL mpi_irecv (arecv(rank), npts, mp_double, rank, &
6508 & rank+500, mycomm, rrequest(rank), &
6509 & myerror)
6510 END DO
6511 areduce=asend
6512 DO rank=1,ntilei(ng)*ntilej(ng)-1
6513 CALL mpi_wait (rrequest(rank), rstatus, myerror)
6514 IF (myerror.ne.mpi_success) THEN
6515 CALL mpi_error_string (myerror, string, lstr, serror)
6516 lstr=len_trim(string)
6517          WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
6518 exit_flag=2
6519 RETURN
6520 END IF
6521 IF (handle_op(1:3).eq.'MIN') THEN
6522 areduce=min(areduce,arecv(rank))
6523 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6524 areduce=max(areduce,arecv(rank))
6525 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6526 areduce=areduce+arecv(rank)
6527 END IF
6528 END DO
6529 ELSE
6530 CALL mpi_isend (asend, npts, mp_double, mymaster, myrank+500, &
6531 & mycomm, request, myerror)
6532 CALL mpi_wait (request, sstatus, myerror)
6533 IF (myerror.ne.mpi_success) THEN
6534 CALL mpi_error_string (myerror, string, lstr, serror)
6535 lstr=len_trim(string)
6536 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
6537 exit_flag=2
6538 RETURN
6539 END IF
6540 END IF
6541!
6542! Broadcast the reduced variables from the master process to all
6543! processes in the group.
6544!
6545 CALL mpi_bcast (areduce, npts, mp_double, mymaster, &
6546 & mycomm, myerror)
6547 IF (myerror.ne.mpi_success) THEN
6548 CALL mpi_error_string (myerror, string, lstr, serror)
6549 lstr=len_trim(string)
6550 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
6551 exit_flag=2
6552 RETURN
6553 END IF
6554# endif
6555 10 FORMAT (/,' MP_REDUCE_0DP - error during ',a,' call, Task = ', &
6556 & i3.3,' Error = ',i3,/,16x,a)
6557!
6558! Unpack.
6559!
6560 a=areduce
6561# ifdef PROFILE
6562!
6563!-----------------------------------------------------------------------
6564! Turn off time clocks.
6565!-----------------------------------------------------------------------
6566!
6567 CALL wclock_off (ng, model, 65, __line__, myfile)
6568# endif
6569!
6570 RETURN
6571 END SUBROUTINE mp_reduce_0dp
6572!
6573 SUBROUTINE mp_reduce_1dp (ng, model, Asize, A, handle_op, InpComm)
6575!***********************************************************************
6576! !
6577! This routine collects and reduces requested double precision !
6578! variables from all nodes in the group. Then, it broadcasts !
6579! reduced variables to all nodes in the group. !
6580! !
6581! On Input: !
6582! !
6583! ng Nested grid number. !
6584! model Calling model identifier. !
6585! Asize Number of scalar variables to reduce. !
6586! A Vector of scalar variables to reduce. !
6587! handle_op Reduction operation handle (string). The following !
6588! reduction operations are supported: !
6589! 'MIN', 'MAX', 'SUM' !
6590! InpComm Communicator handle (integer, OPTIONAL). !
6591! !
6592! On Output: !
6593! !
6594! A Vector of reduced scalar variables. !
6595! !
6596!***********************************************************************
6597!
6598! Imported variable declarations.
6599!
6600 integer, intent(in) :: ng, model, Asize
6601
6602 integer, intent(in), optional :: InpComm
6603!
6604 character (len=*), intent(in) :: handle_op(Asize)
6605!
6606 real(dp), intent(inout) :: A(Asize)
6607!
6608! Local variable declarations.
6609!
6610 integer :: Lstr, MyCOMM, MyError, Serror
6611 integer :: handle, i, rank, request
6612
6613 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
6614
6615 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
6616 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
6617!
6618 real(dp), dimension(Asize,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
6619 real(dp), dimension(Asize) :: Areduce
6620 real(dp), dimension(Asize) :: Asend
6621!
6622 character (len=MPI_MAX_ERROR_STRING) :: string
6623
6624 character (len=*), parameter :: MyFile = &
6625 & __FILE__//", mp_reduce_1dp"
6626
6627# ifdef PROFILE
6628!
6629!-----------------------------------------------------------------------
6630! Turn on time clocks.
6631!-----------------------------------------------------------------------
6632!
6633 CALL wclock_on (ng, model, 65, __line__, myfile)
6634# endif
6635# ifdef MPI
6636!
6637!-----------------------------------------------------------------------
6638! Set distributed-memory communicator handle (context ID).
6639!-----------------------------------------------------------------------
6640!
6641 IF (PRESENT(inpcomm)) THEN
6642 mycomm=inpcomm
6643 ELSE
6644 mycomm=ocn_comm_world
6645 END IF
6646# endif
6647!
6648!-----------------------------------------------------------------------
6649! Collect and reduce requested scalar variables.
6650!-----------------------------------------------------------------------
6651!
6652! Maximum automatic buffer memory size in bytes.
6653!
6654 bmemmax(ng)=max(bmemmax(ng), real((SIZE(arecv)+ &
6655 & 2*asize)*kind(a),r8))
6656!
6657! Pack data to reduce.
6658!
6659 DO i=1,asize
6660 asend(i)=a(i)
6661 END DO
6662!
6663! Collect and reduce.
6664!
6665# if defined REDUCE_ALLREDUCE
6666 DO i=1,asize
6667 IF (handle_op(i)(1:3).eq.'MIN') THEN
6668 handle=mpi_min
6669 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6670 handle=mpi_max
6671 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6672 handle=mpi_sum
6673 END IF
6674 CALL mpi_allreduce (asend(i), areduce(i), 1, mp_double, handle, &
6675 & mycomm, myerror)
6676 IF (myerror.ne.mpi_success) THEN
6677 CALL mpi_error_string (myerror, string, lstr, serror)
6678 lstr=len_trim(string)
6679 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
6680 & string(1:lstr)
6681 exit_flag=2
6682 RETURN
6683 END IF
6684 END DO
6685# elif defined REDUCE_ALLGATHER
6686 CALL mpi_allgather (asend, asize, mp_double, &
6687 & arecv, asize, mp_double, &
6688 & mycomm, myerror)
6689 IF (myerror.ne.mpi_success) THEN
6690 CALL mpi_error_string (myerror, string, lstr, serror)
6691 lstr=len_trim(string)
6692 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6693 & string(1:lstr)
6694 exit_flag=2
6695 RETURN
6696 END IF
6697 DO i=1,asize
6698 areduce(i)=arecv(i,0)
6699 DO rank=1,ntilei(ng)*ntilej(ng)-1
6700 IF (handle_op(i)(1:3).eq.'MIN') THEN
6701 areduce(i)=min(areduce(i),arecv(i,rank))
6702 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6703 areduce(i)=max(areduce(i),arecv(i,rank))
6704 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6705 areduce(i)=areduce(i)+arecv(i,rank)
6706 END IF
6707 END DO
6708 END DO
6709# elif defined REDUCE_SENDRECV
6710 IF (myrank.eq.mymaster) THEN
6711 DO rank=1,ntilei(ng)*ntilej(ng)-1
6712 CALL mpi_irecv (arecv(1,rank), asize, mp_double, rank, &
6713 & rank+500, mycomm, rrequest(rank), &
6714 & myerror)
6715 END DO
6716 DO i=1,asize
6717 areduce(i)=asend(i)
6718 END DO
6719 DO rank=1,ntilei(ng)*ntilej(ng)-1
6720 CALL mpi_wait (rrequest(rank), rstatus, myerror)
6721 IF (myerror.ne.mpi_success) THEN
6722 CALL mpi_error_string (myerror, string, lstr, serror)
6723 lstr=len_trim(string)
6724          WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
6725 exit_flag=2
6726 RETURN
6727 END IF
6728 DO i=1,asize
6729 IF (handle_op(i)(1:3).eq.'MIN') THEN
6730 areduce(i)=min(areduce(i),arecv(i,rank))
6731 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6732 areduce(i)=max(areduce(i),arecv(i,rank))
6733 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6734 areduce(i)=areduce(i)+arecv(i,rank)
6735 END IF
6736 END DO
6737 END DO
6738 ELSE
6739 CALL mpi_isend (asend, asize, mp_double, mymaster, myrank+500, &
6740 & mycomm, request, myerror)
6741 CALL mpi_wait (request, sstatus, myerror)
6742 IF (myerror.ne.mpi_success) THEN
6743 CALL mpi_error_string (myerror, string, lstr, serror)
6744 lstr=len_trim(string)
6745 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
6746 exit_flag=2
6747 RETURN
6748 END IF
6749 END IF
6750!
6751! Broadcast the reduced variables from the master process to all
6752! processes in the group.
6753!
6754 CALL mpi_bcast (areduce, asize, mp_double, mymaster, &
6755 & mycomm, myerror)
6756 IF (myerror.ne.mpi_success) THEN
6757 CALL mpi_error_string (myerror, string, lstr, serror)
6758 lstr=len_trim(string)
6759 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
6760 exit_flag=2
6761 RETURN
6762 END IF
6763# endif
6764 10 FORMAT (/,' MP_REDUCE_1DP - error during ',a,' call, Task = ', &
6765 & i3.3,' Error = ',i3,/,16x,a)
6766!
6767! Unpack.
6768!
6769 DO i=1,asize
6770 a(i)=areduce(i)
6771 END DO
6772# ifdef PROFILE
6773!
6774!-----------------------------------------------------------------------
6775! Turn off time clocks.
6776!-----------------------------------------------------------------------
6777!
6778 CALL wclock_off (ng, model, 65, __line__, myfile)
6779# endif
6780!
6781 RETURN
6782 END SUBROUTINE mp_reduce_1dp
6783# endif
6784!
6785 SUBROUTINE mp_reduce_0d (ng, model, Asize, A, handle_op, InpComm)
6787!***********************************************************************
6788! !
6789! This routine collects and reduces requested variables from all !
6790! nodes in the group. Then, it broadcasts reduced variables to !
6791! all nodes in the group. !
6792! !
6793! On Input: !
6794! !
6795! ng Nested grid number. !
6796! model Calling model identifier. !
6797! Asize Number of scalar variables to reduce. !
6798! A Vector of scalar variables to reduce. !
6799! handle_op Reduction operation handle (string). The following !
6800! reduction operations are supported: !
6801! 'MIN', 'MAX', 'SUM' !
6802! InpComm Communicator handle (integer, OPTIONAL). !
6803! !
6804! On Output: !
6805! !
6806! A Vector of reduced scalar variables. !
6807! !
6808!***********************************************************************
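!
!  Usage sketch (names are illustrative; iNLM is an example model
!  identifier).  A single scalar, for example the largest Courant
!  number on the local tile, is reduced to its global maximum and
!  returned in place on every task:
!
!      real(r8) :: my_Cmax
!
!      CALL mp_reduce_0d (ng, iNLM, 1, my_Cmax, 'MAX')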
6809!
6810! Imported variable declarations.
6811!
6812 integer, intent(in) :: ng, model, Asize
6813
6814 integer, intent(in), optional :: InpComm
6815!
6816 character (len=*), intent(in) :: handle_op
6817!
6818 real(r8), intent(inout) :: A
6819!
6820! Local variable declarations.
6821!
6822 integer :: Lstr, MyCOMM, MyError, Npts, Serror
6823 integer :: handle, i, rank, request
6824
6825 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
6826
6827 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
6828 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
6829!
6830 real(r8) :: Areduce
6831 real(r8) :: Asend
6832 real(r8), dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
6833!
6834 character (len=MPI_MAX_ERROR_STRING) :: string
6835
6836 character (len=*), parameter :: MyFile = &
6837 & __FILE__//", mp_reduce_0d"
6838
6839# ifdef PROFILE
6840!
6841!-----------------------------------------------------------------------
6842! Turn on time clocks.
6843!-----------------------------------------------------------------------
6844!
6845 CALL wclock_on (ng, model, 65, __line__, myfile)
6846# endif
6847# ifdef MPI
6848!
6849!-----------------------------------------------------------------------
6850! Set distributed-memory communicator handle (context ID).
6851!-----------------------------------------------------------------------
6852!
6853 IF (PRESENT(inpcomm)) THEN
6854 mycomm=inpcomm
6855 ELSE
6856 mycomm=ocn_comm_world
6857 END IF
6858# endif
6859!
6860!-----------------------------------------------------------------------
6861! Collect and reduce requested scalar variables.
6862!-----------------------------------------------------------------------
6863!
6864! Pack data to reduce.
6865!
6866 asend=a
6867 npts=1
6868!
6869! Collect and reduce.
6870!
6871# if defined REDUCE_ALLREDUCE
6872 IF (handle_op(1:3).eq.'MIN') THEN
6873 handle=mpi_min
6874 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6875 handle=mpi_max
6876 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6877 handle=mpi_sum
6878 END IF
6879 CALL mpi_allreduce (asend, areduce, npts, mp_float, handle, &
6880 & mycomm, myerror)
6881 IF (myerror.ne.mpi_success) THEN
6882 CALL mpi_error_string (myerror, string, lstr, serror)
6883 lstr=len_trim(string)
6884 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
6885 & string(1:lstr)
6886 exit_flag=2
6887 RETURN
6888 END IF
6889# elif defined REDUCE_ALLGATHER
6890 CALL mpi_allgather (asend, npts, mp_float, &
6891 & arecv, npts, mp_float, &
6892 & mycomm, myerror)
6893 IF (myerror.ne.mpi_success) THEN
6894 CALL mpi_error_string (myerror, string, lstr, serror)
6895 lstr=len_trim(string)
6896 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6897 & string(1:lstr)
6898 exit_flag=2
6899 RETURN
6900 END IF
6901 areduce=arecv(0)
6902 DO rank=1,ntilei(ng)*ntilej(ng)-1
6903 IF (handle_op(1:3).eq.'MIN') THEN
6904 areduce=min(areduce,arecv(rank))
6905 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6906 areduce=max(areduce,arecv(rank))
6907 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6908 areduce=areduce+arecv(rank)
6909 END IF
6910 END DO
6911# elif defined REDUCE_SENDRECV
6912 IF (myrank.eq.mymaster) THEN
6913 DO rank=1,ntilei(ng)*ntilej(ng)-1
6914 CALL mpi_irecv (arecv(rank), npts, mp_float, rank, &
6915 & rank+500, mycomm, rrequest(rank), &
6916 & myerror)
6917 END DO
6918 areduce=asend
6919 DO rank=1,ntilei(ng)*ntilej(ng)-1
6920 CALL mpi_wait (rrequest(rank), rstatus, myerror)
6921 IF (myerror.ne.mpi_success) THEN
6922 CALL mpi_error_string (myerror, string, lstr, serror)
6923 lstr=len_trim(string)
6924          WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
6925 exit_flag=2
6926 RETURN
6927 END IF
6928 IF (handle_op(1:3).eq.'MIN') THEN
6929 areduce=min(areduce,arecv(rank))
6930 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6931 areduce=max(areduce,arecv(rank))
6932 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6933 areduce=areduce+arecv(rank)
6934 END IF
6935 END DO
6936 ELSE
6937 CALL mpi_isend (asend, npts, mp_float, mymaster, myrank+500, &
6938 & mycomm, request, myerror)
6939 CALL mpi_wait (request, sstatus, myerror)
6940 IF (myerror.ne.mpi_success) THEN
6941 CALL mpi_error_string (myerror, string, lstr, serror)
6942 lstr=len_trim(string)
6943 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
6944 exit_flag=2
6945 RETURN
6946 END IF
6947 END IF
6948!
6949! Broadcast the reduced variables from the master process to all
6950! processes in the group.
6951!
6952 CALL mpi_bcast (areduce, npts, mp_float, mymaster, &
6953 & mycomm, myerror)
6954 IF (myerror.ne.mpi_success) THEN
6955 CALL mpi_error_string (myerror, string, lstr, serror)
6956 lstr=len_trim(string)
6957 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
6958 exit_flag=2
6959 RETURN
6960 END IF
6961# endif
6962 10 FORMAT (/,' MP_REDUCE_0D - error during ',a,' call, Task = ', &
6963 & i3.3,' Error = ',i3,/,16x,a)
6964!
6965! Unpack.
6966!
6967 a=areduce
6968# ifdef PROFILE
6969!
6970!-----------------------------------------------------------------------
6971! Turn off time clocks.
6972!-----------------------------------------------------------------------
6973!
6974 CALL wclock_off (ng, model, 65, __line__, myfile)
6975# endif
6976!
6977 RETURN
6978 END SUBROUTINE mp_reduce_0d
6979!
6980 SUBROUTINE mp_reduce_1d (ng, model, Asize, A, handle_op, InpComm)
6982!***********************************************************************
6983! !
6984! This routine collects and reduces requested variables from all !
6985! nodes in the group. Then, it broadcasts reduced variables to !
6986! all nodes in the group. !
6987! !
6988! On Input: !
6989! !
6990! ng Nested grid number. !
6991! model Calling model identifier. !
6992! Asize Number of scalar variables to reduce. !
6993! A Vector of scalar variables to reduce. !
6994! handle_op Reduction operation handle (string). The following !
6995! reduction operations are supported: !
6996! 'MIN', 'MAX', 'SUM' !
6997! InpComm Communicator handle (integer, OPTIONAL). !
6998! !
6999! On Output: !
7000! !
7001! A Vector of reduced scalar variables. !
7002! !
7003!***********************************************************************
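!
!  Usage sketch (names are illustrative; iNLM is an example model
!  identifier).  Several scalars, each with its own reduction
!  operation, can be processed in a single call:
!
!      real(r8) :: rbuffer(3)
!      character (len=3) :: op_handle(3)
!
!      rbuffer(1)=my_volume                ! local tile volume
!      rbuffer(2)=my_zeta_min              ! local free-surface minimum
!      rbuffer(3)=my_zeta_max              ! local free-surface maximum
!      op_handle(1)='SUM'
!      op_handle(2)='MIN'
!      op_handle(3)='MAX'
!      CALL mp_reduce_1d (ng, iNLM, 3, rbuffer, op_handle)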
7004!
7005! Imported variable declarations.
7006!
7007 integer, intent(in) :: ng, model, Asize
7008
7009 integer, intent(in), optional :: InpComm
7010!
7011 character (len=*), intent(in) :: handle_op(Asize)
7012!
7013 real(r8), intent(inout) :: A(Asize)
7014!
7015! Local variable declarations.
7016!
7017 integer :: Lstr, MyCOMM, MyError, Serror
7018 integer :: handle, i, rank, request
7019
7020 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
7021
7022 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
7023 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
7024!
7025 real(r8), dimension(Asize,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
7026 real(r8), dimension(Asize) :: Areduce
7027 real(r8), dimension(Asize) :: Asend
7028!
7029 character (len=MPI_MAX_ERROR_STRING) :: string
7030
7031 character (len=*), parameter :: MyFile = &
7032 & __FILE__//", mp_reduce_1d"
7033
7034# ifdef PROFILE
7035!
7036!-----------------------------------------------------------------------
7037! Turn on time clocks.
7038!-----------------------------------------------------------------------
7039!
7040 CALL wclock_on (ng, model, 65, __line__, myfile)
7041# endif
7042# ifdef MPI
7043!
7044!-----------------------------------------------------------------------
7045! Set distributed-memory communicator handle (context ID).
7046!-----------------------------------------------------------------------
7047!
7048 IF (PRESENT(inpcomm)) THEN
7049 mycomm=inpcomm
7050 ELSE
7051 mycomm=ocn_comm_world
7052 END IF
7053# endif
7054!
7055!-----------------------------------------------------------------------
7056! Collect and reduce requested scalar variables.
7057!-----------------------------------------------------------------------
7058!
7059! Maximum automatic buffer memory size in bytes.
7060!
7061 bmemmax(ng)=max(bmemmax(ng), real((SIZE(arecv)+ &
7062 & 2*asize)*kind(a),r8))
7063!
7064! Pack data to reduce.
7065!
7066 DO i=1,asize
7067 asend(i)=a(i)
7068 END DO
7069!
7070! Collect and reduce.
7071!
7072# if defined REDUCE_ALLREDUCE
7073 DO i=1,asize
7074 IF (handle_op(i)(1:3).eq.'MIN') THEN
7075 handle=mpi_min
7076 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
7077 handle=mpi_max
7078 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
7079 handle=mpi_sum
7080 END IF
7081 CALL mpi_allreduce (asend(i), areduce(i), 1, mp_float, handle, &
7082 & mycomm, myerror)
7083 IF (myerror.ne.mpi_success) THEN
7084 CALL mpi_error_string (myerror, string, lstr, serror)
7085 lstr=len_trim(string)
7086 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
7087 & string(1:lstr)
7088 exit_flag=2
7089 RETURN
7090 END IF
7091 END DO
7092# elif defined REDUCE_ALLGATHER
7093 CALL mpi_allgather (asend, asize, mp_float, &
7094 & arecv, asize, mp_float, &
7095 & mycomm, myerror)
7096 IF (myerror.ne.mpi_success) THEN
7097 CALL mpi_error_string (myerror, string, lstr, serror)
7098 lstr=len_trim(string)
7099 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
7100 & string(1:lstr)
7101 exit_flag=2
7102 RETURN
7103 END IF
7104 DO i=1,asize
7105 areduce(i)=arecv(i,0)
7106 DO rank=1,ntilei(ng)*ntilej(ng)-1
7107 IF (handle_op(i)(1:3).eq.'MIN') THEN
7108 areduce(i)=min(areduce(i),arecv(i,rank))
7109 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
7110 areduce(i)=max(areduce(i),arecv(i,rank))
7111 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
7112 areduce(i)=areduce(i)+arecv(i,rank)
7113 END IF
7114 END DO
7115 END DO
7116# elif defined REDUCE_SENDRECV
7117 IF (myrank.eq.mymaster) THEN
7118 DO rank=1,ntilei(ng)*ntilej(ng)-1
7119 CALL mpi_irecv (arecv(1,rank), asize, mp_float, rank, &
7120 & rank+500, mycomm, rrequest(rank), myerror)
7121 END DO
7122 DO i=1,asize
7123 areduce(i)=asend(i)
7124 END DO
7125 DO rank=1,ntilei(ng)*ntilej(ng)-1
7126 CALL mpi_wait (rrequest(rank), rstatus, myerror)
7127 IF (myerror.ne.mpi_success) THEN
7128 CALL mpi_error_string (myerror, string, lstr, serror)
7129 lstr=len_trim(string)
7130          WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
7131 exit_flag=2
7132 RETURN
7133 END IF
7134 DO i=1,asize
7135 IF (handle_op(i)(1:3).eq.'MIN') THEN
7136 areduce(i)=min(areduce(i),arecv(i,rank))
7137 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
7138 areduce(i)=max(areduce(i),arecv(i,rank))
7139 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
7140 areduce(i)=areduce(i)+arecv(i,rank)
7141 END IF
7142 END DO
7143 END DO
7144 ELSE
7145 CALL mpi_isend (asend, asize, mp_float, mymaster, myrank+500, &
7146 & mycomm, request, myerror)
7147 CALL mpi_wait (request, sstatus, myerror)
7148 IF (myerror.ne.mpi_success) THEN
7149 CALL mpi_error_string (myerror, string, lstr, serror)
7150 lstr=len_trim(string)
7151 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
7152 exit_flag=2
7153 RETURN
7154 END IF
7155 END IF
7156!
7157! Broadcast the reduced variables from the master process to all
7158! processes in the group.
7159!
7160 CALL mpi_bcast (areduce, asize, mp_float, mymaster, &
7161 & mycomm, myerror)
7162 IF (myerror.ne.mpi_success) THEN
7163 CALL mpi_error_string (myerror, string, lstr, serror)
7164 lstr=len_trim(string)
7165 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
7166 exit_flag=2
7167 RETURN
7168 END IF
7169# endif
7170 10 FORMAT (/,' MP_REDUCE_1D - error during ',a,' call, Task = ', &
7171 & i3.3,' Error = ',i3,/,16x,a)
7172!
7173! Unpack.
7174!
7175 DO i=1,asize
7176 a(i)=areduce(i)
7177 END DO
7178# ifdef PROFILE
7179!
7180!-----------------------------------------------------------------------
7181! Turn off time clocks.
7182!-----------------------------------------------------------------------
7183!
7184 CALL wclock_off (ng, model, 65, __line__, myfile)
7185# endif
7186!
7187 RETURN
7188 END SUBROUTINE mp_reduce_1d
7189!
7190 SUBROUTINE mp_reduce2 (ng, model, Isize, Jsize, A, handle_op, &
7191 & InpComm)
7192!
7193!***********************************************************************
7194! !
7195! This routine computes the global minimum/maximum and its associated !
7196! qualifiers, such as location and/or other scalar components. Then,  !
7197! it broadcasts the reduced variables to all nodes in the group.      !
7198! !
7199! On Input: !
7200! !
7201! ng Nested grid number. !
7202! model Calling model identifier. !
7203! Isize Size of I-dimension: the minimum/maximum to reduce !
7204! is in location A(1,:) and qualifiers A(2:Isize,:). !
7205! Jsize Size of J-dimension: number of different sets of !
7206! minimum and/or maximum to process. !
7207! A Matrix of variables and qualifiers to reduce. !
7208! handle_op Reduction operation handle (string) of size Jsize. !
7209! The following reduction operations are supported: !
7210! 'MINLOC', 'MAXLOC' !
7211! InpComm Communicator handle (integer, OPTIONAL). !
7212! !
7213! On Output: !
7214! !
7215! A Matrix of reduced variables and qualifiers. !
7216! !
7217!***********************************************************************
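!
!  Usage sketch (names are illustrative; iNLM is an example model
!  identifier).  For each of the Jsize sets (columns of A), A(1,j)
!  holds the value to compare and A(2:Isize,j) its qualifiers, here
!  the (i,j) location of the local maximum speed:
!
!      real(r8) :: stats(3,1)
!      character (len=6) :: op_handle(1)
!
!      stats(1,1)=my_max_speed             ! value to maximize
!      stats(2,1)=REAL(my_imax,r8)         ! its I-location
!      stats(3,1)=REAL(my_jmax,r8)         ! its J-location
!      op_handle(1)='MAXLOC'
!      CALL mp_reduce2 (ng, iNLM, 3, 1, stats, op_handle)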
7218!
7219! Imported variable declarations.
7220!
7221 integer, intent(in) :: ng, model, Isize, Jsize
7222
7223 integer, intent(in), optional :: InpComm
7224!
7225 character (len=*), intent(in) :: handle_op(Jsize)
7226!
7227 real(r8), intent(inout) :: A(Isize,Jsize)
7228!
7229! Local variable declarations.
7230!
7231 integer :: Lstr, MyCOMM, MyError, Serror
7232 integer :: handle, i, j
7233!
7234 real(r8), dimension(2,Isize) :: Areduce
7235 real(r8), dimension(2,Isize) :: Asend
7236!
7237 character (len=MPI_MAX_ERROR_STRING) :: string
7238
7239 character (len=*), parameter :: MyFile = &
7240 & __FILE__//", mp_reduce2"
7241
7242# ifdef PROFILE
7243!
7244!-----------------------------------------------------------------------
7245! Turn on time clocks.
7246!-----------------------------------------------------------------------
7247!
7248 CALL wclock_on (ng, model, 65, __line__, myfile)
7249# endif
7250# ifdef MPI
7251!
7252!-----------------------------------------------------------------------
7253! Set distributed-memory communicator handle (context ID).
7254!-----------------------------------------------------------------------
7255!
7256 IF (PRESENT(inpcomm)) THEN
7257 mycomm=inpcomm
7258 ELSE
7259 mycomm=ocn_comm_world
7260 END IF
7261# endif
7262!
7263!-----------------------------------------------------------------------
7264! Reduce requested variables and qualifiers.
7265!-----------------------------------------------------------------------
7266!
7267! Maximum automatic buffer memory size in bytes.
7268!
7269 bmemmax(ng)=max(bmemmax(ng), real((SIZE(areduce)+ &
7270 & SIZE(asend))*kind(a),r8))
7271!
7272! Pack and reduce.
7273!
7274 DO j=1,jsize
7275 DO i=1,isize
7276 asend(1,i)=a(1,j)
7277 asend(2,i)=a(i,j)
7278 END DO
7279 IF (handle_op(j)(1:6).eq.'MINLOC') THEN
7280 handle=mpi_minloc
7281 ELSE IF (handle_op(j)(1:6).eq.'MAXLOC') THEN
7282 handle=mpi_maxloc
7283 END IF
7284 CALL mpi_allreduce (asend, areduce, isize, &
7285# ifdef DOUBLE_PRECISION
7286 & mpi_2double_precision, &
7287# else
7288 & mpi_2real, &
7289# endif
7290 & handle, mycomm, myerror)
7291 IF (myerror.ne.mpi_success) THEN
7292 CALL mpi_error_string (myerror, string, lstr, serror)
7293 lstr=len_trim(string)
7294 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
7295 & string(1:lstr)
7296 10 FORMAT (/,' MP_REDUCE2 - error during ',a,' call, Task = ', &
7297 & i3.3,' Error = ',i3,/,16x,a)
7298 exit_flag=2
7299 RETURN
7300 END IF
7301!
7302! Unpack.
7303!
7304 a(1,j)=areduce(1,1)
7305 DO i=2,isize
7306 a(i,j)=areduce(2,i)
7307 END DO
7308 END DO
7309
7310# ifdef PROFILE
7311!
7312!-----------------------------------------------------------------------
7313! Turn off time clocks.
7314!-----------------------------------------------------------------------
7315!
7316 CALL wclock_off (ng, model, 65, __line__, myfile)
7317# endif
7318!
7319 RETURN
7320 END SUBROUTINE mp_reduce2
7321!
7322 SUBROUTINE mp_scatter2d (ng, model, LBi, UBi, LBj, UBj, &
7323 & Nghost, gtype, Amin, Amax, &
7324# if defined READ_WATER && defined MASKING
7325 & NWpts, IJ_water, &
7326# endif
7327 & Npts, A, Awrk)
7328!
7329!***********************************************************************
7330! !
7331! This routine scatters input global data, packed as a 1D real       !
7332! array, to each tiled array. Because this routine is also used by   !
7333! the adjoint model, the ghost-points in the halo region are NOT     !
7334! updated in the output tile array (Awrk). It is used by the master  !
7335! node to distribute data read from NetCDF files during serial I/O.  !
7336! !
7337! On Input: !
7338! !
7339! ng Nested grid number. !
7340! model Calling model identifier. !
7341! LBi I-dimension Lower bound. !
7342! UBi I-dimension Upper bound. !
7343! LBj J-dimension Lower bound. !
7344! UBj J-dimension Upper bound. !
7345! Nghost Number of ghost-points in the halo region. !
7346! gtype C-grid type. If negative and Land-Sea mask is !
7347! available, only water-points are processed. !
7348! Amin Input array minimum value. !
7349! Amax Input array maximum value. !
7350! NWpts Number of water points. !
7351! IJ_water IJ-indices for water points. !
7352!     Npts       Number of points to process in A.                    !
7353! A Input global data from each node packed into 1D array !
7354! in column-major order. That is, in the same way !
7355! that Fortran multi-dimensional arrays are stored !
7356! in memory. Only valid on root task, MyMaster. !
7358! !
7359! On Output: !
7360! !
7361! Awrk 2D tiled, floating-point array. !
7362! !
7363!***********************************************************************
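!
!  Usage sketch (names are illustrative; iNLM is an example model
!  identifier, and the optional READ_WATER/MASKING arguments are
!  omitted).  The master task holds the packed global RHO-point field
!  in "Aglobal" together with its Amin/Amax, which are broadcast to
!  all tasks; every task receives its tile in "Atile":
!
!      integer  :: Npts
!      real(r8) :: Amin, Amax
!      real(r8) :: Aglobal((Lm(ng)+2)*(Mm(ng)+2)+2)
!      real(r8) :: Atile(LBi:UBi,LBj:UBj)
!
!      Npts=(Lm(ng)+2)*(Mm(ng)+2)
!      CALL mp_scatter2d (ng, iNLM, LBi, UBi, LBj, UBj,                 &
!     &                   NghostPoints, r2dvar, Amin, Amax,             &
!     &                   Npts, Aglobal, Atile)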
7364!
7365! Imported variable declarations.
7366!
7367 integer, intent(in) :: ng, model
7368 integer, intent(in) :: LBi, UBi, LBj, UBj
7369 integer, intent(in) :: Nghost, gtype, Npts
7370
7371# if defined READ_WATER && defined MASKING
7372 integer, intent(in) :: NWpts
7373 integer, intent(in) :: IJ_water(NWpts)
7374# endif
7375!
7376 real(r8), intent(inout) :: Amin, Amax
7377 real(r8), intent(inout) :: A(Npts+2)
7378 real(r8), intent(out) :: Awrk(LBi:UBi,LBj:UBj)
7379!
7380! Local variable declarations.
7381!
7382 integer :: Io, Ie, Jo, Je, Ioff, Joff
7383 integer :: Imin, Imax, Jmin, Jmax
7384 integer :: iLB, iUB, jLB, jUB
7385 integer :: Isize, Jsize, IJsize, Vsize
7386 integer :: Lstr, MyError, MySize, MyType, Ntasks, Serror, ghost
7387 integer :: Cgrid, i, ic, ij, j, jc, mc, nc, rank
7388!
7389 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
7390!
7391# ifndef SCATTER_BCAST
7392 integer, allocatable :: ij_global(:,:)
7393!
7394 real(r8) :: Astats(2)
7395 real(r8), allocatable :: Vrecv(:)
7396 real(r8), dimension(Npts) :: Vreset
7397# endif
7398 real(r8), dimension(Npts+2) :: Vglobal
7399!
7400 character (len=10) :: MyMethod
7401 character (len=MPI_MAX_ERROR_STRING) :: string
7402
7403 character (len=*), parameter :: MyFile = &
7404 & __FILE__//", mp_scatter2d"
7405
7406# ifdef PROFILE
7407!
7408!-----------------------------------------------------------------------
7409! Turn on time clocks.
7410!-----------------------------------------------------------------------
7411!
7412 CALL wclock_on (ng, model, 67, __line__, myfile)
7413# endif
7414!
7415!-----------------------------------------------------------------------
7416! Set horizontal starting and ending indices for parallel domain
7417! partitions in the XI- and ETA-directions.
7418!-----------------------------------------------------------------------
7419!
7420! Maximum automatic buffer memory size in bytes.
7421!
7422 bmemmax(ng)=max(bmemmax(ng), real(SIZE(vglobal)*kind(a),r8))
7423!
7424! Set full grid first and last point according to staggered C-grid
7425! classification. Notice that the offsets are for the private array
7426! counter.
7427!
7428 mytype=abs(gtype)
7429
7430 SELECT CASE (mytype)
7431 CASE (p2dvar, p3dvar)
7432 io=iobounds(ng) % ILB_psi
7433 ie=iobounds(ng) % IUB_psi
7434 jo=iobounds(ng) % JLB_psi
7435 je=iobounds(ng) % JUB_psi
7436 ioff=0
7437 joff=1
7438 CASE (r2dvar, r3dvar)
7439 io=iobounds(ng) % ILB_rho
7440 ie=iobounds(ng) % IUB_rho
7441 jo=iobounds(ng) % JLB_rho
7442 je=iobounds(ng) % JUB_rho
7443 ioff=1
7444 joff=0
7445 CASE (u2dvar, u3dvar)
7446 io=iobounds(ng) % ILB_u
7447 ie=iobounds(ng) % IUB_u
7448 jo=iobounds(ng) % JLB_u
7449 je=iobounds(ng) % JUB_u
7450 ioff=0
7451 joff=0
7452 CASE (v2dvar, v3dvar)
7453 io=iobounds(ng) % ILB_v
7454 ie=iobounds(ng) % IUB_v
7455 jo=iobounds(ng) % JLB_v
7456 je=iobounds(ng) % JUB_v
7457 ioff=1
7458 joff=1
7459 CASE DEFAULT ! RHO-points
7460 io=iobounds(ng) % ILB_rho
7461 ie=iobounds(ng) % IUB_rho
7462 jo=iobounds(ng) % JLB_rho
7463 je=iobounds(ng) % JUB_rho
7464 ioff=1
7465 joff=0
7466 END SELECT
7467!
7468 isize=ie-io+1
7469 jsize=je-jo+1
7470 ijsize=isize*jsize
7471!
7472! Set scatter counts and displacement vectors. Non-overlapping
7473! (ghost=0) tile ranges must be used for 'mpi_scatterv' to work
7474! correctly.
7475!
7476 ghost=0 ! non-overlapping
7477!
7478 SELECT CASE (mytype)
7479 CASE (p2dvar, p3dvar)
7480 cgrid=1
7481 CASE (r2dvar, r3dvar)
7482 cgrid=2
7483 CASE (u2dvar, u3dvar)
7484 cgrid=3
7485 CASE (v2dvar, v3dvar)
7486 cgrid=4
7487 CASE DEFAULT ! RHO-points
7488 cgrid=2
7489 END SELECT
7490!
7491 ntasks=ntilei(ng)*ntilej(ng)
7492 nc=0
7493 DO rank=0,ntasks-1
7494 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
7495 iub=bounds(ng) % Imax(cgrid,ghost,rank)
7496 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
7497 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
7498 displs(rank)=nc
7499 DO j=jlb,jub
7500 DO i=ilb,iub
7501 nc=nc+1
7502 END DO
7503 END DO
7504 counts(rank)=nc-displs(rank)
7505 END DO
7506!
7507! Load global data into send buffer, Vglobal, which is only known by
7508! the root process at this point. If the input contains only water
7509! points, fill the land points with zeros.
7510!
7511 vglobal=0.0_r8
7512 vsize=npts
7513!
7514 IF (myrank.eq.mymaster) Then
7515 IF (gtype.gt.0) THEN
7516 vglobal(1:vsize)=a(1:vsize)
7517# if defined READ_WATER && defined MASKING
7518 ELSE
7519 ij=0
7520 mc=0
7521 nc=0
7522 DO j=jo,je
7523 jc=(j-joff)*isize
7524 DO i=io,ie
7525 ij=ij+1
7526 ic=i+ioff+jc
7527 IF (ij_water(mc+1).eq.ij) THEN
7528 mc=mc+1
7529 nc=nc+1
7530 vglobal(ic)=a(nc)
7531 ELSE
7532 vglobal(ic)=0.0_r8
7533 END IF
7534 END DO
7535 END DO
7536 vsize=ic
7537# endif
7538 END IF
7539 END IF
7540!
7541!-----------------------------------------------------------------------
7542! Scatter requested global data.
7543!-----------------------------------------------------------------------
7544
7545# ifdef SCATTER_BCAST
7546!
7547! Set tile range to include overlapping halos, if requested.
7548!
7549 IF (nghost.eq.0) THEN
7550 ghost=0 ! non-overlapping
7551 ELSE
7552 ghost=1 ! overlapping
7553 END IF
7554 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
7555 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
7556 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
7557 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
7558!
7559! Append the Min/Max values since they are only known by the root process.
7560!
7561 IF (myrank.eq.mymaster) Then
7562 vglobal(vsize+1)=amin
7563 vglobal(vsize+2)=amax
7564 END IF
7565 vsize=vsize+2
7566!
7567! Broadcast data to all processes in the group, itself included.
7568!
7569 CALL mpi_bcast (vglobal, vsize, mp_float, mymaster, &
7570 & ocn_comm_world, myerror)
7571 IF (myerror.ne.mpi_success) THEN
7572 CALL mpi_error_string (myerror, string, lstr, serror)
7573 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, trim(string)
7574 10 FORMAT (/,' ROMS_SCATTER2D - error during ',a, &
7575 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
7576 exit_flag=2
7577 RETURN
7578 END IF
7579!
7580! Unpack data buffer and load into tiled array.
7581!
7582 DO j=jmin, jmax
7583 jc=(j-joff)*isize
7584 DO i=imin, imax
7585 ic=i+ioff+jc
7586 awrk(i,j)=vglobal(ic)
7587 END DO
7588 END DO
7589 amin=vglobal(vsize-1)
7590 amax=vglobal(vsize)
7591
7592# else
7593!
7594! Set tile range for non-overlapping tiles.
7595!
7596 ghost=0 ! non-overlapping
7597 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
7598 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
7599 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
7600 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
7601!
7602! If master node, set (i,j) indices map from global array to vector.
7603!
7604 IF (myrank.eq.mymaster) THEN
7605 allocate ( ij_global(io:ie,jo:je) )
7606!
7607 DO j=jo,je
7608 jc=(j-joff)*isize
7609 DO i=io,ie
7610 ij=i+ioff+jc
7611 ij_global(i,j)=ij
7612 END DO
7613 END DO
7614!
7615! Reorganize the input global vector so that each tile's data is
7616! contiguous in memory, as required by "SCATTERV" with different-size
7617! sections.
7618!
7619 nc=0
7620 DO rank=0,ntasks-1
7621 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
7622 iub=bounds(ng) % Imax(cgrid,ghost,rank)
7623 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
7624 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
7625 DO j=jlb,jub
7626 DO i=ilb,iub
7627 ij=ij_global(i,j)
7628 nc=nc+1
7629 vreset(nc)=vglobal(ij)
7630 END DO
7631 END DO
7632 END DO
7633 deallocate (ij_global)
7634 END IF
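!
! After this reordering, Vreset is simply the concatenation of the
! tile blocks,
!
! Vreset = [ tile 0 | tile 1 | ... | tile Ntasks-1 ],
!
! where the block for rank "r" starts at displs(r)+1 and holds
! counts(r) values, which is the layout that 'mpi_scatterv' expects.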
7635!
7636! Scatter global data to local tiled arrays.
7637!
7638 mysize=(imax-imin+1)*(jmax-jmin+1)
7639 allocate ( vrecv(mysize) )
7640 vrecv=0.0_r8
7641!
7642 CALL mpi_scatterv (vreset, counts, displs, mp_float, &
7643 & vrecv, mysize, mp_float, &
7644 & mymaster, ocn_comm_world, myerror)
7645 IF (myerror.ne.mpi_success) THEN
7646 CALL mpi_error_string (myerror, string, lstr, serror)
7647 WRITE (stdout,20) 'MPI_SCATTERV', myrank, myerror, &
7648 & trim(string)
7649 20 FORMAT (/,' MP_SCATTER2D - error during ',a, &
7650 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
7651 exit_flag=2
7652 RETURN
7653 END IF
7654!
7655! Unpack data buffer and load into tiled array
7656!
7657 nc=0
7658 DO j=jmin,jmax
7659 DO i=imin,imax
7660 nc=nc+1
7661 awrk(i,j)=vrecv(nc)
7662 END DO
7663 END DO
7664 deallocate ( vrecv )
7665!
7666! If requested, include halo exchanges.
7667!
7668 IF (nghost.gt.0) THEN
7669 CALL mp_exchange2d (ng, myrank, model, 1, &
7670 & lbi, ubi, lbj, ubj, &
7671 & nghostpoints, &
7672 & ewperiodic(ng), nsperiodic(ng), &
7673 & awrk)
7674 END IF
7675!
7676! Broadcast global Min/Max values to all tasks in the group since they
7677! are only known by root.
7678!
7679 astats(1)=amin
7680 astats(2)=amax
7681 mysize=2
7682!
7683 CALL mpi_bcast (astats, mysize, mp_float, mymaster, &
7684 & ocn_comm_world, myerror)
7685 IF (myerror.ne.mpi_success) THEN
7686 CALL mpi_error_string (myerror, string, lstr, serror)
7687 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, trim(string)
7688 30 FORMAT (/,' MP_SCATTER2D - error during ',a,' call, Task = ', &
7689 & i3.3, ' Error = ',i3,/,15x,a)
7690 exit_flag=2
7691 RETURN
7692 END IF
7693!
7694 amin=astats(1)
7695 amax=astats(2)
7696# endif
7697# ifdef PROFILE
7698!
7699!-----------------------------------------------------------------------
7700! Turn off time clocks.
7701!-----------------------------------------------------------------------
7702!
7703 CALL wclock_off (ng, model, 67, __line__, myfile)
7704# endif
7705!
7706 RETURN
7707 END SUBROUTINE mp_scatter2d
7708
7709# ifdef GRID_EXTRACT
7710!
7711 SUBROUTINE mp_scatter2d_xtr (ng, model, LBi, UBi, LBj, UBj, &
7712 & Nghost, gtype, Amin, Amax, &
7713# if defined READ_WATER && defined MASKING
7714 & NWpts, IJ_water, &
7715# endif
7716 & Npts, A, Awrk)
7717!
7718!***********************************************************************
7719! !
7720! This routine broadcasts input global data, packed as a 1D real !
7721! array, to each tiled array. Because this routine is also used by !
7722! the adjoint model, the ghost-points in the halo region are NOT !
7723! updated in the output tile array (Awrk). It is used by the master !
7724! node to scatter input global data to each tiled node. !
7725! !
7726! On Input: !
7727! !
7728! ng Nested grid number. !
7729! model Calling model identifier. !
7730! LBi I-dimension Lower bound. !
7731! UBi I-dimension Upper bound. !
7732! LBj J-dimension Lower bound. !
7733! UBj J-dimension Upper bound. !
7734! Nghost Number of ghost-points in the halo region. !
7735! gtype C-grid type. If negative and Land-Sea mask is !
7736! available, only water-points are processed. !
7737! Amin Input array minimum value. !
7738! Amax Input array maximum value. !
7739! NWpts Number of water points. !
7740! IJ_water IJ-indices for water points. !
7741! Npts Number of points to process in A. !
7742! A Input global data from each node packed into 1D array !
7743! in column-major order. That is, in the same way !
7744! that Fortran multi-dimensional arrays are stored !
7745! in memory. Only valid on root task, MyMaster. !
7746! !
7747! !
7748! On Output: !
7749! !
7750! Awrk 2D tiled, floating-point array. !
7751! !
7752!***********************************************************************
7753!
7754! Imported variable declarations.
7755!
7756 integer, intent(in) :: ng, model
7757 integer, intent(in) :: LBi, UBi, LBj, UBj
7758 integer, intent(in) :: Nghost, gtype, Npts
7759
7760# if defined READ_WATER && defined MASKING
7761 integer, intent(in) :: NWpts
7762 integer, intent(in) :: IJ_water(NWpts)
7763# endif
7764!
7765 real(r8), intent(inout) :: Amin, Amax
7766 real(r8), intent(inout) :: A(Npts+2)
7767 real(r8), intent(out) :: Awrk(LBi:UBi,LBj:UBj)
7768!
7769! Local variable declarations.
7770!
7771 integer :: Io, Ie, Jo, Je, Ioff, Joff
7772 integer :: Imin, Imax, Jmin, Jmax
7773 integer :: iLB, iUB, jLB, jUB
7774 integer :: Isize, Jsize, IJsize, Vsize
7775 integer :: Lstr, MyError, MySize, MyType, Ntasks, Serror, ghost
7776 integer :: Cgrid, i, ic, ij, j, jc, mc, nc, rank
7777!
7778 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
7779!
7780# ifndef SCATTER_BCAST
7781 integer, allocatable :: ij_global(:,:)
7782!
7783 real(r8) :: Astats(2)
7784 real(r8), allocatable :: Vrecv(:)
7785 real(r8), dimension(Npts) :: Vreset
7786# endif
7787 real(r8), dimension(Npts+2) :: Vglobal
7788!
7789 character (len=MPI_MAX_ERROR_STRING) :: string
7790
7791 character (len=*), parameter :: MyFile = &
7792 & __FILE__//", mp_scatter2d_xtr"
7793
7794# ifdef PROFILE
7795!
7796!-----------------------------------------------------------------------
7797! Turn on time clocks.
7798!-----------------------------------------------------------------------
7799!
7800 CALL wclock_on (ng, model, 67, __line__, myfile)
7801# endif
7802!
7803!-----------------------------------------------------------------------
7804! Set horizontal starting and ending indices for parallel domain
7805! partitions in the XI- and ETA-directions.
7806!-----------------------------------------------------------------------
7807!
7808! Maximum automatic buffer memory size in bytes.
7809!
7810 bmemmax(ng)=max(bmemmax(ng), real(SIZE(vglobal)*kind(a),r8))
7811!
7812! Set full grid first and last point according to staggered C-grid
7813! classification. Notice that the offsets are for the private array
7814! counter.
7815!
7816 mytype=abs(gtype)
7817
7818 SELECT CASE (mytype)
7819 CASE (p2dvar, p3dvar)
7820 io=xtr_iobounds(ng) % ILB_psi
7821 ie=xtr_iobounds(ng) % IUB_psi
7822 jo=xtr_iobounds(ng) % JLB_psi
7823 je=xtr_iobounds(ng) % JUB_psi
7824 ioff=0
7825 joff=1
7826 CASE (r2dvar, r3dvar)
7827 io=xtr_iobounds(ng) % ILB_rho
7828 ie=xtr_iobounds(ng) % IUB_rho
7829 jo=xtr_iobounds(ng) % JLB_rho
7830 je=xtr_iobounds(ng) % JUB_rho
7831 ioff=1
7832 joff=0
7833 CASE (u2dvar, u3dvar)
7834 io=xtr_iobounds(ng) % ILB_u
7835 ie=xtr_iobounds(ng) % IUB_u
7836 jo=xtr_iobounds(ng) % JLB_u
7837 je=xtr_iobounds(ng) % JUB_u
7838 ioff=0
7839 joff=0
7840 CASE (v2dvar, v3dvar)
7841 io=xtr_iobounds(ng) % ILB_v
7842 ie=xtr_iobounds(ng) % IUB_v
7843 jo=xtr_iobounds(ng) % JLB_v
7844 je=xtr_iobounds(ng) % JUB_v
7845 ioff=1
7846 joff=1
7847 CASE DEFAULT ! RHO-points
7848 io=xtr_iobounds(ng) % ILB_rho
7849 ie=xtr_iobounds(ng) % IUB_rho
7850 jo=xtr_iobounds(ng) % JLB_rho
7851 je=xtr_iobounds(ng) % JUB_rho
7852 ioff=1
7853 joff=0
7854 END SELECT
7855!
7856 isize=ie-io+1
7857 jsize=je-jo+1
7858 ijsize=isize*jsize
7859!
7860! Set scatter counts and displacement vectors. Non-overlapping
7861! (ghost=0) tile ranges must be used here for 'mpi_scatterv' to work.
7862!
7863 ghost=0 ! non-overlapping
7864!
7865 SELECT CASE (mytype)
7866 CASE (p2dvar, p3dvar)
7867 cgrid=1
7868 CASE (r2dvar, r3dvar)
7869 cgrid=2
7870 CASE (u2dvar, u3dvar)
7871 cgrid=3
7872 CASE (v2dvar, v3dvar)
7873 cgrid=4
7874 CASE DEFAULT ! RHO-points
7875 cgrid=2
7876 END SELECT
7877!
7878 ntasks=ntilei(ng)*ntilej(ng)
7879 nc=0
7880 DO rank=0,ntasks-1
7881 ilb=xtr_bounds(ng) % Imin(cgrid,ghost,rank)
7882 iub=xtr_bounds(ng) % Imax(cgrid,ghost,rank)
7883 jlb=xtr_bounds(ng) % Jmin(cgrid,ghost,rank)
7884 jub=xtr_bounds(ng) % Jmax(cgrid,ghost,rank)
7885 displs(rank)=nc
7886 DO j=jlb,jub
7887 DO i=ilb,iub
7888 nc=nc+1
7889 END DO
7890 END DO
7891 counts(rank)=nc-displs(rank)
7892 END DO
7893!
7894! Load global data into send buffer, Vglobal, which is only known by
7895! the root process at this point. If the input data contains only
7896! water points, fill the land points with zeros.
7897!
7898 vglobal=0.0_r8
7899 vsize=npts
7900!
7901 IF (myrank.eq.mymaster) Then
7902 IF (gtype.gt.0) THEN
7903 vglobal(1:vsize)=a(1:vsize)
7904# if defined READ_WATER && defined MASKING
7905 ELSE
7906 ij=0
7907 mc=0
7908 nc=0
7909 DO j=jo,je
7910 jc=(j-joff)*isize
7911 DO i=io,ie
7912 ij=ij+1
7913 ic=i+ioff+jc
7914 IF (ij_water(mc+1).eq.ij) THEN
7915 mc=mc+1
7916 nc=nc+1
7917 vglobal(ic)=a(nc)
7918 ELSE
7919 vglobal(ic)=0.0_r8
7920 END IF
7921 END DO
7922 END DO
7923 vsize=ic
7924# endif
7925 END IF
7926 END IF
7927!
7928!-----------------------------------------------------------------------
7929! Scatter requested global data.
7930!-----------------------------------------------------------------------
7931
7932# ifdef SCATTER_BCAST
7933!
7934! Set tile range to include overlapping halos, if requested.
7935!
7936 IF (nghost.eq.0) THEN
7937 ghost=0 ! non-overlapping
7938 ELSE
7939 ghost=1 ! overlapping
7940 END IF
7941 imin=xtr_bounds(ng) % Imin(cgrid,ghost,myrank)
7942 imax=xtr_bounds(ng) % Imax(cgrid,ghost,myrank)
7943 jmin=xtr_bounds(ng) % Jmin(cgrid,ghost,myrank)
7944 jmax=xtr_bounds(ng) % Jmax(cgrid,ghost,myrank)
7945!
7946! Append Min/Max values since they are only known by root process.
7947!
7948 IF (myrank.eq.mymaster) Then
7949 vglobal(vsize+1)=amin
7950 vglobal(vsize+2)=amax
7951 END IF
7952 vsize=vsize+2
7953!
7954! Broadcast data to all processes in the group, itself included.
7955!
7956 CALL mpi_bcast (vglobal, vsize, mp_float, mymaster, &
7957 & ocn_comm_world, myerror)
7958 IF (myerror.ne.mpi_success) THEN
7959 CALL mpi_error_string (myerror, string, lstr, serror)
7960 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, trim(string)
7961 10 FORMAT (/,' MP_SCATTER2D_XTR - error during ',a, &
7962 & ' call, Task = ',i3.3, ' Error = ',i3,/,15x,a)
7963 exit_flag=2
7964 RETURN
7965 END IF
7966!
7967! Unpack data buffer and load into tiled array.
7968!
7969 DO j=jmin, jmax
7970 jc=(j-joff)*isize
7971 DO i=imin, imax
7972 ic=i+ioff+jc
7973 awrk(i,j)=vglobal(ic)
7974 END DO
7975 END DO
7976 amin=vglobal(vsize-1)
7977 amax=vglobal(vsize)
7978
7979# else
7980!
7981! Set tile range for non-overlapping tiles.
7982!
7983 ghost=0 ! non-overlapping
7984 imin=xtr_bounds(ng) % Imin(cgrid,ghost,myrank)
7985 imax=xtr_bounds(ng) % Imax(cgrid,ghost,myrank)
7986 jmin=xtr_bounds(ng) % Jmin(cgrid,ghost,myrank)
7987 jmax=xtr_bounds(ng) % Jmax(cgrid,ghost,myrank)
7988!
7989! If master node, set (i,j) indices map from global array to vector.
7990!
7991 IF (myrank.eq.mymaster) THEN
7992 allocate ( ij_global(io:ie,jo:je) )
7993!
7994 DO j=jo,je
7995 jc=(j-joff)*isize
7996 DO i=io,ie
7997 ij=i+ioff+jc
7998 ij_global(i,j)=ij
7999 END DO
8000 END DO
8001!
8002! Reorganize the input global vector so that each tile's data is
8003! contiguous in memory, as required by "SCATTERV" with different-size
8004! sections.
8005!
8006 nc=0
8007 DO rank=0,ntasks-1
8008 ilb=xtr_bounds(ng) % Imin(cgrid,ghost,rank)
8009 iub=xtr_bounds(ng) % Imax(cgrid,ghost,rank)
8010 jlb=xtr_bounds(ng) % Jmin(cgrid,ghost,rank)
8011 jub=xtr_bounds(ng) % Jmax(cgrid,ghost,rank)
8012 DO j=jlb,jub
8013 DO i=ilb,iub
8014 ij=ij_global(i,j)
8015 nc=nc+1
8016 vreset(nc)=vglobal(ij)
8017 END DO
8018 END DO
8019 END DO
8020 deallocate (ij_global)
8021 END IF
8022!
8023! Scatter global data to local tiled arrays.
8024!
8025 mysize=(imax-imin+1)*(jmax-jmin+1)
8026 allocate ( vrecv(mysize) )
8027 vrecv=0.0_r8
8028!
8029 CALL mpi_scatterv (vreset, counts, displs, mp_float, &
8030 & vrecv, mysize, mp_float, &
8031 & mymaster, ocn_comm_world, myerror)
8032 IF (myerror.ne.mpi_success) THEN
8033 CALL mpi_error_string (myerror, string, lstr, serror)
8034 WRITE (stdout,20) 'MPI_SCATTERV', myrank, myerror, &
8035 & trim(string)
8036 20 FORMAT (/,' MP_SCATTER2D_XTR - error during ',a, &
8037 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
8038 exit_flag=2
8039 RETURN
8040 END IF
8041!
8042! Unpack data buffer and load into tiled array
8043!
8044 nc=0
8045 DO j=jmin,jmax
8046 DO i=imin,imax
8047 nc=nc+1
8048 awrk(i,j)=vrecv(nc)
8049 END DO
8050 END DO
8051 deallocate ( vrecv )
8052!
8053! Broadcast global Min/Max values to all tasks in the group since they
8054! are only known by root.
8055!
8056 astats(1)=amin
8057 astats(2)=amax
8058 mysize=2
8059!
8060 CALL mpi_bcast (astats, mysize, mp_float, mymaster, &
8061 & ocn_comm_world, myerror)
8062 IF (myerror.ne.mpi_success) THEN
8063 CALL mpi_error_string (myerror, string, lstr, serror)
8064 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, trim(string)
8065 30 FORMAT (/,' MP_SCATTER2D_XTR - error during ',a, &
8066 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
8067 exit_flag=2
8068 RETURN
8069 END IF
8070!
8071 amin=astats(1)
8072 amax=astats(2)
8073# endif
8074# ifdef PROFILE
8075!
8076!-----------------------------------------------------------------------
8077! Turn off time clocks.
8078!-----------------------------------------------------------------------
8079!
8080 CALL wclock_off (ng, model, 67, __line__, myfile)
8081# endif
8082!
8083 RETURN
8084 END SUBROUTINE mp_scatter2d_xtr
8085# endif
8086!
8087 SUBROUTINE mp_scatter3d (ng, model, LBi, UBi, LBj, UBj, LBk, UBk, &
8088 & Nghost, gtype, Amin, Amax, &
8089# if defined READ_WATER && defined MASKING
8090 & NWpts, IJ_water, &
8091# endif
8092 & Npts, A, Awrk)
8093!
8094!***********************************************************************
8095! !
8096! This routine broadcasts input global data, packed as a 1D real !
8097! array, to each tiled array. Because this routine is also used by !
8098! the adjoint model, the ghost-points in the halo region are NOT !
8099! updated in the output tile array (Awrk). It is used by the master !
8100! node to scatter input global data to each tiled node. !
8101! !
8102! On Input: !
8103! !
8104! ng Nested grid number. !
8105! model Calling model identifier. !
8106! LBi I-dimension Lower bound. !
8107! UBi I-dimension Upper bound. !
8108! LBj J-dimension Lower bound. !
8109! UBj J-dimension Upper bound. !
8110! LBk K-dimension Lower bound. !
8111! UBk K-dimension Upper bound. !
8112! Nghost Number of ghost-points in the halo region. !
8113! gtype C-grid type. If negative and Land-Sea mask is !
8114! available, only water-points are processed. !
8115! Amin Input array minimum value. !
8116! Amax Input array maximum value. !
8117! NWpts Number of water points. !
8118! IJ_water IJ-indices for water points. !
8119! Npts Number of points to process in A. !
8120! A Input global data from each node packed into 1D array !
8121! in column-major order. That is, in the same way !
8122! that Fortran multi-dimensional arrays are stored !
8123! in memory. Only valid on root task, MyMaster. !
8124! !
8125! !
8126! On Output: !
8127! !
8128! Awrk 3D tiled, floating-point array. !
8129! !
8130!***********************************************************************
8131!
8132! Imported variable declarations.
8133!
8134 integer, intent(in) :: ng, model
8135 integer, intent(in) :: LBi, UBi, LBj, UBj, LBk, UBk
8136 integer, intent(in) :: Nghost, gtype, Npts
8137
8138# if defined READ_WATER && defined MASKING
8139 integer, intent(in) :: NWpts
8140 integer, intent(in) :: IJ_water(NWpts)
8141# endif
8142!
8143 real(r8), intent(inout) :: Amin, Amax
8144 real(r8), intent(inout) :: A(Npts+2)
8145 real(r8), intent(out) :: Awrk(LBi:UBi,LBj:UBj,LBk:UBk)
8146!
8147! Local variable declarations.
8148!
8149 integer :: Io, Ie, Jo, Je, Ioff, Joff, Koff
8150 integer :: Imin, Imax, Jmin, Jmax
8151 integer :: iLB, iUB, jLB, jUB
8152 integer :: Isize, Jsize, Ksize, IJsize, Vsize, Vsize2d
8153 integer :: Lstr, MyError, MySize, MyType, Ntasks, Serror, ghost
8154 integer :: Cgrid, i, ic, ij, ijk, j, jc, k, kc, mc, nc, rank
8155!
8156 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
8157!
8158# ifndef SCATTER_BCAST
8159 integer, allocatable :: ijk_global(:,:,:)
8160!
8161 real(r8) :: Astats(2)
8162 real(r8), allocatable :: Vrecv(:)
8163 real(r8), dimension(Npts) :: Vreset
8164# endif
8165 real(r8), dimension(Npts+2) :: Vglobal
8166!
8167 character (len=10) :: MyMethod
8168 character (len=MPI_MAX_ERROR_STRING) :: string
8169
8170 character (len=*), parameter :: MyFile = &
8171 & __FILE__//", mp_scatter3d"
8172
8173# ifdef PROFILE
8174!
8175!-----------------------------------------------------------------------
8176! Turn on time clocks.
8177!-----------------------------------------------------------------------
8178!
8179 CALL wclock_on (ng, model, 67, __line__, myfile)
8180# endif
8181!
8182!-----------------------------------------------------------------------
8183! Set horizontal starting and ending indices for parallel domain
8184! partitions in the XI- and ETA-directions.
8185!-----------------------------------------------------------------------
8186!
8187! Maximum automatic buffer memory size in bytes.
8188!
8189 bmemmax(ng)=max(bmemmax(ng), real(SIZE(vglobal)*kind(a),r8))
8190!
8191! Set full grid first and last point according to staggered C-grid
8192! classification. Notice that the offsets are for the private array
8193! counters.
8194!
8195 mytype=abs(gtype)
8196
8197 SELECT CASE (mytype)
8198 CASE (p2dvar, p3dvar)
8199 io=iobounds(ng) % ILB_psi
8200 ie=iobounds(ng) % IUB_psi
8201 jo=iobounds(ng) % JLB_psi
8202 je=iobounds(ng) % JUB_psi
8203 ioff=0
8204 joff=1
8205 CASE (r2dvar, r3dvar)
8206 io=iobounds(ng) % ILB_rho
8207 ie=iobounds(ng) % IUB_rho
8208 jo=iobounds(ng) % JLB_rho
8209 je=iobounds(ng) % JUB_rho
8210 ioff=1
8211 joff=0
8212 CASE (u2dvar, u3dvar)
8213 io=iobounds(ng) % ILB_u
8214 ie=iobounds(ng) % IUB_u
8215 jo=iobounds(ng) % JLB_u
8216 je=iobounds(ng) % JUB_u
8217 ioff=0
8218 joff=0
8219 CASE (v2dvar, v3dvar)
8220 io=iobounds(ng) % ILB_v
8221 ie=iobounds(ng) % IUB_v
8222 jo=iobounds(ng) % JLB_v
8223 je=iobounds(ng) % JUB_v
8224 ioff=1
8225 joff=1
8226 CASE DEFAULT ! RHO-points
8227 io=iobounds(ng) % ILB_rho
8228 ie=iobounds(ng) % IUB_rho
8229 jo=iobounds(ng) % JLB_rho
8230 je=iobounds(ng) % JUB_rho
8231 ioff=1
8232 joff=0
8233 END SELECT
8234
8235 IF (lbk.eq.0) THEN
8236 koff=0
8237 ELSE
8238 koff=1
8239 END IF
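!
! Koff aligns the vertical index with the packed vector: the first
! level (k=LBk, which is 0 for arrays whose lower bound is zero, such
! as fields defined at vertical W-points, and 1 otherwise) maps to
! kc=0, and each subsequent level advances the linear index by IJsize.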
8240
8241 isize=ie-io+1
8242 jsize=je-jo+1
8243 ksize=ubk-lbk+1
8244 ijsize=isize*jsize
8245!
8246! Set scatter counts and displacement vectors. Non-overlapping
8247! (ghost=0) tile ranges must be used here for 'mpi_scatterv' to
8248! work correctly.
8249!
8250 ghost=0 ! non-overlapping
8251!
8252 SELECT CASE (mytype)
8253 CASE (p2dvar, p3dvar)
8254 cgrid=1
8255 CASE (r2dvar, r3dvar)
8256 cgrid=2
8257 CASE (u2dvar, u3dvar)
8258 cgrid=3
8259 CASE (v2dvar, v3dvar)
8260 cgrid=4
8261 CASE DEFAULT ! RHO-points
8262 cgrid=2
8263 END SELECT
8264!
8265 ntasks=ntilei(ng)*ntilej(ng)
8266 nc=0
8267 DO rank=0,ntasks-1
8268 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
8269 iub=bounds(ng) % Imax(cgrid,ghost,rank)
8270 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
8271 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
8272 displs(rank)=nc
8273 DO k=lbk,ubk
8274 DO j=jlb,jub
8275 DO i=ilb,iub
8276 nc=nc+1
8277 END DO
8278 END DO
8279 END DO
8280 counts(rank)=nc-displs(rank)
8281 END DO
8282!
8283! Load global data into send buffer, Vglobal, which is only known by
8284! the root process at this point. If the input data contains only
8285! water points, fill the land points with zeros.
8286!
8287 vglobal=0.0_r8
8288 vsize=npts
8289!
8290 IF (myrank.eq.mymaster) Then
8291 IF (gtype.gt.0) THEN
8292 vglobal(1:vsize)=a(1:vsize)
8293# if defined READ_WATER && defined MASKING
8294 ELSE
8295 nc=0
8296 DO k=lbk,ubk
8297 kc=(k-koff)*ijsize
8298 ij=0
8299 mc=0
8300 DO j=jo,je
8301 jc=(j-joff)*isize
8302 DO i=io,ie
8303 ij=ij+1
8304 ic=i+ioff+jc+kc
8305 IF (ij_water(mc+1).eq.ij) THEN
8306 mc=mc+1
8307 nc=nc+1
8308 vglobal(ic)=a(nc)
8309 ELSE
8310 vglobal(ic)=0.0_r8
8311 END IF
8312 END DO
8313 END DO
8314 END DO
8315 vsize=ic
8316# endif
8317 END IF
8318 END IF
8319!
8320!-----------------------------------------------------------------------
8321! Scatter requested array data.
8322!-----------------------------------------------------------------------
8323
8324# ifdef SCATTER_BCAST
8325!
8326! Set tile range to include overlapping halos, if requested.
8327!
8328 IF (nghost.eq.0) THEN
8329 ghost=0 ! non-overlapping
8330 ELSE
8331 ghost=1 ! overlapping
8332 END IF
8333 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
8334 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
8335 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
8336 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
8337!
8338! Append Min/Max values since they are only known by root.
8339!
8340 IF (myrank.eq.mymaster) Then
8341 vglobal(vsize+1)=amin
8342 vglobal(vsize+2)=amax
8343 END IF
8344 vsize=vsize+2
8345!
8346! Broadcast data to all processes in the group, itself included.
8347!
8348 CALL mpi_bcast (vglobal, vsize, mp_float, mymaster, &
8349 & ocn_comm_world, myerror)
8350 IF (myerror.ne.mpi_success) THEN
8351 CALL mpi_error_string (myerror, string, lstr, serror)
8352 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, trim(string)
8353 10 FORMAT (/,' MP_SCATTER3D - error during ',a, &
8354 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
8355 exit_flag=2
8356 RETURN
8357 END IF
8358!
8359! Unpack data buffer and load into tiled array.
8360!
8361 DO k=lbk,ubk
8362 kc=(k-koff)*isize*jsize
8363 DO j=jmin,jmax
8364 jc=(j-joff)*isize
8365 DO i=imin,imax
8366 ic=i+ioff+jc+kc
8367 awrk(i,j,k)=vglobal(ic)
8368 END DO
8369 END DO
8370 END DO
8371 amin=vglobal(vsize-1)
8372 amax=vglobal(vsize)
8373
8374# else
8375!
8376! Set tile range for non-overlapping tiles.
8377!
8378 ghost=0 ! non-overlapping
8379 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
8380 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
8381 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
8382 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
8383!
8384! If master node, set (i,j,k) indices map from global array to vector.
8385!
8386 IF (myrank.eq.mymaster) THEN
8387 allocate ( ijk_global(io:ie,jo:je,lbk:ubk) )
8388!
8389 DO k=lbk,ubk
8390 kc=(k-koff)*ijsize
8391 DO j=jo,je
8392 jc=(j-joff)*isize
8393 DO i=io,ie
8394 ijk=i+ioff+jc+kc
8395 ijk_global(i,j,k)=ijk
8396 END DO
8397 END DO
8398 END DO
8399!
8400! Reorganize the input global vector so that each tile's data is
8401! contiguous in memory, as required by "SCATTERV" with different-size
8402! sections.
8403!
8404 nc=0
8405 DO rank=0,ntasks-1
8406 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
8407 iub=bounds(ng) % Imax(cgrid,ghost,rank)
8408 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
8409 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
8410 DO k=lbk,ubk
8411 DO j=jlb,jub
8412 DO i=ilb,iub
8413 ijk=ijk_global(i,j,k)
8414 nc=nc+1
8415 vreset(nc)=vglobal(ijk)
8416 END DO
8417 END DO
8418 END DO
8419 END DO
8420 deallocate (ijk_global)
8421 END IF
8422!
8423! Distribute global data into local tiled arrays.
8424!
8425 mysize=(imax-imin+1)*(jmax-jmin+1)*(ubk-lbk+1)
8426 allocate ( vrecv(mysize) )
8427 vrecv=0.0_r8
8428!
8429 CALL mpi_scatterv (vreset, counts, displs, mp_float, &
8430 & vrecv, mysize, mp_float, &
8431 & mymaster, ocn_comm_world, myerror)
8432 IF (myerror.ne.mpi_success) THEN
8433 CALL mpi_error_string (myerror, string, lstr, serror)
8434 WRITE (stdout,20) 'MPI_SCATTERV', myrank, myerror, &
8435 & trim(string)
8436 20 FORMAT (/,' MP_SCATTER3D - error during ',a, &
8437 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
8438 exit_flag=2
8439 RETURN
8440 END IF
8441!
8442! Unpack data buffer and load into tiled array
8443!
8444 nc=0
8445 DO k=lbk,ubk
8446 DO j=jmin,jmax
8447 DO i=imin,imax
8448 nc=nc+1
8449 awrk(i,j,k)=vrecv(nc)
8450 END DO
8451 END DO
8452 END DO
8453 deallocate ( vrecv )
8454!
8455! If requested, include halo exchanges.
8456!
8457 IF (nghost.gt.0) THEN
8458 CALL mp_exchange3d (ng, myrank, model, 1, &
8459 & lbi, ubi, lbj, ubj, lbk, ubk, &
8460 & nghostpoints, &
8461 & ewperiodic(ng), nsperiodic(ng), &
8462 & awrk)
8463 END IF
8464!
8465! Broadcast global Min/Max values to all tasks in the group since they
8466! are only known by root.
8467!
8468 astats(1)=amin
8469 astats(2)=amax
8470 mysize=2
8471!
8472 CALL mpi_bcast (astats, mysize, mp_float, mymaster, &
8473 & ocn_comm_world, myerror)
8474 IF (myerror.ne.mpi_success) THEN
8475 CALL mpi_error_string (myerror, string, lstr, serror)
8476 lstr=len_trim(string)
8477 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, trim(string)
8478 30 FORMAT (/,' MP_SCATTER3D - error during ',a,' call, Task = ', &
8479 & i3.3, ' Error = ',i3,/,15x,a)
8480 exit_flag=2
8481 RETURN
8482 END IF
8483!
8484 amin=astats(1)
8485 amax=astats(2)
8486# endif
8487# ifdef PROFILE
8488!
8489!-----------------------------------------------------------------------
8490! Turn off time clocks.
8491!-----------------------------------------------------------------------
8492!
8493 CALL wclock_off (ng, model, 67, __line__, myfile)
8494# endif
8495!
8496 RETURN
8497 END SUBROUTINE mp_scatter3d
8498!
8499 SUBROUTINE mp_scatter_state (ng, model, Mstr, Mend, Asize, &
8500 & A, Awrk)
8501!
8502!***********************************************************************
8503! !
8504! This routine scatters (global to threaded) state data to all nodes !
8505! in the group. Before this can be done, the global data needs to be !
8506! collected from all the nodes by the master. This is achieved by !
8507! summing the input values at each point. This routine is used to !
8508! pack the state data for the GST analysis propagators. !
8509! !
8510! On Input: !
8511! !
8512! ng Nested grid number. !
8513! model Calling model identifier. !
8514! Mstr Threaded array lower bound. !
8515! Mend Threaded array upper bound. !
8516! Asize Size of array A. !
8517! A Threaded 1D array to process. !
8518! !
8519! On Output: !
8520! !
8521! A Collected data from all nodes. !
8522! Awrk Threaded block of data. !
8523! !
8524!***********************************************************************
8525!
8526! Imported variable declarations.
8527!
8528 integer, intent(in) :: ng, model
8529 integer, intent(in) :: Mstr, Mend, Asize
8530!
8531 real(r8), intent(inout) :: A(Asize)
8532
8533 real(r8), intent(out) :: Awrk(Mstr:Mend)
8534!
8535! Local variable declarations.
8536!
8537 integer :: Lstr, MyError, Serror
8538 integer :: i, rank, request
8539
8540 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
8541
8542 integer, dimension(MPI_STATUS_SIZE) :: status
8543!
8544 real(r8), allocatable :: Arecv(:)
8545!
8546 character (len=MPI_MAX_ERROR_STRING) :: string
8547
8548 character (len=*), parameter :: MyFile = &
8549 & __FILE__//", mp_scatter_state"
8550
8551# ifdef PROFILE
8552!
8553!-----------------------------------------------------------------------
8554! Turn on time clocks.
8555!-----------------------------------------------------------------------
8556!
8557 CALL wclock_on (ng, model, 67, __line__, myfile)
8558# endif
8559!
8560!-----------------------------------------------------------------------
8561! Collect data blocks from all nodes and scatter the data to all nodes.
8562!-----------------------------------------------------------------------
8563!
8564! Maximum automatic buffer memory size in bytes.
8565!
8566 bmemmax(ng)=max(bmemmax(ng), real(asize*kind(a),r8))
8567!
8568! All nodes have distinct pieces of the data and zero everywhere else.
8569! So the strategy here is for the master node to receive the data from
8570! the other nodes (excluding itself) and accumulate the sum at each
8571! point. Then, the master node broadcasts (itself included) its copy
8572! of the accumulated data to the other nodes in the group. After this,
8573! each node loads only the required block of data into the output array.
8574!
8575! Notice that only the master node allocates the receiving buffer
8576! (Arecv). It also receives only one buffer at a time to avoid having
8577! a very large communication array. So here memory is more important
8578! than time.
8579!
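! For illustration, with three nodes and Asize=4 the inputs might be
!
! rank 0: (/a1, a2, 0 , 0 /)
! rank 1: (/0 , 0 , a3, 0 /)
! rank 2: (/0 , 0 , 0 , a4/)
!
! Summing them on the master and broadcasting the result gives every
! node the full vector (/a1, a2, a3, a4/), from which each node copies
! only its Mstr:Mend block into Awrk.
!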
8580 IF (myrank.eq.mymaster) THEN
8581!
8582! If master node, allocate and receive buffer.
8583!
8584 IF (.not.allocated(arecv)) THEN
8585 allocate (arecv(asize))
8586 END IF
8587!
8588! If master node, loop over other nodes to receive and accumulate the
8589! data.
8590!
8591 DO rank=1,ntilei(ng)*ntilej(ng)-1
8592 CALL mpi_irecv (arecv, asize, mp_float, rank, rank+5, &
8593 & ocn_comm_world, rrequest(rank), myerror)
8594 CALL mpi_wait (rrequest(rank), status, myerror)
8595 IF (myerror.ne.mpi_success) THEN
8596 CALL mpi_error_string (myerror, string, lstr, serror)
8597 lstr=len_trim(string)
8598 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
8599 10 FORMAT (/,' MP_SCATTER_STATE - error during ',a, &
8600 & ' call, Task = ', i3.3,' Error = ',i3,/,13x,a)
8601 exit_flag=2
8602 RETURN
8603 END IF
8604 DO i=1,asize
8605 a(i)=a(i)+arecv(i)
8606 END DO
8607 END DO
8608!
8609! Otherwise, send data to master node.
8610!
8611 ELSE
8612 CALL mpi_isend (a, asize, mp_float, mymaster, myrank+5, &
8613 & ocn_comm_world, request, myerror)
8614 CALL mpi_wait (request, status, myerror)
8615 IF (myerror.ne.mpi_success) THEN
8616 CALL mpi_error_string (myerror, string, lstr, serror)
8617 lstr=len_trim(string)
8618 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
8619 exit_flag=2
8620 RETURN
8621 END IF
8622 END IF
8623!
8624! Broadcast accumulated (full) data to all nodes.
8625!
8626 CALL mpi_bcast (a, asize, mp_float, mymaster, ocn_comm_world, &
8627 & myerror)
8628 IF (myerror.ne.mpi_success) THEN
8629 CALL mpi_error_string (myerror, string, lstr, serror)
8630 lstr=len_trim(string)
8631 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
8632 exit_flag=2
8633 RETURN
8634 END IF
8635!
8636! Load appropriate data block into output array.
8637!
8638 DO i=mstr,mend
8639 awrk(i)=a(i)
8640 END DO
8641!
8642! Deallocate receive buffer.
8643!
8644 IF (allocated(arecv).and.(myrank.eq.mymaster)) THEN
8645 deallocate (arecv)
8646 END IF
8647
8648# ifdef PROFILE
8649!
8650!-----------------------------------------------------------------------
8651! Turn off time clocks.
8652!-----------------------------------------------------------------------
8653!
8654 CALL wclock_off (ng, model, 67, __line__, myfile)
8655# endif
8656!
8657 RETURN
8658 END SUBROUTINE mp_scatter_state
8659!
8660 SUBROUTINE mp_aggregate2d (ng, model, gtype, &
8661 & LBiT, UBiT, LBjT, UBjT, &
8662 & LBiG, UBiG, LBjG, UBjG, &
8663 & Atiled, Aglobal)
8664!
8665!***********************************************************************
8666! !
8667! This routine collects a 2D tiled, floating-point array from each !
8668! spawned node and stores it into 2D global array. If nesting, the !
8669! global array contains the contact points data. !
8670! !
8671! On Input: !
8672! !
8673! ng Nested grid number. !
8674! model Calling model identifier. !
8675! gtype C-grid type. !
8676! LBiT Tiled array, I-dimension Lower bound. !
8677! UBiT Tiled array, I-dimension Upper bound. !
8678! LBjT Tiled array, J-dimension Lower bound. !
8679! UBjT Tiled array, J-dimension Upper bound. !
8680! LBiG Global array, I-dimension Lower bound. !
8681! UBiG Global array, I-dimension Upper bound. !
8682! LBjG Global array, J-dimension Lower bound. !
8683! UBjG Global array, J-dimension Upper bound. !
8684! Atiled 2D tiled, floating-point array to process. !
8685! !
8686! On Output: !
8687! !
8688! Aglobal 2D global array, all tiles are aggregated. !
8689! !
8690!***********************************************************************
8691!
8692! Imported variable declarations.
8693!
8694 integer, intent(in) :: ng, model, gtype
8695 integer, intent(in) :: LBiT, UBiT, LBjT, UBjT
8696 integer, intent(in) :: LBiG, UBiG, LBjG, UBjG
8697!
8698 real(r8), intent(in) :: Atiled(LBiT:UBiT,LBjT:UBjT)
8699 real(r8), intent(out) :: Aglobal(LBiG:UBiG,LBjG:UBjG)
8700!
8701! Local variable declarations.
8702!
8703 integer :: Lstr, MyError, MyType, Nnodes, Npts, Serror
8704 integer :: i, j, np, rank
8705
8706 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: my_bounds
8707!
8708 real(r8), dimension(TileSize(ng)) :: Asend
8709 real(r8), dimension(TileSize(ng)*NtileI(ng)*NtileJ(ng)) :: Arecv
8710!
8711 character (len=MPI_MAX_ERROR_STRING) :: string
8712
8713 character (len=*), parameter :: MyFile = &
8714 & __FILE__//", mp_aggregate2d"
8715
8716# ifdef PROFILE
8717!
8718!-----------------------------------------------------------------------
8719! Turn on time clocks.
8720!-----------------------------------------------------------------------
8721!
8722 CALL wclock_on (ng, model, 71, __line__, myfile)
8723# endif
8724!
8725!-----------------------------------------------------------------------
8726! Set horizontal starting and ending indices for parallel domain
8727! partitions in the XI- and ETA-directions.
8728!-----------------------------------------------------------------------
8729!
8730! Maximum automatic buffer memory size in bytes.
8731!
8732 bmemmax(ng)=max(bmemmax(ng), real((SIZE(asend)+ &
8733 & SIZE(aglobal)+ &
8734 & SIZE(arecv))*kind(asend),r8))
8735!
8736! Rank of the last node in the group (number of nodes minus one).
8737!
8738 nnodes=ntilei(ng)*ntilej(ng)-1
8739!
8740! Set starting and ending indices to process including contact points
8741! (if nesting) according to the staggered C-grid classification.
8742!
8743 mytype=abs(gtype)
8744
8745 SELECT CASE (mytype)
8746 CASE (p2dvar, p3dvar)
8747 DO rank=0,nnodes
8748 my_bounds(1,rank)=bounds(ng) % IstrP(rank)
8749 my_bounds(2,rank)=bounds(ng) % IendP(rank)
8750 my_bounds(3,rank)=bounds(ng) % JstrP(rank)
8751 my_bounds(4,rank)=bounds(ng) % JendP(rank)
8752 END DO
8753 CASE (r2dvar, r3dvar)
8754 DO rank=0,nnodes
8755 my_bounds(1,rank)=bounds(ng) % IstrT(rank)
8756 my_bounds(2,rank)=bounds(ng) % IendT(rank)
8757 my_bounds(3,rank)=bounds(ng) % JstrT(rank)
8758 my_bounds(4,rank)=bounds(ng) % JendT(rank)
8759 END DO
8760 CASE (u2dvar, u3dvar)
8761 DO rank=0,nnodes
8762 my_bounds(1,rank)=bounds(ng) % IstrP(rank)
8763 my_bounds(2,rank)=bounds(ng) % IendP(rank)
8764 my_bounds(3,rank)=bounds(ng) % JstrT(rank)
8765 my_bounds(4,rank)=bounds(ng) % JendT(rank)
8766 END DO
8767 CASE (v2dvar, v3dvar)
8768 DO rank=0,nnodes
8769 my_bounds(1,rank)=bounds(ng) % IstrT(rank)
8770 my_bounds(2,rank)=bounds(ng) % IendT(rank)
8771 my_bounds(3,rank)=bounds(ng) % JstrP(rank)
8772 my_bounds(4,rank)=bounds(ng) % JendP(rank)
8773 END DO
8774 END SELECT
8775!
8776! Determine the maximum number of points to process among all tiles.
8777! In this collective communication, every task must send and receive
8778! the same amount of data, so buffers are padded to the maximum size.
8779!
8780 npts=0
8781 DO rank=0,nnodes
8782 np=(my_bounds(2,rank)-my_bounds(1,rank)+1)* &
8783 & (my_bounds(4,rank)-my_bounds(3,rank)+1)
8784 npts=max(npts, np)
8785 END DO
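!
! Because 'mpi_allgather' below uses the same count (Npts) for every
! rank, tiles smaller than the largest one are implicitly zero-padded
! in Asend; the unpacking loop ignores the padding since it reads only
! the first tile-sized portion of each rank*Npts block of Arecv.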
8786
8787 IF (npts.gt.tilesize(ng)) THEN
8788 IF (master) THEN
8789 WRITE (stdout,10) ' TileSize = ', tilesize(ng), npts
8790 10 FORMAT (/,' MP_AGGREGATE2D - communication buffer too small,', &
8791 & a, 2i8)
8792 END IF
8793 exit_flag=5
8794 RETURN
8795 END IF
8796!
8797! Initialize local arrays to facilitate collective communications.
8798! This also avoids denormalized values, which facilitates debugging.
8799!
8800 asend=0.0_r8
8801 arecv=0.0_r8
8802!
8803!-----------------------------------------------------------------------
8804! Pack tile data.
8805!-----------------------------------------------------------------------
8806!
8807 np=0
8808 DO j=my_bounds(3,myrank),my_bounds(4,myrank)
8809 DO i=my_bounds(1,myrank),my_bounds(2,myrank)
8810 np=np+1
8811 asend(np)=atiled(i,j)
8812 END DO
8813 END DO
8814!
8815!-----------------------------------------------------------------------
8816! Aggregate data from all nodes.
8817!-----------------------------------------------------------------------
8818!
8819 CALL mpi_allgather (asend, npts, mp_float, &
8820 & arecv, npts, mp_float, &
8821 & ocn_comm_world, myerror)
8822 IF (myerror.ne.mpi_success) THEN
8823 CALL mpi_error_string (myerror, string, lstr, serror)
8824 lstr=len_trim(string)
8825 WRITE (stdout,20) 'MPI_ALLGATHER', myrank, myerror, &
8826 & string(1:lstr)
8827 20 FORMAT (/,' MP_AGGREGATE2D - error during ',a,' call, Task = ', &
8828 & i3.3,' Error = ',i3,/,18x,a)
8829 exit_flag=5
8830 RETURN
8831 END IF
8832!
8833!-----------------------------------------------------------------------
8834! Unpack data into a global 2D array.
8835!-----------------------------------------------------------------------
8836!
8837 DO rank=0,nnodes
8838 np=rank*npts
8839 DO j=my_bounds(3,rank),my_bounds(4,rank)
8840 DO i=my_bounds(1,rank),my_bounds(2,rank)
8841 np=np+1
8842 aglobal(i,j)=arecv(np)
8843 END DO
8844 END DO
8845 END DO
8846
8847# ifdef PROFILE
8848!
8849!-----------------------------------------------------------------------
8850! Turn off time clocks.
8851!-----------------------------------------------------------------------
8852!
8853 CALL wclock_off (ng, model, 71, __line__, myfile)
8854# endif
8855!
8856 RETURN
8857 END SUBROUTINE mp_aggregate2d
8858!
8859 SUBROUTINE mp_aggregate3d (ng, model, gtype, &
8860 & LBiT, UBiT, LBjT, UBjT, &
8861 & LBiG, UBiG, LBjG, UBjG, &
8862 & LBk, UBk, &
8863 & Atiled, Aglobal)
8864!
8865!***********************************************************************
8866! !
8867! This routine collects a 3D tiled, floating-point array from each !
8868! spawned node and stores it into 3D global array. If nesting, the !
8869! global array contains the contact points data. !
8870! !
8871! On Input: !
8872! !
8873! ng Nested grid number. !
8874! model Calling model identifier. !
8875! gtype C-grid type. !
8876! LBiT Tiled array, I-dimension Lower bound. !
8877! UBiT Tiled array, I-dimension Upper bound. !
8878! LBjT Tiled array, J-dimension Lower bound. !
8879! UBjT Tiled array, J-dimension Upper bound. !
8880! LBk K-dimension Lower bound (tiled and global arrays). !
8881! UBk K-dimension Upper bound (tiled and global arrays). !
8882! LBiG Global array, I-dimension Lower bound. !
8883! UBiG Global array, I-dimension Upper bound. !
8884! LBjG Global array, J-dimension Lower bound. !
8885! UBjG Global array, J-dimension Upper bound. !
8886! (The K-dimension bounds LBk and UBk are shared by the tiled !
8887! and global arrays.) !
8888! Atiled 3D tiled, floating-point array to process. !
8889! !
8890! On Output: !
8891! !
8892! Aglobal 3D global array, all tiles are aggregated. !
8893! !
8894!***********************************************************************
8895!
8896! Imported variable declarations.
8897!
8898 integer, intent(in) :: ng, model, gtype
8899 integer, intent(in) :: LBiT, UBiT, LBjT, UBjT
8900 integer, intent(in) :: LBiG, UBiG, LBjG, UBjG
8901 integer, intent(in) :: LBk, UBk
8902!
8903 real(r8), intent(in) :: Atiled(LBiT:UBiT,LBjT:UBjT,LBk:UBk)
8904 real(r8), intent(out) :: Aglobal(LBiG:UBiG,LBjG:UBjG,LBk:UBk)
8905!
8906! Local variable declarations.
8907!
8908 integer :: Klen, Lstr, MyError, MyType, Nnodes, Npts, Serror
8909 integer :: i, j, k, np, rank
8910
8911 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: my_bounds
8912!
8913 real(r8), dimension(TileSize(ng)*(UBk-LBk+1)) :: Asend
8914
8915 real(r8), dimension(TileSize(ng)*(UBk-LBk+1)*NtileI(ng)*NtileJ(ng)) :: Arecv
8916!
8917 character (len=MPI_MAX_ERROR_STRING) :: string
8918
8919 character (len=*), parameter :: MyFile = &
8920 & __FILE__//", mp_aggregate3d"
8921
8922# ifdef PROFILE
8923!
8924!-----------------------------------------------------------------------
8925! Turn on time clocks.
8926!-----------------------------------------------------------------------
8927!
8928 CALL wclock_on (ng, model, 71, __line__, myfile)
8929# endif
8930!
8931!-----------------------------------------------------------------------
8932! Set horizontal starting and ending indices for parallel domain
8933! partitions in the XI- and ETA-directions.
8934!-----------------------------------------------------------------------
8935!
8936! Maximum automatic buffer memory size in bytes.
8937!
8938 bmemmax(ng)=max(bmemmax(ng), real((SIZE(asend)+ &
8939 & SIZE(aglobal)+ &
8940 & SIZE(arecv))*kind(asend),r8))
8941!
8942! Rank of the last node in the group (number of nodes minus one).
8943!
8944 nnodes=ntilei(ng)*ntilej(ng)-1
8945!
8946! Set starting and ending indices to process including contact points
8947! (if nesting) according to the staggered C-grid classification.
8948!
8949 mytype=abs(gtype)
8950
8951 SELECT CASE (mytype)
8952 CASE (p2dvar, p3dvar)
8953 DO rank=0,nnodes
8954 my_bounds(1,rank)=bounds(ng) % IstrP(rank)
8955 my_bounds(2,rank)=bounds(ng) % IendP(rank)
8956 my_bounds(3,rank)=bounds(ng) % JstrP(rank)
8957 my_bounds(4,rank)=bounds(ng) % JendP(rank)
8958 END DO
8959 CASE (r2dvar, r3dvar)
8960 DO rank=0,nnodes
8961 my_bounds(1,rank)=bounds(ng) % IstrT(rank)
8962 my_bounds(2,rank)=bounds(ng) % IendT(rank)
8963 my_bounds(3,rank)=bounds(ng) % JstrT(rank)
8964 my_bounds(4,rank)=bounds(ng) % JendT(rank)
8965 END DO
8966 CASE (u2dvar, u3dvar)
8967 DO rank=0,nnodes
8968 my_bounds(1,rank)=bounds(ng) % IstrP(rank)
8969 my_bounds(2,rank)=bounds(ng) % IendP(rank)
8970 my_bounds(3,rank)=bounds(ng) % JstrT(rank)
8971 my_bounds(4,rank)=bounds(ng) % JendT(rank)
8972 END DO
8973 CASE (v2dvar, v3dvar)
8974 DO rank=0,nnodes
8975 my_bounds(1,rank)=bounds(ng) % IstrT(rank)
8976 my_bounds(2,rank)=bounds(ng) % IendT(rank)
8977 my_bounds(3,rank)=bounds(ng) % JstrP(rank)
8978 my_bounds(4,rank)=bounds(ng) % JendP(rank)
8979 END DO
8980 END SELECT
8981 klen=ubk-lbk+1
8982!
8983! Determine the maximum number of points to process among all tiles.
8984! In this collective communication, every task must send and receive
8985! the same amount of data, so buffers are padded to the maximum size.
8986!
8987 npts=0
8988 DO rank=0,nnodes
8989 np=(my_bounds(2,rank)-my_bounds(1,rank)+1)* &
8990 & (my_bounds(4,rank)-my_bounds(3,rank)+1)* &
8991 & klen
8992 npts=max(npts, np)
8993 END DO
8994
8995 IF (npts.gt.tilesize(ng)*klen) THEN
8996 IF (master) THEN
8997 WRITE (stdout,10) ' TileSize = ', tilesize(ng)*klen, npts
8998 10 FORMAT (/,' MP_AGGREGATE3D - communication buffer too small,', &
8999 & a, 2i8)
9000 END IF
9001 exit_flag=5
9002 RETURN
9003 END IF
9004!
9005! Initialize local arrays to facilitate collective communications.
9006! This also avoids denormalized values, which facilitates debugging.
9007!
9008 asend=0.0_r8
9009 arecv=0.0_r8
9010!
9011!-----------------------------------------------------------------------
9012! Pack tile data.
9013!-----------------------------------------------------------------------
9014!
9015 np=0
9016 DO k=lbk,ubk
9017 DO j=my_bounds(3,myrank),my_bounds(4,myrank)
9018 DO i=my_bounds(1,myrank),my_bounds(2,myrank)
9019 np=np+1
9020 asend(np)=atiled(i,j,k)
9021 END DO
9022 END DO
9023 END DO
9024!
9025!-----------------------------------------------------------------------
9026! Aggregate data from all nodes.
9027!-----------------------------------------------------------------------
9028!
9029 CALL mpi_allgather (asend, npts, mp_float, &
9030 & arecv, npts, mp_float, &
9031 & ocn_comm_world, myerror)
9032 IF (myerror.ne.mpi_success) THEN
9033 CALL mpi_error_string (myerror, string, lstr, serror)
9034 lstr=len_trim(string)
9035 WRITE (stdout,20) 'MPI_ALLGATHER', myrank, myerror, &
9036 & string(1:lstr)
9037 20 FORMAT (/,' MP_AGGREGATE3D - error during ',a,' call, Task = ', &
9038 & i3.3,' Error = ',i3,/,18x,a)
9039 exit_flag=5
9040 RETURN
9041 END IF
9042!
9043!-----------------------------------------------------------------------
9044! Unpack data into a global 3D array.
9045!-----------------------------------------------------------------------
9046!
9047 DO rank=0,nnodes
9048 np=rank*npts
9049 DO k=lbk,ubk
9050 DO j=my_bounds(3,rank),my_bounds(4,rank)
9051 DO i=my_bounds(1,rank),my_bounds(2,rank)
9052 np=np+1
9053 aglobal(i,j,k)=arecv(np)
9054 END DO
9055 END DO
9056 END DO
9057 END DO
9058
9059# ifdef PROFILE
9060!
9061!-----------------------------------------------------------------------
9062! Turn off time clocks.
9063!-----------------------------------------------------------------------
9064!
9065 CALL wclock_off (ng, model, 71, __line__, myfile)
9066# endif
9067!
9068 RETURN
9069 END SUBROUTINE mp_aggregate3d
9070!
9071 SUBROUTINE mp_dump (ng, tile, gtype, &
9072 & ILB, IUB, JLB, JUB, KLB, KUB, A, name)
9073!
9074!***********************************************************************
9075! !
9076! This routine is used to debug distributed-memory communications. !
9077! It writes a field into ASCII files for further post-processing. !
9078! !
9079!***********************************************************************
9080!
9081! Imported variable declarations.
9082!
9083 integer, intent(in) :: ng, tile, gtype
9084 integer, intent(in) :: ILB, IUB, JLB, JUB, KLB, KUB
9085!
9086 real(r8), intent(in) :: A(ILB:IUB,JLB:JUB,KLB:KUB)
9087!
9088 character (len=*) :: name
9089!
9090! Local variable declarations.
9091!
9092 common /counter/ nc
9093 integer :: nc
9094!
9095 logical, save :: first = .true.
9096!
9097 integer :: Imin, Imax, Ioff, Jmin, Jmax, Joff
9098 integer :: unit
9099!
9100 character (len=*), parameter :: MyFile = &
9101 & __FILE__//", mp_dump"
9102
9103# include "set_bounds.h"
9104!
9105!------------------------------------------------------------------------
9106! Write out requested field.
9107!------------------------------------------------------------------------
9108!
9109 IF (first) THEN
9110 nc=0
9111 first=.false.
9112 END IF
9113 nc=nc+1
9114 IF (master) THEN
9115 WRITE (10,'(a,i3.3,a,a)') 'file ', nc, ': ', trim(name)
9116 FLUSH (10)
9117 END IF
9118!
9119! Write out field including ghost-points.
9120!
9121 imin=0
9122 imax=lm(ng)+1
9123 IF (ewperiodic(ng)) THEN
9124 ioff=3
9125 ELSE
9126 ioff=1
9127 END IF
9128
9129 jmin=0
9130 jmax=mm(ng)+1
9131 IF (nsperiodic(ng)) THEN
9132 joff=3
9133 ELSE
9134 joff=1
9135 END IF
9136
9137 IF ((gtype.eq.p2dvar).or.(gtype.eq.p3dvar).or. &
9138 & (gtype.eq.u2dvar).or.(gtype.eq.u3dvar)) THEN
9139 imin=1
9140 END IF
9141 IF ((gtype.eq.p2dvar).or.(gtype.eq.p3dvar).or. &
9142 & (gtype.eq.v2dvar).or.(gtype.eq.v3dvar)) THEN
9143 jmin=1
9144 END IF
9145
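!
! Each record goes to Fortran unit (MyRank+1)*1000+nc here (and
! (MyRank+1)*10000+nc below), so every task and call gets a distinct
! unit; with most compilers an unopened unit N is written to a file
! named "fort.N".
!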
9146 unit=(myrank+1)*1000+nc
9147 WRITE (unit,*) ilb, iub, jlb, jub, klb, kub, &
9148 & ioff, joff, imin, imax, jmin, jmax, &
9149 & a(ilb:iub,jlb:jub,klb:kub)
9150 FLUSH (unit)
9151!
9152! Write out non-overlapping field.
9153!
9154 imin=istrr
9155 imax=iendr
9156 IF (ewperiodic(ng)) THEN
9157 ioff=2
9158 ELSE
9159 ioff=1
9160 END IF
9161
9162 jmin=jstrr
9163 jmax=jendr
9164 IF (nsperiodic(ng)) THEN
9165 joff=2
9166 ELSE
9167 joff=1
9168 END IF
9169
9170 IF ((gtype.eq.p2dvar).or.(gtype.eq.p3dvar).or. &
9171 & (gtype.eq.u2dvar).or.(gtype.eq.u3dvar)) THEN
9172 imin=istr
9173 ioff=ioff-1
9174 END IF
9175 IF ((gtype.eq.p2dvar).or.(gtype.eq.p3dvar).or. &
9176 & (gtype.eq.v2dvar).or.(gtype.eq.v3dvar)) THEN
9177 jmin=jstr
9178 joff=joff-1
9179 END IF
9180
9181 unit=(myrank+1)*10000+nc
9182 WRITE (unit,*) imin, imax, jmin, jmax, klb, kub, &
9183 & ioff, joff, imin, imax, jmin, jmax, &
9184 & a(imin:imax,jmin:jmax,klb:kub)
9185 FLUSH (unit)
9186
9187 RETURN
9188 END SUBROUTINE mp_dump
9189#endif
9190 END MODULE distribute_mod
9191