ROMS
distribute_mod Module Reference

Data Types

interface  mp_assemble
 
interface  mp_bcastf
 
interface  mp_bcasti
 
interface  mp_bcastl
 
interface  mp_bcasts
 
interface  mp_collect
 
interface  mp_reduce
 

Functions/Subroutines

subroutine mp_barrier (ng, model, inpcomm)
 
subroutine mp_bcastf_0dp (ng, model, a, inpcomm)
 
subroutine mp_bcastf_1dp (ng, model, a, inpcomm)
 
subroutine mp_bcastf_2dp (ng, model, a, inpcomm)
 
subroutine mp_bcastf_3dp (ng, model, a, inpcomm)
 
subroutine mp_bcastf_0d (ng, model, a, inpcomm)
 
subroutine mp_bcastf_1d (ng, model, a, inpcomm)
 
subroutine mp_bcastf_2d (ng, model, a, inpcomm)
 
subroutine mp_bcastf_3d (ng, model, a, inpcomm)
 
subroutine mp_bcastf_4d (ng, model, a, inpcomm)
 
subroutine mp_bcasti_0d (ng, model, a, inpcomm)
 
subroutine mp_bcasti_1d (ng, model, a, inpcomm)
 
subroutine mp_bcasti_2d (ng, model, a, inpcomm)
 
subroutine mp_bcastl_0d (ng, model, a, inpcomm)
 
subroutine mp_bcastl_1d (ng, model, a, inpcomm)
 
subroutine mp_bcastl_2d (ng, model, a, inpcomm)
 
subroutine mp_bcasts_0d (ng, model, a, inpcomm)
 
subroutine mp_bcasts_1d (ng, model, a, inpcomm)
 
subroutine mp_bcasts_2d (ng, model, a, inpcomm)
 
subroutine mp_bcasts_3d (ng, model, a, inpcomm)
 
subroutine mp_bcast_struc (ng, model, s, inpcomm)
 
subroutine mp_boundary (ng, model, imin, imax, lbi, ubi, lbk, ubk, update, a)
 
subroutine mp_assemblef_1d (ng, model, npts, aspv, a, inpcomm)
 
subroutine mp_assemblef_2d (ng, model, npts, aspv, a, inpcomm)
 
subroutine mp_assemblef_3d (ng, model, npts, aspv, a, inpcomm)
 
subroutine mp_assemblei_1d (ng, model, npts, aspv, a, inpcomm)
 
subroutine mp_assemblei_2d (ng, model, npts, aspv, a, inpcomm)
 
subroutine mp_collect_f (ng, model, npts, aspv, a, inpcomm)
 
subroutine mp_collect_i (ng, model, npts, aspv, a, inpcomm)
 
subroutine mp_gather2d (ng, model, lbi, ubi, lbj, ubj, tindex, gtype, ascl, amask, a, npts, awrk, setfillval)
 
subroutine mp_gather3d (ng, model, lbi, ubi, lbj, ubj, lbk, ubk, tindex, gtype, ascl, amask, a, npts, awrk, setfillval)
 
subroutine mp_gather_state (ng, model, mstr, mend, asize, a, awrk)
 
integer function mp_ncread1d (ng, model, ncid, ncvname, ncname, ncrec, lb1, ub1, ascale, a)
 
integer function mp_ncread2d (ng, model, ncid, ncvname, ncname, ncrec, lb1, ub1, lb2, ub2, ascale, a)
 
integer function mp_ncwrite1d (ng, model, ncid, ncvname, ncname, ncrec, lb1, ub1, ascale, a)
 
integer function mp_ncwrite2d (ng, model, ncid, ncvname, ncname, ncrec, lb1, ub1, lb2, ub2, ascale, a)
 
subroutine mp_reduce_i8 (ng, model, asize, a, handle_op, inpcomm)
 
subroutine mp_reduce_0dp (ng, model, asize, a, handle_op, inpcomm)
 
subroutine mp_reduce_1dp (ng, model, asize, a, handle_op, inpcomm)
 
subroutine mp_reduce_0d (ng, model, asize, a, handle_op, inpcomm)
 
subroutine mp_reduce_1d (ng, model, asize, a, handle_op, inpcomm)
 
subroutine mp_reduce2 (ng, model, isize, jsize, a, handle_op, inpcomm)
 
subroutine mp_scatter2d (ng, model, lbi, ubi, lbj, ubj, nghost, gtype, amin, amax, nwpts, ij_water, npts, a, awrk)
 
subroutine mp_scatter3d (ng, model, lbi, ubi, lbj, ubj, lbk, ubk, nghost, gtype, amin, amax, nwpts, ij_water, npts, a, awrk)
 
subroutine mp_scatter_state (ng, model, mstr, mend, asize, a, awrk)
 
subroutine mp_aggregate2d (ng, model, gtype, lbit, ubit, lbjt, ubjt, lbig, ubig, lbjg, ubjg, atiled, aglobal)
 
subroutine mp_aggregate3d (ng, model, gtype, lbit, ubit, lbjt, ubjt, lbig, ubig, lbjg, ubjg, lbk, ubk, atiled, aglobal)
 
subroutine mp_dump (ng, tile, gtype, ilb, iub, jlb, jub, klb, kub, a, name)
 

Function/Subroutine Documentation

◆ mp_aggregate2d()

subroutine distribute_mod::mp_aggregate2d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) gtype,
integer, intent(in) lbit,
integer, intent(in) ubit,
integer, intent(in) lbjt,
integer, intent(in) ubjt,
integer, intent(in) lbig,
integer, intent(in) ubig,
integer, intent(in) lbjg,
integer, intent(in) ubjg,
real(r8), dimension(lbit:ubit,lbjt:ubjt), intent(in) atiled,
real(r8), dimension(lbig:ubig,lbjg:ubjg), intent(out) aglobal )

Definition at line 8661 of file distribute.F.

8665!
8666!***********************************************************************
8667! !
8668! This routine collects a 2D tiled, floating-point array from each !
8669! spawned node and stores it into 2D global array. If nesting, the !
8670! global array contains the contact points data. !
8671! !
8672! On Input: !
8673! !
8674! ng Nested grid number. !
8675! model Calling model identifier. !
8676! gtype C-grid type. !
8677! LBiT Tiled array, I-dimension Lower bound. !
8678! UBiT Tiled array, I-dimension Upper bound. !
8679! LBjT Tiled array, J-dimension Lower bound. !
8680! UBjT Tiled array, J-dimension Upper bound. !
8681! LBiG Global array, I-dimension Lower bound. !
8682! UBiG Global array, I-dimension Upper bound. !
8683! LBjG Global array, J-dimension Lower bound. !
8684! UBjG Global array, J-dimension Upper bound. !
8685! Atiled 2D tiled, floating-point array to process. !
8686! !
8687! On Output: !
8688! !
8689! Aglobal 2D global array, all tiles are aggregated. !
8690! !
8691!***********************************************************************
8692!
8693! Imported variable declarations.
8694!
8695 integer, intent(in) :: ng, model, gtype
8696 integer, intent(in) :: LBiT, UBiT, LBjT, UBjT
8697 integer, intent(in) :: LBiG, UBiG, LBjG, UBjG
8698!
8699 real(r8), intent(in) :: Atiled(LBiT:UBiT,LBjT:UBjT)
8700 real(r8), intent(out) :: Aglobal(LBiG:UBiG,LBjG:UBjG)
8701!
8702! Local variable declarations.
8703!
8704 integer :: Lstr, MyError, MyType, Nnodes, Npts, Serror
8705 integer :: i, j, np, rank
8706
8707 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: my_bounds
8708!
8709 real(r8), dimension(TileSize(ng)) :: Asend
8710 real(r8), dimension(TileSize(ng)* &
 & NtileI(ng)*NtileJ(ng)) :: Arecv
8711!
8712 character (len=MPI_MAX_ERROR_STRING) :: string
8713
8714 character (len=*), parameter :: MyFile = &
8715 & __FILE__//", mp_aggregate2d"
8716
8717# ifdef PROFILE
8718!
8719!-----------------------------------------------------------------------
8720! Turn on time clocks.
8721!-----------------------------------------------------------------------
8722!
8723 CALL wclock_on (ng, model, 71, __line__, myfile)
8724# endif
8725!
8726!-----------------------------------------------------------------------
8727! Set horizontal starting and ending indices for parallel domain
8728! partitions in the XI- and ETA-directions.
8729!-----------------------------------------------------------------------
8730!
8731! Maximum automatic buffer memory size in bytes.
8732!
8733 bmemmax(ng)=max(bmemmax(ng), real((SIZE(asend)+ &
8734 & SIZE(aglobal)+ &
8735 & SIZE(arecv))*kind(asend),r8))
8736!
8737! Number of nodes in the group.
8738!
8739 nnodes=ntilei(ng)*ntilej(ng)-1
8740!
8741! Set starting and ending indices to process including contact points
8742! (if nesting) according to the staggered C-grid classification.
8743!
8744 mytype=abs(gtype)
8745
8746 SELECT CASE (mytype)
8747 CASE (p2dvar, p3dvar)
8748 DO rank=0,nnodes
8749 my_bounds(1,rank)=bounds(ng) % IstrP(rank)
8750 my_bounds(2,rank)=bounds(ng) % IendP(rank)
8751 my_bounds(3,rank)=bounds(ng) % JstrP(rank)
8752 my_bounds(4,rank)=bounds(ng) % JendP(rank)
8753 END DO
8754 CASE (r2dvar, r3dvar)
8755 DO rank=0,nnodes
8756 my_bounds(1,rank)=bounds(ng) % IstrT(rank)
8757 my_bounds(2,rank)=bounds(ng) % IendT(rank)
8758 my_bounds(3,rank)=bounds(ng) % JstrT(rank)
8759 my_bounds(4,rank)=bounds(ng) % JendT(rank)
8760 END DO
8761 CASE (u2dvar, u3dvar)
8762 DO rank=0,nnodes
8763 my_bounds(1,rank)=bounds(ng) % IstrP(rank)
8764 my_bounds(2,rank)=bounds(ng) % IendP(rank)
8765 my_bounds(3,rank)=bounds(ng) % JstrT(rank)
8766 my_bounds(4,rank)=bounds(ng) % JendT(rank)
8767 END DO
8768 CASE (v2dvar, v3dvar)
8769 DO rank=0,nnodes
8770 my_bounds(1,rank)=bounds(ng) % IstrT(rank)
8771 my_bounds(2,rank)=bounds(ng) % IendT(rank)
8772 my_bounds(3,rank)=bounds(ng) % JstrP(rank)
8773 my_bounds(4,rank)=bounds(ng) % JendP(rank)
8774 END DO
8775 END SELECT
8776!
8777! Determine the maximum number of points to process between all tiles.
8778! In collective communications, the amount of data sent must be equal
8779! to the amount of data received.
8780!
8781 npts=0
8782 DO rank=0,nnodes
8783 np=(my_bounds(2,rank)-my_bounds(1,rank)+1)* &
8784 & (my_bounds(4,rank)-my_bounds(3,rank)+1)
8785 npts=max(npts, np)
8786 END DO
8787
8788 IF (npts.gt.tilesize(ng)) THEN
8789 IF (master) THEN
8790 WRITE (stdout,10) ' TileSize = ', tilesize(ng), npts
8791 10 FORMAT (/,' MP_AGGREGATE2D - communication buffer too small,', &
8792 & a, 2i8)
8793 END IF
8794 exit_flag=5
8795 RETURN
8796 END IF
8797!
8798! Initialize local arrays to facilitate collective communications.
8799! This also avoids denormalized values, which facilitates debugging.
8800!
8801 asend=0.0_r8
8802 arecv=0.0_r8
8803!
8804!-----------------------------------------------------------------------
8805! Pack tile data.
8806!-----------------------------------------------------------------------
8807!
8808 np=0
8809 DO j=my_bounds(3,myrank),my_bounds(4,myrank)
8810 DO i=my_bounds(1,myrank),my_bounds(2,myrank)
8811 np=np+1
8812 asend(np)=atiled(i,j)
8813 END DO
8814 END DO
8815!
8816!-----------------------------------------------------------------------
8817! Aggregate data from all nodes.
8818!-----------------------------------------------------------------------
8819!
8820 CALL mpi_allgather (asend, npts, mp_float, &
8821 & arecv, npts, mp_float, &
8822 & ocn_comm_world, myerror)
8823 IF (myerror.ne.mpi_success) THEN
8824 CALL mpi_error_string (myerror, string, lstr, serror)
8825 lstr=len_trim(string)
8826 WRITE (stdout,20) 'MPI_ALLGATHER', myrank, myerror, &
8827 & string(1:lstr)
8828 20 FORMAT (/,' MP_AGGREGATE2D - error during ',a,' call, Task = ', &
8829 & i3.3,' Error = ',i3,/,18x,a)
8830 exit_flag=5
8831 RETURN
8832 END IF
8833!
8834!-----------------------------------------------------------------------
8835! Unpack data into a global 2D array.
8836!-----------------------------------------------------------------------
8837!
8838 DO rank=0,nnodes
8839 np=rank*npts
8840 DO j=my_bounds(3,rank),my_bounds(4,rank)
8841 DO i=my_bounds(1,rank),my_bounds(2,rank)
8842 np=np+1
8843 aglobal(i,j)=arecv(np)
8844 END DO
8845 END DO
8846 END DO
8847
8848# ifdef PROFILE
8849!
8850!-----------------------------------------------------------------------
8851! Turn off time clocks.
8852!-----------------------------------------------------------------------
8853!
8854 CALL wclock_off (ng, model, 71, __line__, myfile)
8855# endif
8856!
8857 RETURN
8858

References mod_param::bmemmax, mod_param::bounds, mod_scalars::exit_flag, mod_parallel::master, mod_parallel::mp_float, mod_parallel::myrank, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_param::p2dvar, mod_param::p3dvar, mod_param::r2dvar, mod_param::r3dvar, mod_iounits::stdout, mod_param::tilesize, mod_param::u2dvar, mod_param::u3dvar, mod_param::v2dvar, mod_param::v3dvar, wclock_off(), and wclock_on().

Referenced by ad_nesting_mod::ad_fine2coarse2d(), ad_nesting_mod::ad_fine2coarse3d(), nesting_mod::fine2coarse2d(), and nesting_mod::fine2coarse3d().
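A minimal calling sketch (not taken from the ROMS sources): it assumes the usual ROMS conventions, with iNLM, r2dvar, Lm, and Mm from mod_param and the tile bounds LBi:UBi, LBj:UBj already set for the current node; all other names are illustrative.

!     Hedged usage sketch: aggregate this node's tiled RHO-point field
!     into a full-grid array that becomes available on every node.
      USE distribute_mod, ONLY : mp_aggregate2d
      USE mod_param,      ONLY : iNLM, r2dvar, Lm, Mm
!
      real(r8) :: Atiled(LBi:UBi,LBj:UBj)          ! this node's tile
      real(r8) :: Aglobal(0:Lm(ng)+1,0:Mm(ng)+1)   ! aggregated grid
!
      CALL mp_aggregate2d (ng, iNLM, r2dvar,                            &
     &                     LBi, UBi, LBj, UBj,                          &
     &                     0, Lm(ng)+1, 0, Mm(ng)+1,                    &
     &                     Atiled, Aglobal)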


◆ mp_aggregate3d()

subroutine distribute_mod::mp_aggregate3d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) gtype,
integer, intent(in) lbit,
integer, intent(in) ubit,
integer, intent(in) lbjt,
integer, intent(in) ubjt,
integer, intent(in) lbig,
integer, intent(in) ubig,
integer, intent(in) lbjg,
integer, intent(in) ubjg,
integer, intent(in) lbk,
integer, intent(in) ubk,
real(r8), dimension(lbit:ubit,lbjt:ubjt,lbk:ubk), intent(in) atiled,
real(r8), dimension(lbig:ubig,lbjg:ubjg,lbk:ubk), intent(out) aglobal )

Definition at line 8861 of file distribute.F.

8866!
8867!***********************************************************************
8868! !
8869! This routine collects a 3D tiled, floating-point array from each !
8870! spawned node and stores it into 3D global array. If nesting, the !
8871! global array contains the contact points data. !
8872! !
8873! On Input: !
8874! !
8875! ng Nested grid number. !
8876! model Calling model identifier. !
8877! gtype C-grid type. !
8878! LBiT Tiled array, I-dimension Lower bound. !
8879! UBiT Tiled array, I-dimension Upper bound. !
8880! LBjT Tiled array, J-dimension Lower bound. !
8881! UBjT Tiled array, J-dimension Upper bound. !
8882! LBkT Tiled array, K-dimension Lower bound. !
8883! UBkT Tiled array, K-dimension Upper bound. !
8884! LBiG Global array, I-dimension Lower bound. !
8885! UBiG Global array, I-dimension Upper bound. !
8886! LBjG Global array, J-dimension Lower bound. !
8887! UBjG Global array, J-dimension Upper bound. !
8888! LBkG Global array, K-dimension Lower bound. !
8889! UBkG Global array, K-dimension Upper bound. !
8890! Atiled 3D tiled, floating-point array to process. !
8891! !
8892! On Output: !
8893! !
8894! Aglobal 3D global array, all tiles are aggregated. !
8895! !
8896!***********************************************************************
8897!
8898! Imported variable declarations.
8899!
8900 integer, intent(in) :: ng, model, gtype
8901 integer, intent(in) :: LBiT, UBiT, LBjT, UBjT
8902 integer, intent(in) :: LBiG, UBiG, LBjG, UBjG
8903 integer, intent(in) :: LBk, UBk
8904!
8905 real(r8), intent(in) :: Atiled(LBiT:UBiT,LBjT:UBjT,LBk:UBk)
8906 real(r8), intent(out) :: Aglobal(LBiG:UBiG,LBjG:UBjG,LBk:UBk)
8907!
8908! Local variable declarations.
8909!
8910 integer :: Klen, Lstr, MyError, MyType, Nnodes, Npts, Serror
8911 integer :: i, j, k, np, rank
8912
8913 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: my_bounds
8914!
8915 real(r8), dimension(TileSize(ng)*(UBk-LBk+1)) :: Asend
8916
8917 real(r8), dimension(TileSize(ng)*(UBk-LBk+1)* &
 & NtileI(ng)*NtileJ(ng)) :: Arecv
8918!
8919 character (len=MPI_MAX_ERROR_STRING) :: string
8920
8921 character (len=*), parameter :: MyFile = &
8922 & __FILE__//", mp_aggregate3d"
8923
8924# ifdef PROFILE
8925!
8926!-----------------------------------------------------------------------
8927! Turn on time clocks.
8928!-----------------------------------------------------------------------
8929!
8930 CALL wclock_on (ng, model, 71, __line__, myfile)
8931# endif
8932!
8933!-----------------------------------------------------------------------
8934! Set horizontal starting and ending indices for parallel domain
8935! partitions in the XI- and ETA-directions.
8936!-----------------------------------------------------------------------
8937!
8938! Maximum automatic buffer memory size in bytes.
8939!
8940 bmemmax(ng)=max(bmemmax(ng), real((SIZE(asend)+ &
8941 & SIZE(aglobal)+ &
8942 & SIZE(arecv))*kind(asend),r8))
8943!
8944! Number of nodes in the group.
8945!
8946 nnodes=ntilei(ng)*ntilej(ng)-1
8947!
8948! Set starting and ending indices to process including contact points
8949! (if nesting) according to the staggered C-grid classification.
8950!
8951 mytype=abs(gtype)
8952
8953 SELECT CASE (mytype)
8954 CASE (p2dvar, p3dvar)
8955 DO rank=0,nnodes
8956 my_bounds(1,rank)=bounds(ng) % IstrP(rank)
8957 my_bounds(2,rank)=bounds(ng) % IendP(rank)
8958 my_bounds(3,rank)=bounds(ng) % JstrP(rank)
8959 my_bounds(4,rank)=bounds(ng) % JendP(rank)
8960 END DO
8961 CASE (r2dvar, r3dvar)
8962 DO rank=0,nnodes
8963 my_bounds(1,rank)=bounds(ng) % IstrT(rank)
8964 my_bounds(2,rank)=bounds(ng) % IendT(rank)
8965 my_bounds(3,rank)=bounds(ng) % JstrT(rank)
8966 my_bounds(4,rank)=bounds(ng) % JendT(rank)
8967 END DO
8968 CASE (u2dvar, u3dvar)
8969 DO rank=0,nnodes
8970 my_bounds(1,rank)=bounds(ng) % IstrP(rank)
8971 my_bounds(2,rank)=bounds(ng) % IendP(rank)
8972 my_bounds(3,rank)=bounds(ng) % JstrT(rank)
8973 my_bounds(4,rank)=bounds(ng) % JendT(rank)
8974 END DO
8975 CASE (v2dvar, v3dvar)
8976 DO rank=0,nnodes
8977 my_bounds(1,rank)=bounds(ng) % IstrT(rank)
8978 my_bounds(2,rank)=bounds(ng) % IendT(rank)
8979 my_bounds(3,rank)=bounds(ng) % JstrP(rank)
8980 my_bounds(4,rank)=bounds(ng) % JendP(rank)
8981 END DO
8982 END SELECT
8983 klen=ubk-lbk+1
8984!
8985! Determine the maximum number of points to process between all tiles.
8986! In collective communications, the amount of data sent must be equal
8987! to the amount of data received.
8988!
8989 npts=0
8990 DO rank=0,nnodes
8991 np=(my_bounds(2,rank)-my_bounds(1,rank)+1)* &
8992 & (my_bounds(4,rank)-my_bounds(3,rank)+1)* &
8993 & klen
8994 npts=max(npts, np)
8995 END DO
8996
8997 IF (npts.gt.tilesize(ng)*klen) THEN
8998 IF (master) THEN
8999 WRITE (stdout,10) ' TileSize = ', tilesize(ng)*klen, npts
9000 10 FORMAT (/,' MP_AGGREGATE3D - communication buffer too small,', &
9001 & a, 2i8)
9002 END IF
9003 exit_flag=5
9004 RETURN
9005 END IF
9006!
9007! Initialize local arrays to facilitate collective communications.
9008! This also avoids denormalized values, which facilitates debugging.
9009!
9010 asend=0.0_r8
9011 arecv=0.0_r8
9012!
9013!-----------------------------------------------------------------------
9014! Pack tile data.
9015!-----------------------------------------------------------------------
9016!
9017 np=0
9018 DO k=lbk,ubk
9019 DO j=my_bounds(3,myrank),my_bounds(4,myrank)
9020 DO i=my_bounds(1,myrank),my_bounds(2,myrank)
9021 np=np+1
9022 asend(np)=atiled(i,j,k)
9023 END DO
9024 END DO
9025 END DO
9026!
9027!-----------------------------------------------------------------------
9028! Aggregate data from all nodes.
9029!-----------------------------------------------------------------------
9030!
9031 CALL mpi_allgather (asend, npts, mp_float, &
9032 & arecv, npts, mp_float, &
9033 & ocn_comm_world, myerror)
9034 IF (myerror.ne.mpi_success) THEN
9035 CALL mpi_error_string (myerror, string, lstr, serror)
9036 lstr=len_trim(string)
9037 WRITE (stdout,20) 'MPI_ALLGATHER', myrank, myerror, &
9038 & string(1:lstr)
9039 20 FORMAT (/,' MP_AGGREGATE3D - error during ',a,' call, Task = ', &
9040 & i3.3,' Error = ',i3,/,18x,a)
9041 exit_flag=5
9042 RETURN
9043 END IF
9044!
9045!-----------------------------------------------------------------------
9046! Unpack data into a global 3D array.
9047!-----------------------------------------------------------------------
9048!
9049 DO rank=0,nnodes
9050 np=rank*npts
9051 DO k=lbk,ubk
9052 DO j=my_bounds(3,rank),my_bounds(4,rank)
9053 DO i=my_bounds(1,rank),my_bounds(2,rank)
9054 np=np+1
9055 aglobal(i,j,k)=arecv(np)
9056 END DO
9057 END DO
9058 END DO
9059 END DO
9060
9061# ifdef PROFILE
9062!
9063!-----------------------------------------------------------------------
9064! Turn off time clocks.
9065!-----------------------------------------------------------------------
9066!
9067 CALL wclock_off (ng, model, 71, __line__, myfile)
9068# endif
9069!
9070 RETURN
9071

References mod_param::bmemmax, mod_param::bounds, mod_scalars::exit_flag, mod_parallel::master, mod_parallel::mp_float, mod_parallel::myrank, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_param::p2dvar, mod_param::p3dvar, mod_param::r2dvar, mod_param::r3dvar, mod_iounits::stdout, mod_param::tilesize, mod_param::u2dvar, mod_param::u3dvar, mod_param::v2dvar, mod_param::v3dvar, wclock_off(), and wclock_on().

Referenced by ad_nesting_mod::ad_fine2coarse3d(), and nesting_mod::fine2coarse3d().
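The 3D variant follows the same calling pattern with the vertical bounds appended; a hedged fragment, assuming N(ng) vertical levels from mod_param and illustrative array names:

      CALL mp_aggregate3d (ng, iNLM, r3dvar,                            &
     &                     LBi, UBi, LBj, UBj,                          &
     &                     0, Lm(ng)+1, 0, Mm(ng)+1, 1, N(ng),          &
     &                     Atiled, Aglobal)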


◆ mp_assemblef_1d()

subroutine distribute_mod::mp_assemblef_1d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) npts,
real(r8), intent(in) aspv,
real(r8), dimension(:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 2278 of file distribute.F.

2279!
2280!***********************************************************************
2281! !
2282! This routine assembles a 1D floating-point array from all members !
2283! in the group. The collection of data from all nodes is achieved !
2284! as a reduction sum. !
2285! !
2286! On Input: !
2287! !
2288! ng Nested grid number. !
2289! model Calling model identifier. !
2290! Npts Number of collected data points, PROD(SIZE(A)). !
2291! Aspv Special value indicating that an array element is !
2292! not operated by the current parallel node. It must !
2293! be zero to collect data by a global reduction sum. !
2294! A 1D array to collect. !
2295! InpComm Communicator handle (integer, OPTIONAL). !
2296! !
2297! On Output: !
2298! !
2299! A Assembled 1D array. !
2300! !
2301!***********************************************************************
2302!
2303! Imported variable declarations.
2304!
2305 integer, intent(in) :: ng, model, Npts
2306
2307 integer, intent(in), optional :: InpComm
2308!
2309 real(r8), intent(in) :: Aspv
2310
2311 real(r8), intent(inout) :: A(:)
2312!
2313! Local variable declarations.
2314!
2315 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
2316 integer :: i, rank, request
2317
2318 integer, dimension(MPI_STATUS_SIZE) :: status
2319!
2320# if defined ASSEMBLE_ALLGATHER
2321 real(r8), dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
2322# elif defined ASSEMBLE_ALLREDUCE
2323 real(r8), dimension(Npts) :: Asend
2324# elif defined ASSEMBLE_SENDRECV
2325 real(r8), allocatable :: Arecv(:)
2326# endif
2327!
2328 character (len=MPI_MAX_ERROR_STRING) :: string
2329
2330 character (len=*), parameter :: MyFile = &
2331 & __FILE__//", mp_assemblef_1d"
2332
2333# ifdef PROFILE
2334!
2335!-----------------------------------------------------------------------
2336! Turn on time clocks.
2337!-----------------------------------------------------------------------
2338!
2339 CALL wclock_on (ng, model, 70, __line__, myfile)
2340# endif
2341# ifdef MPI
2342!
2343!-----------------------------------------------------------------------
2344! Set distributed-memory communicator handle (context ID).
2345!-----------------------------------------------------------------------
2346!
2347 IF (PRESENT(inpcomm)) THEN
2348 mycomm=inpcomm
2349 ELSE
2350 mycomm=ocn_comm_world
2351 END IF
2352# endif
2353!
2354!-----------------------------------------------------------------------
2355! Check input parameters.
2356!-----------------------------------------------------------------------
2357!
2358! Maximum automatic buffer memory size in bytes.
2359!
2360# if defined ASSEMBLE_ALLGATHER
2361 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
2362# else
2363 bmemmax(ng)=max(bmemmax(ng), real(npts*kind(a),r8))
2364# endif
2365!
2366 mynpts=ubound(a, dim=1)
2367 IF (npts.ne.mynpts) THEN
2368 IF (master) THEN
2369 WRITE (stdout,10) npts, mynpts
2370 END IF
2371 exit_flag=7
2372 END IF
2373!
2374 IF (aspv.ne.0.0_r8) THEN
2375 IF (master) THEN
2376 WRITE (stdout,20) aspv
2377 END IF
2378 exit_flag=7
2379 END IF
2380!
2381!-----------------------------------------------------------------------
2382! Collect data from all nodes.
2383!-----------------------------------------------------------------------
2384!
2385# if defined ASSEMBLE_ALLGATHER
2386 CALL mpi_allgather (a, npts, mp_float, arecv, npts, mp_float, &
2387 & mycomm, myerror)
2388 IF (myerror.ne.mpi_success) THEN
2389 CALL mpi_error_string (myerror, string, lstr, serror)
2390 lstr=len_trim(string)
2391 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
2392 & string(1:lstr)
2393 exit_flag=2
2394 RETURN
2395 END IF
2396!
2397! Pack data according to special values: sum or ignore.
2398!
2399 nnodes=ntilei(ng)*ntilej(ng)-1
2400 IF (aspv.eq.0.0_r8) THEN
2401 DO i=1,npts
2402 a(i)=0.0_r8
2403 DO rank=0,nnodes
2404 a(i)=a(i)+arecv(i,rank)
2405 END DO
2406 END DO
2407 ELSE
2408 DO i=1,npts
2409 DO rank=0,nnodes
2410 IF (arecv(i,rank).ne.aspv) THEN
2411 a(i)=arecv(i,rank)
2412 END IF
2413 END DO
2414 END DO
2415 END IF
2416
2417# elif defined ASSEMBLE_ALLREDUCE
2418!
2419! Copy data to send.
2420!
2421 DO i=1,npts
2422 asend(i)=a(i)
2423 END DO
2424!
2425! Collect data from all nodes as a reduced sum.
2426!
2427 CALL mpi_allreduce (asend, a, npts, mp_float, mpi_sum, &
2428 & mycomm, myerror)
2429 IF (myerror.ne.mpi_success) THEN
2430 CALL mpi_error_string (myerror, string, lstr, serror)
2431 lstr=len_trim(string)
2432 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
2433 & string(1:lstr)
2434 exit_flag=2
2435 RETURN
2436 END IF
2437
2438# elif defined ASSEMBLE_SENDRECV
2439
2440 IF (myrank.eq.mymaster) THEN
2441!
2442! If master node, allocate and receive buffer.
2443!
2444 IF (.not.allocated(arecv)) THEN
2445 allocate (arecv(npts))
2446 END IF
2447!
2448! If master node, loop over other nodes to receive and accumulate the
2449! data.
2450!
2451 DO rank=1,ntilei(ng)*ntilej(ng)-1
2452 CALL mpi_irecv (arecv, npts, mp_float, rank, rank+5, &
2453 & mycomm, request, myerror)
2454 CALL mpi_wait (request, status, myerror)
2455 IF (myerror.ne.mpi_success) THEN
2456 CALL mpi_error_string (myerror, string, lstr, serror)
2457 lstr=len_trim(string)
2458 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
2459 exit_flag=2
2460 RETURN
2461 END IF
2462 DO i=1,npts
2463 a(i)=a(i)+arecv(i)
2464 END DO
2465 END DO
2466 deallocate (arecv)
2467!
2468! Otherwise, send data to master node.
2469!
2470 ELSE
2471 CALL mpi_isend (a, npts, mp_float, mymaster, myrank+5, &
2472 & mycomm, request, myerror)
2473 CALL mpi_wait (request, status, myerror)
2474 IF (myerror.ne.mpi_success) THEN
2475 CALL mpi_error_string (myerror, string, lstr, serror)
2476 lstr=len_trim(string)
2477 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
2478 exit_flag=2
2479 RETURN
2480 END IF
2481 END IF
2482!
2483! Broadcast accumulated (full) data to all nodes.
2484!
2485 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
2486 IF (myerror.ne.mpi_success) THEN
2487 CALL mpi_error_string (myerror, string, lstr, serror)
2488 lstr=len_trim(string)
2489 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2490 exit_flag=2
2491 RETURN
2492 END IF
2493# endif
2494
2495# ifdef PROFILE
2496!
2497!-----------------------------------------------------------------------
2498! Turn off time clocks.
2499!-----------------------------------------------------------------------
2500!
2501 CALL wclock_off (ng, model, 70, __line__, myfile)
2502# endif
2503!
2504 10 FORMAT (/,' MP_ASSEMBLEF_1D - inconsistent array size, Npts = ', &
2505 & i10,2x,i10,/,19x,'number of addressed array elements ', &
2506 & 'is incorrect.')
2507 20 FORMAT (/,' MP_ASSEMBLEF_1D - illegal special value, Aspv = ', &
2508 & 1p,e17.10,/,19x,'a zero value is needed for global ', &
2509 & 'reduction.')
2510 30 FORMAT (/,' MP_ASSEMBLEF_1D - error during ',a,' call, Task = ', &
2511 & i3.3,' Error = ',i3,/,19x,a)
2512!
2513 RETURN
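A hedged usage sketch of the assembly pattern described above: every node zeroes the full array, fills only the entries it operates on, and mp_assemblef_1d completes the array with a global reduction sum (which is why Aspv must be zero). The loop bounds and value names are illustrative.

!     Hedged sketch: partial fill on each node, then assemble across
!     the group as a reduction sum.
      real(r8) :: Awork(Npts)
!
      Awork=0.0_r8                          ! unowned entries must stay zero
      DO i=MyIstr,MyIend                    ! entries owned by this node
        Awork(i)=local_value(i)
      END DO
      CALL mp_assemblef_1d (ng, iNLM, Npts, 0.0_r8, Awork)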

◆ mp_assemblef_2d()

subroutine distribute_mod::mp_assemblef_2d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) npts,
real(r8), intent(in) aspv,
real(r8), dimension(:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 2516 of file distribute.F.

2517!
2518!***********************************************************************
2519! !
2520! This routine assembles a 2D floating-point array from all members !
2521! in the group. The collection of data from all nodes is achieved !
2522! as a reduction sum. !
2523! !
2524! On Input: !
2525! !
2526! ng Nested grid number. !
2527! model Calling model identifier. !
2528! Npts Number of collected data points, PROD(SIZE(A)). !
2529! Aspv Special value indicating that an array element is !
2530! not operated by the current parallel node. It must !
2531! be zero to collect data by a global reduction sum. !
2532! A 2D array to collect. !
2533! InpComm Communicator handle (integer, OPTIONAL). !
2534! !
2535! On Output: !
2536! !
2537! A Assembled 2D array. !
2538! !
2539!***********************************************************************
2540!
2541! Imported variable declarations.
2542!
2543 integer, intent(in) :: ng, model, Npts
2544
2545 integer, intent(in), optional :: InpComm
2546!
2547 real(r8), intent(in) :: Aspv
2548
2549 real(r8), intent(inout) :: A(:,:)
2550!
2551! Local variable declarations.
2552!
2553 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
2554 integer :: i, rank, request
2555
2556 integer :: Asize(2)
2557
2558 integer, dimension(MPI_STATUS_SIZE) :: status
2559!
2560# if defined ASSEMBLE_ALLGATHER
2561 real(r8), dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
2562 real(r8), dimension(Npts) :: Asend
2563# elif defined ASSEMBLE_ALLREDUCE
2564 real(r8), dimension(Npts) :: Arecv, Asend
2565# elif defined ASSEMBLE_SENDRECV
2566 real(r8), allocatable :: Arecv(:)
2567 real(r8), dimension(Npts) :: Asend
2568# endif
2569!
2570 character (len=MPI_MAX_ERROR_STRING) :: string
2571
2572 character (len=*), parameter :: MyFile = &
2573 & __FILE__//", mp_assemblef_2d"
2574
2575# ifdef PROFILE
2576!
2577!-----------------------------------------------------------------------
2578! Turn on time clocks.
2579!-----------------------------------------------------------------------
2580!
2581 CALL wclock_on (ng, model, 70, __line__, myfile)
2582# endif
2583# ifdef MPI
2584!
2585!-----------------------------------------------------------------------
2586! Set distributed-memory communicator handle (context ID).
2587!-----------------------------------------------------------------------
2588!
2589 IF (PRESENT(inpcomm)) THEN
2590 mycomm=inpcomm
2591 ELSE
2592 mycomm=ocn_comm_world
2593 END IF
2594# endif
2595!
2596!-----------------------------------------------------------------------
2597! Check input parameters.
2598!-----------------------------------------------------------------------
2599!
2600! Maximum automatic buffer memory size in bytes.
2601!
2602# if defined ASSEMBLE_ALLGATHER
2603 bmemmax(ng)=max(bmemmax(ng), real((npts+SIZE(arecv))*kind(a),r8))
2604# else
2605 bmemmax(ng)=max(bmemmax(ng), real(2*npts*kind(a),r8))
2606# endif
2607!
2608 asize(1)=ubound(a, dim=1)
2609 asize(2)=ubound(a, dim=2)
2610 mynpts=asize(1)*asize(2)
2611 IF (npts.ne.mynpts) THEN
2612 IF (master) THEN
2613 WRITE (stdout,10) npts, mynpts
2614 END IF
2615 exit_flag=7
2616 END IF
2617!
2618 IF (aspv.ne.0.0_r8) THEN
2619 IF (master) THEN
2620 WRITE (stdout,20) aspv
2621 END IF
2622 exit_flag=7
2623 END IF
2624!
2625!-----------------------------------------------------------------------
2626! Collect data from all nodes.
2627!-----------------------------------------------------------------------
2628!
2629! Reshape input 2D data into 1D array to facilitate communications.
2630!
2631 asend=reshape(a, (/npts/))
2632
2633# if defined ASSEMBLE_ALLGATHER
2634!
2635! Collect data from all nodes.
2636!
2637 CALL mpi_allgather (asend, npts, mp_float, arecv, npts, mp_float, &
2638 & mycomm, myerror)
2639 IF (myerror.ne.mpi_success) THEN
2640 CALL mpi_error_string (myerror, string, lstr, serror)
2641 lstr=len_trim(string)
2642 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
2643 & string(1:lstr)
2644 exit_flag=2
2645 RETURN
2646 END IF
2647!
2648! Pack data according to special values: sum or ignore.
2649!
2650 nnodes=ntilei(ng)*ntilej(ng)-1
2651 IF (aspv.eq.0.0_r8) THEN
2652 DO i=1,npts
2653 asend(i)=0.0_r8
2654 DO rank=0,nnodes
2655 asend(i)=asend(i)+arecv(i,rank)
2656 END DO
2657 END DO
2658 ELSE
2659 DO i=1,npts
2660 DO rank=0,nnodes
2661 IF (arecv(i,rank).ne.aspv) THEN
2662 asend(i)=arecv(i,rank)
2663 END IF
2664 END DO
2665 END DO
2666 END IF
2667!
2668! Load collected data in output 2D array.
2669!
2670 a=reshape(asend, asize)
2671
2672# elif defined ASSEMBLE_ALLREDUCE
2673!
2674! Collect data from all nodes as a reduced sum.
2675!
2676 CALL mpi_allreduce (asend, arecv, npts, mp_float, mpi_sum, &
2677 & mycomm, myerror)
2678 IF (myerror.ne.mpi_success) THEN
2679 CALL mpi_error_string (myerror, string, lstr, serror)
2680 lstr=len_trim(string)
2681 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
2682 & string(1:lstr)
2683 exit_flag=2
2684 RETURN
2685 END IF
2686!
2687! Load collected data into output 2D array.
2688!
2689 a=reshape(arecv, asize)
2690
2691# elif defined ASSEMBLE_SENDRECV
2692!
2693 IF (myrank.eq.mymaster) THEN
2694!
2695! If master node, allocate and receive buffer.
2696!
2697 IF (.not.allocated(arecv)) THEN
2698 allocate (arecv(npts))
2699 END IF
2700!
2701! If master node, loop over other nodes to receive and accumulate the
2702! data.
2703!
2704 DO rank=1,ntilei(ng)*ntilej(ng)-1
2705 CALL mpi_irecv (arecv, npts, mp_float, rank, rank+5, &
2706 & mycomm, request, myerror)
2707 CALL mpi_wait (request, status, myerror)
2708 IF (myerror.ne.mpi_success) THEN
2709 CALL mpi_error_string (myerror, string, lstr, serror)
2710 lstr=len_trim(string)
2711 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
2712 exit_flag=2
2713 RETURN
2714 END IF
2715 DO i=1,npts
2716 asend(i)=asend(i)+arecv(i)
2717 END DO
2718 END DO
2719 deallocate (arecv)
2720!
2721! Load collected data in output 2D array.
2722!
2723 a=reshape(asend, asize)
2724!
2725! Otherwise, send data to master node.
2726!
2727 ELSE
2728 CALL mpi_isend (asend, npts, mp_float, mymaster, myrank+5, &
2729 & mycomm, request, myerror)
2730 CALL mpi_wait (request, status, myerror)
2731 IF (myerror.ne.mpi_success) THEN
2732 CALL mpi_error_string (myerror, string, lstr, serror)
2733 lstr=len_trim(string)
2734 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
2735 exit_flag=2
2736 RETURN
2737 END IF
2738 END IF
2739!
2740! Broadcast accumulated (full) data to all nodes.
2741!
2742 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
2743 IF (myerror.ne.mpi_success) THEN
2744 CALL mpi_error_string (myerror, string, lstr, serror)
2745 lstr=len_trim(string)
2746 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2747 exit_flag=2
2748 RETURN
2749 END IF
2750# endif
2751# ifdef PROFILE
2752!
2753!-----------------------------------------------------------------------
2754! Turn off time clocks.
2755!-----------------------------------------------------------------------
2756!
2757 CALL wclock_off (ng, model, 70, __line__, myfile)
2758# endif
2759!
2760 10 FORMAT (/,' MP_ASSEMBLEF_2D - inconsistent array size, Npts = ', &
2761 & i10,2x,i10,/,19x,'number of addressed array elements ', &
2762 & 'is incorrect.')
2763 20 FORMAT (/,' MP_ASSEMBLEF_2D - illegal special value, Aspv = ', &
2764 & 1p,e17.10,/,19x,'a zero value is needed for global ', &
2765 & 'reduction.')
2766 30 FORMAT (/,' MP_ASSEMBLEF_2D - error during ',a,' call, Task = ', &
2767 & i3.3,' Error = ',i3,/,19x,a)
2768!
2769 RETURN
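The 2D case uses the same pattern; note that Npts must equal PROD(SIZE(A)), since the routine reshapes the array into a 1D buffer before communicating. A hedged fragment with illustrative names (mp_assemblef_3d is called the same way with a 3D array):

      real(r8) :: Awork(Imax,Jmax)
!
      Awork=0.0_r8
!     ... each node fills only the (i,j) entries it operates on ...
      CALL mp_assemblef_2d (ng, iNLM, Imax*Jmax, 0.0_r8, Awork)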

◆ mp_assemblef_3d()

subroutine distribute_mod::mp_assemblef_3d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) npts,
real(r8), intent(in) aspv,
real(r8), dimension(:,:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 2772 of file distribute.F.

2773!
2774!***********************************************************************
2775! !
2776! This routine assembles a 3D floating-point array from all members !
2777! in the group. The collection of data from all nodes is achieved !
2778! as a reduction sum. !
2779! !
2780! On Input: !
2781! !
2782! ng Nested grid number. !
2783! model Calling model identifier. !
2784! Npts Number of collected data points, PROD(SIZE(A)). !
2785! Aspv Special value indicating that an array element is !
2786! not operated by the current parallel node. It must !
2787! be zero to collect data by a global reduction sum. !
2788! A 3D array to collect. !
2789! InpComm Communicator handle (integer, OPTIONAL). !
2790! !
2791! On Output: !
2792! !
2793! A Assembled 3D array. !
2794! !
2795!***********************************************************************
2796!
2797! Imported variable declarations.
2798!
2799 integer, intent(in) :: ng, model, Npts
2800
2801 integer, intent(in), optional :: InpComm
2802
2803 real(r8), intent(in) :: Aspv
2804
2805 real(r8), intent(inout) :: A(:,:,:)
2806!
2807! Local variable declarations.
2808!
2809 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
2810 integer :: i, rank, request
2811
2812 integer :: Asize(3)
2813
2814 integer, dimension(MPI_STATUS_SIZE) :: status
2815!
2816# if defined ASSEMBLE_ALLGATHER
2817 real(r8), dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
2818 real(r8), dimension(Npts) :: Asend
2819# elif defined ASSEMBLE_ALLREDUCE
2820 real(r8), dimension(Npts) :: Arecv, Asend
2821# elif defined ASSEMBLE_SENDRECV
2822 real(r8), allocatable :: Arecv(:)
2823 real(r8), dimension(Npts) :: Asend
2824# endif
2825!
2826 character (len=MPI_MAX_ERROR_STRING) :: string
2827
2828 character (len=*), parameter :: MyFile = &
2829 & __FILE__//", mp_assemblef_3d"
2830
2831# ifdef PROFILE
2832!
2833!-----------------------------------------------------------------------
2834! Turn on time clocks.
2835!-----------------------------------------------------------------------
2836!
2837 CALL wclock_on (ng, model, 70, __line__, myfile)
2838# endif
2839# ifdef MPI
2840!
2841!-----------------------------------------------------------------------
2842! Set distributed-memory communicator handle (context ID).
2843!-----------------------------------------------------------------------
2844!
2845 IF (PRESENT(inpcomm)) THEN
2846 mycomm=inpcomm
2847 ELSE
2848 mycomm=ocn_comm_world
2849 END IF
2850# endif
2851!
2852!-----------------------------------------------------------------------
2853! Check input parameters.
2854!-----------------------------------------------------------------------
2855!
2856! Maximum automatic buffer memory size in bytes.
2857!
2858# if defined ASSEMBLE_ALLGATHER
2859 bmemmax(ng)=max(bmemmax(ng), real((npts+SIZE(arecv))*kind(a),r8))
2860# else
2861 bmemmax(ng)=max(bmemmax(ng), real(2*npts*kind(a),r8))
2862# endif
2863!
2864 asize(1)=ubound(a, dim=1)
2865 asize(2)=ubound(a, dim=2)
2866 asize(3)=ubound(a, dim=3)
2867 mynpts=asize(1)*asize(2)*asize(3)
2868 IF (npts.ne.mynpts) THEN
2869 IF (master) THEN
2870 WRITE (stdout,10) npts, mynpts
2871 END IF
2872 exit_flag=7
2873 END IF
2874!
2875 IF (aspv.ne.0.0_r8) THEN
2876 IF (master) THEN
2877 WRITE (stdout,20) aspv
2878 END IF
2879 exit_flag=7
2880 END IF
2881!
2882!-----------------------------------------------------------------------
2883! Collect data from all nodes.
2884!-----------------------------------------------------------------------
2885!
2886! Reshape input 3D data into 1D array to facilitate communications.
2887!
2888 asend=reshape(a, (/npts/))
2889
2890# if defined ASSEMBLE_ALLGATHER
2891!
2892! Collect data from all nodes.
2893!
2894 CALL mpi_allgather (asend, npts, mp_float, arecv, npts, mp_float, &
2895 & mycomm, myerror)
2896 IF (myerror.ne.mpi_success) THEN
2897 CALL mpi_error_string (myerror, string, lstr, serror)
2898 lstr=len_trim(string)
2899 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
2900 & string(1:lstr)
2901 exit_flag=2
2902 RETURN
2903 END IF
2904!
2905! Pack data according to special values: sum or ignore.
2906!
2907 nnodes=ntilei(ng)*ntilej(ng)-1
2908 IF (aspv.eq.0.0_r8) THEN
2909 DO i=1,npts
2910 asend(i)=0.0_r8
2911 DO rank=0,nnodes
2912 asend(i)=asend(i)+arecv(i,rank)
2913 END DO
2914 END DO
2915 ELSE
2916 DO i=1,npts
2917 DO rank=0,nnodes
2918 IF (arecv(i,rank).ne.aspv) THEN
2919 asend(i)=arecv(i,rank)
2920 END IF
2921 END DO
2922 END DO
2923 END IF
2924!
2925! Load collected data into output 3D array.
2926!
2927 a=reshape(asend, asize)
2928
2929# elif defined ASSEMBLE_ALLREDUCE
2930!
2931! Collect data from all nodes as a reduced sum.
2932!
2933 CALL mpi_allreduce (asend, arecv, npts, mp_float, mpi_sum, &
2934 & mycomm, myerror)
2935 IF (myerror.ne.mpi_success) THEN
2936 CALL mpi_error_string (myerror, string, lstr, serror)
2937 lstr=len_trim(string)
2938 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
2939 & string(1:lstr)
2940 exit_flag=2
2941 RETURN
2942 END IF
2943!
2944! Load collected data into output 3D array.
2945!
2946 a=reshape(arecv, asize)
2947
2948# elif defined ASSEMBLE_SENDRECV
2949!
2950 IF (myrank.eq.mymaster) THEN
2951!
2952! If master node, allocate and receive buffer.
2953!
2954 IF (.not.allocated(arecv)) THEN
2955 allocate (arecv(npts))
2956 END IF
2957!
2958! If master node, loop over other nodes to receive and accumulate the
2959! data.
2960!
2961 DO rank=1,ntilei(ng)*ntilej(ng)-1
2962 CALL mpi_irecv (arecv, npts, mp_float, rank, rank+5, &
2963 & mycomm, request, myerror)
2964 CALL mpi_wait (request, status, myerror)
2965 IF (myerror.ne.mpi_success) THEN
2966 CALL mpi_error_string (myerror, string, lstr, serror)
2967 lstr=len_trim(string)
2968 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
2969 exit_flag=2
2970 RETURN
2971 END IF
2972 DO i=1,npts
2973 asend(i)=asend(i)+arecv(i)
2974 END DO
2975 END DO
2976 deallocate (arecv)
2977!
2978! Load collected data into output 3D array.
2979!
2980 a=reshape(asend, asize)
2981!
2982! Otherwise, send data to master node.
2983!
2984 ELSE
2985 CALL mpi_isend (asend, npts, mp_float, mymaster, myrank+5, &
2986 & mycomm, request, myerror)
2987 CALL mpi_wait (request, status, myerror)
2988 IF (myerror.ne.mpi_success) THEN
2989 CALL mpi_error_string (myerror, string, lstr, serror)
2990 lstr=len_trim(string)
2991 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
2992 exit_flag=2
2993 RETURN
2994 END IF
2995 END IF
2996!
2997! Broadcast accumulated (full) data to all nodes.
2998!
2999 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
3000 IF (myerror.ne.mpi_success) THEN
3001 CALL mpi_error_string (myerror, string, lstr, serror)
3002 lstr=len_trim(string)
3003 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3004 exit_flag=2
3005 RETURN
3006 END IF
3007# endif
3008# ifdef PROFILE
3009!
3010!-----------------------------------------------------------------------
3011! Turn off time clocks.
3012!-----------------------------------------------------------------------
3013!
3014 CALL wclock_off (ng, model, 70, __line__, myfile)
3015# endif
3016!
3017 10 FORMAT (/,' MP_ASSEMBLEF_3D - inconsistent array size, Npts = ', &
3018 & i10,2x,i10,/,19x,'number of addressed array elements ', &
3019 & 'is incorrect.')
3020 20 FORMAT (/,' MP_ASSEMBLEF_3D - illegal special value, Aspv = ', &
3021 & 1p,e17.10,/,19x,'a zero value is needed for global ', &
3022 & 'reduction.')
3023 30 FORMAT (/,' MP_ASSEMBLEF_3D - error during ',a,' call, Task = ', &
3024 & i3.3,' Error = ',i3,/,19x,a)
3025!
3026 RETURN

◆ mp_assemblei_1d()

subroutine distribute_mod::mp_assemblei_1d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) npts,
integer, intent(in) aspv,
integer, dimension(:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 3029 of file distribute.F.

3030!
3031!***********************************************************************
3032! !
3033! This routine assembles a 1D integer array from all members in the !
3034! group. The collection of data from all nodes is achieved as a !
3035! reduction sum. !
3036! !
3037! On Input: !
3038! !
3039! ng Nested grid number. !
3040! model Calling model identifier. !
3041! Npts Number of collected data points, PROD(SIZE(A)). !
3042! Aspv Special value indicating that an array element is !
3043! not operated by the current parallel node. It must !
3044! be zero to collect data by a global reduction sum. !
3045! A 1D array to collect. !
3046! InpComm Communicator handle (integer, OPTIONAL). !
3047! !
3048! On Output: !
3049! !
3050! A Assembled 1D array. !
3051! !
3052!***********************************************************************
3053!
3054! Imported variable declarations.
3055!
3056 integer, intent(in) :: ng, model, Npts
3057
3058 integer, intent(in), optional :: InpComm
3059
3060 integer, intent(in) :: Aspv
3061
3062 integer, intent(inout) :: A(:)
3063!
3064! Local variable declarations.
3065!
3066 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
3067 integer :: i, rank, request
3068
3069 integer, dimension(MPI_STATUS_SIZE) :: status
3070
3071# if defined ASSEMBLE_ALLGATHER
3072 integer, dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
3073# elif defined ASSEMBLE_ALLREDUCE
3074 integer, dimension(Npts) :: Asend
3075# elif defined ASSEMBLE_SENDRECV
3076 integer, allocatable :: Arecv(:)
3077# endif
3078!
3079 character (len=MPI_MAX_ERROR_STRING) :: string
3080
3081 character (len=*), parameter :: MyFile = &
3082 & __FILE__//", mp_assemblei_1d"
3083
3084# ifdef PROFILE
3085!
3086!-----------------------------------------------------------------------
3087! Turn on time clocks.
3088!-----------------------------------------------------------------------
3089!
3090 CALL wclock_on (ng, model, 70, __line__, myfile)
3091# endif
3092# ifdef MPI
3093!
3094!-----------------------------------------------------------------------
3095! Set distributed-memory communicator handle (context ID).
3096!-----------------------------------------------------------------------
3097!
3098 IF (PRESENT(inpcomm)) THEN
3099 mycomm=inpcomm
3100 ELSE
3101 mycomm=ocn_comm_world
3102 END IF
3103# endif
3104!
3105!-----------------------------------------------------------------------
3106! Check input parameters.
3107!-----------------------------------------------------------------------
3108!
3109! Maximum automatic buffer memory size in bytes.
3110!
3111# if defined ASSEMBLE_ALLGATHER
3112 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
3113# else
3114 bmemmax(ng)=max(bmemmax(ng), real(npts*kind(a),r8))
3115# endif
3116!
3117 mynpts=ubound(a, dim=1)
3118 IF (npts.ne.mynpts) THEN
3119 IF (master) THEN
3120 WRITE (stdout,10) npts, mynpts
3121 END IF
3122 exit_flag=7
3123 END IF
3124!
3125 IF (aspv.ne.0) THEN
3126 IF (master) THEN
3127 WRITE (stdout,20) aspv
3128 END IF
3129 exit_flag=7
3130 END IF
3131!
3132!-----------------------------------------------------------------------
3133! Collect data from all nodes.
3134!-----------------------------------------------------------------------
3135!
3136# if defined ASSEMBLE_ALLGATHER
3137 CALL mpi_allgather (a, npts, mpi_integer, &
3138 & arecv, npts, mpi_integer, &
3139 & mycomm, myerror)
3140 IF (myerror.ne.mpi_success) THEN
3141 CALL mpi_error_string (myerror, string, lstr, serror)
3142 lstr=len_trim(string)
3143 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
3144 & string(1:lstr)
3145 exit_flag=2
3146 RETURN
3147 END IF
3148!
3149! Pack data according to special values: sum or ignore.
3150!
3151 nnodes=ntilei(ng)*ntilej(ng)-1
3152 IF (aspv.eq.0.0_r8) THEN
3153 DO i=1,npts
3154 a(i)=0.0_r8
3155 DO rank=0,nnodes
3156 a(i)=a(i)+arecv(i,rank)
3157 END DO
3158 END DO
3159 ELSE
3160 DO i=1,npts
3161 DO rank=0,nnodes
3162 IF (arecv(i,rank).ne.aspv) THEN
3163 a(i)=arecv(i,rank)
3164 END IF
3165 END DO
3166 END DO
3167 END IF
3168
3169# elif defined ASSEMBLE_ALLREDUCE
3170!
3171! Copy data to send.
3172!
3173 DO i=1,npts
3174 asend(i)=a(i)
3175 END DO
3176!
3177! Collect data from all nodes as a reduced sum.
3178!
3179 CALL mpi_allreduce (asend, a, npts, mpi_integer, mpi_sum, &
3180 & mycomm, myerror)
3181 IF (myerror.ne.mpi_success) THEN
3182 CALL mpi_error_string (myerror, string, lstr, serror)
3183 lstr=len_trim(string)
3184 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
3185 & string(1:lstr)
3186 exit_flag=2
3187 RETURN
3188 END IF
3189
3190# elif defined ASSEMBLE_SENDRECV
3191
3192 IF (myrank.eq.mymaster) THEN
3193!
3194! If master node, allocate and receive buffer.
3195!
3196 IF (.not.allocated(arecv)) THEN
3197 allocate (arecv(npts))
3198 END IF
3199!
3200! If master node, loop over other nodes to receive and accumulate the
3201! data.
3202!
3203 DO rank=1,ntilei(ng)*ntilej(ng)-1
3204 CALL mpi_irecv (arecv, npts, mpi_integer, rank, rank+5, &
3205 & mycomm, request, myerror)
3206 CALL mpi_wait (request, status, myerror)
3207 IF (myerror.ne.mpi_success) THEN
3208 CALL mpi_error_string (myerror, string, lstr, serror)
3209 lstr=len_trim(string)
3210 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
3211 exit_flag=2
3212 RETURN
3213 END IF
3214 DO i=1,npts
3215 a(i)=a(i)+arecv(i)
3216 END DO
3217 END DO
3218 deallocate (arecv)
3219!
3220! Otherwise, send data to master node.
3221!
3222 ELSE
3223 CALL mpi_isend (a, npts, mpi_integer, mymaster, myrank+5, &
3224 & mycomm, request, myerror)
3225 CALL mpi_wait (request, status, myerror)
3226 IF (myerror.ne.mpi_success) THEN
3227 CALL mpi_error_string (myerror, string, lstr, serror)
3228 lstr=len_trim(string)
3229 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
3230 exit_flag=2
3231 RETURN
3232 END IF
3233 END IF
3234!
3235! Broadcast accumulated (full) data to all nodes.
3236!
3237 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
3238 IF (myerror.ne.mpi_success) THEN
3239 CALL mpi_error_string (myerror, string, lstr, serror)
3240 lstr=len_trim(string)
3241 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3242 exit_flag=2
3243 RETURN
3244 END IF
3245# endif
3246# ifdef PROFILE
3247!
3248!-----------------------------------------------------------------------
3249! Turn off time clocks.
3250!-----------------------------------------------------------------------
3251!
3252 CALL wclock_off (ng, model, 70, __line__, myfile)
3253# endif
3254!
3255 10 FORMAT (/,' MP_ASSEMBLEI_1D - inconsistent array size, Npts = ', &
3256 & i10,2x,i10,/,19x,'number of addressed array elements ', &
3257 & 'is incorrect.')
3258 20 FORMAT (/,' MP_ASSEMBLEI_1D - illegal special value, Aspv = ',i4, &
3259 & /,19x,'a zero value is needed for global reduction.')
3260 30 FORMAT (/,' MP_ASSEMBLEI_1D - error during ',a,' call, Task = ', &
3261 & i3.3,' Error = ',i3,/,19x,a)
3262!
3263 RETURN
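The integer variant follows the same calling convention, with an integer special value of zero; a hedged fragment (illustrative names):

      integer :: Icount(Npts)
!
      Icount=0
!     ... each node sets only the counters it owns ...
      CALL mp_assemblei_1d (ng, iNLM, Npts, 0, Icount)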

◆ mp_assemblei_2d()

subroutine distribute_mod::mp_assemblei_2d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) npts,
integer, intent(in) aspv,
integer, dimension(:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 3266 of file distribute.F.

3267!
3268!***********************************************************************
3269! !
3270! This routine assembles a 2D integer array from all members in the !
3271! group. The collection of data from all nodes is achieved as a !
3272! reduction sum. !
3273! !
3274! On Input: !
3275! !
3276! ng Nested grid number. !
3277! model Calling model identifier. !
3278! Npts Number of collected data points, PROD(SIZE(A)). !
3279! Aspv Special value indicating that an array element is !
3280! not operated by the current parallel node. It must !
3281! be zero to collect data by a global reduction sum. !
3282! A 2D array to collect. !
3283! InpComm Communicator handle (integer, OPTIONAL). !
3284! !
3285! On Output: !
3286! !
3287! A Assembled 2D array. !
3288! !
3289!***********************************************************************
3290!
3291! Imported variable declarations.
3292!
3293 integer, intent(in) :: ng, model, Npts
3294
3295 integer, intent(in), optional :: InpComm
3296
3297 integer, intent(in) :: Aspv
3298
3299 integer, intent(inout) :: A(:,:)
3300!
3301! Local variable declarations.
3302!
3303 integer :: Lstr, MyCOMM, MyError, MyNpts, Nnodes, Serror
3304 integer :: i, rank, request
3305
3306 integer :: Asize(2)
3307
3308 integer, dimension(MPI_STATUS_SIZE) :: status
3309
3310# if defined ASSEMBLE_ALLGATHER
3311 integer, dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
3312 integer, dimension(Npts) :: Asend
3313# elif defined ASSEMBLE_ALLREDUCE
3314 integer, dimension(Npts) :: Arecv, Asend
3315# elif defined ASSEMBLE_SENDRECV
3316 integer, allocatable :: Arecv(:)
3317 integer, dimension(Npts) :: Asend
3318# endif
3319!
3320 character (len=MPI_MAX_ERROR_STRING) :: string
3321
3322 character (len=*), parameter :: MyFile = &
3323 & __FILE__//", mp_assemblei_2d"
3324
3325# ifdef PROFILE
3326!
3327!-----------------------------------------------------------------------
3328! Turn on time clocks.
3329!-----------------------------------------------------------------------
3330!
3331 CALL wclock_on (ng, model, 70, __line__, myfile)
3332# endif
3333# ifdef MPI
3334!
3335!-----------------------------------------------------------------------
3336! Set distributed-memory communicator handle (context ID).
3337!-----------------------------------------------------------------------
3338!
3339 IF (PRESENT(inpcomm)) THEN
3340 mycomm=inpcomm
3341 ELSE
3342 mycomm=ocn_comm_world
3343 END IF
3344# endif
3345!
3346!-----------------------------------------------------------------------
3347! Check input parameters.
3348!-----------------------------------------------------------------------
3349!
3350! Maximum automatic buffer memory size in bytes.
3351!
3352# if defined ASSEMBLE_ALLGATHER
3353 bmemmax(ng)=max(bmemmax(ng), real((npts+SIZE(arecv))*kind(a),r8))
3354# else
3355 bmemmax(ng)=max(bmemmax(ng), real(2*npts*kind(a),r8))
3356# endif
3357!
3358 asize(1)=ubound(a, dim=1)
3359 asize(2)=ubound(a, dim=2)
3360 mynpts=asize(1)*asize(2)
3361 IF (npts.ne.mynpts) THEN
3362 IF (master) THEN
3363 WRITE (stdout,10) npts, mynpts
3364 END IF
3365 exit_flag=7
3366 END IF
3367!
3368 IF (aspv.ne.0) THEN
3369 IF (master) THEN
3370 WRITE (stdout,20) aspv
3371 END IF
3372 exit_flag=7
3373 END IF
3374!
3375!-----------------------------------------------------------------------
3376! Collect data from all nodes.
3377!-----------------------------------------------------------------------
3378!
3379! Reshape input 2D data into 1D array to facilitate communications.
3380!
3381 asend=reshape(a, (/npts/))
3382
3383# if defined ASSEMBLE_ALLGATHER
3384!
3385! Collect data from all nodes.
3386!
3387 CALL mpi_allgather (asend, npts, mpi_integer, &
3388 & arecv, npts, mpi_integer, &
3389 & mycomm, myerror)
3390 IF (myerror.ne.mpi_success) THEN
3391 CALL mpi_error_string (myerror, string, lstr, serror)
3392 lstr=len_trim(string)
3393 WRITE (stdout,30) 'MPI_ALLGATHER', myrank, myerror, &
3394 & string(1:lstr)
3395 exit_flag=2
3396 RETURN
3397 END IF
3398!
3399! Pack data according to special values: sum or ignore.
3400!
3401 nnodes=ntilei(ng)*ntilej(ng)-1
3402 IF (aspv.eq.0.0_r8) THEN
3403 DO i=1,npts
3404 asend(i)=0.0_r8
3405 DO rank=0,nnodes
3406 asend(i)=asend(i)+arecv(i,rank)
3407 END DO
3408 END DO
3409 ELSE
3410 DO i=1,npts
3411 DO rank=0,nnodes
3412 IF (arecv(i,rank).ne.aspv) THEN
3413 asend(i)=arecv(i,rank)
3414 END IF
3415 END DO
3416 END DO
3417 END IF
3418!
3419! Load collected data in output 2D array.
3420!
3421 a=reshape(asend, asize)
3422
3423# elif defined ASSEMBLE_ALLREDUCE
3424!
3425! Collect data from all nodes as a reduced sum.
3426!
3427 CALL mpi_allreduce (asend, arecv, npts, mpi_integer, mpi_sum, &
3428 & mycomm, myerror)
3429 IF (myerror.ne.mpi_success) THEN
3430 CALL mpi_error_string (myerror, string, lstr, serror)
3431 lstr=len_trim(string)
3432 WRITE (stdout,30) 'MPI_ALLREDUCE', myrank, myerror, &
3433 & string(1:lstr)
3434 exit_flag=2
3435 RETURN
3436 END IF
3437!
3438! Load collected data.
3439!
3440 a=reshape(arecv, asize)
3441
3442# elif defined ASSEMBLE_SENDRECV
3443!
3444 IF (myrank.eq.mymaster) THEN
3445!
3446! If master node, allocate and receive buffer.
3447!
3448 IF (.not.allocated(arecv)) THEN
3449 allocate (arecv(npts))
3450 END IF
3451!
3452! If master node, loop over other nodes to receive and accumulate the
3453! data.
3454!
3455 DO rank=1,ntilei(ng)*ntilej(ng)-1
3456 CALL mpi_irecv (arecv, npts, mpi_integer, rank, rank+5, &
3457 & mycomm, request, myerror)
3458 CALL mpi_wait (request, status, myerror)
3459 IF (myerror.ne.mpi_success) THEN
3460 CALL mpi_error_string (myerror, string, lstr, serror)
3461 lstr=len_trim(string)
3462 WRITE (stdout,30) 'MPI_IRECV', rank, myerror, string(1:lstr)
3463 exit_flag=2
3464 RETURN
3465 END IF
3466 DO i=1,npts
3467 asend(i)=asend(i)+arecv(i)
3468 END DO
3469 END DO
3470 deallocate (arecv)
3471!
3472! Load collected data in output 2D array.
3473!
3474 a=reshape(asend, asize)
3475!
3476! Otherwise, send data to master node.
3477!
3478 ELSE
3479 CALL mpi_isend (asend, npts, mpi_integer, mymaster, myrank+5, &
3480 & mycomm, request, myerror)
3481 CALL mpi_wait (request, status, myerror)
3482 IF (myerror.ne.mpi_success) THEN
3483 CALL mpi_error_string (myerror, string, lstr, serror)
3484 lstr=len_trim(string)
3485 WRITE (stdout,30) 'MPI_ISEND', myrank, myerror, string(1:lstr)
3486 exit_flag=2
3487 RETURN
3488 END IF
3489 END IF
3490!
3491! Broadcast accumulated (full) data to all nodes.
3492!
3493 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
3494 IF (myerror.ne.mpi_success) THEN
3495 CALL mpi_error_string (myerror, string, lstr, serror)
3496 lstr=len_trim(string)
3497 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3498 exit_flag=2
3499 RETURN
3500 END IF
3501# endif
3502# ifdef PROFILE
3503!
3504!-----------------------------------------------------------------------
3505! Turn off time clocks.
3506!-----------------------------------------------------------------------
3507!
3508 CALL wclock_off (ng, model, 70, __line__, myfile)
3509# endif
3510!
3511 10 FORMAT (/,' MP_ASSEMBLEI_2D - inconsistent array size, Npts = ', &
3512 & i10,2x,i10,/,19x,'number of addressed array elements ', &
3513 & 'is incorrect.')
3514 20 FORMAT (/,' MP_ASSEMBLEI_2D - illegal special value, Aspv = ',i4, &
3515 & /,19x,'a zero value is needed for global reduction.')
3516 30 FORMAT (/,' MP_ASSEMBLEI_2D - error during ',a,' call, Task = ', &
3517 & i3.3,' Error = ',i3,/,19x,a)
3518!
3519 RETURN
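For reference, a hedged usage sketch of the generic mp_assemble interface that wraps the routine above: each rank pre-fills the work array with the special value, writes only the points it owns, and the call returns the merged array on every rank. With Aspv = 0 the contributions are summed, as in the reduction branch above. Names such as Im, Jm, and Awrk are illustrative only; MyRank, NtileI, and NtileJ are used exactly as in the listing above.

!  Illustrative usage sketch only (not part of distribute.F).
      integer, parameter :: Im = 10, Jm = 8
      integer, parameter :: Aspv = 0              ! zero => global sum
      integer :: i, j, Awrk(Im,Jm)
!
      Awrk=Aspv                                   ! mark every point as missing
      DO j=1,Jm
        DO i=1,Im
          IF (MOD(i+j,NtileI(ng)*NtileJ(ng)).eq.MyRank) THEN
            Awrk(i,j)=i+j                         ! points owned by this rank
          END IF
        END DO
      END DO
      CALL mp_assemble (ng, iNLM, Im*Jm, Aspv, Awrk)   ! merged copy everywhere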

◆ mp_barrier()

subroutine distribute_mod::mp_barrier ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in), optional inpcomm )

Definition at line 125 of file distribute.F.

126!
127!***********************************************************************
128! !
129! This routine blocks the caller until all group members have called !
130! it. !
131! !
132! On Input: !
133! !
134! ng Nested grid number. !
135! model Calling model identifier. !
136! InpComm Communicator handle (integer, OPTIONAL). !
137! !
138!***********************************************************************
139!
140! Imported variable declarations.
141!
142 integer, intent(in) :: ng, model
143
144 integer, intent(in), optional :: InpComm
145!
146! Local variable declarations.
147!
148 integer :: MyCOMM, MyError
149!
150 character (len=*), parameter :: MyFile = &
151 & __FILE__//", mp_barrier"
152
153# ifdef PROFILE
154!
155!-----------------------------------------------------------------------
156! Turn on time clocks.
157!-----------------------------------------------------------------------
158!
159 CALL wclock_on (ng, model, 72, __line__, myfile)
160# endif
161# ifdef MPI
162!
163!-----------------------------------------------------------------------
164! Set distributed-memory communicator handle (context ID).
165!-----------------------------------------------------------------------
166!
167 IF (PRESENT(inpcomm)) THEN
168 mycomm=inpcomm
169 ELSE
170 mycomm=ocn_comm_world
171 END IF
172# endif
173!
174!-----------------------------------------------------------------------
175! Synchronize all distributed-memory nodes in the group.
176!-----------------------------------------------------------------------
177!
178# ifdef MPI
179 CALL mpi_barrier (mycomm, myerror)
180# endif
181# ifdef PROFILE
182!
183!-----------------------------------------------------------------------
184! Turn off time clocks.
185!-----------------------------------------------------------------------
186!
187 CALL wclock_off (ng, model, 72, __line__, myfile)
188# endif
189!
190 RETURN

References mod_parallel::ocn_comm_world, wclock_off(), and wclock_on().

Referenced by strings_mod::globalerror(), strings_mod::taskerror(), wclock_off(), and wclock_on().
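A hedged usage sketch: synchronize every rank before a timing or I/O section, either on the default communicator or on an explicitly supplied handle (FullComm below is hypothetical).

!  Illustrative sketch only: block until every rank in the group arrives.
      CALL mp_barrier (ng, iNLM)               ! default OCN_COMM_WORLD
      CALL mp_barrier (ng, iNLM, FullComm)     ! hypothetical explicit handle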


◆ mp_bcast_struc()

subroutine distribute_mod::mp_bcast_struc ( integer, intent(in) ng,
integer, intent(in) model,
type(t_io), dimension(:), intent(inout) s,
integer, intent(in), optional inpcomm )

Definition at line 1923 of file distribute.F.

1924!
1925!***********************************************************************
1926! !
1927! This routine broadcasts the NetCDF IDs of a TYPE_IO structure to !
1928! all processors in the communicator. It is called by all the !
1929! members in the group. !
1930! !
1931! On Input: !
1932! !
1933! ng Nested grid number. !
1934! model Calling model identifier. !
1935! S ROMS I/O structure, TYPE(T_IO). !
1936! InpComm Communicator handle (integer, OPTIONAL). !
1937! !
1938! On Output: !
1939! !
1940! S Broadcasted ROMS I/O structure. !
1941! !
1942!***********************************************************************
1943!
1944! Imported variable declarations.
1945!
1946 integer, intent(in) :: ng, model
1947
1948 integer, intent(in), optional :: InpComm
1949!
1950 TYPE(T_IO), intent(inout) :: S(:)
1951!
1952! Local variable declarations
1953!
1954 integer :: Lstr, MyCOMM, MyError, Nchars, Npts, Serror
1955 integer :: ibuffer(5)
1956!
1957 character (len=MPI_MAX_ERROR_STRING) :: string
1958
1959 character (len=*), parameter :: MyFile = &
1960 & __FILE__//", mp_bcast_struc"
1961
1962# ifdef PROFILE
1963!
1964!-----------------------------------------------------------------------
1965! Turn on time clocks.
1966!-----------------------------------------------------------------------
1967!
1968 CALL wclock_on (ng, model, 64, __line__, myfile)
1969# endif
1970# ifdef MPI
1971!
1972!-----------------------------------------------------------------------
1973! Set distributed-memory communicator handle (context ID).
1974!-----------------------------------------------------------------------
1975!
1976 IF (PRESENT(inpcomm)) THEN
1977 mycomm=inpcomm
1978 ELSE
1979 mycomm=ocn_comm_world
1980 END IF
1981# endif
1982!
1983!-----------------------------------------------------------------------
1984! Broadcast variables in structure.
1985!-----------------------------------------------------------------------
1986
1987# ifdef MPI
1988!
1989! Structure scalar integer variables.
1990!
1991 ibuffer(1)=s(ng)%Nfiles
1992 ibuffer(2)=s(ng)%Fcount
1993 ibuffer(3)=s(ng)%load
1994 ibuffer(4)=s(ng)%Rindex
1995 ibuffer(5)=s(ng)%ncid
1996!
1997 npts=5
1998 CALL mpi_bcast (ibuffer, npts, mpi_integer, mymaster, &
1999 & mycomm, myerror)
2000 IF (myerror.ne.mpi_success) THEN
2001 CALL mpi_error_string (myerror, string, lstr, serror)
2002 lstr=len_trim(string)
2003 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2004 10 FORMAT (/,' MP_BCAST_STRUC - error during ',a,' call, Task = ', &
2005 & i3.3,' Error = ',i3,/,13x,a)
2006 exit_flag=2
2007 RETURN
2008 ELSE
2009 s(ng)%Nfiles=ibuffer(1)
2010 s(ng)%Fcount=ibuffer(2)
2011 s(ng)%load =ibuffer(3)
2012 s(ng)%Rindex=ibuffer(4)
2013 s(ng)%ncid =ibuffer(5)
2014 END IF
2015!
2016! Variables IDs.
2017!
2018 npts=ubound(s(ng)%Vid, dim=1)
2019 CALL mpi_bcast (s(ng)%Vid, npts, mpi_integer, mymaster, &
2020 & mycomm, myerror)
2021 IF (myerror.ne.mpi_success) THEN
2022 CALL mpi_error_string (myerror, string, lstr, serror)
2023 lstr=len_trim(string)
2024 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2025 exit_flag=2
2026 RETURN
2027 END IF
2028
2029# ifdef SOLVE3D
2030!
2031! Tracer variables IDs.
2032!
2033 npts=ubound(s(ng)%Tid, dim=1)
2034 CALL mpi_bcast (s(ng)%Tid, npts, mpi_integer, mymaster, &
2035 & mycomm, myerror)
2036 IF (myerror.ne.mpi_success) THEN
2037 CALL mpi_error_string (myerror, string, lstr, serror)
2038 lstr=len_trim(string)
2039 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2040 exit_flag=2
2041 RETURN
2042 END IF
2043# endif
2044!
2045! Structure Filenames.
2046!
2047 nchars=len(s(ng)%head)
2048 CALL mpi_bcast (s(ng)%head, nchars, mpi_byte, mymaster, &
2049 & mycomm, myerror)
2050 IF (myerror.ne.mpi_success) THEN
2051 CALL mpi_error_string (myerror, string, lstr, serror)
2052 lstr=len_trim(string)
2053 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2054 exit_flag=2
2055 RETURN
2056 END IF
2057!
2058 nchars=len(s(ng)%base)
2059 CALL mpi_bcast (s(ng)%base, nchars, mpi_byte, mymaster, &
2060 & mycomm, myerror)
2061 IF (myerror.ne.mpi_success) THEN
2062 CALL mpi_error_string (myerror, string, lstr, serror)
2063 lstr=len_trim(string)
2064 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2065 exit_flag=2
2066 RETURN
2067 END IF
2068!
2069 nchars=len(s(ng)%name)
2070 CALL mpi_bcast (s(ng)%name, nchars, mpi_byte, mymaster, &
2071 & mycomm, myerror)
2072 IF (myerror.ne.mpi_success) THEN
2073 CALL mpi_error_string (myerror, string, lstr, serror)
2074 lstr=len_trim(string)
2075 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2076 exit_flag=2
2077 RETURN
2078 END IF
2079!
2080 nchars=len(s(ng)%files(1))*s(ng)%Nfiles
2081 CALL mpi_bcast (s(ng)%files, nchars, mpi_byte, mymaster, &
2082 & mycomm, myerror)
2083 IF (myerror.ne.mpi_success) THEN
2084 CALL mpi_error_string (myerror, string, lstr, serror)
2085 lstr=len_trim(string)
2086 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
2087 exit_flag=2
2088 RETURN
2089 END IF
2090# endif
2091# ifdef PROFILE
2092!
2093!-----------------------------------------------------------------------
2094! Turn off time clocks.
2095!-----------------------------------------------------------------------
2096!
2097 CALL wclock_off (ng, model, 64, __line__, myfile)
2098# endif
2099!
2100 RETURN

References mod_scalars::exit_flag, mod_parallel::mymaster, mod_parallel::myrank, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().
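A hedged usage sketch: typically only the master task opens or creates a NetCDF file, so the structure's IDs are meaningful on that rank alone until they are broadcast. HIS is assumed to be a ROMS I/O structure of TYPE(T_IO); MyFileID is a made-up stand-in for whatever ID the master obtained when opening the file.

!  Illustrative sketch only: make the master's NetCDF IDs consistent
!  on every rank in the group.
      IF (MyRank.eq.MyMaster) THEN
        HIS(ng)%ncid=MyFileID                  ! known only on the master (assumed)
        HIS(ng)%Rindex=0
      END IF
      CALL mp_bcast_struc (ng, iNLM, HIS)      ! IDs now valid group-wide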


◆ mp_bcastf_0d()

subroutine distribute_mod::mp_bcastf_0d ( integer, intent(in) ng,
integer, intent(in) model,
real(r8), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 560 of file distribute.F.

561!
562!***********************************************************************
563! !
564! This routine broadcasts a floating-point scalar variable to all !
565! processors in the communicator. It is called by all the members !
566! in the group. !
567! !
568! On Input: !
569! !
570! ng Nested grid number. !
571! model Calling model identifier. !
572! A Variable to broadcast (real). !
573! InpComm Communicator handle (integer, OPTIONAL). !
574! !
575! On Output: !
576! !
577! A Broadcasted variable. !
578! !
579!***********************************************************************
580!
581! Imported variable declarations.
582!
583 integer, intent(in) :: ng, model
584
585 integer, intent(in), optional :: InpComm
586!
587 real(r8), intent(inout) :: A
588!
589! Local variable declarations
590!
591 integer :: Lstr, MyCOMM, MyError, Npts, Serror
592!
593 character (len=MPI_MAX_ERROR_STRING) :: string
594
595 character (len=*), parameter :: MyFile = &
596 & __FILE__//", mp_bcastf_0d"
597
598# ifdef PROFILE
599!
600!-----------------------------------------------------------------------
601! Turn on time clocks.
602!-----------------------------------------------------------------------
603!
604 CALL wclock_on (ng, model, 64, __line__, myfile)
605# endif
606# ifdef MPI
607!
608!-----------------------------------------------------------------------
609! Set distributed-memory communicator handle (context ID).
610!-----------------------------------------------------------------------
611!
612 IF (PRESENT(inpcomm)) THEN
613 mycomm=inpcomm
614 ELSE
615 mycomm=ocn_comm_world
616 END IF
617# endif
618!
619!-----------------------------------------------------------------------
620! Broadcast requested variable.
621!-----------------------------------------------------------------------
622!
623 npts=1
624# ifdef MPI
625 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
626 IF (myerror.ne.mpi_success) THEN
627 CALL mpi_error_string (myerror, string, lstr, serror)
628 lstr=len_trim(string)
629 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
630 10 FORMAT (/,' MP_BCASTF_0D - error during ',a,' call, Task = ', &
631 & i3.3,' Error = ',i3,/,13x,a)
632 exit_flag=2
633 RETURN
634 END IF
635# endif
636# ifdef PROFILE
637!
638!-----------------------------------------------------------------------
639! Turn off time clocks.
640!-----------------------------------------------------------------------
641!
642 CALL wclock_off (ng, model, 64, __line__, myfile)
643# endif
644!
645 RETURN
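The remaining mp_bcastf_* routines differ only in array rank and real kind; the generic mp_bcastf interface selects the specific routine from the actual argument. A hedged usage sketch (dt_local and Cwrk are made-up names):

!  Illustrative sketch only: scalar and array broadcasts through the
!  generic interface.
      real(r8) :: dt_local
      real(r8) :: Cwrk(10,20)
!
      dt_local=0.0_r8
      Cwrk=0.0_r8
      IF (MyRank.eq.MyMaster) dt_local=300.0_r8   ! value known on master only
      CALL mp_bcastf (ng, iNLM, dt_local)         ! resolves to mp_bcastf_0d
      CALL mp_bcastf (ng, iNLM, Cwrk)             ! resolves to mp_bcastf_2d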

◆ mp_bcastf_0dp()

subroutine distribute_mod::mp_bcastf_0dp ( integer, intent(in) ng,
integer, intent(in) model,
real(dp), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 195 of file distribute.F.

196!
197!***********************************************************************
198! !
199! This routine broadcasts a double-precision scalar variable to all !
200! processors in the communicator. It is called by all the members in !
201! the group. !
202! !
203! On Input: !
204! !
205! ng Nested grid number. !
206! model Calling model identifier. !
207! A Variable to broadcast (real). !
208! InpComm Communicator handle (integer, OPTIONAL). !
209! !
210! On Output: !
211! !
212! A Broadcasted variable. !
213! !
214!***********************************************************************
215!
216! Imported variable declarations.
217!
218 integer, intent(in) :: ng, model
219
220 integer, intent(in), optional :: InpComm
221
222 real(dp), intent(inout) :: A
223!
224! Local variable declarations
225!
226 integer :: Lstr, MyCOMM, MyError, Npts, Serror
227!
228 character (len=MPI_MAX_ERROR_STRING) :: string
229
230 character (len=*), parameter :: MyFile = &
231 & __FILE__//", mp_bcastf_0dp"
232
233# ifdef PROFILE
234!
235!-----------------------------------------------------------------------
236! Turn on time clocks.
237!-----------------------------------------------------------------------
238!
239 CALL wclock_on (ng, model, 64, __line__, myfile)
240# endif
241# ifdef MPI
242!
243!-----------------------------------------------------------------------
244! Set distributed-memory communicator handle (context ID).
245!-----------------------------------------------------------------------
246!
247 IF (PRESENT(inpcomm)) THEN
248 mycomm=inpcomm
249 ELSE
250 mycomm=ocn_comm_world
251 END IF
252# endif
253!
254!-----------------------------------------------------------------------
255! Broadcast requested variable.
256!-----------------------------------------------------------------------
257!
258 npts=1
259# ifdef MPI
260 CALL mpi_bcast (a, npts, mp_double, mymaster, mycomm, myerror)
261 IF (myerror.ne.mpi_success) THEN
262 CALL mpi_error_string (myerror, string, lstr, serror)
263 lstr=len_trim(string)
264 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
265 10 FORMAT (/,' MP_BCASTF_0DP - error during ',a,' call, Task = ', &
266 & i3.3,' Error = ',i3,/,13x,a)
267 exit_flag=2
268 RETURN
269 END IF
270# endif
271# ifdef PROFILE
272!
273!-----------------------------------------------------------------------
274! Turn off time clocks.
275!-----------------------------------------------------------------------
276!
277 CALL wclock_off (ng, model, 64, __line__, myfile)
278# endif
279!
280 RETURN

◆ mp_bcastf_1d()

subroutine distribute_mod::mp_bcastf_1d ( integer, intent(in) ng,
integer, intent(in) model,
real(r8), dimension(:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 648 of file distribute.F.

649!
650!***********************************************************************
651! !
652! This routine broadcasts a 1D floating-point, non-tiled, array !
653! to all processors in the communicator. It is called by all the !
654! members in the group. !
655! !
656! On Input: !
657! !
658! ng Nested grid number. !
659! model Calling model identifier. !
660! A 1D array to broadcast (real). !
661! InpComm Communicator handle (integer, OPTIONAL). !
662! !
663! On Output: !
664! !
665! A Broadcasted 1D array. !
666! !
667!***********************************************************************
668!
669! Imported variable declarations.
670!
671 integer, intent(in) :: ng, model
672
673 integer, intent(in), optional :: InpComm
674!
675 real(r8), intent(inout) :: A(:)
676!
677! Local variable declarations
678!
679 integer :: Lstr, MyCOMM, MyError, Npts, Serror
680!
681 character (len=MPI_MAX_ERROR_STRING) :: string
682
683 character (len=*), parameter :: MyFile = &
684 & __FILE__//", mp_bcastf_1d"
685
686# ifdef PROFILE
687!
688!-----------------------------------------------------------------------
689! Turn on time clocks.
690!-----------------------------------------------------------------------
691!
692 CALL wclock_on (ng, model, 64, __line__, myfile)
693# endif
694# ifdef MPI
695!
696!-----------------------------------------------------------------------
697! Set distributed-memory communicator handle (context ID).
698!-----------------------------------------------------------------------
699!
700 IF (PRESENT(inpcomm)) THEN
701 mycomm=inpcomm
702 ELSE
703 mycomm=ocn_comm_world
704 END IF
705# endif
706!
707!-----------------------------------------------------------------------
708! Broadcast requested variable.
709!-----------------------------------------------------------------------
710!
711 npts=ubound(a, dim=1)
712
713# ifdef MPI
714 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
715 IF (myerror.ne.mpi_success) THEN
716 CALL mpi_error_string (myerror, string, lstr, serror)
717 lstr=len_trim(string)
718 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
719 10 FORMAT (/,' MP_BCASTF_1D - error during ',a,' call, Task = ', &
720 & i3.3,' Error = ',i3,/,13x,a)
721 exit_flag=2
722 RETURN
723 END IF
724# endif
725# ifdef PROFILE
726!
727!-----------------------------------------------------------------------
728! Turn off time clocks.
729!-----------------------------------------------------------------------
730!
731 CALL wclock_off (ng, model, 64, __line__, myfile)
732# endif
733!
734 RETURN

◆ mp_bcastf_1dp()

subroutine distribute_mod::mp_bcastf_1dp ( integer, intent(in) ng,
integer, intent(in) model,
real(dp), dimension(:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 283 of file distribute.F.

284!
285!***********************************************************************
286! !
287! This routine broadcasts a 1D double-precision, non-tiled, array !
288! to all processors in the communicator. It is called by all the !
289! members in the group. !
290! !
291! On Input: !
292! !
293! ng Nested grid number. !
294! model Calling model identifier. !
295! A 1D array to broadcast (real). !
296! InpComm Communicator handle (integer, OPTIONAL). !
297! !
298! On Output: !
299! !
300! A Broadcasted 1D array. !
301! !
302!***********************************************************************
303!
304! Imported variable declarations.
305!
306 integer, intent(in) :: ng, model
307
308 integer, intent(in), optional :: InpComm
309!
310 real(dp), intent(inout) :: A(:)
311!
312! Local variable declarations
313!
314 integer :: Lstr, MyCOMM, MyError, Npts, Serror
315!
316 character (len=MPI_MAX_ERROR_STRING) :: string
317
318 character (len=*), parameter :: MyFile = &
319 & __FILE__//", mp_bcastf_1dp"
320
321# ifdef PROFILE
322!
323!-----------------------------------------------------------------------
324! Turn on time clocks.
325!-----------------------------------------------------------------------
326!
327 CALL wclock_on (ng, model, 64, __line__, myfile)
328# endif
329# ifdef MPI
330!
331!-----------------------------------------------------------------------
332! Set distributed-memory communicator handle (context ID).
333!-----------------------------------------------------------------------
334!
335 IF (PRESENT(inpcomm)) THEN
336 mycomm=inpcomm
337 ELSE
338 mycomm=ocn_comm_world
339 END IF
340# endif
341!
342!-----------------------------------------------------------------------
343! Broadcast requested variable.
344!-----------------------------------------------------------------------
345!
346 npts=ubound(a, dim=1)
347
348# ifdef MPI
349 CALL mpi_bcast (a, npts, mp_double, mymaster, mycomm, myerror)
350 IF (myerror.ne.mpi_success) THEN
351 CALL mpi_error_string (myerror, string, lstr, serror)
352 lstr=len_trim(string)
353 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
354 10 FORMAT (/,' MP_BCASTF_1DP - error during ',a,' call, Task = ', &
355 & i3.3,' Error = ',i3,/,13x,a)
356 exit_flag=2
357 RETURN
358 END IF
359# endif
360# ifdef PROFILE
361!
362!-----------------------------------------------------------------------
363! Turn off time clocks.
364!-----------------------------------------------------------------------
365!
366 CALL wclock_off (ng, model, 64, __line__, myfile)
367# endif
368!
369 RETURN

◆ mp_bcastf_2d()

subroutine distribute_mod::mp_bcastf_2d ( integer, intent(in) ng,
integer, intent(in) model,
real(r8), dimension(:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 737 of file distribute.F.

738!
739!***********************************************************************
740! !
741! This routine broadcasts a 2D floating-point, non-tiled, array !
742! to all processors in the communicator. It is called by all the !
743! members in the group. !
744! !
745! On Input: !
746! !
747! ng Nested grid number. !
748! model Calling model identifier. !
749! A 2D array to broadcast (real). !
750! InpComm Communicator handle (integer, OPTIONAL). !
751! !
752! On Output: !
753! !
754! A Broadcasted 2D array. !
755! !
756!***********************************************************************
757!
758! Imported variable declarations.
759!
760 integer, intent(in) :: ng, model
761
762 integer, intent(in), optional :: InpComm
763!
764 real(r8), intent(inout) :: A(:,:)
765!
766! Local variable declarations
767!
768 integer :: Lstr, MyCOMM, MyError, Npts, Serror
769
770 integer :: Asize(2)
771!
772 character (len=MPI_MAX_ERROR_STRING) :: string
773
774 character (len=*), parameter :: MyFile = &
775 & __FILE__//", mp_bcastf_2d"
776
777# ifdef PROFILE
778!
779!-----------------------------------------------------------------------
780! Turn on time clocks.
781!-----------------------------------------------------------------------
782!
783 CALL wclock_on (ng, model, 64, __line__, myfile)
784# endif
785# ifdef MPI
786!
787!-----------------------------------------------------------------------
788! Set distributed-memory communicator handle (context ID).
789!-----------------------------------------------------------------------
790!
791 IF (PRESENT(inpcomm)) THEN
792 mycomm=inpcomm
793 ELSE
794 mycomm=ocn_comm_world
795 END IF
796# endif
797!
798!-----------------------------------------------------------------------
799! Broadcast requested variable.
800!-----------------------------------------------------------------------
801!
802 asize(1)=ubound(a, dim=1)
803 asize(2)=ubound(a, dim=2)
804 npts=asize(1)*asize(2)
805
806# ifdef MPI
807 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
808 IF (myerror.ne.mpi_success) THEN
809 CALL mpi_error_string (myerror, string, lstr, serror)
810 lstr=len_trim(string)
811 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
812 10 FORMAT (/,' MP_BCASTF_2D - error during ',a,' call, Task = ', &
813 & i3.3,' Error = ',i3,/,13x,a)
814 exit_flag=2
815 RETURN
816 END IF
817# endif
818# ifdef PROFILE
819!
820!-----------------------------------------------------------------------
821! Turn off time clocks.
822!-----------------------------------------------------------------------
823!
824 CALL wclock_off (ng, model, 64, __line__, myfile)
825# endif
826!
827 RETURN

◆ mp_bcastf_2dp()

subroutine distribute_mod::mp_bcastf_2dp ( integer, intent(in) ng,
integer, intent(in) model,
real(dp), dimension(:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 372 of file distribute.F.

373!
374!***********************************************************************
375! !
376! This routine broadcasts a 2D double-precision, non-tiled, array !
377! to all processors in the communicator. It is called by all the !
378! members in the group. !
379! !
380! On Input: !
381! !
382! ng Nested grid number. !
383! model Calling model identifier. !
384! A 2D array to broadcast (real). !
385! InpComm Communicator handle (integer, OPTIONAL). !
386! !
387! On Output: !
388! !
389! A Broadcasted 2D array. !
390! !
391!***********************************************************************
392!
393! Imported variable declarations.
394!
395 integer, intent(in) :: ng, model
396
397 integer, intent(in), optional :: InpComm
398!
399 real(dp), intent(inout) :: A(:,:)
400!
401! Local variable declarations
402!
403 integer :: Lstr, MyCOMM, MyError, Npts, Serror
404
405 integer :: Asize(2)
406!
407 character (len=MPI_MAX_ERROR_STRING) :: string
408
409 character (len=*), parameter :: MyFile = &
410 & __FILE__//", mp_bcastf_2dp"
411
412# ifdef PROFILE
413!
414!-----------------------------------------------------------------------
415! Turn on time clocks.
416!-----------------------------------------------------------------------
417!
418 CALL wclock_on (ng, model, 64, __line__, myfile)
419# endif
420# ifdef MPI
421!
422!-----------------------------------------------------------------------
423! Set distributed-memory communicator handle (context ID).
424!-----------------------------------------------------------------------
425!
426 IF (PRESENT(inpcomm)) THEN
427 mycomm=inpcomm
428 ELSE
429 mycomm=ocn_comm_world
430 END IF
431# endif
432!
433!-----------------------------------------------------------------------
434! Broadcast requested variable.
435!-----------------------------------------------------------------------
436!
437 asize(1)=ubound(a, dim=1)
438 asize(2)=ubound(a, dim=2)
439 npts=asize(1)*asize(2)
440
441# ifdef MPI
442 CALL mpi_bcast (a, npts, mp_double, mymaster, mycomm, myerror)
443 IF (myerror.ne.mpi_success) THEN
444 CALL mpi_error_string (myerror, string, lstr, serror)
445 lstr=len_trim(string)
446 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
447 10 FORMAT (/,' MP_BCASTF_2DP - error during ',a,' call, Task = ', &
448 & i3.3,' Error = ',i3,/,13x,a)
449 exit_flag=2
450 RETURN
451 END IF
452# endif
453# ifdef PROFILE
454!
455!-----------------------------------------------------------------------
456! Turn off time clocks.
457!-----------------------------------------------------------------------
458!
459 CALL wclock_off (ng, model, 64, __line__, myfile)
460# endif
461!
462 RETURN

◆ mp_bcastf_3d()

subroutine distribute_mod::mp_bcastf_3d ( integer, intent(in) ng,
integer, intent(in) model,
real(r8), dimension(:,:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 830 of file distribute.F.

831!
832!***********************************************************************
833! !
834! This routine broadcasts a 3D floating-point, non-tiled, array !
835! to all processors in the communicator. It is called by all the !
836! members in the group. !
837! !
838! On Input: !
839! !
840! ng Nested grid number. !
841! model Calling model identifier. !
842! A 3D array to broadcast (real). !
843! InpComm Communicator handle (integer, OPTIONAL). !
844! !
845! On Output: !
846! !
847! A Broadcasted 3D array. !
848! !
849!***********************************************************************
850!
851! Imported variable declarations.
852!
853 integer, intent(in) :: ng, model
854
855 integer, intent(in), optional :: InpComm
856!
857 real(r8), intent(inout) :: A(:,:,:)
858!
859! Local variable declarations
860!
861 integer :: Lstr, MyCOMM, MyError, Npts, Serror
862
863 integer :: Asize(3)
864!
865 character (len=MPI_MAX_ERROR_STRING) :: string
866
867 character (len=*), parameter :: MyFile = &
868 & __FILE__//", mp_bcastf_3d"
869
870# ifdef PROFILE
871!
872!-----------------------------------------------------------------------
873! Turn on time clocks.
874!-----------------------------------------------------------------------
875!
876 CALL wclock_on (ng, model, 64, __line__, myfile)
877# endif
878# ifdef MPI
879!
880!-----------------------------------------------------------------------
881! Set distributed-memory communicator handle (context ID).
882!-----------------------------------------------------------------------
883!
884 IF (PRESENT(inpcomm)) THEN
885 mycomm=inpcomm
886 ELSE
887 mycomm=ocn_comm_world
888 END IF
889# endif
890!
891!-----------------------------------------------------------------------
892! Broadcast requested variable.
893!-----------------------------------------------------------------------
894!
895 asize(1)=ubound(a, dim=1)
896 asize(2)=ubound(a, dim=2)
897 asize(3)=ubound(a, dim=3)
898 npts=asize(1)*asize(2)*asize(3)
899
900# ifdef MPI
901 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
902 IF (myerror.ne.mpi_success) THEN
903 CALL mpi_error_string (myerror, string, lstr, serror)
904 lstr=len_trim(string)
905 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
906 10 FORMAT (/,' MP_BCASTF_3D - error during ',a,' call, Task = ', &
907 & i3.3,' Error = ',i3,/,13x,a)
908 exit_flag=2
909 RETURN
910 END IF
911# endif
912# ifdef PROFILE
913!
914!-----------------------------------------------------------------------
915! Turn off time clocks.
916!-----------------------------------------------------------------------
917!
918 CALL wclock_off (ng, model, 64, __line__, myfile)
919# endif
920!
921 RETURN

◆ mp_bcastf_3dp()

subroutine distribute_mod::mp_bcastf_3dp ( integer, intent(in) ng,
integer, intent(in) model,
real(dp), dimension(:,:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 465 of file distribute.F.

466!
467!***********************************************************************
468! !
469! This routine broadcasts a 3D double-precision, non-tiled, array !
470! to all processors in the communicator. It is called by all the !
471! members in the group. !
472! !
473! On Input: !
474! !
475! ng Nested grid number. !
476! model Calling model identifier. !
477! A 3D array to broadcast (real). !
478! InpComm Communicator handle (integer, OPTIONAL). !
479! !
480! On Output: !
481! !
482! A Broadcasted 3D array. !
483! !
484!***********************************************************************
485!
486! Imported variable declarations.
487!
488 integer, intent(in) :: ng, model
489
490 integer, intent(in), optional :: InpComm
491!
492 real(dp), intent(inout) :: A(:,:,:)
493!
494! Local variable declarations
495!
496 integer :: Lstr, MyCOMM, MyError, Npts, Serror
497
498 integer :: Asize(3)
499!
500 character (len=MPI_MAX_ERROR_STRING) :: string
501
502 character (len=*), parameter :: MyFile = &
503 & __FILE__//", mp_bcastf_3dp"
504
505# ifdef PROFILE
506!
507!-----------------------------------------------------------------------
508! Turn on time clocks.
509!-----------------------------------------------------------------------
510!
511 CALL wclock_on (ng, model, 64, __line__, myfile)
512# endif
513# ifdef MPI
514!
515!-----------------------------------------------------------------------
516! Set distributed-memory communicator handle (context ID).
517!-----------------------------------------------------------------------
518!
519 IF (PRESENT(inpcomm)) THEN
520 mycomm=inpcomm
521 ELSE
522 mycomm=ocn_comm_world
523 END IF
524# endif
525!
526!-----------------------------------------------------------------------
527! Broadcast requested variable.
528!-----------------------------------------------------------------------
529!
530 asize(1)=ubound(a, dim=1)
531 asize(2)=ubound(a, dim=2)
532 asize(3)=ubound(a, dim=3)
533 npts=asize(1)*asize(2)*asize(3)
534
535# ifdef MPI
536 CALL mpi_bcast (a, npts, mp_double, mymaster, mycomm, myerror)
537 IF (myerror.ne.mpi_success) THEN
538 CALL mpi_error_string (myerror, string, lstr, serror)
539 lstr=len_trim(string)
540 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
541 10 FORMAT (/,' MP_BCASTF_3DP - error during ',a,' call, Task = ', &
542 & i3.3,' Error = ',i3,/,13x,a)
543 exit_flag=2
544 RETURN
545 END IF
546# endif
547# ifdef PROFILE
548!
549!-----------------------------------------------------------------------
550! Turn off time clocks.
551!-----------------------------------------------------------------------
552!
553 CALL wclock_off (ng, model, 64, __line__, myfile)
554# endif
555!
556 RETURN

◆ mp_bcastf_4d()

subroutine distribute_mod::mp_bcastf_4d ( integer, intent(in) ng,
integer, intent(in) model,
real(r8), dimension(:,:,:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 924 of file distribute.F.

925!
926!***********************************************************************
927! !
928! This routine broadcasts a 4D floating-point, non-tiled, array !
929! to all processors in the communicator. It is called by all the !
930! members in the group. !
931! !
932! On Input: !
933! !
934! ng Nested grid number. !
935! model Calling model identifier. !
936! A 4D array to broadcast (real). !
937! !
938! On Output: !
939! !
940! A Broadcasted 4D array. !
941! !
942!***********************************************************************
943!
944! Imported variable declarations.
945!
946 integer, intent(in) :: ng, model
947
948 integer, intent(in), optional :: InpComm
949!
950 real(r8), intent(inout) :: A(:,:,:,:)
951!
952! Local variable declarations
953!
954 integer :: Lstr, MyCOMM, MyError, Npts, Serror
955
956 integer :: Asize(4)
957!
958 character (len=MPI_MAX_ERROR_STRING) :: string
959
960 character (len=*), parameter :: MyFile = &
961 & __FILE__//", mp_bcastf_4d"
962
963# ifdef PROFILE
964!
965!-----------------------------------------------------------------------
966! Turn on time clocks.
967!-----------------------------------------------------------------------
968!
969 CALL wclock_on (ng, model, 64, __line__, myfile)
970# endif
971# ifdef MPI
972!
973!-----------------------------------------------------------------------
974! Set distributed-memory communicator handle (context ID).
975!-----------------------------------------------------------------------
976!
977 IF (PRESENT(inpcomm)) THEN
978 mycomm=inpcomm
979 ELSE
980 mycomm=ocn_comm_world
981 END IF
982# endif
983!
984!-----------------------------------------------------------------------
985! Broadcast requested variable.
986!-----------------------------------------------------------------------
987!
988 asize(1)=ubound(a, dim=1)
989 asize(2)=ubound(a, dim=2)
990 asize(3)=ubound(a, dim=3)
991 asize(4)=ubound(a, dim=4)
992 npts=asize(1)*asize(2)*asize(3)*asize(4)
993
994# ifdef MPI
995 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
996 IF (myerror.ne.mpi_success) THEN
997 CALL mpi_error_string (myerror, string, lstr, serror)
998 lstr=len_trim(string)
999 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1000 10 FORMAT (/,' MP_BCASTF_4D - error during ',a,' call, Task = ', &
1001 & i3.3,' Error = ',i3,/,13x,a)
1002 exit_flag=2
1003 RETURN
1004 END IF
1005# endif
1006# ifdef PROFILE
1007!
1008!-----------------------------------------------------------------------
1009! Turn off time clocks.
1010!-----------------------------------------------------------------------
1011!
1012 CALL wclock_off (ng, model, 64, __line__, myfile)
1013# endif
1014!
1015 RETURN

◆ mp_bcasti_0d()

subroutine distribute_mod::mp_bcasti_0d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1018 of file distribute.F.

1019!
1020!***********************************************************************
1021! !
1022! This routine broadcasts an integer scalar variable to all !
1023! processors in the communicator. It is called by all the !
1024! members in the group. !
1025! !
1026! On Input: !
1027! !
1028! ng Nested grid number. !
1029! model Calling model identifier. !
1030! A Variable to broadcast (integer). !
1031! InpComm Communicator handle (integer, OPTIONAL). !
1032! !
1033! On Output: !
1034! !
1035! A Broadcasted variable. !
1036! !
1037!***********************************************************************
1038!
1039! Imported variable declarations.
1040!
1041 integer, intent(in) :: ng, model
1042
1043 integer, intent(in), optional :: InpComm
1044
1045 integer, intent(inout) :: A
1046!
1047! Local variable declarations
1048!
1049 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1050!
1051 character (len=MPI_MAX_ERROR_STRING) :: string
1052
1053 character (len=*), parameter :: MyFile = &
1054 & __FILE__//", mp_bcasti_0d"
1055
1056# ifdef PROFILE
1057!
1058!-----------------------------------------------------------------------
1059! Turn on time clocks.
1060!-----------------------------------------------------------------------
1061!
1062 IF (lwclock) THEN
1063 CALL wclock_on (ng, model, 64, __line__, myfile)
1064 END IF
1065# endif
1066# ifdef MPI
1067!
1068!-----------------------------------------------------------------------
1069! Set distributed-memory communicator handle (context ID).
1070!-----------------------------------------------------------------------
1071!
1072 IF (PRESENT(inpcomm)) THEN
1073 mycomm=inpcomm
1074 ELSE
1075 mycomm=ocn_comm_world
1076 END IF
1077# endif
1078!
1079!-----------------------------------------------------------------------
1080! Broadcast requested variable.
1081!-----------------------------------------------------------------------
1082!
1083 npts=1
1084# ifdef MPI
1085 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, &
1086 & myerror)
1087 IF (myerror.ne.mpi_success) THEN
1088 CALL mpi_error_string (myerror, string, lstr, serror)
1089 lstr=len_trim(string)
1090 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1091 10 FORMAT (/,' MP_BCASTI_0D - error during ',a,' call, Task = ', &
1092 & i3.3,' Error = ',i3,/,13x,a)
1093 RETURN
1094 END IF
1095# endif
1096# ifdef PROFILE
1097!
1098!-----------------------------------------------------------------------
1099! Turn off time clocks.
1100!-----------------------------------------------------------------------
1101!
1102 IF (lwclock) THEN
1103 CALL wclock_off (ng, model, 64, __line__, myfile)
1104 END IF
1105# endif
1106!
1107 RETURN
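Note that, unlike most of the broadcast routines here, this scalar form does not set exit_flag when the MPI call fails, presumably because it is routinely used to broadcast exit_flag itself. A hedged usage sketch (io_failed is a made-up local flag; NoError is assumed to be the ROMS success code):

!  Illustrative sketch only: propagate the master's error state so that
!  all ranks take the same exit path.
      IF (MyRank.eq.MyMaster) THEN
        IF (io_failed) exit_flag=3
      END IF
      CALL mp_bcasti (ng, iNLM, exit_flag)     ! resolves to mp_bcasti_0d
      IF (exit_flag.ne.NoError) RETURN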

◆ mp_bcasti_1d()

subroutine distribute_mod::mp_bcasti_1d ( integer, intent(in) ng,
integer, intent(in) model,
integer, dimension(:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1110 of file distribute.F.

1111!
1112!***********************************************************************
1113! !
1114! This routine broadcasts a 1D non-tiled, integer array to all !
1115! processors in the communicator. It is called by all the !
1116! members in the group. !
1117! !
1118! On Input: !
1119! !
1120! ng Nested grid number. !
1121! model Calling model identifier. !
1122! A 1D array to broadcast (integer). !
1123! InpComm Communicator handle (integer, OPTIONAL). !
1124! !
1125! On Output: !
1126! !
1127! A Broadcasted 1D array. !
1128! !
1129!***********************************************************************
1130!
1131! Imported variable declarations.
1132!
1133 integer, intent(in) :: ng, model
1134
1135 integer, intent(in), optional :: InpComm
1136
1137 integer, intent(inout) :: A(:)
1138!
1139! Local variable declarations
1140!
1141 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1142!
1143 character (len=MPI_MAX_ERROR_STRING) :: string
1144
1145 character (len=*), parameter :: MyFile = &
1146 & __FILE__//", mp_bcasti_1d"
1147
1148# ifdef PROFILE
1149!
1150!-----------------------------------------------------------------------
1151! Turn on time clocks.
1152!-----------------------------------------------------------------------
1153!
1154 CALL wclock_on (ng, model, 64, __line__, myfile)
1155# endif
1156# ifdef MPI
1157!
1158!-----------------------------------------------------------------------
1159! Set distributed-memory communicator handle (context ID).
1160!-----------------------------------------------------------------------
1161!
1162 IF (PRESENT(inpcomm)) THEN
1163 mycomm=inpcomm
1164 ELSE
1165 mycomm=ocn_comm_world
1166 END IF
1167# endif
1168!
1169!-----------------------------------------------------------------------
1170! Broadcast requested variable.
1171!-----------------------------------------------------------------------
1172!
1173 npts=ubound(a, dim=1)
1174
1175# ifdef MPI
1176 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
1177 IF (myerror.ne.mpi_success) THEN
1178 CALL mpi_error_string (myerror, string, lstr, serror)
1179 lstr=len_trim(string)
1180 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1181 10 FORMAT (/,' MP_BCASTI_1D - error during ',a,' call, Task = ', &
1182 & i3.3,' Error = ',i3,/,13x,a)
1183 exit_flag=2
1184 RETURN
1185 END IF
1186# endif
1187# ifdef PROFILE
1188!
1189!-----------------------------------------------------------------------
1190! Turn off time clocks.
1191!-----------------------------------------------------------------------
1192!
1193 CALL wclock_off (ng, model, 64, __line__, myfile)
1194# endif
1195!
1196 RETURN

◆ mp_bcasti_2d()

subroutine distribute_mod::mp_bcasti_2d ( integer, intent(in) ng,
integer, intent(in) model,
integer, dimension(:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1199 of file distribute.F.

1200!
1201!***********************************************************************
1202! !
1203! This routine broadcasts a 2D non-tiled, integer array to all !
1204! processors in the communicator. It is called by all the !
1205! members in the group. !
1206! !
1207! On Input: !
1208! !
1209! ng Nested grid number. !
1210! model Calling model identifier. !
1211! A 2D array to broadcast (integer). !
1212! InpComm Communicator handle (integer, OPTIONAL). !
1213! !
1214! On Output: !
1215! !
1216! A Broadcasted 2D array. !
1217! !
1218!***********************************************************************
1219!
1220! Imported variable declarations.
1221!
1222 integer, intent(in) :: ng, model
1223
1224 integer, intent(in), optional :: InpComm
1225
1226 integer, intent(inout) :: A(:,:)
1227!
1228! Local variable declarations
1229!
1230 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1231 integer :: Asize(2)
1232!
1233 character (len=MPI_MAX_ERROR_STRING) :: string
1234
1235 character (len=*), parameter :: MyFile = &
1236 & __FILE__//", mp_bcasti_2d"
1237
1238# ifdef PROFILE
1239!
1240!-----------------------------------------------------------------------
1241! Turn on time clocks.
1242!-----------------------------------------------------------------------
1243!
1244 CALL wclock_on (ng, model, 64, __line__, myfile)
1245# endif
1246# ifdef MPI
1247!
1248!-----------------------------------------------------------------------
1249! Set distributed-memory communicator handle (context ID).
1250!-----------------------------------------------------------------------
1251!
1252 IF (PRESENT(inpcomm)) THEN
1253 mycomm=inpcomm
1254 ELSE
1255 mycomm=ocn_comm_world
1256 END IF
1257# endif
1258!
1259!-----------------------------------------------------------------------
1260! Broadcast requested variable.
1261!-----------------------------------------------------------------------
1262!
1263 asize(1)=ubound(a, dim=1)
1264 asize(2)=ubound(a, dim=2)
1265 npts=asize(1)*asize(2)
1266
1267# ifdef MPI
1268 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
1269 IF (myerror.ne.mpi_success) THEN
1270 CALL mpi_error_string (myerror, string, lstr, serror)
1271 lstr=len_trim(string)
1272 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1273 10 FORMAT (/,' MP_BCASTI_2D - error during ',a,' call, Task = ', &
1274 & i3.3,' Error = ',i3,/,13x,a)
1275 exit_flag=2
1276 RETURN
1277 END IF
1278# endif
1279# ifdef PROFILE
1280!
1281!-----------------------------------------------------------------------
1282! Turn off time clocks.
1283!-----------------------------------------------------------------------
1284!
1285 CALL wclock_off (ng, model, 64, __line__, myfile)
1286# endif
1287!
1288 RETURN

◆ mp_bcastl_0d()

subroutine distribute_mod::mp_bcastl_0d ( integer, intent(in) ng,
integer, intent(in) model,
logical, intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1291 of file distribute.F.

1292!
1293!***********************************************************************
1294! !
1295! This routine broadcasts a logical scalar variable to all !
1296! processors in the communicator. It is called by all the !
1297! members in the group. !
1298! !
1299! On Input: !
1300! !
1301! ng Nested grid number. !
1302! model Calling model identifier. !
1303! A Variable to broadcast (logical). !
1304! InpComm Communicator handle (integer, OPTIONAL). !
1305! !
1306! On Output: !
1307! !
1308! A Broadcasted variable. !
1309! !
1310!***********************************************************************
1311!
1312! Imported variable declarations.
1313!
1314 integer, intent(in) :: ng, model
1315
1316 integer, intent(in), optional :: InpComm
1317!
1318 logical, intent(inout) :: A
1319!
1320! Local variable declarations
1321!
1322 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1323!
1324 character (len=MPI_MAX_ERROR_STRING) :: string
1325
1326 character (len=*), parameter :: MyFile = &
1327 & __FILE__//", mp_bcastl_0d"
1328
1329# ifdef PROFILE
1330!
1331!-----------------------------------------------------------------------
1332! Turn on time clocks.
1333!-----------------------------------------------------------------------
1334!
1335 CALL wclock_on (ng, model, 64, __line__, myfile)
1336# endif
1337# ifdef MPI
1338!
1339!-----------------------------------------------------------------------
1340! Set distributed-memory communicator handle (context ID).
1341!-----------------------------------------------------------------------
1342!
1343 IF (PRESENT(inpcomm)) THEN
1344 mycomm=inpcomm
1345 ELSE
1346 mycomm=ocn_comm_world
1347 END IF
1348# endif
1349!
1350!-----------------------------------------------------------------------
1351! Broadcast requested variable.
1352!-----------------------------------------------------------------------
1353!
1354 npts=1
1355# ifdef MPI
1356 CALL mpi_bcast (a, npts, mpi_logical, mymaster, mycomm, myerror)
1357 IF (myerror.ne.mpi_success) THEN
1358 CALL mpi_error_string (myerror, string, lstr, serror)
1359 lstr=len_trim(string)
1360 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1361 10 FORMAT (/,' MP_BCASTL_0D - error during ',a,' call, Task = ', &
1362 & i3.3,' Error = ',i3,/,13x,a)
1363 exit_flag=2
1364 RETURN
1365 END IF
1366# endif
1367# ifdef PROFILE
1368!
1369!-----------------------------------------------------------------------
1370! Turn off time clocks.
1371!-----------------------------------------------------------------------
1372!
1373 CALL wclock_off (ng, model, 64, __line__, myfile)
1374# endif
1375!
1376 RETURN
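A hedged usage sketch for a logical switch decided on the master (Lwrite_now is a made-up name):

!  Illustrative sketch only: broadcast a logical switch.
      logical :: Lwrite_now
!
      Lwrite_now=.FALSE.
      IF (MyRank.eq.MyMaster) Lwrite_now=.TRUE.
      CALL mp_bcastl (ng, iNLM, Lwrite_now)    ! resolves to mp_bcastl_0d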

◆ mp_bcastl_1d()

subroutine distribute_mod::mp_bcastl_1d ( integer, intent(in) ng,
integer, intent(in) model,
logical, dimension(:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1379 of file distribute.F.

1380!
1381!***********************************************************************
1382! !
1383! This routine broadcasts a 1D non-tiled, logical array to all !
1384! processors in the communicator. It is called by all the !
1385! members in the group. !
1386! !
1387! On Input: !
1388! !
1389! ng Nested grid number. !
1390! model Calling model identifier. !
1391! A 1D array to broadcast (logical). !
1392! InpComm Communicator handle (integer, OPTIONAL). !
1393! !
1394! On Output: !
1395! !
1396! A Broadcasted 1D array. !
1397! !
1398!***********************************************************************
1399!
1400! Imported variable declarations.
1401!
1402 integer, intent(in) :: ng, model
1403
1404 integer, intent(in), optional :: InpComm
1405!
1406 logical, intent(inout) :: A(:)
1407!
1408! Local variable declarations
1409!
1410 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1411!
1412 character (len=MPI_MAX_ERROR_STRING) :: string
1413
1414 character (len=*), parameter :: MyFile = &
1415 & __FILE__//", mp_bcastl_1d"
1416
1417# ifdef PROFILE
1418!
1419!-----------------------------------------------------------------------
1420! Turn on time clocks.
1421!-----------------------------------------------------------------------
1422!
1423 CALL wclock_on (ng, model, 64, __line__, myfile)
1424# endif
1425# ifdef MPI
1426!
1427!-----------------------------------------------------------------------
1428! Set distributed-memory communicator handle (context ID).
1429!-----------------------------------------------------------------------
1430!
1431 IF (PRESENT(inpcomm)) THEN
1432 mycomm=inpcomm
1433 ELSE
1434 mycomm=ocn_comm_world
1435 END IF
1436# endif
1437!
1438!-----------------------------------------------------------------------
1439! Broadcast requested variable.
1440!-----------------------------------------------------------------------
1441!
1442 npts=ubound(a, dim=1)
1443
1444# ifdef MPI
1445 CALL mpi_bcast (a, npts, mpi_logical, mymaster, mycomm, myerror)
1446 IF (myerror.ne.mpi_success) THEN
1447 CALL mpi_error_string (myerror, string, lstr, serror)
1448 lstr=len_trim(string)
1449 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1450 10 FORMAT (/,' MP_BCASTL_1D - error during ',a,' call, Task = ', &
1451 & i3.3,' Error = ',i3,/,13x,a)
1452 exit_flag=2
1453 RETURN
1454 END IF
1455# endif
1456# ifdef PROFILE
1457!
1458!-----------------------------------------------------------------------
1459! Turn off time clocks.
1460!-----------------------------------------------------------------------
1461!
1462 CALL wclock_off (ng, model, 64, __line__, myfile)
1463# endif
1464!
1465 RETURN

◆ mp_bcastl_2d()

subroutine distribute_mod::mp_bcastl_2d ( integer, intent(in) ng,
integer, intent(in) model,
logical, dimension(:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1468 of file distribute.F.

1469!
1470!***********************************************************************
1471! !
1472! This routine broadcasts a 2D non-tiled, logical array to all !
1473! processors in the communicator. It is called by all the !
1474! members in the group. !
1475! !
1476! On Input: !
1477! !
1478! ng Nested grid number. !
1479! model Calling model identifier. !
1480! A 2D array to broadcast (logical). !
1481! InpComm Communicator handle (integer, OPTIONAL). !
1482! !
1483! On Output: !
1484! !
1485! A Broadcasted 2D array. !
1486! !
1487!***********************************************************************
1488!
1489! Imported variable declarations.
1490!
1491 integer, intent(in) :: ng, model
1492
1493 integer, intent(in), optional :: InpComm
1494!
1495 logical, intent(inout) :: A(:,:)
1496!
1497! Local variable declarations
1498!
1499 integer :: Lstr, MyCOMM, MyError, Npts, Serror
1500 integer :: Asize(2)
1501!
1502 character (len=MPI_MAX_ERROR_STRING) :: string
1503
1504 character (len=*), parameter :: MyFile = &
1505 & __FILE__//", mp_bcastl_2d"
1506
1507# ifdef PROFILE
1508!
1509!-----------------------------------------------------------------------
1510! Turn on time clocks.
1511!-----------------------------------------------------------------------
1512!
1513 CALL wclock_on (ng, model, 64, __line__, myfile)
1514# endif
1515# ifdef MPI
1516!
1517!-----------------------------------------------------------------------
1518! Set distributed-memory communicator handle (context ID).
1519!-----------------------------------------------------------------------
1520!
1521 IF (PRESENT(inpcomm)) THEN
1522 mycomm=inpcomm
1523 ELSE
1524 mycomm=ocn_comm_world
1525 END IF
1526# endif
1527!
1528!-----------------------------------------------------------------------
1529! Broadcast requested variable.
1530!-----------------------------------------------------------------------
1531!
1532 asize(1)=ubound(a, dim=1)
1533 asize(2)=ubound(a, dim=2)
1534 npts=asize(1)*asize(2)
1535
1536# ifdef MPI
1537 CALL mpi_bcast (a, npts, mpi_logical, mymaster, mycomm, myerror)
1538 IF (myerror.ne.mpi_success) THEN
1539 CALL mpi_error_string (myerror, string, lstr, serror)
1540 lstr=len_trim(string)
1541 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1542 10 FORMAT (/,' MP_BCASTL_2D - error during ',a,' call, Task = ', &
1543 & i3.3,' Error = ',i3,/,13x,a)
1544 exit_flag=2
1545 RETURN
1546 END IF
1547# endif
1548# ifdef PROFILE
1549!
1550!-----------------------------------------------------------------------
1551! Turn off time clocks.
1552!-----------------------------------------------------------------------
1553!
1554 CALL wclock_off (ng, model, 64, __line__, myfile)
1555# endif
1556!
1557 RETURN

◆ mp_bcasts_0d()

subroutine distribute_mod::mp_bcasts_0d ( integer, intent(in) ng,
integer, intent(in) model,
character (len=*), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1560 of file distribute.F.

1561!
1562!***********************************************************************
1563! !
1564! This routine broadcasts a string scalar variable to all processors !
1565! in the communicator. It is called by all the members in the group. !
1566! !
1567! On Input: !
1568! !
1569! ng Nested grid number. !
1570! model Calling model identifier. !
1571! A Variable to broadcast (string). !
1572! InpComm Communicator handle (integer, OPTIONAL). !
1573! !
1574! On Output: !
1575! !
1576! A Broadcasted variable. !
1577! !
1578!***********************************************************************
1579!
1580! Imported variable declarations.
1581!
1582 integer, intent(in) :: ng, model
1583
1584 integer, intent(in), optional :: InpComm
1585!
1586 character (len=*), intent(inout) :: A
1587!
1588! Local variable declarations
1589!
1590 integer :: Lstr, MyCOMM, MyError, Nchars, Serror
1591!
1592 character (len=MPI_MAX_ERROR_STRING) :: string
1593
1594 character (len=*), parameter :: MyFile = &
1595 & __FILE__//", mp_bcasts_0d"
1596
1597# ifdef PROFILE
1598!
1599!-----------------------------------------------------------------------
1600! Turn on time clocks.
1601!-----------------------------------------------------------------------
1602!
1603 IF (lwclock) THEN
1604 CALL wclock_on (ng, model, 64, __line__, myfile)
1605 END IF
1606# endif
1607# ifdef MPI
1608!
1609!-----------------------------------------------------------------------
1610! Set distributed-memory communicator handle (context ID).
1611!-----------------------------------------------------------------------
1612!
1613 IF (PRESENT(inpcomm)) THEN
1614 mycomm=inpcomm
1615 ELSE
1616 mycomm=ocn_comm_world
1617 END IF
1618# endif
1619!
1620!-----------------------------------------------------------------------
1621! Broadcast requested variable.
1622!-----------------------------------------------------------------------
1623!
1624 nchars=len(a)
1625# ifdef MPI
1626 CALL mpi_bcast (a, nchars, mpi_byte, mymaster, mycomm, myerror)
1627 IF (myerror.ne.mpi_success) THEN
1628 CALL mpi_error_string (myerror, string, lstr, serror)
1629 lstr=len_trim(string)
1630 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1631 10 FORMAT (/,' MP_BCASTS_0D - error during ',a,' call, Task = ', &
1632 & i3.3,' Error = ',i3,/,13x,a)
1633 exit_flag=2
1634 RETURN
1635 END IF
1636# endif
1637# ifdef PROFILE
1638!
1639!-----------------------------------------------------------------------
1640! Turn off time clocks.
1641!-----------------------------------------------------------------------
1642!
1643 IF (lwclock) THEN
1644 CALL wclock_off (ng, model, 64, __line__, myfile)
1645 END IF
1646# endif
1647!
1648 RETURN

◆ mp_bcasts_1d()

subroutine distribute_mod::mp_bcasts_1d ( integer, intent(in) ng,
integer, intent(in) model,
character (len=*), dimension(:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1651 of file distribute.F.

1652!
1653!***********************************************************************
1654! !
1655! This routine broadcasts a 1D string array to all processors in the !
1656! communicator. It is called by all the members in the group. !
1657! !
1658! On Input: !
1659! !
1660! ng Nested grid number. !
1661! model Calling model identifier. !
1662! A 1D array to broadcast (string). !
1663! InpComm Communicator handle (integer, OPTIONAL). !
1664! !
1665! On Output: !
1666! !
1667! A Broadcasted 1D array. !
1668! !
1669!***********************************************************************
1670!
1671! Imported variable declarations.
1672!
1673 integer, intent(in) :: ng, model
1674
1675 integer, intent(in), optional :: InpComm
1676!
1677 character (len=*), intent(inout) :: A(:)
1678!
1679! Local variable declarations
1680!
1681 integer :: Asize, Lstr, MyCOMM, MyError, Nchars, Serror
1682!
1683 character (len=MPI_MAX_ERROR_STRING) :: string
1684
1685 character (len=*), parameter :: MyFile = &
1686 & __FILE__//", mp_bcasts_1d"
1687
1688# ifdef PROFILE
1689!
1690!-----------------------------------------------------------------------
1691! Turn on time clocks.
1692!-----------------------------------------------------------------------
1693!
1694 CALL wclock_on (ng, model, 64, __line__, myfile)
1695# endif
1696# ifdef MPI
1697!
1698!-----------------------------------------------------------------------
1699! Set distributed-memory communicator handle (context ID).
1700!-----------------------------------------------------------------------
1701!
1702 IF (PRESENT(inpcomm)) THEN
1703 mycomm=inpcomm
1704 ELSE
1705 mycomm=ocn_comm_world
1706 END IF
1707# endif
1708!
1709!-----------------------------------------------------------------------
1710! Broadcast requested variable.
1711!-----------------------------------------------------------------------
1712!
1713 asize=ubound(a, dim=1)
1714 nchars=len(a(1))*asize
1715
1716# ifdef MPI
1717 CALL mpi_bcast (a, nchars, mpi_byte, mymaster, mycomm, myerror)
1718 IF (myerror.ne.mpi_success) THEN
1719 CALL mpi_error_string (myerror, string, lstr, serror)
1720 lstr=len_trim(string)
1721 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1722 10 FORMAT (/,' MP_BCASTS_1D - error during ',a,' call, Task = ', &
1723 & i3.3,' Error = ',i3,/,13x,a)
1724 exit_flag=2
1725 RETURN
1726 END IF
1727# endif
1728# ifdef PROFILE
1729!
1730!-----------------------------------------------------------------------
1731! Turn off time clocks.
1732!-----------------------------------------------------------------------
1733!
1734 CALL wclock_off (ng, model, 64, __line__, myfile)
1735# endif
1736!
1737 RETURN
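
A hedged sketch for the 1D variant, typically used for lists of file or variable names (the array and its contents are hypothetical; Master, ng, and iNLM are assumed from the standard ROMS context):

      USE distribute_mod, ONLY : mp_bcasts
!
      integer, parameter :: Nfiles = 4
      character (len=256) :: fnames(Nfiles)
!
!  The master task fills the list; the broadcast then moves
!  LEN(fnames(1))*Nfiles bytes in a single MPI_BCAST call.
!
      IF (Master) THEN
        fnames(1)='roms_grd.nc'
        fnames(2)='roms_ini.nc'
        fnames(3)='roms_frc.nc'
        fnames(4)='roms_bry.nc'
      END IF
      CALL mp_bcasts (ng, iNLM, fnames)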

◆ mp_bcasts_2d()

subroutine distribute_mod::mp_bcasts_2d ( integer, intent(in) ng,
integer, intent(in) model,
character (len=*), dimension(:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1740 of file distribute.F.

1741!
1742!***********************************************************************
1743! !
1744! This routine broadcasts a 2D string array to all processors in the !
1745! communicator. It is called by all the members in the group. !
1746! !
1747! On Input: !
1748! !
1749! ng Nested grid number. !
1750! model Calling model identifier. !
1751! A 2D array to broadcast (string). !
1752! InpComm Communicator handle (integer, OPTIONAL). !
1753! !
1754! On Output: !
1755! !
1756! A Broadcasted 2D array. !
1757! !
1758!***********************************************************************
1759!
1760! Imported variable declarations.
1761!
1762 integer, intent(in) :: ng, model
1763
1764 integer, intent(in), optional :: InpComm
1765!
1766 character (len=*), intent(inout) :: A(:,:)
1767!
1768! Local variable declarations
1769!
1770 integer :: Lstr, MyCOMM, MyError, Nchars, Serror
1771 integer :: Asize(2)
1772!
1773 character (len=MPI_MAX_ERROR_STRING) :: string
1774
1775 character (len=*), parameter :: MyFile = &
1776 & __FILE__//", mp_bcasts_2d"
1777
1778# ifdef PROFILE
1779!
1780!-----------------------------------------------------------------------
1781! Turn on time clocks.
1782!-----------------------------------------------------------------------
1783!
1784 CALL wclock_on (ng, model, 64, __line__, myfile)
1785# endif
1786# ifdef MPI
1787!
1788!-----------------------------------------------------------------------
1789! Set distributed-memory communicator handle (context ID).
1790!-----------------------------------------------------------------------
1791!
1792 IF (PRESENT(inpcomm)) THEN
1793 mycomm=inpcomm
1794 ELSE
1795 mycomm=ocn_comm_world
1796 END IF
1797# endif
1798!
1799!-----------------------------------------------------------------------
1800! Broadcast requested variable.
1801!-----------------------------------------------------------------------
1802!
1803 asize(1)=ubound(a, dim=1)
1804 asize(2)=ubound(a, dim=2)
1805 nchars=len(a(1,1))*asize(1)*asize(2)
1806
1807# ifdef MPI
1808 CALL mpi_bcast (a, nchars, mpi_byte, mymaster, mycomm, myerror)
1809 IF (myerror.ne.mpi_success) THEN
1810 CALL mpi_error_string (myerror, string, lstr, serror)
1811 lstr=len_trim(string)
1812 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1813 10 FORMAT (/,' MP_BCASTS_2D - error during ',a,' call, Task = ', &
1814 & i3.3,' Error = ',i3,/,13x,a)
1815 exit_flag=2
1816 RETURN
1817 END IF
1818# endif
1819# ifdef PROFILE
1820!
1821!-----------------------------------------------------------------------
1822! Turn off time clocks.
1823!-----------------------------------------------------------------------
1824!
1825 CALL wclock_off (ng, model, 64, __line__, myfile)
1826# endif
1827!
1828 RETURN

◆ mp_bcasts_3d()

subroutine distribute_mod::mp_bcasts_3d ( integer, intent(in) ng,
integer, intent(in) model,
character (len=*), dimension(:,:,:), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 1831 of file distribute.F.

1832!
1833!***********************************************************************
1834! !
1835! This routine broadcasts a 3D string array to all processors in the !
1836! communicator. It is called by all the members in the group. !
1837! !
1838! On Input: !
1839! !
1840! ng Nested grid number. !
1841! model Calling model identifier. !
1842! A 3D array to broadcast (string). !
1843! InpComm Communicator handle (integer, OPTIONAL). !
1844! !
1845! On Output: !
1846! !
1847! A Broadcasted 3D array. !
1848! !
1849!***********************************************************************
1850!
1851! Imported variable declarations.
1852!
1853 integer, intent(in) :: ng, model
1854
1855 integer, intent(in), optional :: InpComm
1856!
1857 character (len=*), intent(inout) :: A(:,:,:)
1858!
1859! Local variable declarations
1860!
1861 integer :: Lstr, MyCOMM, MyError, Nchars, Serror
1862 integer :: Asize(3)
1863!
1864 character (len=MPI_MAX_ERROR_STRING) :: string
1865
1866 character (len=*), parameter :: MyFile = &
1867 & __FILE__//", mp_bcasts_3d"
1868
1869# ifdef PROFILE
1870!
1871!-----------------------------------------------------------------------
1872! Turn on time clocks.
1873!-----------------------------------------------------------------------
1874!
1875 CALL wclock_on (ng, model, 64, __line__, myfile)
1876# endif
1877# ifdef MPI
1878!
1879!-----------------------------------------------------------------------
1880! Set distributed-memory communicator handle (context ID).
1881!-----------------------------------------------------------------------
1882!
1883 IF (PRESENT(inpcomm)) THEN
1884 mycomm=inpcomm
1885 ELSE
1886 mycomm=ocn_comm_world
1887 END IF
1888# endif
1889!
1890!-----------------------------------------------------------------------
1891! Broadcast requested variable.
1892!-----------------------------------------------------------------------
1893!
1894 asize(1)=ubound(a, dim=1)
1895 asize(2)=ubound(a, dim=2)
1896 asize(3)=ubound(a, dim=3)
1897 nchars=len(a(1,1,1))*asize(1)*asize(2)*asize(3)
1898
1899# ifdef MPI
1900 CALL mpi_bcast (a, nchars, mpi_byte, mymaster, mycomm, myerror)
1901 IF (myerror.ne.mpi_success) THEN
1902 CALL mpi_error_string (myerror, string, lstr, serror)
1903 lstr=len_trim(string)
1904 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
1905 10 FORMAT (/,' MP_BCASTS_3D - error during ',a,' call, Task = ', &
1906 & i3.3,' Error = ',i3,/,13x,a)
1907 exit_flag=2
1908 RETURN
1909 END IF
1910# endif
1911# ifdef PROFILE
1912!
1913!-----------------------------------------------------------------------
1914! Turn off time clocks.
1915!-----------------------------------------------------------------------
1916!
1917 CALL wclock_off (ng, model, 64, __line__, myfile)
1918# endif
1919!
1920 RETURN
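
The 2D and 3D variants follow the same pattern: the string array is treated as a flat byte buffer of length LEN(element) times the number of elements. A small worked sketch of that count, using a hypothetical array b with 1-based bounds (consistent with the UBOUND-based sizing above):

      character (len=64) :: b(2,3,5)
      integer :: nbytes
!
!  Matches Nchars in mp_bcasts_3d: element length times the product of
!  the three extents.
!
      nbytes=LEN(b(1,1,1))*SIZE(b)        ! 64*30 = 1920 bytes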

◆ mp_boundary()

subroutine distribute_mod::mp_boundary ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) imin,
integer, intent(in) imax,
integer, intent(in) lbi,
integer, intent(in) ubi,
integer, intent(in) lbk,
integer, intent(in) ubk,
logical, intent(in) update,
real(r8), dimension(lbi:ubi,lbk:ubk), intent(inout) a )

Definition at line 2103 of file distribute.F.

2106!
2107!***********************************************************************
2108! !
2109! This routine exchanges boundary arrays between tiles. !
2110! !
2111! On Input: !
2112! !
2113! ng Nested grid number. !
2114! model Calling model identifier. !
2115! Imin Starting tile index. !
2116! Imax Ending tile index. !
2117! Jstr Starting tile index in the J-direction. !
2118! Jend Ending tile index in the J-direction. !
2119! LBi I-dimension Lower bound. !
2120! UBi I-dimension Upper bound. !
2121! LBk K-dimension Lower bound, if any. Otherwise, a value !
2122! of one is expected. !
2123! UBk K-dimension Upper bound, if any. Otherwise, a value !
2124! of one is expected. !
2125! !
2126! update Switch activated by the node that updated the !
2127! boundary data. !
2128! A Boundary array (1D or 2D) to process. !
2129! !
2130! On Output: !
2131! !
2132! A Updated boundary array (1D or 2D). !
2133! !
2134!***********************************************************************
2135!
2136! Imported variable declarations.
2137!
2138 logical, intent(in) :: update
2139!
2140 integer, intent(in) :: ng, model, Imin, Imax
2141 integer, intent(in) :: LBi, UBi, LBk, UBk
2142!
2143 real(r8), intent(inout) :: A(LBi:UBi,LBk:UBk)
2144!
2145! Local variable declarations.
2146!
2147 integer :: Ilen, Ioff, Lstr, MyError, Nnodes, Npts, Serror
2148 integer :: i, ik, k, kc, rank
2149!
2150 real(r8), dimension((UBi-LBi+1)*(UBk-LBk+1)) :: Asend
2151
2152# if defined BOUNDARY_ALLGATHER
2153 real(r8), dimension((UBi-LBi+1)*(UBk-LBk+1), 0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
2154# elif defined BOUNDARY_ALLREDUCE
2155 real(r8), dimension((UBi-LBi+1)*(UBk-LBk+1)) :: Arecv
2156# endif
2157!
2158 character (len=MPI_MAX_ERROR_STRING) :: string
2159
2160 character (len=*), parameter :: MyFile = &
2161 & __FILE__//", mp_boundary"
2162
2163# ifdef PROFILE
2164!
2165!-----------------------------------------------------------------------
2166! Turn on time clocks.
2167!-----------------------------------------------------------------------
2168!
2169 CALL wclock_on (ng, model, 68, __line__, myfile)
2170# endif
2171!
2172!-----------------------------------------------------------------------
2173! Pack boundary data. Zero-out boundary array except points updated
2174! by the appropriate node, so sum reduction can be performed during
2175! unpacking.
2176!-----------------------------------------------------------------------
2177!
2178! Maximum automatic buffer memory size in bytes.
2179!
2180 bmemmax(ng)=max(bmemmax(ng), real((SIZE(asend)+ &
2181 & SIZE(arecv))*kind(a),r8))
2182!
2183! Initialize buffer to the full range so unpacking is correct with
2184! summation. This also allows even exchange of segments with
2185! communication routine "mpi_allgather".
2186!
2187 ilen=ubi-lbi+1
2188 ioff=1-lbi
2189 npts=ilen*(ubk-lbk+1)
2190 DO i=1,npts
2191 asend(i)=0.0_r8
2192 END DO
2193!
2194! If a boundary tile, load boundary data.
2195!
2196 IF (update) THEN
2197 DO k=lbk,ubk
2198 kc=(k-lbk)*ilen
2199 DO i=imin,imax
2200 ik=i+ioff+kc
2201 asend(ik)=a(i,k)
2202 END DO
2203 END DO
2204 END IF
2205!
2206!-----------------------------------------------------------------------
2207! Collect data from all nodes.
2208!-----------------------------------------------------------------------
2209!
2210# ifdef MPI
2211# if defined BOUNDARY_ALLGATHER
2212 CALL mpi_allgather (asend, npts, mp_float, arecv, npts, mp_float, &
2213 & ocn_comm_world, myerror)
2214 IF (myerror.ne.mpi_success) THEN
2215 CALL mpi_error_string (myerror, string, lstr, serror)
2216 lstr=len_trim(string)
2217 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
2218 & string(1:lstr)
2219 10 FORMAT (/,' MP_BOUNDARY - error during ',a,' call, Task = ', &
2220 & i3.3,' Error = ',i3,/,15x,a)
2221 exit_flag=2
2222 RETURN
2223 END IF
2224# elif defined BOUNDARY_ALLREDUCE
2225 CALL mpi_allreduce (asend, arecv, npts, mp_float, mpi_sum, &
2226 & ocn_comm_world, myerror)
2227 IF (myerror.ne.mpi_success) THEN
2228 CALL mpi_error_string (myerror, string, lstr, serror)
2229 lstr=len_trim(string)
2230 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
2231 & string(1:lstr)
2232 10 FORMAT (/,' MP_BOUNDARY - error during ',a,' call, Task = ', &
2233 & i3.3,' Error = ',i3,/,15x,a)
2234 exit_flag=2
2235 RETURN
2236 END IF
2237# endif
2238# endif
2239!
2240!-----------------------------------------------------------------------
2241! Unpack data: reduction sum.
2242!-----------------------------------------------------------------------
2243!
2244# if defined BOUNDARY_ALLGATHER
2245 nnodes=ntilei(ng)*ntilej(ng)-1
2246 ik=0
2247 DO k=lbk,ubk
2248 DO i=lbi,ubi
2249 a(i,k)=0.0_r8
2250 ik=ik+1
2251 DO rank=0,nnodes
2252 a(i,k)=a(i,k)+arecv(ik,rank)
2253 END DO
2254 END DO
2255 END DO
2256# elif defined BOUNDARY_ALLREDUCE
2257 ik=0
2258 DO k=lbk,ubk
2259 DO i=lbi,ubi
2260 ik=ik+1
2261 a(i,k)=arecv(ik)
2262 END DO
2263 END DO
2264# endif
2265# ifdef PROFILE
2266!
2267!-----------------------------------------------------------------------
2268! Turn off time clocks.
2269!-----------------------------------------------------------------------
2270!
2271 CALL wclock_off (ng, model, 68, __line__, myfile)
2272# endif
2273!
2274 RETURN
2275

References mod_param::bmemmax, mod_scalars::exit_flag, mod_parallel::mp_float, mod_parallel::myrank, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().

Referenced by ad_set_data_tile(), rp_set_data_tile(), set_data_tile(), set_tides_mod::set_tides_tile(), and tl_set_data_tile().
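
A hedged calling sketch for a western-boundary slab. The array Bwest is hypothetical; Jstr, Jend, Mm(ng), N(ng), and DOMAIN(ng)%Western_Edge are assumed to be available from the standard ROMS tile context (set_bounds.h, mod_param):

      USE distribute_mod, ONLY : mp_boundary
!
      logical :: update
      real(r8) :: Bwest(0:Mm(ng)+1,1:N(ng))
!
!  Only tiles along the western edge load Bwest and pass
!  update=.TRUE.; the sum reduction inside mp_boundary then leaves
!  every task with the complete boundary segment.
!
      update=DOMAIN(ng)%Western_Edge(tile)
      CALL mp_boundary (ng, iNLM, Jstr, Jend,                           &
     &                  0, Mm(ng)+1, 1, N(ng),                          &
     &                  update, Bwest)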


◆ mp_collect_f()

subroutine distribute_mod::mp_collect_f ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) npts,
real(r8), intent(in) aspv,
real(r8), dimension(npts), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 3522 of file distribute.F.

3523!
3524!***********************************************************************
3525! !
3526! This routine collects a 1D floating-point array from all members !
3527! in the group. Then, it packs distributed data by removing the !
3528! special values. This routine is used when extracting station !
3529! data from tiled arrays. !
3530! !
3531! On Input: !
3532! !
3533! ng Nested grid number. !
3534! model Calling model identifier. !
3535! Npts Number of collected data points. !
3536! Aspv Special value indicating no data. This implies that !
3537! desired data is tile unbounded. !
3538! A Collected data. !
3539! InpComm Communicator handle (integer, OPTIONAL). !
3540! !
3541! On Output: !
3542! !
3543! A Collected data. !
3544! !
3545!***********************************************************************
3546!
3547! Imported variable declarations.
3548!
3549 integer, intent(in) :: ng, model, Npts
3550
3551 integer, intent(in), optional :: InpComm
3552!
3553 real(r8), intent(in) :: Aspv
3554
3555 real(r8), intent(inout) :: A(Npts)
3556!
3557! Local variable declarations.
3558!
3559 integer :: Lstr, MyCOMM, MyError, Nnodes, Serror
3560 integer :: i, rank, request
3561
3562 integer, dimension(MPI_STATUS_SIZE) :: status
3563!
3564# if defined COLLECT_ALLGATHER
3565 real(r8), dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
3566# elif defined COLLECT_ALLREDUCE
3567 real(r8), dimension(Npts) :: Asend
3568# else
3569 real(r8), allocatable :: Arecv(:)
3570# endif
3571!
3572 character (len=MPI_MAX_ERROR_STRING) :: string
3573
3574 character (len=*), parameter :: MyFile = &
3575 & __FILE__//", mp_collect_f"
3576
3577# ifdef PROFILE
3578!
3579!-----------------------------------------------------------------------
3580! Turn on time clocks.
3581!-----------------------------------------------------------------------
3582!
3583 CALL wclock_on (ng, model, 69, __line__, myfile)
3584# endif
3585# ifdef MPI
3586!
3587!-----------------------------------------------------------------------
3588! Set distributed-memory communicator handle (context ID).
3589!-----------------------------------------------------------------------
3590!
3591 IF (PRESENT(inpcomm)) THEN
3592 mycomm=inpcomm
3593 ELSE
3594 mycomm=ocn_comm_world
3595 END IF
3596# endif
3597!
3598!-----------------------------------------------------------------------
3599! Collect data from all nodes.
3600!-----------------------------------------------------------------------
3601!
3602! Maximum automatic buffer memory size in bytes.
3603!
3604# if defined COLLECT_ALLGATHER
3605 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
3606# else
3607 bmemmax(ng)=max(bmemmax(ng), real(npts*kind(a),r8))
3608# endif
3609
3610# if defined COLLECT_ALLGATHER
3611!
3612 CALL mpi_allgather (a, npts, mp_float, arecv, npts, mp_float, &
3613 & mycomm, myerror)
3614 IF (myerror.ne.mpi_success) THEN
3615 CALL mpi_error_string (myerror, string, lstr, serror)
3616 lstr=len_trim(string)
3617 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
3618 & string(1:lstr)
3619 exit_flag=2
3620 RETURN
3621 END IF
3622!
3623! Pack data according to special values: sum or ignore.
3624!
3625 nnodes=ntilei(ng)*ntilej(ng)-1
3626 IF (aspv.eq.0.0_r8) THEN
3627 DO i=1,npts
3628 a(i)=0.0_r8
3629 DO rank=0,nnodes
3630 a(i)=a(i)+arecv(i,rank)
3631 END DO
3632 END DO
3633 ELSE
3634 DO i=1,npts
3635 DO rank=0,nnodes
3636 IF (arecv(i,rank).ne.aspv) THEN
3637 a(i)=arecv(i,rank)
3638 END IF
3639 END DO
3640 END DO
3641 END IF
3642# elif defined COLLECT_ALLREDUCE
3643!
3644! Copy data to send.
3645!
3646 DO i=1,npts
3647 asend(i)=a(i)
3648 END DO
3649!
3650! Collect data from all nodes as a reduced sum.
3651!
3652 CALL mpi_allreduce (asend, a, npts, mp_float, mpi_sum, &
3653 & mycomm, myerror)
3654 IF (myerror.ne.mpi_success) THEN
3655 CALL mpi_error_string (myerror, string, lstr, serror)
3656 lstr=len_trim(string)
3657 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
3658 & string(1:lstr)
3659 exit_flag=2
3660 RETURN
3661 END IF
3662# else
3663!
3664 IF (myrank.eq.mymaster) THEN
3665!
3666! If master node, allocate and receive buffer.
3667!
3668 IF (.not.allocated(arecv)) THEN
3669 allocate (arecv(npts))
3670 END IF
3671!
3672! If master node, loop over other nodes to receive and accumulate the
3673! data.
3674!
3675 DO rank=1,ntilei(ng)*ntilej(ng)-1
3676 CALL mpi_irecv (arecv, npts, mp_float, rank, rank+5, &
3677 & mycomm, request, myerror)
3678 CALL mpi_wait (request, status, myerror)
3679 IF (myerror.ne.mpi_success) THEN
3680 CALL mpi_error_string (myerror, string, lstr, serror)
3681 lstr=len_trim(string)
3682 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
3683 exit_flag=2
3684 RETURN
3685 END IF
3686 DO i=1,npts
3687 a(i)=a(i)+arecv(i)
3688 END DO
3689 END DO
3690 deallocate (arecv)
3691!
3692! Otherwise, send data to master node.
3693!
3694 ELSE
3695 CALL mpi_isend (a, npts, mp_float, mymaster, myrank+5, &
3696 & mycomm, request, myerror)
3697 CALL mpi_wait (request, status, myerror)
3698 IF (myerror.ne.mpi_success) THEN
3699 CALL mpi_error_string (myerror, string, lstr, serror)
3700 lstr=len_trim(string)
3701 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
3702 exit_flag=2
3703 RETURN
3704 END IF
3705 END IF
3706!
3707! Broadcast accumulated (full) data to all nodes.
3708!
3709 CALL mpi_bcast (a, npts, mp_float, mymaster, mycomm, myerror)
3710 IF (myerror.ne.mpi_success) THEN
3711 CALL mpi_error_string (myerror, string, lstr, serror)
3712 lstr=len_trim(string)
3713 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3714 exit_flag=2
3715 RETURN
3716 END IF
3717# endif
3718 10 FORMAT (/,' MP_COLLECT_F - error during ',a,' call, Task = ', &
3719 & i3.3,' Error = ',i3,/,14x,a)
3720
3721# ifdef PROFILE
3722!
3723!-----------------------------------------------------------------------
3724! Turn off time clocks.
3725!-----------------------------------------------------------------------
3726!
3727 CALL wclock_off (ng, model, 69, __line__, myfile)
3728# endif
3729!
3730 RETURN
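
A hedged usage sketch through the generic mp_collect interface (the station buffer and count are hypothetical; ng and iNLM are assumed from the standard ROMS context). Using Aspv=0 matches the summation behavior of every compile-time branch above:

      USE distribute_mod, ONLY : mp_collect
!
      integer, parameter :: Nsta = 50
      real(r8), parameter :: Aspv = 0.0_r8
      real(r8) :: Svals(Nsta)
!
!  Each tile fills only the stations inside its own subdomain and
!  leaves Aspv elsewhere, so the reduction across tasks reproduces
!  the full vector on every task.
!
      Svals=Aspv
!     ... tile-local extraction into Svals goes here ...
      CALL mp_collect (ng, iNLM, Nsta, Aspv, Svals)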

◆ mp_collect_i()

subroutine distribute_mod::mp_collect_i ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) npts,
integer, intent(in) aspv,
integer, dimension(npts), intent(inout) a,
integer, intent(in), optional inpcomm )

Definition at line 3733 of file distribute.F.

3734!
3735!***********************************************************************
3736! !
3737! This routine collects a 1D integer array from all members in !
3738! the group. Then, it packs distributed data by removing the !
3739! special values. This routine is used when extracting station !
3740! data from tiled arrays. !
3741! !
3742! On Input: !
3743! !
3744! ng Nested grid number. !
3745! model Calling model identifier. !
3746! Npts Number of collected data points. !
3747! Aspv Special value indicating no data. This implies that !
3748! desired data is tile unbounded. !
3749! A Collected data. !
3750! InpComm Communicator handle (integer, OPTIONAL). !
3751! !
3752! On Output: !
3753! !
3754! A Collected data. !
3755! !
3756!***********************************************************************
3757!
3758! Imported variable declarations.
3759!
3760 integer, intent(in) :: ng, model, Npts
3761
3762 integer, intent(in) :: Aspv
3763
3764 integer, intent(in), optional :: InpComm
3765
3766 integer, intent(inout) :: A(Npts)
3767!
3768! Local variable declarations.
3769!
3770 integer :: Lstr, MyCOMM, MyError, Nnodes, Serror
3771 integer :: i, rank, request
3772
3773 integer, dimension(MPI_STATUS_SIZE) :: status
3774
3775# if defined COLLECT_ALLGATHER
3776 integer, dimension(Npts,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
3777# elif defined COLLECT_ALLREDUCE
3778 integer, dimension(Npts) :: Asend
3779# else
3780 integer, allocatable :: Arecv(:)
3781# endif
3782!
3783 character (len=MPI_MAX_ERROR_STRING) :: string
3784
3785 character (len=*), parameter :: MyFile = &
3786 & __FILE__//", mp_collect_i"
3787
3788# ifdef PROFILE
3789!
3790!-----------------------------------------------------------------------
3791! Turn on time clocks.
3792!-----------------------------------------------------------------------
3793!
3794 CALL wclock_on (ng, model, 69, __line__, myfile)
3795# endif
3796# ifdef MPI
3797!
3798!-----------------------------------------------------------------------
3799! Set distributed-memory communicator handle (context ID).
3800!-----------------------------------------------------------------------
3801!
3802 IF (PRESENT(inpcomm)) THEN
3803 mycomm=inpcomm
3804 ELSE
3805 mycomm=ocn_comm_world
3806 END IF
3807# endif
3808!
3809!-----------------------------------------------------------------------
3810! Collect data from all nodes.
3811!-----------------------------------------------------------------------
3812!
3813! Maximum automatic buffer memory size in bytes.
3814!
3815# if defined COLLECT_ALLGATHER
3816 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
3817# else
3818 bmemmax(ng)=max(bmemmax(ng), real(npts*kind(a),r8))
3819# endif
3820
3821# if defined COLLECT_ALLGATHER
3822!
3823 CALL mpi_allgather (a, npts, mpi_integer, arecv, npts, &
3824 & mpi_integer, mycomm, myerror)
3825 IF (myerror.ne.mpi_success) THEN
3826 CALL mpi_error_string (myerror, string, lstr, serror)
3827 lstr=len_trim(string)
3828 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
3829 & string(1:lstr)
3830 exit_flag=2
3831 RETURN
3832 END IF
3833!
3834! Pack data according to special values: sum or ignore.
3835!
3836 nnodes=ntilei(ng)*ntilej(ng)-1
3837 IF (aspv.eq.0) THEN
3838 DO i=1,npts
3839 a(i)=0
3840 DO rank=0,nnodes
3841 a(i)=a(i)+arecv(i,rank)
3842 END DO
3843 END DO
3844 ELSE
3845 DO i=1,npts
3846 DO rank=0,nnodes
3847 IF (arecv(i,rank).ne.aspv) THEN
3848 a(i)=arecv(i,rank)
3849 END IF
3850 END DO
3851 END DO
3852 END IF
3853# elif defined COLLECT_ALLREDUCE
3854!
3855! Copy data to send.
3856!
3857 DO i=1,npts
3858 asend(i)=a(i)
3859 END DO
3860!
3861! Collect data from all nodes as a reduced sum.
3862!
3863 CALL mpi_allreduce (asend, a, npts, mpi_integer, mpi_sum, &
3864 & mycomm, myerror)
3865 IF (myerror.ne.mpi_success) THEN
3866 CALL mpi_error_string (myerror, string, lstr, serror)
3867 lstr=len_trim(string)
3868 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
3869 & string(1:lstr)
3870 exit_flag=2
3871 RETURN
3872 END IF
3873# else
3874!
3875 IF (myrank.eq.mymaster) THEN
3876!
3877! If master node, allocate and receive buffer.
3878!
3879 IF (.not.allocated(arecv)) THEN
3880 allocate (arecv(npts))
3881 END IF
3882!
3883! If master node, loop over other nodes to receive and accumulate the
3884! data.
3885!
3886 DO rank=1,ntilei(ng)*ntilej(ng)-1
3887 CALL mpi_irecv (arecv, npts, mpi_integer, rank, rank+5, &
3888 & mycomm, request, myerror)
3889 CALL mpi_wait (request, status, myerror)
3890 IF (myerror.ne.mpi_success) THEN
3891 CALL mpi_error_string (myerror, string, lstr, serror)
3892 lstr=len_trim(string)
3893 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
3894 exit_flag=2
3895 RETURN
3896 END IF
3897 DO i=1,npts
3898 a(i)=a(i)+arecv(i)
3899 END DO
3900 END DO
3901 deallocate (arecv)
3902!
3903! Otherwise, send data to master node.
3904!
3905 ELSE
3906 CALL mpi_isend (a, npts, mpi_integer, mymaster, myrank+5, &
3907 & mycomm, request, myerror)
3908 CALL mpi_wait (request, status, myerror)
3909 IF (myerror.ne.mpi_success) THEN
3910 CALL mpi_error_string (myerror, string, lstr, serror)
3911 lstr=len_trim(string)
3912 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
3913 exit_flag=2
3914 RETURN
3915 END IF
3916 END IF
3917!
3918! Broadcast accumulated (full) data to all nodes.
3919!
3920 CALL mpi_bcast (a, npts, mpi_integer, mymaster, mycomm, myerror)
3921 IF (myerror.ne.mpi_success) THEN
3922 CALL mpi_error_string (myerror, string, lstr, serror)
3923 lstr=len_trim(string)
3924 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
3925 exit_flag=2
3926 RETURN
3927 END IF
3928# endif
3929 10 FORMAT (/,' MP_COLLECT_I - error during ',a,' call, Task = ', &
3930 & i3.3,' Error = ',i3,/,14x,a)
3931
3932# ifdef PROFILE
3933!
3934!-----------------------------------------------------------------------
3935! Turn off time clocks.
3936!-----------------------------------------------------------------------
3937!
3938 CALL wclock_off (ng, model, 69, __line__, myfile)
3939# endif
3940!
3941 RETURN
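
The integer variant is used the same way; a hedged sketch collecting, for example, per-station grid indices (the buffer is hypothetical):

      USE distribute_mod, ONLY : mp_collect
!
      integer, parameter :: Nsta = 50
      integer :: Ista(Nsta)
!
!  With Aspv=0 the contributions are accumulated as a sum, so only the
!  owning tile stores a nonzero index.
!
      Ista=0
!     ... tile-local station I-indices stored into Ista ...
      CALL mp_collect (ng, iNLM, Nsta, 0, Ista)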

◆ mp_dump()

subroutine distribute_mod::mp_dump ( integer, intent(in) ng,
integer, intent(in) tile,
integer, intent(in) gtype,
integer, intent(in) ilb,
integer, intent(in) iub,
integer, intent(in) jlb,
integer, intent(in) jub,
integer, intent(in) klb,
integer, intent(in) kub,
real(r8), dimension(ilb:iub,jlb:jub,klb:kub), intent(in) a,
character (len=*) name )

Definition at line 9074 of file distribute.F.

9076!
9077!***********************************************************************
9078! !
9079! This routine is used to debug distributed-memory communications. !
9080! It writes a field into an ASCII file for further post-processing. !
9081! !
9082!***********************************************************************
9083!
9084! Imported variable declarations.
9085!
9086 integer, intent(in) :: ng, tile, gtype
9087 integer, intent(in) :: ILB, IUB, JLB, JUB, KLB, KUB
9088!
9089 real(r8), intent(in) :: A(ILB:IUB,JLB:JUB,KLB:KUB)
9090!
9091 character (len=*) :: name
9092!
9093! Local variable declarations.
9094!
9095 common /counter/ nc
9096 integer :: nc
9097!
9098 logical, save :: first = .true.
9099!
9100 integer :: Imin, Imax, Ioff, Jmin, Jmax, Joff
9101 integer :: unit
9102!
9103 character (len=*), parameter :: MyFile = &
9104 & __FILE__//", mp_dump"
9105
9106# include "set_bounds.h"
9107!
9108!------------------------------------------------------------------------
9109! Write out requested field.
9110!------------------------------------------------------------------------
9111!
9112 IF (first) THEN
9113 nc=0
9114 first=.false.
9115 END IF
9116 nc=nc+1
9117 IF (master) THEN
9118 WRITE (10,'(a,i3.3,a,a)') 'file ', nc, ': ', trim(name)
9119 FLUSH (10)
9120 END IF
9121!
9122! Write out field including ghost-points.
9123!
9124 imin=0
9125 imax=lm(ng)+1
9126 IF (ewperiodic(ng)) THEN
9127 ioff=3
9128 ELSE
9129 ioff=1
9130 END IF
9131
9132 jmin=0
9133 jmax=mm(ng)+1
9134 IF (nsperiodic(ng)) THEN
9135 joff=3
9136 ELSE
9137 joff=1
9138 END IF
9139
9140 IF ((gtype.eq.p2dvar).or.(gtype.eq.p3dvar).or. &
9141 & (gtype.eq.u2dvar).or.(gtype.eq.u3dvar)) THEN
9142 imin=1
9143 END IF
9144 IF ((gtype.eq.p2dvar).or.(gtype.eq.p3dvar).or. &
9145 & (gtype.eq.v2dvar).or.(gtype.eq.v3dvar)) THEN
9146 jmin=1
9147 END IF
9148
9149 unit=(myrank+1)*1000+nc
9150 WRITE (unit,*) ilb, iub, jlb, jub, klb, kub, &
9151 & ioff, joff, imin, imax, jmin, jmax, &
9152 & a(ilb:iub,jlb:jub,klb:kub)
9153 FLUSH (unit)
9154!
9155! Write out non-overlapping field.
9156!
9157 imin=istrr
9158 imax=iendr
9159 IF (ewperiodic(ng)) THEN
9160 ioff=2
9161 ELSE
9162 ioff=1
9163 END IF
9164
9165 jmin=jstrr
9166 jmax=jendr
9167 IF (nsperiodic(ng)) THEN
9168 joff=2
9169 ELSE
9170 joff=1
9171 END IF
9172
9173 IF ((gtype.eq.p2dvar).or.(gtype.eq.p3dvar).or. &
9174 & (gtype.eq.u2dvar).or.(gtype.eq.u3dvar)) THEN
9175 imin=istr
9176 ioff=ioff-1
9177 END IF
9178 IF ((gtype.eq.p2dvar).or.(gtype.eq.p3dvar).or. &
9179 & (gtype.eq.v2dvar).or.(gtype.eq.v3dvar)) THEN
9180 jmin=jstr
9181 joff=joff-1
9182 END IF
9183
9184 unit=(myrank+1)*10000+nc
9185 WRITE (unit,*) imin, imax, jmin, jmax, klb, kub, &
9186 & ioff, joff, imin, imax, jmin, jmax, &
9187 & a(imin:imax,jmin:jmax,klb:kub)
9188 FLUSH (unit)
9189
9190 RETURN

References mod_scalars::ewperiodic, mod_param::lm, mod_parallel::master, mod_param::mm, mod_parallel::myrank, mod_scalars::nsperiodic, mod_param::p2dvar, mod_param::p3dvar, mod_param::u2dvar, mod_param::u3dvar, mod_param::v2dvar, and mod_param::v3dvar.
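
A hedged debugging sketch (the field and label are illustrative; the tile bounds and r3dvar come from the standard ROMS calling sequence and mod_param):

!  Dump a 3D RHO-point field from every task; the routine writes one
!  ASCII record per task using unit numbers derived from MyRank.
!
      CALL mp_dump (ng, tile, r3dvar,                                   &
     &              LBi, UBi, LBj, UBj, 1, N(ng),                       &
     &              GRID(ng)%z_r, 'z_r after set_depth')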

◆ mp_gather2d()

subroutine distribute_mod::mp_gather2d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) lbi,
integer, intent(in) ubi,
integer, intent(in) lbj,
integer, intent(in) ubj,
integer, intent(in) tindex,
integer, intent(in) gtype,
real(dp), intent(in) ascl,
real(r8), dimension(lbi:ubi,lbj:ubj), intent(in) amask,
real(r8), dimension(lbi:ubi,lbj:ubj), intent(in) a,
integer, intent(out) npts,
real(r8), dimension(:), intent(out) awrk,
logical, intent(in), optional setfillval )

Definition at line 3944 of file distribute.F.

3950!
3951!***********************************************************************
3952! !
3953! This routine collects a 2D tiled, floating-point array from each !
3954! spawned node and stores it into a one-dimensional global array. It !
3955! is used to collect and pack output data. !
3956! !
3957! On Input: !
3958! !
3959! ng Nested grid number. !
3960! model Calling model identifier. !
3961! LBi I-dimension Lower bound. !
3962! UBi I-dimension Upper bound. !
3963! LBj J-dimension Lower bound. !
3964! UBj J-dimension Upper bound. !
3965! tindex Time record index to process. !
3966! gtype C-grid type. If negative and a Land/Sea mask is !
3967! available, only water-points are processed. !
3968! Ascl Factor to scale field before writing. !
3969! Amask Land/Sea mask, if any. !
3970! A 2D tiled, floating-point array to process. !
3971! SetFillVal Logical switch to set fill value in land areas !
3972! (optional). !
3973! !
3974! On Output: !
3975! !
3976! Npts Number of points processed in Awrk. !
3977! Awrk Collected data from each node packed into 1D array !
3978! in column-major order. That is, in the same way !
3979! that Fortran multi-dimensional arrays are stored !
3980! in memory. !
3981! !
3982!***********************************************************************
3983!
3984! Imported variable declarations.
3985!
3986 logical, intent(in), optional :: SetFillVal
3987!
3988 integer, intent(in) :: ng, model, tindex, gtype
3989 integer, intent(in) :: LBi, UBi, LBj, UBj
3990 integer, intent(out) :: Npts
3991!
3992 real(dp), intent(in) :: Ascl
3993
3994# ifdef MASKING
3995 real(r8), intent(in) :: Amask(LBi:UBi,LBj:UBj)
3996# endif
3997 real(r8), intent(in) :: A(LBi:UBi,LBj:UBj)
3998 real(r8), intent(out) :: Awrk(:)
3999!
4000! Local variable declarations.
4001!
4002# ifdef MASKING
4003 logical :: LandFill
4004# endif
4005 integer :: Cgrid, Ntasks, ghost, rank
4006 integer :: Io, Ie, Jo, Je, Ioff, Joff
4007 integer :: Imin, Imax, Jmin, Jmax
4008 integer :: iLB, iUB, jLB, jUB
4009 integer :: Asize, Isize, Jsize, IJsize
4010 integer :: Lstr, MyError, MyType, Serror, Srequest
4011 integer :: i, ic, ij, j, jc, nc
4012!
4013 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
4014# ifdef GATHER_SENDRECV
4015 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: MySize
4016 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
4017
4018 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
4019 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
4020!
4021 real(r8), allocatable :: Arecv(:,:)
4022# else
4023 real(r8), allocatable :: Arecv(:)
4024# endif
4025 real(r8), allocatable :: Asend(:)
4026!
4027 character (len=MPI_MAX_ERROR_STRING) :: string
4028
4029 character (len=*), parameter :: MyFile = &
4030 & __FILE__//", mp_gather2d"
4031
4032# ifdef PROFILE
4033!
4034!-----------------------------------------------------------------------
4035! Turn on time clocks.
4036!-----------------------------------------------------------------------
4037!
4038 CALL wclock_on (ng, model, 66, __line__, myfile)
4039# endif
4040!
4041!-----------------------------------------------------------------------
4042! Set horizontal starting and ending indices for parallel domain
4043! partitions in the XI- and ETA-directions.
4044!-----------------------------------------------------------------------
4045!
4046! Maximum automatic buffer memory size in bytes.
4047!
4048 bmemmax(ng)=max(bmemmax(ng), real(tilesize(ng)*kind(a),r8))
4049!
4050! Set full grid first and last point according to staggered C-grid
4051! classification. Notice that the offsets are for the private array
4052! counter.
4053!
4054 mytype=abs(gtype)
4055
4056 SELECT CASE (mytype)
4057 CASE (p2dvar, p3dvar)
4058 io=iobounds(ng) % ILB_psi
4059 ie=iobounds(ng) % IUB_psi
4060 jo=iobounds(ng) % JLB_psi
4061 je=iobounds(ng) % JUB_psi
4062 ioff=0
4063 joff=1
4064 CASE (r2dvar, r3dvar)
4065 io=iobounds(ng) % ILB_rho
4066 ie=iobounds(ng) % IUB_rho
4067 jo=iobounds(ng) % JLB_rho
4068 je=iobounds(ng) % JUB_rho
4069 ioff=1
4070 joff=0
4071 CASE (u2dvar, u3dvar)
4072 io=iobounds(ng) % ILB_u
4073 ie=iobounds(ng) % IUB_u
4074 jo=iobounds(ng) % JLB_u
4075 je=iobounds(ng) % JUB_u
4076 ioff=0
4077 joff=0
4078 CASE (v2dvar, v3dvar)
4079 io=iobounds(ng) % ILB_v
4080 ie=iobounds(ng) % IUB_v
4081 jo=iobounds(ng) % JLB_v
4082 je=iobounds(ng) % JUB_v
4083 ioff=1
4084 joff=1
4085 CASE DEFAULT ! RHO-points
4086 io=iobounds(ng) % ILB_rho
4087 ie=iobounds(ng) % IUB_rho
4088 jo=iobounds(ng) % JLB_rho
4089 je=iobounds(ng) % JUB_rho
4090 ioff=1
4091 joff=0
4092 END SELECT
4093!
4094 isize=ie-io+1
4095 jsize=je-jo+1
4096 ijsize=isize*jsize
4097 npts=isize*jsize
4098!
4099! Set "GATHERV" counts and displacement vectors. Use non-overlapping
4100! (ghost=0) ranges according to tile rank.
4101!
4102 ghost=0
4103!
4104 SELECT CASE (mytype)
4105 CASE (p2dvar, p3dvar)
4106 cgrid=1
4107 CASE (r2dvar, r3dvar)
4108 cgrid=2
4109 CASE (u2dvar, u3dvar)
4110 cgrid=3
4111 CASE (v2dvar, v3dvar)
4112 cgrid=4
4113 CASE DEFAULT ! RHO-points
4114 cgrid=2
4115 END SELECT
4116!
4117 ntasks=ntilei(ng)*ntilej(ng)
4118 nc=0
4119 DO rank=0,ntasks-1
4120 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
4121 iub=bounds(ng) % Imax(cgrid,ghost,rank)
4122 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
4123 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
4124# ifdef GATHER_SENDRECV
4125 mysize(rank)=(iub-ilb+1)*(jub-jlb+1)
4126# endif
4127 displs(rank)=nc
4128 DO j=jlb,jub
4129 DO i=ilb,iub
4130 nc=nc+1
4131 END DO
4132 END DO
4133 counts(rank)=nc-displs(rank)
4134 END DO
4135!
4136!-----------------------------------------------------------------------
4137! Pack and scale input tiled data.
4138!-----------------------------------------------------------------------
4139!
4140 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
4141 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
4142 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
4143 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
4144!
4145 asize=(imax-imin+1)*(jmax-jmin+1)
4146 allocate ( asend(asize) )
4147 asend=0.0_r8
4148!
4149 nc=0
4150 DO j=jmin,jmax
4151 DO i=imin,imax
4152 nc=nc+1
4153 asend(nc)=a(i,j)*ascl
4154 END DO
4155 END DO
4156
4157# ifdef MASKING
4158!
4159! If overwriting Land/Sea mask or processing water-points only, flag
4160! land-points with special value.
4161!
4162 IF (PRESENT(setfillval)) THEN
4163 landfill=setfillval
4164 ELSE
4165 landfill=tindex.gt.0
4166 END IF
4167 IF (gtype.lt.0) THEN
4168 nc=0
4169 DO j=jmin,jmax
4170 DO i=imin,imax
4171 nc=nc+1
4172 IF (amask(i,j).eq.0.0_r8) THEN
4173 asend(nc)=spval
4174 END IF
4175 END DO
4176 END DO
4177 ELSE IF (landfill) THEN
4178 nc=0
4179 DO j=jmin,jmax
4180 DO i=imin,imax
4181 nc=nc+1
4182 IF (amask(i,j).eq.0.0_r8) THEN
4183 asend(nc)=spval
4184 END IF
4185 END DO
4186 END DO
4187 END IF
4188# endif
4189!
4190!-----------------------------------------------------------------------
4191! Gather requested global data from tiled arrays.
4192!-----------------------------------------------------------------------
4193
4194# ifdef GATHER_SENDRECV
4195!
4196 allocate ( arecv(ijsize, ntasks-1) )
4197 arecv=0.0_r8
4198!
4199! If master processor, unpack the send buffer since there is no
4200! need to distribute.
4201!
4202 IF (myrank.eq.mymaster) THEN
4203 nc=0
4204 DO j=jmin,jmax
4205 jc=(j-joff)*isize
4206 DO i=imin,imax
4207 nc=nc+1
4208 ic=i+ioff+jc
4209 awrk(ic)=asend(nc)
4210 END DO
4211 END DO
4212 END IF
4213!
4214! Send, receive, and unpack data.
4215!
4216 IF (myrank.eq.mymaster) THEN
4217 DO rank=1,ntasks-1
4218 CALL mpi_irecv (arecv(1,rank), mysize(rank), mp_float, rank, &
4219 & rank+5, ocn_comm_world, rrequest(rank), &
4220 & myerror)
4221 END DO
4222 DO rank=1,ntasks-1
4223 CALL mpi_wait (rrequest(rank), rstatus, myerror)
4224 IF (myerror.ne.mpi_success) THEN
4225 CALL mpi_error_string (myerror, string, lstr, serror)
4226 lstr=len_trim(string)
4227 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
4228 10 FORMAT (/,' MP_GATHER2D - error during ',a, &
4229 & ' call, Task = ',i3.3,' Error = ',i3,/,13x,a)
4230 exit_flag=2
4231 RETURN
4232 END IF
4233!
4234 imin=bounds(ng) % Imin(cgrid,ghost,rank)
4235 imax=bounds(ng) % Imax(cgrid,ghost,rank)
4236 jmin=bounds(ng) % Jmin(cgrid,ghost,rank)
4237 jmax=bounds(ng) % Jmax(cgrid,ghost,rank)
4238!
4239 nc=0
4240 DO j=jmin,jmax
4241 jc=(j-joff)*isize
4242 DO i=imin,imax
4243 nc=nc+1
4244 ic=i+ioff+jc
4245 awrk(ic)=arecv(nc,rank)
4246 END DO
4247 END DO
4248 END DO
4249 ELSE
4250 CALL mpi_isend (asend, asize, mp_float, mymaster, &
4251 & myrank+5, ocn_comm_world, srequest, myerror)
4252 CALL mpi_wait (srequest, sstatus, myerror)
4253 IF (myerror.ne.mpi_success) THEN
4254 CALL mpi_error_string (myerror, string, lstr, serror)
4255 lstr=len_trim(string)
4256 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
4257 exit_flag=2
4258 RETURN
4259 END IF
4260 END IF
4261
4262# else
4263!
4264! Gather local tiled data into a global array.
4265!
4266 allocate ( arecv(ijsize) )
4267 arecv=0.0_r8
4268!
4269 CALL mpi_gatherv (asend, asize, mp_float, &
4270 & arecv, counts, displs, mp_float, &
4271 & mymaster, ocn_comm_world, myerror)
4272 IF (myerror.ne.mpi_success) THEN
4273 CALL mpi_error_string (myerror, string, lstr, serror)
4274 WRITE (stdout,10) 'MPI_GATHERV', myrank, myerror, trim(string)
4275 10 FORMAT (/,' MP_GATHER2D - error during ',a,' call, Task = ', &
4276 & i3.3, ' Error = ',i3,/,15x,a)
4277 exit_flag=2
4278 RETURN
4279 END IF
4280!
4281! Unpack gathered data in contiguous memory order and remap every
4282! task segment.
4283!
4284 nc=0
4285 DO rank=0,ntasks-1
4286 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
4287 iub=bounds(ng) % Imax(cgrid,ghost,rank)
4288 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
4289 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
4290 DO j=jlb,jub
4291 jc=(j-joff)*isize
4292 DO i=ilb,iub
4293 ij=i+ioff+jc
4294 nc=nc+1
4295 awrk(ij)=arecv(nc)
4296 END DO
4297 END DO
4298 END DO
4299# endif
4300# ifdef MASKING
4301!
4302! If processing only water-points, remove land points and repack.
4303!
4304 IF ((myrank.eq.mymaster).and.(gtype.lt.0)) THEN
4305 nc=0
4306 DO i=1,ijsize
4307 IF (awrk(i).lt.spval) THEN
4308 nc=nc+1
4309 awrk(nc)=awrk(i)
4310 END IF
4311 END DO
4312 npts=nc
4313 END IF
4314# endif
4315!
4316! Deallocate local arrays.
4317!
4318 deallocate (arecv)
4319 deallocate (asend)
4320
4321# ifdef PROFILE
4322!
4323!-----------------------------------------------------------------------
4324! Turn off time clocks.
4325!-----------------------------------------------------------------------
4326!
4327 CALL wclock_off (ng, model, 66, __line__, myfile)
4328# endif
4329!
4330 RETURN

References mod_param::bmemmax, mod_param::bounds, mod_scalars::exit_flag, mod_param::iobounds, mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_param::p2dvar, mod_param::p3dvar, mod_param::r2dvar, mod_param::r3dvar, mod_scalars::spval, mod_iounits::stdout, mod_param::tilesize, mod_param::u2dvar, mod_param::u3dvar, mod_param::v2dvar, mod_param::v3dvar, wclock_off(), and wclock_on().

Referenced by nf_fwrite2d_mod::nf_fwrite2d::nf90_fwrite2d(), nf_fwrite3d_mod::nf_fwrite3d::nf90_fwrite3d(), nf_fwrite4d_mod::nf_fwrite4d::nf90_fwrite4d(), pack_field_mod::pack_field2d(), and wpoints_tile().
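
A hedged calling sketch in the spirit of the NetCDF writing routines that use it. The output buffer Awork is hypothetical and sized for the full RHO grid; the GRID and OCEAN fields are shown only for illustration, and the Amask argument is present only when MASKING is defined:

      integer :: Npts
      real(r8) :: Awork((Lm(ng)+2)*(Mm(ng)+2))
!
!  Gather the tiled free-surface into a 1D, column-major vector on the
!  master task before writing it out.
!
      CALL mp_gather2d (ng, iNLM, LBi, UBi, LBj, UBj,                   &
     &                  0, r2dvar, 1.0_dp,                              &
# ifdef MASKING
     &                  GRID(ng)%rmask,                                 &
# endif
     &                  OCEAN(ng)%zeta(:,:,1), Npts, Awork)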


◆ mp_gather3d()

subroutine distribute_mod::mp_gather3d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) lbi,
integer, intent(in) ubi,
integer, intent(in) lbj,
integer, intent(in) ubj,
integer, intent(in) lbk,
integer, intent(in) ubk,
integer, intent(in) tindex,
integer, intent(in) gtype,
real(dp), intent(in) ascl,
real(r8), dimension(lbi:ubi,lbj:ubj), intent(in) amask,
real(r8), dimension(lbi:ubi,lbj:ubj,lbk:ubk), intent(in) a,
integer, intent(out) npts,
real(r8), dimension(:), intent(out) awrk,
logical, intent(in), optional setfillval )

Definition at line 4725 of file distribute.F.

4731!
4732!***********************************************************************
4733! !
4734! This routine collects a 3D tiled, floating-point array from each !
4735! spawned node and stores it into a one-dimensional global array. It !
4736! is used to collect and pack output data. !
4737! !
4738! On Input: !
4739! !
4740! ng Nested grid number. !
4741! model Calling model identifier. !
4742! LBi I-dimension Lower bound. !
4743! UBi I-dimension Upper bound. !
4744! LBj J-dimension Lower bound. !
4745! UBj J-dimension Upper bound. !
4746! LBk K-dimension Lower bound. !
4747! UBk K-dimension Upper bound. !
4748! tindex Time record index to process. !
4749! gtype C-grid type. If negative and a Land/Sea mask is !
4750! available, only water-points are processed. !
4751! Ascl Factor to scale field before writing. !
4752! Amask Land/Sea mask, if any. !
4753! A 3D tiled, floating-point array to process. !
4754! SetFillVal Logical switch to set fill value in land areas !
4755! (optional). !
4756! !
4757! On Output: !
4758! !
4759! Npts Number of points processed in Awrk. !
4760! Awrk Collected data from each node packed into 1D array !
4761! in column-major order. That is, in the same way !
4762! that Fortran multi-dimensional arrays are stored !
4763! in memory. !
4764! !
4765!***********************************************************************
4766!
4767! Imported variable declarations.
4768!
4769 logical, intent(in), optional :: SetFillVal
4770!
4771 integer, intent(in) :: ng, model, tindex, gtype
4772 integer, intent(in) :: LBi, UBi, LBj, UBj, LBk, UBk
4773 integer, intent(out) :: Npts
4774!
4775 real(dp), intent(in) :: Ascl
4776
4777# ifdef MASKING
4778 real(r8), intent(in) :: Amask(LBi:UBi,LBj:UBj)
4779# endif
4780 real(r8), intent(in) :: A(LBi:UBi,LBj:UBj,LBk:UBk)
4781 real(r8), intent(out) :: Awrk(:)
4782!
4783! Local variable declarations.
4784!
4785# ifdef MASKING
4786 logical :: LandFill
4787!
4788# endif
4789 integer :: Cgrid, Ntasks, ghost, rank
4790 integer :: Io, Ie, Jo, Je, Ioff, Joff, Koff
4791 integer :: Imin, Imax, Jmin, Jmax
4792 integer :: iLB, iUB, jLB, jUB
4793 integer :: Asize, Isize, Jsize, Ksize, IJsize
4794 integer :: Lstr, MyError, MyType, Serror, Srequest
4795 integer :: i, ic, ijk, j, jc, k, kc, nc
4796!
4797 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
4798# ifdef GATHER_SENDRECV
4799 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: MySize
4800 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
4801
4802 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
4803 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
4804!
4805 real(r8), allocatable :: Arecv(:,:)
4806# else
4807 real(r8), allocatable :: Arecv(:)
4808# endif
4809 real(r8), allocatable :: Asend(:)
4810!
4811 character (len=MPI_MAX_ERROR_STRING) :: string
4812
4813 character (len=*), parameter :: MyFile = &
4814 & __FILE__//", mp_gather3d"
4815
4816# ifdef PROFILE
4817!
4818!-----------------------------------------------------------------------
4819! Turn on time clocks.
4820!-----------------------------------------------------------------------
4821!
4822 CALL wclock_on (ng, model, 66, __line__, myfile)
4823# endif
4824!
4825!-----------------------------------------------------------------------
4826! Set horizontal starting and ending indices for parallel domain
4827! partitions in the XI- and ETA-directions.
4828!-----------------------------------------------------------------------
4829!
4830! Maximum automatic buffer memory size in bytes.
4831!
4832 bmemmax(ng)=max(bmemmax(ng), real(2*SIZE(awrk)*kind(a),r8))
4833!
4834! Set full grid first and last point according to staggered C-grid
4835! classification. Notice that the offsets are for the private array
4836! counter.
4837!
4838 mytype=abs(gtype)
4839
4840 SELECT CASE (mytype)
4841 CASE (p2dvar, p3dvar)
4842 io=iobounds(ng) % ILB_psi
4843 ie=iobounds(ng) % IUB_psi
4844 jo=iobounds(ng) % JLB_psi
4845 je=iobounds(ng) % JUB_psi
4846 ioff=0
4847 joff=1
4848 CASE (r2dvar, r3dvar)
4849 io=iobounds(ng) % ILB_rho
4850 ie=iobounds(ng) % IUB_rho
4851 jo=iobounds(ng) % JLB_rho
4852 je=iobounds(ng) % JUB_rho
4853 ioff=1
4854 joff=0
4855 CASE (u2dvar, u3dvar)
4856 io=iobounds(ng) % ILB_u
4857 ie=iobounds(ng) % IUB_u
4858 jo=iobounds(ng) % JLB_u
4859 je=iobounds(ng) % JUB_u
4860 ioff=0
4861 joff=0
4862 CASE (v2dvar, v3dvar)
4863 io=iobounds(ng) % ILB_v
4864 ie=iobounds(ng) % IUB_v
4865 jo=iobounds(ng) % JLB_v
4866 je=iobounds(ng) % JUB_v
4867 ioff=1
4868 joff=1
4869 CASE DEFAULT ! RHO-points
4870 io=iobounds(ng) % ILB_rho
4871 ie=iobounds(ng) % IUB_rho
4872 jo=iobounds(ng) % JLB_rho
4873 je=iobounds(ng) % JUB_rho
4874 ioff=1
4875 joff=0
4876 END SELECT
4877!
4878 IF (lbk.eq.0) THEN
4879 koff=0
4880 ELSE
4881 koff=1
4882 END IF
4883!
4884 isize=ie-io+1
4885 jsize=je-jo+1
4886 ksize=ubk-lbk+1
4887 ijsize=isize*jsize
4888 npts=ijsize*ksize
4889!
4890! Set "GATHERV" counts and displacement vectors. Use non-overlapping
4891! (ghost=0) ranges according to tile rank.
4892!
4893 ghost=0
4894!
4895 SELECT CASE (mytype)
4896 CASE (p2dvar, p3dvar)
4897 cgrid=1
4898 CASE (r2dvar, r3dvar)
4899 cgrid=2
4900 CASE (u2dvar, u3dvar)
4901 cgrid=3
4902 CASE (v2dvar, v3dvar)
4903 cgrid=4
4904 CASE DEFAULT ! RHO-points
4905 cgrid=2
4906 END SELECT
4907!
4908 ntasks=ntilei(ng)*ntilej(ng)
4909 nc=0
4910 DO rank=0,ntasks-1
4911 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
4912 iub=bounds(ng) % Imax(cgrid,ghost,rank)
4913 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
4914 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
4915# ifdef GATHER_SENDRECV
4916 mysize(rank)=(iub-ilb+1)*(jub-jlb+1)*(ubk-lbk+1)
4917# endif
4918 displs(rank)=nc
4919 DO k=lbk,ubk
4920 DO j=jlb,jub
4921 DO i=ilb,iub
4922 nc=nc+1
4923 END DO
4924 END DO
4925 END DO
4926 counts(rank)=nc-displs(rank)
4927 END DO
4928!
4929!-----------------------------------------------------------------------
4930! Pack and scale input tiled data.
4931!-----------------------------------------------------------------------
4932!
4933 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
4934 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
4935 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
4936 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
4937!
4938 asize=(imax-imin+1)*(jmax-jmin+1)*(ubk-lbk+1)
4939 allocate ( asend(asize) )
4940 asend=0.0_r8
4941!
4942 nc=0
4943 DO k=lbk,ubk
4944 DO j=jmin,jmax
4945 DO i=imin,imax
4946 nc=nc+1
4947 asend(nc)=a(i,j,k)*ascl
4948 END DO
4949 END DO
4950 END DO
4951
4952# ifdef MASKING
4953!
4954! If overwriting Land/Sea mask or processing water-points only, flag
4955! land-points with special value.
4956!
4957 IF (PRESENT(setfillval)) THEN
4958 landfill=setfillval
4959 ELSE
4960 landfill=tindex.gt.0
4961 END IF
4962 IF (gtype.lt.0) THEN
4963 nc=0
4964 DO k=lbk,ubk
4965 DO j=jmin,jmax
4966 DO i=imin,imax
4967 nc=nc+1
4968 IF (amask(i,j).eq.0.0_r8) THEN
4969 asend(nc)=spval
4970 END IF
4971 END DO
4972 END DO
4973 END DO
4974 ELSE IF (landfill) THEN
4975 nc=0
4976 DO k=lbk,ubk
4977 DO j=jmin,jmax
4978 DO i=imin,imax
4979 nc=nc+1
4980 IF (amask(i,j).eq.0.0_r8) THEN
4981 asend(nc)=spval
4982 END IF
4983 END DO
4984 END DO
4985 END DO
4986 END IF
4987# endif
4988!
4989!-----------------------------------------------------------------------
4990! Gather requested global data from tiled arrays.
4991!-----------------------------------------------------------------------
4992
4993# ifdef GATHER_SENDRECV
4994!
4995 allocate ( arecv(ijsize*ksize, ntasks-1) )
4996 arecv=0.0_r8
4997!
4998! If master processor, unpack the send buffer since there is no
4999! need to distribute.
5000!
5001 IF (myrank.eq.mymaster) THEN
5002 nc=0
5003 DO k=lbk,ubk
5004 kc=(k-koff)*ijsize
5005 DO j=jmin,jmax
5006 jc=(j-joff)*isize
5007 DO i=imin,imax
5008 nc=nc+1
5009 ic=i+ioff+jc+kc
5010 awrk(ic)=asend(nc)
5011 END DO
5012 END DO
5013 END DO
5014 END IF
5015!
5016! Send, receive, and unpack data.
5017!
5018 IF (myrank.eq.mymaster) THEN
5019 DO rank=1,ntasks-1
5020 CALL mpi_irecv (arecv(1,rank), mysize(rank), mp_float, rank, &
5021 & rank+5, ocn_comm_world, rrequest(rank), &
5022 & myerror)
5023 END DO
5024 DO rank=1,ntasks-1
5025 CALL mpi_wait (rrequest(rank), rstatus, myerror)
5026 IF (myerror.ne.mpi_success) THEN
5027 CALL mpi_error_string (myerror, string, lstr, serror)
5028 lstr=len_trim(string)
5029 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
5030 10 FORMAT (/,' MP_GATHER3D - error during ',a,' call, Task = ',&
5031 & i3.3,' Error = ',i3,/,13x,a)
5032 exit_flag=2
5033 RETURN
5034 END IF
5035!
5036 imin=bounds(ng) % Imin(cgrid,ghost,rank)
5037 imax=bounds(ng) % Imax(cgrid,ghost,rank)
5038 jmin=bounds(ng) % Jmin(cgrid,ghost,rank)
5039 jmax=bounds(ng) % Jmax(cgrid,ghost,rank)
5040!
5041 nc=0
5042 DO k=lbk,ubk
5043 kc=(k-koff)*ijsize
5044 DO j=jmin,jmax
5045 jc=(j-joff)*isize
5046 DO i=imin,imax
5047 nc=nc+1
5048 ic=i+ioff+jc+kc
5049 awrk(ic)=arecv(nc,rank)
5050 END DO
5051 END DO
5052 END DO
5053 END DO
5054 ELSE
5055 CALL mpi_isend (asend, mysize(myrank), mp_float, mymaster, &
5056 & myrank+5, ocn_comm_world, srequest, myerror)
5057 CALL mpi_wait (srequest, sstatus, myerror)
5058 IF (myerror.ne.mpi_success) THEN
5059 CALL mpi_error_string (myerror, string, lstr, serror)
5060 lstr=len_trim(string)
5061 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
5062 exit_flag=2
5063 RETURN
5064 END IF
5065 END IF
5066
5067# else
5068!
5069! Gather local tiled data into a global array.
5070!
5071 allocate ( arecv(ijsize*ksize) )
5072 arecv=0.0_r8
5073!
5074 CALL mpi_gatherv (asend, asize, mp_float, &
5075 & arecv, counts, displs, mp_float, &
5076 & mymaster, ocn_comm_world, myerror)
5077 IF (myerror.ne.mpi_success) THEN
5078 CALL mpi_error_string (myerror, string, lstr, serror)
5079 WRITE (stdout,10) 'MPI_GATHERV', myrank, myerror, trim(string)
5080 10 FORMAT (/,' MP_GATHER3D - error during ',a,' call, Task = ', &
5081 & i3.3, ' Error = ',i3,/,15x,a)
5082 exit_flag=2
5083 RETURN
5084 END IF
5085!
5086! Unpack gathered data in contiguous memory order and remap every
5087! task segment.
5088!
5089 nc=0
5090 DO rank=0,ntasks-1
5091 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
5092 iub=bounds(ng) % Imax(cgrid,ghost,rank)
5093 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
5094 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
5095 DO k=lbk,ubk
5096 kc=(k-koff)*ijsize
5097 DO j=jlb,jub
5098 jc=(j-joff)*isize
5099 DO i=ilb,iub
5100 ijk=i+ioff+jc+kc
5101 nc=nc+1
5102 awrk(ijk)=arecv(nc)
5103 END DO
5104 END DO
5105 END DO
5106 END DO
5107# endif
5108# ifdef MASKING
5109!
5110! If processing only water-points, remove land points and repack.
5111!
5112 IF ((myrank.eq.mymaster).and.(gtype.lt.0)) THEN
5113 nc=0
5114 DO i=1,ijsize*ksize
5115 IF (awrk(i).lt.spval) THEN
5116 nc=nc+1
5117 awrk(nc)=awrk(i)
5118 END IF
5119 END DO
5120 npts=nc
5121 END IF
5122# endif
5123!
5124! Deallocate local arrays.
5125!
5126 deallocate (arecv)
5127 deallocate (asend)
5128
5129# ifdef PROFILE
5130!
5131!-----------------------------------------------------------------------
5132! Turn off time clocks.
5133!-----------------------------------------------------------------------
5134!
5135 CALL wclock_off (ng, model, 66, __line__, myfile)
5136# endif
5137!
5138 RETURN

References mod_param::bmemmax, mod_param::bounds, mod_scalars::exit_flag, mod_param::iobounds, mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_param::p2dvar, mod_param::p3dvar, mod_param::r2dvar, mod_param::r3dvar, mod_scalars::spval, mod_iounits::stdout, mod_param::u2dvar, mod_param::u3dvar, mod_param::v2dvar, mod_param::v3dvar, wclock_off(), and wclock_on().

Referenced by nf_fwrite3d_mod::nf_fwrite3d::nf90_fwrite3d(), nf_fwrite4d_mod::nf_fwrite4d::nf90_fwrite4d(), pack_field_mod::pack_field3d(), and pack_field_mod::pack_field4d().
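
The 3D version is called the same way with the extra vertical bounds; a hedged sketch for a temperature-like tracer (buffer, state fields, and the itemp index are shown for illustration only; Amask is present only when MASKING is defined):

      integer :: Npts
      real(r8) :: Awork((Lm(ng)+2)*(Mm(ng)+2)*N(ng))
!
      CALL mp_gather3d (ng, iNLM, LBi, UBi, LBj, UBj, 1, N(ng),         &
     &                  0, r3dvar, 1.0_dp,                              &
# ifdef MASKING
     &                  GRID(ng)%rmask,                                 &
# endif
     &                  OCEAN(ng)%t(:,:,:,1,itemp), Npts, Awork)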


◆ mp_gather_state()

subroutine distribute_mod::mp_gather_state ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) mstr,
integer, intent(in) mend,
integer, intent(in) asize,
real(r8), dimension(mstr:mend), intent(in) a,
real(r8), dimension(asize), intent(out) awrk )

Definition at line 5141 of file distribute.F.

5143!
5144!***********************************************************************
5145! !
5146! This routine gathers (threaded to global) state data to all nodes !
5147! in the group. This routine is used to unpack the state data for !
5148! the GST analysis propagators. !
5149! !
5150! On Input: !
5151! !
5152! ng Nested grid number. !
5153! model Calling model identifier. !
5154! Mstr Threaded array lower bound. !
5155! Mend Threaded array upper bound. !
5156! Asize Size of the full state. !
5157! A Threaded 1D array to process. !
5158! !
5159! On Output: !
5160! !
5161! Awrk Collected data from each node packed into 1D full !
5162! state array. !
5163! !
5164!***********************************************************************
5165!
5166! Imported variable declarations.
5167!
5168 integer, intent(in) :: ng, model
5169 integer, intent(in) :: Mstr, Mend, Asize
5170!
5171 real(r8), intent(in) :: A(Mstr:Mend)
5172 real(r8), intent(out) :: Awrk(Asize)
5173!
5174! Local variable declarations.
5175!
5176 integer :: LB, Lstr, MyError, Serror
5177 integer :: i, np, rank, request
5178
5179 integer :: my_bounds(2)
5180 integer, dimension(MPI_STATUS_SIZE) :: status
5181 integer, dimension(2,0:NtileI(ng)*NtileJ(ng)-1) :: Abounds
5182!
5183 character (len=MPI_MAX_ERROR_STRING) :: string
5184
5185 character (len=*), parameter :: MyFile = &
5186 & __FILE__//", mp_gather_state"
5187
5188# ifdef PROFILE
5189!
5190!-----------------------------------------------------------------------
5191! Turn on time clocks.
5192!-----------------------------------------------------------------------
5193!
5194 CALL wclock_on (ng, model, 66, __line__, myfile)
5195# endif
5196!
5197!-----------------------------------------------------------------------
5198! Collect data from all nodes.
5199!-----------------------------------------------------------------------
5200!
5201! Collect data lower and upper bound dimensions.
5202!
5203 np=2
5204 my_bounds(1)=mstr
5205 my_bounds(2)=mend
5206 CALL mpi_allgather (my_bounds, np, mpi_integer, abounds, np, &
5207 & mpi_integer, ocn_comm_world, myerror)
5208 IF (myerror.ne.mpi_success) THEN
5209 CALL mpi_error_string (myerror, string, lstr, serror)
5210 lstr=len_trim(string)
5211 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
5212 & string(1:lstr)
5213 10 FORMAT (/,' MP_GATHER_STATE - error during ',a, &
5214 & ' call, Task = ',i3.3,' Error = ',i3,/,13x,a)
5215 exit_flag=2
5216 RETURN
5217 END IF
5218!
5219! If master node, loop over other nodes and receive the data.
5220!
5221 IF (myrank.eq.mymaster) THEN
5222 DO rank=1,ntilei(ng)*ntilej(ng)-1
5223 np=abounds(2,rank)-abounds(1,rank)+1
5224 lb=abounds(1,rank)
5225 CALL mpi_irecv (awrk(lb:), np, mp_float, rank, rank+5, &
5226 & ocn_comm_world, request, myerror)
5227 CALL mpi_wait (request, status, myerror)
5228 IF (myerror.ne.mpi_success) THEN
5229 CALL mpi_error_string (myerror, string, lstr, serror)
5230 lstr=len_trim(string)
5231 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
5232 exit_flag=2
5233 RETURN
5234 END IF
5235 END DO
5236!
5237! Load master node contribution.
5238!
5239 DO i=mstr,mend
5240 awrk(i)=a(i)
5241 END DO
5242!
5243! Otherwise, send data to master node.
5244!
5245 ELSE
5246 np=mend-mstr+1
5247 CALL mpi_isend (a(mstr:), np, mp_float, mymaster, myrank+5, &
5248 & ocn_comm_world, request, myerror)
5249 CALL mpi_wait (request, status, myerror)
5250 IF (myerror.ne.mpi_success) THEN
5251 CALL mpi_error_string (myerror, string, lstr, serror)
5252 lstr=len_trim(string)
5253 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
5254 exit_flag=2
5255 RETURN
5256 END IF
5257 END IF
5258!
5259! Broadcast collected data to all nodes.
5260!
5261 CALL mpi_bcast (awrk, asize, mp_float, mymaster, ocn_comm_world, &
5262 & myerror)
5263 IF (myerror.ne.mpi_success) THEN
5264 CALL mpi_error_string (myerror, string, lstr, serror)
5265 lstr=len_trim(string)
5266 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
5267 exit_flag=2
5268 RETURN
5269 END IF
5270
5271# ifdef PROFILE
5272!
5273!-----------------------------------------------------------------------
5274! Turn off time clocks.
5275!-----------------------------------------------------------------------
5276!
5277 CALL wclock_off (ng, model, 66, __line__, myfile)
5278# endif
5279!
5280 RETURN

References mod_scalars::exit_flag, mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().

Referenced by ad_unpack(), ad_unpack_tile(), tl_unpack(), and tl_unpack_tile().

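A usage sketch follows. It is illustrative only: the wrapper name and the dummy arguments Nstr, Nend, Mstate, my_state, and state are invented here, not taken from the GST drivers listed above.

      SUBROUTINE gather_state_example (ng, model, Nstr, Nend, Mstate,   &
     &                                 my_state, state)
!
!  Gather this node's threaded segment my_state(Nstr:Nend) into the
!  full state vector state(1:Mstate); on return every node holds the
!  complete assembled copy.
!
      USE mod_kinds,      ONLY : r8
      USE distribute_mod, ONLY : mp_gather_state
!
      integer,  intent(in)  :: ng, model, Nstr, Nend, Mstate
      real(r8), intent(in)  :: my_state(Nstr:Nend)
      real(r8), intent(out) :: state(Mstate)
!
      CALL mp_gather_state (ng, model, Nstr, Nend, Mstate,              &
     &                      my_state, state)
!
      RETURN
      END SUBROUTINE gather_state_example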

◆ mp_ncread1d()

integer function distribute_mod::mp_ncread1d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) ncid,
character (len=*), intent(in) ncvname,
character (len=*), intent(in) ncname,
integer, intent(in) ncrec,
integer, intent(in) lb1,
integer, intent(in) ub1,
real(r8), intent(in) ascale,
real(r8), dimension(lb1:ub1), intent(out) a )

Definition at line 5283 of file distribute.F.

5286!
5287!***********************************************************************
5288! !
5289! This function reads floating point 1D state array from specified !
5290! NetCDF file and scatters it to the other nodes. !
5291! !
5292! On Input: !
5293! !
5294! ng Nested grid number. !
5295! model Calling model identifier. !
5296! ncid NetCDF file ID. !
5297! ncvname NetCDF variable name. !
5298! ncname NetCDF file name. !
5299! ncrec NetCDF record index to read. If negative, it !
5300! assumes that the variable is recordless. !
5301! LB1 First-dimension Lower bound. !
5302! UB1 First-dimension Upper bound. !
5303! Ascale Factor to scale field after reading (real). !
5304! !
5305! On Output: !
5306! !
5307! A Field to read in (real). !
5308! io_error Error flag (integer). !
5309! !
5310! Note: We cannot include "USE mod_netcdf" here because of cyclic !
5311! dependency. Instead we need original NetCDF library module !
5312! "USE netcdf". !
5313! !
5314!***********************************************************************
5315!
5316 USE netcdf
5317!
5318! Imported variable declarations.
5319!
5320 integer, intent(in) :: ng, model, ncid, ncrec
5321 integer, intent(in) :: LB1, UB1
5322!
5323 real(r8), intent(in) :: Ascale
5324
5325 real(r8), intent(out) :: A(LB1:UB1)
5326!
5327 character (len=*), intent(in) :: ncvname
5328 character (len=*), intent(in) :: ncname
5329!
5330! Local variable declarations.
5331!
5332 integer :: Lstr, MyError, Npts, Serror
5333 integer :: i, j, np, rank, request, varid
5334 integer :: io_error
5335 integer :: ibuffer(2), my_bounds(2), start(1), total(1)
5336
5337 integer, dimension(MPI_STATUS_SIZE) :: status
5338 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: Asize
5339!
5340 real(r8), allocatable :: Asend(:)
5341!
5342 character (len=MPI_MAX_ERROR_STRING) :: string
5343
5344 character (len=*), parameter :: MyFile = &
5345 & __FILE__//", mp_ncread1d_nf90"
5346
5347# ifdef PROFILE
5348!
5349!-----------------------------------------------------------------------
5350! Turn on time clocks.
5351!-----------------------------------------------------------------------
5352!
5353 CALL wclock_on (ng, model, 67, __line__, myfile)
5354# endif
5355!
5356!-----------------------------------------------------------------------
5357! Read requested NetCDF file and scatter it to all nodes.
5358!-----------------------------------------------------------------------
5359!
5360 io_error=nf90_noerr
5361!
5362! Collect data lower and upper bounds dimensions.
5363!
5364 np=2
5365 my_bounds(1)=lb1
5366 my_bounds(2)=ub1
5367 CALL mpi_allgather (my_bounds, np, mpi_integer, &
5368 & asize, np, mpi_integer, &
5369 & ocn_comm_world, myerror)
5370 IF (myerror.ne.mpi_success) THEN
5371 CALL mpi_error_string (myerror, string, lstr, serror)
5372 lstr=len_trim(string)
5373 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
5374 & string(1:lstr)
5375 exit_flag=2
5376 RETURN
5377 END IF
5378!
5379! If not master node, receive data from master node.
5380!
5381 IF (myrank.ne.mymaster) THEN
5382 np=ub1-lb1+1
5383 CALL mpi_irecv (a(lb1:), np, mp_float, mymaster, myrank+5, &
5384 & ocn_comm_world, request, myerror)
5385 CALL mpi_wait (request, status, myerror)
5386 IF (myerror.ne.mpi_success) THEN
5387 CALL mpi_error_string (myerror, string, lstr, serror)
5388 lstr=len_trim(string)
5389 WRITE (stdout,10) 'MPI_IRECV', myrank, myerror, string(1:lstr)
5390 exit_flag=2
5391 RETURN
5392 END IF
5393!
5394! Scale received (read) data.
5395!
5396 DO i=lb1,ub1
5397 a(i)=a(i)*ascale
5398 END DO
5399!
5400! Otherwise, if master node, allocate the send buffer.
5401!
5402 ELSE
5403 npts=0
5404 DO rank=0,ntilei(ng)*ntilej(ng)-1
5405 np=asize(2,rank)-asize(1,rank)+1
5406 npts=max(npts, np)
5407 END DO
5408 IF (.not.allocated(asend)) THEN
5409 allocate (asend(npts))
5410 END IF
5411!
5412! If master node, loop over all nodes and read buffers to send.
5413!
5414 io_error=nf90_inq_varid(ncid, trim(ncvname), varid)
5415 IF (io_error.ne.nf90_noerr) THEN
5416 WRITE (stdout,20) trim(ncvname), trim(ncname)
5417 exit_flag=2
5418 ioerror=io_error
5419 END IF
5420 IF (exit_flag.eq.noerror) THEN
5421 DO rank=0,ntilei(ng)*ntilej(ng)-1
5422 start(1)=asize(1,rank)
5423 total(1)=asize(2,rank)-asize(1,rank)+1
5424 io_error=nf90_get_var(ncid, varid, asend, start, total)
5425 IF (io_error.ne.nf90_noerr) THEN
5426 WRITE (stdout,30) trim(ncvname), trim(ncname)
5427 exit_flag=2
5428 ioerror=io_error
5429 EXIT
5430 END IF
5431!
5432! Send buffer to all nodes, except itself.
5433!
5434 IF (rank.eq.mymaster) THEN
5435 np=0
5436 DO i=lb1,ub1
5437 np=np+1
5438 a(i)=asend(np)*ascale
5439 END DO
5440 ELSE
5441 np=asize(2,rank)-asize(1,rank)+1
5442 CALL mpi_isend (asend, np, mp_float, rank, rank+5, &
5443 & ocn_comm_world, request, myerror)
5444 CALL mpi_wait (request, status, myerror)
5445 IF (myerror.ne.mpi_success) THEN
5446 CALL mpi_error_string (myerror, string, lstr, serror)
5447 lstr=len_trim(string)
5448 WRITE (stdout,10) 'MPI_ISEND', rank, myerror, &
5449 & string(1:lstr)
5450 exit_flag=2
5451 RETURN
5452 END IF
5453 END IF
5454 END DO
5455 END IF
5456 END IF
5457!
5458! Broadcast error flags to all nodes.
5459!
5460 ibuffer(1)=exit_flag
5461 ibuffer(2)=ioerror
5462 CALL mp_bcasti (ng, model, ibuffer)
5463 exit_flag=ibuffer(1)
5464 ioerror=ibuffer(2)
5465!
5466! Maximum automatic buffer memory size in bytes.
5467!
5468 bmemmax(ng)=max(bmemmax(ng), real(SIZE(asend)*kind(a),r8))
5469!
5470! Deallocate send buffer.
5471!
5472 IF (allocated(asend).and.(myrank.eq.mymaster)) THEN
5473 deallocate (asend)
5474 END IF
5475
5476# ifdef PROFILE
5477!
5478!-----------------------------------------------------------------------
5479! Turn off time clocks.
5480!-----------------------------------------------------------------------
5481!
5482 CALL wclock_off (ng, model, 67, __line__, myfile)
5483# endif
5484!
5485 10 FORMAT (/,' MP_NCREAD1D - error during ',a,' call, Task = ',i0, &
5486 & ' Error = ',i0,/,15x,a)
5487 20 FORMAT (/,' MP_NCREAD1D - error while inquiring ID for', &
5488 & ' variable: ',a,/,15x,'in file: ',a)
5489 30 FORMAT (/,' MP_NCREAD1D - error while reading variable: ',a, &
5490 & /,15x,'in file: ',a)
5491!
5492 RETURN

References mod_param::bmemmax, mod_scalars::exit_flag, mod_iounits::ioerror, mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_scalars::noerror, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().

Referenced by get_gst_mod::get_gst_nf90().

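A minimal call sketch, assuming the usual ROMS context is already in scope (ng, model, an open NetCDF file id ncid, the node's segment bounds Nstr and Nend, and an array Bvec(Nstr:Nend)); the variable name 'Bvec' and the file name 'gst.nc' are invented placeholders.

      USE distribute_mod, ONLY : mp_ncread1d
!
      integer :: status
!
!  Read a recordless variable (ncrec = -1).  Each node passes its own
!  segment bounds and receives only its slab, scaled here by 1.0_r8.
!
      status=mp_ncread1d(ng, model, ncid, 'Bvec', 'gst.nc',             &
     &                   -1, Nstr, Nend, 1.0_r8, Bvec)

Because the routine broadcasts exit_flag and ioerror internally, every node (not only the master that does the actual reading) observes an I/O failure.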

◆ mp_ncread2d()

integer function distribute_mod::mp_ncread2d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) ncid,
character (len=*), intent(in) ncvname,
character (len=*), intent(in) ncname,
integer, intent(in) ncrec,
integer, intent(in) lb1,
integer, intent(in) ub1,
integer, intent(in) lb2,
integer, intent(in) ub2,
real(r8), intent(in) ascale,
real(r8), dimension(lb1:ub1,lb2:ub2), intent(out) a )

Definition at line 5495 of file distribute.F.

5498!
5499!***********************************************************************
5500! !
5501! This function reads floating point 2D state array from specified !
5502! NetCDF file and scatters it to the other nodes. !
5503! !
5504! On Input: !
5505! !
5506! ng Nested grid number. !
5507! model Calling model identifier. !
5508! ncid NetCDF file ID. !
5509! ncvname NetCDF variable name. !
5510! ncname NetCDF file name. !
5511! ncrec NetCDF record index to read. If negative, it !
5512! assumes that the variable is recordless. !
5513! LB1 First-dimension Lower bound. !
5514! UB1 First-dimension Upper bound. !
5515! LB2 Second-dimension Lower bound. !
5516! UB2 Second-dimension Upper bound. !
5517! Ascale Factor to scale field after reading (real). !
5518! !
5519! On Output: !
5520! !
5521! A Field to read in (real). !
5522! io_error Error flag (integer). !
5523! !
5524! Note: We cannot include "USE mod_netcdf" here because of cyclic !
5525! dependency. Instead we need original NetCDF library module !
5526! "USE netcdf". !
5527! !
5528!***********************************************************************
5529!
5530 USE netcdf
5531!
5532! Imported variable declarations.
5533!
5534 integer, intent(in) :: ng, model, ncid, ncrec
5535 integer, intent(in) :: LB1, UB1, LB2, UB2
5536!
5537 real(r8), intent(in) :: Ascale
5538
5539 real(r8), intent(out) :: A(LB1:UB1,LB2:UB2)
5540!
5541 character (len=*), intent(in) :: ncvname
5542 character (len=*), intent(in) :: ncname
5543!
5544! Local variable declarations.
5545!
5546 integer :: Lstr, MyError, Npts, Serror
5547 integer :: i, j, np, rank, request, varid
5548 integer :: io_error
5549 integer :: ibuffer(2), my_bounds(4), start(2), total(2)
5550
5551 integer, dimension(MPI_STATUS_SIZE) :: status
5552 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: Asize
5553!
5554 real(r8), allocatable :: Asend(:)
5555!
5556 character (len=MPI_MAX_ERROR_STRING) :: string
5557
5558 character (len=*), parameter :: MyFile = &
5559 & __FILE__//", mp_ncread2d_nf90"
5560
5561# ifdef PROFILE
5562!
5563!-----------------------------------------------------------------------
5564! Turn on time clocks.
5565!-----------------------------------------------------------------------
5566!
5567 CALL wclock_on (ng, model, 67, __line__, myfile)
5568# endif
5569!
5570!-----------------------------------------------------------------------
5571! Read requested NetCDF file and scatter it to all nodes.
5572!-----------------------------------------------------------------------
5573!
5574 io_error=nf90_noerr
5575!
5576! Collect data lower and upper bounds dimensions.
5577!
5578 np=4
5579 my_bounds(1)=lb1
5580 my_bounds(2)=ub1
5581 my_bounds(3)=lb2
5582 my_bounds(4)=ub2
5583 CALL mpi_allgather (my_bounds, np, mpi_integer, &
5584 & asize, np, mpi_integer, &
5585 & ocn_comm_world, myerror)
5586 IF (myerror.ne.mpi_success) THEN
5587 CALL mpi_error_string (myerror, string, lstr, serror)
5588 lstr=len_trim(string)
5589 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
5590 & string(1:lstr)
5591 exit_flag=2
5592 RETURN
5593 END IF
5594!
5595! If not master node, receive data from master node.
5596!
5597 IF (myrank.ne.mymaster) THEN
5598 np=(ub1-lb1+1)*(ub2-lb2+1)
5599 CALL mpi_irecv (a(lb1,lb2), np, mp_float, mymaster, myrank+5, &
5600 & ocn_comm_world, request, myerror)
5601 CALL mpi_wait (request, status, myerror)
5602 IF (myerror.ne.mpi_success) THEN
5603 CALL mpi_error_string (myerror, string, lstr, serror)
5604 lstr=len_trim(string)
5605 WRITE (stdout,10) 'MPI_IRECV', myrank, myerror, string(1:lstr)
5606 exit_flag=2
5607 RETURN
5608 END IF
5609!
5610! Scale received (read) data.
5611!
5612 DO j=lb2,ub2
5613 DO i=lb1,ub1
5614 a(i,j)=a(i,j)*ascale
5615 END DO
5616 END DO
5617!
5618! Otherwise, if master node, allocate the send buffer.
5619!
5620 ELSE
5621 npts=0
5622 DO rank=0,ntilei(ng)*ntilej(ng)-1
5623 np=(asize(2,rank)-asize(1,rank)+1)* &
5624 & (asize(4,rank)-asize(3,rank)+1)
5625 npts=max(npts, np)
5626 END DO
5627 IF (.not.allocated(asend)) THEN
5628 allocate (asend(npts))
5629 END IF
5630!
5631! If master node, loop over all nodes and read buffers to send.
5632!
5633 io_error=nf90_inq_varid(ncid, trim(ncvname), varid)
5634 IF (io_error.ne.nf90_noerr) THEN
5635 WRITE (stdout,20) trim(ncvname), trim(ncname)
5636 exit_flag=2
5637 ioerror=io_error
5638 END IF
5639 IF (exit_flag.eq.noerror) THEN
5640 DO rank=0,ntilei(ng)*ntilej(ng)-1
5641 start(1)=asize(1,rank)
5642 total(1)=asize(2,rank)-asize(1,rank)+1
5643 start(2)=asize(3,rank)
5644 total(2)=asize(4,rank)-asize(3,rank)+1
5645 io_error=nf90_get_var(ncid, varid, asend, start, total)
5646 IF (io_error.ne.nf90_noerr) THEN
5647 WRITE (stdout,30) trim(ncvname), trim(ncname)
5648 exit_flag=2
5649 ioerror=io_error
5650 EXIT
5651 END IF
5652!
5653! Send buffer to all nodes, except itself.
5654!
5655 IF (rank.eq.mymaster) THEN
5656 np=0
5657 DO j=lb2,ub2
5658 DO i=lb1,ub1
5659 np=np+1
5660 a(i,j)=asend(np)*ascale
5661 END DO
5662 END DO
5663 ELSE
5664 np=(asize(2,rank)-asize(1,rank)+1)* &
5665 & (asize(4,rank)-asize(3,rank)+1)
5666 CALL mpi_isend (asend, np, mp_float, rank, rank+5, &
5667 & ocn_comm_world, request, myerror)
5668 CALL mpi_wait (request, status, myerror)
5669 IF (myerror.ne.mpi_success) THEN
5670 CALL mpi_error_string (myerror, string, lstr, serror)
5671 lstr=len_trim(string)
5672 WRITE (stdout,10) 'MPI_ISEND', rank, myerror, &
5673 & string(1:lstr)
5674 exit_flag=2
5675 RETURN
5676 END IF
5677 END IF
5678 END DO
5679 END IF
5680 END IF
5681!
5682! Broadcast error flags to all nodes.
5683!
5684 ibuffer(1)=exit_flag
5685 ibuffer(2)=ioerror
5686 CALL mp_bcasti (ng, model, ibuffer)
5687 exit_flag=ibuffer(1)
5688 ioerror=ibuffer(2)
5689!
5690! Maximum automatic buffer memory size in bytes.
5691!
5692 bmemmax(ng)=max(bmemmax(ng), real(SIZE(asend)*kind(a),r8))
5693!
5694! Deallocate send buffer.
5695!
5696 IF (allocated(asend).and.(myrank.eq.mymaster)) THEN
5697 deallocate (asend)
5698 END IF
5699
5700# ifdef PROFILE
5701!
5702!-----------------------------------------------------------------------
5703! Turn off time clocks.
5704!-----------------------------------------------------------------------
5705!
5706 CALL wclock_off (ng, model, 67, __line__, myfile)
5707# endif
5708!
5709 10 FORMAT (/,' MP_NCREAD2D - error during ',a,' call, Task = ',i0, &
5710 & ' Error = ',i0,/,15x,a)
5711 20 FORMAT (/,' MP_NCREAD2D - error while inquiring ID for', &
5712 & ' variable: ',a,/,15x,'in file: ',a)
5713 30 FORMAT (/,' MP_NCREAD2D - error while reading variable: ',a, &
5714 & /,15x,'in file: ',a)
5715!
5716 RETURN

References mod_param::bmemmax, mod_scalars::exit_flag, mod_iounits::ioerror, mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_scalars::noerror, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().

Referenced by get_gst_mod::get_gst_nf90().

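The 2D variant is called the same way with a second pair of bounds. In the hypothetical fragment below the first dimension is the distributed state segment (Nstr:Nend) and the second dimension (1:Nvec, an invented extent) is carried whole on every node:

      status=mp_ncread2d(ng, model, ncid, 'Bmatrix', 'gst.nc',          &
     &                   -1, Nstr, Nend, 1, Nvec, 1.0_r8, Bmatrix)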

◆ mp_ncwrite1d()

integer function distribute_mod::mp_ncwrite1d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) ncid,
character (len=*), intent(in) ncvname,
character (len=*), intent(in) ncname,
integer, intent(in) ncrec,
integer, intent(in) lb1,
integer, intent(in) ub1,
real(r8), intent(in) ascale,
real(r8), dimension(lb1:ub1), intent(in) a )

Definition at line 5719 of file distribute.F.

5722!
5723!***********************************************************************
5724! !
5725! This function collects floating point 1D state array data from the !
5726! other nodes and writes it into specified NetCDF file. !
5727! !
5728! On Input: !
5729! !
5730! ng Nested grid number. !
5731! model Calling model identifier. !
5732! ncid NetCDF file ID. !
5733! ncvname NetCDF variable name. !
5734! ncname NetCDF file name. !
5735! ncrec NetCDF record index to write. If negative, it !
5736! assumes that the variable is recordless. !
5737! LB1 First-dimension Lower bound. !
5738! UB1 First-dimension Upper bound. !
5739! Ascale Factor to scale field before writing (real). !
5740! A Field to write out (real). !
5741! !
5742! On Output: !
5743! !
5744! io_error Error flag (integer). !
5745! !
5746! Note: We cannot include "USE mod_netcdf" here because of cyclic !
5747! dependency. Instead we need original NetCDF library module !
5748! "USE netcdf". !
5749! !
5750!***********************************************************************
5751!
5752 USE netcdf
5753!
5754! Imported variable declarations.
5755!
5756 integer, intent(in) :: ng, model, ncid, ncrec
5757 integer, intent(in) :: LB1, UB1
5758!
5759 real(r8), intent(in) :: Ascale
5760
5761 real(r8), intent(in) :: A(LB1:UB1)
5762
5763 character (len=*), intent(in) :: ncvname
5764 character (len=*), intent(in) :: ncname
5765!
5766! Local variable declarations.
5767!
5768 integer :: Lstr, MyError, Npts, Serror
5769 integer :: i, j, np, rank, request, varid
5770 integer :: io_error
5771 integer :: ibuffer(2), my_bounds(2), start(1), total(1)
5772
5773 integer, dimension(MPI_STATUS_SIZE) :: status
5774 integer, dimension(2,0:NtileI(ng)*NtileJ(ng)-1) :: Asize
5775!
5776 real(r8), allocatable :: Arecv(:)
5777!
5778 character (len=MPI_MAX_ERROR_STRING) :: string
5779
5780 character (len=*), parameter :: MyFile = &
5781 & __FILE__//", mp_ncwrite1d"
5782
5783# ifdef PROFILE
5784!
5785!-----------------------------------------------------------------------
5786! Turn on time clocks.
5787!-----------------------------------------------------------------------
5788!
5789 CALL wclock_on (ng, model, 66, __line__, myfile)
5790# endif
5791!
5792!-----------------------------------------------------------------------
5793! Collect and write data into requested NetCDF file.
5794!-----------------------------------------------------------------------
5795!
5796 io_error=nf90_noerr
5797!
5798! Collect data lower and upper bounds dimensions.
5799!
5800 np=2
5801 my_bounds(1)=lb1
5802 my_bounds(2)=ub1
5803 CALL mpi_allgather (my_bounds, np, mpi_integer, &
5804 & asize, np, mpi_integer, &
5805 & ocn_comm_world, myerror)
5806 IF (myerror.ne.mpi_success) THEN
5807 CALL mpi_error_string (myerror, string, lstr, serror)
5808 lstr=len_trim(string)
5809 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
5810 & string(1:lstr)
5811 exit_flag=2
5812 RETURN
5813 END IF
5814!
5815! If master node, allocate the receive buffer.
5816!
5817 IF (myrank.eq.mymaster) THEN
5818 npts=0
5819 DO rank=0,ntilei(ng)*ntilej(ng)-1
5820 np=(asize(2,rank)-asize(1,rank)+1)
5821 npts=max(npts, np)
5822 END DO
5823 IF (.not.allocated(arecv)) THEN
5824 allocate (arecv(npts))
5825 END IF
5826!
5827! Write out master node contribution.
5828!
5829 start(1)=lb1
5830 total(1)=ub1-lb1+1
5831 np=0
5832 DO i=lb1,ub1
5833 np=np+1
5834 arecv(np)=a(i)
5835 END DO
5836 io_error=nf90_inq_varid(ncid, trim(ncvname), varid)
5837 IF (io_error.eq.nf90_noerr) THEN
5838 io_error=nf90_put_var(ncid, varid, arecv, start, total)
5839 IF (io_error.ne.nf90_noerr) THEN
5840 WRITE (stdout,20) trim(ncvname), trim(ncname)
5841 exit_flag=3
5842 ioerror=io_error
5843 END IF
5844 ELSE
5845 WRITE (stdout,30) trim(ncvname), trim(ncname)
5846 exit_flag=3
5847 ioerror=io_error
5848 END IF
5849!
5850! If master node, loop over other nodes and receive the data.
5851!
5852 IF (exit_flag.eq.noerror) THEN
5853 DO rank=1,ntilei(ng)*ntilej(ng)-1
5854 np=asize(2,rank)-asize(1,rank)+1
5855 CALL mpi_irecv (arecv, np, mp_float, rank, rank+5, &
5856 & ocn_comm_world, request, myerror)
5857 CALL mpi_wait (request, status, myerror)
5858 IF (myerror.ne.mpi_success) THEN
5859 CALL mpi_error_string (myerror, string, lstr, serror)
5860 lstr=len_trim(string)
5861 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, &
5862 & string(1:lstr)
5863 exit_flag=3
5864 RETURN
5865 END IF
5866!
5867! Write out data into NetCDF file.
5868!
5869 start(1)=asize(1,rank)
5870 total(1)=asize(2,rank)-asize(1,rank)+1
5871 DO i=1,np
5872 arecv(i)=arecv(i)*ascale
5873 END DO
5874 io_error=nf90_put_var(ncid, varid, arecv, start, total)
5875 IF (io_error.ne.nf90_noerr) THEN
5876 WRITE (stdout,20) trim(ncvname), trim(ncname)
5877 exit_flag=3
5878 ioerror=io_error
5879 EXIT
5880 END IF
5881 END DO
5882 END IF
5883!
5884! Otherwise, send data to master node.
5885!
5886 ELSE
5887 np=ub1-lb1+1
5888 CALL mpi_isend (a(lb1:), np, mp_float, mymaster, myrank+5, &
5889 & ocn_comm_world, request, myerror)
5890 CALL mpi_wait (request, status, myerror)
5891 IF (myerror.ne.mpi_success) THEN
5892 CALL mpi_error_string (myerror, string, lstr, serror)
5893 lstr=len_trim(string)
5894 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
5895 exit_flag=2
5896 RETURN
5897 END IF
5898 END IF
5899!
5900! Broadcast error flags to all nodes.
5901!
5902 ibuffer(1)=exit_flag
5903 ibuffer(2)=ioerror
5904 CALL mp_bcasti (ng, model, ibuffer)
5905 exit_flag=ibuffer(1)
5906 ioerror=ibuffer(2)
5907!
5908! Maximum automatic buffer memory size in bytes.
5909!
5910 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
5911!
5912! Deallocate receive buffer.
5913!
5914 IF (allocated(arecv).and.(myrank.eq.mymaster)) THEN
5915 deallocate (arecv)
5916 END IF
5917
5918# ifdef PROFILE
5919!
5920!-----------------------------------------------------------------------
5921! Turn off time clocks.
5922!-----------------------------------------------------------------------
5923!
5924 CALL wclock_off (ng, model, 66, __line__, myfile)
5925# endif
5926!
5927 10 FORMAT (/,' MP_NCWRITE1D - error during ',a,' call, Task = ',i0, &
5928 & ' Error = ',i0,/,21x,a)
5929 20 FORMAT (/,' MP_NCWRITE1D - error while writing variable: ',a, &
5930 & /,16x,'in file: ',a)
5931 30 FORMAT (/,' MP_NCWRITE1D - error while inquiring ID for', &
5932 & ' variable: ',a,/,16x,'in file: ',a)
5933!
5934 RETURN

References mod_param::bmemmax, mod_scalars::exit_flag, mod_iounits::ioerror, mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_scalars::noerror, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().

Referenced by wrt_gst_mod::wrt_gst_nf90().

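Writing is the mirror image of mp_ncread1d: every node sends its segment, and the master collects the slabs and writes them one rank at a time. A sketch with the same invented names and assumed context as in the read example above:

      status=mp_ncwrite1d(ng, model, ncid, 'Bvec', 'gst.nc',            &
     &                    -1, Nstr, Nend, 1.0_r8, Bvec)

On failure the routine itself sets and broadcasts exit_flag and ioerror, so every node sees the error even though only the master touches the file.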

◆ mp_ncwrite2d()

integer function distribute_mod::mp_ncwrite2d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) ncid,
character (len=*), intent(in) ncvname,
character (len=*), intent(in) ncname,
integer, intent(in) ncrec,
integer, intent(in) lb1,
integer, intent(in) ub1,
integer, intent(in) lb2,
integer, intent(in) ub2,
real(r8), intent(in) ascale,
real(r8), dimension(lb1:ub1,lb2:ub2), intent(in) a )

Definition at line 5937 of file distribute.F.

5940!
5941!***********************************************************************
5942! !
5943! This function collects floating point 2D state array data from the !
5944! other nodes and writes it into specified NetCDF file. !
5945! !
5946! On Input: !
5947! !
5948! ng Nested grid number. !
5949! model Calling model identifier. !
5950! ncid NetCDF file ID. !
5951! ncvname NetCDF variable name. !
5952! ncname NetCDF file name. !
5953! ncrec NetCDF record index to write. If negative, it !
5954! assumes that the variable is recordless. !
5955! LB1 First-dimension Lower bound. !
5956! UB1 First-dimension Upper bound. !
5957! LB2 Second-dimension Lower bound. !
5958! UB2 Second-dimension Upper bound. !
5959! Ascale Factor to scale field before writing (real). !
5960! A Field to write out (real). !
5961! !
5962! On Output: !
5963! !
5964! io_error Error flag (integer). !
5965! !
5966! Note: We cannot include "USE mod_netcdf" here because of cyclic !
5967! dependency. Instead we need original NetCDF library module !
5968! "USE netcdf". !
5969! !
5970!***********************************************************************
5971!
5972 USE netcdf
5973!
5974! Imported variable declarations.
5975!
5976 integer, intent(in) :: ng, model, ncid, ncrec
5977 integer, intent(in) :: LB1, UB1, LB2, UB2
5978!
5979 real(r8), intent(in) :: Ascale
5980
5981 real(r8), intent(in) :: A(LB1:UB1,LB2:UB2)
5982!
5983 character (len=*), intent(in) :: ncvname
5984 character (len=*), intent(in) :: ncname
5985!
5986! Local variable declarations.
5987!
5988 integer :: Lstr, MyError, Npts, Serror
5989 integer :: i, j, np, rank, request, varid
5990 integer :: io_error
5991 integer :: ibuffer(2), my_bounds(4), start(2), total(2)
5992
5993 integer, dimension(MPI_STATUS_SIZE) :: status
5994 integer, dimension(4,0:NtileI(ng)*NtileJ(ng)-1) :: Asize
5995!
5996 real(r8), allocatable :: Arecv(:)
5997!
5998 character (len=MPI_MAX_ERROR_STRING) :: string
5999
6000 character (len=*), parameter :: MyFile = &
6001 & __FILE__//", mp_ncwrite2d_nf90"
6002
6003# ifdef PROFILE
6004!
6005!-----------------------------------------------------------------------
6006! Turn on time clocks.
6007!-----------------------------------------------------------------------
6008!
6009 CALL wclock_on (ng, model, 66, __line__, myfile)
6010# endif
6011!
6012!-----------------------------------------------------------------------
6013! Collect and write data into requested NetCDF file.
6014!-----------------------------------------------------------------------
6015!
6016 io_error=nf90_noerr
6017!
6018! Collect data lower and upper bounds dimensions.
6019!
6020 np=4
6021 my_bounds(1)=lb1
6022 my_bounds(2)=ub1
6023 my_bounds(3)=lb2
6024 my_bounds(4)=ub2
6025 CALL mpi_allgather (my_bounds, np, mpi_integer, &
6026 & asize, np, mpi_integer, &
6027 & ocn_comm_world, myerror)
6028 IF (myerror.ne.mpi_success) THEN
6029 CALL mpi_error_string (myerror, string, lstr, serror)
6030 lstr=len_trim(string)
6031 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6032 & string(1:lstr)
6033 exit_flag=2
6034 RETURN
6035 END IF
6036!
6037! If master node, allocate the receive buffer.
6038!
6039 IF (myrank.eq.mymaster) THEN
6040 npts=0
6041 DO rank=0,ntilei(ng)*ntilej(ng)-1
6042 np=(asize(2,rank)-asize(1,rank)+1)* &
6043 & (asize(4,rank)-asize(3,rank)+1)
6044 npts=max(npts, np)
6045 END DO
6046 IF (.not.allocated(arecv)) THEN
6047 allocate (arecv(npts))
6048 END IF
6049!
6050! Write out master node contribution.
6051!
6052 start(1)=lb1
6053 total(1)=ub1-lb1+1
6054 start(2)=lb2
6055 total(2)=ub2-lb2+1
6056 np=0
6057 DO j=lb2,ub2
6058 DO i=lb1,ub1
6059 np=np+1
6060 arecv(np)=a(i,j)
6061 END DO
6062 END DO
6063 io_error=nf90_inq_varid(ncid, trim(ncvname), varid)
6064 IF (io_error.eq.nf90_noerr) THEN
6065 io_error=nf90_put_var(ncid, varid, arecv, start, total)
6066 IF (io_error.ne.nf90_noerr) THEN
6067 WRITE (stdout,20) trim(ncvname), trim(ncname)
6068 exit_flag=3
6069 ioerror=io_error
6070 END IF
6071 ELSE
6072 WRITE (stdout,30) trim(ncvname), trim(ncname)
6073 exit_flag=3
6074 ioerror=io_error
6075 END IF
6076!
6077! If master node, loop over other nodes and receive the data.
6078!
6079 IF (exit_flag.eq.noerror) THEN
6080 DO rank=1,ntilei(ng)*ntilej(ng)-1
6081 np=(asize(2,rank)-asize(1,rank)+1)* &
6082 & (asize(4,rank)-asize(3,rank)+1)
6083 CALL mpi_irecv (arecv, np, mp_float, rank, rank+5, &
6084 & ocn_comm_world, request, myerror)
6085 CALL mpi_wait (request, status, myerror)
6086 IF (myerror.ne.mpi_success) THEN
6087 CALL mpi_error_string (myerror, string, lstr, serror)
6088 lstr=len_trim(string)
6089 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, &
6090 & string(1:lstr)
6091 exit_flag=3
6092 RETURN
6093 END IF
6094!
6095! Write out data into NetCDF file.
6096!
6097 start(1)=asize(1,rank)
6098 total(1)=asize(2,rank)-asize(1,rank)+1
6099 start(2)=asize(3,rank)
6100 total(2)=asize(4,rank)-asize(3,rank)+1
6101 DO i=1,np
6102 arecv(i)=arecv(i)*ascale
6103 END DO
6104 io_error=nf90_put_var(ncid, varid, arecv, start, total)
6105 IF (io_error.ne.nf90_noerr) THEN
6106 WRITE (stdout,20) trim(ncvname), trim(ncname)
6107 exit_flag=3
6108 ioerror=io_error
6109 EXIT
6110 END IF
6111 END DO
6112 END IF
6113!
6114! Otherwise, send data to master node.
6115!
6116 ELSE
6117 np=(ub1-lb1+1)*(ub2-lb2+1)
6118 CALL mpi_isend (a(lb1:,lb2:), np, mp_float, mymaster, myrank+5, &
6119 & ocn_comm_world, request, myerror)
6120 CALL mpi_wait (request, status, myerror)
6121 IF (myerror.ne.mpi_success) THEN
6122 CALL mpi_error_string (myerror, string, lstr, serror)
6123 lstr=len_trim(string)
6124 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
6125 exit_flag=2
6126 RETURN
6127 END IF
6128 END IF
6129!
6130! Broadcast error flags to all nodes.
6131!
6132 ibuffer(1)=exit_flag
6133 ibuffer(2)=ioerror
6134 CALL mp_bcasti (ng, model, ibuffer)
6135 exit_flag=ibuffer(1)
6136 ioerror=ibuffer(2)
6137!
6138! Maximum automatic buffer memory size in bytes.
6139!
6140 bmemmax(ng)=max(bmemmax(ng), real(SIZE(arecv)*kind(a),r8))
6141!
6142! Deallocate receive buffer.
6143!
6144 IF (allocated(arecv).and.(myrank.eq.mymaster)) THEN
6145 deallocate (arecv)
6146 END IF
6147
6148# ifdef PROFILE
6149!
6150!-----------------------------------------------------------------------
6151! Turn off time clocks.
6152!-----------------------------------------------------------------------
6153!
6154 CALL wclock_off (ng, model, 66, __line__, myfile)
6155# endif
6156!
6157 10 FORMAT (/,' MP_NCWRITE2D - error during ',a,' call, Task = ',i0, &
6158 & ' Error = ',i0,/,21x,a)
6159 20 FORMAT (/,' MP_NCWRITE2D - error while writing variable: ',a, &
6160 & /,16x,'in file: ',a)
6161 30 FORMAT (/,' MP_NCWRITE2D - error while inquiring ID for', &
6162 & ' variable: ',a,/,16x,'in file: ',a)
6163!
6164 RETURN

References mod_param::bmemmax, mod_scalars::exit_flag, mod_iounits::ioerror, mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_scalars::noerror, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().

Referenced by wrt_gst_mod::wrt_gst_nf90().

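As with the read pair, the 2D write takes a second pair of bounds; the fragment below (invented names again) writes a distributed first dimension and a shared second dimension 1:Nvec:

      status=mp_ncwrite2d(ng, model, ncid, 'Bmatrix', 'gst.nc',         &
     &                    -1, Nstr, Nend, 1, Nvec, 1.0_r8, Bmatrix)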

◆ mp_reduce2()

subroutine distribute_mod::mp_reduce2 ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) isize,
integer, intent(in) jsize,
real(r8), dimension(isize,jsize), intent(inout) a,
character (len=*), dimension(jsize), intent(in) handle_op,
integer, intent(in), optional inpcomm )

Definition at line 7191 of file distribute.F.

7193!
7194!***********************************************************************
7195! !
7196! This routine computes the global minimum/maximum and its associated !
7197! qualifiers like: location and/or other scalar components. Then, !
7198! it broadcasts reduced variables to all nodes in the group. !
7199! !
7200! On Input: !
7201! !
7202! ng Nested grid number. !
7203! model Calling model identifier. !
7204! Isize Size of I-dimension: the minimum/maximum to reduce !
7205! is in location A(1,:) and qualifiers A(2:Isize,:). !
7206! Jsize Size of J-dimension: number of different sets of !
7207! minimum and/or maximum to process. !
7208! A Matrix of variables and qualifiers to reduce. !
7209! handle_op Reduction operation handle (string) of size Jsize. !
7210! The following reduction operations are supported: !
7211! 'MINLOC', 'MAXLOC' !
7212! InpComm Communicator handle (integer, OPTIONAL). !
7213! !
7214! On Output: !
7215! !
7216! A Matrix of reduced variables and qualifiers. !
7217! !
7218!***********************************************************************
7219!
7220! Imported variable declarations.
7221!
7222 integer, intent(in) :: ng, model, Isize, Jsize
7223
7224 integer, intent(in), optional :: InpComm
7225!
7226 character (len=*), intent(in) :: handle_op(Jsize)
7227!
7228 real(r8), intent(inout) :: A(Isize,Jsize)
7229!
7230! Local variable declarations.
7231!
7232 integer :: Lstr, MyCOMM, MyError, Serror
7233 integer :: handle, i, j
7234!
7235 real(r8), dimension(2,Isize) :: Areduce
7236 real(r8), dimension(2,Isize) :: Asend
7237!
7238 character (len=MPI_MAX_ERROR_STRING) :: string
7239
7240 character (len=*), parameter :: MyFile = &
7241 & __FILE__//", mp_reduce2"
7242
7243# ifdef PROFILE
7244!
7245!-----------------------------------------------------------------------
7246! Turn on time clocks.
7247!-----------------------------------------------------------------------
7248!
7249 CALL wclock_on (ng, model, 65, __line__, myfile)
7250# endif
7251# ifdef MPI
7252!
7253!-----------------------------------------------------------------------
7254! Set distributed-memory communicator handle (context ID).
7255!-----------------------------------------------------------------------
7256!
7257 IF (PRESENT(inpcomm)) THEN
7258 mycomm=inpcomm
7259 ELSE
7260 mycomm=ocn_comm_world
7261 END IF
7262# endif
7263!
7264!-----------------------------------------------------------------------
7265! Reduce requested variables and qualifiers.
7266!-----------------------------------------------------------------------
7267!
7268! Maximum automatic buffer memory size in bytes.
7269!
7270 bmemmax(ng)=max(bmemmax(ng), real((SIZE(areduce)+ &
7271 & SIZE(asend))*kind(a),r8))
7272!
7273! Pack and reduce.
7274!
7275 DO j=1,jsize
7276 DO i=1,isize
7277 asend(1,i)=a(1,j)
7278 asend(2,i)=a(i,j)
7279 END DO
7280 IF (handle_op(j)(1:6).eq.'MINLOC') THEN
7281 handle=mpi_minloc
7282 ELSE IF (handle_op(j)(1:6).eq.'MAXLOC') THEN
7283 handle=mpi_maxloc
7284 END IF
7285 CALL mpi_allreduce (asend, areduce, isize, &
7286# ifdef DOUBLE_PRECISION
7287 & mpi_2double_precision, &
7288# else
7289 & mpi_2real, &
7290# endif
7291 & handle, mycomm, myerror)
7292 IF (myerror.ne.mpi_success) THEN
7293 CALL mpi_error_string (myerror, string, lstr, serror)
7294 lstr=len_trim(string)
7295 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
7296 & string(1:lstr)
7297 10 FORMAT (/,' MP_REDUCE2 - error during ',a,' call, Task = ', &
7298 & i3.3,' Error = ',i3,/,16x,a)
7299 exit_flag=2
7300 RETURN
7301 END IF
7302!
7303! Unpack.
7304!
7305 a(1,j)=areduce(1,1)
7306 DO i=2,isize
7307 a(i,j)=areduce(2,i)
7308 END DO
7309 END DO
7310
7311# ifdef PROFILE
7312!
7313!-----------------------------------------------------------------------
7314! Turn off time clocks.
7315!-----------------------------------------------------------------------
7316!
7317 CALL wclock_off (ng, model, 65, __line__, myfile)
7318# endif
7319!
7320 RETURN

References mod_param::bmemmax, mod_scalars::exit_flag, mod_parallel::myrank, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().

Referenced by diag_mod::diag_tile().

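The packing convention is the part that is easy to get wrong: column j of A carries the value to be minimized or maximized in A(1,j) and its qualifiers (for example, grid locations) in A(2:Isize,j). A hedged sketch with invented tile-local names (my_maxspeed, imax, jmax, kmax):

      USE mod_kinds,      ONLY : r8
      USE distribute_mod, ONLY : mp_reduce2
!
      integer, parameter :: Isize = 4, Jsize = 1
!
      real(r8)          :: rbuffer(Isize,Jsize)
      character (len=6) :: op_handle(Jsize)
!
!  Column 1: tile maximum speed and its (i,j,k) location.  After the
!  call, rbuffer(1,1) holds the global maximum and rbuffer(2:4,1) the
!  qualifiers from the rank that owns it.
!
      rbuffer(1,1)=my_maxspeed
      rbuffer(2,1)=REAL(imax,r8)
      rbuffer(3,1)=REAL(jmax,r8)
      rbuffer(4,1)=REAL(kmax,r8)
      op_handle(1)='MAXLOC'
!
      CALL mp_reduce2 (ng, model, Isize, Jsize, rbuffer, op_handle)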

◆ mp_reduce_0d()

subroutine distribute_mod::mp_reduce_0d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) asize,
real(r8), intent(inout) a,
character (len=*), intent(in) handle_op,
integer, intent(in), optional inpcomm )

Definition at line 6786 of file distribute.F.

6787!
6788!***********************************************************************
6789! !
6790! This routine collects and reduces requested variables from all !
6791! nodes in the group. Then, it broadcasts reduced variables to !
6792! all nodes in the group. !
6793! !
6794! On Input: !
6795! !
6796! ng Nested grid number. !
6797! model Calling model identifier. !
6798! Asize Number of scalar variables to reduce. !
6799! A Vector of scalar variables to reduce. !
6800! handle_op Reduction operation handle (string). The following !
6801! reduction operations are supported: !
6802! 'MIN', 'MAX', 'SUM' !
6803! InpComm Communicator handle (integer, OPTIONAL). !
6804! !
6805! On Output: !
6806! !
6807! A Vector of reduced scalar variables. !
6808! !
6809!***********************************************************************
6810!
6811! Imported variable declarations.
6812!
6813 integer, intent(in) :: ng, model, Asize
6814
6815 integer, intent(in), optional :: InpComm
6816!
6817 character (len=*), intent(in) :: handle_op
6818!
6819 real(r8), intent(inout) :: A
6820!
6821! Local variable declarations.
6822!
6823 integer :: Lstr, MyCOMM, MyError, Npts, Serror
6824 integer :: handle, i, rank, request
6825
6826 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
6827
6828 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
6829 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
6830!
6831 real(r8) :: Areduce
6832 real(r8) :: Asend
6833 real(r8), dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
6834!
6835 character (len=MPI_MAX_ERROR_STRING) :: string
6836
6837 character (len=*), parameter :: MyFile = &
6838 & __FILE__//", mp_reduce_0d"
6839
6840# ifdef PROFILE
6841!
6842!-----------------------------------------------------------------------
6843! Turn on time clocks.
6844!-----------------------------------------------------------------------
6845!
6846 CALL wclock_on (ng, model, 65, __line__, myfile)
6847# endif
6848# ifdef MPI
6849!
6850!-----------------------------------------------------------------------
6851! Set distributed-memory communicator handle (context ID).
6852!-----------------------------------------------------------------------
6853!
6854 IF (PRESENT(inpcomm)) THEN
6855 mycomm=inpcomm
6856 ELSE
6857 mycomm=ocn_comm_world
6858 END IF
6859# endif
6860!
6861!-----------------------------------------------------------------------
6862! Collect and reduce requested scalar variables.
6863!-----------------------------------------------------------------------
6864!
6865! Pack data to reduce.
6866!
6867 asend=a
6868 npts=1
6869!
6870! Collect and reduce.
6871!
6872# if defined REDUCE_ALLREDUCE
6873 IF (handle_op(1:3).eq.'MIN') THEN
6874 handle=mpi_min
6875 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6876 handle=mpi_max
6877 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6878 handle=mpi_sum
6879 END IF
6880 CALL mpi_allreduce (asend, areduce, npts, mp_float, handle, &
6881 & mycomm, myerror)
6882 IF (myerror.ne.mpi_success) THEN
6883 CALL mpi_error_string (myerror, string, lstr, serror)
6884 lstr=len_trim(string)
6885 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
6886 & string(1:lstr)
6887 exit_flag=2
6888 RETURN
6889 END IF
6890# elif defined REDUCE_ALLGATHER
6891 CALL mpi_allgather (asend, npts, mp_float, &
6892 & arecv, npts, mp_float, &
6893 & mycomm, myerror)
6894 IF (myerror.ne.mpi_success) THEN
6895 CALL mpi_error_string (myerror, string, lstr, serror)
6896 lstr=len_trim(string)
6897 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6898 & string(1:lstr)
6899 exit_flag=2
6900 RETURN
6901 END IF
6902 areduce=arecv(0)
6903 DO rank=1,ntilei(ng)*ntilej(ng)-1
6904 IF (handle_op(1:3).eq.'MIN') THEN
6905 areduce=min(areduce,arecv(rank))
6906 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6907 areduce=max(areduce,arecv(rank))
6908 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6909 areduce=areduce+arecv(rank)
6910 END IF
6911 END DO
6912# elif defined REDUCE_SENDRECV
6913 IF (myrank.eq.mymaster) THEN
6914 DO rank=1,ntilei(ng)*ntilej(ng)-1
6915 CALL mpi_irecv (arecv(rank), npts, mp_float, rank, &
6916 & rank+500, mycomm, rrequest(rank), &
6917 & myerror)
6918 END DO
6919 areduce=asend
6920 DO rank=1,ntilei(ng)*ntilej(ng)-1
6921 CALL mpi_wait (rrequest(rank), rstatus, myerror)
6922 IF (myerror.ne.mpi_success) THEN
6923 CALL mpi_error_string (myerror, string, lstr, serror)
6924 lstr=len_trim(string)
6925 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
6926 exit_flag=2
6927 RETURN
6928 END IF
6929 IF (handle_op(1:3).eq.'MIN') THEN
6930 areduce=min(areduce,arecv(rank))
6931 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6932 areduce=max(areduce,arecv(rank))
6933 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6934 areduce=areduce+arecv(rank)
6935 END IF
6936 END DO
6937 ELSE
6938 CALL mpi_isend (asend, npts, mp_float, mymaster, myrank+500, &
6939 & mycomm, request, myerror)
6940 CALL mpi_wait (request, sstatus, myerror)
6941 IF (myerror.ne.mpi_success) THEN
6942 CALL mpi_error_string (myerror, string, lstr, serror)
6943 lstr=len_trim(string)
6944 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
6945 exit_flag=2
6946 RETURN
6947 END IF
6948 END IF
6949!
6950! Broadcast reduced variables from the master process to all
6951! processes in the group.
6952!
6953 CALL mpi_bcast (areduce, npts, mp_float, mymaster, &
6954 & mycomm, myerror)
6955 IF (myerror.ne.mpi_success) THEN
6956 CALL mpi_error_string (myerror, string, lstr, serror)
6957 lstr=len_trim(string)
6958 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
6959 exit_flag=2
6960 RETURN
6961 END IF
6962# endif
6963 10 FORMAT (/,' MP_REDUCE_0D - error during ',a,' call, Task = ', &
6964 & i3.3,' Error = ',i3,/,16x,a)
6965!
6966! Unpack.
6967!
6968 a=areduce
6969# ifdef PROFILE
6970!
6971!-----------------------------------------------------------------------
6972! Turn off time clocks.
6973!-----------------------------------------------------------------------
6974!
6975 CALL wclock_off (ng, model, 65, __line__, myfile)
6976# endif
6977!
6978 RETURN
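
Callers normally reach this overload through the generic mp_reduce interface with a scalar argument and a single operation string. A sketch with an invented tile-local value my_cff:

      USE distribute_mod, ONLY : mp_reduce
!
!  Replace my_cff on every node with the global maximum over the group.
!
      CALL mp_reduce (ng, model, 1, my_cff, 'MAX')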

◆ mp_reduce_0dp()

subroutine distribute_mod::mp_reduce_0dp ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) asize,
real(dp), intent(inout) a,
character (len=*), intent(in) handle_op,
integer, intent(in), optional inpcomm )

Definition at line 6379 of file distribute.F.

6380!
6381!***********************************************************************
6382! !
6383! This routine collects and reduces requested double precision !
6384! variables from all nodes in the group. Then, it broadcasts !
6385! reduced variables to all nodes in the group. !
6386! !
6387! On Input: !
6388! !
6389! ng Nested grid number. !
6390! model Calling model identifier. !
6391! Asize Number of scalar variables to reduce. !
6392! A Vector of scalar variables to reduce. !
6393! handle_op Reduction operation handle (string). The following !
6394! reduction operations are supported: !
6395! 'MIN', 'MAX', 'SUM' !
6396! InpComm Communicator handle (integer, OPTIONAL). !
6397! !
6398! On Output: !
6399! !
6400! A Vector of reduced scalar variables. !
6401! !
6402!***********************************************************************
6403!
6404! Imported variable declarations.
6405!
6406 integer, intent(in) :: ng, model, Asize
6407
6408 integer, intent(in), optional :: InpComm
6409!
6410 character (len=*), intent(in) :: handle_op
6411!
6412 real(dp), intent(inout) :: A
6413!
6414! Local variable declarations.
6415!
6416 integer :: Lstr, MyCOMM, MyError, Npts, Serror
6417 integer :: handle, i, rank, request
6418
6419 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
6420
6421 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
6422 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
6423!
6424 real(dp) :: Areduce
6425 real(dp) :: Asend
6426 real(dp), dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
6427!
6428 character (len=MPI_MAX_ERROR_STRING) :: string
6429
6430 character (len=*), parameter :: MyFile = &
6431 & __FILE__//", mp_reduce_0dp"
6432
6433# ifdef PROFILE
6434!
6435!-----------------------------------------------------------------------
6436! Turn on time clocks.
6437!-----------------------------------------------------------------------
6438!
6439 CALL wclock_on (ng, model, 65, __line__, myfile)
6440# endif
6441# ifdef MPI
6442!
6443!-----------------------------------------------------------------------
6444! Set distributed-memory communicator handle (context ID).
6445!-----------------------------------------------------------------------
6446!
6447 IF (PRESENT(inpcomm)) THEN
6448 mycomm=inpcomm
6449 ELSE
6450 mycomm=ocn_comm_world
6451 END IF
6452# endif
6453!
6454!-----------------------------------------------------------------------
6455! Collect and reduce requested scalar variables.
6456!-----------------------------------------------------------------------
6457!
6458! Pack data to reduce.
6459!
6460 asend=a
6461 npts=1
6462!
6463! Collect and reduce.
6464!
6465# if defined REDUCE_ALLREDUCE
6466 IF (handle_op(1:3).eq.'MIN') THEN
6467 handle=mpi_min
6468 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6469 handle=mpi_max
6470 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6471 handle=mpi_sum
6472 END IF
6473 CALL mpi_allreduce (asend, areduce, npts, mp_double, handle, &
6474 & mycomm, myerror)
6475 IF (myerror.ne.mpi_success) THEN
6476 CALL mpi_error_string (myerror, string, lstr, serror)
6477 lstr=len_trim(string)
6478 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
6479 & string(1:lstr)
6480 exit_flag=2
6481 RETURN
6482 END IF
6483# elif defined REDUCE_ALLGATHER
6484 CALL mpi_allgather (asend, npts, mp_double, &
6485 & arecv, npts, mp_double, &
6486 & mycomm, myerror)
6487 IF (myerror.ne.mpi_success) THEN
6488 CALL mpi_error_string (myerror, string, lstr, serror)
6489 lstr=len_trim(string)
6490 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6491 & string(1:lstr)
6492 exit_flag=2
6493 RETURN
6494 END IF
6495 areduce=arecv(0)
6496 DO rank=1,ntilei(ng)*ntilej(ng)-1
6497 IF (handle_op(1:3).eq.'MIN') THEN
6498 areduce=min(areduce,arecv(rank))
6499 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6500 areduce=max(areduce,arecv(rank))
6501 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6502 areduce=areduce+arecv(rank)
6503 END IF
6504 END DO
6505# elif defined REDUCE_SENDRECV
6506 IF (myrank.eq.mymaster) THEN
6507 DO rank=1,ntilei(ng)*ntilej(ng)-1
6508 CALL mpi_irecv (arecv(rank), npts, mp_double, rank, &
6509 & rank+500, mycomm, rrequest(rank), &
6510 & myerror)
6511 END DO
6512 areduce=asend
6513 DO rank=1,ntilei(ng)*ntilej(ng)-1
6514 CALL mpi_wait (rrequest(rank), rstatus, myerror)
6515 IF (myerror.ne.mpi_success) THEN
6516 CALL mpi_error_string (myerror, string, lstr, serror)
6517 lstr=len_trim(string)
6518 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
6519 exit_flag=2
6520 RETURN
6521 END IF
6522 IF (handle_op(1:3).eq.'MIN') THEN
6523 areduce=min(areduce,arecv(rank))
6524 ELSE IF (handle_op(1:3).eq.'MAX') THEN
6525 areduce=max(areduce,arecv(rank))
6526 ELSE IF (handle_op(1:3).eq.'SUM') THEN
6527 areduce=areduce+arecv(rank)
6528 END IF
6529 END DO
6530 ELSE
6531 CALL mpi_isend (asend, npts, mp_double, mymaster, myrank+500, &
6532 & mycomm, request, myerror)
6533 CALL mpi_wait (request, sstatus, myerror)
6534 IF (myerror.ne.mpi_success) THEN
6535 CALL mpi_error_string (myerror, string, lstr, serror)
6536 lstr=len_trim(string)
6537 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
6538 exit_flag=2
6539 RETURN
6540 END IF
6541 END IF
6542!
6543! Broadcast reduced variables from the master process to all
6544! processes in the group.
6545!
6546 CALL mpi_bcast (areduce, npts, mp_double, mymaster, &
6547 & mycomm, myerror)
6548 IF (myerror.ne.mpi_success) THEN
6549 CALL mpi_error_string (myerror, string, lstr, serror)
6550 lstr=len_trim(string)
6551 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
6552 exit_flag=2
6553 RETURN
6554 END IF
6555# endif
6556 10 FORMAT (/,' MP_REDUCE_0DP - error during ',a,' call, Task = ', &
6557 & i3.3,' Error = ',i3,/,16x,a)
6558!
6559! Unpack.
6560!
6561 a=areduce
6562# ifdef PROFILE
6563!
6564!-----------------------------------------------------------------------
6565! Turn off time clocks.
6566!-----------------------------------------------------------------------
6567!
6568 CALL wclock_off (ng, model, 65, __line__, myfile)
6569# endif
6570!
6571 RETURN

◆ mp_reduce_1d()

subroutine distribute_mod::mp_reduce_1d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) asize,
real(r8), dimension(asize), intent(inout) a,
character (len=*), dimension(asize), intent(in) handle_op,
integer, intent(in), optional inpcomm )

Definition at line 6981 of file distribute.F.

6982!
6983!***********************************************************************
6984! !
6985! This routine collects and reduces requested variables from all !
6986! nodes in the group. Then, it broadcasts reduced variables to !
6987! all nodes in the group. !
6988! !
6989! On Input: !
6990! !
6991! ng Nested grid number. !
6992! model Calling model identifier. !
6993! Asize Number of scalar variables to reduce. !
6994! A Vector of scalar variables to reduce. !
6995! handle_op Reduction operation handle (string). The following !
6996! reduction operations are supported: !
6997! 'MIN', 'MAX', 'SUM' !
6998! InpComm Communicator handle (integer, OPTIONAL). !
6999! !
7000! On Output: !
7001! !
7002! A Vector of reduced scalar variables. !
7003! !
7004!***********************************************************************
7005!
7006! Imported variable declarations.
7007!
7008 integer, intent(in) :: ng, model, Asize
7009
7010 integer, intent(in), optional :: InpComm
7011!
7012 character (len=*), intent(in) :: handle_op(Asize)
7013!
7014 real(r8), intent(inout) :: A(Asize)
7015!
7016! Local variable declarations.
7017!
7018 integer :: Lstr, MyCOMM, MyError, Serror
7019 integer :: handle, i, rank, request
7020
7021 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
7022
7023 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
7024 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
7025!
7026 real(r8), dimension(Asize,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
7027 real(r8), dimension(Asize) :: Areduce
7028 real(r8), dimension(Asize) :: Asend
7029!
7030 character (len=MPI_MAX_ERROR_STRING) :: string
7031
7032 character (len=*), parameter :: MyFile = &
7033 & __FILE__//", mp_reduce_1d"
7034
7035# ifdef PROFILE
7036!
7037!-----------------------------------------------------------------------
7038! Turn on time clocks.
7039!-----------------------------------------------------------------------
7040!
7041 CALL wclock_on (ng, model, 65, __line__, myfile)
7042# endif
7043# ifdef MPI
7044!
7045!-----------------------------------------------------------------------
7046! Set distributed-memory communicator handle (context ID).
7047!-----------------------------------------------------------------------
7048!
7049 IF (PRESENT(inpcomm)) THEN
7050 mycomm=inpcomm
7051 ELSE
7052 mycomm=ocn_comm_world
7053 END IF
7054# endif
7055!
7056!-----------------------------------------------------------------------
7057! Collect and reduce requested scalar variables.
7058!-----------------------------------------------------------------------
7059!
7060! Maximum automatic buffer memory size in bytes.
7061!
7062 bmemmax(ng)=max(bmemmax(ng), real((SIZE(arecv)+ &
7063 & 2*asize)*kind(a),r8))
7064!
7065! Pack data to reduce.
7066!
7067 DO i=1,asize
7068 asend(i)=a(i)
7069 END DO
7070!
7071! Collect and reduce.
7072!
7073# if defined REDUCE_ALLREDUCE
7074 DO i=1,asize
7075 IF (handle_op(i)(1:3).eq.'MIN') THEN
7076 handle=mpi_min
7077 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
7078 handle=mpi_max
7079 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
7080 handle=mpi_sum
7081 END IF
7082 CALL mpi_allreduce (asend(i), areduce(i), 1, mp_float, handle, &
7083 & mycomm, myerror)
7084 IF (myerror.ne.mpi_success) THEN
7085 CALL mpi_error_string (myerror, string, lstr, serror)
7086 lstr=len_trim(string)
7087 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
7088 & string(1:lstr)
7089 exit_flag=2
7090 RETURN
7091 END IF
7092 END DO
7093# elif defined REDUCE_ALLGATHER
7094 CALL mpi_allgather (asend, asize, mp_float, &
7095 & arecv, asize, mp_float, &
7096 & mycomm, myerror)
7097 IF (myerror.ne.mpi_success) THEN
7098 CALL mpi_error_string (myerror, string, lstr, serror)
7099 lstr=len_trim(string)
7100 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
7101 & string(1:lstr)
7102 exit_flag=2
7103 RETURN
7104 END IF
7105 DO i=1,asize
7106 areduce(i)=arecv(i,0)
7107 DO rank=1,ntilei(ng)*ntilej(ng)-1
7108 IF (handle_op(i)(1:3).eq.'MIN') THEN
7109 areduce(i)=min(areduce(i),arecv(i,rank))
7110 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
7111 areduce(i)=max(areduce(i),arecv(i,rank))
7112 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
7113 areduce(i)=areduce(i)+arecv(i,rank)
7114 END IF
7115 END DO
7116 END DO
7117# elif defined REDUCE_SENDRECV
7118 IF (myrank.eq.mymaster) THEN
7119 DO rank=1,ntilei(ng)*ntilej(ng)-1
7120 CALL mpi_irecv (arecv(1,rank), asize, mp_float, rank, &
7121 & rank+500, mycomm, rrequest(rank), myerror)
7122 END DO
7123 DO i=1,asize
7124 areduce(i)=asend(i)
7125 END DO
7126 DO rank=1,ntilei(ng)*ntilej(ng)-1
7127 CALL mpi_wait (rrequest(rank), rstatus, myerror)
7128 IF (myerror.ne.mpi_success) THEN
7129 CALL mpi_error_string (myerror, string, lstr, serror)
7130 lstr=len_trim(string)
7131 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
7132 exit_flag=2
7133 RETURN
7134 END IF
7135 DO i=1,asize
7136 IF (handle_op(i)(1:3).eq.'MIN') THEN
7137 areduce(i)=min(areduce(i),arecv(i,rank))
7138 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
7139 areduce(i)=max(areduce(i),arecv(i,rank))
7140 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
7141 areduce(i)=areduce(i)+arecv(i,rank)
7142 END IF
7143 END DO
7144 END DO
7145 ELSE
7146 CALL mpi_isend (asend, asize, mp_float, mymaster, myrank+500, &
7147 & mycomm, request, myerror)
7148 CALL mpi_wait (request, sstatus, myerror)
7149 IF (myerror.ne.mpi_success) THEN
7150 CALL mpi_error_string (myerror, string, lstr, serror)
7151 lstr=len_trim(string)
7152 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
7153 exit_flag=2
7154 RETURN
7155 END IF
7156 END IF
7157!
7158! Broadcast reduced variables from the master process to all
7159! processes in the group.
7160!
7161 CALL mpi_bcast (areduce, asize, mp_float, mymaster, &
7162 & mycomm, myerror)
7163 IF (myerror.ne.mpi_success) THEN
7164 CALL mpi_error_string (myerror, string, lstr, serror)
7165 lstr=len_trim(string)
7166 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
7167 exit_flag=2
7168 RETURN
7169 END IF
7170# endif
7171 10 FORMAT (/,' MP_REDUCE_1D - error during ',a,' call, Task = ', &
7172 & i3.3,' Error = ',i3,/,16x,a)
7173!
7174! Unpack.
7175!
7176 DO i=1,asize
7177 a(i)=areduce(i)
7178 END DO
7179# ifdef PROFILE
7180!
7181!-----------------------------------------------------------------------
7182! Turn off time clocks.
7183!-----------------------------------------------------------------------
7184!
7185 CALL wclock_off (ng, model, 65, __line__, myfile)
7186# endif
7187!
7188 RETURN
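
The 1D overload lets every element of the vector carry its own operation, so several diagnostics can be reduced in a single call. The tile-local values below (my_volume, my_cfl, my_dtmin) are invented for illustration:

      USE mod_kinds,      ONLY : r8
      USE distribute_mod, ONLY : mp_reduce
!
      real(r8)          :: rbuffer(3)
      character (len=3) :: op_handle(3)
!
      rbuffer(1)=my_volume                  ! domain volume      -> SUM
      rbuffer(2)=my_cfl                     ! worst Courant no.  -> MAX
      rbuffer(3)=my_dtmin                   ! smallest time step -> MIN
      op_handle(1)='SUM'
      op_handle(2)='MAX'
      op_handle(3)='MIN'
!
      CALL mp_reduce (ng, model, 3, rbuffer, op_handle)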

◆ mp_reduce_1dp()

subroutine distribute_mod::mp_reduce_1dp ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) asize,
real(dp), dimension(asize), intent(inout) a,
character (len=*), dimension(asize), intent(in) handle_op,
integer, intent(in), optional inpcomm )

Definition at line 6574 of file distribute.F.

6575!
6576!***********************************************************************
6577! !
6578! This routine collects and reduces requested double precision !
6579! variables from all nodes in the group. Then, it broadcasts !
6580! reduced variables to all nodes in the group. !
6581! !
6582! On Input: !
6583! !
6584! ng Nested grid number. !
6585! model Calling model identifier. !
6586! Asize Number of scalar variables to reduce. !
6587! A Vector of scalar variables to reduce. !
6588! handle_op Reduction operation handle (string). The following !
6589! reduction operations are supported: !
6590! 'MIN', 'MAX', 'SUM' !
6591! InpComm Communicator handle (integer, OPTIONAL). !
6592! !
6593! On Output: !
6594! !
6595! A Vector of reduced scalar variables. !
6596! !
6597!***********************************************************************
6598!
6599! Imported variable declarations.
6600!
6601 integer, intent(in) :: ng, model, Asize
6602
6603 integer, intent(in), optional :: InpComm
6604!
6605 character (len=*), intent(in) :: handle_op(Asize)
6606!
6607 real(dp), intent(inout) :: A(Asize)
6608!
6609! Local variable declarations.
6610!
6611 integer :: Lstr, MyCOMM, MyError, Serror
6612 integer :: handle, i, rank, request
6613
6614 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
6615
6616 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
6617 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
6618!
6619 real(dp), dimension(Asize,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
6620 real(dp), dimension(Asize) :: Areduce
6621 real(dp), dimension(Asize) :: Asend
6622!
6623 character (len=MPI_MAX_ERROR_STRING) :: string
6624
6625 character (len=*), parameter :: MyFile = &
6626 & __FILE__//", mp_reduce_1dp"
6627
6628# ifdef PROFILE
6629!
6630!-----------------------------------------------------------------------
6631! Turn on time clocks.
6632!-----------------------------------------------------------------------
6633!
6634 CALL wclock_on (ng, model, 65, __line__, myfile)
6635# endif
6636# ifdef MPI
6637!
6638!-----------------------------------------------------------------------
6639! Set distributed-memory communicator handle (context ID).
6640!-----------------------------------------------------------------------
6641!
6642 IF (PRESENT(inpcomm)) THEN
6643 mycomm=inpcomm
6644 ELSE
6645 mycomm=ocn_comm_world
6646 END IF
6647# endif
6648!
6649!-----------------------------------------------------------------------
6650! Collect and reduce requested scalar variables.
6651!-----------------------------------------------------------------------
6652!
6653! Maximum automatic buffer memory size in bytes.
6654!
6655 bmemmax(ng)=max(bmemmax(ng), real((SIZE(arecv)+ &
6656 & 2*asize)*kind(a),r8))
6657!
6658! Pack data to reduce.
6659!
6660 DO i=1,asize
6661 asend(i)=a(i)
6662 END DO
6663!
6664! Collect and reduce.
6665!
6666# if defined REDUCE_ALLREDUCE
6667 DO i=1,asize
6668 IF (handle_op(i)(1:3).eq.'MIN') THEN
6669 handle=mpi_min
6670 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6671 handle=mpi_max
6672 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6673 handle=mpi_sum
6674 END IF
6675 CALL mpi_allreduce (asend(i), areduce(i), 1, mp_double, handle, &
6676 & mycomm, myerror)
6677 IF (myerror.ne.mpi_success) THEN
6678 CALL mpi_error_string (myerror, string, lstr, serror)
6679 lstr=len_trim(string)
6680 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
6681 & string(1:lstr)
6682 exit_flag=2
6683 RETURN
6684 END IF
6685 END DO
6686# elif defined REDUCE_ALLGATHER
6687 CALL mpi_allgather (asend, asize, mp_double, &
6688 & arecv, asize, mp_double, &
6689 & mycomm, myerror)
6690 IF (myerror.ne.mpi_success) THEN
6691 CALL mpi_error_string (myerror, string, lstr, serror)
6692 lstr=len_trim(string)
6693 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6694 & string(1:lstr)
6695 exit_flag=2
6696 RETURN
6697 END IF
6698 DO i=1,asize
6699 areduce(i)=arecv(i,0)
6700 DO rank=1,ntilei(ng)*ntilej(ng)-1
6701 IF (handle_op(i)(1:3).eq.'MIN') THEN
6702 areduce(i)=min(areduce(i),arecv(i,rank))
6703 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6704 areduce(i)=max(areduce(i),arecv(i,rank))
6705 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6706 areduce(i)=areduce(i)+arecv(i,rank)
6707 END IF
6708 END DO
6709 END DO
6710# elif defined REDUCE_SENDRECV
6711 IF (myrank.eq.mymaster) THEN
6712 DO rank=1,ntilei(ng)*ntilej(ng)-1
6713 CALL mpi_irecv (arecv(1,rank), asize, mp_double, rank, &
6714 & rank+500, mycomm, rrequest(rank), &
6715 & myerror)
6716 END DO
6717 DO i=1,asize
6718 areduce(i)=asend(i)
6719 END DO
6720 DO rank=1,ntilei(ng)*ntilej(ng)-1
6721 CALL mpi_wait (rrequest(rank), rstatus, myerror)
6722 IF (myerror.ne.mpi_success) THEN
6723 CALL mpi_error_string (myerror, string, lstr, serror)
6724 lstr=len_trim(string)
6725 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
6726 exit_flag=2
6727 RETURN
6728 END IF
6729 DO i=1,asize
6730 IF (handle_op(i)(1:3).eq.'MIN') THEN
6731 areduce(i)=min(areduce(i),arecv(i,rank))
6732 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6733 areduce(i)=max(areduce(i),arecv(i,rank))
6734 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6735 areduce(i)=areduce(i)+arecv(i,rank)
6736 END IF
6737 END DO
6738 END DO
6739 ELSE
6740 CALL mpi_isend (asend, asize, mp_double, mymaster, myrank+500, &
6741 & mycomm, request, myerror)
6742 CALL mpi_wait (request, sstatus, myerror)
6743 IF (myerror.ne.mpi_success) THEN
6744 CALL mpi_error_string (myerror, string, lstr, serror)
6745 lstr=len_trim(string)
6746 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
6747 exit_flag=2
6748 RETURN
6749 END IF
6750 END IF
6751!
6752! Broadcast reduced variables from the master process to all
6753! processes in the group.
6754!
6755 CALL mpi_bcast (areduce, asize, mp_double, mymaster, &
6756 & mycomm, myerror)
6757 IF (myerror.ne.mpi_success) THEN
6758 CALL mpi_error_string (myerror, string, lstr, serror)
6759 lstr=len_trim(string)
6760 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
6761 exit_flag=2
6762 RETURN
6763 END IF
6764# endif
6765 10 FORMAT (/,' MP_REDUCE_1DP - error during ',a,' call, Task = ', &
6766 & i3.3,' Error = ',i3,/,16x,a)
6767!
6768! Unpack.
6769!
6770 DO i=1,asize
6771 a(i)=areduce(i)
6772 END DO
6773# ifdef PROFILE
6774!
6775!-----------------------------------------------------------------------
6776! Turn off time clocks.
6777!-----------------------------------------------------------------------
6778!
6779 CALL wclock_off (ng, model, 65, __line__, myfile)
6780# endif
6781!
6782 RETURN
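
When REDUCE_ALLREDUCE is defined, this routine translates each character handle into the matching MPI operation and reduces the vector one element at a time with MPI_ALLREDUCE. Below is a minimal standalone sketch of that dispatch, not ROMS code: the names are invented and MPI_DOUBLE_PRECISION is used in place of mp_double. It can be built with any MPI Fortran compiler wrapper.

      program reduce_allreduce_demo
      use mpi
      implicit none
      integer, parameter :: dp = kind(1.0d0)
      integer, parameter :: asize = 3
      integer :: rank, ierr, i, handle
      character (len=3) :: handle_op(asize)
      real(dp) :: asend(asize), areduce(asize)

      CALL MPI_Init (ierr)
      CALL MPI_Comm_rank (MPI_COMM_WORLD, rank, ierr)

      handle_op = (/ 'MIN', 'MAX', 'SUM' /)
      asend = REAL(rank+1,dp)              ! same value, reduced three ways

      ! Map each character handle onto the matching MPI operation and
      ! reduce element by element, as in the REDUCE_ALLREDUCE branch.
      DO i = 1, asize
        IF (handle_op(i).eq.'MIN') THEN
          handle = MPI_MIN
        ELSE IF (handle_op(i).eq.'MAX') THEN
          handle = MPI_MAX
        ELSE
          handle = MPI_SUM
        END IF
        CALL MPI_Allreduce (asend(i), areduce(i), 1,                    &
     &                      MPI_DOUBLE_PRECISION, handle,               &
     &                      MPI_COMM_WORLD, ierr)
      END DO

      IF (rank.eq.0) WRITE (*,*) 'min, max, sum =', areduce
      CALL MPI_Finalize (ierr)
      end program reduce_allreduce_demo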

◆ mp_reduce_i8()

subroutine distribute_mod::mp_reduce_i8 ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) asize,
integer(i8b), dimension(asize), intent(inout) a,
character (len=*), dimension(asize), intent(in) handle_op,
integer, intent(in), optional inpcomm )

Definition at line 6167 of file distribute.F.

6168!
6169!***********************************************************************
6170! !
6171! This routine collects and reduces requested 1D integer array !
6172! variables from all nodes in the group. !
6173! !
6174! On Input: !
6175! !
6176! ng Nested grid number. !
6177! model Calling model identifier. !
6178! Asize Number of scalar variables to reduce. !
6179! A Vector of scalar variables to reduce. !
6180! handle_op Reduction operation handle (string). The following !
6181! reduction operations are supported: !
6182! 'MIN', 'MAX', 'SUM' !
6183! InpComm Communicator handle (integer, OPTIONAL). !
6184! !
6185! On Output: !
6186! !
6187! A Vector of reduced scalar variables. !
6188! !
6189!***********************************************************************
6190!
6191! Imported variable declarations.
6192!
6193 integer, intent(in) :: ng, model, Asize
6194
6195 integer, intent(in), optional :: InpComm
6196!
6197 character (len=*), intent(in) :: handle_op(Asize)
6198!
6199 integer(i8b), intent(inout) :: A(Asize)
6200!
6201! Local variable declarations.
6202!
6203 integer :: Lstr, MyCOMM, MyError, Serror
6204 integer :: handle, i, rank, request
6205
6206 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
6207
6208 integer, dimension(MPI_STATUS_SIZE) :: Rstatus
6209 integer, dimension(MPI_STATUS_SIZE) :: Sstatus
6210!
6211 integer(i8b), dimension(Asize,0:NtileI(ng)*NtileJ(ng)-1) :: Arecv
6212 integer(i8b), dimension(Asize) :: Areduce
6213 integer(i8b), dimension(Asize) :: Asend
6214!
6215 character (len=MPI_MAX_ERROR_STRING) :: string
6216
6217 character (len=*), parameter :: MyFile = &
6218 & __FILE__//", mp_reduce_1di"
6219
6220# ifdef PROFILE
6221!
6222!-----------------------------------------------------------------------
6223! Turn on time clocks.
6224!-----------------------------------------------------------------------
6225!
6226 CALL wclock_on (ng, model, 65, __line__, myfile)
6227# endif
6228# ifdef MPI
6229!
6230!-----------------------------------------------------------------------
6231! Set distributed-memory communicator handle (context ID).
6232!-----------------------------------------------------------------------
6233!
6234 IF (PRESENT(inpcomm)) THEN
6235 mycomm=inpcomm
6236 ELSE
6237 mycomm=ocn_comm_world
6238 END IF
6239# endif
6240!
6241!-----------------------------------------------------------------------
6242! Collect and reduce requested scalar variables.
6243!-----------------------------------------------------------------------
6244!
6245! Maximum automatic buffer memory size in bytes.
6246!
6247 bmemmax(ng)=max(bmemmax(ng), real((SIZE(arecv)+ &
6248 & 2*asize)*kind(a),r8))
6249!
6250! Pack data to reduce.
6251!
6252 DO i=1,asize
6253 asend(i)=a(i)
6254 END DO
6255!
6256! Collect and reduce.
6257!
6258# if defined REDUCE_ALLREDUCE
6259 DO i=1,asize
6260 IF (handle_op(i)(1:3).eq.'MIN') THEN
6261 handle=mpi_min
6262 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6263 handle=mpi_max
6264 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6265 handle=mpi_sum
6266 END IF
6267 CALL mpi_allreduce (asend(i), areduce(i), 1, mpi_integer, &
6268 & handle, mycomm, myerror)
6269 IF (myerror.ne.mpi_success) THEN
6270 CALL mpi_error_string (myerror, string, lstr, serror)
6271 lstr=len_trim(string)
6272 WRITE (stdout,10) 'MPI_ALLREDUCE', myrank, myerror, &
6273 & string(1:lstr)
6274 exit_flag=2
6275 RETURN
6276 END IF
6277 END DO
6278# elif defined REDUCE_ALLGATHER
6279 CALL mpi_allgather (asend, asize, mpi_integer, &
6280 & arecv, asize, mpi_integer, &
6281 & mycomm, myerror)
6282 IF (myerror.ne.mpi_success) THEN
6283 CALL mpi_error_string (myerror, string, lstr, serror)
6284 lstr=len_trim(string)
6285 WRITE (stdout,10) 'MPI_ALLGATHER', myrank, myerror, &
6286 & string(1:lstr)
6287 exit_flag=2
6288 RETURN
6289 END IF
6290 DO i=1,asize
6291 areduce(i)=arecv(i,0)
6292 DO rank=1,ntilei(ng)*ntilej(ng)-1
6293 IF (handle_op(i)(1:3).eq.'MIN') THEN
6294 areduce(i)=min(areduce(i),arecv(i,rank))
6295 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6296 areduce(i)=max(areduce(i),arecv(i,rank))
6297 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6298 areduce(i)=areduce(i)+arecv(i,rank)
6299 END IF
6300 END DO
6301 END DO
6302# elif defined REDUCE_SENDRECV
6303 IF (myrank.eq.mymaster) THEN
6304 DO rank=1,ntilei(ng)*ntilej(ng)-1
6305 CALL mpi_irecv (arecv(1,rank), asize, mpi_integer, rank, &
6306 & rank+500, mycomm, rrequest(rank), &
6307 & myerror)
6308 END DO
6309 DO i=1,asize
6310 areduce(i)=asend(i)
6311 END DO
6312 DO rank=1,ntilei(ng)*ntilej(ng)-1
6313 CALL mpi_wait (rrequest(rank), rstatus, myerror)
6314 IF (myerror.ne.mpi_success) THEN
6315 CALL mpi_error_string (myerror, string, lstr, serror)
6316 lstr=len_trim(string)
6317 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
6318 exit_flag=2
6319 RETURN
6320 END IF
6321 DO i=1,asize
6322 IF (handle_op(i)(1:3).eq.'MIN') THEN
6323 areduce(i)=min(areduce(i),arecv(i,rank))
6324 ELSE IF (handle_op(i)(1:3).eq.'MAX') THEN
6325 areduce(i)=max(areduce(i),arecv(i,rank))
6326 ELSE IF (handle_op(i)(1:3).eq.'SUM') THEN
6327 areduce(i)=areduce(i)+arecv(i,rank)
6328 END IF
6329 END DO
6330 END DO
6331 ELSE
6332 CALL mpi_isend (asend, asize, mpi_integer, mymaster, &
6333 & myrank+500, mycomm, request, myerror)
6334 CALL mpi_wait (request, sstatus, myerror)
6335 IF (myerror.ne.mpi_success) THEN
6336 CALL mpi_error_string (myerror, string, lstr, serror)
6337 lstr=len_trim(string)
6338 WRITE (stdout,10) 'MPI_ISEND', myrank, serror, string(1:lstr)
6339 exit_flag=2
6340 RETURN
6341 END IF
6342 END IF
6343!
6344! Broadcast reduced variables from the master process to all
6345! processes in the group.
6346!
6347 CALL mpi_bcast (areduce, asize, mpi_integer, mymaster, &
6348 & mycomm, myerror)
6349 IF (myerror.ne.mpi_success) THEN
6350 CALL mpi_error_string (myerror, string, lstr, serror)
6351 lstr=len_trim(string)
6352 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
6353 exit_flag=2
6354 RETURN
6355 END IF
6356# endif
6357 10 FORMAT (/,' MP_REDUCE_I8 - error during ',a,' call, Task = ', &
6358 & i3.3,' Error = ',i3,/,16x,a)
6359!
6360! Unpack.
6361!
6362 DO i=1,asize
6363 a(i)=areduce(i)
6364 END DO
6365# ifdef PROFILE
6366!
6367!-----------------------------------------------------------------------
6368! Turn off time clocks.
6369!-----------------------------------------------------------------------
6370!
6371 CALL wclock_off (ng, model, 65, __line__, myfile)
6372# endif
6373!
6374 RETURN
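
The REDUCE_ALLGATHER branch gathers every task's vector with MPI_ALLGATHER and then applies the requested operation locally, so no final broadcast is needed. The sketch below illustrates the same gather-then-reduce pattern; it is not ROMS code, the names are invented, and it uses default integers with MPI_INTEGER for brevity rather than the routine's integer(i8b) values.

      program reduce_allgather_demo
      use mpi
      implicit none
      integer, parameter :: asize = 2
      integer :: rank, nproc, ierr, r
      integer :: asend(asize), areduce(asize)
      integer, allocatable :: arecv(:,:)

      CALL MPI_Init (ierr)
      CALL MPI_Comm_rank (MPI_COMM_WORLD, rank, ierr)
      CALL MPI_Comm_size (MPI_COMM_WORLD, nproc, ierr)
      allocate ( arecv(asize,0:nproc-1) )

      asend = (/ rank, 1 /)                ! element 1: 'MAX', element 2: 'SUM'

      ! Gather every task's vector, then reduce locally on all tasks.
      CALL MPI_Allgather (asend, asize, MPI_INTEGER,                    &
     &                    arecv, asize, MPI_INTEGER,                    &
     &                    MPI_COMM_WORLD, ierr)
      areduce = arecv(:,0)
      DO r = 1, nproc-1
        areduce(1) = MAX(areduce(1), arecv(1,r))
        areduce(2) = areduce(2)+arecv(2,r)
      END DO

      IF (rank.eq.0) WRITE (*,*) 'highest rank =', areduce(1),          &
     &                           ' number of tasks =', areduce(2)
      deallocate (arecv)
      CALL MPI_Finalize (ierr)
      end program reduce_allgather_demo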

◆ mp_scatter2d()

subroutine distribute_mod::mp_scatter2d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) lbi,
integer, intent(in) ubi,
integer, intent(in) lbj,
integer, intent(in) ubj,
integer, intent(in) nghost,
integer, intent(in) gtype,
real(r8), intent(inout) amin,
real(r8), intent(inout) amax,
integer, intent(in) nwpts,
integer, dimension(nwpts), intent(in) ij_water,
integer, intent(in) npts,
real(r8), dimension(npts+2), intent(inout) a,
real(r8), dimension(lbi:ubi,lbj:ubj), intent(out) awrk )

Definition at line 7323 of file distribute.F.

7329!
7330!***********************************************************************
7331! !
7332! This routine scatters input global data, packed as 1D real array, !
7333! to each tiled array. Because this routine is also used by the !
7334! adjoint model, the ghost-points in the halo region are NOT updated !
7335! in the output tile array (Awrk). It is used by the master node to !
7336! distribute data read from NetCDF files during serial I/O. !
7337! !
7338! On Input: !
7339! !
7340! ng Nested grid number. !
7341! model Calling model identifier. !
7342! LBi I-dimension Lower bound. !
7343! UBi I-dimension Upper bound. !
7344! LBj J-dimension Lower bound. !
7345! UBj J-dimension Upper bound. !
7346! Nghost Number of ghost-points in the halo region. !
7347! gtype C-grid type. If negative and Land-Sea mask is !
7348! available, only water-points are processed. !
7349! Amin Input array minimum value. !
7350! Amax Input array maximum value. !
7351! NWpts Number of water points. !
7352! IJ_water IJ-indices for water points. !
7353! Npts Number of points to process in A. !
7354! A Input global data from each node packed into 1D array !
7355! in column-major order. That is, in the same way !
7356! that Fortran multi-dimensional arrays are stored !
7357! in memory. Only valid on root task, MyMaster. !
7358! Npts Number of points to process in A. !
7359! !
7360! On Output: !
7361! !
7362! Awrk 2D tiled, floating-point array. !
7363! !
7364!***********************************************************************
7365!
7366! Imported variable declarations.
7367!
7368 integer, intent(in) :: ng, model
7369 integer, intent(in) :: LBi, UBi, LBj, UBj
7370 integer, intent(in) :: Nghost, gtype, Npts
7371
7372# if defined READ_WATER && defined MASKING
7373 integer, intent(in) :: NWpts
7374 integer, intent(in) :: IJ_water(NWpts)
7375# endif
7376!
7377 real(r8), intent(inout) :: Amin, Amax
7378 real(r8), intent(inout) :: A(Npts+2)
7379 real(r8), intent(out) :: Awrk(LBi:UBi,LBj:UBj)
7380!
7381! Local variable declarations.
7382!
7383 integer :: Io, Ie, Jo, Je, Ioff, Joff
7384 integer :: Imin, Imax, Jmin, Jmax
7385 integer :: iLB, iUB, jLB, jUB
7386 integer :: Isize, Jsize, IJsize, Vsize
7387 integer :: Lstr, MyError, MySize, MyType, Ntasks, Serror, ghost
7388 integer :: Cgrid, i, ic, ij, j, jc, mc, nc, rank
7389!
7390 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
7391!
7392# ifndef SCATTER_BCAST
7393 integer, allocatable :: ij_global(:,:)
7394!
7395 real(r8) :: Astats(2)
7396 real(r8), allocatable :: Vrecv(:)
7397 real(r8), dimension(Npts) :: Vreset
7398# endif
7399 real(r8), dimension(Npts+2) :: Vglobal
7400!
7401 character (len=10) :: MyMethod
7402 character (len=MPI_MAX_ERROR_STRING) :: string
7403
7404 character (len=*), parameter :: MyFile = &
7405 & __FILE__//", mp_scatter2d"
7406
7407# ifdef PROFILE
7408!
7409!-----------------------------------------------------------------------
7410! Turn on time clocks.
7411!-----------------------------------------------------------------------
7412!
7413 CALL wclock_on (ng, model, 67, __line__, myfile)
7414# endif
7415!
7416!-----------------------------------------------------------------------
7417! Set horizontal starting and ending indices for parallel domain
7418! partitions in the XI- and ETA-directions.
7419!-----------------------------------------------------------------------
7420!
7421! Maximum automatic buffer memory size in bytes.
7422!
7423 bmemmax(ng)=max(bmemmax(ng), real(SIZE(vglobal)*kind(a),r8))
7424!
7425! Set full grid first and last point according to staggered C-grid
7426! classification. Notice that the offsets are for the private array
7427! counter.
7428!
7429 mytype=abs(gtype)
7430
7431 SELECT CASE (mytype)
7432 CASE (p2dvar, p3dvar)
7433 io=iobounds(ng) % ILB_psi
7434 ie=iobounds(ng) % IUB_psi
7435 jo=iobounds(ng) % JLB_psi
7436 je=iobounds(ng) % JUB_psi
7437 ioff=0
7438 joff=1
7439 CASE (r2dvar, r3dvar)
7440 io=iobounds(ng) % ILB_rho
7441 ie=iobounds(ng) % IUB_rho
7442 jo=iobounds(ng) % JLB_rho
7443 je=iobounds(ng) % JUB_rho
7444 ioff=1
7445 joff=0
7446 CASE (u2dvar, u3dvar)
7447 io=iobounds(ng) % ILB_u
7448 ie=iobounds(ng) % IUB_u
7449 jo=iobounds(ng) % JLB_u
7450 je=iobounds(ng) % JUB_u
7451 ioff=0
7452 joff=0
7453 CASE (v2dvar, v3dvar)
7454 io=iobounds(ng) % ILB_v
7455 ie=iobounds(ng) % IUB_v
7456 jo=iobounds(ng) % JLB_v
7457 je=iobounds(ng) % JUB_v
7458 ioff=1
7459 joff=1
7460 CASE DEFAULT ! RHO-points
7461 io=iobounds(ng) % ILB_rho
7462 ie=iobounds(ng) % IUB_rho
7463 jo=iobounds(ng) % JLB_rho
7464 je=iobounds(ng) % JUB_rho
7465 ioff=1
7466 joff=0
7467 END SELECT
7468!
7469 isize=ie-io+1
7470 jsize=je-jo+1
7471 ijsize=isize*jsize
7472!
7473! Set Scatter counts and displacement vectors. Use non-overlapping
7474! (ghost=0) ranges according to tile rank in 'mpi_scatterv' to work
7475! correctly.
7476!
7477 ghost=0 ! non-overlapping
7478!
7479 SELECT CASE (mytype)
7480 CASE (p2dvar, p3dvar)
7481 cgrid=1
7482 CASE (r2dvar, r3dvar)
7483 cgrid=2
7484 CASE (u2dvar, u3dvar)
7485 cgrid=3
7486 CASE (v2dvar, v3dvar)
7487 cgrid=4
7488 CASE DEFAULT ! RHO-points
7489 cgrid=2
7490 END SELECT
7491!
7492 ntasks=ntilei(ng)*ntilej(ng)
7493 nc=0
7494 DO rank=0,ntasks-1
7495 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
7496 iub=bounds(ng) % Imax(cgrid,ghost,rank)
7497 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
7498 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
7499 displs(rank)=nc
7500 DO j=jlb,jub
7501 DO i=ilb,iub
7502 nc=nc+1
7503 END DO
7504 END DO
7505 counts(rank)=nc-displs(rank)
7506 END DO
7507!
7508! Load global data into send buffer, Vglobal, which is only known by
7509! the root process at this point. If the input data is at water
7510! points only, fill the land points with zeros.
7511!
7512 vglobal=0.0_r8
7513 vsize=npts
7514!
7515 IF (myrank.eq.mymaster) Then
7516 IF (gtype.gt.0) THEN
7517 vglobal(1:vsize)=a(1:vsize)
7518# if defined READ_WATER && defined MASKING
7519 ELSE
7520 ij=0
7521 mc=0
7522 nc=0
7523 DO j=jo,je
7524 jc=(j-joff)*isize
7525 DO i=io,ie
7526 ij=ij+1
7527 ic=i+ioff+jc
7528 IF (ij_water(mc+1).eq.ij) THEN
7529 mc=mc+1
7530 nc=nc+1
7531 vglobal(ic)=a(nc)
7532 ELSE
7533 vglobal(ic)=0.0_r8
7534 END IF
7535 END DO
7536 END DO
7537 vsize=ic
7538# endif
7539 END IF
7540 END IF
7541!
7542!-----------------------------------------------------------------------
7543! Scatter requested global data.
7544!-----------------------------------------------------------------------
7545
7546# ifdef SCATTER_BCAST
7547!
7548! Set tile range to include overlapping halos, if requested.
7549!
7550 IF (nghost.eq.0) THEN
7551 ghost=0 ! non-overlapping
7552 ELSE
7553 ghost=1 ! overlapping
7554 END IF
7555 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
7556 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
7557 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
7558 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
7559!
7560! Append Min/Max values since they are only known by root process
7561!
7562 IF (myrank.eq.mymaster) Then
7563 vglobal(vsize+1)=amin
7564 vglobal(vsize+2)=amax
7565 END IF
7566 vsize=vsize+2
7567!
7568! Broadcast data to all processes in the group, itself included.
7569!
7570 CALL mpi_bcast (vglobal, vsize, mp_float, mymaster, &
7571 & ocn_comm_world, myerror)
7572 IF (myerror.ne.mpi_success) THEN
7573 CALL mpi_error_string (myerror, string, lstr, serror)
7574 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, trim(string)
7575 10 FORMAT (/,' MP_SCATTER2D - error during ',a, &
7576 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
7577 exit_flag=2
7578 RETURN
7579 END IF
7580!
7581! Unpack data buffer and load into tiled array.
7582!
7583 DO j=jmin, jmax
7584 jc=(j-joff)*isize
7585 DO i=imin, imax
7586 ic=i+ioff+jc
7587 awrk(i,j)=vglobal(ic)
7588 END DO
7589 END DO
7590 amin=vglobal(vsize-1)
7591 amax=vglobal(vsize)
7592
7593# else
7594!
7595! Set tile range for non-overlapping tiles.
7596!
7597 ghost=0 ! non-overlapping
7598 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
7599 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
7600 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
7601 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
7602!
7603! If master node, set (i,j) indices map from global array to vector.
7604!
7605 IF (myrank.eq.mymaster) THEN
7606 allocate ( ij_global(io:ie,jo:je) )
7607!
7608 DO j=jo,je
7609 jc=(j-joff)*isize
7610 DO i=io,ie
7611 ij=i+ioff+jc
7612 ij_global(i,j)=ij
7613 END DO
7614 END DO
7615!
7616! Reorganize the input global vector in such a way that the tiled data
7617! is contiguous in memory to facilitate "SCATTERV" with different-size
7618! sections.
7619!
7620 nc=0
7621 DO rank=0,ntasks-1
7622 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
7623 iub=bounds(ng) % Imax(cgrid,ghost,rank)
7624 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
7625 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
7626 DO j=jlb,jub
7627 DO i=ilb,iub
7628 ij=ij_global(i,j)
7629 nc=nc+1
7630 vreset(nc)=vglobal(ij)
7631 END DO
7632 END DO
7633 END DO
7634 deallocate (ij_global)
7635 END IF
7636!
7637! Scatter global data to local tiled arrays.
7638!
7639 mysize=(imax-imin+1)*(jmax-jmin+1)
7640 allocate ( vrecv(mysize) )
7641 vrecv=0.0_r8
7642!
7643 CALL mpi_scatterv (vreset, counts, displs, mp_float, &
7644 & vrecv, mysize, mp_float, &
7645 & mymaster, ocn_comm_world, myerror)
7646 IF (myerror.ne.mpi_success) THEN
7647 CALL mpi_error_string (myerror, string, lstr, serror)
7648 WRITE (stdout,20) 'MPI_SCATTERV', myrank, myerror, &
7649 & trim(string)
7650 20 FORMAT (/,' MP_SCATTER2D - error during ',a, &
7651 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
7652 exit_flag=2
7653 RETURN
7654 END IF
7655!
7656! Unpack data buffer and load into tiled array
7657!
7658 nc=0
7659 DO j=jmin,jmax
7660 DO i=imin,imax
7661 nc=nc+1
7662 awrk(i,j)=vrecv(nc)
7663 END DO
7664 END DO
7665 deallocate ( vrecv )
7666!
7667! If requested, include halo exchanges.
7668!
7669 IF (nghost.gt.0) THEN
7670 CALL mp_exchange2d (ng, myrank, model, 1, &
7671 & lbi, ubi, lbj, ubj, &
7672 & nghostpoints, &
7673 & ewperiodic(ng), nsperiodic(ng), &
7674 & awrk)
7675 END IF
7676!
7677! Broadcast global Min/Max values to all tasks in the group since they
7678! are only known by root.
7679!
7680 astats(1)=amin
7681 astats(2)=amax
7682 mysize=2
7683!
7684 CALL mpi_bcast (astats, mysize, mp_float, mymaster, &
7685 & ocn_comm_world, myerror)
7686 IF (myerror.ne.mpi_success) THEN
7687 CALL mpi_error_string (myerror, string, lstr, serror)
7688 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, trim(string)
7689 30 FORMAT (/,' MP_SCATTER2D - error during ',a,' call, Task = ', &
7690 & i3.3, ' Error = ',i3,/,15x,a)
7691 exit_flag=2
7692 RETURN
7693 END IF
7694!
7695 amin=astats(1)
7696 amax=astats(2)
7697# endif
7698# ifdef PROFILE
7699!
7700!-----------------------------------------------------------------------
7701! Turn off time clocks.
7702!-----------------------------------------------------------------------
7703!
7704 CALL wclock_off (ng, model, 67, __line__, myfile)
7705# endif
7706!
7707 RETURN
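
Without SCATTER_BCAST, the routine builds per-rank counts and displacements from the non-overlapping tile bounds, reorders the global vector so each tile's points are contiguous, and distributes the blocks with MPI_SCATTERV. The standalone sketch below shows only that bookkeeping pattern; the tile sizes (rank+1 points per "tile") are invented stand-ins for the BOUNDS(ng) ranges, and MPI_REAL stands in for mp_float.

      program scatterv_demo
      use mpi
      implicit none
      integer :: rank, nproc, ierr, r, nc, mysize
      integer, allocatable :: counts(:), displs(:)
      real, allocatable :: vreset(:), vrecv(:)

      CALL MPI_Init (ierr)
      CALL MPI_Comm_rank (MPI_COMM_WORLD, rank, ierr)
      CALL MPI_Comm_size (MPI_COMM_WORLD, nproc, ierr)
      allocate ( counts(0:nproc-1), displs(0:nproc-1) )

      ! Pretend tile "r" owns r+1 points and build the counts and
      ! displacement vectors the same way the loop over ranks does above.
      nc = 0
      DO r = 0, nproc-1
        displs(r) = nc
        nc = nc+(r+1)
        counts(r) = nc-displs(r)
      END DO

      ! The root holds the global vector, already reordered so that each
      ! tile's points are contiguous; every task receives only its block.
      allocate ( vreset(nc), vrecv(counts(rank)) )
      IF (rank.eq.0) vreset = (/ (REAL(r), r=1,nc) /)
      mysize = counts(rank)

      CALL MPI_Scatterv (vreset, counts, displs, MPI_REAL,              &
     &                   vrecv, mysize, MPI_REAL,                       &
     &                   0, MPI_COMM_WORLD, ierr)
      WRITE (*,*) 'rank', rank, 'received', vrecv

      deallocate (vreset, vrecv, counts, displs)
      CALL MPI_Finalize (ierr)
      end program scatterv_demo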

References mod_param::bmemmax, mod_param::bounds, mod_scalars::ewperiodic, mod_scalars::exit_flag, mod_param::iobounds, mp_exchange_mod::mp_exchange2d(), mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_param::nghostpoints, mod_scalars::nsperiodic, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_param::p2dvar, mod_param::p3dvar, mod_param::r2dvar, mod_param::r3dvar, mod_iounits::stdout, mod_param::u2dvar, mod_param::u3dvar, mod_param::v2dvar, mod_param::v3dvar, wclock_off(), and wclock_on().

Referenced by nf_fread2d_mod::nf_fread2d::nf90_fread2d(), nf_fread3d_mod::nf_fread3d::nf90_fread3d(), nf_fread4d_mod::nf_fread4d::nf90_fread4d(), and white_noise_mod::white_noise2d().


◆ mp_scatter3d()

subroutine distribute_mod::mp_scatter3d ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) lbi,
integer, intent(in) ubi,
integer, intent(in) lbj,
integer, intent(in) ubj,
integer, intent(in) lbk,
integer, intent(in) ubk,
integer, intent(in) nghost,
integer, intent(in) gtype,
real(r8), intent(inout) amin,
real(r8), intent(inout) amax,
integer, intent(in) nwpts,
integer, dimension(nwpts), intent(in) ij_water,
integer, intent(in) npts,
real(r8), dimension(npts+2), intent(inout) a,
real(r8), dimension(lbi:ubi,lbj:ubj,lbk:ubk), intent(out) awrk )

Definition at line 8088 of file distribute.F.

8094!
8095!***********************************************************************
8096! !
8097! This routine broadcasts input global data, packed as 1D real array, !
8098! to each tiled array. Because this routine is also used by the !
8099! adjoint model, the ghost-points in the halo region are NOT updated !
8100! in the output tile array (Awrk). It is used by the master node to !
8101! scatter input global data to each tiled node. !
8102! !
8103! On Input: !
8104! !
8105! ng Nested grid number. !
8106! model Calling model identifier. !
8107! LBi I-dimension Lower bound. !
8108! UBi I-dimension Upper bound. !
8109! LBj J-dimension Lower bound. !
8110! UBj J-dimension Upper bound. !
8111! LBk K-dimension Lower bound. !
8112! UBk K-dimension Upper bound. !
8113! Nghost Number of ghost-points in the halo region. !
8114! gtype C-grid type. If negative and Land-Sea mask is !
8115! available, only water-points are processed. !
8116! Amin Input array minimum value. !
8117! Amax Input array maximum value. !
8118! NWpts Number of water points. !
8119! IJ_water IJ-indices for water points. !
8120! Npts Number of points to process in A. !
8121! A Input global data from each node packed into 1D array !
8122! in column-major order. That is, in the same way !
8123! that Fortran multi-dimensional arrays are stored !
8124! in memory. Only valid on root task, MyMaster. !
8125! Npts Number of points to process in A. !
8126! !
8127! On Output: !
8128! !
8129! Awrk 3D tiled, floating-point array. !
8130! !
8131!***********************************************************************
8132!
8133! Imported variable declarations.
8134!
8135 integer, intent(in) :: ng, model
8136 integer, intent(in) :: LBi, UBi, LBj, UBj, LBk, UBk
8137 integer, intent(in) :: Nghost, gtype, Npts
8138
8139# if defined READ_WATER && defined MASKING
8140 integer, intent(in) :: NWpts
8141 integer, intent(in) :: IJ_water(NWpts)
8142# endif
8143!
8144 real(r8), intent(inout) :: Amin, Amax
8145 real(r8), intent(inout) :: A(Npts+2)
8146 real(r8), intent(out) :: Awrk(LBi:UBi,LBj:UBj,LBk:UBk)
8147!
8148! Local variable declarations.
8149!
8150 integer :: Io, Ie, Jo, Je, Ioff, Joff, Koff
8151 integer :: Imin, Imax, Jmin, Jmax
8152 integer :: iLB, iUB, jLB, jUB
8153 integer :: Isize, Jsize, Ksize, IJsize, Vsize, Vsize2d
8154 integer :: Lstr, MyError, MySize, MyType, Ntasks, Serror, ghost
8155 integer :: Cgrid, i, ic, ij, ijk, j, jc, k, kc, mc, nc, rank
8156!
8157 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: counts, displs
8158!
8159# ifndef SCATTER_BCAST
8160 integer, allocatable :: ijk_global(:,:,:)
8161!
8162 real(r8) :: Astats(2)
8163 real(r8), allocatable :: Vrecv(:)
8164 real(r8), dimension(Npts) :: Vreset
8165# endif
8166 real(r8), dimension(Npts+2) :: Vglobal
8167!
8168 character (len=10) :: MyMethod
8169 character (len=MPI_MAX_ERROR_STRING) :: string
8170
8171 character (len=*), parameter :: MyFile = &
8172 & __FILE__//", mp_scatter3d"
8173
8174# ifdef PROFILE
8175!
8176!-----------------------------------------------------------------------
8177! Turn on time clocks.
8178!-----------------------------------------------------------------------
8179!
8180 CALL wclock_on (ng, model, 67, __line__, myfile)
8181# endif
8182!
8183!-----------------------------------------------------------------------
8184! Set horizontal starting and ending indices for parallel domain
8185! partitions in the XI- and ETA-directions.
8186!-----------------------------------------------------------------------
8187!
8188! Maximum automatic buffer memory size in bytes.
8189!
8190 bmemmax(ng)=max(bmemmax(ng), real(SIZE(vglobal)*kind(a),r8))
8191!
8192! Set full grid first and last point according to staggered C-grid
8193! classification. Notice that the offsets are for the private array
8194! counters.
8195!
8196 mytype=abs(gtype)
8197
8198 SELECT CASE (mytype)
8199 CASE (p2dvar, p3dvar)
8200 io=iobounds(ng) % ILB_psi
8201 ie=iobounds(ng) % IUB_psi
8202 jo=iobounds(ng) % JLB_psi
8203 je=iobounds(ng) % JUB_psi
8204 ioff=0
8205 joff=1
8206 CASE (r2dvar, r3dvar)
8207 io=iobounds(ng) % ILB_rho
8208 ie=iobounds(ng) % IUB_rho
8209 jo=iobounds(ng) % JLB_rho
8210 je=iobounds(ng) % JUB_rho
8211 ioff=1
8212 joff=0
8213 CASE (u2dvar, u3dvar)
8214 io=iobounds(ng) % ILB_u
8215 ie=iobounds(ng) % IUB_u
8216 jo=iobounds(ng) % JLB_u
8217 je=iobounds(ng) % JUB_u
8218 ioff=0
8219 joff=0
8220 CASE (v2dvar, v3dvar)
8221 io=iobounds(ng) % ILB_v
8222 ie=iobounds(ng) % IUB_v
8223 jo=iobounds(ng) % JLB_v
8224 je=iobounds(ng) % JUB_v
8225 ioff=1
8226 joff=1
8227 CASE DEFAULT ! RHO-points
8228 io=iobounds(ng) % ILB_rho
8229 ie=iobounds(ng) % IUB_rho
8230 jo=iobounds(ng) % JLB_rho
8231 je=iobounds(ng) % JUB_rho
8232 ioff=1
8233 joff=0
8234 END SELECT
8235
8236 IF (lbk.eq.0) THEN
8237 koff=0
8238 ELSE
8239 koff=1
8240 END IF
8241
8242 isize=ie-io+1
8243 jsize=je-jo+1
8244 ksize=ubk-lbk+1
8245 ijsize=isize*jsize
8246!
8247! Set Scatter counts and displacement vectors. Use non-overlapping
8248! (ghost=0) ranges according to tile rank in 'mpi_scatterv' to work
8249! correctly.
8250!
8251 ghost=0 ! non-overlapping
8252!
8253 SELECT CASE (mytype)
8254 CASE (p2dvar, p3dvar)
8255 cgrid=1
8256 CASE (r2dvar, r3dvar)
8257 cgrid=2
8258 CASE (u2dvar, u3dvar)
8259 cgrid=3
8260 CASE (v2dvar, v3dvar)
8261 cgrid=4
8262 CASE DEFAULT ! RHO-points
8263 cgrid=2
8264 END SELECT
8265!
8266 ntasks=ntilei(ng)*ntilej(ng)
8267 nc=0
8268 DO rank=0,ntasks-1
8269 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
8270 iub=bounds(ng) % Imax(cgrid,ghost,rank)
8271 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
8272 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
8273 displs(rank)=nc
8274 DO k=lbk,ubk
8275 DO j=jlb,jub
8276 DO i=ilb,iub
8277 nc=nc+1
8278 END DO
8279 END DO
8280 END DO
8281 counts(rank)=nc-displs(rank)
8282 END DO
8283!
8284! Load global data into send buffer, Vglobal, which is only known by
8285! the root process at this point. If the input data is at water
8286! points only, fill the land points with zeros.
8287!
8288 vglobal=0.0_r8
8289 vsize=npts
8290!
8291 IF (myrank.eq.mymaster) Then
8292 IF (gtype.gt.0) THEN
8293 vglobal(1:vsize)=a(1:vsize)
8294# if defined READ_WATER && defined MASKING
8295 ELSE
8296 nc=0
8297 DO k=lbk,ubk
8298 kc=(k-koff)*ijsize
8299 ij=0
8300 mc=0
8301 DO j=jo,je
8302 jc=(j-joff)*isize
8303 DO i=io,ie
8304 ij=ij+1
8305 ic=i+ioff+jc+kc
8306 IF (ij_water(mc+1).eq.ij) THEN
8307 mc=mc+1
8308 nc=nc+1
8309 vglobal(ic)=a(nc)
8310 ELSE
8311 vglobal(ic)=0.0_r8
8312 END IF
8313 END DO
8314 END DO
8315 END DO
8316 vsize=ic
8317# endif
8318 END IF
8319 END IF
8320!
8321!-----------------------------------------------------------------------
8322! Scatter requested array data.
8323!-----------------------------------------------------------------------
8324
8325# ifdef SCATTER_BCAST
8326!
8327! Set tile range to include overlapping halos, if requested.
8328!
8329 IF (nghost.eq.0) THEN
8330 ghost=0 ! non-overlapping
8331 ELSE
8332 ghost=1 ! overlapping
8333 END IF
8334 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
8335 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
8336 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
8337 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
8338!
8339! Append Min/Max values since they are only known by root.
8340!
8341 IF (myrank.eq.mymaster) Then
8342 vglobal(vsize+1)=amin
8343 vglobal(vsize+2)=amax
8344 END IF
8345 vsize=vsize+2
8346!
8347! Broadcast data to all processes in the group, itself included.
8348!
8349 CALL mpi_bcast (vglobal, vsize, mp_float, mymaster, &
8350 & ocn_comm_world, myerror)
8351 IF (myerror.ne.mpi_success) THEN
8352 CALL mpi_error_string (myerror, string, lstr, serror)
8353 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, trim(string)
8354 10 FORMAT (/,' MP_SCATTER3D - error during ',a, &
8355 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
8356 exit_flag=2
8357 RETURN
8358 END IF
8359!
8360! Unpack data buffer and load into tiled array.
8361!
8362 DO k=lbk,ubk
8363 kc=(k-koff)*isize*jsize
8364 DO j=jmin,jmax
8365 jc=(j-joff)*isize
8366 DO i=imin,imax
8367 ic=i+ioff+jc+kc
8368 awrk(i,j,k)=vglobal(ic)
8369 END DO
8370 END DO
8371 END DO
8372 amin=vglobal(vsize-1)
8373 amax=vglobal(vsize)
8374
8375# else
8376!
8377! Set tile range for non-overlapping tiles.
8378!
8379 ghost=0 ! non-overlapping
8380 imin=bounds(ng) % Imin(cgrid,ghost,myrank)
8381 imax=bounds(ng) % Imax(cgrid,ghost,myrank)
8382 jmin=bounds(ng) % Jmin(cgrid,ghost,myrank)
8383 jmax=bounds(ng) % Jmax(cgrid,ghost,myrank)
8384!
8385! If master node, set (i,j,k) indices map from global array to vector.
8386!
8387 IF (myrank.eq.mymaster) THEN
8388 allocate ( ijk_global(io:ie,jo:je,lbk:ubk) )
8389!
8390 DO k=lbk,ubk
8391 kc=(k-koff)*ijsize
8392 DO j=jo,je
8393 jc=(j-joff)*isize
8394 DO i=io,ie
8395 ijk=i+ioff+jc+kc
8396 ijk_global(i,j,k)=ijk
8397 END DO
8398 END DO
8399 END DO
8400!
8401! Reorganize the input global vector in such a way that the tile data
8402! is contiguous in memory to facilitate "SCATTERV" with different-size
8403! sections.
8404!
8405 nc=0
8406 DO rank=0,ntasks-1
8407 ilb=bounds(ng) % Imin(cgrid,ghost,rank)
8408 iub=bounds(ng) % Imax(cgrid,ghost,rank)
8409 jlb=bounds(ng) % Jmin(cgrid,ghost,rank)
8410 jub=bounds(ng) % Jmax(cgrid,ghost,rank)
8411 DO k=lbk,ubk
8412 DO j=jlb,jub
8413 DO i=ilb,iub
8414 ijk=ijk_global(i,j,k)
8415 nc=nc+1
8416 vreset(nc)=vglobal(ijk)
8417 END DO
8418 END DO
8419 END DO
8420 END DO
8421 deallocate (ijk_global)
8422 END IF
8423!
8424! Distribute global data into local tiled arrays.
8425!
8426 mysize=(imax-imin+1)*(jmax-jmin+1)*(ubk-lbk+1)
8427 allocate ( vrecv(mysize) )
8428 vrecv=0.0_r8
8429!
8430 CALL mpi_scatterv (vreset, counts, displs, mp_float, &
8431 & vrecv, mysize, mp_float, &
8432 & mymaster, ocn_comm_world, myerror)
8433 IF (myerror.ne.mpi_success) THEN
8434 CALL mpi_error_string (myerror, string, lstr, serror)
8435 WRITE (stdout,20) 'MPI_SCATTERV', myrank, myerror, &
8436 & trim(string)
8437 20 FORMAT (/,' MP_SCATTER3D - error during ',a, &
8438 & ' call, Task = ', i3.3, ' Error = ',i3,/,15x,a)
8439 exit_flag=2
8440 RETURN
8441 END IF
8442!
8443! Unpack data buffer and load into tiled array
8444!
8445 nc=0
8446 DO k=lbk,ubk
8447 DO j=jmin,jmax
8448 DO i=imin,imax
8449 nc=nc+1
8450 awrk(i,j,k)=vrecv(nc)
8451 END DO
8452 END DO
8453 END DO
8454 deallocate ( vrecv )
8455!
8456! If requested, include halo exchanges.
8457!
8458 IF (nghost.gt.0) THEN
8459 CALL mp_exchange3d (ng, myrank, model, 1, &
8460 & lbi, ubi, lbj, ubj, lbk, ubk, &
8461 & nghostpoints, &
8462 & ewperiodic(ng), nsperiodic(ng), &
8463 & awrk)
8464 END IF
8465!
8466! Broadcast global Min/Max values to all tasks in the group since they
8467! are only known by root.
8468!
8469 astats(1)=amin
8470 astats(2)=amax
8471 mysize=2
8472!
8473 CALL mpi_bcast (astats, mysize, mp_float, mymaster, &
8474 & ocn_comm_world, myerror)
8475 IF (myerror.ne.mpi_success) THEN
8476 CALL mpi_error_string (myerror, string, lstr, serror)
8477 lstr=len_trim(string)
8478 WRITE (stdout,30) 'MPI_BCAST', myrank, myerror, trim(string)
8479 30 FORMAT (/,' MP_SCATTER3D - error during ',a,' call, Task = ', &
8480 & i3.3, ' Error = ',i3,/,15x,a)
8481 exit_flag=2
8482 RETURN
8483 END IF
8484!
8485 amin=astats(1)
8486 amax=astats(2)
8487# endif
8488# ifdef PROFILE
8489!
8490!-----------------------------------------------------------------------
8491! Turn off time clocks.
8492!-----------------------------------------------------------------------
8493!
8494 CALL wclock_off (ng, model, 67, __line__, myfile)
8495# endif
8496!
8497 RETURN
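
Relative to mp_scatter2d, the main addition is the vertical index in the column-major packing, ic = i+Ioff + (j-Joff)*Isize + (k-Koff)*IJsize, used both to fill Vglobal and to unpack it into the tiled array. The short sketch below only evaluates that formula over an invented RHO-point-like grid; the bounds and offsets are illustrative and are not taken from IOBOUNDS.

      program pack_index_demo
      implicit none
      integer, parameter :: io=0, ie=4, jo=0, je=3, lbk=1, ubk=2
      integer, parameter :: isize=ie-io+1, jsize=je-jo+1
      integer, parameter :: ijsize=isize*jsize
      integer, parameter :: ioff=1, joff=0, koff=1   ! RHO-point offsets, LBk=1
      integer :: i, j, k, ic, jc, kc

      ! Linear position of point (i,j,k) inside the column-major global
      ! vector, matching ic=i+ioff+jc+kc in the listing above.
      DO k = lbk, ubk
        kc = (k-koff)*ijsize
        DO j = jo, je
          jc = (j-joff)*isize
          DO i = io, ie
            ic = i+ioff+jc+kc
          END DO
        END DO
      END DO
      WRITE (*,*) 'last packed index =', ic,                            &
     &            ' total points =', isize*jsize*(ubk-lbk+1)
      end program pack_index_demo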

References mod_param::bmemmax, mod_param::bounds, mod_scalars::ewperiodic, mod_scalars::exit_flag, mod_param::iobounds, mp_exchange_mod::mp_exchange3d(), mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_param::nghostpoints, mod_scalars::nsperiodic, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_param::p2dvar, mod_param::p3dvar, mod_param::r2dvar, mod_param::r3dvar, mod_iounits::stdout, mod_param::u2dvar, mod_param::u3dvar, mod_param::v2dvar, mod_param::v3dvar, wclock_off(), and wclock_on().

Referenced by nf_fread3d_mod::nf_fread3d::nf90_fread3d(), nf_fread4d_mod::nf_fread4d::nf90_fread4d(), and white_noise_mod::white_noise3d().


◆ mp_scatter_state()

subroutine distribute_mod::mp_scatter_state ( integer, intent(in) ng,
integer, intent(in) model,
integer, intent(in) mstr,
integer, intent(in) mend,
integer, intent(in) asize,
real(r8), dimension(asize), intent(inout) a,
real(r8), dimension(mstr:mend), intent(out) awrk )

Definition at line 8500 of file distribute.F.

8502!
8503!***********************************************************************
8504! !
8505! This routine scatters (global to threaded) state data to all nodes !
8506! in the group. Before this can be done, the global data needs to be !
8507! collected from all the nodes by the master. This is achieved by !
8508! summing the input values at each point. This routine is used to !
8509! pack the state data for the GST analysis propagators. !
8510! !
8511! On Input: !
8512! !
8513! ng Nested grid number. !
8514! model Calling model identifier. !
8515! Mstr Threaded array lower bound. !
8516! Mend Threaded array upper bound. !
8517! Asize Size of array A. !
8518! A Threaded 1D array to process. !
8519! !
8520! On Output: !
8521! !
8522! A Collected data from all nodes. !
8523! Awrk Threaded block of data. !
8524! !
8525!***********************************************************************
8526!
8527! Imported variable declarations.
8528!
8529 integer, intent(in) :: ng, model
8530 integer, intent(in) :: Mstr, Mend, Asize
8531!
8532 real(r8), intent(inout) :: A(Asize)
8533
8534 real(r8), intent(out) :: Awrk(Mstr:Mend)
8535!
8536! Local variable declarations.
8537!
8538 integer :: Lstr, MyError, Serror
8539 integer :: i, rank, request
8540
8541 integer, dimension(0:NtileI(ng)*NtileJ(ng)-1) :: Rrequest
8542
8543 integer, dimension(MPI_STATUS_SIZE) :: status
8544!
8545 real(r8), allocatable :: Arecv(:)
8546!
8547 character (len=MPI_MAX_ERROR_STRING) :: string
8548
8549 character (len=*), parameter :: MyFile = &
8550 & __FILE__//", mp_scatter_state"
8551
8552# ifdef PROFILE
8553!
8554!-----------------------------------------------------------------------
8555! Turn on time clocks.
8556!-----------------------------------------------------------------------
8557!
8558 CALL wclock_on (ng, model, 67, __line__, myfile)
8559# endif
8560!
8561!-----------------------------------------------------------------------
8562! Collect data blocks from all nodes and scatter the data to all nodes.
8563!-----------------------------------------------------------------------
8564!
8565! Maximum automatic buffer memory size in bytes.
8566!
8567 bmemmax(ng)=max(bmemmax(ng), real(asize*kind(a),r8))
8568!
8569! All nodes have distinct pieces of the data and zero everywhere else.
8570! So the strategy here is for the master node to receive the data from
8571! the other nodes (excluding itself) and accumulate the sum at each
8572! point. Then, the master node broadcasts (itself included) its copy of
8573! the accumulated data to the other nodes in the group. After this, each
8574! node loads only the required block of the data into the output array.
8575!
8576! Notice that only the master node allocates the receiving buffer
8577! (Arecv). It also receives only one buffer at a time to avoid having
8578! a very large communication array. So here memory is more important
8579! than time.
8580!
8581 IF (myrank.eq.mymaster) THEN
8582!
8583! If master node, allocate and receive buffer.
8584!
8585 IF (.not.allocated(arecv)) THEN
8586 allocate (arecv(asize))
8587 END IF
8588!
8589! If master node, loop over other nodes to receive and accumulate the
8590! data.
8591!
8592 DO rank=1,ntilei(ng)*ntilej(ng)-1
8593 CALL mpi_irecv (arecv, asize, mp_float, rank, rank+5, &
8594 & ocn_comm_world, rrequest(rank), myerror)
8595 CALL mpi_wait (rrequest(rank), status, myerror)
8596 IF (myerror.ne.mpi_success) THEN
8597 CALL mpi_error_string (myerror, string, lstr, serror)
8598 lstr=len_trim(string)
8599 WRITE (stdout,10) 'MPI_IRECV', rank, myerror, string(1:lstr)
8600 10 FORMAT (/,' MP_SCATTER_STATE - error during ',a, &
8601 & ' call, Task = ', i3.3,' Error = ',i3,/,13x,a)
8602 exit_flag=2
8603 RETURN
8604 END IF
8605 DO i=1,asize
8606 a(i)=a(i)+arecv(i)
8607 END DO
8608 END DO
8609!
8610! Otherwise, send data to master node.
8611!
8612 ELSE
8613 CALL mpi_isend (a, asize, mp_float, mymaster, myrank+5, &
8614 & ocn_comm_world, request, myerror)
8615 CALL mpi_wait (request, status, myerror)
8616 IF (myerror.ne.mpi_success) THEN
8617 CALL mpi_error_string (myerror, string, lstr, serror)
8618 lstr=len_trim(string)
8619 WRITE (stdout,10) 'MPI_ISEND', myrank, myerror, string(1:lstr)
8620 exit_flag=2
8621 RETURN
8622 END IF
8623 END IF
8624!
8625! Broadcast accumulated (full) data to all nodes.
8626!
8627 CALL mpi_bcast (a, asize, mp_float, mymaster, ocn_comm_world, &
8628 & myerror)
8629 IF (myerror.ne.mpi_success) THEN
8630 CALL mpi_error_string (myerror, string, lstr, serror)
8631 lstr=len_trim(string)
8632 WRITE (stdout,10) 'MPI_BCAST', myrank, myerror, string(1:lstr)
8633 exit_flag=2
8634 RETURN
8635 END IF
8636!
8637! Load appropriate data block into output array.
8638!
8639 DO i=mstr,mend
8640 awrk(i)=a(i)
8641 END DO
8642!
8643! Deallocate receive buffer.
8644!
8645 IF (allocated(arecv).and.(myrank.eq.mymaster)) THEN
8646 deallocate (arecv)
8647 END IF
8648
8649# ifdef PROFILE
8650!
8651!-----------------------------------------------------------------------
8652! Turn off time clocks.
8653!-----------------------------------------------------------------------
8654!
8655 CALL wclock_off (ng, model, 67, __line__, myfile)
8656# endif
8657!
8658 RETURN
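
Each task enters mp_scatter_state with its own block of the state vector and zeros everywhere else; the master accumulates the blocks one task at a time (to keep the receive buffer small) and then broadcasts the full vector, after which each task keeps only its Mstr:Mend range. The sketch below is not the routine's point-to-point scheme: it swaps in a single in-place MPI_Allreduce(SUM), which yields the same result when buffer memory is not a concern. Names are invented, and it assumes the vector length divides evenly among the tasks.

      program scatter_state_demo
      use mpi
      implicit none
      integer, parameter :: asize = 8      ! assumed divisible by ntasks
      integer :: rank, nproc, ierr, i, mstr, mend
      real :: a(asize)
      real, allocatable :: awrk(:)

      CALL MPI_Init (ierr)
      CALL MPI_Comm_rank (MPI_COMM_WORLD, rank, ierr)
      CALL MPI_Comm_size (MPI_COMM_WORLD, nproc, ierr)

      ! This task's block of the state vector; zeros elsewhere, so the
      ! sum over all tasks rebuilds the full vector.
      a = 0.0
      mstr = rank*(asize/nproc)+1
      mend = (rank+1)*(asize/nproc)
      DO i = mstr, mend
        a(i) = REAL(i)
      END DO

      ! Swapped-in shortcut: one in-place global sum replaces the
      ! receive-and-accumulate loop followed by MPI_Bcast.
      CALL MPI_Allreduce (MPI_IN_PLACE, a, asize, MPI_REAL, MPI_SUM,    &
     &                    MPI_COMM_WORLD, ierr)

      ! Keep only this task's block in the output array.
      allocate ( awrk(mstr:mend) )
      awrk = a(mstr:mend)
      WRITE (*,*) 'rank', rank, 'block', awrk
      deallocate (awrk)
      CALL MPI_Finalize (ierr)
      end program scatter_state_demo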

References mod_param::bmemmax, mod_scalars::exit_flag, mod_parallel::mp_float, mod_parallel::mymaster, mod_parallel::myrank, mod_param::ntilei, mod_param::ntilej, mod_parallel::ocn_comm_world, mod_iounits::stdout, wclock_off(), and wclock_on().

Referenced by ad_pack(), ad_pack_tile(), and tl_pack().
