Exchange the requested treeIDs between all MPI processes.
Now each process knows which halos are requested. Continue with identifying the actual leaf elements, which are then communicated.
| Type | Intent | Optional | Attributes | Name | Description |
|---|---|---|---|---|---|
| type(treelmesh_type) | intent(in) | | | tree | the global tree |
| type(tem_comm_env_type) | intent(in) | | | proc | Process description to use. |
| type(tem_levelDesc_type) | intent(inout) | | | me(tree%global%minlevel:) | the level descriptor to be filled |
| type(tem_commPattern_type) | intent(in) | | | commPattern | the communication pattern used |
| type(tem_path_type) | intent(in) | | | pathFirst(:) | first and last treeID path in every process |
| type(tem_path_type) | intent(in) | | | pathLast(:) | first and last treeID path in every process |
| type(tem_stencilHeader_type) | intent(in) | | | computeStencil(:) | stencil definition |
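The table above maps one-to-one onto the dummy arguments of the routine. As an illustration only, a hedged call sketch is given below; the caller-side variables (`levelDesc`, `pattern`, `stencils`) are hypothetical placeholders that have to be set up by the surrounding mesh construction code before the call:

```fortran
! Hypothetical caller sketch: tree, proc, pathFirst and pathLast are assumed
! to be built already; levelDesc, pattern and stencils are placeholder names.
call communicate_elements( tree           = tree,      &
  &                        proc           = proc,      &
  &                        me             = levelDesc, &
  &                        commPattern    = pattern,   &
  &                        pathFirst      = pathFirst, &
  &                        pathLast       = pathLast,  &
  &                        computeStencil = stencils )
```

The complete implementation of the routine is listed below.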
```fortran
subroutine communicate_elements( tree, proc, me, commPattern, &
  &                              pathFirst, pathLast, computeStencil )
  ! ---------------------------------------------------------------------------
  !> the global tree
  type(treelmesh_type), intent(in) :: tree
  !> Process description to use.
  type(tem_comm_env_type), intent(in) :: proc
  !> the level descriptor to be filled
  type(tem_levelDesc_type), intent(inout) :: me(tree%global%minlevel:)
  !> the communication pattern used
  type(tem_commPattern_type), intent(in) :: commPattern
  !> first and last treeID path in every process
  type(tem_path_type), intent(in) :: pathFirst(:), pathLast(:)
  !> stencil definition
  type(tem_stencilHeader_type), intent(in) :: computeStencil(:)
  ! ---------------------------------------------------------------------------
  integer :: iLevel, iErr, nProcs, iProc
  integer, allocatable :: nHalos(:)
  integer :: nIterations
  logical :: redo        ! locally indicate if another iteration has to be performed
  logical :: redo_global ! global indicator for another iteration
  ! ---------------------------------------------------------------------------

  call tem_horizontalSpacer( fUnit = logUnit(3) )
  write(logUnit(3),*) 'Communicating elements ...'
  call tem_horizontalSpacer( fUnit = dbgUnit(1), before = 1 )
  write(dbgUnit(1),*) 'Communicating elements ...'

  ! ----------- exchange number of requesting / requested treeIDs -------------
  nIterations = 0
  do !iIter = 1, 1 ! exchange halos until no new elements are created
    nIterations = nIterations + 1
    write(logUnit(4),"(A,I0)") 'Halo count exchange iteration: ', nIterations
    write(dbgUnit(1),"(A,I0)") 'Halo count exchange iteration: ', nIterations
    redo = .false.
    ! communicate nHalos and allocate me%buffer
    call communicate_nElemsToTransfer( me, proc, tree%global%minLevel, &
      &                                tree%global%maxLevel )
    !
    ! ---done.--- exchange number of requesting / requested treeIDs ----------!
    write(dbgUnit(1),"(A)") 'Halo count exchange done!'
    write(logUnit(5),*) ' Done communicating nElems to Transfer'
    ! ----------- ----------!
    !
    ! now we request the halo cells from the mpi processes,
    ! so they can send us their dependencies for these halo cells.
    ! We allow only dependencies with a difference of one level
    ! from lower to higher refinement levels and an arbitrary difference
    ! of refinement level from higher to lower refinement level.
    ! Since we get only the leaves as a dependency from the other mpi process
    ! we have to figure out later which cells we have to add locally
    !
    ! 1) get the number of cells we will receive from the active processes,
    !    since we have to communicate only to the mpi processs which
    !    have/need informations
    ! 2) exchange the treeIDs of the halos
    !
    ! First collapse the me%halos.
    ! We remove the processes where we don't have any elements to exchange
    ! with.
    ! Inverse Communciation (send to sourceProc, recv from targetProc)
    do iLevel = tree%global%minLevel, tree%global%maxLevel
      ! Send treeIDs
      write(logUnit(5),"(A,I0)") ' Requesting remote halos on level ', iLevel
      call request_remoteHalos( levelDesc = me,                &
        &                       tree      = tree,              &
        &                       iLevel    = iLevel,            &
        &                       pathFirst = pathFirst,         &
        &                       pathLast  = pathLast,          &
        &                       stencil   = computeStencil(1), &
        &                       proc      = proc )
      if (tem_logging_isActive(main_debug%logger, 7)) then
        call tem_elemList_dump( me      = me( iLevel )%elem,           &
          &                     nUnit   = dbgUnit(5),                  &
          &                     stencil = .true.,                      &
          &                     string  = 'after request remoteHalos' )
      end if
      write(logUnit(5),*) ' Done requesting remote halos.'

      nProcs = me( iLevel )%haloList%partnerProc%nVals
      if( allocated( nHalos )) deallocate( nHalos )
      allocate( nHalos( nProcs ))
      nHalos(:nProcs) = me( iLevel )%haloList%halos%val(:nProcs)%nVals
      write(logUnit(5),*) ' Identifying lists'
      call identify_lists( me(iLevel) )
      ! If nHalos or nProcs changes, then do request again.
      ! As long as any level has change, do request again.
      redo = redo .or. ( any( me( iLevel )%haloList%halos%val(1:nProcs)%nVals &
        &                     /= nHalos(1:nProcs) )                           &
        &       .or. (nProcs /= me(iLevel)%haloList%partnerProc%nVals) )
    end do !iLevel

    write(logUnit(6),*) ' Allreduce to check if changes occurred on any' &
      &                 //' process'
    ! ------------------------------------------------------------------------
    ! JUROPA work-around for crash in the mpi_allreduce
    ! call mpi_barrier( proc%comm, iErr )
    ! ------------------------------------------------------------------------
    ! Determine among all neighbor processes, if further iterations required
    call mpi_allreduce( redo, redo_global, 1, mpi_logical, mpi_lor, &
      &                 proc%comm, iErr )

    if ( .not. redo_global ) exit
  end do !exchange halos

  write(logUnit(3),*) 'Done exchanging number of elements to communicate.'

  !! Now each process knows, which halos are requested.
  !! Continue with identifying the actual leaf elements, which
  !! are then communicated
  write(logUnit(5),"(A)") 'Return halo counts and Redefine halos ...'
  do iLevel = tree%global%minLevel, tree%global%maxLevel
    write(logUnit(5),"(A,I0)") ' Returning halo counts on level ', iLevel
    ! Receive the number of really existing halo elements
    call return_haloCounts( sendbuffer = me( iLevel )%sendbuffer, &
      &                     recvbuffer = me( iLevel )%recvbuffer, &
      &                     comm       = proc%comm )
    call return_haloCounts(                                    &
      &    sendbuffer = me( iLevel )%sendbufferFromCoarser,    &
      &    recvbuffer = me( iLevel )%recvbufferFromCoarser,    &
      &    comm       = proc%comm )
    call return_haloCounts(                                    &
      &    sendbuffer = me( iLevel )%sendbufferFromFiner,      &
      &    recvbuffer = me( iLevel )%recvbufferFromFiner,      &
      &    comm       = proc%comm )

    ! reset the halos in the elem list
    do iProc = 1, me( iLevel )%haloList%PartnerProc%nVals
      ! First declare all local halos as non-existent, and set only those
      ! actually provided by the remote process.
      call changeType( me(iLevel)%elem,                             &
        &              me(iLevel)%haloList%halos%val(iProc)%nVals,  &
        &              me(iLevel)%haloList%halos%val(iProc)%val(:), &
        &              eT_nonExisting )
    end do

    write(logUnit(5),*) ' Redefining halos ... '
    ! Receive the number of really existing halo elements
    call redefine_halos( levelDesc      = me( iLevel ),            &
      &                  sendbuffer     = me( iLevel )%sendbuffer, &
      &                  recvbuffer     = me( iLevel )%recvbuffer, &
      &                  commPattern    = commPattern,             &
      &                  computeStencil = computeStencil,          &
      &                  proc           = proc )
    call redefine_halos( levelDesc      = me( iLevel ),                       &
      &                  sendbuffer     = me( iLevel )%sendbufferFromCoarser, &
      &                  recvbuffer     = me( iLevel )%recvbufferFromCoarser, &
      &                  commPattern    = commPattern,                        &
      &                  computeStencil = computeStencil,                     &
      &                  proc           = proc )
    call redefine_halos( levelDesc      = me( iLevel ),                     &
      &                  sendbuffer     = me( iLevel )%sendbufferFromFiner, &
      &                  recvbuffer     = me( iLevel )%recvbufferFromFiner, &
      &                  commPattern    = commPattern,                      &
      &                  computeStencil = computeStencil,                   &
      &                  proc           = proc )
    call identify_lists( me(iLevel) )
  end do

  ! dump debug output
  if (tem_logging_isActive(main_debug%logger, 7)) then
    do iLevel = tree%global%minLevel, tree%global%maxLevel
      call tem_elemList_dump( me      = me( iLevel )%elem,            &
        &                     nUnit   = dbgUnit(5),                   &
        &                     stencil = .true.,                       &
        &                     string  = 'after redefine remoteHalos' )
    end do
  end if
  write(logUnit(3),*) 'Done with communication of elements. '
  call tem_horizontalSpacer( fUnit = logUnit(3), after = 1 )
  write(dbgUnit(1),*) 'Done Communicating elements ...'
  call tem_horizontalSpacer( fUnit = dbgUnit(1), after = 1 )

end subroutine communicate_elements
```
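The termination of the halo-request loop above comes down to a logical OR-reduction: every rank raises a local `redo` flag when its halo lists changed during the sweep, and the loop only exits once no rank reports a change. A minimal, self-contained sketch of this pattern, with the actual halo bookkeeping replaced by a comment and plain `MPI_COMM_WORLD` standing in for `proc%comm`:

```fortran
program redo_allreduce_sketch
  use mpi
  implicit none
  logical :: redo, redo_global
  integer :: iErr, nIterations

  call mpi_init( iErr )
  nIterations = 0
  do
    nIterations = nIterations + 1
    redo = .false.
    ! ... request halos and rebuild the local lists here; set
    !     redo = .true. whenever the local halo counts changed ...

    ! A single OR-reduction decides globally whether another sweep is needed.
    call mpi_allreduce( redo, redo_global, 1, mpi_logical, mpi_lor, &
      &                 MPI_COMM_WORLD, iErr )
    if ( .not. redo_global ) exit
  end do
  call mpi_finalize( iErr )
end program redo_allreduce_sketch
```

Because `mpi_lor` is used, a single process that still discovers new halo elements forces every process into another iteration, which keeps all ranks synchronized in the number of exchange rounds.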