tem_subTree_from Subroutine

public subroutine tem_subTree_from(me, map2global, treeID, comm, dirname, grwPnts)

This subroutine creates a subtree of a given tree, either from a map2global list (positions of the elements in the global treeID list) or, in case a local shape is used, from an explicit list of treeIDs. Exactly one of the two has to be provided. Only processes in comm will be involved.

Arguments

type(tem_subTree_type), intent(inout) :: me
    subTree to be created from the list of elements (map2global)

integer, intent(in), optional :: map2global(:)
    positions of the treeIDs in the global treeID list

integer(kind=long_k), intent(in), optional :: treeID(:)
    list of treeIDs; only use this in case a local shape is set

integer, intent(in), optional :: comm
    MPI communicator to use; defaults to me%global%comm if not specified

character(len=*), intent(in), optional :: dirname
    directory to store the mesh in; taken to be me%global%dirname if not specified

type(tem_grwPoints_type), intent(in), optional :: grwPnts
    array of point values that needs to be stored in the subtree
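
A minimal usage sketch follows; the caller, the element selection, and all names besides tem_subTree_from and tem_subTree_type are illustrative assumptions, and module use statements are omitted:

  ! Hypothetical caller: build a subtree from every second element of the
  ! global treeID list. build_half_subtree, mySubTree, nGlobalElems and map
  ! are made-up names for this sketch.
  subroutine build_half_subtree( mySubTree, nGlobalElems, comm )
    type(tem_subTree_type), intent(inout) :: mySubTree
    integer, intent(in) :: nGlobalElems
    integer, intent(in) :: comm
    integer, allocatable :: map(:)
    integer :: iElem

    allocate(map(nGlobalElems/2))
    do iElem = 1, nGlobalElems/2
      ! entries are positions in the global treeID list, not treeIDs
      map(iElem) = 2*iElem
    end do

    ! pass either map2global or treeID, never both
    call tem_subTree_from( me = mySubTree, map2global = map, comm = comm )
  end subroutine build_half_subtree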


Calls

tem_subTree_from calls:

  mpi_bcast
  mpi_comm_rank
  mpi_comm_size
  mpi_exscan
  mpi_reduce
  tem_abort (which in turn calls mpi_abort)

Called by

tem_subTree_from is called by:

  tem_create_subTree_of, which in turn is called by
  tem_create_subTree_of_st_funList, tem_init_convergence,
  tem_init_tracker_subTree, and tem_write_debugMesh

Source Code

  subroutine tem_subTree_from( me, map2global, treeID, comm, dirname, grwPnts )
    ! ---------------------------------------------------------------------------
    !> subTree to be created from list of elements (map2global)
    type(tem_subTree_type), intent(inout) :: me
    !> position of the treeID in the global treeID list
    integer, optional, intent(in) :: map2global(:)
    !> list of treeIDs; only use this in case a local shape is set
    integer(kind=long_k), optional, intent(in) :: treeID(:)
    !> mpi communicator to use, defaults to the one in me%global%comm if not
    !! specified
    integer, intent(in), optional :: comm
    !> directory to store the mesh in. It is taken to be me%global%dirname if not
    !! specified
    character(len=*), intent(in), optional :: dirname
    !> array of point values that needs to be stored in the subtree
    type(tem_grwPoints_type), intent(in), optional :: grwPnts
    ! ---------------------------------------------------------------------------
    integer :: commloc
    integer(kind=long_k) :: offset, nElems, nPoints
    integer :: ierror
    integer :: nElemsList
    ! mpi variables
    integer :: commsize, rank, iPnt
    ! ---------------------------------------------------------------------------

    if (present(dirname)) then
      me%global%dirname = dirname
    end if

    if (present(comm)) then
      commloc = comm
    else
      commloc = me%global%comm
    end if

    ! allocate and store the points for get-point tracking, if applicable
    if (present(grwPnts)) then
      me%nPoints = grwPnts%coordX%nVals
      allocate(me%points(me%nPoints,3))
      ! copy the points coordinate-wise; the loop index selects one point
      ! per iteration
      do iPnt = 1, me%nPoints
        me%points(iPnt, 1) = grwPnts%coordX%val(iPnt)
        me%points(iPnt, 2) = grwPnts%coordY%val(iPnt)
        me%points(iPnt, 3) = grwPnts%coordZ%val(iPnt)
      end do
    end if

    ! determine number of elements in the provided map2global
    nElemsList = 0
    if(present(map2global) .and. .not. present(treeID))then
      nElemsList  = size(map2global)
      allocate(me%map2global( nElemsList ))
      ! copy the map2global
      me%map2global = map2global
    else if(present(treeID) .and. .not. present(map2global))then
      nElemsList  = size(treeID)
      allocate(me%treeID( nElemsList ))
      ! copy the list of treeID
      me%treeID = treeID
      ! in case a treeID is set we have to set useLocalMesh to true
      me%useLocalMesh = .true.
    else
      write(logUnit(1),*) 'Error: either none or both of treeID and map2global'
      write(logUnit(1),*) '       were passed to tem_subTree_from.'
      write(logUnit(1),*) '       Exactly one of them is allowed.'
      call tem_abort()
    end if

    ! get size of communicator and my rank in it
    call mpi_comm_size(commloc, commsize, ierror)
    call mpi_comm_rank(commloc, rank, ierror)
    me%global%nparts = commsize
    me%global%mypart = rank

    ! determine the local element count and the offset of this process
    offset = 0
    nElems  = int(nElemsList, kind=long_k)
    ! determine offset for each process (get nElems of ranks before)
    call MPI_Exscan( nElems, offset, 1, long_k_mpi, mpi_sum, commloc, ierror )
    me%elemOffset = offset
    me%nElems = nElemsList

    ! last process offset + its nElems is total number of elems
    nElems = offset + nElems

    ! broadcast the global total from the last process to all in the communicator
    call MPI_Bcast( nElems, 1, long_k_mpi, commsize-1, commloc, ierror )
    ! store in global number of elements
    me%global%nElems = nElems

    if (present(grwPnts)) then
      nPoints = me%nPoints
      call MPI_Reduce(nPoints, me%glob_nPoints, 1, long_k_mpi, mpi_sum, &
        &             0, commloc, ierror)
    end if

  end subroutine tem_subTree_from
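
The offset bookkeeping above follows a common MPI pattern: MPI_Exscan over the local element counts gives each process the number of elements on all lower ranks, the last rank therefore knows the global total as offset + nElems, and MPI_Bcast with root commsize-1 distributes that total. A self-contained sketch of the same pattern (illustrative only, not part of the library):

  program exscan_offset_demo
    use mpi
    implicit none
    integer :: ierror, rank, nranks
    integer(kind=8) :: nLocal, offset, nTotal

    call MPI_Init(ierror)
    call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierror)
    call MPI_Comm_size(MPI_COMM_WORLD, nranks, ierror)

    ! pretend each rank owns rank+1 elements
    nLocal = int(rank + 1, kind=8)

    ! MPI_Exscan leaves the result on rank 0 undefined, so pre-set it to 0,
    ! just as tem_subTree_from initialises offset before the call
    offset = 0_8
    call MPI_Exscan( nLocal, offset, 1, MPI_INTEGER8, MPI_SUM, &
      &              MPI_COMM_WORLD, ierror )

    ! on the last rank, offset + nLocal equals the global element count;
    ! broadcast it from there to every other rank
    nTotal = offset + nLocal
    call MPI_Bcast( nTotal, 1, MPI_INTEGER8, nranks-1, MPI_COMM_WORLD, ierror )

    write(*,*) 'rank', rank, 'offset', offset, 'total', nTotal
    call MPI_Finalize(ierror)
  end program exscan_offset_demo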