AmrMultiGridLevel.h
//
// Class AmrMultiGridLevel
//   This class represents a single AMR level, i.e. it stores all matrices
//   and vectors of a level.
//
// Copyright (c) 2017 - 2020, Matthias Frey, Paul Scherrer Institut, Villigen PSI, Switzerland
// All rights reserved
//
// Implemented as part of the PhD thesis
// "Precise Simulations of Multibunches in High Intensity Cyclotrons"
//
// This file is part of OPAL.
//
// OPAL is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// You should have received a copy of the GNU General Public License
// along with OPAL. If not, see <https://www.gnu.org/licenses/>.
//
#ifndef AMR_MULTI_GRID_LEVEL
#define AMR_MULTI_GRID_LEVEL

#include <vector>

#include <AMReX_IntVect.H>

#include "Ippl.h"

#include "AmrMultiGridDefs.h"

#include <unordered_map>

template <class Level>
class AmrBoundary;

template <class MatrixType, class VectorType>
class AmrMultiGridLevel {
    
public:
    typedef amr::AmrField_t             AmrField_t;
    typedef amr::AmrGeometry_t          AmrGeometry_t;
    typedef std::unique_ptr<AmrField_t> AmrField_u;
    typedef std::shared_ptr<AmrField_t> AmrField_s;
    typedef amr::AmrIntVect_t           AmrIntVect_t;
    typedef MatrixType                  matrix_t;
    typedef VectorType                  vector_t;
    typedef amrex::BaseFab<int>         basefab_t;
    typedef amrex::FabArray<basefab_t>  mask_t;
    typedef std::shared_ptr<AmrBoundary<AmrMultiGridLevel<MatrixType,
                                                          VectorType
                                                          >
                                        >
                            > boundary_t;
    
    typedef amr::comm_t comm_t;
    typedef amr::dmap_t dmap_t;
    typedef amr::node_t node_t;
    typedef amr::global_ordinal_t go_t;
    typedef amr::scalar_t scalar_t;
    typedef amr::local_ordinal_t lo_t;
    
    /// Type for matrix indices
    typedef std::vector<go_t> indices_t;
    
    /// Type for matrix entries
    typedef std::vector<scalar_t> coefficients_t;
    
    /// Type mapping a matrix column index to its coefficient value
    typedef std::unordered_map<go_t, scalar_t> umap_t;
    
    // covered   : ghost cells covered by valid cells of this FabArray
    //             (including periodically shifted valid cells)
    // notcovered: ghost cells not covered by valid cells
    //             (including ghost cells outside periodic boundaries) (BNDRY)
    // physbnd   : boundary cells outside the domain (excluding periodic boundaries)
    // interior  : interior cells (i.e., valid cells)
    enum Mask {
        COVERED   = -1,
        INTERIOR  =  0,
        BNDRY     =  1,
        PHYSBNDRY =  2
    };
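
    // Illustrative sketch only (not part of this interface): the level mask
    // built by buildLevelMask() stores one of the Mask values per cell, so a
    // per-cell query could look like
    //
    //     const basefab_t& mfab = (*mask)[mfi];   // mfi: amrex::MFIter over *mask
    //     if ( mfab(iv) == Mask::PHYSBNDRY ) { /* cell lies outside the domain */ }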

    // NO   : not a refined cell 
    // YES  : cell got refined
    enum Refined {
        YES = 0,
        NO  = 1
    };
    
public:
    /*!
     * @param meshScaling mesh scaling due to the particle rest frame
     * @param _grids boxes (grids) of this level
     * @param _dmap AMReX core distribution map
     * @param _geom geometry of the domain
     * @param rr refinement ratio
     * @param bc physical boundaries (x, y, z)
     * @param comm MPI communicator
     * @param node Kokkos node type (Serial, OpenMP, CUDA)
     */
    AmrMultiGridLevel(const Vector_t& meshScaling,
                      const amrex::BoxArray& _grids,
                      const amrex::DistributionMapping& _dmap,
                      const AmrGeometry_t& _geom,
                      const AmrIntVect_t& rr,
                      const boundary_t* bc,
                      const Teuchos::RCP<comm_t>& comm,
                      const Teuchos::RCP<node_t>& node);
    
    ~AmrMultiGridLevel();
    
    /*!
     * Map a 2D / 3D grid point to an array index
     * @param iv grid point (i, j, k)
     */
    go_t serialize(const AmrIntVect_t& iv) const;
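
    // A sketch of the expected mapping (the actual implementation is in
    // AmrMultiGridLevel.hpp): a row-major linearisation over the grid points
    // of this level, e.g. in 3D something like
    //
    //     go_t idx = iv[0] + nr[0] * iv[1] + nr[0] * nr[1] * iv[2];
    //
    // where nr holds the number of grid points per direction (cf. nr_m below).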
    
    /*!
     * Checks if grid point is on the physical / mesh boundary
     * @param iv grid point (i, j, k)
     */
    bool isBoundary(const AmrIntVect_t& iv) const;
    
    /*!
     * Checks all directions for a physical / mesh boundary.
     * @param iv is the cell where we want to have the boundary value
     * @param map of global matrix indices to matrix values
     * @param value matrix entry (coefficient)
     */
    bool applyBoundary(const AmrIntVect_t& iv,
                       umap_t& map,
                       const scalar_t& value);
    
    /*!
     * Slightly faster version of applyBoundary() that takes the mask directly.
     * @param iv is the cell where we want to have the boundary value
     * @param fab is the mask
     * @param map of global matrix indices to matrix values
     * @param value matrix entry (coefficient)
     * @pre fab needs to be a mask containing
     * AmrMultiGridLevel::Mask::PHYSBNDRY
     */
    bool applyBoundary(const AmrIntVect_t& iv,
                       const basefab_t& fab,
                       umap_t& map,
                       const scalar_t& value);
    
    /*!
     * Apply boundary in a certain direction.
     * @param iv is the cell where we want to have the boundary value
     * @param dir direction of physical / mesh boundary
     * @param map of global matrix indices to matrix values
     * @param value matrix entry (coefficient)
     */
    void applyBoundary(const AmrIntVect_t& iv,
                       const lo_t& dir,
                       umap_t& map,
                       const scalar_t& value);
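
    // Typical call pattern (illustrative sketch only; the actual assembly is
    // done by the solver, not shown here): while building the matrix row of a
    // cell, a stencil neighbour niv that falls on the mesh boundary is folded
    // back into the row via the boundary object, e.g.
    //
    //     umap_t row;
    //     scalar_t coeff = ...;                    // stencil coefficient (placeholder)
    //     if ( this->isBoundary(niv) )
    //         this->applyBoundary(niv, row, coeff);
    //     else
    //         row[this->serialize(niv)] += coeff;
    //
    // niv and coeff are placeholder names used only in this sketch.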
    
    const AmrIntVect_t& refinement() const;
    
    /*!
     * @returns the mesh spacing in particle rest frame
     */
    const scalar_t* cellSize() const;
    
    /*!
     * @returns the mesh spacing in particle rest frame for a
     * certain direction
     */
    const scalar_t& cellSize(lo_t dir) const;
    
    /*!
     * @returns the inverse mesh spacing in particle rest frame
     */
    const scalar_t* invCellSize() const;
    
    /*!
     * @returns the inverse mesh spacing in particle rest
     * frame for a certain direction
     */
    const scalar_t& invCellSize(lo_t dir) const;

    /*!
     * Check if a cell on that level is valid
     * @param iv is the cell
     * @returns true if the cell is valid
     */
    bool isValid(const AmrIntVect_t& iv) const;

    /*!
     * Build a mask specifying whether a grid point is covered,
     * an interior cell, at the physical boundary or at an interior boundary
     */
    void buildLevelMask();

private:
    /*!
     * Build Tpetra::Map of this level
     * @param comm MPI communicator
     * @param node Kokkos node type
     */
    void buildMap(const Teuchos::RCP<comm_t>& comm,
                  const Teuchos::RCP<node_t>& node);
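
    // Conceptually (sketch only, assuming a Tpetra::Map-compatible dmap_t):
    // the map collects the global indices of the cells owned by this MPI
    // rank, roughly
    //
    //     map_p = Teuchos::rcp( new dmap_t(numGlobalCells, myGlobalIndices,
    //                                      0 /*index base*/, comm) );
    //
    // where numGlobalCells and myGlobalIndices are placeholder names; the
    // indices would be the serialize()d grid points of the boxes assigned
    // to this rank.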
    
public:
    const amrex::BoxArray& grids;           ///< boxes of this level
    const amrex::DistributionMapping& dmap; ///< AMReX core distribution map
    const AmrGeometry_t& geom;              ///< geometry of this problem
    
    Teuchos::RCP<dmap_t> map_p;         ///< Tpetra core map
    
    Teuchos::RCP<matrix_t> Anf_p;       ///< no fine Poisson matrix
    Teuchos::RCP<matrix_t> R_p;         ///< restriction matrix
    Teuchos::RCP<matrix_t> I_p;         ///< interpolation matrix
    Teuchos::RCP<matrix_t> Bcrse_p;     ///< boundary from coarse cells
    Teuchos::RCP<matrix_t> Bfine_p;     ///< boundary from fine cells
    Teuchos::RCP<matrix_t> Awf_p;       ///< composite Poisson matrix
    
    /// gradient matrices in x, y, and z to compute electric field
    Teuchos::RCP<matrix_t> G_p[AMREX_SPACEDIM];
    
    Teuchos::RCP<vector_t> rho_p;       ///< charge density
    Teuchos::RCP<vector_t> phi_p;       ///< potential vector
    Teuchos::RCP<vector_t> residual_p;  ///< residual over all cells
    Teuchos::RCP<vector_t> error_p;     ///< error over all cells
    Teuchos::RCP<matrix_t> UnCovered_p; ///< uncovered cells
    
    std::unique_ptr<mask_t> mask;       ///< interior, phys boundary, interface, covered
    std::unique_ptr<mask_t> refmask;    ///< covered (i.e. refined) or not-covered
    std::unique_ptr<mask_t> crsemask;
    
private:
    go_t nr_m[AMREX_SPACEDIM];          ///< number of grid points per direction
    
    AmrIntVect_t rr_m;                  ///< refinement ratio
    
    boundary_t bc_mp[AMREX_SPACEDIM];   ///< boundary conditions
    
    scalar_t dx_m[AMREX_SPACEDIM];      ///< cell size in particle rest frame
    scalar_t invdx_m[AMREX_SPACEDIM];   ///< inverse cell size in particle rest frame
};


#include "AmrMultiGridLevel.hpp"

#endif