MPI-AMRVAC  3.0
The MPI - Adaptive Mesh Refinement - Versatile Advection Code
amrini.t
Go to the documentation of this file.
1 !> Generate and initialize all grids at the coarsest level (level one)
2 subroutine initlevelone
 ! NOTE(review): listing lines 3-4 are missing from this extracted view
 ! (presumably use statements / implicit none) -- confirm against amrini.t.
5 
6  integer :: iigrid, igrid{#IFDEF EVOLVINGBOUNDARY , Morton_no}
7 
 ! The hierarchy starts with only the root (coarsest) level present.
8  levmin=1
9  levmax=1
10 
 ! Build the root forest (see forest.t): the tree roots for level one.
11  call init_forest_root
12 
 ! Collect this rank's grid list (see connectivity.t: getigrids).
13  call getigrids
 ! NOTE(review): listing line 14 is missing here; the cross-reference index
 ! mentions build_connectivity (connectivity.t:45) -- confirm against amrini.t.
15 
16  ! fill solution space of all root grids
17  do iigrid=1,igridstail; igrid=igrids(iigrid);
 ! allocate this grid's node storage before filling it
18  call alloc_node(igrid)
19  ! in case gradient routine used in initial condition, ensure geometry known
20  call initial_condition(igrid)
21  end do
22  {#IFDEF EVOLVINGBOUNDARY
23  ! mark physical-boundary blocks on space-filling curve
24  do morton_no=morton_start(mype),morton_stop(mype)
25  igrid=sfc_to_igrid(morton_no)
26  if (phyboundblock(igrid)) sfc_phybound(morton_no)=1
27  end do
 ! sum the per-rank boundary flags over all ranks, in place
28  call mpi_allreduce(mpi_in_place,sfc_phybound,nleafs,mpi_integer,&
29  mpi_sum,icomm,ierrmpi)
30  }
31 
32  ! update ghost cells
33  call getbc(global_time,0.d0,ps,iwstart,nwgc)
34 
35 end subroutine initlevelone
36 
37 !> fill in initial condition
38 subroutine initial_condition(igrid)
39  ! Need only to set the mesh values (can leave ghost cells untouched)
 ! NOTE(review): listing lines 40-41 are missing from this extracted view
 ! (presumably use statements) -- confirm against amrini.t.
42 
43  integer, intent(in) :: igrid
44 
45  ! in case gradient routine used in initial condition, ensure geometry known
 ! make this grid the active block and set its per-direction cell sizes
 ! (the ^d& prefix is a VACPP macro expanding over each dimension)
46  block=>ps(igrid)
47  ^d&dxlevel(^d)=rnode(rpdx^d_,igrid);
48 
 ! the user-supplied initializer is mandatory; abort cleanly if unset
49  if (.not. associated(usr_init_one_grid)) then
50  call mpistop("usr_init_one_grid not defined")
51  else
 ! fill w (and use x) over the block's index ranges ixg^ll / ixm^ll
52  call usr_init_one_grid(ixg^ll,ixm^ll,ps(igrid)%w,ps(igrid)%x)
53  end if
54 
55 end subroutine initial_condition
56 
57 !> modify initial condition
58 subroutine modify_ic
 ! NOTE(review): listing lines 59-60 are missing from this extracted view
 ! (presumably use statements) -- confirm against amrini.t.
61 
62  integer :: iigrid, igrid
63 
 ! Re-apply the user initial condition on every grid owned by this rank.
64  do iigrid=1,igridstail; igrid=igrids(iigrid);
 ! make this grid the active block and set its per-direction cell sizes
 ! (the ^d& prefix is a VACPP macro expanding over each dimension)
65  block=>ps(igrid)
66  ^d&dxlevel(^d)=rnode(rpdx^d_,igrid);
67 
 ! the user-supplied initializer is mandatory; abort cleanly if unset
68  if (.not. associated(usr_init_one_grid)) then
69  call mpistop("usr_init_one_grid not defined")
70  else
71  call usr_init_one_grid(ixg^ll,ixm^ll,ps(igrid)%w,ps(igrid)%x)
72  end if
73  end do
74 
75 end subroutine modify_ic
76 
{^nooned
78 !> improve initial condition after initialization
 ! NOTE(review): listing lines 79-80 are missing from this extracted view;
 ! per the cross-reference index below they hold the statement
 ! "subroutine improve_initial_condition()" -- confirm against amrini.t.
81  use mod_usr_methods
 ! NOTE(review): listing lines 82-83 and 85 (further use statements) are
 ! also absent from this extraction.
84  use mod_physics
86 
87  logical :: active
88 
89  if(associated(usr_improve_initial_condition)) then
 ! NOTE(review): listing line 90 is missing; presumably it calls
 ! usr_improve_initial_condition -- confirm against amrini.t.
91  else if(stagger_grid) then
92  if(associated(usr_init_vector_potential)) then
93  ! re-calculate magnetic field from the vector potential in a
94  ! completely divergence-free way for AMR mesh in 3D
95  if(levmax>levmin.and.ndim==3) call recalculateb
96  end if
97  if(slab_uniform.and.associated(phys_clean_divb)) then
98  ! Project out the divB using multigrid poisson solver
99  ! if not initialised from vector potential
100  if(.not.use_multigrid) call mg_setup_multigrid()
101  call phys_clean_divb(global_time,0.d0,active)
 ! refresh ghost cells after the divergence cleaning modified w
102  call getbc(global_time,0.d0,ps,iwstart,nwgc)
103  end if
104  end if
105 
106 end subroutine improve_initial_condition
}
subroutine alloc_node(igrid)
allocate arrays on igrid node
subroutine initlevelone
Generate and initialize all grids at the coarsest level (level one)
Definition: amrini.t:3
subroutine initial_condition(igrid)
fill in initial condition
Definition: amrini.t:39
subroutine modify_ic
modify initial condition
Definition: amrini.t:59
subroutine improve_initial_condition()
improve initial condition after initialization
Definition: amrini.t:80
subroutine mpistop(message)
Exit MPI-AMRVAC with an error message.
Definition: comm_lib.t:194
subroutine build_connectivity
Definition: connectivity.t:45
subroutine getigrids
Definition: connectivity.t:26
subroutine init_forest_root
build root forest
Definition: forest.t:3
subroutine recalculateb
re-calculate the magnetic field from the vector potential in a completely divergence-free way
update ghost cells of all blocks including physical boundaries
subroutine getbc(time, qdt, psb, nwstart, nwbc, req_diag)
do update ghost cells of all blocks including physical boundaries
This module contains definitions of global parameters and variables and some generic functions/subrou...
type(state), pointer block
Block pointer for using one block and its previous state.
double precision global_time
The global simulation time.
integer, parameter ndim
Number of spatial dimensions for grid variables.
logical stagger_grid
True for using stagger grid.
logical, dimension(:), allocatable phyboundblock
True if a block has any physical boundary.
integer icomm
The MPI communicator.
integer mype
The rank of the current MPI task.
integer, dimension(:), allocatable, parameter d
integer ixm
the mesh range (within a block with ghost cells)
integer ierrmpi
A global MPI error return code.
double precision, dimension(:,:), allocatable rnode
Corner coordinates.
logical use_multigrid
Use multigrid (only available in 2D and 3D)
logical slab_uniform
uniform Cartesian geometry or not (stretched Cartesian)
double precision, dimension(ndim) dxlevel
Module to couple the octree-mg library to AMRVAC. This file uses the VACPP preprocessor,...
subroutine mg_setup_multigrid()
Setup multigrid for usage.
This module defines the procedures of a physics module. It contains function pointers for the various...
Definition: mod_physics.t:4
procedure(sub_clean_divb), pointer phys_clean_divb
Definition: mod_physics.t:87
Module with all the methods that users can customize in AMRVAC.
procedure(p_no_args), pointer usr_improve_initial_condition
procedure(init_one_grid), pointer usr_init_one_grid
Initialize each grid block's data.
procedure(init_vector_potential), pointer usr_init_vector_potential