MPI-AMRVAC  2.2
The MPI - Adaptive Mesh Refinement - Versatile Advection Code
amrvac.t
Go to the documentation of this file.
1 !> AMRVAC solves a set of hyperbolic equations
2 !> \f$\vec{u}_t + \nabla_x \cdot \vec{f}(\vec{u}) = \vec{s}\f$
3 !> using adaptive mesh refinement.
4 program amrvac
5 
! NOTE(review): this is a Doxygen source listing; the leading integers are the
! original file's own line numbers. Gaps in that numbering (e.g. 6-10, 14,
! 16-17, 105, 115, 119) mark lines omitted by the extract, so some 'use'
! statements and calls are not visible here.
11  use mod_usr
12  use mod_initialize
13  use mod_particles
15  use mod_advance, only: process
18 
19  double precision :: time0, time_in
! part_file_exists: set by read_particles_snapshot below; records whether a
! particle snapshot file accompanied the restart .dat file.
20  logical,save :: part_file_exists=.false.
21 
22 
! Initialize the MPI environment (comm_lib.t).
23  call comm_start()
24 
! time0 anchors all wall-clock timing reported at the end of the run.
25  time0 = mpi_wtime()
26  time_advance = .false.
27  time_bc = zero
28 
29  ! read command line arguments first
30  call read_arguments()
31 
32  ! the user_init routine should load a physics module
33  call usr_init()
34 
! Read par files and initialize global variables (mod_initialize).
35  call initialize_amrvac()
36 
! Two startup paths: restart/convert from an existing snapshot, or a fresh run.
37  if (restart_from_file /= undefined) then
38  ! restart from previous file or dat file conversion
39  ! get input data from previous AMRVAC run
40 
41  ! read in dat file
42  call read_snapshot()
43 
44  ! rewrite it=0 snapshot when restart from it=0 state
45  if(it==0.and.itsave(1,2)==0) snapshotnext=snapshotnext-1
46 
47  if (reset_time) then
48  ! reset it and global time to original value
! NOTE(review): only 'it' is reset on the visible line; the global_time reset
! presumably sits on omitted line 50 of the original source - confirm there.
49  it = it_init
51  ! reset snapshot number
52  snapshotnext=0
53  end if
54 
55  if (reset_it) then
56  ! reset it to original value
57  it = it_init
58  end if
59 
60  ! modify initial condition
61  if (firstprocess) call modify_ic
62 
63  ! reset AMR grid
64  if (reset_grid) then
65  call settree
66  else
67  ! set up boundary flux conservation arrays
68  if (levmax>levmin) call allocatebflux
69  end if
70 
71  ! select active grids
72  call selectgrids
73 
74  ! update ghost cells
75  call getbc(global_time,0.d0,ps,1,nwflux+nwaux)
76 
! Particle restart path: when converting, particles are handled and the
! program stops here without entering the time loop.
77  if(use_particles) then
78  call read_particles_snapshot(part_file_exists)
79  if (.not. part_file_exists) call particles_create()
80  if(convert) then
81  call handle_particles()
82  call time_spent_on_particles()
83  call comm_finalize
84  stop
85  end if
86  end if
87 
! Conversion-only run: write the converted output and stop (no time loop).
88  if (convert) then
89  if (npe/=1.and.(.not.(index(convert_type,'mpi')>=1)) &
90  .and. convert_type .ne. 'user') &
91  call mpistop("non-mpi conversion only uses 1 cpu")
92 
93  ! Optionally call a user method that can modify the grid variables
94  ! before saving the converted data
95  if (associated(usr_process_grid) .or. &
96  associated(usr_process_global)) then
97  call process(it,global_time)
98  end if
99 
100  call generate_plotfile
101  call comm_finalize
102  stop
103  end if
104 
106 
107  else
108 
! Fresh-start path: build the level-1 grid, then refine.
109  ! form and initialize all grids at level one
110  call initlevelone
111 
112  ! set up and initialize finer level grids, if needed
113  call settree
114 
116 
! VACPP preprocessor block, active for ndim > 1; the call inside it
! (original line 119, presumably improve_initial_condition) is omitted here.
117  {^nooned
118  ! improve initial condition
120  }
121 
122  ! select active grids
123  call selectgrids
124 
125  if (use_particles) call particles_create()
126 
127  end if
128 
! Rank 0 reports the startup wall-clock time.
129  if (mype==0) then
130  print*,'-------------------------------------------------------------------------------'
131  write(*,'(a,f17.3,a)')' Startup phase took : ',mpi_wtime()-time0,' sec'
132  print*,'-------------------------------------------------------------------------------'
133  end if
134 
135  ! an interface to allow user to do special things before the main loop
136  if (associated(usr_before_main_loop)) &
137  call usr_before_main_loop()
138 
139  ! do time integration of all grids on all levels
140  call timeintegration()
141 
142  if (mype==0) then
143  print*,'-------------------------------------------------------------------------------'
144  write(*,'(a,f17.3,a)')' Finished AMRVAC in : ',mpi_wtime()-time0,' sec'
145  print*,'-------------------------------------------------------------------------------'
146  end if
147 
! Shut down the MPI environment (comm_lib.t).
148  call comm_finalize
149 
150 contains
151 
!> Main time-integration driver: advances all grids over all levels until
!> it_max, time_max, the wall-clock limit, or final_dt_exit is reached,
!> handling periodic/triggered output, crash recovery and regridding.
!> (Doxygen extract: original lines 153-154, 156, 158, 169, 288, 307, 310
!> are omitted from this listing.)
152  subroutine timeintegration()
155  use mod_forest, only: nleafs_active
157  use mod_input_output, only: saveamrfile
159 
160  integer :: level, ifile, fixcount, ncells_block, igrid, iigrid
161  integer(kind=8) ncells_update
162  logical :: save_now, crashall
163  double precision :: time_last_print, time_write0, time_write, time_before_advance, dt_loop
164 
165  time_in=mpi_wtime()
! -bigdouble forces a status print on the very first loop iteration.
166  time_last_print = -bigdouble
167  fixcount=1
168 
170 
! Initialize the "last saved" bookkeeping per output file type so that
! timetosave() measures intervals from the (resumed) start point.
171  do ifile=nfile,1,-1
172  if(resume_previous_run) then
! Snap tsavelast to the last dtsave multiple at/below global_time.
173  tsavelast(ifile)=aint((global_time+smalldouble)/dtsave(ifile))*dtsave(ifile)
! Integer division intentionally truncates to the last ditsave multiple.
174  itsavelast(ifile)=it/ditsave(ifile)*ditsave(ifile)
175  else
176  tsavelast(ifile)=global_time
177  itsavelast(ifile)=it
178  end if
179  end do
180 
181  ! the next two are used to keep track of the performance during runtime:
182  ittimelast=it
183  timelast=mpi_wtime()
184 
185  ! ------ start of integration loop. ------------------
186  if (mype==0) then
187  write(*, '(A,ES9.2,A)') ' Start integrating, print status every ', &
188  time_between_print, ' seconds'
189  write(*, '(A4,A10,A12,A12,A12)') ' #', 'it', 'time', 'dt', 'wc-time(s)'
190  end if
191 
192  timeloop0=mpi_wtime()
193  time_bc=0.d0
194  time_write=0.d0
! VACPP macro: expands to the product over dimensions of the number of
! physical (non-ghost) cells per block.
195  ncells_block={(ixghi^d-2*nghostcells)*}
196  ncells_update=0
197  dt_loop=0.d0
198 
199  time_advance=.true.
200 
201  time_evol : do
202 
203  time_before_advance=mpi_wtime()
204  ! set time step
205  call setdt()
206 
207  ! Optionally call a user method that can modify the grid variables at the
208  ! beginning of a time step
209  if (associated(usr_process_grid) .or. &
210  associated(usr_process_global)) then
211  call process(it,global_time)
212  end if
213 
214  ! Check if output needs to be written
215  do ifile=nfile,1,-1
216  save_file(ifile) = timetosave(ifile)
217  end do
218 
219  timeio0=mpi_wtime()
220 
! Throttled status line: at most one print per time_between_print seconds.
221  if (timeio0 - time_last_print > time_between_print) then
222  time_last_print = timeio0
223  if (mype == 0) then
224  write(*, '(A4,I10,ES12.4,ES12.4,ES12.4)') " #", &
225  it, global_time, dt, timeio0 - time_in
226  end if
227  end if
228 
229  ! output data
230  if (any(save_file)) then
231  if(associated(usr_modify_output)) then
232  ! Users can modify or set variables before output is written
233  do iigrid=1,igridstail; igrid=igrids(iigrid);
! VACPP: set dxlevel(1:ndim) from this grid's stored cell sizes.
234  ^d&dxlevel(^d)=rnode(rpdx^d_,igrid);
235  block=>ps(igrid)
236  call usr_modify_output(ixg^ll,ixm^ll,global_time,ps(igrid)%w,ps(igrid)%x)
237  end do
238  end if
! time_write accumulates only this step's write cost; it also feeds the
! wall-clock exit margin below.
239  time_write=0.d0
240  do ifile=nfile,1,-1
241  if (save_file(ifile)) then
242  time_write0=mpi_wtime()
243  call saveamrfile(ifile)
244  time_write=time_write+mpi_wtime()-time_write0
245  end if
246  end do
247  end if
248 
249 
250  ! output a snapshot when user write a file named 'savenow' in the same
251  ! folder as the executable amrvac
! Only rank 0 checks the filesystem; the result is broadcast to all ranks.
252  if (mype==0) inquire(file='savenow',exist=save_now)
253  if (npe>1) call mpi_bcast(save_now,1,mpi_logical,0,icomm,ierrmpi)
254 
255  if (save_now) then
256  if(mype==0) write(*,'(a,i7,a,i7,a,es12.4)') ' save a snapshot No.',&
257  snapshotnext,' at it=',it,' global_time=',global_time
258  call saveamrfile(1)
259  call saveamrfile(2)
260  call mpi_file_delete('savenow',mpi_info_null,ierrmpi)
261  endif
262  timeio_tot=timeio_tot+mpi_wtime()-timeio0
263 
! Stop early if elapsed time plus one more loop plus a 4x write-cost safety
! margin would exceed the wall-clock budget.
264  pass_wall_time=mpi_wtime()-time0+dt_loop+4.d0*time_write >=wall_time_max
265 
266  ! exit time loop if time is up
267  if (it>=it_max .or. global_time>=time_max .or. pass_wall_time .or. final_dt_exit) exit time_evol
268 
269  ! solving equations
270  call advance(it)
271 
272  ! if met unphysical values, output the last good status and stop the run
! Any rank's crash flag triggers a coordinated dump and abort on all ranks.
273  call mpi_allreduce(crash,crashall,1,mpi_logical,mpi_lor,icomm,ierrmpi)
274  if (crashall) then
! Restore the pre-advance state (pso) before writing the rescue snapshot.
275  do iigrid=1,igridstail; igrid=igrids(iigrid);
276  ps(igrid)%w=pso(igrid)%w
277  end do
278  call saveamrfile(1)
279  call saveamrfile(2)
280  if(mype==0) write(*,*) "Error: small value encountered, run crash."
! NOTE(review): the loop index iigrid is passed as the MPI error code here;
! looks accidental but is harmless - confirm intent.
281  call mpi_abort(icomm, iigrid, ierrmpi)
282  end if
283 
284  ! Optionally call a user method that can modify the grid variables at the
285  ! end of a time step: this is for two-way coupling to PIC, e.g.
! The call inside this if (original line 288, presumably process_advanced)
! is omitted from this extract.
286  if (associated(usr_process_adv_grid) .or. &
287  associated(usr_process_adv_global)) then
289  end if
290 
291  ! update AMR mesh and tree
! Regrid only every ditregrid-th step; fixgrid() can freeze the mesh.
292  timegr0=mpi_wtime()
293  if(ditregrid>1) then
294  if(fixcount<ditregrid) then
295  fixcount=fixcount+1
296  else
297  if (refine_max_level>1 .and. .not.(fixgrid())) call resettree
298  fixcount=1
299  endif
300  else
301  if (refine_max_level>1 .and. .not.(fixgrid())) call resettree
302  endif
303  timegr_tot=timegr_tot+(mpi_wtime()-timegr0)
304 
305  ! update time variables
! NOTE(review): the global_time increment (original line 307) is omitted
! from this extract.
306  it = it + 1
308 
! Guard against iteration-counter-related save bookkeeping overflow for very
! long runs (the body's first line, original 310, is omitted here).
309  if(it>9000000)then
311  itsavelast(:)=0
312  end if
313 
314  ! count updated cells
315  ncells_update=ncells_update+ncells_block*nleafs_active
316 
317  ! time lapses in one loop
318  dt_loop=mpi_wtime()-time_before_advance
319  end do time_evol
320 
321  time_advance=.false.
322 
323  timeloop=mpi_wtime()-timeloop0
324 
! Final performance summary on rank 0.
325  if (mype==0) then
326  write(*,'(a,f12.3,a)')' Total timeloop took : ',timeloop,' sec'
327  write(*,'(a,f12.3,a)')' Time spent on Regrid+Update: ',timegr_tot,' sec'
328  write(*,'(a,f12.2,a)')' Percentage: ',100.0*timegr_tot/timeloop,' %'
329  write(*,'(a,f12.3,a)')' Time spent on IO in loop : ',timeio_tot,' sec'
330  write(*,'(a,f12.2,a)')' Percentage: ',100.0*timeio_tot/timeloop,' %'
331  write(*,'(a,f12.3,a)')' Time spent on BC : ',time_bc,' sec'
332  write(*,'(a,f12.2,a)')' Percentage: ',100.0*time_bc/timeloop,' %'
333  write(*,'(a,f12.3,a)')' Time spent on run : ',timeloop-timeio_tot,' sec'
334  write(*,'(a,es12.3 )')' Cells_updated / cpu / sec : ',dble(ncells_update)*dble(nstep)/dble(npe)/timeloop
335  end if
336 
337  ! output end state
! Write a final snapshot for every file type not already saved at this it.
338  timeio0=mpi_wtime()
339  do ifile=nfile,1,-1
340  if(itsavelast(ifile)<it)call saveamrfile(ifile)
341  enddo
342  if (mype==0) call mpi_file_close(log_fh,ierrmpi)
343  timeio_tot=timeio_tot+(mpi_wtime()-timeio0)
344 
345  if (mype==0) then
346  write(*,'(a,f12.3,a)')' Total time spent on IO : ',timeio_tot,' sec'
347  write(*,'(a,f12.3,a)')' Total timeintegration took : ',mpi_wtime()-time_in,' sec'
348  write(*, '(A4,I10,ES12.3,ES12.3,ES12.3)') " #", &
349  it, global_time, dt, timeio0 - time_in
350  end if
351 
! VACPP conditional: compiled only when the RAY (raytracing) flag is set.
352  {#IFDEF RAY
353  call time_spent_on_rays
354  }
355 
356  if(use_particles) call time_spent_on_particles
357 
358  if (use_multigrid) call mg_timers_show(mg)
359  end subroutine timeintegration
360 
360 
361  !> Save times are defined by either tsave(isavet(ifile),ifile) or
362  !> itsave(isaveit(ifile),ifile) or dtsave(ifile) or ditsave(ifile)
363  !> tsavestart(ifile) determines first start time. This only affects
364  !> read out times determined by dtsave(ifiles).
365  !> Other conditions may be included.
!> Side effects: advances isaveit/isavet past triggered entries and, when a
!> save is due, stamps tsavelast/itsavelast with the current time/iteration.
366  logical function timetosave(ifile)
368 
369  integer:: ifile
370  logical:: oksave
371 
372 ! if(mype==0) print*,ifile,'OK tsave 0',isavet(ifile),global_time
373  oksave=.false.
! Criterion 1: current iteration matches the next entry of the explicit
! iteration list itsave; consume that entry.
374  if (it==itsave(isaveit(ifile),ifile)) then
375 ! if(mype==0) print*,ifile,'OK tsave 1',isaveit(ifile),global_time
376  oksave=.true.
377  isaveit(ifile)=isaveit(ifile)+1
378  end if
! Criterion 2: ditsave iterations have elapsed since the last save.
379  if (it==itsavelast(ifile)+ditsave(ifile)) oksave=.true.
380 
381 ! if(mype==0) print*,ifile,'OK tsave2',isavet(ifile),global_time,tsave(isavet(ifile),ifile)
! Criterion 3: the step [global_time-dt, global_time] crossed the next entry
! of the explicit time list tsave; consume that entry.
382  if (global_time>=tsave(isavet(ifile),ifile).and.global_time-dt<tsave(isavet(ifile),ifile)) then
383 ! if(mype==0) print*,ifile,'OK tsave3',isavet(ifile),global_time,tsave(isavet(ifile),ifile)
384  oksave=.true.
385  isavet(ifile)=isavet(ifile)+1
386  end if
387 
! Criterion 4: periodic dtsave interval has elapsed, but only once
! tsavestart has been reached (smalldouble guards float comparisons).
388  if(global_time>=tsavestart(ifile)-smalldouble)then
389  if (global_time>=tsavelast(ifile)+dtsave(ifile)-smalldouble)then
390  oksave=.true.
391  n_saves(ifile) = n_saves(ifile) + 1
392  endif
393  endif
394 
! Record this save point so interval-based criteria restart from here.
395  if (oksave) then
396  tsavelast(ifile) =global_time
397  itsavelast(ifile)=it
398  end if
399  timetosave=oksave
400 
401  return
402  end function timetosave
403 
403 
404  !> Return true if the AMR grid should not be adapted any more. This is
405  !> controlled by tfixgrid or itfixgrid. Other conditions may be included.
406  logical function fixgrid()
! NOTE(review): the function body (original lines 407 and 409) is omitted
! from this Doxygen extract; per the header it presumably compares
! global_time against tfixgrid and it against itfixgrid - confirm in the
! full amrvac.t source.
408 
410  end function fixgrid
411 
412 end program amrvac
This module contains definitions of global parameters and variables and some generic functions/subrou...
Module for reading input and writing output.
subroutine, public process_advanced(iit, qt)
process_advanced is user entry in time loop, just after advance allows to modify solution, add extra variables, etc. added for handling two-way coupled PIC-MHD Warning: w is now at global_time^(n+1), global time and iteration at global_time^n, it^n
Definition: mod_advance.t:733
update ghost cells of all blocks including physical boundaries
procedure(p_no_args), pointer usr_before_main_loop
This module handles the initialization of various components of amrvac.
Definition: mod_initialize.t:2
double precision time_max
End time for the simulation.
subroutine time_spent_on_rays
Definition: raytracing.t:1884
procedure(process_adv_global), pointer usr_process_adv_global
double precision timeio0
Definition: mod_timing.t:10
procedure(process_global), pointer usr_process_global
character(len=std_len) convert_type
Which format to use when converting.
Module containing all the time stepping schemes.
Definition: mod_advance.t:2
integer nstep
How many sub-steps the time integrator takes.
logical resume_previous_run
If true, restart a previous run from the latest snapshot.
subroutine read_arguments()
Read the command line arguments passed to amrvac.
double precision wall_time_max
Ending wall time (in hours) for the simulation.
procedure(process_adv_grid), pointer usr_process_adv_grid
double precision timegr_tot
Definition: mod_timing.t:11
integer it_init
initial iteration count
subroutine, public initialize_amrvac()
Initialize amrvac: read par files and initialize variables.
integer it_max
Stop the simulation after this many time steps have been taken.
procedure(sub_modify_io), pointer usr_modify_output
Module with basic grid data structures.
Definition: mod_forest.t:2
Module for flux conservation near refinement boundaries.
character(len= *), parameter undefined
subroutine resettree
reset AMR and (de)allocate boundary flux storage at level changes
Definition: amrgrid.t:49
subroutine improve_initial_condition()
improve initial condition after initialization
Definition: amrini.t:88
type(mg_t) mg
Data structure containing the multigrid tree.
double precision timelast
Definition: mod_timing.t:16
integer npe
The number of MPI tasks.
Module to couple the octree-mg library to AMRVAC. This file uses the VACPP preprocessor, but its use is kept to a minimum.
logical function timetosave(ifile)
Save times are defined by either tsave(isavet(ifile),ifile) or itsave(isaveit(ifile),ifile) or dtsave(ifile) or ditsave(ifile) tsavestart(ifile) determines first start time. This only affects read out times determined by dtsave(ifiles). Other conditions may be included.
Definition: amrvac.t:367
subroutine timeintegration()
Definition: amrvac.t:153
double precision, dimension(nfile) tsavelast
logical convert
If true and restart_from_file is given, convert snapshots to other file formats.
subroutine read_snapshot
Routine to read in snapshots (.dat files). When it cannot recognize the file version, it will automatically try the 'old' reader.
integer, dimension(nfile) isavet
This is a template for a new user problem of mhd.
logical pass_wall_time
If true, wall time is up, modify snapshotnext for later overwrite.
double precision timeloop0
Definition: mod_timing.t:11
integer nghostcells
Number of ghost cells surrounding a grid.
Module with all the methods that users can customize in AMRVAC.
subroutine, public advance(iit)
Advance all the grids over one time step, including all sources.
Definition: mod_advance.t:20
procedure(process_grid), pointer usr_process_grid
integer ixghi
Upper index of grid block arrays.
integer ditregrid
Reconstruct the AMR grid once every ditregrid iteration(s)
subroutine selectgrids
Definition: selectgrids.t:3
subroutine settree
Build up AMR.
Definition: amrgrid.t:3
double precision timeio_tot
Definition: mod_timing.t:10
integer ierrmpi
A global MPI error return code.
integer snapshotnext
IO: snapshot and collapsed views output numbers/labels.
integer, dimension(nfile) itsavelast
character(len=std_len) restart_from_file
If not 'unavailable', resume from snapshot with this base file name.
subroutine getbc(time, qdt, psb, nwstart, nwbc, req_diag)
do update ghost cells of all blocks including physical boundaries
integer, parameter filelog_
Constant indicating log output.
integer nleafs_active
Definition: mod_forest.t:78
double precision time_between_print
to monitor timeintegration loop at given wall-clock time intervals
integer, dimension(:), allocatable, parameter d
logical reset_it
If true, reset iteration count to 0.
integer mype
The rank of the current MPI task.
double precision global_time
The global simulation time.
subroutine comm_start
Initialize the MPI environment.
Definition: comm_lib.t:7
integer, dimension(1:nfile) n_saves
Number of saved files of each type.
subroutine saveamrfile(ifile)
logical reset_time
If true, reset iteration count and global_time to original values, and start writing snapshots at ind...
logical, dimension(nfile) save_file
whether or not to save an output file
double precision, dimension(nfile) dtsave
Repeatedly save output of type N when dtsave(N) simulation time has passed.
logical function fixgrid()
Return true if the AMR grid should not be adapted any more. This is controlled by tfixgrid or itfixgr...
Definition: amrvac.t:407
logical use_particles
Use particles module or not.
subroutine particles_create()
Create initial particles.
Definition: mod_particles.t:42
double precision, dimension(nfile) tsavestart
Start of read out (not counting specified read outs)
Module containing all the particle routines.
Definition: mod_particles.t:2
subroutine mpistop(message)
Exit MPI-AMRVAC with an error message.
Definition: comm_lib.t:198
subroutine modify_ic
modify initial condition
Definition: amrini.t:64
logical firstprocess
If true, call initonegrid_usr upon restarting.
double precision tfixgrid
Fix the AMR grid after this time.
procedure(sub_check_params), pointer phys_check_params
Definition: mod_physics.t:59
subroutine mg_setup_multigrid()
Setup multigrid for usage.
double precision, dimension(ndim) dxlevel
double precision, dimension(nsavehi, nfile) tsave
Save output of type N on times tsave(:, N)
subroutine usr_init()
This routine should set user methods, and activate the physics module.
integer, dimension(nfile) isaveit
integer snapshotini
Resume from the snapshot with this index.
integer ittimelast
Definition: mod_timing.t:15
logical crash
Save a snapshot before crash a run met unphysical values.
subroutine generate_plotfile
Definition: convert.t:3
logical use_multigrid
Use multigrid (only available in 2D and 3D)
integer slowsteps
If > 1, then in the first slowsteps-1 time steps dt is reduced by a factor .
integer icomm
The MPI communicator.
program amrvac
AMRVAC solves a set of hyperbolic equations using adaptive mesh refinement.
Definition: amrvac.t:4
integer refine_max_level
Maximal number of AMR levels.
This module defines the procedures of a physics module. It contains function pointers for the various...
Definition: mod_physics.t:4
integer, parameter nfile
Number of output methods.
integer log_fh
MPI file handle for logfile.
logical reset_grid
If true, rebuild the AMR grid upon restarting.
subroutine, public process(iit, qt)
process is a user entry in time loop, before output and advance allows to modify solution, add extra variables, etc. Warning: CFL dt already determined (and is not recomputed)!
Definition: mod_advance.t:698
subroutine setdt()
setdt - set dt for all levels between levmin and levmax. dtpar>0 –> use fixed dtpar for all level dt...
Definition: setdt.t:5
integer itfixgrid
Fix the AMR grid after this many time steps.
subroutine initlevelone
Generate and initialize all grids at the coarsest level (level one)
Definition: amrini.t:3
subroutine comm_finalize
Finalize (or shutdown) the MPI environment.
Definition: comm_lib.t:39
integer it
Number of time steps taken.
double precision time_bc
accumulated wall-clock time spent on boundary conditions
integer, dimension(nfile) ditsave
Repeatedly save output of type N when ditsave(N) time steps have passed.
double precision timegr0
Definition: mod_timing.t:11
double precision, dimension(:,:), allocatable rnode
Corner coordinates.
double precision timeloop
Definition: mod_timing.t:11
double precision time_init
Start time for the simulation.
subroutine, public allocatebflux
integer, parameter fileout_
Constant indicating regular output.
logical final_dt_exit
Force timeloop exit when final dt < dtmin.
integer, dimension(nsavehi, nfile) itsave
Save output of type N on iterations itsave(:, N)