MPI-AMRVAC  3.1
The MPI - Adaptive Mesh Refinement - Versatile Advection Code
amrvac.t
!> AMRVAC solves a set of hyperbolic equations
!> \f$\vec{u}_t + \nabla_x \cdot \vec{f}(\vec{u}) = \vec{s}\f$
!> using adaptive mesh refinement.
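!> (For example, scalar advection at a fixed velocity \f$\vec{v}\f$ is the
!> special case \f$\rho_t + \nabla_x \cdot (\vec{v}\rho) = 0\f$, i.e.
!> \f$\vec{u}=(\rho)\f$, \f$\vec{f}(\vec{u})=\vec{v}\rho\f$ and \f$\vec{s}=0\f$.)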
program amrvac

  use mod_global_parameters
  use mod_input_output
  use mod_usr_methods
  use mod_usr
  use mod_initialize
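  ! code wrapped in {^nooned ... } is LASY/VACPP preprocessor syntax: the
  ! enclosed lines are kept only when the code is generated for more than
  ! one dimension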
  {^nooned
  use mod_multigrid_coupling
  }
  use mod_selectgrids, only: selectgrids
  use mod_particles
  use mod_fix_conserve, only: allocatebflux
  use mod_advance, only: process
  use mod_amr_grid, only: settree, resettree, resettree_convert
  use mod_convert, only: init_convert
  use mod_physics
  use mod_ghostcells_update, only: getbc
  use mod_trac, only: initialize_trac_after_settree
  use mod_comm_lib, only: comm_start, comm_finalize, mpistop

  double precision :: time0, time_in
  logical, save :: part_file_exists=.false.

  call comm_start()

  time0 = mpi_wtime()
  time_advance = .false.
  time_bc = zero

  ! read command line arguments first
  call read_arguments()

  ! init_convert is called before usr_init, as the user might associate a convert method
  call init_convert()
  ! the usr_init routine should load a physics module (a minimal user module is
  ! sketched after this listing)
  call usr_init()

  call initialize_amrvac()

  if (restart_from_file /= undefined) then
     ! restart from a previous run, or convert its dat file
     ! get input data from previous AMRVAC run

     ! read in dat file
     call read_snapshot()

     ! rewrite the it=0 snapshot when restarting from an it=0 state
     if(it==0.and.itsave(1,2)==0) snapshotnext=snapshotnext-1
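     ! (read_snapshot advances snapshotnext past the file just read; stepping it
     ! back lets the rewritten it=0 snapshot reuse that number)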

     if (reset_time) then
        ! reset it and global time to original values
        it = it_init
        global_time = time_init
        ! reset snapshot number
        snapshotnext=0
     end if

     if (reset_it) then
        ! reset it to original value
        it = it_init
     end if

     ! modify initial condition
     if (firstprocess) then
        ! update ghost cells for all need-boundary variables before modification
        call getbc(global_time,0.d0,ps,1,nwflux+nwaux)
        call modify_ic
     end if

     ! select active grids
     call selectgrids

     ! update ghost cells for all need-boundary variables
     call getbc(global_time,0.d0,ps,1,nwflux+nwaux)

     ! reset AMR grid
     if (reset_grid) then
        call settree
     else
        ! set up boundary flux conservation arrays
        if (levmax>levmin) call allocatebflux
     end if

     ! all blocks refined to the same level for output
     if(convert .and. level_io>0 .or. level_io_min.ne.1 .or. level_io_max.ne.nlevelshi) &
          call resettree_convert

     {^nooned
     ! improve initial condition after restart and modification
     call improve_initial_condition()
     }

     if(use_particles) then
        call read_particles_snapshot(part_file_exists)
        call init_gridvars()
        if (.not. part_file_exists) call particles_create()
        if(convert) then
           call handle_particles()
           call finish_gridvars()
           call time_spent_on_particles()
           call comm_finalize
           stop
        end if
     end if

     if(convert) then
        if (npe/=1.and.(.not.(index(convert_type,'mpi')>=1)) &
             .and. convert_type .ne. 'user') &
             call mpistop("non-mpi conversion only uses 1 cpu")
        if(mype==0.and.level_io>0) write(unitterm,*)'reset tree to fixed level=',level_io

        ! Optionally call a user method that can modify the grid variables
        ! before saving the converted data
        if (associated(usr_process_grid) .or. &
            associated(usr_process_global)) then
           call process(it,global_time)
        end if
        ! conversion must reuse the snapshot number just read in, hence -1
        if(snapshotnext>0) snapshotnext=snapshotnext-1

        if(associated(phys_special_advance)) then
           ! e.g. calculate MF velocity from magnetic field
           call phys_special_advance(global_time,ps)
        end if

        call generate_plotfile
        call comm_finalize
        stop
     end if

  else

     ! form and initialize all grids at level one
     call initlevelone

     ! set up and initialize finer level grids, if needed
     call settree

     {^nooned
     ! improve initial condition
     call improve_initial_condition()
     }

     ! select active grids
     call selectgrids

     if (use_particles) then
        call init_gridvars()
        call particles_create()
     end if

  end if

  ! initialize something based on tree information
  if(phys_trac) call initialize_trac_after_settree

  if (mype==0) then
     print*,'-------------------------------------------------------------------------------'
     write(*,'(a,f17.3,a)')' Startup phase took : ',mpi_wtime()-time0,' sec'
     print*,'-------------------------------------------------------------------------------'
  end if

  ! an interface to allow the user to do special things before the main loop
  if (associated(usr_before_main_loop)) &
       call usr_before_main_loop()

  ! do time integration of all grids on all levels
  call timeintegration()

  if (mype==0) then
     print*,'-------------------------------------------------------------------------------'
     write(*,'(a,f17.3,a)')' Finished AMRVAC in : ',mpi_wtime()-time0,' sec'
     print*,'-------------------------------------------------------------------------------'
  end if

  call comm_finalize

contains

  subroutine timeintegration()
    use mod_timing
    use mod_forest, only: nleafs_active
    use mod_input_output, only: saveamrfile
    use mod_advance, only: advance, process_advanced
    use mod_dt, only: setdt

    integer :: level, ifile, fixcount, ncells_block, igrid, iigrid
    integer(kind=8) :: ncells_update
    logical :: crashall
    double precision :: time_last_print, time_write0, time_write, time_before_advance, dt_loop

    time_in=mpi_wtime()
    time_last_print = -bigdouble
    fixcount=1

    do ifile=nfile,1,-1
       if(resume_previous_run) then
          tsavelast(ifile)=aint((global_time+smalldouble)/dtsave(ifile))*dtsave(ifile)
          itsavelast(ifile)=it/ditsave(ifile)*ditsave(ifile)
       else
          tsavelast(ifile)=global_time
          itsavelast(ifile)=it
       end if
    end do

    ! the next two are used to keep track of the performance during runtime:
    ittimelast=it
    timelast=mpi_wtime()

    ! ------ start of integration loop ------
    if (mype==0) then
       write(*, '(A,ES9.2,A)') ' Start integrating, print status every ', &
            time_between_print, ' seconds'
       write(*, '(A4,A10,A12,A12,A12)') ' #', 'it', 'time', 'dt', 'wc-time(s)'
    end if

    timeloop0=mpi_wtime()
    time_bc=0.d0
    time_write=0.d0
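    ! the LASY pattern {(ixghi^d-2*nghostcells)*} below expands to the product,
    ! over all dimensions, of the ghost-cell-free block extent, i.e. the number
    ! of physical cells updated per block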
    ncells_block={(ixghi^d-2*nghostcells)*}
    ncells_update=0
    dt_loop=0.d0

    time_advance=.true.

    time_evol : do

       time_before_advance=mpi_wtime()
       ! set time step
       call setdt()
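       ! (setdt sets dt for all levels between levmin and levmax; with dtpar>0
       ! the fixed dtpar is used instead)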

       ! Optionally call a user method that can modify the grid variables at the
       ! beginning of a time step
       if (associated(usr_process_grid) .or. &
           associated(usr_process_global)) then
          call process(it,global_time)
       end if

       ! Check if output needs to be written
       do ifile=nfile,1,-1
          save_file(ifile) = timetosave(ifile)
       end do

       timeio0=mpi_wtime()

       if (timeio0 - time_last_print > time_between_print) then
          time_last_print = timeio0
          if (mype == 0) then
             write(*, '(A4,I10,ES12.4,ES12.4,ES12.4)') " #", &
                  it, global_time, dt, timeio0 - time_in
          end if
       end if

       ! output data
       if (any(save_file)) then
          if(associated(usr_modify_output)) then
             ! Users can modify or set variables before output is written
             do iigrid=1,igridstail; igrid=igrids(iigrid);
                ^d&dxlevel(^d)=rnode(rpdx^d_,igrid);
                block=>ps(igrid)
                call usr_modify_output(ixg^ll,ixm^ll,global_time,ps(igrid)%w,ps(igrid)%x)
             end do
          end if
          time_write=0.d0
          do ifile=nfile,1,-1
             if (save_file(ifile)) then
                time_write0=mpi_wtime()
                call saveamrfile(ifile)
                time_write=time_write+mpi_wtime()-time_write0
             end if
          end do
       end if

       ! output a snapshot when the user writes a file named 'savenow' in the
       ! same folder as the executable amrvac
       if (mype==0) inquire(file='savenow',exist=save_now)
       if (npe>1) call mpi_bcast(save_now,1,mpi_logical,0,icomm,ierrmpi)

       if (save_now) then
          if(mype==0) write(*,'(a,i7,a,i7,a,es12.4)') ' save a snapshot No.',&
               snapshotnext,' at it=',it,' global_time=',global_time
          call saveamrfile(1)
          call saveamrfile(2)
          call mpi_file_delete('savenow',mpi_info_null,ierrmpi)
       end if
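       ! (from a shell in the run directory, e.g. "touch savenow"; the file is
       ! removed again via mpi_file_delete once the snapshot has been written)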
       timeio_tot=timeio_tot+mpi_wtime()-timeio0

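       ! budget check for a graceful stop: the elapsed time so far, plus one
       ! more loop iteration, plus a fourfold margin on the last write cost,
       ! must still fit within wall_time_max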
       pass_wall_time=mpi_wtime()-time0+dt_loop+4.d0*time_write >= wall_time_max

       ! exit time loop if time is up
       if (it>=it_max .or. global_time>=time_max .or. pass_wall_time .or. final_dt_exit) exit time_evol

       ! solving equations
       call advance(it)

       ! if unphysical values were met, output the last good state and stop the run
       call mpi_allreduce(crash,crashall,1,mpi_logical,mpi_lor,icomm,ierrmpi)
       if (crashall) then
          call saveamrfile(1)
          call saveamrfile(2)
          if(mype==0) write(*,*) "Error: small value encountered, run crashed."
          call mpi_abort(icomm, iigrid, ierrmpi)
       end if

       ! Optionally call a user method that can modify the grid variables at the
       ! end of a time step: this is for two-way coupling to PIC, e.g.
       if (associated(usr_process_adv_grid) .or. &
           associated(usr_process_adv_global)) then
          call process_advanced(it,global_time)
       end if

       ! update AMR mesh and tree
       timegr0=mpi_wtime()
       if(ditregrid>1) then
          if(fixcount<ditregrid) then
             fixcount=fixcount+1
          else
             if (refine_max_level>1 .and. .not.fixgrid()) call resettree
             fixcount=1
          end if
       else
          if (refine_max_level>1 .and. .not.fixgrid()) call resettree
       end if
       timegr_tot=timegr_tot+(mpi_wtime()-timegr0)

       ! update time variables
       it = it + 1
       global_time = global_time + dt

       if(it>9000000)then
          it = slowsteps+10
          itsavelast(:)=0
       end if

       ! count updated cells
       ncells_update=ncells_update+ncells_block*nleafs_active

       ! wall-clock time elapsed in this loop iteration
       dt_loop=mpi_wtime()-time_before_advance
    end do time_evol

    if(use_particles) then
       call finish_gridvars()
    end if

    time_advance=.false.

    timeloop=mpi_wtime()-timeloop0

    if (mype==0) then
       write(*,'(a,f12.3,a)')' Total timeloop took : ',timeloop,' sec'
       write(*,'(a,f12.3,a)')' Time spent on AMR : ',timegr_tot,' sec'
       write(*,'(a,f12.2,a)')' Percentage: ',100.0*timegr_tot/timeloop,' %'
       write(*,'(a,f12.3,a)')' Time spent on IO in loop : ',timeio_tot,' sec'
       write(*,'(a,f12.2,a)')' Percentage: ',100.0*timeio_tot/timeloop,' %'
       write(*,'(a,f12.3,a)')' Time spent on ghost cells : ',time_bc,' sec'
       write(*,'(a,f12.2,a)')' Percentage: ',100.0*time_bc/timeloop,' %'
       write(*,'(a,f12.3,a)')' Time spent on computing : ',timeloop-timeio_tot-timegr_tot-time_bc,' sec'
       write(*,'(a,f12.2,a)')' Percentage: ',100.0*(timeloop-timeio_tot-timegr_tot-time_bc)/timeloop,' %'
       write(*,'(a,es12.3 )')' Cells updated / proc / sec : ',dble(ncells_update)*dble(nstep)/dble(npe)/timeloop
    end if

    ! output end state
    timeio0=mpi_wtime()
    do ifile=nfile,1,-1
       if(itsavelast(ifile)<it) call saveamrfile(ifile)
    end do
    if (mype==0) call mpi_file_close(log_fh,ierrmpi)
    timeio_tot=timeio_tot+(mpi_wtime()-timeio0)

    if (mype==0) then
       write(*,'(a,f12.3,a)')' Total time spent on IO : ',timeio_tot,' sec'
       write(*,'(a,f12.3,a)')' Total timeintegration took : ',mpi_wtime()-time_in,' sec'
       write(*, '(A4,I10,ES12.3,ES12.3,ES12.3)') " #", &
            it, global_time, dt, mpi_wtime()-time_in
    end if

    {#IFDEF RAY
    call time_spent_on_rays
    }

    if(use_particles) call time_spent_on_particles

    if (use_multigrid) call mg_timers_show(mg)
  end subroutine timeintegration

  !> Save times are defined by either tsave(isavet(ifile),ifile) or
  !> itsave(isaveit(ifile),ifile) or dtsave(ifile) or ditsave(ifile).
  !> tsavestart(ifile) determines the first save time; it only affects
  !> output times determined by dtsave(ifile).
  !> Other conditions may be included.
  logical function timetosave(ifile)

    integer :: ifile
    logical :: oksave

    oksave=.false.
    if (it==itsave(isaveit(ifile),ifile)) then
       oksave=.true.
       isaveit(ifile)=isaveit(ifile)+1
    end if
    if (it==itsavelast(ifile)+ditsave(ifile)) oksave=.true.

    if (global_time>=tsave(isavet(ifile),ifile).and.global_time-dt<tsave(isavet(ifile),ifile)) then
       oksave=.true.
       isavet(ifile)=isavet(ifile)+1
    end if

    if(global_time>=tsavestart(ifile)-smalldouble)then
       if (global_time>=tsavelast(ifile)+dtsave(ifile)-smalldouble)then
          oksave=.true.
          n_saves(ifile) = n_saves(ifile) + 1
       endif
    endif

    if (oksave) then
       tsavelast(ifile) =global_time
       itsavelast(ifile)=it
    end if
    timetosave=oksave

    return
  end function timetosave

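  ! Worked example for timetosave above: with dtsave(ifile)=0.1d0,
  ! tsavestart(ifile)=0.d0 and tsavelast(ifile)=0.2d0, the dtsave branch
  ! triggers on the first call with global_time>=0.3d0-smalldouble, after
  ! which tsavelast(ifile) is advanced to that global_time.
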
  !> Return true if the AMR grid should not be adapted any more. This is
  !> controlled by tfixgrid or itfixgrid. Other conditions may be included.
  logical function fixgrid()

    fixgrid = global_time>=tfixgrid .or. it>=itfixgrid

  end function fixgrid

end program amrvac
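
The listing above requires a user-supplied mod_usr whose usr_init loads a physics module and may associate the optional usr_* hooks that the program tests with associated(). Below is a minimal sketch, not part of amrvac.t itself: it assumes the scalar advection (rho) physics module and the hook names from mod_usr_methods; initonegrid_usr is a hypothetical initial-condition routine.

module mod_usr
  ! assumed: the scalar advection physics module shipped with MPI-AMRVAC
  use mod_rho
  ! assumed: mod_usr_methods holds the usr_* procedure pointers seen above
  use mod_usr_methods

  implicit none

contains

  !> called by program amrvac before initialize_amrvac(); must activate a
  !> physics module and may associate any of the optional usr_* hooks
  subroutine usr_init()
    ! register a hypothetical initial-condition hook
    usr_init_one_grid => initonegrid_usr
    ! load the rho (advection) physics module
    call rho_activate()
  end subroutine usr_init

  !> hypothetical initial condition: uniform density on each new grid block
  subroutine initonegrid_usr(ixG^L,ix^L,w,x)
    integer, intent(in)             :: ixG^L, ix^L
    double precision, intent(in)    :: x(ixG^S,1:ndim)
    double precision, intent(inout) :: w(ixG^S,1:nw)

    w(ix^S,rho_) = 1.0d0
  end subroutine initonegrid_usr

end module mod_usr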