potential_workers_mpi Module Procedure

module subroutine potential_workers_mpi(it, t, dt, sig0, sigP, sigH, incap, vs2, vs3, vn2, vn3, sourcemlat, B1, x, potsolve, flagcap, E1, E2, E3, J1, J2, J3)

Worker-side MPI comm./solve routine for potential. This version includes the polarization-current time-derivative and convective parts in the matrix solution. The state variables vs2 and vs3 include ghost cells. For now the polarization terms are passed back to the main function even though they are not used (they may be in the future).
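For reference, the polarization current whose time-derivative and convective parts enter the matrix solution can be written as follows (a sketch; identifying incap with the inertial capacitance rho_m/B^2 is an assumption, not stated in this routine):

$$\mathbf{J}_P = \frac{\rho_m}{B^2}\,\frac{D\mathbf{E}_\perp}{Dt}, \qquad \frac{D}{Dt} = \frac{\partial}{\partial t} + \mathbf{v}\cdot\nabla$$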

Wind-driven x2 and x3 current source terms; note that all workers already have a copy of these.
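A minimal sketch of how such wind source terms are commonly formed (the local names E2dyn, E3dyn, J2wind, J3wind and the sign conventions are assumptions, not taken from this routine):

    ! Dynamo field v_n x B for B = B1*e1 (B1 carries ghost cells, so slice the interior)
    E2dyn = vn3*B1(1:lx1, 1:lx2, 1:lx3)
    E3dyn = -vn2*B1(1:lx1, 1:lx2, 1:lx3)
    ! Pedersen and Hall wind-driven current source terms
    J2wind = sigP*E2dyn - sigH*E3dyn
    J3wind = sigH*E2dyn + sigP*E3dyn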

Neumann conditions: incorporate a source term and execute the solve.
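On the worker side, participation in the actual matrix solve is delegated to elliptic_workers (see the Calls section below); a minimal sketch, assuming the worker entry point takes no arguments because the distributed MUMPS solve is driven from root:

    ! Workers join the distributed MUMPS factorization/solve initiated by root
    call elliptic_workers()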

Workers don't have access to the boundary conditions unless root sends them.
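A sketch of the corresponding worker-side receive using the bcast_recv interface from the Calls section (the slab variable names and tag values are hypothetical):

    ! Receive this worker's piece of the root-broadcast boundary conditions
    call bcast_recv(Vminx1slab, tag%Vminx1)
    call bcast_recv(Vmaxx1slab, tag%Vmaxx1)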

We need to pick out the ExB drift here (i.e. the drift from the highest altitudes); this is only valid for Cartesian grids, so it's okay for the foreseeable future.
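As a sketch of what picking out the top-of-grid drift might look like (the species index and the v2top/v3top names are assumptions; vs2 and vs3 carry ghost cells, hence the explicit interior bounds):

    ! Convection (ExB) drift sampled at the highest x1 index of the grid
    v2top = vs2(lx1, 1:lx2, 1:lx3, 1)
    v3top = vs3(lx1, 1:lx2, 1:lx3, 1)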

Dirichlet conditions: since this solve is field-integrated, we just copy the user-specified BCs to the other locations along the field line.
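A minimal sketch of that replication, assuming a 2D user BC slab Phislab and a full-grid array Phi (both hypothetical names):

    ! Field-integrated solve: the potential is uniform along x1 (the field line),
    ! so the user-specified boundary slab is copied to every x1 level.
    do ix1 = 1, lx1
      Phi(ix1, :, :) = Phislab
    end do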

ZZZ - the conductivities need to be properly scaled here, as does the source term. Perhaps leave this broken for now, since there is no real plan to use this code path.


Store the previous-time total fields before updating the electric fields with the new potential (the old fields are used to calculate the polarization current). An earlier formulation of this block caused a major memory leak, possibly from passing an arithmetic expression as an argument; it is noted here as a 'lesson learned' (or is it a gfortran bug?).
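A sketch of the save-then-difference pattern described above (E2prev, E3prev, J2pol, J3pol are assumed names). Assigning to explicit temporaries, rather than passing arithmetic expressions as arguments, also sidesteps the failure mode noted:

    ! Save the old total fields before the potential update
    E2prev = E2
    E3prev = E3
    ! ... update E2, E3 from the newly solved potential ...
    ! Polarization current from the local time derivative of E
    J2pol = incap*(E2 - E2prev)/dt
    J3pol = incap*(E3 - E3prev)/dt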

print *, 'Max E2,3 BG and response values are: ', maxval(abs(E02)), maxval(abs(E03)), maxval(abs(E2)), maxval(abs(E3))


Neumann conditions: this is boundary-location agnostic, since both the bottom and top FACs are known; they have to be loaded into Vmaxx1 and Vminx1. For numerical purposes we prefer to integrate from the location of nonzero current (usually the highest altitude in an open grid). For a Cartesian grid in the northern hemisphere (assumed), the x1 direction points against the magnetic field...
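A sketch of the top-referenced field-line integration this implies, using the integral3D1_curv_alt interface from the Calls section (the integrand name divJperp and the signature shown are assumptions):

    ! Accumulate parallel current along x1 referenced to the top boundary,
    ! where the nonzero FAC is imposed.
    J1 = integral3D1_curv_alt(divJperp, x, 1, lx1)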

Dirichlet conditions: we need to integrate from the lowest altitude (where the FAC is known to be zero; note this is not necessarily the logical bottom of the grid) upwards, to where it isn't.
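For contrast with the Neumann case, a sketch of the bottom-up integration using the integral3D1 interface (again, the integrand and the assumed signature are guesses based on the call graph):

    ! Accumulate parallel current upward from the lowest altitude, where the
    ! FAC is known to be zero.
    J1 = integral3D1(divJperp, x, 1, lx1)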

ZZZ - this really needs to be the current at the "top".

Arguments

Type / Intent / Attributes :: Name

integer, intent(in) :: it
real(kind=wp), intent(in) :: t
real(kind=wp), intent(in) :: dt
real(kind=wp), intent(in), dimension(:,:,:) :: sig0
real(kind=wp), intent(in), dimension(:,:,:) :: sigP
real(kind=wp), intent(in), dimension(:,:,:) :: sigH
real(kind=wp), intent(in), dimension(:,:,:) :: incap
real(kind=wp), intent(in), dimension(-1:,-1:,-1:,:) :: vs2
real(kind=wp), intent(in), dimension(-1:,-1:,-1:,:) :: vs3
real(kind=wp), intent(in), dimension(:,:,:) :: vn2
real(kind=wp), intent(in), dimension(:,:,:) :: vn3
real(kind=wp), intent(in) :: sourcemlat
real(kind=wp), intent(in), dimension(-1:,-1:,-1:) :: B1
type(curvmesh), intent(in) :: x
integer, intent(in) :: potsolve
integer, intent(in) :: flagcap
real(kind=wp), intent(out), dimension(:,:,:) :: E1
real(kind=wp), intent(out), dimension(:,:,:) :: E2
real(kind=wp), intent(out), dimension(:,:,:) :: E3
real(kind=wp), intent(out), dimension(:,:,:) :: J1
real(kind=wp), intent(out), dimension(:,:,:) :: J2
real(kind=wp), intent(out), dimension(:,:,:) :: J3
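A hedged usage sketch (the actual caller is electrodynamics_curv, per the Called by section; all surrounding variables are assumed to be declared and filled as in the argument list above):

    call potential_workers_mpi(it, t, dt, sig0, sigP, sigH, incap, vs2, vs3, &
                               vn2, vn3, sourcemlat, B1, x, potsolve, flagcap, &
                               E1, E2, E3, J1, J2, J3)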

Calls

potential_workers_mpi calls:

- bcast_recv (interface: bcast_recv1D_23, bcast_recv2D_23, bcast_recv3D_23, bcast_recv4D_23, built on mpi_recv)
- elliptic_workers (which calls mumps_exec, quiet_mumps, check_mumps_status)
- mpi_recv
- halo_pot (via the halo / halo_23 interface, using mpi_isend, mpi_irecv, mpi_waitall, grid2ID)
- div3D (div3D_curv_23)
- integral3D1 (integral3D1_curv) and integral3D1_curv_alt
- gather_send (gather_send2D_23, gather_send3D_23, gather_send4D_23, built on mpi_send)
- grad3D1, grad3D2, grad3D3 (grad3D1_curv_23, grad3D2_curv_23, grad3D3_curv_23)

Called by

potential_workers_mpi is reached through its interface from electrodynamics_curv, which is called via the electrodynamics interface from the Gemini3D main program.

Contents

None