106. Using PETSc

PETSc, the Portable, Extensible Toolkit for Scientific Computation: https://www.mcs.anl.gov/petsc

PETSc is the standard library for parallel solvers. Many other libraries offer interfaces to PETSc.

In this section we learn how to interface with PETSc from NGSolve.

from ipyparallel import Cluster
c = await Cluster(engines="mpi").start_and_connect(n=4, activate=True)
c.ids
Starting 4 engines with <class 'ipyparallel.cluster.launcher.MPIEngineSetLauncher'>
[0, 1, 2, 3]
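
With activate=True, the %%px cell magic executes a cell on all MPI engines of the cluster in parallel.
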
%%px
from ngsolve import *
comm = MPI.COMM_WORLD
ngmesh = unit_square.GenerateMesh(maxh=0.1, comm=comm)
    
for l in range(3):
    ngmesh.Refine()
mesh = Mesh(ngmesh)

We import PETSc via its Python interface petsc4py:

%%px
import numpy as np
import petsc4py.PETSc as psc
%%px
fes = H1(mesh, order=1)
print ("ndof =", fes.ndof, "ndglob =", fes.ndofglobal)
u,v = fes.TnT()
a = BilinearForm(grad(u)*grad(v)*dx+u*v*ds).Assemble()
f = LinearForm(x*v*dx).Assemble()
gfu = GridFunction(fes)
[stdout:0] ndof = 0 ndglob = 7521
[stdout:3] ndof = 2565 ndglob = 7521
[stdout:1] ndof = 2509 ndglob = 7521
[stdout:2] ndof = 2593 ndglob = 7521
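
Note that rank 0 reports ndof = 0: in this parallel setup the master process holds no mesh elements and therefore no local dofs, while ndofglobal returns the global number of dofs on every rank.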

We convert the local sparse matrix to a local PETSc AIJ matrix:

%%px
locmat = a.mat.local_mat
val,col,ind = locmat.CSR()
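# PETSc is usually built with 32-bit integer indices, so convert the row-pointer array to int32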
ind = np.array(ind, dtype='int32')
apsc_loc = psc.Mat().createAIJ(size=(locmat.height, locmat.width), csr=(ind,col,val), comm=MPI.COMM_SELF)

The NGSolve ParallelDofs object corresponds to a PETSc IndexSet object, which we create next. In PETSc, dofs are enumerated globally, which is not the case in NGSolve. For this purpose, the ParallelDofs class can generate a globally consistent enumeration of dofs. The generated globnums array contains the global dof numbers of the local dofs.

%%px
pardofs = fes.ParallelDofs()
globnums, nglob = pardofs.EnumerateGlobally() 
# print (list(globnums))

iset = psc.IS().createGeneral (indices=globnums, comm=comm)
lgmap = psc.LGMap().createIS(iset)
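
As a quick sanity check (not part of the original notebook), the enumeration should yield one global number per local dof, and the global count should agree with fes.ndofglobal:

%%px
# hedged sanity check: one global number per local dof,
# and the global count matches NGSolve's ndofglobal
print ("len(globnums) =", len(globnums), " fes.ndof =", fes.ndof)
print ("nglob =", nglob, " fes.ndofglobal =", fes.ndofglobal)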

We can now create the global matrix using our local-to-global map:

%%px
mat = psc.Mat().create(comm=comm)
mat.setSizes(size=(nglob,nglob), bsize=1)
mat.setType(psc.Mat.Type.IS)
mat.setLGMap(lgmap)
mat.setISLocalMat(apsc_loc)
mat.assemble()
mat.convert("mpiaij")
Out[0:6]: <petsc4py.PETSc.Mat at 0x115a2afc0>
Out[3:6]: <petsc4py.PETSc.Mat at 0x116077010>
Out[2:6]: <petsc4py.PETSc.Mat at 0x1147935b0>
Out[1:6]: <petsc4py.PETSc.Mat at 0x114952f70>
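
If you want to inspect the parallel layout, PETSc matrices report their global and per-process sizes; a small check, not in the original notebook:

%%px
# the global size should be (nglob, nglob); the local row distribution is chosen by PETSc
print ("global size:", mat.getSize(), " local size:", mat.getLocalSize())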

We copy the right-hand side vector to a PETSc vector. PETSc vectors (which know about the global connectivity) are created from the parallel PETSc matrix. We then copy the NGSolve vector into the corresponding local sub-vector:

%%px

f.vec.Cumulate()
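# after Cumulate, every process stores the full (summed) value of each of its dofs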

v1, v2 = mat.createVecs()

v2loc = v2.getSubVector(iset)
v2loc.getArray()[:] = f.vec 
v2.restoreSubVector(iset, v2loc)
%%px
ksp = psc.KSP()
ksp.create()
ksp.setOperators(mat)
ksp.setType(psc.KSP.Type.CG)
ksp.setNormType(psc.KSP.NormType.NORM_NATURAL)
ksp.getPC().setType("gamg")
ksp.setTolerances(rtol=1e-6, atol=0, divtol=1e16, max_it=400)
ksp.solve(v2,v1)   

print ("petsc-its =", ksp.its)
[stdout:2] petsc-its = 8
[stdout:3] petsc-its = 8
[stdout:0] petsc-its = 8
[stdout:1] petsc-its = 8
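
Instead of hard-coding the solver and preconditioner, they can also be configured through PETSc's options database and setFromOptions. The following sketch assumes the same operator and vectors as above; the option values are only examples:

%%px
# hedged sketch: an equivalent solve configured via the PETSc options database
opts = psc.Options()
opts["ksp_type"] = "cg"
opts["pc_type"] = "gamg"
opts["ksp_rtol"] = 1e-6
ksp2 = psc.KSP().create(comm=comm)
ksp2.setOperators(mat)
ksp2.setFromOptions()
ksp2.solve(v2, v1)
print ("petsc-its =", ksp2.its, " converged reason =", ksp2.getConvergedReason())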
%%px
v1loc = v1.getSubVector(iset)
gfu.vec[:] = v1loc.getArray()
%%px
print ("<f,u> =", InnerProduct(f.vec, gfu.vec))
[stdout:3] <f,u> = 0.07795635130813684
[stdout:2] <f,u> = 0.07795635130813684
[stdout:1] <f,u> = 0.07795635130813684
[stdout:0] <f,u> = 0.07795635130813684
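
Finally, we fetch the computed solution from the engines and visualize it on the client:
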
from ngsolve.webgui import Draw
gfu = c[:]["gfu"]
Draw (gfu[0], order=1);
c.shutdown(hub=True)