
8. Solving Stokes, in parallel

Starting an MPI cluster on the local machine:

from ipyparallel import Cluster
c = await Cluster(engines="mpi").start_and_connect(n=4, activate=True)
Starting 4 engines with <class 'ipyparallel.cluster.launcher.MPIEngineSetLauncher'>
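
With activate=True the %%px magic is registered, so the following cells run on all four engines. As a quick sanity check (an optional sketch), each engine can report its MPI rank:

%%px
# optional sanity check (sketch): every engine prints its rank within the MPI communicator
from mpi4py.MPI import COMM_WORLD as comm
print ("rank", comm.rank, "of", comm.size)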

Generate and distribute the mesh, then refine in parallel:

%%px
from ngsolve import *
from netgen.occ import *
from ngsolve.krylovspace import BramblePasciakCG
from mpi4py.MPI import COMM_WORLD as comm

box = Box((0,0,0), (2,0.41,0.41))
box.faces.name="wall"
box.faces.Min(X).name="inlet"
box.faces.Max(X).name="outlet"
cyl = Cylinder((0.2,0,0.2), Y, h=0.41, r=0.05)
cyl.faces.name="cyl"
shape = box-cyl

# mesh the geometry and distribute it across the MPI communicator
ngmesh = OCCGeometry(shape).GenerateMesh(maxh=0.05, comm=comm)

# uniform refinement, performed in parallel on the distributed mesh
for r in range(1): ngmesh.Refine()
mesh = Mesh(ngmesh)
print (mesh.GetNE(VOL))   # number of volume elements local to each rank
[stdout:0] 0
[stdout:3] 35872
[stdout:1] 36184
[stdout:2] 37288
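
Rank 0 acts as the master process and keeps no volume elements itself; the elements are distributed among the remaining ranks. As an optional consistency check (a sketch), the local element counts can be summed over the communicator:

%%px
# optional check (sketch): total number of volume elements over all ranks
from mpi4py.MPI import COMM_WORLD as comm
from ngsolve import VOL
printonce ("total elements:", comm.allreduce(mesh.GetNE(VOL)))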

Using parallel preconditioners from PETSc:

(many thanks to S. Zampini and U. Zerbinati for their support)

%%px
import ngsolve.ngs2petsc as n2p
import petsc4py.PETSc as psc
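
As an optional check (a sketch), one can confirm that petsc4py is available on every engine:

%%px
# optional check (sketch): report the PETSc version seen by the engines
printonce ("PETSc version:", psc.Sys.getVersion())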

A stabilized \(p=1\) method for Stokes:

\[\begin{split} K = \left( \begin{array}{cc} A & B^T \\ B & -C \end{array} \right) \qquad A = \left( \begin{array}{ccc} A_1 & \cdot & \cdot \\ \cdot & A_1 & \cdot \\ \cdot & \cdot & A_1 \\ \end{array} \right) \end{split}\]
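
Written out, the blocks assembled in the next cell are the scalar Laplacian \(A_1\), the divergence coupling \(B\), and the equal-order pressure stabilization \(C\):

\[ A_1(u_1,v_1) = \int_\Omega \nabla u_1 \cdot \nabla v_1 \, dx, \qquad B(u,q) = \int_\Omega \operatorname{div} u \, q \, dx, \qquad C(p,q) = \int_\Omega h^2 \, \nabla p \cdot \nabla q \, dx \]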
%%px
# equal-order P1-P1 spaces for velocity and pressure
V = VectorH1(mesh, order=1, dirichlet="wall|inlet|cyl")
V1 = H1(mesh, order=1, dirichlet="wall|inlet|cyl")
Q = H1(mesh, order=1)
printonce ("ndof = ", V.ndofglobal, '+', Q.ndofglobal, '=',
           V.ndofglobal+Q.ndofglobal)

u,v = V.TnT()
u1,v1 = V1.TnT()
p,q = Q.TnT()

h = specialcf.mesh_size

bfa1 = BilinearForm(InnerProduct(grad(u1),grad(v1))*dx)   # scalar Laplacian A1
bfb = BilinearForm(div(u)*q*dx).Assemble()                # divergence coupling B
bfc = BilinearForm(h*h*grad(p)*grad(q)*dx).Assemble()     # pressure stabilization C

prea1 = Preconditioner(bfa1, "gamg")   # AMG preconditioner from PETSc
bfa1.Assemble()

# make the block-diagonal A matrix and its preconditioner from the scalar blocks:
mata = sum( [Ri.T@bfa1.mat@Ri for Ri in V.restrictions] )
prea = sum( [Ei@prea1@Ei.T for Ei in V.embeddings] )

# diagonal (lumped) pressure mass matrix, inverted as Schur-complement preconditioner
bfschur = BilinearForm(p*q*dx, diagonal=True).Assemble()
preschur = bfschur.mat.Inverse()
[stdout:0] ndof =  66711 + 22237 = 88948
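
The diagonal pressure mass matrix assembled as bfschur is a standard preconditioner for the (negative) Schur complement of \(K\),

\[ S = B A^{-1} B^T + C , \]

and prea and preschur are passed as preA and preS to the Bramble-Pasciak CG solver in the next cell.
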
%%px
gfu = GridFunction(V)
gfp = GridFunction(Q)

# parabolic inflow profile on the inlet face
uin = (1.5*4*y*(0.41-y)/(0.41*0.41)*z*(0.41-z)/0.41**2, 0, 0)
gfu.Set(uin, definedon=mesh.Boundaries("inlet"))

# move the inhomogeneous boundary data to the right-hand side
resf = (-mata * gfu.vec).Evaluate()
resg = (-bfb.mat * gfu.vec).Evaluate()

# Bramble-Pasciak CG for the preconditioned saddle-point system
sol = BramblePasciakCG (A=mata, B=bfb.mat, C=bfc.mat, f=resf, g=resg,
                preA=prea, preS=preschur, maxit=500,
                printrates='\r' if comm.rank==0 else False)

# add the homogeneous correction to the boundary-data functions
gfu.vec.data += sol[0]
gfp.vec.data += sol[1]
[stdout:0] lammin/lammax =  0.5127769379395615 / 0.9981636558143341
Iteration 155 err= 7.922074290304299e-09     
gfu = c[:]["gfu"]    # gather the velocity GridFunctions from the engines
from ngsolve import *
from ngsolve.webgui import Draw
ea = { "euler_angles" : (-77, 6, 47) }
clipping = { "clipping" : {"y":1, "z":0, "function" : True } }
Draw (Norm(gfu[0]), gfu[0].space.mesh, **ea, **clipping, order=1)
Draw (Norm(gfu[2]), gfu[2].space.mesh, **ea, **clipping, order=1);
gfp = c[:]["gfp"]    # gather the pressure GridFunctions as well
Draw (gfp[0], order=1, **ea, **clipping);
c.shutdown(hub=True)