Merge pull request #58 from fverdugo/francesc

More changes in jacobi_notebook
Francesc Verdugo 2024-09-23 16:34:25 +02:00 committed by GitHub
commit b5efc3dfee
3 changed files with 86 additions and 50 deletions

View File

@@ -292,72 +292,108 @@ end
 ```julia
-function jacobi_mpi(n,niters)
-    comm = MPI.COMM_WORLD
-    nranks = MPI.Comm_size(comm)
-    rank = MPI.Comm_rank(comm)
-    if mod(n,nranks) != 0
-        println("n must be a multiple of nranks")
-        MPI.Abort(comm,1)
-    end
-    load = div(n,nranks)
-    u = zeros(load+2)
-    u[1] = -1
-    u[end] = 1
-    u_new = copy(u)
+function jacobi_mpi(n,niters,comm)
+    u, u_new = init(n,comm)
+    load = length(u)-2
+    rank = MPI.Comm_rank(comm)
+    nranks = MPI.Comm_size(comm)
+    nreqs = 2*((rank != 0) + (rank != (nranks-1)))
+    reqs = MPI.MultiRequest(nreqs)
     for t in 1:niters
-        reqs = MPI.Request[]
+        ireq = 0
         if rank != 0
             neig_rank = rank-1
-            req = MPI.Isend(view(u,2:2),comm,dest=neig_rank,tag=0)
-            push!(reqs,req)
-            req = MPI.Irecv!(view(u,1:1),comm,source=neig_rank,tag=0)
-            push!(reqs,req)
+            u_snd = view(u,2:2)
+            u_rcv = view(u,1:1)
+            dest = neig_rank
+            source = neig_rank
+            ireq += 1
+            MPI.Isend(u_snd,comm,reqs[ireq];dest)
+            ireq += 1
+            MPI.Irecv!(u_rcv,comm,reqs[ireq];source)
         end
         if rank != (nranks-1)
             neig_rank = rank+1
-            s = load+1
-            r = load+2
-            req = MPI.Isend(view(u,s:s),comm,dest=neig_rank,tag=0)
-            push!(reqs,req)
-            req = MPI.Irecv!(view(u,r:r),comm,source=neig_rank,tag=0)
-            push!(reqs,req)
+            u_snd = view(u,(load+1):(load+1))
+            u_rcv = view(u,(load+2):(load+2))
+            dest = neig_rank
+            source = neig_rank
+            ireq += 1
+            MPI.Isend(u_snd,comm,reqs[ireq];dest)
+            ireq += 1
+            MPI.Irecv!(u_rcv,comm,reqs[ireq];source)
         end
+        # Update interior cells
         for i in 3:load
             u_new[i] = 0.5*(u[i-1]+u[i+1])
         end
+        # Wait for the communications to finish
         MPI.Waitall(reqs)
+        # Update boundaries
         for i in (2,load+1)
             u_new[i] = 0.5*(u[i-1]+u[i+1])
         end
         u, u_new = u_new, u
     end
-    # Gather the results
-    if rank != 0
-        lb = 2
-        ub = load+1
-        MPI.Send(view(u,lb:ub),comm,dest=0)
-        u_all = zeros(0) # This will never be used
-    else
-        u_all = zeros(n+2)
-        # Set boundary
-        u_all[1] = -1
-        u_all[end] = 1
-        # Set data for rank 0
-        lb = 2
-        ub = load+1
-        u_all[lb:ub] = view(u,lb:ub)
-        # Set data for other ranks
-        for other_rank in 1:(nranks-1)
-            lb += load
-            ub += load
-            MPI.Recv!(view(u_all,lb:ub),comm;source=other_rank)
-        end
-    end
-    return u_all
+    return u
 end
 ```
+
+### Exercise 2
+
+```julia
+function jacobi_mpi(n,niters,tol,comm) # new tol arg
+    u, u_new = init(n,comm)
+    load = length(u)-2
+    rank = MPI.Comm_rank(comm)
+    nranks = MPI.Comm_size(comm)
+    nreqs = 2*((rank != 0) + (rank != (nranks-1)))
+    reqs = MPI.MultiRequest(nreqs)
+    for t in 1:niters
+        ireq = 0
+        if rank != 0
+            neig_rank = rank-1
+            u_snd = view(u,2:2)
+            u_rcv = view(u,1:1)
+            dest = neig_rank
+            source = neig_rank
+            ireq += 1
+            MPI.Isend(u_snd,comm,reqs[ireq];dest)
+            ireq += 1
+            MPI.Irecv!(u_rcv,comm,reqs[ireq];source)
+        end
+        if rank != (nranks-1)
+            neig_rank = rank+1
+            u_snd = view(u,(load+1):(load+1))
+            u_rcv = view(u,(load+2):(load+2))
+            dest = neig_rank
+            source = neig_rank
+            ireq += 1
+            MPI.Isend(u_snd,comm,reqs[ireq];dest)
+            ireq += 1
+            MPI.Irecv!(u_rcv,comm,reqs[ireq];source)
+        end
+        MPI.Waitall(reqs)
+        # Compute the max diff in the current
+        # rank while doing the local update
+        mydiff = 0.0
+        for i in 2:load+1
+            u_new[i] = 0.5*(u[i-1]+u[i+1])
+            diff_i = abs(u_new[i] - u[i])
+            mydiff = max(mydiff,diff_i)
+        end
+        # Now we need to find the global diff
+        diff_ref = Ref(mydiff)
+        MPI.Allreduce!(diff_ref,max,comm)
+        diff = diff_ref[]
+        # If the global diff is below tol, stop
+        if diff < tol
+            return u_new
+        end
+        u, u_new = u_new, u
+    end
+    return u
+end
+```
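The rewritten `jacobi_mpi` assumes an `init(n,comm)` helper that is defined elsewhere in the notebook and is not part of this hunk. A minimal sketch consistent with the initialization code deleted above (the divisibility check, the `load+2` local array with one ghost cell per side, and the ±1 boundary values) could look like this:

```julia
# Hypothetical sketch of init(n,comm); the notebook's actual helper
# is defined outside this hunk and may differ.
function init(n,comm)
    nranks = MPI.Comm_size(comm)
    if mod(n,nranks) != 0
        println("n must be a multiple of nranks")
        MPI.Abort(comm,1)
    end
    load = div(n,nranks)  # interior cells owned by this rank
    u = zeros(load+2)     # plus one ghost cell on each side
    u[1] = -1             # left boundary condition
    u[end] = 1            # right boundary condition
    u_new = copy(u)
    return u, u_new
end
```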

View File

@@ -281,7 +281,7 @@
 "outputs": [],
 "source": [
  "n = 5\n",
- "tol = 1e-9\n",
+ "tol = 1e-10\n",
  "jacobi_with_tol(n,tol)"
 ]
 },
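The `jacobi_with_tol` called in this cell is defined earlier in the notebook; its stopping criterion (the maximum absolute change between two consecutive iterates) is the one mirrored in the MPI version of Exercise 2 above. A rough serial sketch, for orientation only (the iteration cap `niters=10000` is an assumption, not the notebook's value):

```julia
# Rough serial sketch of jacobi_with_tol; the notebook's actual
# implementation may differ in details.
function jacobi_with_tol(n,tol;niters=10000)
    u = zeros(n+2)
    u[1] = -1
    u[end] = 1
    u_new = copy(u)
    for t in 1:niters
        mydiff = 0.0
        for i in 2:n+1
            u_new[i] = 0.5*(u[i-1]+u[i+1])
            mydiff = max(mydiff,abs(u_new[i]-u[i]))
        end
        if mydiff < tol  # stop when the update falls below tol
            return u_new
        end
        u, u_new = u_new, u
    end
    return u
end
```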
@@ -1531,7 +1531,7 @@
 "    u = jacobi_mpi(n,niters,comm)\n",
 "    @show u\n",
 "end\n",
-"run(`$(mpiexec()) -np 1 julia --project=. -e $code`);"
+"run(`$(mpiexec()) -np 3 julia --project=. -e $code`);"
 ]
 },
 {
@@ -1914,7 +1914,7 @@
 "source": [
  "### Exercise 2\n",
  "\n",
- "In the parallel implementation of the Jacobi method, we assumed that the method runs for a given number of iterations. In function, `jacobi_with_tol` at the beginning of the notebook shows how the Jacobi iterations can be stopped when the difference between iterations is small. Implement a parallel version of this function. Start with the in Exercise 1 and add the stopping criterion implemented in `jacobi_with_tol`. Use a text editor and the Julia REPL. Do not try to implement the code in a notebook."
+ "In the parallel implementation of the Jacobi method, we assumed that the method runs for a given number of iterations. The function `jacobi_with_tol` at the beginning of the notebook shows how the Jacobi iterations can be stopped when the difference between iterations is small. Implement a parallel version of this function. Start with the code in Exercise 1 and add the stopping criterion implemented in `jacobi_with_tol`. Use a text editor and the Julia REPL. Do not try to implement the code in a notebook."
 ]
 },
 {
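Since the exercise asks for a text editor plus the Julia REPL rather than the notebook, a minimal driver for the Exercise 2 solution might look as follows. The file name `ex2.jl` and the values of `n`, `niters`, `tol`, and the rank count are illustrative assumptions:

```julia
# Hypothetical driver (ex2.jl); assumes init and the Exercise 2
# jacobi_mpi are defined in this file or included before this point.
# Run with, e.g.: mpiexec -np 3 julia --project=. ex2.jl
using MPI
MPI.Init()
comm = MPI.COMM_WORLD
n = 12        # must be a multiple of the number of ranks
niters = 1000 # upper bound on the number of iterations
tol = 1e-10   # stopping tolerance, as in jacobi_with_tol
u = jacobi_mpi(n,niters,tol,comm)
@show u
```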

View File

@@ -1534,7 +1534,7 @@
 ],
 "metadata": {
  "kernelspec": {
-  "display_name": "Julia 1.9.1",
+  "display_name": "Julia 1.9.0",
   "language": "julia",
   "name": "julia-1.9"
  },
@@ -1542,7 +1542,7 @@
   "file_extension": ".jl",
   "mimetype": "application/julia",
   "name": "julia",
-  "version": "1.9.1"
+  "version": "1.9.0"
  }
 },
 "nbformat": 4,