1 // ---------------------------------------------------------------------
2 //
3 // Copyright (C) 2004 - 2019 by the deal.II authors
4 //
5 // This file is part of the deal.II library.
6 //
7 // The deal.II library is free software; you can use it, redistribute
8 // it, and/or modify it under the terms of the GNU Lesser General
9 // Public License as published by the Free Software Foundation; either
10 // version 2.1 of the License, or (at your option) any later version.
11 // The full text of the license can be found in the file LICENSE.md at
12 // the top level directory of deal.II.
13 //
14 // ---------------------------------------------------------------------
15 
16 #ifndef dealii_petsc_solver_h
17 #  define dealii_petsc_solver_h
18 
19 
20 #  include <deal.II/base/config.h>
21 
22 #  ifdef DEAL_II_WITH_PETSC
23 
24 #    include <deal.II/lac/exceptions.h>
25 #    include <deal.II/lac/solver_control.h>
26 
27 #    include <petscksp.h>
28 
29 #    include <memory>
30 
31 #    ifdef DEAL_II_WITH_SLEPC
32 #      include <deal.II/lac/slepc_spectral_transformation.h>
33 #    endif
34 
35 DEAL_II_NAMESPACE_OPEN
36 
37 // Forward declarations
38 #    ifndef DOXYGEN
39 #      ifdef DEAL_II_WITH_SLEPC
40 namespace SLEPcWrappers
41 {
42   // forward declarations
43   class TransformationBase;
44 } // namespace SLEPcWrappers
45 #      endif
46 #    endif
47 
48 namespace PETScWrappers
49 {
50   // forward declarations
51 #    ifndef DOXYGEN
52   class MatrixBase;
53   class VectorBase;
54   class PreconditionerBase;
55 #    endif
56 
57 
58   /**
59    * Base class for solver classes using the PETSc solvers. Since solvers in
60    * PETSc are selected based on flags passed to a generic solver object,
61    * basically all the actual solver calls happen in this class, and derived
62    * classes simply set the right flags to select one solver or another, or to
63    * set certain parameters for individual solvers.
64    *
   * Optionally, the user can create a solver derived from the SolverBase
   * class and set the default arguments necessary to solve the linear
   * system of equations via a SolverControl object. These default options
   * can be overridden by specifying command line arguments of the form
   * @p -ksp_*. For example, @p -ksp_monitor_true_residual prints the true
   * (unpreconditioned) residual norm at each iteration and @p -ksp_view
   * provides information about the linear solver and the preconditioner
   * used in the current context. The type of the solver can also be changed
   * at runtime by specifying @p -ksp_type {richardson, cg, gmres, fgmres, ..}
   * in order to test which solver works best, along with a suitable
   * preconditioner set using @p -pc_type {jacobi, bjacobi, ilu, lu, ..}.
   * Several other command line options are available to modify the behavior
   * of the PETSc linear solvers; they are described in the <a
   * href="http://www.mcs.anl.gov/petsc">documentation and manual pages</a>.
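   *
   * A minimal usage sketch (assuming that @p mpi_communicator,
   * @p system_matrix, @p solution, @p system_rhs, and @p preconditioner are
   * set up elsewhere with the matching PETSc wrapper types):
   * @code
   *   SolverControl solver_control(1000, 1e-12);
   *   PETScWrappers::SolverCG solver(solver_control, mpi_communicator);
   *   solver.solve(system_matrix, solution, system_rhs, preconditioner);
   * @endcode
   * Command line options such as <tt>-ksp_monitor_true_residual</tt> or
   * <tt>-ksp_view</tt> then apply to this solver object.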
79    *
   * @note Repeated calls to solve() on a solver object with a preconditioner
   * must be used with care: the preconditioner is initialized in the first
   * call to solve(), and subsequent calls reuse the solver and preconditioner
   * objects. This is done for performance reasons. The solver and
   * preconditioner can be reset by calling reset().
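   *
   * A sketch of this pattern (matrix, vector, and preconditioner names are
   * assumed to be declared elsewhere):
   * @code
   *   solver.solve(A, x_1, b_1, preconditioner); // builds KSP and preconditioner
   *   solver.solve(A, x_2, b_2, preconditioner); // reuses both objects
   *   solver.reset();                            // discards them
   *   solver.solve(A, x_3, b_3, preconditioner); // builds them again
   * @endcode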
85    *
   * One of the gotchas of PETSc is that -- in particular in MPI mode -- it
   * often does not produce very helpful error messages. In order to save
   * other users some time in tracking down a hard-to-find error, here is one
   * such situation and the error message one gets there: not specifying an
   * MPI communicator in your solver's constructor. In this case, you will
   * get an error of the following form from each of your parallel processes:
93    * @verbatim
94    *   [1]PETSC ERROR: PCSetVector() line 1173 in src/ksp/pc/interface/precon.c
95    *   [1]PETSC ERROR:   Arguments must have same communicators!
   *   [1]PETSC ERROR:   Different communicators in the two objects: Argument # 1 and 2!
   *   [1]PETSC ERROR: KSPSetUp() line 195 in src/ksp/ksp/interface/itfunc.c
99    * @endverbatim
100    *
101    * This error, on which one can spend a very long time figuring out what
102    * exactly goes wrong, results from not specifying an MPI communicator. Note
103    * that the communicator @em must match that of the matrix and all vectors
104    * in the linear system which we want to solve. Aggravating the situation is
105    * the fact that the default argument to the solver classes, @p
106    * PETSC_COMM_SELF, is the appropriate argument for the sequential case
107    * (which is why it is the default argument), so this error only shows up in
108    * parallel mode.
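   *
   * In other words, in a parallel computation the solver has to be handed
   * the communicator on which the matrix and vectors live, for example (a
   * sketch, assuming all objects were built on <tt>MPI_COMM_WORLD</tt>):
   * @code
   *   SolverControl solver_control(1000, 1e-12);
   *   PETScWrappers::SolverCG solver(solver_control, MPI_COMM_WORLD);
   * @endcode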
109    *
110    * @ingroup PETScWrappers
111    */
112   class SolverBase
113   {
114   public:
115     /**
116      * Constructor. Takes the solver control object and the MPI communicator
117      * over which parallel computations are to happen.
118      *
119      * Note that the communicator used here must match the communicator used
120      * in the system matrix, solution, and right hand side object of the solve
121      * to be done with this solver. Otherwise, PETSc will generate hard to
122      * track down errors, see the documentation of the SolverBase class.
123      */
124     SolverBase(SolverControl &cn, const MPI_Comm &mpi_communicator);
125 
126     /**
127      * Destructor.
128      */
129     virtual ~SolverBase() = default;
130 
131     /**
132      * Solve the linear system <tt>Ax=b</tt>. Depending on the information
133      * provided by derived classes and the object passed as a preconditioner,
134      * one of the linear solvers and preconditioners of PETSc is chosen.
135      * Repeated calls to solve() do not reconstruct the preconditioner for
     * performance reasons. See the class documentation.
137      */
138     void
139     solve(const MatrixBase &        A,
140           VectorBase &              x,
141           const VectorBase &        b,
142           const PreconditionerBase &preconditioner);
143 
144 
145     /**
146      * Resets the contained preconditioner and solver object. See class
147      * description for more details.
148      */
149     virtual void
150     reset();
151 
152 
153     /**
154      * Sets a prefix name for the solver object. Useful when customizing the
155      * PETSc KSP object with command-line options.
156      */
157     void
158     set_prefix(const std::string &prefix);
159 
160 
161     /**
162      * Access to object that controls convergence.
163      */
164     SolverControl &
165     control() const;
166 
167     /**
     * Initialize the solver with the preconditioner. This function is
     * intended for use with the SLEPc spectral transformation class.
170      */
171     void
172     initialize(const PreconditionerBase &preconditioner);
173 
174   protected:
175     /**
176      * Reference to the object that controls convergence of the iterative
177      * solver. In fact, for these PETSc wrappers, PETSc does so itself, but we
178      * copy the data from this object before starting the solution process,
179      * and copy the data back into it afterwards.
180      */
181     SolverControl &solver_control;
182 
183     /**
184      * Copy of the MPI communicator object to be used for the solver.
185      */
186     const MPI_Comm mpi_communicator;
187 
188     /**
189      * Function that takes a Krylov Subspace Solver context object, and sets
190      * the type of solver that is requested by the derived class.
191      */
192     virtual void
193     set_solver_type(KSP &ksp) const = 0;
194 
195     /**
     * Solver prefix name to qualify options specific to the PETSc KSP object
     * in the current context. Note that a hyphen (-) must not be given at
     * the beginning of the prefix name; the leading hyphen of all runtime
     * options is added automatically.
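     *
     * For example, a minimal sketch (the prefix string is chosen arbitrarily
     * here): after calling
     * @code
     *   solver.set_prefix("solver1_");
     * @endcode
     * the runtime options for this particular solver are given as
     * <tt>-solver1_ksp_type</tt>, <tt>-solver1_ksp_monitor</tt>, and so on.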
200      */
201     std::string prefix_name;
202 
203   private:
204     /**
205      * A function that is used in PETSc as a callback to check on convergence.
206      * It takes the information provided from PETSc and checks it against
207      * deal.II's own SolverControl objects to see if convergence has been
208      * reached.
209      */
210     static PetscErrorCode
211     convergence_test(KSP                 ksp,
212                      const PetscInt      iteration,
213                      const PetscReal     residual_norm,
214                      KSPConvergedReason *reason,
215                      void *              solver_control);
216 
217     /**
218      * A structure that contains the PETSc solver and preconditioner objects.
219      * This object is preserved between subsequent calls to the solver if the
220      * same preconditioner is used as in the previous solver step. This may
221      * save some computation time, if setting up a preconditioner is
222      * expensive, such as in the case of an ILU for example.
223      *
224      * The actual declaration of this class is complicated by the fact that
225      * PETSc changed its solver interface completely and incompatibly between
226      * versions 2.1.6 and 2.2.0 :-(
227      *
     * Objects of this type are explicitly created, but are destroyed when the
     * surrounding solver object goes out of scope, or when we assign a new
     * value to the pointer to this object. The respective <tt>*Destroy</tt>
     * functions are therefore called in the destructor of this struct, even
     * though it does not have a constructor.
233      */
234     struct SolverData
235     {
236       /**
237        * Destructor
238        */
239       ~SolverData();
240 
241       /**
242        * Object for Krylov subspace solvers.
243        */
244       KSP ksp;
245     };
246 
247     /**
248      * Pointer to an object that stores the solver context. This is recreated
249      * in the main solver routine if necessary.
250      */
251     std::unique_ptr<SolverData> solver_data;
252 
253 #    ifdef DEAL_II_WITH_SLEPC
254     // Make the transformation class a friend, since it needs to set the KSP
255     // solver.
256     friend class SLEPcWrappers::TransformationBase;
257 #    endif
258   };
259 
260 
261 
262   /**
263    * An implementation of the solver interface using the PETSc Richardson
264    * solver.
265    *
266    * @ingroup PETScWrappers
267    */
268   class SolverRichardson : public SolverBase
269   {
270   public:
271     /**
272      * Standardized data struct to pipe additional data to the solver.
273      */
274     struct AdditionalData
275     {
276       /**
       * Constructor. By default, the damping parameter is set to one.
278        */
279       explicit AdditionalData(const double omega = 1);
280 
281       /**
282        * Relaxation parameter.
283        */
284       double omega;
285     };
286 
287     /**
288      * Constructor. In contrast to deal.II's own solvers, there is no need to
289      * give a vector memory object. However, PETSc solvers want to have an MPI
290      * communicator context over which computations are parallelized. By
291      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
292      * that for single processor (non-MPI) versions, this parameter does not
293      * have any effect.
294      *
295      * The last argument takes a structure with additional, solver dependent
296      * flags for tuning.
297      *
298      * Note that the communicator used here must match the communicator used
299      * in the system matrix, solution, and right hand side object of the solve
300      * to be done with this solver. Otherwise, PETSc will generate hard to
301      * track down errors, see the documentation of the SolverBase class.
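     *
     * For example, a sketch that selects a damping parameter of 0.8 (the
     * names @p solver_control and @p mpi_communicator are assumed to exist):
     * @code
     *   PETScWrappers::SolverRichardson::AdditionalData data(0.8);
     *   PETScWrappers::SolverRichardson solver(solver_control,
     *                                          mpi_communicator,
     *                                          data);
     * @endcode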
302      */
303     SolverRichardson(SolverControl &       cn,
304                      const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
305                      const AdditionalData &data             = AdditionalData());
306 
307   protected:
308     /**
309      * Store a copy of the flags for this particular solver.
310      */
311     const AdditionalData additional_data;
312 
313     /**
314      * Function that takes a Krylov Subspace Solver context object, and sets
315      * the type of solver that is appropriate for this class.
316      */
317     virtual void
318     set_solver_type(KSP &ksp) const override;
319   };
320 
321 
322 
323   /**
   * An implementation of the solver interface using the PETSc Chebyshev (or,
   * prior to version 3.3, Chebychev) solver.
326    *
327    * @ingroup PETScWrappers
328    */
329   class SolverChebychev : public SolverBase
330   {
331   public:
332     /**
333      * Standardized data struct to pipe additional data to the solver.
334      */
335     struct AdditionalData
336     {};
337 
338     /**
339      * Constructor. In contrast to deal.II's own solvers, there is no need to
340      * give a vector memory object. However, PETSc solvers want to have an MPI
341      * communicator context over which computations are parallelized. By
342      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
343      * that for single processor (non-MPI) versions, this parameter does not
344      * have any effect.
345      *
346      * The last argument takes a structure with additional, solver dependent
347      * flags for tuning.
348      *
349      * Note that the communicator used here must match the communicator used
350      * in the system matrix, solution, and right hand side object of the solve
351      * to be done with this solver. Otherwise, PETSc will generate hard to
352      * track down errors, see the documentation of the SolverBase class.
353      */
354     SolverChebychev(SolverControl &       cn,
355                     const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
356                     const AdditionalData &data             = AdditionalData());
357 
358   protected:
359     /**
360      * Store a copy of the flags for this particular solver.
361      */
362     const AdditionalData additional_data;
363 
364     /**
365      * Function that takes a Krylov Subspace Solver context object, and sets
366      * the type of solver that is appropriate for this class.
367      */
368     virtual void
369     set_solver_type(KSP &ksp) const override;
370   };
371 
372 
373 
374   /**
375    * An implementation of the solver interface using the PETSc CG solver.
376    *
377    * @ingroup PETScWrappers
378    */
379   class SolverCG : public SolverBase
380   {
381   public:
382     /**
383      * Standardized data struct to pipe additional data to the solver.
384      */
385     struct AdditionalData
386     {};
387 
388     /**
389      * Constructor. In contrast to deal.II's own solvers, there is no need to
390      * give a vector memory object. However, PETSc solvers want to have an MPI
391      * communicator context over which computations are parallelized. By
392      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
393      * that for single processor (non-MPI) versions, this parameter does not
394      * have any effect.
395      *
396      * The last argument takes a structure with additional, solver dependent
397      * flags for tuning.
398      *
399      * Note that the communicator used here must match the communicator used
400      * in the system matrix, solution, and right hand side object of the solve
401      * to be done with this solver. Otherwise, PETSc will generate hard to
402      * track down errors, see the documentation of the SolverBase class.
403      */
404     SolverCG(SolverControl &       cn,
405              const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
406              const AdditionalData &data             = AdditionalData());
407 
408   protected:
409     /**
410      * Store a copy of the flags for this particular solver.
411      */
412     const AdditionalData additional_data;
413 
414     /**
415      * Function that takes a Krylov Subspace Solver context object, and sets
416      * the type of solver that is appropriate for this class.
417      */
418     virtual void
419     set_solver_type(KSP &ksp) const override;
420   };
421 
422 
423 
424   /**
425    * An implementation of the solver interface using the PETSc BiCG solver.
426    *
427    * @ingroup PETScWrappers
428    */
429   class SolverBiCG : public SolverBase
430   {
431   public:
432     /**
433      * Standardized data struct to pipe additional data to the solver.
434      */
435     struct AdditionalData
436     {};
437 
438     /**
439      * Constructor. In contrast to deal.II's own solvers, there is no need to
440      * give a vector memory object. However, PETSc solvers want to have an MPI
441      * communicator context over which computations are parallelized. By
442      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
443      * that for single processor (non-MPI) versions, this parameter does not
444      * have any effect.
445      *
446      * The last argument takes a structure with additional, solver dependent
447      * flags for tuning.
448      *
449      * Note that the communicator used here must match the communicator used
450      * in the system matrix, solution, and right hand side object of the solve
451      * to be done with this solver. Otherwise, PETSc will generate hard to
452      * track down errors, see the documentation of the SolverBase class.
453      */
454     SolverBiCG(SolverControl &       cn,
455                const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
456                const AdditionalData &data             = AdditionalData());
457 
458   protected:
459     /**
460      * Store a copy of the flags for this particular solver.
461      */
462     const AdditionalData additional_data;
463 
464     /**
465      * Function that takes a Krylov Subspace Solver context object, and sets
466      * the type of solver that is appropriate for this class.
467      */
468     virtual void
469     set_solver_type(KSP &ksp) const override;
470   };
471 
472 
473 
474   /**
475    * An implementation of the solver interface using the PETSc GMRES solver.
476    *
477    * @ingroup PETScWrappers
478    */
479   class SolverGMRES : public SolverBase
480   {
481   public:
482     /**
483      * Standardized data struct to pipe additional data to the solver.
484      */
485     struct AdditionalData
486     {
487       /**
       * Constructor. By default, the number of temporary vectors is set to
       * 30, i.e., a restart is performed every 30 iterations.
490        */
491       AdditionalData(const unsigned int restart_parameter     = 30,
492                      const bool         right_preconditioning = false);
493 
494       /**
       * Maximum number of temporary vectors.
496        */
497       unsigned int restart_parameter;
498 
499       /**
500        * Flag for right preconditioning.
501        */
502       bool right_preconditioning;
503     };
504 
505     /**
506      * Constructor. In contrast to deal.II's own solvers, there is no need to
507      * give a vector memory object. However, PETSc solvers want to have an MPI
508      * communicator context over which computations are parallelized. By
509      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
510      * that for single processor (non-MPI) versions, this parameter does not
511      * have any effect.
512      *
513      * The last argument takes a structure with additional, solver dependent
514      * flags for tuning.
515      *
516      * Note that the communicator used here must match the communicator used
517      * in the system matrix, solution, and right hand side object of the solve
518      * to be done with this solver. Otherwise, PETSc will generate hard to
519      * track down errors, see the documentation of the SolverBase class.
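     *
     * For example, a sketch that restarts every 50 iterations and enables
     * right preconditioning (the names @p solver_control and
     * @p mpi_communicator are assumed to exist):
     * @code
     *   PETScWrappers::SolverGMRES::AdditionalData data(50, true);
     *   PETScWrappers::SolverGMRES solver(solver_control,
     *                                     mpi_communicator,
     *                                     data);
     * @endcode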
520      */
521     SolverGMRES(SolverControl &       cn,
522                 const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
523                 const AdditionalData &data             = AdditionalData());
524 
525   protected:
526     /**
527      * Store a copy of the flags for this particular solver.
528      */
529     const AdditionalData additional_data;
530 
531     /**
532      * Function that takes a Krylov Subspace Solver context object, and sets
533      * the type of solver that is appropriate for this class.
534      */
535     virtual void
536     set_solver_type(KSP &ksp) const override;
537   };
538 
539 
540 
541   /**
542    * An implementation of the solver interface using the PETSc BiCGStab
543    * solver.
544    *
545    * @ingroup PETScWrappers
546    */
547   class SolverBicgstab : public SolverBase
548   {
549   public:
550     /**
551      * Standardized data struct to pipe additional data to the solver.
552      */
553     struct AdditionalData
554     {};
555 
556     /**
557      * Constructor. In contrast to deal.II's own solvers, there is no need to
558      * give a vector memory object. However, PETSc solvers want to have an MPI
559      * communicator context over which computations are parallelized. By
560      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
561      * that for single processor (non-MPI) versions, this parameter does not
562      * have any effect.
563      *
564      * The last argument takes a structure with additional, solver dependent
565      * flags for tuning.
566      *
567      * Note that the communicator used here must match the communicator used
568      * in the system matrix, solution, and right hand side object of the solve
569      * to be done with this solver. Otherwise, PETSc will generate hard to
570      * track down errors, see the documentation of the SolverBase class.
571      */
572     SolverBicgstab(SolverControl &       cn,
573                    const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
574                    const AdditionalData &data             = AdditionalData());
575 
576   protected:
577     /**
578      * Store a copy of the flags for this particular solver.
579      */
580     const AdditionalData additional_data;
581 
582     /**
583      * Function that takes a Krylov Subspace Solver context object, and sets
584      * the type of solver that is appropriate for this class.
585      */
586     virtual void
587     set_solver_type(KSP &ksp) const override;
588   };
589 
590   /**
   * An implementation of the solver interface using the PETSc Conjugate
   * Gradient Squared (CGS) solver.
593    *
594    * @ingroup PETScWrappers
595    */
596   class SolverCGS : public SolverBase
597   {
598   public:
599     /**
600      * Standardized data struct to pipe additional data to the solver.
601      */
602     struct AdditionalData
603     {};
604 
605     /**
606      * Constructor. In contrast to deal.II's own solvers, there is no need to
607      * give a vector memory object. However, PETSc solvers want to have an MPI
608      * communicator context over which computations are parallelized. By
609      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
610      * that for single processor (non-MPI) versions, this parameter does not
611      * have any effect.
612      *
613      * The last argument takes a structure with additional, solver dependent
614      * flags for tuning.
615      *
616      * Note that the communicator used here must match the communicator used
617      * in the system matrix, solution, and right hand side object of the solve
618      * to be done with this solver. Otherwise, PETSc will generate hard to
619      * track down errors, see the documentation of the SolverBase class.
620      */
621     SolverCGS(SolverControl &       cn,
622               const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
623               const AdditionalData &data             = AdditionalData());
624 
625   protected:
626     /**
627      * Store a copy of the flags for this particular solver.
628      */
629     const AdditionalData additional_data;
630 
631     /**
632      * Function that takes a Krylov Subspace Solver context object, and sets
633      * the type of solver that is appropriate for this class.
634      */
635     virtual void
636     set_solver_type(KSP &ksp) const override;
637   };
638 
639 
640 
641   /**
642    * An implementation of the solver interface using the PETSc TFQMR solver.
643    *
644    * @ingroup PETScWrappers
645    */
646   class SolverTFQMR : public SolverBase
647   {
648   public:
649     /**
650      * Standardized data struct to pipe additional data to the solver.
651      */
652     struct AdditionalData
653     {};
654 
655     /**
656      * Constructor. In contrast to deal.II's own solvers, there is no need to
657      * give a vector memory object. However, PETSc solvers want to have an MPI
658      * communicator context over which computations are parallelized. By
659      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
660      * that for single processor (non-MPI) versions, this parameter does not
661      * have any effect.
662      *
663      * The last argument takes a structure with additional, solver dependent
664      * flags for tuning.
665      *
666      * Note that the communicator used here must match the communicator used
667      * in the system matrix, solution, and right hand side object of the solve
668      * to be done with this solver. Otherwise, PETSc will generate hard to
669      * track down errors, see the documentation of the SolverBase class.
670      */
671     SolverTFQMR(SolverControl &       cn,
672                 const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
673                 const AdditionalData &data             = AdditionalData());
674 
675   protected:
676     /**
677      * Store a copy of the flags for this particular solver.
678      */
679     const AdditionalData additional_data;
680 
681     /**
682      * Function that takes a Krylov Subspace Solver context object, and sets
683      * the type of solver that is appropriate for this class.
684      */
685     virtual void
686     set_solver_type(KSP &ksp) const override;
687   };
688 
689 
690 
691   /**
692    * An implementation of the solver interface using the PETSc TFQMR-2 solver
693    * (called TCQMR in PETSc). Note that this solver had a serious bug in
694    * versions up to and including PETSc 2.1.6, in that it did not check
695    * convergence and always returned an error code. Thus, this class will
696    * abort with an error indicating failure to converge with PETSc 2.1.6 and
697    * prior. This should be fixed in later versions of PETSc, though.
698    *
699    * @ingroup PETScWrappers
700    */
701   class SolverTCQMR : public SolverBase
702   {
703   public:
704     /**
705      * Standardized data struct to pipe additional data to the solver.
706      */
707     struct AdditionalData
708     {};
709 
710     /**
711      * Constructor. In contrast to deal.II's own solvers, there is no need to
712      * give a vector memory object. However, PETSc solvers want to have an MPI
713      * communicator context over which computations are parallelized. By
714      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
715      * that for single processor (non-MPI) versions, this parameter does not
716      * have any effect.
717      *
718      * The last argument takes a structure with additional, solver dependent
719      * flags for tuning.
720      *
721      * Note that the communicator used here must match the communicator used
722      * in the system matrix, solution, and right hand side object of the solve
723      * to be done with this solver. Otherwise, PETSc will generate hard to
724      * track down errors, see the documentation of the SolverBase class.
725      */
726     SolverTCQMR(SolverControl &       cn,
727                 const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
728                 const AdditionalData &data             = AdditionalData());
729 
730   protected:
731     /**
732      * Store a copy of the flags for this particular solver.
733      */
734     const AdditionalData additional_data;
735 
736     /**
737      * Function that takes a Krylov Subspace Solver context object, and sets
738      * the type of solver that is appropriate for this class.
739      */
740     virtual void
741     set_solver_type(KSP &ksp) const override;
742   };
743 
744 
745 
746   /**
747    * An implementation of the solver interface using the PETSc CR solver.
748    *
749    * @ingroup PETScWrappers
750    */
751   class SolverCR : public SolverBase
752   {
753   public:
754     /**
755      * Standardized data struct to pipe additional data to the solver.
756      */
757     struct AdditionalData
758     {};
759 
760     /**
761      * Constructor. In contrast to deal.II's own solvers, there is no need to
762      * give a vector memory object. However, PETSc solvers want to have an MPI
763      * communicator context over which computations are parallelized. By
764      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
765      * that for single processor (non-MPI) versions, this parameter does not
766      * have any effect.
767      *
768      * The last argument takes a structure with additional, solver dependent
769      * flags for tuning.
770      *
771      * Note that the communicator used here must match the communicator used
772      * in the system matrix, solution, and right hand side object of the solve
773      * to be done with this solver. Otherwise, PETSc will generate hard to
774      * track down errors, see the documentation of the SolverBase class.
775      */
776     SolverCR(SolverControl &       cn,
777              const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
778              const AdditionalData &data             = AdditionalData());
779 
780   protected:
781     /**
782      * Store a copy of the flags for this particular solver.
783      */
784     const AdditionalData additional_data;
785 
786     /**
787      * Function that takes a Krylov Subspace Solver context object, and sets
788      * the type of solver that is appropriate for this class.
789      */
790     virtual void
791     set_solver_type(KSP &ksp) const override;
792   };
793 
794 
795 
796   /**
797    * An implementation of the solver interface using the PETSc Least Squares
798    * solver.
799    *
800    * @ingroup PETScWrappers
801    */
802   class SolverLSQR : public SolverBase
803   {
804   public:
805     /**
806      * Standardized data struct to pipe additional data to the solver.
807      */
808     struct AdditionalData
809     {};
810 
811     /**
812      * Constructor. In contrast to deal.II's own solvers, there is no need to
813      * give a vector memory object. However, PETSc solvers want to have an MPI
814      * communicator context over which computations are parallelized. By
815      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
816      * that for single processor (non-MPI) versions, this parameter does not
817      * have any effect.
818      *
819      * The last argument takes a structure with additional, solver dependent
820      * flags for tuning.
821      *
822      * Note that the communicator used here must match the communicator used
823      * in the system matrix, solution, and right hand side object of the solve
824      * to be done with this solver. Otherwise, PETSc will generate hard to
825      * track down errors, see the documentation of the SolverBase class.
826      */
827     SolverLSQR(SolverControl &       cn,
828                const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
829                const AdditionalData &data             = AdditionalData());
830 
831   protected:
832     /**
833      * Store a copy of the flags for this particular solver.
834      */
835     const AdditionalData additional_data;
836 
837     /**
838      * Function that takes a Krylov Subspace Solver context object, and sets
839      * the type of solver that is appropriate for this class.
840      */
841     virtual void
842     set_solver_type(KSP &ksp) const override;
843   };
844 
845 
846   /**
   * An implementation of the solver interface using the PETSc PREONLY
   * solver. Strictly speaking, this is not an iterative solution algorithm:
   * solve() only applies the preconditioner once and returns immediately.
   * Its only purpose is to provide a solver object when the preconditioner
   * should be used as a complete solver. It is particularly useful in
   * conjunction with the complete LU decomposition preconditioner
   * <tt>PreconditionLU</tt>, which together with this solver class becomes a
   * direct solver.
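   *
   * A sketch of this use as a direct solver (the names @p mpi_communicator,
   * @p system_matrix, @p solution, and @p system_rhs are assumed, and the
   * preconditioner class comes from the PETSc preconditioner wrappers):
   * @code
   *   SolverControl cn;
   *   PETScWrappers::SolverPreOnly solver(cn, mpi_communicator);
   *   PETScWrappers::PreconditionLU preconditioner(system_matrix);
   *   solver.solve(system_matrix, solution, system_rhs, preconditioner);
   * @endcode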
854    *
855    * @ingroup PETScWrappers
856    */
857   class SolverPreOnly : public SolverBase
858   {
859   public:
860     /**
861      * Standardized data struct to pipe additional data to the solver.
862      */
863     struct AdditionalData
864     {};
865 
866     /**
867      * Constructor. In contrast to deal.II's own solvers, there is no need to
868      * give a vector memory object. However, PETSc solvers want to have an MPI
869      * communicator context over which computations are parallelized. By
870      * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
871      * that for single processor (non-MPI) versions, this parameter does not
872      * have any effect.
873      *
874      * The last argument takes a structure with additional, solver dependent
875      * flags for tuning.
876      *
877      * Note that the communicator used here must match the communicator used
878      * in the system matrix, solution, and right hand side object of the solve
879      * to be done with this solver. Otherwise, PETSc will generate hard to
880      * track down errors, see the documentation of the SolverBase class.
881      */
882     SolverPreOnly(SolverControl &       cn,
883                   const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
884                   const AdditionalData &data             = AdditionalData());
885 
886   protected:
887     /**
888      * Store a copy of the flags for this particular solver.
889      */
890     const AdditionalData additional_data;
891 
892     /**
893      * Function that takes a Krylov Subspace Solver context object, and sets
894      * the type of solver that is appropriate for this class.
895      */
896     virtual void
897     set_solver_type(KSP &ksp) const override;
898   };
899 
900   /**
901    * An implementation of the solver interface using the sparse direct MUMPS
902    * solver through PETSc. This class has the usual interface of all other
903    * solver classes but it is of course different in that it doesn't implement
904    * an iterative solver. As a consequence, things like the SolverControl
905    * object have no particular meaning here.
906    *
   * MUMPS allows one to make use of symmetry of the matrix. In this class,
   * this is enabled by the set_symmetric_mode() function. If your matrix is
   * symmetric, you can use this class as follows:
910    * @code
911    *   SolverControl cn;
912    *   PETScWrappers::SparseDirectMUMPS solver(cn, mpi_communicator);
913    *   solver.set_symmetric_mode(true);
914    *   solver.solve(system_matrix, solution, system_rhs);
915    * @endcode
916    *
   * @note The class internally calls KSPSetFromOptions, so you can use all
   * the PETSc parameters for the MATSOLVERMUMPS package. See
919    * http://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MATSOLVERMUMPS.html
920    *
921    * @ingroup PETScWrappers
922    */
923   class SparseDirectMUMPS : public SolverBase
924   {
925   public:
926     /**
927      * Standardized data structure to pipe additional data to the solver.
928      */
929     struct AdditionalData
930     {};
931     /**
932      * Constructor
933      */
934     SparseDirectMUMPS(SolverControl &       cn,
935                       const MPI_Comm &      mpi_communicator = PETSC_COMM_SELF,
936                       const AdditionalData &data = AdditionalData());
937 
938     /**
939      * The method to solve the linear system.
940      */
941     void
942     solve(const MatrixBase &A, VectorBase &x, const VectorBase &b);
943 
944     /**
     * This method allows one to take advantage of a symmetric system matrix
     * by using an LDL^T decomposition instead of the more expensive LU
     * decomposition. The argument indicates whether the matrix is symmetric.
948      */
949     void
950     set_symmetric_mode(const bool flag);
951 
952   protected:
953     /**
     * Store a copy of the flags for this particular solver.
955      */
956     const AdditionalData additional_data;
957 
958     virtual void
959     set_solver_type(KSP &ksp) const override;
960 
961   private:
962     /**
963      * A function that is used in PETSc as a callback to check convergence. It
964      * takes the information provided from PETSc and checks it against
965      * deal.II's own SolverControl objects to see if convergence has been
966      * reached.
967      */
968     static PetscErrorCode
969     convergence_test(KSP                 ksp,
970                      const PetscInt      iteration,
971                      const PetscReal     residual_norm,
972                      KSPConvergedReason *reason,
973                      void *              solver_control);
974 
975     /**
976      * A structure that contains the PETSc solver and preconditioner objects.
     * Since the solve() member function of the base class is not used here,
     * the private SolverData struct of the base class cannot be used either.
979      */
980     struct SolverDataMUMPS
981     {
982       /**
983        * Destructor
984        */
985       ~SolverDataMUMPS();
986 
987       KSP ksp;
988       PC  pc;
989     };
990 
991     std::unique_ptr<SolverDataMUMPS> solver_data;
992 
993     /**
     * Flag that specifies whether the matrix to be factorized is symmetric.
     * It influences the type of the preconditioner used (PCLU or PCCHOLESKY).
996      */
997     bool symmetric_mode;
998   };
999 } // namespace PETScWrappers
1000 
1001 DEAL_II_NAMESPACE_CLOSE
1002 
1003 #  endif // DEAL_II_WITH_PETSC
1004 
1005 /*----------------------------   petsc_solver.h ---------------------------*/
1006 
1007 #endif
1008 /*----------------------------   petsc_solver.h ---------------------------*/
1009