Reference documentation for deal.II version 9.1.0-pre
petsc_vector.h
// ---------------------------------------------------------------------
//
// Copyright (C) 2004 - 2018 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE.md at
// the top level directory of deal.II.
//
// ---------------------------------------------------------------------

#ifndef dealii_petsc_vector_h
#  define dealii_petsc_vector_h


#  include <deal.II/base/config.h>

#  ifdef DEAL_II_WITH_PETSC

#    include <deal.II/base/index_set.h>
#    include <deal.II/base/subscriptor.h>

#    include <deal.II/lac/exceptions.h>
#    include <deal.II/lac/petsc_vector_base.h>
#    include <deal.II/lac/vector.h>
#    include <deal.II/lac/vector_operation.h>
#    include <deal.II/lac/vector_type_traits.h>

DEAL_II_NAMESPACE_OPEN

namespace PETScWrappers
{
  namespace MPI
  {
    class Vector : public VectorBase
    {
    public:
      using size_type = types::global_dof_index;

      Vector();

      explicit Vector(const MPI_Comm &communicator,
                      const size_type n,
                      const size_type local_size);


      template <typename Number>
      explicit Vector(const MPI_Comm &              communicator,
                      const dealii::Vector<Number> &v,
                      const size_type               local_size);


      DEAL_II_DEPRECATED
      explicit Vector(const MPI_Comm &  communicator,
                      const VectorBase &v,
                      const size_type   local_size);

      Vector(const IndexSet &local,
             const IndexSet &ghost,
             const MPI_Comm &communicator);

      explicit Vector(const IndexSet &local, const MPI_Comm &communicator);

      virtual void
      clear() override;

      Vector &
      operator=(const Vector &v);

      Vector &
      operator=(const PetscScalar s);

      template <typename number>
      Vector &
      operator=(const dealii::Vector<number> &v);

      void
      reinit(const MPI_Comm &communicator,
             const size_type N,
             const size_type local_size,
             const bool      omit_zeroing_entries = false);

      void
      reinit(const Vector &v, const bool omit_zeroing_entries = false);

      void
      reinit(const IndexSet &local,
             const IndexSet &ghost,
             const MPI_Comm &communicator);

      void
      reinit(const IndexSet &local, const MPI_Comm &communicator);

      const MPI_Comm &
      get_mpi_communicator() const override;

      void
      print(std::ostream &     out,
            const unsigned int precision  = 3,
            const bool         scientific = true,
            const bool         across     = true) const;

      bool
      all_zero() const;

    protected:
      virtual void
      create_vector(const size_type n, const size_type local_size);

      virtual void
      create_vector(const size_type n,
                    const size_type local_size,
                    const IndexSet &ghostnodes);

    private:
      MPI_Comm communicator;
    };
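
    // Illustrative usage sketch (not part of the original header): setting
    // up a writable vector over the locally owned indices and a read-only
    // ghosted vector over the locally relevant ones. The index sets are
    // assumed to have been obtained elsewhere, e.g. from a DoFHandler.
    //
    // @code
    // IndexSet locally_owned    = ...; // indices owned by this process
    // IndexSet locally_relevant = ...; // owned plus ghost indices
    //
    // PETScWrappers::MPI::Vector solution(locally_owned, MPI_COMM_WORLD);
    // solution(some_owned_index) = 1.;
    // solution.compress(VectorOperation::insert);
    //
    // PETScWrappers::MPI::Vector ghosted(locally_owned,
    //                                    locally_relevant,
    //                                    MPI_COMM_WORLD);
    // ghosted = solution; // imports the ghost values as well
    // @endcode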


    // ------------------ template and inline functions -------------


    inline void
    swap(Vector &u, Vector &v)
    {
      u.swap(v);
    }


#    ifndef DOXYGEN

    template <typename number>
    Vector::Vector(const MPI_Comm &              communicator,
                   const dealii::Vector<number> &v,
                   const size_type               local_size)
      : communicator(communicator)
    {
      Vector::create_vector(v.size(), local_size);

      *this = v;
    }
452 
453 
454  inline Vector &
455  Vector::operator=(const PetscScalar s)
456  {
458 
459  return *this;
460  }
461 



    template <typename number>
    inline Vector &
    Vector::operator=(const dealii::Vector<number> &v)
    {
      Assert(size() == v.size(), ExcDimensionMismatch(size(), v.size()));

      // FIXME: the following isn't necessarily fast, but this is due to
      // the fact that PETSc doesn't offer an inlined access operator.
      //
      // If someone wants to contribute some code: to make this code
      // faster, one could first convert all values to PetscScalar and
      // then set them all at once using VecSetValues. This has the
      // drawback that it could take quite some memory if the vector is
      // large, and it would in addition allocate memory on the heap,
      // which is expensive. An alternative would be to split the vector
      // into chunks of, say, 128 elements, convert a chunk at a time, and
      // set it in the output vector using VecSetValues. Since 128 elements
      // is small enough, such a chunk could easily be allocated on the
      // stack (as a local variable), which would make the whole thing much
      // more efficient.
      //
      // A second way to make things faster is the special case that
      // number==PetscScalar: we could then declare a specialization of
      // this template and omit the conversion. The problem with this is
      // that the best we can do is to use VecSetValues, but this isn't
      // very efficient either: it wants to see an array of indices, which
      // in this case a) again takes up a whole lot of memory on the heap,
      // and b) is totally redundant since its content would simply be the
      // sequence 0,1,2,3,...,n. The best of all worlds would probably be a
      // function in PETSc that takes a pointer to an array of PetscScalar
      // values and simply copies n elements verbatim into the vector...
      for (size_type i = 0; i < v.size(); ++i)
        (*this)(i) = v(i);

      compress(VectorOperation::insert);

      return *this;
    }
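
    // A possible shape of the chunked VecSetValues approach sketched in the
    // FIXME comment above (an illustrative sketch only, not part of the
    // library; 'vector' is the underlying Vec member of VectorBase):
    //
    // @code
    // constexpr size_type chunk_size = 128;
    // PetscScalar values[chunk_size];  // small enough to live on the stack
    // PetscInt    indices[chunk_size];
    // for (size_type start = 0; start < v.size(); start += chunk_size)
    //   {
    //     const size_type n =
    //       std::min(static_cast<size_type>(chunk_size), v.size() - start);
    //     for (size_type j = 0; j < n; ++j)
    //       {
    //         values[j]  = static_cast<PetscScalar>(v(start + j));
    //         indices[j] = static_cast<PetscInt>(start + j);
    //       }
    //     VecSetValues(
    //       vector, static_cast<PetscInt>(n), indices, values, INSERT_VALUES);
    //   }
    // compress(VectorOperation::insert);
    // @endcode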



    inline const MPI_Comm &
    Vector::get_mpi_communicator() const
    {
      return communicator;
    }

#    endif // DOXYGEN
  } // namespace MPI
} // namespace PETScWrappers

namespace internal
{
  namespace LinearOperatorImplementation
  {
    template <typename>
    class ReinitHelper;

    template <>
    class ReinitHelper<PETScWrappers::MPI::Vector>
    {
    public:
      template <typename Matrix>
      static void
      reinit_range_vector(const Matrix &              matrix,
                          PETScWrappers::MPI::Vector &v,
                          bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_range_indices(),
                 matrix.get_mpi_communicator());
      }

      template <typename Matrix>
      static void
      reinit_domain_vector(const Matrix &              matrix,
                           PETScWrappers::MPI::Vector &v,
                           bool /*omit_zeroing_entries*/)
      {
        v.reinit(matrix.locally_owned_domain_indices(),
                 matrix.get_mpi_communicator());
      }
    };

  } // namespace LinearOperatorImplementation
} // namespace internal
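
// The ReinitHelper specialization above is what the LinearOperator machinery
// uses to size range and domain vectors for a PETSc matrix. A hedged usage
// sketch (assuming an already assembled PETScWrappers::MPI::SparseMatrix A):
//
// @code
// const auto op = linear_operator<PETScWrappers::MPI::Vector>(A);
// PETScWrappers::MPI::Vector x, y;
// op.reinit_domain_vector(x, /*omit_zeroing_entries=*/false);
// op.reinit_range_vector(y, /*omit_zeroing_entries=*/false);
// y = op * x;
// @endcode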

template <>
struct is_serial_vector<PETScWrappers::MPI::Vector> : std::false_type
{};
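
// Generic code can branch at compile time on this trait; a minimal sketch
// (the helper name is hypothetical):
//
// @code
// template <typename VectorType>
// constexpr bool stores_all_entries_locally()
// {
//   // false for PETScWrappers::MPI::Vector: entries are distributed
//   // across the processes of the MPI communicator
//   return is_serial_vector<VectorType>::value;
// }
// @endcode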


DEAL_II_NAMESPACE_CLOSE

#  endif // DEAL_II_WITH_PETSC

#endif
/*------------------------- petsc_vector.h -------------------------*/