19 changes: 17 additions & 2 deletions include/numerics/petsc_vector.h
@@ -121,7 +121,11 @@ class PetscVector final : public NumericVector<T>

/**
* Copy assignment operator.
* Calls VecCopy after performing various checks.
* Supported assignments, by ParallelType combination (for this documentation,
* ghosted vectors are lumped in with parallel):
* - Assign from parallel to parallel
* - Assign from serial to serial
* - Assign from parallel to serial
* \returns A reference to *this as the derived type.
*/
PetscVector<T> & operator= (const PetscVector<T> & v);
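For illustration, a minimal usage sketch of the combinations documented above (not part of the patch; the communicator, global size, and block partitioning are assumptions):

#include "libmesh/petsc_vector.h"

// Hedged sketch: exercises the documented assignment combinations. The
// global size and the per-rank partitioning are illustrative assumptions.
void assignment_example(const libMesh::Parallel::Communicator & comm)
{
  using namespace libMesh;

  const numeric_index_type n = 100;
  const numeric_index_type n_local =
    n / comm.size() + (comm.rank() < n % comm.size() ? 1 : 0);

  PetscVector<Number> parallel_src(comm), parallel_dst(comm), serial_dst(comm);
  parallel_src.init(n, n_local, /*fast=*/false, PARALLEL);
  parallel_dst.init(n, n_local, /*fast=*/false, PARALLEL);
  serial_dst.init(n, n, /*fast=*/false, SERIAL);
  parallel_src.close();

  parallel_dst = parallel_src; // parallel -> parallel: plain VecCopy
  serial_dst   = parallel_src; // parallel -> serial: scatter-to-all on each rank
  // serial -> parallel is rejected at runtime; the serial source is never
  // verified to be identical on every rank.
}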
@@ -703,7 +707,11 @@ void PetscVector<T>::init (const numeric_index_type n,
if (this->initialized())
this->clear();

if (ptype == AUTOMATIC)
if (this->comm().size() == 1)
// Forcing SERIAL here simplifies later branching decisions and is consistent with what
// PETSc does: a single-rank Vec is a sequential vector.
this->_type = SERIAL;
else if (ptype == AUTOMATIC)
{
if (n == n_local)
this->_type = SERIAL;
@@ -773,6 +781,13 @@ void PetscVector<T>::init (const numeric_index_type n,
{
parallel_object_only();

if (this->comm().size() == 1)
{
libmesh_assert(ghost.empty());
this->init(n, n_local, fast, SERIAL);
return;
}

PetscInt petsc_n=static_cast<PetscInt>(n);
PetscInt petsc_n_local=static_cast<PetscInt>(n_local);
PetscInt petsc_n_ghost=static_cast<PetscInt>(ghost.size());
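Before moving on to the source file, a hedged sketch of what the single-rank shortcut in both init() overloads means for callers (a one-rank communicator is assumed; sizes are illustrative):

#include "libmesh/petsc_vector.h"
#include <vector>

// Hedged sketch: on a communicator of size 1 every requested ParallelType
// collapses to SERIAL, matching PETSc's use of a sequential Vec on one rank.
void single_rank_init_example(const libMesh::Parallel::Communicator & comm)
{
  using namespace libMesh;
  libmesh_assert_equal_to(comm.size(), 1);

  PetscVector<Number> v(comm);
  v.init(/*n=*/10, /*n_local=*/10, /*fast=*/false, PARALLEL);
  libmesh_assert_equal_to(v.type(), SERIAL); // a PARALLEL request still ends up SERIAL

  PetscVector<Number> g(comm);
  const std::vector<numeric_index_type> no_ghosts; // ghost list must be empty on one rank
  g.init(/*n=*/10, /*n_local=*/10, no_ghosts, /*fast=*/false, GHOSTED);
  libmesh_assert_equal_to(g.type(), SERIAL); // the ghosted overload falls back to a SERIAL init
}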
202 changes: 89 additions & 113 deletions src/numerics/petsc_vector.C
@@ -335,7 +335,8 @@ void PetscVector<T>::add (const T a_in, const NumericVector<T> & v_in)

LibmeshPetscCall(VecAXPY(_vec, a, v->vec()));

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));

if (this->type() == GHOSTED)
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
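The assertions above now cast the enum through its underlying type instead of int before calling verify(); a standalone sketch of that pattern, using a purely hypothetical enum:

#include <type_traits>
#include <cstdint>

// Hedged sketch: convert an enum to its underlying integral type without
// assuming that type is int. 'Mode' is illustrative, not a libMesh type.
enum class Mode : std::uint8_t { Off, On };

template <typename E>
constexpr typename std::underlying_type<E>::type to_underlying(E e)
{
  return static_cast<typename std::underlying_type<E>::type>(e);
}

static_assert(to_underlying(Mode::On) == 1, "pass the underlying value, not a guessed int");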
@@ -375,7 +376,8 @@ void PetscVector<T>::scale (const T factor_in)

LibmeshPetscCall(VecScale(_vec, factor));

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));

if (this->type() == GHOSTED)
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -416,7 +418,8 @@ void PetscVector<T>::abs()

LibmeshPetscCall(VecAbs(_vec));

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));

if (this->type() == GHOSTED)
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -480,7 +483,8 @@ PetscVector<T>::operator = (const T s_in)
{
LibmeshPetscCall(VecSet(_vec, s));

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));

if (this->type() == GHOSTED)
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -511,19 +515,59 @@ template <typename T>
PetscVector<T> &
PetscVector<T>::operator = (const PetscVector<T> & v)
{
enum AssignmentType { ParallelToSerial, SerialToParallel, SameToSame, Error };

parallel_object_only();
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(v.type())));

if (this == &v)
return *this;

this->_restore_array();
v._restore_array();

libmesh_assert_equal_to (this->size(), v.size());
libmesh_assert_equal_to (this->local_size(), v.local_size());
libmesh_assert (v.closed());

AssignmentType assign_type = Error;
if (this->type() == SERIAL && v.type() != SERIAL)
assign_type = ParallelToSerial;
else if (this->type() != SERIAL && v.type() == SERIAL)
assign_type = SerialToParallel;
else if (this->local_size() == v.local_size())
assign_type = SameToSame;

libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<AssignmentType>::type>(assign_type)));

LibmeshPetscCall(VecCopy (v._vec, this->_vec));
switch (assign_type)
{
case ParallelToSerial:
{
// scatter from parallel to serial
libmesh_assert(v.comm().size() > 1);
WrappedPetsc<VecScatter> scatter;
LibmeshPetscCall(VecScatterCreateToAll(v._vec, scatter.get(), nullptr));
VecScatterBeginEnd(v.comm(), scatter, v._vec, _vec, INSERT_VALUES, SCATTER_FORWARD);
break;
}
case SameToSame:
// serial to serial or parallel to parallel
LibmeshPetscCall(VecCopy(v._vec, _vec));
break;
case SerialToParallel:
libmesh_not_implemented_msg(
"Scattering from a serial vector on every rank to a parallel vector is not behavior we "
"define because we do not verify the serial vector is the same on each rank");
default:
libmesh_error_msg("Unhandled vector combination");
}

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));

if (this->type() == GHOSTED)
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
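For reference, a hedged sketch of the raw PETSc calls that the ParallelToSerial branch wraps. Unlike the branch above, which passes nullptr because the destination vector already exists, this sketch lets VecScatterCreateToAll create the sequential output; error codes are ignored for brevity:

#include <petscvec.h>

// Hedged sketch of PETSc's scatter-to-all: gather a distributed Vec into a
// sequential Vec that every rank holds in full.
void scatter_to_all_example(Vec parallel_src)
{
  Vec        seq_copy; // sequential Vec of the full global size, created below
  VecScatter scatter;

  VecScatterCreateToAll(parallel_src, &scatter, &seq_copy);
  VecScatterBegin(scatter, parallel_src, seq_copy, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd  (scatter, parallel_src, seq_copy, INSERT_VALUES, SCATTER_FORWARD);

  VecScatterDestroy(&scatter);
  VecDestroy(&seq_copy);
}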
@@ -579,99 +623,21 @@ void PetscVector<T>::localize (NumericVector<T> & v_local_in) const
{
parallel_object_only();

this->_restore_array();

// Make sure the NumericVector passed in is really a PetscVector
PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);

libmesh_assert(v_local);
// v_local_in should be closed
libmesh_assert(v_local->closed());
libmesh_assert_equal_to (v_local->size(), this->size());
// 1) v_local_in is a large vector to hold the whole world
// 2) v_local_in is a ghosted vector
// 3) v_local_in is a parallel vector
// Cases 2) and 3) should be scalable
libmesh_assert(this->size()==v_local->local_size() || this->local_size()==v_local->local_size());

libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(v_local_in.type())));

if (v_local->type() == SERIAL && this->size() == v_local->local_size())
{
WrappedPetsc<VecScatter> scatter;
LibmeshPetscCall(VecScatterCreateToAll(_vec, scatter.get(), nullptr));
VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
}
// Two vectors have the same size, and we should just do a simple copy.
// v_local could be either a parallel or ghosted vector
else if (this->local_size() == v_local->local_size())
LibmeshPetscCall(VecCopy(_vec,v_local->_vec));
else
libmesh_error_msg("Vectors are inconsistent");

// Make sure ghost dofs are up to date
// We do not call "close" here to save a global reduction
if (v_local->type() == GHOSTED)
VecGhostUpdateBeginEnd(this->comm(), v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
v_local_in = *this;
}
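A hedged usage sketch of the simplified localize(): it now forwards to the copy assignment above, so a SERIAL target takes the scatter-to-all path and a same-layout target takes a plain VecCopy. The source vector is assumed to be closed:

#include "libmesh/petsc_vector.h"

// Hedged sketch, not part of the patch: localize a distributed vector into a
// SERIAL vector that holds every global entry on every rank.
void localize_example(const libMesh::PetscVector<libMesh::Number> & parallel_vec)
{
  using namespace libMesh;

  PetscVector<Number> serial_copy(parallel_vec.comm());
  serial_copy.init(parallel_vec.size(), parallel_vec.size(), /*fast=*/false, SERIAL);

  parallel_vec.localize(serial_copy); // equivalent to: serial_copy = parallel_vec;
}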



template <typename T>
void PetscVector<T>::localize (NumericVector<T> & v_local_in,
const std::vector<numeric_index_type> & send_list) const
const std::vector<numeric_index_type> & /*send_list*/) const
{
parallel_object_only();

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(int(v_local_in.type())));

// FIXME: Workaround for a strange bug at large-scale.
// If we have ghosting, PETSc lets us just copy the solution, and
// doing so avoids a segfault?
if (v_local_in.type() == GHOSTED &&
this->type() == PARALLEL)
{
v_local_in = *this;
return;
}

// Normal code path begins here

this->_restore_array();

// Make sure the NumericVector passed in is really a PetscVector
PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);

libmesh_assert(v_local);
libmesh_assert_equal_to (v_local->size(), this->size());
libmesh_assert_less_equal (send_list.size(), v_local->size());

const numeric_index_type n_sl =
cast_int<numeric_index_type>(send_list.size());

std::vector<PetscInt> idx(n_sl + this->local_size());
for (numeric_index_type i=0; i<n_sl; i++)
idx[i] = static_cast<PetscInt>(send_list[i]);
for (auto i : make_range(this->local_size()))
idx[n_sl+i] = i + this->first_local_index();

// Create the index set & scatter objects
WrappedPetsc<IS> is;
PetscInt * idxptr = idx.empty() ? nullptr : idx.data();
LibmeshPetscCall(ISCreateGeneral(this->comm().get(), n_sl+this->local_size(),
idxptr, PETSC_USE_POINTER, is.get()));

WrappedPetsc<VecScatter> scatter;
LibmeshPetscCall(VecScatterCreate(_vec, is,
v_local->_vec, is,
scatter.get()));

// Perform the scatter
VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);

// Make sure ghost dofs are up to date
if (v_local->type() == GHOSTED)
v_local->close();
this->localize(v_local_in);
}


@@ -1000,7 +966,8 @@ void PetscVector<T>::pointwise_mult (const NumericVector<T> & vec1,
// Call PETSc function.
LibmeshPetscCall(VecPointwiseMult(_vec, vec1_petsc->vec(), vec2_petsc->vec()));

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));

if (this->type() == GHOSTED)
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -1023,7 +990,8 @@ void PetscVector<T>::pointwise_divide (const NumericVector<T> & vec1,
// Call PETSc function.
LibmeshPetscCall(VecPointwiseDivide(_vec, vec1_petsc->vec(), vec2_petsc->vec()));

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));

if (this->type() == GHOSTED)
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -1103,29 +1071,37 @@ void PetscVector<T>::create_subvector(NumericVector<T> & subvector,
// If not, we use the appropriate PETSc routines to initialize it.
if (!petsc_subvector->initialized())
{
libmesh_assert(petsc_subvector->_type == AUTOMATIC || petsc_subvector->_type == PARALLEL);

if (supplying_global_rows)
// Initialize the petsc_subvector to have enough space to hold
// the entries which will be scattered into it. Note: such an
// init() function (where we let PETSc decide the number of local
// entries) is not currently offered by the PetscVector
// class. Should we differentiate here between sequential and
// parallel vector creation based on this->n_processors() ?
LibmeshPetscCall(VecCreateMPI(this->comm().get(),
PETSC_DECIDE, // n_local
cast_int<PetscInt>(rows.size()), // n_global
&(petsc_subvector->_vec)));
if (this->type() == SERIAL)
{
libmesh_assert(this->comm().verify(rows.size()));
LibmeshPetscCall(VecCreateSeq(this->comm().get(), rows.size(), &(petsc_subvector->_vec)));
petsc_subvector->_type = SERIAL;
}
else
LibmeshPetscCall(VecCreateMPI(this->comm().get(),
cast_int<PetscInt>(rows.size()),
PETSC_DETERMINE,
&(petsc_subvector->_vec)));
{
if (supplying_global_rows)
// Initialize the petsc_subvector to have enough space to hold
// the entries which will be scattered into it. Note: such an
// init() function (where we let PETSc decide the number of local
// entries) is not currently offered by the PetscVector
// class. Should we differentiate here between sequential and
// parallel vector creation based on this->n_processors() ?
LibmeshPetscCall(VecCreateMPI(this->comm().get(),
PETSC_DECIDE, // n_local
cast_int<PetscInt>(rows.size()), // n_global
&(petsc_subvector->_vec)));
else
LibmeshPetscCall(VecCreateMPI(this->comm().get(),
cast_int<PetscInt>(rows.size()),
PETSC_DETERMINE,
&(petsc_subvector->_vec)));

// We created a parallel vector
petsc_subvector->_type = PARALLEL;
}

LibmeshPetscCall(VecSetFromOptions (petsc_subvector->_vec));

// We created a parallel vector
petsc_subvector->_type = PARALLEL;

// Mark the subvector as initialized
petsc_subvector->_is_initialized = true;
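Finally, a hedged sketch of the subvector type resolution introduced above. The row selection is illustrative, and the same rows are supplied on every rank:

#include "libmesh/petsc_vector.h"
#include <numeric>
#include <vector>

// Hedged sketch, not part of the patch: the subvector now inherits
// "seriality" from its parent. A SERIAL parent yields a SERIAL
// (VecCreateSeq) subvector; otherwise a PARALLEL (VecCreateMPI) subvector
// is created as before.
void subvector_example(const libMesh::PetscVector<libMesh::Number> & parent)
{
  using namespace libMesh;

  // Take the first half of the parent's global indices on every rank.
  std::vector<numeric_index_type> rows(parent.size() / 2);
  std::iota(rows.begin(), rows.end(), numeric_index_type(0));

  PetscVector<Number> sub(parent.comm());
  parent.create_subvector(sub, rows, /*supplying_global_rows=*/true);

  libmesh_assert_equal_to(sub.type(),
                          parent.type() == SERIAL ? SERIAL : PARALLEL);
}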