diff --git a/include/numerics/petsc_vector.h b/include/numerics/petsc_vector.h
index 57391039a9..c3fb360809 100644
--- a/include/numerics/petsc_vector.h
+++ b/include/numerics/petsc_vector.h
@@ -121,7 +121,11 @@ class PetscVector final : public NumericVector<T>
 
   /**
    * Copy assignment operator.
-   * Calls VecCopy after performing various checks.
+   * Supported assignments based on ParallelType combination (note that we lump ghosted into
+   * parallel for this method documentation):
+   * - Assign from parallel to parallel
+   * - Assign from serial to serial
+   * - Assign from parallel to serial
    * \returns A reference to *this as the derived type.
    */
   PetscVector<T> & operator= (const PetscVector<T> & v);
@@ -703,7 +707,11 @@ void PetscVector<T>::init (const numeric_index_type n,
   if (this->initialized())
     this->clear();
 
-  if (ptype == AUTOMATIC)
+  if (this->comm().size() == 1)
+    // This can help with some branching decisions and is also consistent with what PETSc does... a
+    // single rank Vec is going to be a sequential vector
+    this->_type = SERIAL;
+  else if (ptype == AUTOMATIC)
     {
       if (n == n_local)
         this->_type = SERIAL;
@@ -773,6 +781,13 @@ void PetscVector<T>::init (const numeric_index_type n,
 {
   parallel_object_only();
 
+  if (this->comm().size() == 1)
+    {
+      libmesh_assert(ghost.empty());
+      this->init(n, n_local, fast, SERIAL);
+      return;
+    }
+
   PetscInt petsc_n=static_cast<PetscInt>(n);
   PetscInt petsc_n_local=static_cast<PetscInt>(n_local);
   PetscInt petsc_n_ghost=static_cast<PetscInt>(ghost.size());
diff --git a/src/numerics/petsc_vector.C b/src/numerics/petsc_vector.C
index 5cca270b57..a7bfab100c 100644
--- a/src/numerics/petsc_vector.C
+++ b/src/numerics/petsc_vector.C
@@ -335,7 +335,8 @@ void PetscVector<T>::add (const T a_in, const NumericVector<T> & v_in)
   LibmeshPetscCall(VecAXPY(_vec, a, v->vec()));
 
-  libmesh_assert(this->comm().verify(int(this->type())));
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(this->type())));
 
   if (this->type() == GHOSTED)
     VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -375,7 +376,8 @@ void PetscVector<T>::scale (const T factor_in)
   LibmeshPetscCall(VecScale(_vec, factor));
 
-  libmesh_assert(this->comm().verify(int(this->type())));
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(this->type())));
 
   if (this->type() == GHOSTED)
     VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -416,7 +418,8 @@ void PetscVector<T>::abs()
   LibmeshPetscCall(VecAbs(_vec));
 
-  libmesh_assert(this->comm().verify(int(this->type())));
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(this->type())));
 
   if (this->type() == GHOSTED)
     VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -480,7 +483,8 @@ PetscVector<T>::operator = (const T s_in)
     {
       LibmeshPetscCall(VecSet(_vec, s));
 
-      libmesh_assert(this->comm().verify(int(this->type())));
+      libmesh_assert(this->comm().verify(
+          static_cast<std::underlying_type<ParallelType>::type>(this->type())));
 
       if (this->type() == GHOSTED)
         VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -511,19 +515,59 @@
 template <typename T>
 PetscVector<T> &
 PetscVector<T>::operator = (const PetscVector<T> & v)
 {
+  enum AssignmentType { ParallelToSerial, SerialToParallel, SameToSame, Error };
+
   parallel_object_only();
 
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(this->type())));
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(v.type())));
+
+  if (this == &v)
+    return *this;
+
   this->_restore_array();
   v._restore_array();
 
   libmesh_assert_equal_to (this->size(), v.size());
-  libmesh_assert_equal_to (this->local_size(), v.local_size());
   libmesh_assert (v.closed());
 
+  AssignmentType assign_type = Error;
+  if (this->type() == SERIAL && v.type() != SERIAL)
+    assign_type = ParallelToSerial;
+  else if (this->type() != SERIAL && v.type() == SERIAL)
+    assign_type = SerialToParallel;
+  else if (this->local_size() == v.local_size())
+    assign_type = SameToSame;
+
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<AssignmentType>::type>(assign_type)));
 
-  LibmeshPetscCall(VecCopy (v._vec, this->_vec));
+  switch (assign_type)
+    {
+    case ParallelToSerial:
+      {
+        // scatter from parallel to serial
+        libmesh_assert(v.comm().size() > 1);
+        WrappedPetsc<VecScatter> scatter;
+        LibmeshPetscCall(VecScatterCreateToAll(v._vec, scatter.get(), nullptr));
+        VecScatterBeginEnd(v.comm(), scatter, v._vec, _vec, INSERT_VALUES, SCATTER_FORWARD);
+        break;
+      }
+    case SameToSame:
+      // serial to serial or parallel to parallel
+      LibmeshPetscCall(VecCopy(v._vec, _vec));
+      break;
+    case SerialToParallel:
+      libmesh_not_implemented_msg(
+          "Scattering from a serial vector on every rank to a parallel vector is not behavior we "
+          "define because we do not verify the serial vector is the same on each rank");
+    default:
+      libmesh_error_msg("Unhandled vector combination");
+    }
 
-  libmesh_assert(this->comm().verify(int(this->type())));
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(this->type())));
 
   if (this->type() == GHOSTED)
     VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -579,99 +623,21 @@ void PetscVector<T>::localize (NumericVector<T> & v_local_in) const
 {
   parallel_object_only();
 
-  this->_restore_array();
-
-  // Make sure the NumericVector passed in is really a PetscVector
-  PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);
-
-  libmesh_assert(v_local);
-  // v_local_in should be closed
-  libmesh_assert(v_local->closed());
-  libmesh_assert_equal_to (v_local->size(), this->size());
-  // 1) v_local_in is a large vector to hold the whole world
-  // 2) v_local_in is a ghosted vector
-  // 3) v_local_in is a parallel vector
-  // Cases 2) and 3) should be scalable
-  libmesh_assert(this->size()==v_local->local_size() || this->local_size()==v_local->local_size());
-
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(this->type())));
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(v_local_in.type())));
 
-  if (v_local->type() == SERIAL && this->size() == v_local->local_size())
-    {
-      WrappedPetsc<VecScatter> scatter;
-      LibmeshPetscCall(VecScatterCreateToAll(_vec, scatter.get(), nullptr));
-      VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
-    }
-  // Two vectors have the same size, and we should just do a simple copy.
-  // v_local could be either a parallel or ghosted vector
-  else if (this->local_size() == v_local->local_size())
-    LibmeshPetscCall(VecCopy(_vec,v_local->_vec));
-  else
-    libmesh_error_msg("Vectors are inconsistent");
-
-  // Make sure ghost dofs are up to date
-  // We do not call "close" here to save a global reduction
-  if (v_local->type() == GHOSTED)
-    VecGhostUpdateBeginEnd(this->comm(), v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
+  v_local_in = *this;
 }
 
 
 
 template <typename T>
 void PetscVector<T>::localize (NumericVector<T> & v_local_in,
-                               const std::vector<numeric_index_type> & send_list) const
+                               const std::vector<numeric_index_type> & /*send_list*/) const
 {
-  parallel_object_only();
-
-  libmesh_assert(this->comm().verify(int(this->type())));
-  libmesh_assert(this->comm().verify(int(v_local_in.type())));
-
-  // FIXME: Workaround for a strange bug at large-scale.
-  // If we have ghosting, PETSc lets us just copy the solution, and
-  // doing so avoids a segfault?
-  if (v_local_in.type() == GHOSTED &&
-      this->type() == PARALLEL)
-    {
-      v_local_in = *this;
-      return;
-    }
-
-  // Normal code path begins here
-
-  this->_restore_array();
-
-  // Make sure the NumericVector passed in is really a PetscVector
-  PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);
-
-  libmesh_assert(v_local);
-  libmesh_assert_equal_to (v_local->size(), this->size());
-  libmesh_assert_less_equal (send_list.size(), v_local->size());
-
-  const numeric_index_type n_sl =
-    cast_int<numeric_index_type>(send_list.size());
-
-  std::vector<PetscInt> idx(n_sl + this->local_size());
-  for (numeric_index_type i=0; i<n_sl; i++)
-    idx[i] = static_cast<PetscInt>(send_list[i]);
-  for (auto i : make_range(this->local_size()))
-    idx[n_sl+i] = i + this->first_local_index();
-
-  // Create the index set & scatter objects
-  WrappedPetsc<IS> is;
-  PetscInt * idxptr = idx.empty() ? nullptr : idx.data();
-  LibmeshPetscCall(ISCreateGeneral(this->comm().get(), n_sl+this->local_size(),
-                                   idxptr, PETSC_USE_POINTER, is.get()));
-
-  WrappedPetsc<VecScatter> scatter;
-  LibmeshPetscCall(VecScatterCreate(_vec, is,
-                                    v_local->_vec, is,
-                                    scatter.get()));
-
-  // Perform the scatter
-  VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
-
-  // Make sure ghost dofs are up to date
-  if (v_local->type() == GHOSTED)
-    v_local->close();
+  this->localize(v_local_in);
 }
@@ -1000,7 +966,8 @@ void PetscVector<T>::pointwise_mult (const NumericVector<T> & vec1,
   // Call PETSc function.
   LibmeshPetscCall(VecPointwiseMult(_vec, vec1_petsc->vec(), vec2_petsc->vec()));
 
-  libmesh_assert(this->comm().verify(int(this->type())));
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(this->type())));
 
   if (this->type() == GHOSTED)
     VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -1023,7 +990,8 @@ void PetscVector<T>::pointwise_divide (const NumericVector<T> & vec1,
   // Call PETSc function.
   LibmeshPetscCall(VecPointwiseDivide(_vec, vec1_petsc->vec(), vec2_petsc->vec()));
 
-  libmesh_assert(this->comm().verify(int(this->type())));
+  libmesh_assert(this->comm().verify(
+      static_cast<std::underlying_type<ParallelType>::type>(this->type())));
 
   if (this->type() == GHOSTED)
     VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -1103,29 +1071,37 @@ void PetscVector<T>::create_subvector(NumericVector<T> & subvector,
   // If not, we use the appropriate PETSc routines to initialize it.
   if (!petsc_subvector->initialized())
     {
-      libmesh_assert(petsc_subvector->_type == AUTOMATIC || petsc_subvector->_type == PARALLEL);
-
-      if (supplying_global_rows)
-        // Initialize the petsc_subvector to have enough space to hold
-        // the entries which will be scattered into it. Note: such an
-        // init() function (where we let PETSc decide the number of local
-        // entries) is not currently offered by the PetscVector
-        // class. Should we differentiate here between sequential and
-        // parallel vector creation based on this->n_processors() ?
-        LibmeshPetscCall(VecCreateMPI(this->comm().get(),
-                                      PETSC_DECIDE, // n_local
-                                      cast_int<PetscInt>(rows.size()), // n_global
-                                      &(petsc_subvector->_vec)));
+      if (this->type() == SERIAL)
+        {
+          libmesh_assert(this->comm().verify(rows.size()));
+          LibmeshPetscCall(VecCreateSeq(this->comm().get(), rows.size(), &(petsc_subvector->_vec)));
+          petsc_subvector->_type = SERIAL;
+        }
       else
-        LibmeshPetscCall(VecCreateMPI(this->comm().get(),
-                                      cast_int<PetscInt>(rows.size()),
-                                      PETSC_DETERMINE,
-                                      &(petsc_subvector->_vec)));
+        {
+          if (supplying_global_rows)
+            // Initialize the petsc_subvector to have enough space to hold
+            // the entries which will be scattered into it. Note: such an
+            // init() function (where we let PETSc decide the number of local
+            // entries) is not currently offered by the PetscVector
+            // class. Should we differentiate here between sequential and
+            // parallel vector creation based on this->n_processors() ?
+            LibmeshPetscCall(VecCreateMPI(this->comm().get(),
+                                          PETSC_DECIDE, // n_local
+                                          cast_int<PetscInt>(rows.size()), // n_global
+                                          &(petsc_subvector->_vec)));
+          else
+            LibmeshPetscCall(VecCreateMPI(this->comm().get(),
+                                          cast_int<PetscInt>(rows.size()),
+                                          PETSC_DETERMINE,
+                                          &(petsc_subvector->_vec)));
+
+          // We created a parallel vector
+          petsc_subvector->_type = PARALLEL;
+        }
 
       LibmeshPetscCall(VecSetFromOptions (petsc_subvector->_vec));
 
-      // We created a parallel vector
-      petsc_subvector->_type = PARALLEL;
 
       // Mark the subvector as initialized
       petsc_subvector->_is_initialized = true;
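
Usage sketch (not part of the patch; the function and variable names below are illustrative, and a PETSc-enabled libMesh build is assumed): with the `operator=` combinations documented in the header change, a parallel-to-serial assignment now performs a scatter-to-all, same-layout assignments reduce to `VecCopy`, and serial-to-parallel assignment is deliberately rejected.

```cpp
#include "libmesh/petsc_vector.h"

using namespace libMesh;

// Hypothetical helper, not from the patch: exercises the assignment paths.
void assignment_sketch (const Parallel::Communicator & comm)
{
  const numeric_index_type n_local = 10;
  const numeric_index_type n = n_local * comm.size();

  // Distributed source vector: each rank owns a contiguous block of n_local entries.
  PetscVector<Number> parallel_vec(comm, PARALLEL);
  parallel_vec.init(n, n_local, /*fast=*/false, PARALLEL);
  for (numeric_index_type i = parallel_vec.first_local_index();
       i < parallel_vec.last_local_index(); ++i)
    parallel_vec.set(i, static_cast<Number>(i));
  parallel_vec.close();

  // Parallel -> parallel with matching layouts: the SameToSame branch, i.e. a VecCopy.
  PetscVector<Number> parallel_copy(comm, PARALLEL);
  parallel_copy.init(n, n_local, /*fast=*/false, PARALLEL);
  parallel_copy = parallel_vec;

  // Parallel -> serial: the ParallelToSerial branch scatters the whole vector to
  // every rank (this is also the path localize() now delegates to).
  PetscVector<Number> serial_vec(comm, SERIAL);
  serial_vec.init(n, n, /*fast=*/false, SERIAL);
  serial_vec = parallel_vec;

  // Serial -> parallel is intentionally not implemented: the library cannot
  // verify the serial vector holds identical data on every rank.
}
```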
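A second small sketch (again illustrative, not from the patch) of the new single-rank behavior in `init()`: on a one-rank communicator every requested `ParallelType` now collapses to `SERIAL`, and the ghosted `init()` overload forwards to the plain serial path when the ghost list is empty.

```cpp
#include "libmesh/petsc_vector.h"

using namespace libMesh;

// Hypothetical check, not from the patch: only meaningful on one rank.
void single_rank_sketch (const Parallel::Communicator & comm)
{
  if (comm.size() != 1)
    return;

  // Ask for a PARALLEL vector anyway...
  PetscVector<Number> v(comm, PARALLEL);
  v.init(/*n=*/5, /*n_local=*/5, /*fast=*/false, PARALLEL);

  // ...but with the patch a single-rank communicator always yields a
  // sequential PETSc Vec, mirroring what PETSc itself would create.
  libmesh_assert_equal_to(v.type(), SERIAL);
}
```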