version 3.11-dev
multidomain/assembler.hh
Go to the documentation of this file.
1// -*- mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2// vi: set et ts=4 sw=4 sts=4:
3//
4// SPDX-FileCopyrightText: Copyright © DuMux Project contributors, see AUTHORS.md in root folder
5// SPDX-License-Identifier: GPL-3.0-or-later
6//
14#ifndef DUMUX_MULTIDOMAIN_ASSEMBLER_HH
15#define DUMUX_MULTIDOMAIN_ASSEMBLER_HH
16
17#include <cassert>
18#include <iostream>
19#include <type_traits>
20#include <tuple>
21
22#include <dune/common/hybridutilities.hh>
23#include <dune/istl/matrixindexset.hh>
24
36
39#include "subdomaincvfelocalassembler_.hh"
41#include "assemblerview.hh"
42
44#if HAVE_DUMUX_OLD_STAGGERED
45#include <dumux/multidomain/subdomainstaggeredlocalassembler.hh>
46#endif
47
48#include "assembler.hh"
49
50namespace Dumux::Experimental {
51
// NOTE(review): Doxygen-rendered listing — the class-name line (internal line 62,
// "MultiDomainAssembler" per the index at the end of this file) and the class-level
// documentation block are not shown in this view.
//
// A linear system assembler (residual and Jacobian) for grid-based schemes with
// multiple sub-domains, coupled via a coupling manager of type CMType.
61template<class MDTraits, class CMType, DiffMethod diffMethod, bool useImplicitAssembly = true>
63{
    // convenience alias: the TypeTag of sub-domain 'id', used to select local assemblers below
64 template<std::size_t id>
65 using SubDomainTypeTag = typename MDTraits::template SubDomain<id>::TypeTag;
66
67public:
    // export the multidomain traits
68 using Traits = MDTraits;
69
70 using Scalar = typename MDTraits::Scalar;
71
    // per-sub-domain types exported from the traits
72 template<std::size_t id>
73 using LocalResidual = typename MDTraits::template SubDomain<id>::LocalResidual;
74
75 template<std::size_t id>
76 using GridVariables = typename MDTraits::template SubDomain<id>::GridVariables;
77
78 template<std::size_t id>
79 using GridGeometry = typename MDTraits::template SubDomain<id>::GridGeometry;
80
81 template<std::size_t id>
82 using Problem = typename MDTraits::template SubDomain<id>::Problem;
83
    // multidomain linear-algebra types (block matrix / multitype vectors)
84 using JacobianMatrix = typename MDTraits::JacobianMatrix;
85 using SolutionVector = typename MDTraits::SolutionVector;
86 using ResidualType = typename MDTraits::ResidualVector;
87
88 using CouplingManager = CMType;
89
    //! Returns true if the assembler considers implicit assembly
93 static constexpr bool isImplicit()
94 { return useImplicitAssembly; }
95
96private:
97
    // tuples of shared pointers to the per-sub-domain problems/geometries/variables
98 using ProblemTuple = typename MDTraits::template TupleOfSharedPtrConst<Problem>;
99 using GridGeometryTuple = typename MDTraits::template TupleOfSharedPtrConst<GridGeometry>;
100 using GridVariablesTuple = typename MDTraits::template TupleOfSharedPtr<GridVariables>;
101
    // the type of this assembler (used for the sub-domain views below)
103 using ThisType = MultiDomainAssembler<MDTraits, CouplingManager, diffMethod, isImplicit()>;
104
    // restricted, sub-domain-specific view on this assembler, handed to local assemblers
105 template<std::size_t id>
106 using SubDomainAssemblerView = MultiDomainAssemblerSubDomainView<ThisType, id>;
107
    // maps a discretization method tag to the matching sub-domain local assembler type;
    // specialized below for each supported discretization scheme
108 template<class DiscretizationMethod, std::size_t id>
109 struct SubDomainAssemblerType;
110
111 template<std::size_t id>
112 struct SubDomainAssemblerType<DiscretizationMethods::CCTpfa, id>
113 {
114 using type = SubDomainCCLocalAssembler<id, SubDomainTypeTag<id>, SubDomainAssemblerView<id>, diffMethod, isImplicit()>;
115 };
116
117 template<std::size_t id>
118 struct SubDomainAssemblerType<DiscretizationMethods::CCMpfa, id>
119 {
120 using type = SubDomainCCLocalAssembler<id, SubDomainTypeTag<id>, SubDomainAssemblerView<id>, diffMethod, isImplicit()>;
121 };
122
123 template<std::size_t id, class DM>
124 struct SubDomainAssemblerType<DiscretizationMethods::CVFE<DM>, id>
125 {
126 using type = Dumux::Experimental::SubDomainCVFELocalAssembler<id, SubDomainTypeTag<id>, SubDomainAssemblerView<id>, diffMethod, isImplicit()>;
127 };
128
    // NOTE(review): requires HAVE_DUMUX_OLD_STAGGERED for the staggered assembler include above
129 template<std::size_t id>
130 struct SubDomainAssemblerType<DiscretizationMethods::Staggered, id>
131 {
132 using type = SubDomainStaggeredLocalAssembler<id, SubDomainTypeTag<id>, SubDomainAssemblerView<id>, diffMethod, isImplicit()>;
133 };
134
135 template<std::size_t id>
136 struct SubDomainAssemblerType<DiscretizationMethods::FCStaggered, id>
137 {
138 using type = SubDomainFaceCenteredLocalAssembler<id, SubDomainTypeTag<id>, SubDomainAssemblerView<id>, diffMethod, isImplicit()>;
139 };
140
    // the local assembler for sub-domain 'id', selected from its grid geometry's discretization method
141 template<std::size_t id>
142 using SubDomainAssembler = typename SubDomainAssemblerType<typename GridGeometry<id>::DiscretizationMethod, id>::type;
143
144public:
145
    // NOTE(review): constructor for stationary problems. Its signature line
    // (internal line 151, "MultiDomainAssembler(ProblemTuple problem, ...)", see the
    // index below) and the first initializer (line 155, presumably
    // couplingManager_(couplingManager)) are not shown in this rendered view.
152 GridGeometryTuple gridGeometry,
153 GridVariablesTuple gridVariables,
154 std::shared_ptr<CouplingManager> couplingManager)
156 , problemTuple_(std::move(problem))
157 , gridGeometryTuple_(std::move(gridGeometry))
158 , gridVariablesTuple_(std::move(gridVariables))
159 , timeLoop_()
160 , isStationaryProblem_(true)
161 , warningIssued_(false)
162 {
    // stationary + explicit is contradictory: explicit schemes need a previous time level
163 static_assert(isImplicit(), "Explicit assembler for stationary problem doesn't make sense!");
164 std::cout << "Instantiated assembler for a stationary problem." << std::endl;
165
    // NOTE(review): the first part of the enableMultithreading_ assignment
    // (internal lines 166-168) is missing from this view; only the runtime
    // parameter gate is visible
169 && getParam<bool>("Assembly.Multithreading", true);
170
171 maybeComputeColors_();
172 }
173
    // NOTE(review): constructor for instationary problems. Its signature line
    // (internal line 179) and the first initializer (line 185) are likewise not shown.
180 GridGeometryTuple gridGeometry,
181 GridVariablesTuple gridVariables,
182 std::shared_ptr<CouplingManager> couplingManager,
183 std::shared_ptr<const TimeLoop> timeLoop,
184 const SolutionVector& prevSol)
186 , problemTuple_(std::move(problem))
187 , gridGeometryTuple_(std::move(gridGeometry))
188 , gridVariablesTuple_(std::move(gridVariables))
189 , timeLoop_(timeLoop)
190 , prevSol_(&prevSol)
191 , isStationaryProblem_(false)
192 , warningIssued_(false)
193 {
194 std::cout << "Instantiated assembler for an instationary problem." << std::endl;
195
    // NOTE(review): first part of the assignment (lines 196-198) missing, as above
199 && getParam<bool>("Assembly.Multithreading", true);
200
201 maybeComputeColors_();
202 }
203
    // Assembles the global Jacobian and residual for the current solution.
    // NOTE(review): the signature line (internal line 208,
    // "void assembleJacobianAndResidual(const SolutionVector& curSol)" per the
    // index below) is not shown in this rendered view.
209 {
210 checkAssemblerState_();
211 resetJacobian_();
212 resetResidual_();
213
    // assemble one block row (sub-domain) of the multitype system at a time
214 using namespace Dune::Hybrid;
215 forEach(std::make_index_sequence<JacobianMatrix::N()>(), [&](const auto domainId)
216 {
217 auto& jacRow = (*jacobian_)[domainId];
218 auto& subRes = (*residual_)[domainId];
219 const auto& subSol = curSol[domainId];
220 this->assembleJacobianAndResidual_(domainId, jacRow, subRes, curSol);
221
    // post-process the assembled row: periodicity and problem constraints
222 const auto gridGeometry = std::get<domainId>(gridGeometryTuple_);
223 enforcePeriodicConstraints_(domainId, jacRow, subRes, *gridGeometry, subSol);
224 enforceProblemConstraints_(domainId, jacRow, subRes, *gridGeometry, subSol);
225 });
226 }
227
    // Computes the residual into the internally stored residual vector.
    // NOTE(review): the signature line (internal line 229,
    // "void assembleResidual(const SolutionVector& curSol)") is not shown here.
230 {
231 resetResidual_();
232 assembleResidual(*residual_, curSol);
233 }
234
    // Assembles the residual into a user-provided vector r.
    // NOTE(review): the signature line (internal line 236,
    // "void assembleResidual(ResidualType& r, const SolutionVector& curSol)") is not shown.
237 {
238 r = 0.0;
239
240 checkAssemblerState_();
241
242 // update the grid variables for the case of active caching
243 updateGridVariables(curSol);
244
    // assemble the sub-residual of each sub-domain
245 using namespace Dune::Hybrid;
246 forEach(integralRange(Dune::Hybrid::size(r)), [&](const auto domainId)
247 {
248 auto& subRes = r[domainId];
249 const auto& subSol = curSol[domainId];
250 this->assembleResidual_(domainId, subRes, curSol);
251
    // post-process the sub-residual: periodicity and problem constraints
252 const auto gridGeometry = std::get<domainId>(gridGeometryTuple_);
253 enforcePeriodicConstraints_(domainId, subRes, *gridGeometry, subSol);
254 enforceProblemConstraints_(domainId, subRes, *gridGeometry, subSol);
255 });
256 }
257
263 void setLinearSystem(std::shared_ptr<JacobianMatrix> A,
264 std::shared_ptr<ResidualType> r)
265 {
266 jacobian_ = A;
267 residual_ = r;
268
269 setJacobianBuildMode(*jacobian_);
270 setJacobianPattern_(*jacobian_);
271 setResidualSize_(*residual_);
272 }
273
    // Default-constructs the Jacobian and residual and prepares them for assembly.
    // NOTE(review): the signature line (internal line 278, "void setLinearSystem()")
    // is not shown in this rendered view.
279 {
280 jacobian_ = std::make_shared<JacobianMatrix>();
281 residual_ = std::make_shared<ResidualType>();
282
283 setJacobianBuildMode(*jacobian_);
284 setJacobianPattern_(*jacobian_);
285 setResidualSize_(*residual_);
286 }
287
    // Sets the build mode of every BCRS block of the Jacobian to 'random'.
    // NOTE(review): signature line (internal line 291,
    // "void setJacobianBuildMode(JacobianMatrix& jac) const") not shown here.
292 {
293 using namespace Dune::Hybrid;
294 forEach(std::make_index_sequence<JacobianMatrix::N()>(), [&](const auto i)
295 {
296 forEach(jac[i], [&](auto& jacBlock)
297 {
298 using BlockType = std::decay_t<decltype(jacBlock)>;
    // only set the mode if it has not been decided yet; anything other
    // than 'random' is rejected below
299 if (jacBlock.buildMode() == BlockType::BuildMode::unknown)
300 jacBlock.setBuildMode(BlockType::BuildMode::random);
301 else if (jacBlock.buildMode() != BlockType::BuildMode::random)
302 DUNE_THROW(Dune::NotImplemented, "Only BCRS matrices with random build mode are supported at the moment");
303 });
304 });
305 }
306
    // Resizes Jacobian and residual and recomputes assembly colors after grid adaption.
    // NOTE(review): signature line (internal line 310) not shown here.
311 {
312 setJacobianPattern_(*jacobian_);
313 setResidualSize_(*residual_);
314 maybeComputeColors_();
315 }
316
    // Updates the grid variables of all sub-domains with the given solution.
    // NOTE(review): signature line (internal line 320,
    // "void updateGridVariables(const SolutionVector& curSol)") not shown here.
321 {
322 using namespace Dune::Hybrid;
323 forEach(integralRange(Dune::Hybrid::size(gridVariablesTuple_)), [&](const auto domainId)
324 { this->gridVariables(domainId).update(curSol[domainId]); });
325 }
326
330 void resetTimeStep(const SolutionVector& curSol)
331 {
332 using namespace Dune::Hybrid;
333 forEach(integralRange(Dune::Hybrid::size(gridVariablesTuple_)), [&](const auto domainId)
334 { this->gridVariables(domainId).resetTimeStep(curSol[domainId]); });
335 }
336
338 template<std::size_t i>
339 std::size_t numDofs(Dune::index_constant<i> domainId) const
340 { return std::get<domainId>(gridGeometryTuple_)->numDofs(); }
341
343 template<std::size_t i>
344 const auto& problem(Dune::index_constant<i> domainId) const
345 { return *std::get<domainId>(problemTuple_); }
346
348 template<std::size_t i>
349 const auto& gridGeometry(Dune::index_constant<i> domainId) const
350 { return *std::get<domainId>(gridGeometryTuple_); }
351
353 template<std::size_t i>
354 const auto& gridView(Dune::index_constant<i> domainId) const
355 { return gridGeometry(domainId).gridView(); }
356
358 template<std::size_t i>
359 GridVariables<i>& gridVariables(Dune::index_constant<i> domainId)
360 { return *std::get<domainId>(gridVariablesTuple_); }
361
363 template<std::size_t i>
364 const GridVariables<i>& gridVariables(Dune::index_constant<i> domainId) const
365 { return *std::get<domainId>(gridVariablesTuple_); }
366
    // The coupling manager (const access).
    // NOTE(review): the signature line (internal line 368,
    // "const CouplingManager& couplingManager() const") is not shown here.
369 { return *couplingManager_; }
370
    // The full Jacobian matrix (signature line 372, "JacobianMatrix& jacobian()", not shown).
373 { return *jacobian_; }
374
    // The full residual vector (signature line 376, "ResidualType& residual()", not shown).
377 { return *residual_; }
378
    //! The solution of the previous time step (undefined before setPreviousSolution/ctor sets it)
380 const SolutionVector& prevSol() const
381 { return *prevSol_; }
382
    //! Set the time loop; a null time loop switches the assembler to stationary mode
387 void setTimeManager(std::shared_ptr<const TimeLoop> timeLoop)
388 { timeLoop_ = timeLoop; isStationaryProblem_ = !(static_cast<bool>(timeLoop)); }
389
    // Sets the solution from which to start the time integration
    // (signature line 394, "void setPreviousSolution(const SolutionVector& u)", not shown).
395 { prevSol_ = &u; }
396
    // Whether we are assembling a stationary problem
    // (signature line 400, "bool isStationaryProblem() const", not shown).
401 { return isStationaryProblem_; }
402
    //! Create a local residual object for sub-domain i (used by the local assembler)
406 template<std::size_t i>
407 LocalResidual<i> localResidual(Dune::index_constant<i> domainId) const
408 { return LocalResidual<i>(std::get<domainId>(problemTuple_).get(), timeLoop_.get()); }
409
410protected:
    //! the coupling manager coupling the sub-domains
412 std::shared_ptr<CouplingManager> couplingManager_;
413
414private:
418 void setJacobianPattern_(JacobianMatrix& jac) const
419 {
420 using namespace Dune::Hybrid;
421 forEach(std::make_index_sequence<JacobianMatrix::N()>(), [&](const auto domainI)
422 {
423 forEach(integralRange(Dune::Hybrid::size(jac[domainI])), [&](const auto domainJ)
424 {
425 const auto pattern = this->getJacobianPattern_(domainI, domainJ);
426 pattern.exportIdx(jac[domainI][domainJ]);
427 });
428 });
429 }
430
434 void setResidualSize_(ResidualType& res) const
435 {
436 using namespace Dune::Hybrid;
437 forEach(integralRange(Dune::Hybrid::size(res)), [&](const auto domainId)
438 { res[domainId].resize(this->numDofs(domainId)); });
439 }
440
441 // reset the residual vector to 0.0
442 void resetResidual_()
443 {
444 if(!residual_)
445 {
446 residual_ = std::make_shared<ResidualType>();
447 setResidualSize_(*residual_);
448 }
449
450 (*residual_) = 0.0;
451 }
452
453 // reset the jacobian vector to 0.0
454 void resetJacobian_()
455 {
456 if(!jacobian_)
457 {
458 jacobian_ = std::make_shared<JacobianMatrix>();
459 setJacobianBuildMode(*jacobian_);
460 setJacobianPattern_(*jacobian_);
461 }
462
463 (*jacobian_) = 0.0;
464 }
465
467 void maybeComputeColors_()
468 {
469 if constexpr (CouplingManagerSupportsMultithreadedAssembly<CouplingManager>::value)
470 if (enableMultithreading_)
471 couplingManager_->computeColorsForAssembly();
472 }
473
474 // check if the assembler is in a correct state for assembly
475 void checkAssemblerState_() const
476 {
477 if (!isStationaryProblem_ && !prevSol_)
478 DUNE_THROW(Dune::InvalidStateException, "Assembling instationary problem but previous solution was not set!");
479
480 if (isStationaryProblem_ && prevSol_)
481 DUNE_THROW(Dune::InvalidStateException, "Assembling stationary problem but a previous solution was set."
482 << " Did you forget to set the timeLoop to make this problem instationary?");
483 }
484
485 template<std::size_t i, class JacRow, class SubRes>
486 void assembleJacobianAndResidual_(Dune::index_constant<i> domainId, JacRow& jacRow, SubRes& subRes,
487 const SolutionVector& curSol)
488 {
489 assemble_(domainId, [&](const auto& element)
490 {
491 MultiDomainAssemblerSubDomainView view{*this, domainId};
492 SubDomainAssembler<i> subDomainAssembler(view, element, curSol, *couplingManager_);
493 subDomainAssembler.assembleJacobianAndResidual(jacRow, subRes, gridVariablesTuple_);
494 });
495 }
496
497 template<std::size_t i, class SubRes>
498 void assembleResidual_(Dune::index_constant<i> domainId, SubRes& subRes,
499 const SolutionVector& curSol)
500 {
501 assemble_(domainId, [&](const auto& element)
502 {
503 MultiDomainAssemblerSubDomainView view{*this, domainId};
504 SubDomainAssembler<i> subDomainAssembler(view, element, curSol, *couplingManager_);
505 subDomainAssembler.assembleResidual(subRes);
506 });
507 }
508
    // Runs the given per-element assembly functor over all elements of sub-domain i,
    // either multithreaded (if supported/enabled) or serially, and synchronizes
    // success across MPI processes so all ranks fail together.
514 template<std::size_t i, class AssembleElementFunc>
515 void assemble_(Dune::index_constant<i> domainId, AssembleElementFunc&& assembleElement) const
516 {
517 // a state that will be checked on all processes
518 bool succeeded = false;
519
520 // try assembling using the local assembly function
521 try
522 {
523 if constexpr (CouplingManagerSupportsMultithreadedAssembly<CouplingManager>::value)
524 {
525 if (enableMultithreading_)
526 {
527 couplingManager_->assembleMultithreaded(
528 domainId, std::forward<AssembleElementFunc>(assembleElement)
529 );
    // NOTE(review): this early return skips the parallel success
    // reduction below — presumably multithreaded assembly implies a
    // single-process run; confirm against the coupling manager docs
530 return;
531 }
532 }
533
534 // fallback for coupling managers that don't support multithreaded assembly (yet)
535 // or if multithreaded assembly is disabled
536 // let the local assembler add the element contributions
537 for (const auto& element : elements(gridView(domainId)))
538 assembleElement(element);
539
540 // if we get here, everything worked well on this process
541 succeeded = true;
542 }
543 // throw exception if a problem occurred
544 catch (NumericalProblem &e)
545 {
546 std::cout << "rank " << gridView(domainId).comm().rank()
547 << " caught an exception while assembling:" << e.what()
548 << "\n";
549 succeeded = false;
550 }
551
552 // make sure everything worked well on all processes
553 if (gridView(domainId).comm().size() > 1)
554 succeeded = gridView(domainId).comm().min(succeeded);
555
556 // if not succeeded rethrow the error on all processes
557 if (!succeeded)
558 DUNE_THROW(NumericalProblem, "A process did not succeed in linearizing the system");
559 }
560
561 // get diagonal block pattern
562 template<std::size_t i, std::size_t j, typename std::enable_if_t<(i==j), int> = 0>
563 Dune::MatrixIndexSet getJacobianPattern_(Dune::index_constant<i> domainI,
564 Dune::index_constant<j> domainJ) const
565 {
566 const auto& gg = gridGeometry(domainI);
567 auto pattern = getJacobianPattern<isImplicit()>(gg);
568 couplingManager_->extendJacobianPattern(domainI, pattern);
569 return pattern;
570 }
571
572 // get coupling block pattern
573 template<std::size_t i, std::size_t j, typename std::enable_if_t<(i!=j), int> = 0>
574 Dune::MatrixIndexSet getJacobianPattern_(Dune::index_constant<i> domainI,
575 Dune::index_constant<j> domainJ) const
576 {
577 return getCouplingJacobianPattern<isImplicit()>(*couplingManager_,
578 domainI, gridGeometry(domainI),
579 domainJ, gridGeometry(domainJ));
580 }
581
582 // build periodic constraints into the system matrix
    // For each periodic dof pair (first < second): fold the second equation row
    // (and its coupling blocks) into the first, then replace the second row by the
    // algebraic constraint u[second] - u[first] = 0. Statement order matters here:
    // rows must be accumulated before they are overwritten.
583 template<std::size_t i, class JacRow, class Res, class GG, class Sol>
584 void enforcePeriodicConstraints_(Dune::index_constant<i> domainI, JacRow& jacRow, Res& res, const GG& gridGeometry, const Sol& curSol) const
585 {
    // compiled out entirely for grid geometries without a periodic dof map
586 if constexpr (Dumux::Detail::hasPeriodicDofMap<GG>())
587 {
588 for (const auto& m : gridGeometry.periodicDofMap())
589 {
    // handle each pair only once (the map presumably contains both orientations)
590 if (m.first < m.second)
591 {
592 auto& jac = jacRow[domainI];
593
594 // add the second row to the first
595 res[m.first] += res[m.second];
596
597 const auto end = jac[m.second].end();
598 for (auto it = jac[m.second].begin(); it != end; ++it)
599 jac[m.first][it.index()] += (*it);
600
601
602 // enforce the solution of the first periodic DOF to the second one
603 res[m.second] = curSol[m.second] - curSol[m.first];
604
605 // set derivatives accordingly in jacobian, i.e. id for m.second and -id for m.first
606 auto setMatrixBlock = [] (auto& matrixBlock, double diagValue)
607 {
608 for (int eIdx = 0; eIdx < matrixBlock.N(); ++eIdx)
609 matrixBlock[eIdx][eIdx] = diagValue;
610 };
611
    // zero out the whole second row, then place +-identity blocks
612 for (auto it = jac[m.second].begin(); it != end; ++it)
613 {
614 auto& matrixBlock = *it;
615 matrixBlock = 0.0;
616
617 assert(matrixBlock.N() == matrixBlock.M());
618 if(it.index() == m.second)
619 setMatrixBlock(matrixBlock, 1.0);
620
621 if(it.index() == m.first)
622 setMatrixBlock(matrixBlock, -1.0);
623
624 }
625
    // same accumulation/zeroing for all coupling blocks of this row
    // (all sub-domains except domainI)
626 using namespace Dune::Hybrid;
627 forEach(makeIncompleteIntegerSequence<JacRow::size(), domainI>(), [&](const auto couplingDomainId)
628 {
629 auto& jacCoupling = jacRow[couplingDomainId];
630
631 for (auto it = jacCoupling[m.second].begin(); it != jacCoupling[m.second].end(); ++it)
632 jacCoupling[m.first][it.index()] += (*it);
633
634 for (auto it = jacCoupling[m.second].begin(); it != jacCoupling[m.second].end(); ++it)
635 (*it) = 0.0;
636 });
637 }
638 }
639 }
640 }
641
642 // enforce global constraints into the system matrix
    // For every constrained equation of every constrained dof reported by the
    // sub-problem: write the Dirichlet-type defect into the residual, zero the
    // equation's row in the diagonal and all coupling blocks, and put a 1 on the
    // diagonal entry (dofIdx, dofIdx, eqIdx, pvIdx).
643 template<std::size_t i, class JacRow, class Res, class GG, class Sol>
644 void enforceProblemConstraints_(Dune::index_constant<i> domainI, JacRow& jacRow, Res& res, const GG& gridGeometry, const Sol& curSol) const
645 {
    // compiled out entirely for problems without global constraints
646 if constexpr (Dumux::Detail::hasSubProblemGlobalConstraints<Problem<domainI>>())
647 {
648 auto& jac = jacRow[domainI];
649 auto applyDirichletConstraint = [&] (const auto& dofIdx,
650 const auto& values,
651 const auto eqIdx,
652 const auto pvIdx)
653 {
    // residual of a constrained equation: deviation from the prescribed value
654 (res)[dofIdx][eqIdx] = curSol[dofIdx][pvIdx] - values[pvIdx];
655
    // zero the constrained equation's row in the diagonal block ...
656 auto& row = jac[dofIdx];
657 for (auto col = row.begin(); col != row.end(); ++col)
658 row[col.index()][eqIdx] = 0.0;
659
    // ... and set a unit derivative w.r.t. the constrained primary variable
660 jac[dofIdx][dofIdx][eqIdx][pvIdx] = 1.0;
661
    // zero the same equation row in all coupling blocks (other sub-domains)
662 using namespace Dune::Hybrid;
663 forEach(makeIncompleteIntegerSequence<JacRow::size(), domainI>(), [&](const auto couplingDomainId)
664 {
665 auto& jacCoupling = jacRow[couplingDomainId];
666
667 auto& rowCoupling = jacCoupling[dofIdx];
668 for (auto colCoupling = rowCoupling.begin(); colCoupling != rowCoupling.end(); ++colCoupling)
669 rowCoupling[colCoupling.index()][eqIdx] = 0.0;
670 });
671
672 };
673
674 for (const auto& constraintData : this->problem(domainI).constraints())
675 {
676 const auto& constraintInfo = constraintData.constraintInfo();
677 const auto& values = constraintData.values();
678 const auto dofIdx = constraintData.dofIndex();
679 // set the constraint in residual and jacobian
680 for (int eqIdx = 0; eqIdx < constraintInfo.size(); ++eqIdx)
681 {
682 if (constraintInfo.isConstraintEquation(eqIdx))
683 {
684 const auto pvIdx = constraintInfo.eqToPriVarIndex(eqIdx);
685 assert(0 <= pvIdx && pvIdx < constraintInfo.size());
686 applyDirichletConstraint(dofIdx, values, eqIdx, pvIdx);
687 }
688 }
689 }
690 }
691 }
692
693 // build periodic constraints in residual only
694 template<std::size_t i, class Res, class GG, class Sol>
695 void enforcePeriodicConstraints_(Dune::index_constant<i> domainI, Res& res, const GG& gridGeometry, const Sol& curSol) const
696 {
697 if constexpr (Dumux::Detail::hasPeriodicDofMap<GG>())
698 {
699 for (const auto& m : gridGeometry.periodicDofMap())
700 {
701 if (m.first < m.second)
702 {
703 // add the second row to the first
704 res[m.first] += res[m.second];
705
706 // enforce the solution of the first periodic DOF to the second one
707 res[m.second] = curSol[m.second] - curSol[m.first];
708 }
709 }
710 }
711 }
712
713 // enforce global constraints in residual only
714 template<std::size_t i, class Res, class GG, class Sol>
715 void enforceProblemConstraints_(Dune::index_constant<i> domainI, Res& res, const GG& gridGeometry, const Sol& curSol) const
716 {
717 if constexpr (Dumux::Detail::hasSubProblemGlobalConstraints<Problem<domainI>>())
718 {
719 for (const auto& constraintData : this->problem(domainI).constraints())
720 {
721 const auto& constraintInfo = constraintData.constraintInfo();
722 const auto& values = constraintData.values();
723 const auto dofIdx = constraintData.dofIndex();
724 for (int eqIdx = 0; eqIdx < constraintInfo.size(); ++eqIdx)
725 {
726 if (constraintInfo.isConstraintEquation(eqIdx))
727 {
728 const auto pvIdx = constraintInfo.eqToPriVarIndex(eqIdx);
729 assert(0 <= pvIdx && pvIdx < constraintInfo.size());
730 res[dofIdx][eqIdx] = curSol[dofIdx][pvIdx] - values[pvIdx];
731 }
732 }
733 }
734 }
735 }
736
    //! the sub-domain problems
738 ProblemTuple problemTuple_;
739
    //! the sub-domain grid geometries
741 GridGeometryTuple gridGeometryTuple_;
742
    //! the sub-domain grid variables
744 GridVariablesTuple gridVariablesTuple_;
745
    //! the time loop (null for stationary problems)
747 std::shared_ptr<const TimeLoop> timeLoop_;
748
    //! non-owning pointer to the previous time level's solution (instationary only)
750 const SolutionVector* prevSol_ = nullptr;
751
    //! whether the assembler is in stationary mode (set by the constructors / setTimeManager)
753 bool isStationaryProblem_;
754
    //! the linear system: shared so users can hand in their own via setLinearSystem
756 std::shared_ptr<JacobianMatrix> jacobian_;
757 std::shared_ptr<ResidualType> residual_;
758
    // NOTE(review): set by the constructors but never read in this view — possibly vestigial
760 mutable bool warningIssued_;
761
    //! whether multithreaded assembly is attempted (also gated by coupling-manager support)
763 bool enableMultithreading_ = false;
764};
765
766} // end namespace Dumux::Experimental
767
768#endif
Subdomain-specific views on multidomain assemblers.
A linear system assembler (residual and Jacobian) for general schemes (FV, CVFE, FE,...
Definition: multidomain/assembler.hh:63
typename MDTraits::ResidualVector ResidualType
Definition: multidomain/assembler.hh:86
bool isStationaryProblem() const
Whether we are assembling a stationary or instationary problem.
Definition: multidomain/assembler.hh:400
typename MDTraits::template SubDomain< id >::GridVariables GridVariables
Definition: multidomain/assembler.hh:76
void setJacobianBuildMode(JacobianMatrix &jac) const
Sets the jacobian build mode.
Definition: multidomain/assembler.hh:291
LocalResidual< i > localResidual(Dune::index_constant< i > domainId) const
Create a local residual object (used by the local assembler)
Definition: multidomain/assembler.hh:407
void updateGridVariables(const SolutionVector &curSol)
Updates the grid variables with the given solution.
Definition: multidomain/assembler.hh:320
ResidualType & residual()
the full residual vector
Definition: multidomain/assembler.hh:376
void updateAfterGridAdaption()
Resizes jacobian and residual and recomputes colors.
Definition: multidomain/assembler.hh:310
typename MDTraits::template SubDomain< id >::GridGeometry GridGeometry
Definition: multidomain/assembler.hh:79
void setLinearSystem(std::shared_ptr< JacobianMatrix > A, std::shared_ptr< ResidualType > r)
Tells the assembler which jacobian and residual to use. This also resizes the containers to the requi...
Definition: multidomain/assembler.hh:263
const GridVariables< i > & gridVariables(Dune::index_constant< i > domainId) const
the grid variables of domain i
Definition: multidomain/assembler.hh:364
std::size_t numDofs(Dune::index_constant< i > domainId) const
the number of dof locations of domain i
Definition: multidomain/assembler.hh:339
MultiDomainAssembler(ProblemTuple problem, GridGeometryTuple gridGeometry, GridVariablesTuple gridVariables, std::shared_ptr< CouplingManager > couplingManager)
The constructor for stationary problems.
Definition: multidomain/assembler.hh:151
void setTimeManager(std::shared_ptr< const TimeLoop > timeLoop)
Set time loop for instationary problems.
Definition: multidomain/assembler.hh:387
const auto & gridView(Dune::index_constant< i > domainId) const
the grid view of domain i
Definition: multidomain/assembler.hh:354
void assembleResidual(ResidualType &r, const SolutionVector &curSol)
assemble a residual r
Definition: multidomain/assembler.hh:236
typename MDTraits::SolutionVector SolutionVector
Definition: multidomain/assembler.hh:85
std::shared_ptr< CouplingManager > couplingManager_
the coupling manager coupling the sub domains
Definition: multidomain/assembler.hh:412
void setPreviousSolution(const SolutionVector &u)
Sets the solution from which to start the time integration. Has to be called prior to assembly for ti...
Definition: multidomain/assembler.hh:394
JacobianMatrix & jacobian()
the full Jacobian matrix
Definition: multidomain/assembler.hh:372
typename MDTraits::Scalar Scalar
Definition: multidomain/assembler.hh:70
MDTraits Traits
Definition: multidomain/assembler.hh:68
typename MDTraits::JacobianMatrix JacobianMatrix
Definition: multidomain/assembler.hh:84
void setLinearSystem()
The version without arguments uses the default constructor to create the jacobian and residual object...
Definition: multidomain/assembler.hh:278
GridVariables< i > & gridVariables(Dune::index_constant< i > domainId)
the grid variables of domain i
Definition: multidomain/assembler.hh:359
static constexpr bool isImplicit()
Returns true if the assembler considers implicit assembly.
Definition: multidomain/assembler.hh:93
const SolutionVector & prevSol() const
the solution of the previous time step
Definition: multidomain/assembler.hh:380
typename MDTraits::template SubDomain< id >::LocalResidual LocalResidual
Definition: multidomain/assembler.hh:73
CMType CouplingManager
Definition: multidomain/assembler.hh:88
MultiDomainAssembler(ProblemTuple problem, GridGeometryTuple gridGeometry, GridVariablesTuple gridVariables, std::shared_ptr< CouplingManager > couplingManager, std::shared_ptr< const TimeLoop > timeLoop, const SolutionVector &prevSol)
The constructor for instationary problems.
Definition: multidomain/assembler.hh:179
void resetTimeStep(const SolutionVector &curSol)
Resets the grid variables to the last time step.
Definition: multidomain/assembler.hh:330
const auto & problem(Dune::index_constant< i > domainId) const
the problem of domain i
Definition: multidomain/assembler.hh:344
const CouplingManager & couplingManager() const
the coupling manager
Definition: multidomain/assembler.hh:368
void assembleResidual(const SolutionVector &curSol)
compute the residuals using the internal residual
Definition: multidomain/assembler.hh:229
typename MDTraits::template SubDomain< id >::Problem Problem
Definition: multidomain/assembler.hh:82
const auto & gridGeometry(Dune::index_constant< i > domainId) const
the finite volume grid geometry of domain i
Definition: multidomain/assembler.hh:349
void assembleJacobianAndResidual(const SolutionVector &curSol)
Assembles the global Jacobian of the residual and the residual for the current solution.
Definition: multidomain/assembler.hh:208
The cell-centered scheme multidomain local assembler.
Definition: experimental/assembly/subdomaincclocalassembler.hh:195
The CVFE scheme multidomain local assembler.
Definition: experimental/assembly/subdomaincvfelocalassembler.hh:207
Subdomain-specific view on a multidomain assembler. Allows retrieval of sub-domain specific objects w...
Definition: assemblerview.hh:31
The cell-centered scheme multidomain local assembler.
Definition: multidomain/subdomaincclocalassembler.hh:270
The default time loop for instationary simulations.
Definition: common/timeloop.hh:139
Defines all properties used in Dumux.
Manages the handling of time dependent problems.
Helper function to generate Jacobian pattern for multi domain models.
An enum class to define various differentiation methods available in order to compute the derivatives...
Some exceptions thrown in DuMux
dune-grid capabilities compatibility layer
constexpr bool isSerial()
Checking whether the backend is serial.
Definition: multithreading.hh:45
Helper function to generate Jacobian pattern for different discretization methods.
The available discretization methods in Dumux.
A linear system assembler (residual and Jacobian) for general grid-based schemes with multiple domain...
A multidomain local assembler for Jacobian and residual contribution per element (cell-centered metho...
Multithreading in Dumux.
Definition: assembly/assembler.hh:44
bool allGridsSupportsMultithreading(const std::tuple< GG... > &gridGeometries)
Definition: multistagemultidomainfvassembler.hh:60
typename Detail::ConcatSeq< decltype(std::make_index_sequence< e >{}), e+1, decltype(std::make_index_sequence<(n > e) ?(n - e - 1) :0 >{})>::type makeIncompleteIntegerSequence
Definition: utility.hh:59
Provides a helper class for nonoverlapping decomposition.
Type traits to detect periodicity support.
Type trait that is specialized for coupling manager supporting multithreaded assembly.
Definition: multistagemultidomainfvassembler.hh:78
An assembler for Jacobian and residual contribution per element (face-centered staggered methods) for...
Utilities for template meta programming.