CpGridData.hpp
1//===========================================================================
2//
3// File: CpGridData.hpp
4//
5// Created: Sep 17 21:11:41 2013
6//
7// Author(s): Atgeirr F Rasmussen <atgeirr@sintef.no>
8// Bård Skaflestad <bard.skaflestad@sintef.no>
9// Markus Blatt <markus@dr-blatt.de>
10// Antonella Ritorto <antonella.ritorto@opm-op.com>
11//
12// Comment: Major parts of this file originated in dune/grid/CpGrid.hpp
13// and were transferred here during the refactoring for parallelization.
14//
15// $Date$
16//
17// $Revision$
18//
19//===========================================================================
20
21/*
22 Copyright 2009, 2010 SINTEF ICT, Applied Mathematics.
23 Copyright 2009, 2010, 2013, 2022-2023 Equinor ASA.
24 Copyright 2013 Dr. Blatt - HPC-Simulation-Software & Services
25
26 This file is part of The Open Porous Media project (OPM).
27
28 OPM is free software: you can redistribute it and/or modify
29 it under the terms of the GNU General Public License as published by
30 the Free Software Foundation, either version 3 of the License, or
31 (at your option) any later version.
32
33 OPM is distributed in the hope that it will be useful,
34 but WITHOUT ANY WARRANTY; without even the implied warranty of
35 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
36 GNU General Public License for more details.
37
38 You should have received a copy of the GNU General Public License
39 along with OPM. If not, see <http://www.gnu.org/licenses/>.
40*/
48#ifndef OPM_CPGRIDDATA_HEADER
49#define OPM_CPGRIDDATA_HEADER
50
51
52#include <dune/common/parallel/mpihelper.hh>
53#ifdef HAVE_DUNE_ISTL
54#include <dune/istl/owneroverlapcopy.hh>
55#endif
56
57#include <dune/common/parallel/communication.hh>
58#include <dune/common/parallel/variablesizecommunicator.hh>
59#include <dune/grid/common/gridenums.hh>
60
61#if HAVE_ECL_INPUT
62#include <opm/input/eclipse/EclipseState/Grid/NNC.hpp>
63#endif
64
66
67#include "Entity2IndexDataHandle.hpp"
68#include "CpGridDataTraits.hpp"
69//#include "DataHandleWrappers.hpp"
70//#include "GlobalIdMapping.hpp"
71#include "Geometry.hpp"
72
73#include <array>
74#include <set>
75#include <vector>
76
77namespace Opm
78{
79class EclipseState;
80}
81namespace Dune
82{
83class CpGrid;
84
85namespace cpgrid
86{
87
88class IndexSet;
89class IdSet;
90class LevelGlobalIdSet;
91class PartitionTypeIndicator;
92template<int,int> class Geometry;
93template<int> class Entity;
94template<int> class EntityRep;
95}
96}
97
98void disjointPatches_check(Dune::CpGrid&,
99 const std::vector<std::array<int,3>>&,
100 const std::vector<std::array<int,3>>&);
101
102void lookup_check(const Dune::CpGrid&);
103
104void refine_and_check(const Dune::cpgrid::Geometry<3, 3>&,
105 const std::array<int, 3>&,
106 bool);
107
108void refinePatch_and_check(const std::array<int,3>&,
109 const std::array<int,3>&,
110 const std::array<int,3>&);
111
112void refinePatch_and_check(Dune::CpGrid&,
113 const std::vector<std::array<int,3>>&,
114 const std::vector<std::array<int,3>>&,
115 const std::vector<std::array<int,3>>&,
116 const std::vector<std::string>&);
117
118void check_global_refine(const Dune::CpGrid&,
119 const Dune::CpGrid&);
120
121namespace Dune
122{
123namespace cpgrid
124{
125namespace mover
126{
127template<class T, int i> struct Mover;
128}
129
class CpGridData
135{
136 template<class T, int i> friend struct mover::Mover;
137
138 friend class GlobalIdSet;
139 friend class HierarchicIterator;
140 friend class Dune::cpgrid::IndexSet;
141
142 friend
143 void ::disjointPatches_check(Dune::CpGrid&,
144 const std::vector<std::array<int,3>>&,
145 const std::vector<std::array<int,3>>&);
146
147 friend
148 void ::lookup_check(const Dune::CpGrid&);
149
150 friend
151 void ::refine_and_check(const Dune::cpgrid::Geometry<3, 3>&,
152 const std::array<int, 3>&,
153 bool);
154 friend
155 void ::refinePatch_and_check(const std::array<int,3>&,
156 const std::array<int,3>&,
157 const std::array<int,3>&);
158
159 friend
160 void ::refinePatch_and_check(Dune::CpGrid&,
161 const std::vector<std::array<int,3>>&,
162 const std::vector<std::array<int,3>>&,
163 const std::vector<std::array<int,3>>&,
164 const std::vector<std::string>&);
165
166 friend
167 void ::check_global_refine(const Dune::CpGrid&,
168 const Dune::CpGrid&);
169
170private:
171 CpGridData(const CpGridData& g);
172
173public:
174 enum{
175#ifndef MAX_DATA_COMMUNICATED_PER_ENTITY
183#else
188 MAX_DATA_PER_CELL = MAX_DATA_COMMUNICATED_PER_ENTITY
189#endif
190 };
191
195 explicit CpGridData(MPIHelper::MPICommunicator comm, std::vector<std::shared_ptr<CpGridData>>& data);
196
197
198
200 explicit CpGridData(std::vector<std::shared_ptr<CpGridData>>& data);
202 ~CpGridData();
203
204
205
206
208 int size(int codim) const;
209
211 int size (GeometryType type) const
212 {
213 if (type.isCube()) {
214 return size(3 - type.dim());
215 } else {
216 return 0;
217 }
218 }
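    // For example, a cube GeometryType of dimension 3 (a hexahedral cell) maps to
    // codim 0, dimension 2 (a quadrilateral face) to codim 1, and dimension 0
    // (a vertex) to codim 3. Non-cube geometry types do not occur in a
    // corner-point grid, hence the count of 0.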
222 void readSintefLegacyFormat(const std::string& grid_prefix);
223
227 void writeSintefLegacyFormat(const std::string& grid_prefix) const;
228
234 void readEclipseFormat(const std::string& filename, bool periodic_extension, bool turn_normals = false);
235
236#if HAVE_ECL_INPUT
245 void processEclipseFormat(const Opm::Deck& deck, bool periodic_extension, bool turn_normals = false, bool clip_z = false,
246 const std::vector<double>& poreVolume = std::vector<double>());
247
260 std::vector<std::size_t> processEclipseFormat(const Opm::EclipseGrid* ecl_grid, Opm::EclipseState* ecl_state,
261 bool periodic_extension, bool turn_normals = false, bool clip_z = false,
262 bool pinchActive = true);
263#endif
264
276 void processEclipseFormat(const grdecl& input_data,
277#if HAVE_ECL_INPUT
278 Opm::EclipseState* ecl_state,
279#endif
280 std::array<std::set<std::pair<int, int>>, 2>& nnc,
281 bool remove_ij_boundary, bool turn_normals, bool pinchActive,
282 double tolerance_unique_points);
283
291 void getIJK(int c, std::array<int,3>& ijk) const
292 {
293 int gc = global_cell_[c];
294 ijk[0] = gc % logical_cartesian_size_[0]; gc /= logical_cartesian_size_[0];
295 ijk[1] = gc % logical_cartesian_size_[1];
296 ijk[2] = gc / logical_cartesian_size_[1];
297 }
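    // Worked example (hypothetical values): with logical_cartesian_size_ = {4, 3, 5}
    // and global_cell_[c] = 23, the statements above give
    // ijk = {23 % 4, (23/4) % 3, (23/4) / 3} = {3, 2, 1},
    // matching the Cartesian numbering gc = i + 4*(j + 3*k) = 3 + 4*(2 + 3*1) = 23.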
298
299private:
307 std::array<int,3> getPatchDim(const std::array<int,3>& startIJK, const std::array<int,3>& endIJK) const;
308
316 std::vector<int> getPatchCorners(const std::array<int,3>& startIJK, const std::array<int,3>& endIJK) const;
317
325 std::vector<int> getPatchFaces(const std::array<int,3>& startIJK, const std::array<int,3>& endIJK) const;
326
334 std::vector<int> getPatchCells(const std::array<int,3>& startIJK, const std::array<int,3>& endIJK) const;
335
343 std::vector<int> getPatchBoundaryCorners(const std::array<int,3>& startIJK, const std::array<int,3>& endIJK) const;
344
350 bool disjointPatches(const std::vector<std::array<int,3>>& startIJK_vec, const std::vector<std::array<int,3>>& endIJK_vec) const;
351
359 std::vector<int>
360 getPatchesCells(const std::vector<std::array<int,3>>& startIJK_vec, const std::vector<std::array<int,3>>& endIJK_vec) const;
361
376 Geometry<3,3> cellifyPatch(const std::array<int,3>& startIJK, const std::array<int,3>& endIJK,
377 const std::vector<int>& patch_cells, DefaultGeometryPolicy& cellifiedPatch_geometry,
378 std::array<int,8>& cellifiedPatch_to_point,
379 std::array<int,8>& allcorners_cellifiedPatch) const;
380
381    // @brief Compute the entry-wise average of a vector of array<double,3>.
382    //
383    // @param [in] vec    Vector of array<double,3>.
384    // @return array<double,3> (average of the entries of the given vector).
385 std::array<double,3> getAverageArr(const std::vector<std::array<double,3>>& vec) const;
386
387public:
409 std::tuple< const std::shared_ptr<CpGridData>,
410 const std::vector<std::array<int,2>>, // parent_to_refined_corners(~boundary_old_to_new_corners)
411 const std::vector<std::tuple<int,std::vector<int>>>, // parent_to_children_faces (~boundary_old_to_new_faces)
412 const std::tuple<int, std::vector<int>>, // parent_to_children_cells
413 const std::vector<std::array<int,2>>, // child_to_parent_faces
414 const std::vector<std::array<int,2>>> // child_to_parent_cells
415 refineSingleCell(const std::array<int,3>& cells_per_dim, const int& parent_idx) const;
416
429 std::tuple< std::shared_ptr<CpGridData>,
430 const std::vector<std::array<int,2>>, // boundary_old_to_new_corners
431 const std::vector<std::tuple<int,std::vector<int>>>, // boundary_old_to_new_faces
432 const std::vector<std::tuple<int,std::vector<int>>>, // parent_to_children_faces
433                const std::vector<std::tuple<int,std::vector<int>>>, // parent_to_children_cells
434 const std::vector<std::array<int,2>>, // child_to_parent_faces
435 const std::vector<std::array<int,2>>> // child_to_parent_cells
436 refinePatch(const std::array<int,3>& cells_per_dim, const std::array<int,3>& startIJK, const std::array<int,3>& endIJK) const;
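    // Illustrative call (hypothetical values): refinePatch({2,2,2}, {1,1,1}, {3,3,3})
    // would refine the 2x2x2 block of cells spanning IJK [1,3)x[1,3)x[1,3),
    // subdividing each of those cells into 2x2x2 children.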
437
438    // @brief Compute the center of an entity/element/cell in the Eclipse way:
439    // - Average of the 4 corners of the bottom face.
440    // - Average of the 4 corners of the top face.
441    // Return the average of those two face averages.
442    // @param [in] idx Index of a cell.
443    // @return 'eclipse centroid'
444 std::array<double,3> computeEclCentroid(const int idx) const;
445
446    // @brief Compute the center of an entity/element/cell in the Eclipse way:
447    // - Average of the 4 corners of the bottom face.
448    // - Average of the 4 corners of the top face.
449    // Return the average of those two face averages.
450    // @param [in] elem The cell, given as an Entity<0>.
451    // @return 'eclipse centroid'
452 std::array<double,3> computeEclCentroid(const Entity<0>& elem) const;
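    // Note: since the bottom and top faces contribute four corners each, averaging
    // the two face averages equals averaging all eight corner coordinates of the cell.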
453
454 // Make unique boundary ids for all intersections.
455 void computeUniqueBoundaryIds();
456
460 bool uniqueBoundaryIds() const
461 {
462 return use_unique_boundary_ids_;
463 }
464
467 void setUniqueBoundaryIds(bool uids)
468 {
469 use_unique_boundary_ids_ = uids;
470 if (use_unique_boundary_ids_ && unique_boundary_ids_.empty()) {
471 computeUniqueBoundaryIds();
472 }
473 }
474
478 const std::vector<double>& zcornData() const {
479 return zcorn;
480 }
481
482
485 const IndexSet& indexSet() const
486 {
487 return *index_set_;
488 }
492 const std::array<int, 3>& logicalCartesianSize() const
493 {
494 return logical_cartesian_size_;
495 }
496
500 void distributeGlobalGrid(CpGrid& grid,
501 const CpGridData& view_data,
502 const std::vector<int>& cell_part);
503
509 template<class DataHandle>
510 void communicate(DataHandle& data, InterfaceType iftype, CommunicationDirection dir);
511
515 using Communication = CpGridDataTraits::Communication;
516 using CollectiveCommunication = CpGridDataTraits::CollectiveCommunication;
517#if HAVE_MPI
519 using AttributeSet = CpGridDataTraits::AttributeSet;
520
522 using Communicator = CpGridDataTraits::Communicator;
523
525 using InterfaceMap = CpGridDataTraits::InterfaceMap;
526
528 using CommunicationType = CpGridDataTraits::CommunicationType;
529
531 using ParallelIndexSet = CpGridDataTraits::ParallelIndexSet;
532
534 using RemoteIndices = CpGridDataTraits::RemoteIndices;
535
539 CommunicationType& cellCommunication()
540 {
541 return cell_comm_;
542 }
543
547 const CommunicationType& cellCommunication() const
548 {
549 return cell_comm_;
550 }
551
552 ParallelIndexSet& cellIndexSet()
553 {
554 return cellCommunication().indexSet();
555 }
556
557 const ParallelIndexSet& cellIndexSet() const
558 {
559 return cellCommunication().indexSet();
560 }
561
562 RemoteIndices& cellRemoteIndices()
563 {
564 return cellCommunication().remoteIndices();
565 }
566
567 const RemoteIndices& cellRemoteIndices() const
568 {
569 return cellCommunication().remoteIndices();
570 }
571#endif
572
574 const std::vector<int>& sortedNumAquiferCells() const
575 {
576 return aquifer_cells_;
577 }
578
579private:
580
582 void populateGlobalCellIndexSet();
583
584#if HAVE_MPI
585
591 template<class DataHandle>
592 void gatherData(DataHandle& data, CpGridData* global_view,
593 CpGridData* distributed_view);
594
595
602 template<int codim, class DataHandle>
603 void gatherCodimData(DataHandle& data, CpGridData* global_data,
604 CpGridData* distributed_data);
605
612 template<class DataHandle>
613 void scatterData(DataHandle& data, CpGridData* global_data,
614 CpGridData* distributed_data, const InterfaceMap& cell_inf,
615 const InterfaceMap& point_inf);
616
624 template<int codim, class DataHandle>
625 void scatterCodimData(DataHandle& data, CpGridData* global_data,
626 CpGridData* distributed_data);
627
636 template<int codim, class DataHandle>
637 void communicateCodim(Entity2IndexDataHandle<DataHandle, codim>& data, CommunicationDirection dir,
638 const Interface& interface);
639
648 template<int codim, class DataHandle>
649 void communicateCodim(Entity2IndexDataHandle<DataHandle, codim>& data, CommunicationDirection dir,
650 const InterfaceMap& interface);
651
652#endif
653
654 void computeGeometry(CpGrid& grid,
655 const DefaultGeometryPolicy& globalGeometry,
656 const std::vector<int>& globalAquiferCells,
657 const OrientedEntityTable<0, 1>& globalCell2Faces,
658 DefaultGeometryPolicy& geometry,
659 std::vector<int>& aquiferCells,
660 const OrientedEntityTable<0, 1>& cell2Faces,
661 const std::vector< std::array<int,8> >& cell2Points);
662
663 // Representing the topology
675 Opm::SparseTable<int> face_to_point_;
677 std::vector< std::array<int,8> > cell_to_point_;
684 std::array<int, 3> logical_cartesian_size_;
691 std::vector<int> global_cell_;
697 typedef FieldVector<double, 3> PointType;
701 cpgrid::EntityVariable<int, 1> unique_boundary_ids_;
703 std::unique_ptr<cpgrid::IndexSet> index_set_;
705 std::shared_ptr<const cpgrid::IdSet> local_id_set_;
707 std::shared_ptr<LevelGlobalIdSet> global_id_set_;
709 std::shared_ptr<PartitionTypeIndicator> partition_type_indicator_;
711 int level_{0};
713 std::vector<std::shared_ptr<CpGridData>>* level_data_ptr_;
714 // SUITABLE FOR ALL LEVELS EXCEPT FOR LEAFVIEW
716 std::vector<int> level_to_leaf_cells_; // In entry 'level cell index', we store 'leafview cell index'
// {level LGR, {child0, child1, ...}}
718 std::vector<std::tuple<int,std::vector<int>>> parent_to_children_cells_;
// {# children in x-direction, ... y-, ... z-}
720 std::array<int,3> cells_per_dim_;
721 // SUITABLE ONLY FOR LEAFVIEW
// {level, cell index in that level}
723 std::vector<std::array<int,2>> leaf_to_level_cells_;
724 // SUITABLE FOR ALL LEVELS INCLUDING LEAFVIEW
// {level parent cell, parent cell index}
726 std::vector<std::array<int,2>> child_to_parent_cells_;
727
728
730 Communication ccobj_;
731
732 // Boundary information (optional).
733 bool use_unique_boundary_ids_;
734
740 std::vector<double> zcorn;
741
743 std::vector<int> aquifer_cells_;
744
745#if HAVE_MPI
746
748 CommunicationType cell_comm_;
749
751 std::tuple<Interface,Interface,Interface,Interface,Interface> cell_interfaces_;
752 /*
753 // code deactivated, because users cannot access face indices and therefore
754 // communication on faces makes no sense!
756 std::tuple<InterfaceMap,InterfaceMap,InterfaceMap,InterfaceMap,InterfaceMap>
757 face_interfaces_;
758 */
760 std::tuple<InterfaceMap,InterfaceMap,InterfaceMap,InterfaceMap,InterfaceMap>
761 point_interfaces_;
762
763#endif
764
765 // Return the geometry vector corresponding to the given codim.
766 template <int codim>
767 const EntityVariable<Geometry<3 - codim, 3>, codim>& geomVector() const
768 {
769 return geometry_.geomVector<codim>();
770 }
771
772 friend class Dune::CpGrid;
773 template<int> friend class Entity;
774 template<int> friend class EntityRep;
775 friend class Intersection;
776 friend class PartitionTypeIndicator;
777};
778
779
780
781#if HAVE_MPI
782
783namespace
784{
789template<class T>
790T& getInterface(InterfaceType iftype,
791 std::tuple<T,T,T,T,T>& interfaces)
792{
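    // The integer labels below correspond to the Dune::InterfaceType enumerators
    // from <dune/grid/common/gridenums.hh>: InteriorBorder_InteriorBorder_Interface (0),
    // InteriorBorder_All_Interface (1), Overlap_OverlapFront_Interface (2),
    // Overlap_All_Interface (3) and All_All_Interface (4).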
793 switch(iftype)
794 {
795 case 0:
796 return std::get<0>(interfaces);
797 case 1:
798 return std::get<1>(interfaces);
799 case 2:
800 return std::get<2>(interfaces);
801 case 3:
802 return std::get<3>(interfaces);
803 case 4:
804 return std::get<4>(interfaces);
805 }
806 OPM_THROW(std::runtime_error, "Invalid Interface type was used during communication");
807}
808
809} // end unnamed namespace
810
811template<int codim, class DataHandle>
812void CpGridData::communicateCodim(Entity2IndexDataHandle<DataHandle, codim>& data, CommunicationDirection dir,
813 const Interface& interface)
814{
815 this->template communicateCodim<codim>(data, dir, interface.interfaces());
816}
817
818template<int codim, class DataHandle>
819void CpGridData::communicateCodim(Entity2IndexDataHandle<DataHandle, codim>& data_wrapper, CommunicationDirection dir,
820 const InterfaceMap& interface)
821{
822 Communicator comm(ccobj_, interface);
823
824 if(dir==ForwardCommunication)
825 comm.forward(data_wrapper);
826 else
827 comm.backward(data_wrapper);
828}
829#endif
830
831template<class DataHandle>
832void CpGridData::communicate(DataHandle& data, InterfaceType iftype,
833 CommunicationDirection dir)
834{
835#if HAVE_MPI
836 if(data.contains(3,0))
837 {
838 Entity2IndexDataHandle<DataHandle, 0> data_wrapper(*this, data);
839 communicateCodim<0>(data_wrapper, dir, getInterface(iftype, cell_interfaces_));
840 }
841 if(data.contains(3,3))
842 {
843 Entity2IndexDataHandle<DataHandle, 3> data_wrapper(*this, data);
844 communicateCodim<3>(data_wrapper, dir, getInterface(iftype, point_interfaces_));
845 }
846#else
847 // Suppress warnings for unused arguments.
848 (void) data;
849 (void) iftype;
850 (void) dir;
851#endif
852}
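// Minimal usage sketch (illustrative only, not part of this header): communicate()
// accepts any handle modelled on Dune::CommDataHandleIF, e.g. one shipping a single
// double per cell. The names CellDataHandle and values below are hypothetical.
//
//   struct CellDataHandle : public Dune::CommDataHandleIF<CellDataHandle, double>
//   {
//       explicit CellDataHandle(std::vector<double>& v) : values(v) {}
//       bool contains(int /*dim*/, int codim) const { return codim == 0; }
//       bool fixedSize(int /*dim*/, int /*codim*/) const { return true; }
//       template<class Entity>
//       std::size_t size(const Entity&) const { return 1; }
//       template<class Buffer, class Entity>
//       void gather(Buffer& buf, const Entity& e) const { buf.write(values[e.index()]); }
//       template<class Buffer, class Entity>
//       void scatter(Buffer& buf, const Entity& e, std::size_t /*n*/) { buf.read(values[e.index()]); }
//       std::vector<double>& values;  // one entry per cell
//   };
//
//   // data.communicate(handle, Dune::InteriorBorder_All_Interface, Dune::ForwardCommunication);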
853}}
854
855#if HAVE_MPI
856#include <opm/grid/cpgrid/Entity.hpp>
857#include <opm/grid/cpgrid/Indexsets.hpp>
858
859namespace Dune {
860namespace cpgrid {
861
862namespace mover
863{
864template<class T>
865class MoveBuffer
866{
867 friend class Dune::cpgrid::CpGridData;
868public:
869 void read(T& data)
870 {
871 data=buffer_[index_++];
872 }
873 void write(const T& data)
874 {
875 buffer_[index_++]=data;
876 }
877 void reset()
878 {
879 index_=0;
880 }
881 void resize(std::size_t size)
882 {
883 buffer_.resize(size);
884 index_=0;
885 }
886private:
887 std::vector<T> buffer_;
888 typename std::vector<T>::size_type index_;
889};
890template<class DataHandle,int codim>
891struct Mover
892{
893};
894
895template<class DataHandle>
896struct BaseMover
897{
898 BaseMover(DataHandle& data)
899 : data_(data)
900 {}
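    // moveData: size the buffer from the source entity, let the data handle gather
    // into it, rewind the buffer cursor, then scatter the same items onto the
    // destination entity.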
901 template<class E>
902 void moveData(const E& from, const E& to)
903 {
904 std::size_t size=data_.size(from);
905 buffer.resize(size);
906 data_.gather(buffer, from);
907 buffer.reset();
908 data_.scatter(buffer, to, size);
909 }
910 DataHandle& data_;
911 MoveBuffer<typename DataHandle::DataType> buffer;
912};
913
914
915template<class DataHandle>
916struct Mover<DataHandle,0> : public BaseMover<DataHandle>
917{
918 Mover<DataHandle,0>(DataHandle& data, CpGridData* gatherView,
919 CpGridData* scatterView)
920 : BaseMover<DataHandle>(data), gatherView_(gatherView), scatterView_(scatterView)
921 {}
922
923 void operator()(std::size_t from_cell_index,std::size_t to_cell_index)
924 {
925 Entity<0> from_entity=Entity<0>(*gatherView_, from_cell_index, true);
926 Entity<0> to_entity=Entity<0>(*scatterView_, to_cell_index, true);
927 this->moveData(from_entity, to_entity);
928 }
929 CpGridData* gatherView_;
930 CpGridData* scatterView_;
931};
932
933template<class DataHandle>
934struct Mover<DataHandle,1> : public BaseMover<DataHandle>
935{
936 Mover<DataHandle,1>(DataHandle& data, CpGridData* gatherView,
937 CpGridData* scatterView)
938 : BaseMover<DataHandle>(data), gatherView_(gatherView), scatterView_(scatterView)
939 {}
940
941 void operator()(std::size_t from_cell_index,std::size_t to_cell_index)
942 {
943 typedef typename OrientedEntityTable<0,1>::row_type row_type;
944 EntityRep<0> from_cell=EntityRep<0>(from_cell_index, true);
945 EntityRep<0> to_cell=EntityRep<0>(to_cell_index, true);
946 OrientedEntityTable<0,1>& table = gatherView_->cell_to_face_;
947 row_type from_faces=table.operator[](from_cell);
948 row_type to_faces=scatterView_->cell_to_face_[to_cell];
949
950 for(int i=0; i<from_faces.size(); ++i)
951 this->moveData(from_faces[i], to_faces[i]);
952 }
953 CpGridData *gatherView_;
954 CpGridData *scatterView_;
955};
956
957template<class DataHandle>
958struct Mover<DataHandle,3> : public BaseMover<DataHandle>
959{
960 Mover<DataHandle,3>(DataHandle& data, CpGridData* gatherView,
961 CpGridData* scatterView)
962 : BaseMover<DataHandle>(data), gatherView_(gatherView), scatterView_(scatterView)
963 {}
964 void operator()(std::size_t from_cell_index,std::size_t to_cell_index)
965 {
966 const std::array<int,8>& from_cell_points=
967 gatherView_->cell_to_point_[from_cell_index];
968 const std::array<int,8>& to_cell_points=
969 scatterView_->cell_to_point_[to_cell_index];
970 for(std::size_t i=0; i<8; ++i)
971 {
972 this->moveData(Entity<3>(*gatherView_, from_cell_points[i], true),
973 Entity<3>(*scatterView_, to_cell_points[i], true));
974 }
975 }
976 CpGridData* gatherView_;
977 CpGridData* scatterView_;
978};
979
980} // end mover namespace
981
982template<class DataHandle>
983void CpGridData::scatterData(DataHandle& data, CpGridData* global_data,
984 CpGridData* distributed_data, const InterfaceMap& cell_inf,
985 const InterfaceMap& point_inf)
986{
987#if HAVE_MPI
988 if(data.contains(3,0))
989 {
990 Entity2IndexDataHandle<DataHandle, 0> data_wrapper(*global_data, *distributed_data, data);
991 communicateCodim<0>(data_wrapper, ForwardCommunication, cell_inf);
992 }
993 if(data.contains(3,3))
994 {
995 Entity2IndexDataHandle<DataHandle, 3> data_wrapper(*global_data, *distributed_data, data);
996 communicateCodim<3>(data_wrapper, ForwardCommunication, point_inf);
997 }
998#endif
999}
1000
1001template<int codim, class DataHandle>
1002void CpGridData::scatterCodimData(DataHandle& data, CpGridData* global_data,
1003 CpGridData* distributed_data)
1004{
1005 CpGridData *gather_view, *scatter_view;
1006 gather_view=global_data;
1007 scatter_view=distributed_data;
1008
1009 mover::Mover<DataHandle,codim> mover(data, gather_view, scatter_view);
1010
1011
1012 for(auto index=distributed_data->cellIndexSet().begin(),
1013 end = distributed_data->cellIndexSet().end();
1014 index!=end; ++index)
1015 {
1016 std::size_t from=index->global();
1017 std::size_t to=index->local();
1018 mover(from,to);
1019 }
1020}
1021
1022namespace
1023{
1024
1025template<int codim, class T, class F>
1026void visitInterior(CpGridData& distributed_data, T begin, T endit, F& func)
1027{
1028 for(T it=begin; it!=endit; ++it)
1029 {
1030 Entity<codim> entity(distributed_data, it-begin, true);
1031 PartitionType pt = entity.partitionType();
1032 if(pt==Dune::InteriorEntity)
1033 {
1034 func(*it, entity);
1035 }
1036 }
1037}
1038
1039template<class DataHandle>
1040struct GlobalIndexSizeGatherer
1041{
1042 GlobalIndexSizeGatherer(DataHandle& data_,
1043 std::vector<int>& ownedGlobalIndices_,
1044 std::vector<int>& ownedSizes_)
1045 : data(data_), ownedGlobalIndices(ownedGlobalIndices_), ownedSizes(ownedSizes_)
1046 {}
1047
1048 template<class T, class E>
1049 void operator()(T& i, E& entity)
1050 {
1051 ownedGlobalIndices.push_back(i);
1052 ownedSizes.push_back(data.size(entity));
1053 }
1054 DataHandle& data;
1055 std::vector<int>& ownedGlobalIndices;
1056 std::vector<int>& ownedSizes;
1057};
1058
1059template<class DataHandle>
1060struct DataGatherer
1061{
1062 DataGatherer(mover::MoveBuffer<typename DataHandle::DataType>& buffer_,
1063 DataHandle& data_)
1064 : buffer(buffer_), data(data_)
1065 {}
1066
1067 template<class T, class E>
1068 void operator()(T& /* it */, E& entity)
1069 {
1070 data.gather(buffer, entity);
1071 }
1072 mover::MoveBuffer<typename DataHandle::DataType>& buffer;
1073 DataHandle& data;
1074};
1075
1076}
1077
1078template<class DataHandle>
1079void CpGridData::gatherData(DataHandle& data, CpGridData* global_data,
1080 CpGridData* distributed_data)
1081{
1082#if HAVE_MPI
1083 if(data.contains(3,0))
1084 gatherCodimData<0>(data, global_data, distributed_data);
1085 if(data.contains(3,3))
1086 gatherCodimData<3>(data, global_data, distributed_data);
1087#endif
1088}
1089
1090template<int codim, class DataHandle>
1091void CpGridData::gatherCodimData(DataHandle& data, CpGridData* global_data,
1092 CpGridData* distributed_data)
1093{
1094#if HAVE_MPI
1095 // Get the mapping to global index from the global id set
1096 const std::vector<int>& mapping =
1097 distributed_data->global_id_set_->getMapping<codim>();
1098
1099 // Get the global indices and data size for the entities whose data is
1100 // to be sent, i.e. the ones that we own.
1101 std::vector<int> owned_global_indices;
1102 std::vector<int> owned_sizes;
1103 owned_global_indices.reserve(mapping.size());
1104 owned_sizes.reserve(mapping.size());
1105
1106 GlobalIndexSizeGatherer<DataHandle> gisg(data, owned_global_indices, owned_sizes);
1107 visitInterior<codim>(*distributed_data, mapping.begin(), mapping.end(), gisg);
1108
1109 // communicate the number of indices that each processor sends
1110 int no_indices=owned_sizes.size();
1111    // We will take the address of the first element for MPI_Allgather below.
1112 // Make sure the containers have such an element.
1113 if ( owned_global_indices.empty() )
1114 owned_global_indices.resize(1);
1115 if ( owned_sizes.empty() )
1116 owned_sizes.resize(1);
1117 std::vector<int> no_indices_to_recv(distributed_data->ccobj_.size());
1118 distributed_data->ccobj_.allgather(&no_indices, 1, &(no_indices_to_recv[0]));
1119    // compute the size of the vector that will receive all indices
1120 // and allgather the global indices and the sizes.
1121 // calculate displacements
1122 std::vector<int> displ(distributed_data->ccobj_.size()+1, 0);
1123 std::transform(displ.begin(), displ.end()-1, no_indices_to_recv.begin(), displ.begin()+1,
1124 std::plus<int>());
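    // Illustration (hypothetical sizes): with 3 ranks and no_indices_to_recv = {2, 3, 1},
    // the overlapping transform above yields the exclusive prefix sum displ = {0, 2, 5, 6},
    // so global_size below becomes 6 (the total number of indices gathered).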
1125 int global_size=displ[displ.size()-1];//+no_indices_to_recv[displ.size()-1];
1126 std::vector<int> global_indices(global_size);
1127 std::vector<int> global_sizes(global_size);
1128 MPI_Allgatherv(&(owned_global_indices[0]), no_indices, MPITraits<int>::getType(),
1129 &(global_indices[0]), &(no_indices_to_recv[0]), &(displ[0]),
1130 MPITraits<int>::getType(),
1131 distributed_data->ccobj_);
1132 MPI_Allgatherv(&(owned_sizes[0]), no_indices, MPITraits<int>::getType(),
1133 &(global_sizes[0]), &(no_indices_to_recv[0]), &(displ[0]),
1134 MPITraits<int>::getType(),
1135 distributed_data->ccobj_);
1136 std::vector<int>().swap(owned_global_indices); // free data for reuse.
1137 // Compute the number of data items to send
1138 std::vector<int> no_data_send(distributed_data->ccobj_.size());
1139 for(typename std::vector<int>::iterator begin=no_data_send.begin(),
1140 i=begin, end=no_data_send.end(); i!=end; ++i)
1141 *i = std::accumulate(global_sizes.begin()+displ[i-begin],
1142 global_sizes.begin()+displ[i-begin+1], std::size_t());
1143 // free at least some memory that can be reused.
1144 std::vector<int>().swap(owned_sizes);
1145 // compute the displacements for receiving with allgatherv
1146 displ[0]=0;
1147 std::transform(displ.begin(), displ.end()-1, no_data_send.begin(), displ.begin()+1,
1148 std::plus<std::size_t>());
1149 // Compute the number of data items we will receive
1150 int no_data_recv = displ[displ.size()-1];//+global_sizes[displ.size()-1];
1151
1152 // Collect the data to send, gather it
1153 mover::MoveBuffer<typename DataHandle::DataType> local_data_buffer, global_data_buffer;
1154 if ( no_data_send[distributed_data->ccobj_.rank()] )
1155 {
1156 local_data_buffer.resize(no_data_send[distributed_data->ccobj_.rank()]);
1157 }
1158 else
1159 {
1160 local_data_buffer.resize(1);
1161 }
1162 global_data_buffer.resize(no_data_recv);
1163
1164 DataGatherer<DataHandle> gatherer(local_data_buffer, data);
1165 visitInterior<codim>(*distributed_data, mapping.begin(), mapping.end(), gatherer);
1166 MPI_Allgatherv(&(local_data_buffer.buffer_[0]), no_data_send[distributed_data->ccobj_.rank()],
1167 MPITraits<typename DataHandle::DataType>::getType(),
1168 &(global_data_buffer.buffer_[0]), &(no_data_send[0]), &(displ[0]),
1169 MPITraits<typename DataHandle::DataType>::getType(),
1170 distributed_data->ccobj_);
1171 Entity2IndexDataHandle<DataHandle, codim> edata(*global_data, data);
1172 int offset=0;
1173 for(int i=0; i< codim; ++i)
1174 offset+=global_data->size(i);
1175
1176 typename std::vector<int>::const_iterator s=global_sizes.begin();
1177 for(typename std::vector<int>::const_iterator i=global_indices.begin(),
1178 end=global_indices.end();
1179 i!=end; ++s, ++i)
1180 {
1181 edata.scatter(global_data_buffer, *i-offset, *s);
1182 }
1183#endif
1184}
1185
1186} // end namespace cpgrid
1187} // end namespace Dune
1188
1189#endif
1190
1191#endif
[ provides Dune::Grid ]
Definition CpGrid.hpp:226
Struct that holds all the data needed to represent a CpGrid.
Definition CpGridData.hpp:135
@ MAX_DATA_PER_CELL
The maximum data items allowed per cell (DUNE < 2.5.2)
Definition CpGridData.hpp:182
int size(GeometryType type) const
number of leaf entities per geometry type in this process
Definition CpGridData.hpp:211
const std::array< int, 3 > & logicalCartesianSize() const
The logical cartesian size of the grid.
Definition CpGridData.hpp:492
void communicate(DataHandle &data, InterfaceType iftype, CommunicationDirection dir)
communicate objects for all codims on a given level
Definition CpGridData.hpp:832
bool uniqueBoundaryIds() const
Is the grid currently using unique boundary ids?
Definition CpGridData.hpp:460
void readSintefLegacyFormat(const std::string &grid_prefix)
Read the Sintef legacy grid format ('topogeom').
Definition readSintefLegacyFormat.cpp:67
int size(int codim) const
number of leaf entities per codim in this process
Definition CpGridData.cpp:144
void readEclipseFormat(const std::string &filename, bool periodic_extension, bool turn_normals=false)
Read the Eclipse grid format ('grdecl').
CpGridDataTraits::Communication Communication
The type of the collective communication.
Definition CpGridData.hpp:515
~CpGridData()
Destructor.
Definition CpGridData.cpp:97
void getIJK(int c, std::array< int, 3 > &ijk) const
Extract Cartesian index triplet (i,j,k) of an active cell.
Definition CpGridData.hpp:291
const IndexSet & indexSet() const
Get the index set.
Definition CpGridData.hpp:485
std::tuple< std::shared_ptr< CpGridData >, const std::vector< std::array< int, 2 > >, const std::vector< std::tuple< int, std::vector< int > > >, const std::vector< std::tuple< int, std::vector< int > > >, const std::vector< std::tuple< int, std::vector< int > > >, const std::vector< std::array< int, 2 > >, const std::vector< std::array< int, 2 > > > refinePatch(const std::array< int, 3 > &cells_per_dim, const std::array< int, 3 > &startIJK, const std::array< int, 3 > &endIJK) const
Refine a (connected block-shaped) patch of cells.
Definition CpGridData.cpp:2062
CpGridDataTraits::MPICommunicator MPICommunicator
The type of the MPI communicator.
Definition CpGridData.hpp:513
void processEclipseFormat(const grdecl &input_data, std::array< std::set< std::pair< int, int > >, 2 > &nnc, bool remove_ij_boundary, bool turn_normals, bool pinchActive, double tolerance_unique_points)
Read the Eclipse grid format ('grdecl').
Definition processEclipseFormat.cpp:268
std::tuple< const std::shared_ptr< CpGridData >, const std::vector< std::array< int, 2 > >, const std::vector< std::tuple< int, std::vector< int > > >, const std::tuple< int, std::vector< int > >, const std::vector< std::array< int, 2 > >, const std::vector< std::array< int, 2 > > > refineSingleCell(const std::array< int, 3 > &cells_per_dim, const int &parent_idx) const
Refine a single cell and return a shared pointer of CpGridData type.
Definition CpGridData.cpp:1933
void distributeGlobalGrid(CpGrid &grid, const CpGridData &view_data, const std::vector< int > &cell_part)
Redistribute a global grid.
Definition CpGridData.cpp:1513
const std::vector< double > & zcornData() const
Return the internalized zcorn copy from the grid processing, if no cells were adjusted during the min...
Definition CpGridData.hpp:478
void setUniqueBoundaryIds(bool uids)
Set whether we want to have unique boundary ids.
Definition CpGridData.hpp:467
void writeSintefLegacyFormat(const std::string &grid_prefix) const
Write the Sintef legacy grid format ('topogeom').
Definition writeSintefLegacyFormat.cpp:73
const std::vector< int > & sortedNumAquiferCells() const
Get sorted active cell indices of numerical aquifer.
Definition CpGridData.hpp:574
Definition DefaultGeometryPolicy.hpp:53
const EntityVariable< cpgrid::Geometry< 3 - codim, 3 >, codim > & geomVector() const
Definition DefaultGeometryPolicy.hpp:86
Wrapper that turns a data handle suitable for dune-grid into one based on integers instead of entitie...
Definition Entity2IndexDataHandle.hpp:56
A class designed to hold a variable with a value for each entity of the given codimension,...
Definition EntityRep.hpp:267
Definition Entity.hpp:65
This class encapsulates geometry for vertices, intersections, and cells.
Definition Geometry.hpp:75
The global id set for Dune.
Definition Indexsets.hpp:304
Only needs to provide interface for doing nothing.
Definition Iterators.hpp:108
Definition Indexsets.hpp:55
Represents the topological relationships between sets of entities, for example cells and faces.
Definition OrientedEntityTable.hpp:139
A class designed to hold a variable with a value for each entity of the given codimension,...
Definition EntityRep.hpp:302
A SparseTable stores a table with rows of varying size as efficiently as possible.
Definition SparseTable.hpp:55
Copyright 2019 Equinor AS.
Definition CartesianIndexMapper.hpp:10
Holds the implementation of the CpGrid as a pimpl.
Definition CellQuadrature.cpp:68
Low-level corner-point processing routines and supporting data structures.
MPIHelper::MPICommunicator MPICommunicator
The type of the collective communication.
Definition CpGridDataTraits.hpp:55
AttributeSet
The type of the set of the attributes.
Definition CpGridDataTraits.hpp:65
Definition CpGridData.hpp:127
Raw corner-point specification of a particular geological model.
Definition preprocess.h:56