SMG2S
Sparse Matrix Generator with Given Spectrum
parVectorMap.hpp
1 /*
2 MIT License
3 Copyright (c) 2019 Xinzhe WU @ Maison de la Simulation, France
4 Copyright (c) 2019-2022, Xinzhe Wu @ Simulation and Data Laboratory Quantum
5  Materials, Forschungszentrum Juelich GmbH.
6 
7 Permission is hereby granted, free of charge, to any person obtaining a copy
8 of this software and associated documentation files (the "Software"), to deal
9 in the Software without restriction, including without limitation the rights
10 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 copies of the Software, and to permit persons to whom the Software is
12 furnished to do so, subject to the following conditions:
13 The above copyright notice and this permission notice shall be included in all
14 copies or substantial portions of the Software.
15 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 SOFTWARE.
22 */
23 
24 #ifndef __PARVECTORMAP_H__
25 #define __PARVECTORMAP_H__
26 
#include <iostream>
#include <vector>

#include "utils/MPI_DataType.hpp"
28 
30 
39 template<typename S>
41 {
42 
43  private:
45  MPI_Comm comm;
47  int nproc;
49  int rank;
59  std::vector<S> lprocbound_map;
61  std::vector<S> uprocbound_map;
62 
63  public:
64  //constructor
66  parVectorMap();
68 
73  parVectorMap(MPI_Comm ncomm, S lbound, S ubound);
75  ~parVectorMap();
76 
78  bool operator == (const parVectorMap &map1){
79  bool ifsamecom;
80  int flag;
81  MPI_Comm_compare(comm, map1.comm, &flag);
82  if(flag == MPI_IDENT){
83  ifsamecom = true;
84  }else{
85  ifsamecom = false;
86  }
87 
88  return (ifsamecom && nproc == map1.nproc && rank == map1.rank && lower_bound == map1.lower_bound && upper_bound == map1.upper_bound && local_size == map1.local_size && global_size == map1.global_size);
89  };
90 
92  bool operator != (const parVectorMap &map1){
93  bool ifdiffcom;
94  int flag;
95  MPI_Comm_compare(comm, map1.comm, &flag);
96  if(flag == MPI_IDENT){
97  ifdiffcom = false;
98  }else{
99  ifdiffcom = true;
100  }
101 
102  return (ifdiffcom || nproc != map1.nproc || rank != map1.rank || lower_bound != map1.lower_bound || upper_bound != map1.upper_bound || local_size != map1.local_size || global_size != map1.global_size);
103  };
104 
106 
111  S Loc2Glob(S local_index);
113 
118  S Glob2Loc(S global_index);
119 
120  //get
122  MPI_Comm GetCurrentComm(){return comm;};
124  int GetRank(){return rank;};
130  S GetLocalSize(){return local_size;};
134  std::vector<S> GetLBoundMap(){return lprocbound_map;};
136  std::vector<S> GetUBoundMap(){return uprocbound_map;};
137 
138 };
139 
140 template<typename S>
142 
143 template<typename S>
144 parVectorMap<S>::parVectorMap(MPI_Comm ncomm, S lbound, S ubound)
145 {
146  MPI_Comm_dup(ncomm, &comm);
147  MPI_Comm_size(comm, &nproc);
148  MPI_Comm_rank(comm, &rank);
149 
150  //setting lower and upper bound
151  lower_bound = lbound;
152  upper_bound = ubound;
153  //get local vector size
154  local_size = upper_bound - lower_bound;
155  //initial global size
156  global_size = 0;
157 
158  MPI_Allreduce(&local_size, &global_size, 1, MPI_INT, MPI_SUM, comm);
159 
160  lprocbound_map.resize(nproc);
161  uprocbound_map.resize(nproc);
162 
163  MPI_Allgather(&lower_bound, 1 , getMPI_Type<S>() , lprocbound_map.data() , 1 , getMPI_Type<S>(), comm ) ;
164  MPI_Allgather(&upper_bound, 1 , getMPI_Type<S>() , uprocbound_map.data(), 1 , getMPI_Type<S>() , comm ) ;
165 
166 }
167 
168 template<typename S>
170 }
171 
172 template<typename S>
173 S parVectorMap<S>::Loc2Glob(S local_index){
174 
175  S global_index = lower_bound + local_index;
176 
177  try{
178  if (global_index > global_size){
179  global_index = -1;
180  throw (local_index);
181  }
182  }
183  catch(S local_index){
184  std::cout << "The given local index <" << local_index <<"> is out of bound." << std::endl;
185  }
186 
187  return global_index;
188 }
189 
190 template<typename S>
191 S parVectorMap<S>::Glob2Loc(S global_index){
192 
193  S local_index = global_index - lower_bound;
194 
195  try{
196  if (local_index < 0 || local_index >= local_size){
197  local_index = -1;
198  throw(global_index);
199  }
200  }catch(S global_index){
201 
202  }
203 
204  return local_index;
205 }
206 
207 
208 #endif
parVectorMap::GetLBoundMap
std::vector< S > GetLBoundMap()
Return parVectorMap<S>::lprocbound_map.
Definition: parVectorMap.hpp:134
parVectorMap
A class which determines the way to distribute a vector across MPI procs.
Definition: parVectorMap.hpp:40
parVectorMap::GetRank
int GetRank()
Return parVectorMap<S>::rank.
Definition: parVectorMap.hpp:124
parVectorMap::upper_bound
S upper_bound
`upper_bound-1 = ` the largest index of a distributed vector on each MPI proc
Definition: parVectorMap.hpp:53
parVectorMap::GetCurrentComm
MPI_Comm GetCurrentComm()
Return parVectorMap<S>::comm.
Definition: parVectorMap.hpp:122
parVectorMap::parVectorMap
parVectorMap()
A constructor of `parVectorMap`.
Definition: parVectorMap.hpp:141
parVectorMap::operator!=
bool operator!=(const parVectorMap &map1)
Compare if this map is different with another one `map1`.
Definition: parVectorMap.hpp:92
parVectorMap::rank
int rank
rank of each MPI procs within the working MPI communicator parVectorMap::comm
Definition: parVectorMap.hpp:49
parVectorMap::GetLowerBound
S GetLowerBound()
Return parVectorMap<S>::lower_bound.
Definition: parVectorMap.hpp:126
parVectorMap::GetUpperBound
S GetUpperBound()
Return parVectorMap<S>::upper_bound.
Definition: parVectorMap.hpp:128
parVectorMap::lower_bound
S lower_bound
the smallest index of a distributed vector on each MPI proc
Definition: parVectorMap.hpp:51
parVectorMap::uprocbound_map
std::vector< S > uprocbound_map
A `std::vector` which stores the parVectorMap::upper_bound of all MPI procs together.
Definition: parVectorMap.hpp:61
parVectorMap::Glob2Loc
S Glob2Loc(S global_index)
Convert an index of the global vector into its index in the local vector on each MPI proc.
Definition: parVectorMap.hpp:191
parVectorMap::global_size
S global_size
Global size of this distributed vector.
Definition: parVectorMap.hpp:57
parVectorMap::GetLocalSize
S GetLocalSize()
Return parVectorMap<S>::local_size.
Definition: parVectorMap.hpp:130
parVectorMap::Loc2Glob
S Loc2Glob(S local_index)
Convert an index of the local vector on each MPI proc into its index in the global distributed vector.
Definition: parVectorMap.hpp:173
parVectorMap::GetUBoundMap
std::vector< S > GetUBoundMap()
Return parVectorMap<S>::uprocbound_map.
Definition: parVectorMap.hpp:136
parVectorMap::operator==
bool operator==(const parVectorMap &map1)
Compare if this map is identical to another one `map1`.
Definition: parVectorMap.hpp:78
parVectorMap::lprocbound_map
std::vector< S > lprocbound_map
A `std::vector` which stores the parVectorMap::lower_bound of all MPI procs together.
Definition: parVectorMap.hpp:59
parVectorMap::comm
MPI_Comm comm
The working MPI Communicator.
Definition: parVectorMap.hpp:45
parVectorMap::nproc
int nproc
number of MPI procs within the working MPI communicator parVectorMap::comm
Definition: parVectorMap.hpp:47
parVectorMap::~parVectorMap
~parVectorMap()
A destructor of `parVectorMap`.
Definition: parVectorMap.hpp:169
parVectorMap::local_size
S local_size
The number of elements of vector stored on each MPI proc.
Definition: parVectorMap.hpp:55
parVectorMap::GetGlobalSize
S GetGlobalSize()
Return parVectorMap<S>::global_size.
Definition: parVectorMap.hpp:132