24 #ifndef __PAR_VECTOR_H__
25 #define __PAR_VECTOR_H__
27 #include <parVector/parVectorMap.hpp>
28 #include <utils/utils.hpp>
29 #include <utils/MPI_DataType.hpp>
// NOTE(review): this chunk is an elided extraction — the enclosing class
// declaration and most member signatures fall on lines not visible here.
40 template<
typename T,
typename S>
// Constructor declaration: build a distributed vector on communicator ncomm
// spanning global indices [lbound, ubound) — half-open semantics presumed
// from the bound checks later in this file; confirm against parVectorMap.
68 parVector(MPI_Comm ncomm, S lbound, S ubound);
// Two accessors returning the locally stored entry at local index `lindex`
// (presumably GetValue-style getters; their signatures are elided).
116 return array[lindex];
124 return array[lindex];
// Fragments: a defaulted/empty member (signature elided), then the body of
// the (ncomm, lbound, ubound) constructor.
229 template<
typename T,
typename S>
233 template<
typename T,
typename S>
// Duplicate the caller's communicator so this vector owns an independent
// `comm` for its later collective operations.
236 MPI_Comm_dup(ncomm, &comm);
// Cache the distribution sizes from the index map, then allocate the local
// storage for exactly the entries this rank owns.
238 local_size = index_map.GetLocalSize();
239 global_size = index_map.GetGlobalSize();
240 array =
new T[local_size];
241 MPI_Comm_rank(ncomm, &MyPID);
242 MPI_Comm_size(ncomm, &nProcs);
// Fragment: constructor taking an existing parVectorMap (signature elided).
// Same setup as the bound-based constructor, but `comm` is presumably taken
// from the map on an elided line rather than duplicated here — confirm in
// the full source.
245 template<
typename T,
typename S>
250 local_size = index_map.GetLocalSize();
251 global_size = index_map.GetGlobalSize();
252 array =
new T[local_size];
253 MPI_Comm_rank(comm, &MyPID);
254 MPI_Comm_size(comm, &nProcs);
// Fragments of two member definitions (signatures and bodies largely
// elided); the loop below iterates every locally owned entry — presumably a
// fill-with-value operation. TODO confirm against the full source.
257 template<
typename T,
typename S>
261 template<
typename T,
typename S>
264 for(S i= 0; i < local_size; i++) {
// Fragments: one elided member, then VecView — prints each locally owned
// entry to stdout as "[global_index]: value".
269 template<
typename T,
typename S>
276 template<
typename T,
typename S>
280 r = index_map.GetRank();
282 std::cout <<
"Parallel Vector View: " << std::endl << std::endl;
284 T *array = GetArray();
// Translate each local index back to its global index for display.
286 for(S i = 0; i < local_size; i++){
287 global = Loc2Glob(i);
288 std::cout <<
"[" << global <<
"]: " << array[i] << std::endl;
// SetValueLocal: store a value at local index `row`; out-of-range rows are
// silently ignored. NOTE(review): only the upper bound is checked here — a
// negative `row` (if S is signed) would index out of bounds; confirm callers
// never pass one.
292 template<
typename T,
typename S>
295 if (row < local_size){
// SetValuesLocal: batched form — SetValueLocal per (row, value) pair.
300 template<
typename T,
typename S>
303 for(S i = 0; i < nindex; i++){
304 SetValueLocal(rows[i],values[i]);
// SetValueGlobal: store `value` at global index `index` if this rank owns
// it; otherwise a silent no-op (every rank can call it with any index).
308 template<
typename T,
typename S>
// NOTE(review): the bounds are narrowed to int — this may truncate when S is
// wider than int (e.g. 64-bit global indices); confirm GetLowerBound's
// return type in the full source.
311 int lower_bound = GetLowerBound();
312 int upper_bound = GetUpperBound();
313 if((index >= lower_bound) && (index < upper_bound)){
314 SetValueLocal(index_map.Glob2Loc(index), value);
// SetValuesGlobal: batched global scatter — each (row, value) pair is mapped
// through Glob2Loc and stored locally. NOTE(review): unlike SetValueGlobal
// there is no ownership check here; this relies on Glob2Loc returning an
// out-of-range local index for non-owned rows so SetValueLocal's guard
// rejects it — confirm Glob2Loc cannot return a negative in-range value.
318 template<
typename T,
typename S>
321 for(S i = 0; i < nindex; i++){
322 SetValueLocal(Glob2Loc(rows[i]),values[i]);
// AddValueLocal: accumulate `value` into the entry at local index `row`;
// out-of-range rows are silently ignored (upper bound only, same caveat as
// SetValueLocal).
326 template<
typename T,
typename S>
329 if (row < local_size){
330 array[row] = array[row] + value;
// AddValuesLocal: batched accumulate — AddValueLocal per (row, value) pair.
334 template<
typename T,
typename S>
337 for(S i = 0; i < nindex; i++){
338 AddValueLocal(rows[i],values[i]);
// VecAdd: elementwise in-place sum, this += v, over the locally owned range.
// On a size mismatch only a warning is printed (runtime string kept
// verbatim, typo included); whether the elided lines return early after the
// warning is not visible here — confirm before relying on it.
342 template<
typename T,
typename S>
346 std::cout <<
"vector size not coherant" << std::endl;
349 for(S i = 0; i < local_size; i++){
350 array[i] = array[i] + v.
array[i];
// VecScale: in-place scalar multiply of every locally owned entry; purely
// local, no communication.
355 template<
typename T,
typename S>
358 for(S i = 0; i < local_size; i++){
359 array[i] = scale*array[i];
// VecDot: global dot product — local partial sum followed by an in-place
// MPI_SUM allreduce so every rank gets the full result. NOTE(review): this
// is a plain product, not conjugated — for complex T it is a bilinear dot,
// not the Hermitian inner product; confirm that is the intended contract.
363 template<
typename T,
typename S>
367 for(S i = 0; i < local_size; i++){
368 sum += array[i]*v.
array[i];
370 MPI_Allreduce(MPI_IN_PLACE, &sum, 1, getMPI_Type<T>(), MPI_SUM, comm);
// ReadExtVec: load vector entries from a text file (`spectrum`), one entry
// per line; each line presumably carries a global index (parsed on elided
// lines into `idx`) followed by size1 scalar components.
375 template<
typename T,
typename S>
378 std::ifstream file(spectrum);
381 int lower_bound = GetLowerBound();
382 int upper_bound = GetUpperBound();
// Components per value: sizeof(T)/sizeof(Base<T>) — 1 for real scalar T,
// 2 for complex T (real and imaginary parts).
385 size1 =
sizeof(T) /
sizeof(Base<T>);
// NOTE(review): a variable-length array is a compiler extension, not
// standard C++; size1 is at most 2 here, so a fixed Base<T>[2] would be
// portable.
387 Base<T> in_vals[size1];
390 while (std::getline(file,line)) {
393 for(
int i = 0; i < size1; i++){
397 std::stringstream linestream ( line ) ;
400 for(
int i = 0; i < size1; i++){
401 linestream >> in_vals[i];
// Reassemble the parsed components into a T by viewing `val` as an array of
// its base scalars (e.g. real/imag parts of a std::complex).
405 for(
int i = 0; i < size1; i++){
406 reinterpret_cast<Base<T>(&)[size1]
>(val)[i] = in_vals[i];
// Keep only the entries this rank owns (same int-narrowed bounds caveat as
// SetValueGlobal).
409 if((idx >= lower_bound) && (idx < upper_bound)){
410 SetValueLocal(index_map.Glob2Loc(idx),val);
// writeToTxt: collectively write a real-valued vector to a text file via
// MPI-IO. Layout: a header (written by every rank at offset 0 — identical
// bytes, so redundant but consistent), then each rank's "index value" lines
// at an offset computed from the payload sizes of all lower ranks.
415 template<
typename T,
typename S>
// Guard: complex scalars (sizeof(T)/sizeof(Base<T>) == 2) must use
// writeToTxtCmplx instead; the handling after the message is elided.
421 if( (
sizeof(T)/
sizeof(Base<T>) != 1) ){
425 std::cout <<
"SMG2S]> Caught Exception: for complex vector, please use writeToTxtCmplx" << std::endl;
430 header.append(
"%%SMG2S vector in real scalar\n");
431 std::string dim_info = std::to_string(global_size) +
" " + std::to_string(global_size) +
"\n";
432 header.append(dim_info);
// Serialize locally owned entries with 1-based global indices.
434 for(
auto i = 0; i < local_size; i++){
435 data += std::to_string(index_map.Loc2Glob(i)+1) +
" " + std::to_string(array[i]) +
"\n";
439 MPI_Offset write_offset;
440 MPI_Offset text_size;
441 MPI_Offset *write_size_per_proc;
444 write_size_per_proc = (MPI_Offset *)malloc(
sizeof(MPI_Offset) * nProcs);
446 text_size = data.size();
// Every rank learns every rank's payload size so it can compute its own
// exclusive prefix offset below.
448 MPI_Allgather(&text_size, 1, MPI_OFFSET, write_size_per_proc, 1, MPI_OFFSET, comm);
450 write_offset = header.size();
452 for (
auto i = 0; i < MyPID; ++i) {
453 write_offset += write_size_per_proc[i];
// Recreate the file from scratch; the delete is best-effort cleanup.
457 MPI_File_delete (file_name.c_str(), MPI_INFO_NULL);
458 MPI_File_open(comm, file_name.c_str(), MPI_MODE_WRONLY | MPI_MODE_CREATE, MPI_INFO_NULL, &fh);
462 MPI_File_write_at(fh, 0, header.c_str(), header.size(), MPI_CHAR, &sts);
465 MPI_File_write_at(fh, write_offset, data.c_str(), data.size(), MPI_CHAR, &sts);
468 free(write_size_per_proc);
// writeToTxtCmplx: complex-valued counterpart of writeToTxt — same
// collective MPI-IO scheme, but each data line carries "index real imag".
471 template<
typename T,
typename S>
// Guard: real scalars must use writeToTxt instead; handling after the
// message is elided.
476 if( (
sizeof(T)/
sizeof(Base<T>) != 2) ){
480 std::cout <<
"SMG2S]> Caught Exception: for real vector, please use writeToTxt" << std::endl;
485 header.append(
"%%SMG2S vector in complex scalar\n");
486 std::string dim_info = std::to_string(global_size) +
" " + std::to_string(global_size) +
" " + std::to_string(global_size) +
"\n";
487 header.append(dim_info);
489 for(
auto i = 0; i < local_size; i++){
// NOTE(review): the "+ +" below is a stray unary plus applied to the string
// literal — it compiles (array decays to pointer) and concatenates normally,
// but it is almost certainly a typo worth cleaning up in the full source.
490 data += std::to_string(index_map.Loc2Glob(i)+1) +
" " + std::to_string(array[i].real()) + +
" " + std::to_string(array[i].imag()) +
"\n";
494 MPI_Offset write_offset;
495 MPI_Offset text_size;
496 MPI_Offset *write_size_per_proc;
499 write_size_per_proc = (MPI_Offset *)malloc(
sizeof(MPI_Offset) * nProcs);
501 text_size = data.size();
// Gather all ranks' payload sizes to compute this rank's exclusive prefix
// offset after the shared header.
503 MPI_Allgather(&text_size, 1, MPI_OFFSET, write_size_per_proc, 1, MPI_OFFSET, comm);
505 write_offset = header.size();
507 for (
auto i = 0; i < MyPID; ++i) {
508 write_offset += write_size_per_proc[i];
512 MPI_File_delete (file_name.c_str(), MPI_INFO_NULL);
513 MPI_File_open(comm, file_name.c_str(), MPI_MODE_WRONLY | MPI_MODE_CREATE, MPI_INFO_NULL, &fh);
517 MPI_File_write_at(fh, 0, header.c_str(), header.size(), MPI_CHAR, &sts);
520 MPI_File_write_at(fh, write_offset, data.c_str(), data.size(), MPI_CHAR, &sts);
523 free(write_size_per_proc);
// GetUpperNeighbor: neighbor exchange — sends this rank's entry at local
// index (local_size - offset) to rank `down` and receives `neighbor` from
// rank `up` (both neighbor ranks are computed on elided lines).
527 template<
typename T,
typename S>
529 MPI_Request rtypereq, stypereq;
// Rank-edge special case (body elided); the last rank is handled differently.
539 if(MyPID != nProcs - 1){
// Reject offsets outside [1, local_size]; the action after the message is
// elided — presumably an early return or exception.
548 if(offset < 1 || offset > local_size){
552 std::cout <<
"SMG2S]> Caught Exception: input parameter is invalid for GetUpperNeighbor" << std::endl;
555 MPI_Isend(&array[local_size-offset], 1, getMPI_Type<T>(), down, 1, comm, &stypereq);
556 MPI_Irecv(&neighbor, 1, getMPI_Type<T>(), up, 1, comm, &rtypereq);
// NOTE(review): only the receive is waited on here; if no MPI_Wait on
// stypereq exists on the elided lines, the send request is never completed.
558 MPI_Wait(&rtypereq,&typestat);
// GetLowerNeighbor: mirror of GetUpperNeighbor — sends this rank's entry at
// local index (offset - 1) to rank `up` and receives `neighbor` from rank
// `down` (neighbor ranks computed on elided lines).
564 template<
typename T,
typename S>
566 MPI_Request rtypereq, stypereq;
// Rank-edge special case (body elided).
576 if(MyPID != nProcs - 1){
// Reject offsets outside [1, local_size]; post-message handling is elided.
585 if(offset < 1 || offset > local_size){
589 std::cout <<
"SMG2S]> Caught Exception: input parameter is invalid for GetLowerNeighbor" << std::endl;
592 MPI_Isend(&array[offset-1], 1, getMPI_Type<T>(), up, 1, comm, &stypereq);
593 MPI_Irecv(&neighbor, 1, getMPI_Type<T>(), down, 1, comm, &rtypereq);
// NOTE(review): same caveat as GetUpperNeighbor — stypereq is not waited on
// in the visible lines.
595 MPI_Wait(&rtypereq,&typestat);