#include "transform_connector.hpp"
#include "element.hpp"
#include "scatterer_connector.hpp"
#include "gatherer_connector.hpp"
#include "client_client_dht_template.hpp"


namespace xios
{
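  // Compute the routing between the source and destination views of the
  // transformation: a DHT lookup determines which ranks hold each destination
  // index, the index lists are exchanged point to point, and the result is
  // materialized as a scatterer connector (send side) and a gatherer
  // connector (receive side).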
  void CTransformConnector::computeConnector(void)
  {
    CClientClientDHTTemplate<int>::Index2VectorInfoTypeMap info ;

    // First, insert the destination global indices into the DHT, each one
    // associated with the local rank that holds it.
    int rank ;
    MPI_Comm_rank(localComm_, &rank) ;
    CArray<size_t,1> globalIndex ;
    dstView_->getGlobalIndexView(globalIndex) ;
    for(int i=0; i<globalIndex.numElements(); i++) info[globalIndex(i)].push_back(rank) ;
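    // Building the DHT distributes the (destination index -> rank) pairs over
    // localComm_, so any rank can later query which ranks own a given index.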
    CClientClientDHTTemplate<int> dataRanks(info, localComm_) ;

    // Collect all global source indices from the source view, deduplicated
    // through a set.
    set<size_t> setGlobalIndex ;
    srcView_->getGlobalIndexView(globalIndex) ;
    for(int i=0; i<globalIndex.numElements(); i++) setGlobalIndex.insert(globalIndex(i)) ;

    // Copy the deduplicated indices into a contiguous array for the DHT query.
    CArray<size_t,1> srcGlobalIndex(setGlobalIndex.size()) ;
    int i=0 ;
    for(auto& globalIndex : setGlobalIndex)
    {
      srcGlobalIndex(i) = globalIndex ;
      i++ ;
    }

    dataRanks.computeIndexInfoMapping(srcGlobalIndex) ;
    const auto& returnInfo = dataRanks.getInfoIndexMap() ;
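    // returnInfo maps each queried source global index to the list of ranks
    // whose destination view contains it; invert it to get, per destination
    // rank, the list of indices this process must send.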

    map<int,vector<size_t>> vectorIndex ;
    for(auto& rankIndGlo : returnInfo)
    {
      size_t indGlo = rankIndGlo.first ;
      auto& listRank = rankIndGlo.second ;
      for(auto& rank : listRank) vectorIndex[rank].push_back(indGlo) ;
    }

    // Convert vectorIndex into per-rank CArrays.
    map<int,CArray<size_t,1>> dstArrayIndex ;
    for(auto& rankIndGlo : vectorIndex)
    {
      int rank = rankIndGlo.first ;
      auto& indexVector = rankIndGlo.second ;
      CArray<size_t,1> arrayTmp(indexVector.data(), shape(indexVector.size()), duplicateData) ;
      dstArrayIndex[rank].reference(arrayTmp) ;
    }
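    // dstArrayIndex[r] now lists the source global indices this process will
    // send to rank r.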

    // Distributed element describing where the data is to be sent.
    auto dstElement = make_shared<CDistributedElement>(srcView_->getGlobalSize(), dstArrayIndex) ;
    dstElement->addFullView() ;

    // Create the scatterer connector from the source view to the full view of
    // the distributed element.
    int commSize ;
    MPI_Comm_size(localComm_, &commSize) ;
    scattererConnector_ = make_shared<CScattererConnector>(srcView_, dstElement->getView(CElementView::FULL), localComm_, commSize) ;
    scattererConnector_->computeConnector() ;
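    // scattererConnector_ now knows how to split a local source array into one
    // message per destination rank.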

    // How many ranks will send data to this process?
    // Each rank raises a flag in nbSenders for every destination rank it
    // targets; the reduce-scatter (one element delivered per rank) then gives
    // each process the number of senders pointing at it.
    vector<int> nbSenders(commSize) ;
    int nbSender ;
    for(auto& it : dstArrayIndex) nbSenders[it.first]=1 ;
    vector<int> recvCounts(commSize,1) ;
    MPI_Reduce_scatter(nbSenders.data(), &nbSender, recvCounts.data(), MPI_INT, MPI_SUM, localComm_) ;

    // Exchange the global index lists: send to each destination rank the list
    // of indices it will receive from this process.
    map<int,CArray<size_t,1>> remoteArrayIndex ;

    vector<MPI_Request> sendReq ;
    for(auto& it : dstArrayIndex)
    {
      MPI_Request req ;
      MPI_Isend(it.second.dataFirst(), it.second.numElements(), MPI_SIZE_T, it.first, 0, localComm_, &req) ;
      sendReq.push_back(req) ;
    }
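    // Receive one message per expected sender; MPI_Probe + MPI_Get_count size
    // the buffer before the matching MPI_Recv.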

    for(int i=0; i<nbSender; i++)
    {
      int size ;
      MPI_Status status ;
      MPI_Probe(MPI_ANY_SOURCE, 0, localComm_, &status) ;
      MPI_Get_count(&status, MPI_SIZE_T, &size) ;
      vector<size_t> recvBuff(size) ;
      MPI_Recv(recvBuff.data(), size, MPI_SIZE_T, status.MPI_SOURCE, 0, localComm_, &status) ;
      CArray<size_t,1> arrayTmp(recvBuff.data(), shape(recvBuff.size()), duplicateData) ;
      remoteArrayIndex[status.MPI_SOURCE].reference(arrayTmp) ;
      recvRankSize_[status.MPI_SOURCE] = size ;
    }
    vector<MPI_Status> sendStatus(sendReq.size()) ;
    MPI_Waitall(sendReq.size(), sendReq.data(), sendStatus.data()) ;
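    // All sends are complete; remoteArrayIndex[r] holds the global indices
    // rank r will send us, and recvRankSize_[r] the corresponding message size.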

    // Create the gatherer connector from the received index lists to the
    // destination view.
    auto remoteElement = make_shared<CDistributedElement>(dstView_->getGlobalSize(), remoteArrayIndex) ;
    remoteElement->addFullView() ;
    gathererConnector_ = make_shared<CGathererConnector>(remoteElement->getView(CElementView::FULL), dstView_) ;
    gathererConnector_->computeConnector() ;
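    // A transfer would presumably chain the two connectors: scatter the local
    // source data into per-rank messages, exchange them, then gather into the
    // destination view. Hypothetical sketch (names assumed, not defined in
    // this file):
    //   scattererConnector_->transfer(srcData, sendBuffers) ;
    //   ... MPI exchange sized by recvRankSize_ ...
    //   gathererConnector_->transfer(recvBuffers, dstData) ;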

  }
}