#include "transform_connector.hpp" #include "element.hpp" #include "scatterer_connector.hpp" #include "gatherer_connector.hpp" #include "client_client_dht_template.hpp" namespace xios { void CTransformConnector::computeConnector(void) { CClientClientDHTTemplate::Index2VectorInfoTypeMap info ; // first, insert destination global index into DHT int rank ; MPI_Comm_rank(localComm_, &rank) ; CArray globalIndex ; dstView_->getGlobalIndexView(globalIndex) ; for(int i=0;i dataRanks(info, localComm_) ; // get global src index from src view. set setGlobalIndex ; // all global index from src srcView_->getGlobalIndexView(globalIndex) ; for(int i=0;i srcGlobalIndex(setGlobalIndex.size()) ; int i=0 ; for(auto& globalIndex : setGlobalIndex) { srcGlobalIndex(i) = globalIndex ; i++ ; } dataRanks.computeIndexInfoMapping(srcGlobalIndex) ; const auto& returnInfo = dataRanks.getInfoIndexMap() ; map> vectorIndex ; for(auto& rankIndGlo : returnInfo) { size_t indGlo = rankIndGlo.first ; auto& listRank = rankIndGlo.second ; for (auto& rank : listRank) vectorIndex[rank].push_back(indGlo); } // convert vectorIndex into array map> dstArrayIndex ; for(auto& rankIndGlo : vectorIndex) { int rank = rankIndGlo.first ; auto& indexVector = rankIndGlo.second ; auto& arrayIndex = dstArrayIndex[rank] ; CArray arrayTmp(indexVector.data(), shape(indexVector.size()), duplicateData) ; dstArrayIndex[rank].reference(arrayTmp) ; } // distributed element : where to send data CDistributedElement dstElement(srcView_->getGlobalSize(), dstArrayIndex) ; dstElement.addFullView() ; // create scatterer connector int commSize ; MPI_Comm_size(localComm_, &commSize) ; scattererConnector_ = new CScattererConnector(srcView_, dstElement.getView(CElementView::FULL), localComm_, commSize ) ; scattererConnector_->computeConnector() ; // how much sender ? vector nbSenders(commSize) ; int nbSender ; for(auto& it : dstArrayIndex) nbSenders[it.first]=1 ; vector recvCounts(commSize,1) ; MPI_Reduce_scatter(nbSenders.data(), &nbSender, recvCounts.data(), MPI_INT, MPI_SUM, localComm_) ; // transfer global index map> remoteArrayIndex ; vector sendReq ; for(auto& it : dstArrayIndex) { MPI_Request req ; MPI_Isend(it.second.dataFirst(), it.second.numElements(), MPI_SIZE_T, it.first,0, localComm_,&req) ; sendReq.push_back(req) ; } for(int i=0; i recvBuff(size) ; MPI_Recv(recvBuff.data(), size, MPI_SIZE_T, status.MPI_SOURCE,0,localComm_,&status) ; CArray arrayTmp(recvBuff.data(), shape(recvBuff.size()), duplicateData) ; remoteArrayIndex[status.MPI_SOURCE].reference(arrayTmp) ; recvRankSize_[status.MPI_SOURCE] = size ; } vector sendStatus(sendReq.size()) ; MPI_Waitall(sendReq.size(),sendReq.data(),sendStatus.data()) ; CDistributedElement remoteElement(dstView_->getGlobalSize(), remoteArrayIndex) ; remoteElement.addFullView() ; gathererConnector_=new CGathererConnector(remoteElement.getView(CElementView::FULL),dstView_) ; gathererConnector_->computeConnector() ; } }