#include "remote_connector.hpp" #include "client_client_dht_template.hpp" namespace xios { void CRemoteConnector::computeConnector(void) { CClientClientDHTTemplate::Index2VectorInfoTypeMap info ; for(auto& dstIndex : dstView_->getIndex()) { int rank=dstIndex.first ; auto& indexList=dstIndex.second ; auto& globalIndex = dstView_->getGlobalIndex()[rank] ; for(int i=0;i dataRanks(info, localComm_) ; set setGlobalIndex ; // all global index from src auto& srcIndex = srcView_->getIndex() ; auto& globalIndex = srcView_->getGlobalIndex() ; int globalIndexSize = globalIndex.numElements() ; int indexSize = srcIndex.numElements() ; for(int i=0;i=0 & srcIndex(i) srcGlobalIndex(setGlobalIndex.size()) ; int i=0 ; for(auto& globalIndex : setGlobalIndex) { srcGlobalIndex(i) = globalIndex ; i++ ; } dataRanks.computeIndexInfoMapping(srcGlobalIndex) ; const auto& returnInfo = dataRanks.getInfoIndexMap() ; // so we have the info where to send data : rank and local index. // create the interconnector : map> element ; for(int i=0;i=0 & srcIndex(i) arrayTmp(rankIndex.second.data(), shape(rankIndex.second.size()), duplicateData) ; element_[rankIndex.first].reference(arrayTmp) ; } // compute the number of senders for a remote destination view int commSize ; MPI_Comm_size(localComm_, &commSize) ; vector ranks(commSize,0) ; for(auto& rank : connector_) ranks[rank.first] = 1 ; MPI_Allreduce(MPI_IN_PLACE,ranks.data(),commSize,MPI_INT,MPI_SUM,localComm_) ; for(auto& rank : connector_) nbSenders_[rank.first] = ranks[rank.first] ; } }