Timestamp: 01/22/19 16:43:32
Author:    yushan
Message:   revert erroneous commit on trunk

File:      1 edited

Legend: lines prefixed with '-' were removed in r1639, lines prefixed with '+' were added, and unprefixed lines are unchanged context.
  • XIOS/trunk/src/client_client_dht_template_impl.hpp

--- XIOS/trunk/src/client_client_dht_template_impl.hpp (r1638)
+++ XIOS/trunk/src/client_client_dht_template_impl.hpp (r1639)
@@ -14,8 +14,8 @@
 {
 template<typename T, typename H>
-CClientClientDHTTemplate<T,H>::CClientClientDHTTemplate(const ep_lib::MPI_Comm& clientIntraComm)
+CClientClientDHTTemplate<T,H>::CClientClientDHTTemplate(const MPI_Comm& clientIntraComm)
   : H(clientIntraComm), index2InfoMapping_(), indexToInfoMappingLevel_(), nbClient_(0)
 {
-  ep_lib::MPI_Comm_size(clientIntraComm, &nbClient_);
+  MPI_Comm_size(clientIntraComm, &nbClient_);
   this->computeMPICommLevel();
   int nbLvl = this->getNbLevel();
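
Throughout the file, the revert swaps the ep_lib endpoint-MPI wrappers (ep_lib::MPI_Comm, ep_lib::MPI_Comm_size, EP_INT, EP_CHAR, EP_UNSIGNED_LONG, ...) back to the standard MPI API, and drops the raw-pointer ep_lib::MPI_Request* helper overloads in favour of the std::vector<MPI_Request> versions. A minimal sketch of the restored constructor pattern, assuming only <mpi.h> (the template machinery and the hash base class H from the diff are elided):

    #include <mpi.h>

    // Sketch: query the intra-communicator size with the standard API
    // and cache it, exactly as the constructors above do with nbClient_.
    struct DHTCommInfo
    {
      explicit DHTCommInfo(const MPI_Comm& clientIntraComm) : nbClient_(0)
      {
        MPI_Comm_size(clientIntraComm, &nbClient_);  // plain MPI, no ep_lib::
      }
      int nbClient_;
    };
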
     
@@ -34,8 +34,8 @@
 template<typename T, typename H>
 CClientClientDHTTemplate<T,H>::CClientClientDHTTemplate(const Index2InfoTypeMap& indexInfoMap,
-                                                        const ep_lib::MPI_Comm& clientIntraComm)
+                                                        const MPI_Comm& clientIntraComm)
   : H(clientIntraComm), index2InfoMapping_(), indexToInfoMappingLevel_(), nbClient_(0)
 {
-  ep_lib::MPI_Comm_size(clientIntraComm, &nbClient_);
+  MPI_Comm_size(clientIntraComm, &nbClient_);
   this->computeMPICommLevel();
   int nbLvl = this->getNbLevel();
     
@@ -59,8 +59,8 @@
 template<typename T, typename H>
 CClientClientDHTTemplate<T,H>::CClientClientDHTTemplate(const Index2VectorInfoTypeMap& indexInfoMap,
-                                                        const ep_lib::MPI_Comm& clientIntraComm)
+                                                        const MPI_Comm& clientIntraComm)
   : H(clientIntraComm), index2InfoMapping_(), indexToInfoMappingLevel_(), nbClient_(0)
 {
-  ep_lib::MPI_Comm_size(clientIntraComm, &nbClient_);
+  MPI_Comm_size(clientIntraComm, &nbClient_);
   this->computeMPICommLevel();
   int nbLvl = this->getNbLevel();
     
@@ -95,9 +95,9 @@
 template<typename T, typename H>
 void CClientClientDHTTemplate<T,H>::computeIndexInfoMappingLevel(const CArray<size_t,1>& indices,
-                                                                 const ep_lib::MPI_Comm& commLevel,
+                                                                 const MPI_Comm& commLevel,
                                                                  int level)
 {
   int clientRank;
-  ep_lib::MPI_Comm_rank(commLevel,&clientRank);
+  MPI_Comm_rank(commLevel,&clientRank);
   int groupRankBegin = this->getGroupBegin()[level];
   int nbClient = this->getNbInGroup()[level];
     
@@ -169,14 +169,5 @@
     recvIndexBuff = new unsigned long[recvNbIndexCount];
 
-  int request_size = 0;
-  for (int idx = 0; idx < recvRankClient.size(); ++idx)
-  {
-    if (0 != recvNbIndexClientCount[idx])
-      request_size ++;
-  }
-
-  request_size += client2ClientIndex.size();
-
-  std::vector<ep_lib::MPI_Request> request(request_size);
+  std::vector<MPI_Request> request;
   std::vector<int>::iterator itbRecvIndex = recvRankClient.begin(), itRecvIndex,
                              iteRecvIndex = recvRankClient.end(),
     
@@ -185,9 +176,8 @@
   int currentIndex = 0;
   int nbRecvClient = recvRankClient.size();
-  int request_position = 0;
   for (int idx = 0; idx < nbRecvClient; ++idx)
   {
     if (0 != recvNbIndexClientCount[idx])
-      recvIndexFromClients(recvRankClient[idx], recvIndexBuff+currentIndex, recvNbIndexClientCount[idx], commLevel, &request[request_position++]);
+      recvIndexFromClients(recvRankClient[idx], recvIndexBuff+currentIndex, recvNbIndexClientCount[idx], commLevel, request);
     currentIndex += recvNbIndexClientCount[idx];
   }
     
@@ -196,8 +186,8 @@
                                                 iteIndex = client2ClientIndex.end();
   for (itIndex = itbIndex; itIndex != iteIndex; ++itIndex)
-    sendIndexToClients(itIndex->first, (itIndex->second), sendNbIndexBuff[itIndex->first-groupRankBegin], commLevel, &request[request_position++]);
-
-  std::vector<ep_lib::MPI_Status> status(request.size());
-  ep_lib::MPI_Waitall(request.size(), &request[0], &status[0]);
+    sendIndexToClients(itIndex->first, (itIndex->second), sendNbIndexBuff[itIndex->first-groupRankBegin], commLevel, request);
+
+  std::vector<MPI_Status> status(request.size());
+  MPI_Waitall(request.size(), &request[0], &status[0]);
 
   CArray<size_t,1>* tmpGlobalIndex;
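
The removed code pre-computed request_size and filled a fixed-size request array through a manually advanced request_position; the restored code lets each helper push_back onto a growing std::vector<MPI_Request> and then waits on however many requests were actually posted. A self-contained sketch of that pattern, assuming illustrative all-to-all buffers and a zero tag (not taken from the diff):

    #include <mpi.h>
    #include <vector>

    int main(int argc, char** argv)
    {
      MPI_Init(&argc, &argv);
      int rank, nbClient;
      MPI_Comm_rank(MPI_COMM_WORLD, &rank);
      MPI_Comm_size(MPI_COMM_WORLD, &nbClient);

      std::vector<unsigned long> recvBuff(nbClient), sendBuff(nbClient, rank);
      std::vector<MPI_Request> request;          // grows with each post

      for (int src = 0; src < nbClient; ++src)   // one receive per peer
      {
        request.push_back(MPI_Request());
        MPI_Irecv(&recvBuff[src], 1, MPI_UNSIGNED_LONG, src, 0,
                  MPI_COMM_WORLD, &request.back());
      }
      for (int dst = 0; dst < nbClient; ++dst)   // one send per peer
      {
        request.push_back(MPI_Request());
        MPI_Isend(&sendBuff[dst], 1, MPI_UNSIGNED_LONG, dst, 0,
                  MPI_COMM_WORLD, &request.back());
      }

      // The vector's size equals the number of posted requests by
      // construction, so no separate size bookkeeping can drift.
      std::vector<MPI_Status> status(request.size());
      MPI_Waitall(static_cast<int>(request.size()), &request[0], &status[0]);

      MPI_Finalize();
      return 0;
    }

Because MPI_Irecv/MPI_Isend write the request handle before returning, the address &request.back() is only needed during the call itself; later reallocations of the vector merely copy the handle values, which is safe.
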
     
@@ -252,24 +242,5 @@
   }
 
-  int requestOnReturn_size=0;
-  for (int idx = 0; idx < recvRankOnReturn.size(); ++idx)
-  {
-    if (0 != recvNbIndexOnReturn[idx])
-    {
-      requestOnReturn_size += 2;
-    }
-  }
-
-  for (int idx = 0; idx < nbRecvClient; ++idx)
-  {
-    if (0 != sendNbIndexOnReturn[idx])
-    {
-      requestOnReturn_size += 2;
-    }
-  }
-
-  int requestOnReturn_position=0;
-
-  std::vector<ep_lib::MPI_Request> requestOnReturn(requestOnReturn_size);
+  std::vector<MPI_Request> requestOnReturn;
   currentIndex = 0;
   for (int idx = 0; idx < recvRankOnReturn.size(); ++idx)
     
@@ -277,9 +248,9 @@
     if (0 != recvNbIndexOnReturn[idx])
     {
-      recvIndexFromClients(recvRankOnReturn[idx], recvIndexBuffOnReturn+currentIndex, recvNbIndexOnReturn[idx], commLevel, &requestOnReturn[requestOnReturn_position++]);
+      recvIndexFromClients(recvRankOnReturn[idx], recvIndexBuffOnReturn+currentIndex, recvNbIndexOnReturn[idx], commLevel, requestOnReturn);
       recvInfoFromClients(recvRankOnReturn[idx],
                           recvInfoBuffOnReturn+currentIndex*ProcessDHTElement<InfoType>::typeSize(),
                           recvNbIndexOnReturn[idx]*ProcessDHTElement<InfoType>::typeSize(),
-                          commLevel, &requestOnReturn[requestOnReturn_position++]);
+                          commLevel, requestOnReturn);
     }
     currentIndex += recvNbIndexOnReturn[idx];
     
@@ -315,13 +286,13 @@
 
       sendIndexToClients(rank, client2ClientIndexOnReturn[rank],
-                         sendNbIndexOnReturn[idx], commLevel, &requestOnReturn[requestOnReturn_position++]);
+                         sendNbIndexOnReturn[idx], commLevel, requestOnReturn);
       sendInfoToClients(rank, client2ClientInfoOnReturn[rank],
-                        sendNbIndexOnReturn[idx]*ProcessDHTElement<InfoType>::typeSize(), commLevel, &requestOnReturn[requestOnReturn_position++]);
+                        sendNbIndexOnReturn[idx]*ProcessDHTElement<InfoType>::typeSize(), commLevel, requestOnReturn);
     }
     currentIndex += recvNbIndexClientCount[idx];
   }
 
-  std::vector<ep_lib::MPI_Status> statusOnReturn(requestOnReturn.size());
-  ep_lib::MPI_Waitall(requestOnReturn.size(), &requestOnReturn[0], &statusOnReturn[0]);
+  std::vector<MPI_Status> statusOnReturn(requestOnReturn.size());
+  MPI_Waitall(requestOnReturn.size(), &requestOnReturn[0], &statusOnReturn[0]);
 
   Index2VectorInfoTypeMap indexToInfoMapping;
     
@@ -389,9 +360,9 @@
 template<typename T, typename H>
 void CClientClientDHTTemplate<T,H>::computeDistributedIndex(const Index2VectorInfoTypeMap& indexInfoMap,
-                                                            const ep_lib::MPI_Comm& commLevel,
+                                                            const MPI_Comm& commLevel,
                                                             int level)
 {
   int clientRank;
-  ep_lib::MPI_Comm_rank(commLevel,&clientRank);
+  MPI_Comm_rank(commLevel,&clientRank);
   computeSendRecvRank(level, clientRank);
 
     
@@ -468,32 +439,16 @@
   // it will send a message to the correct clients.
   // Contents of the message are index and its corresponding informatioin
-  int request_size = 0;
-   for (int idx = 0; idx < recvRankClient.size(); ++idx)
-   {
-     if (0 != recvNbIndexClientCount[idx])
-     {
-       request_size += 2;
-     }
-   }
-
-   request_size += client2ClientIndex.size();
-   request_size += client2ClientInfo.size();
-
-   std::vector<ep_lib::MPI_Request> request(request_size);
-
+  std::vector<MPI_Request> request;
   int currentIndex = 0;
   int nbRecvClient = recvRankClient.size();
-  int request_position=0;
   for (int idx = 0; idx < nbRecvClient; ++idx)
   {
     if (0 != recvNbIndexClientCount[idx])
    {
-      recvIndexFromClients(recvRankClient[idx], recvIndexBuff+currentIndex, recvNbIndexClientCount[idx], commLevel, &request[request_position++]);
-      //if(clientRank==0) printf("recv index request = %p, inserted = %p, request = %d\n", &request[0], request.back(), *static_cast< int*>(request.back()->mpi_request));
+      recvIndexFromClients(recvRankClient[idx], recvIndexBuff+currentIndex, recvNbIndexClientCount[idx], commLevel, request);
       recvInfoFromClients(recvRankClient[idx],
-                            recvInfoBuff+currentIndex*ProcessDHTElement<InfoType>::typeSize(),
-                            recvNbIndexClientCount[idx]*ProcessDHTElement<InfoType>::typeSize(),
-                            commLevel, &request[request_position++]);
-      //if(clientRank==0) printf("recv info request = %p, inserted = %p, request = %d\n", &request[0], request.back(), *static_cast< int*>(request.back()->mpi_request));
+                          recvInfoBuff+currentIndex*ProcessDHTElement<InfoType>::typeSize(),
+                          recvNbIndexClientCount[idx]*ProcessDHTElement<InfoType>::typeSize(),
+                          commLevel, request);
     }
     currentIndex += recvNbIndexClientCount[idx];
     
@@ -503,15 +458,12 @@
                                                 iteIndex = client2ClientIndex.end();
   for (itIndex = itbIndex; itIndex != iteIndex; ++itIndex)
-  {  sendIndexToClients(itIndex->first, itIndex->second, sendNbIndexBuff[itIndex->first-groupRankBegin], commLevel, &request[request_position++]);
-  }    //if(clientRank==0) printf("send index request = %p, inserted = %p, request = %d\n", &request[0], request.back(), *static_cast< int*>(request.back()->mpi_request));}
+    sendIndexToClients(itIndex->first, itIndex->second, sendNbIndexBuff[itIndex->first-groupRankBegin], commLevel, request);
   std::unordered_map<int, unsigned char*>::iterator itbInfo = client2ClientInfo.begin(), itInfo,
                                                       iteInfo = client2ClientInfo.end();
   for (itInfo = itbInfo; itInfo != iteInfo; ++itInfo)
-   { sendInfoToClients(itInfo->first, itInfo->second, sendNbInfo[itInfo->first-groupRankBegin], commLevel, &request[request_position++]);
-   }//   if(clientRank==0) printf("send info request = %p, inserted = %p, request = %d\n", &request[0], request.back(), *static_cast< int*>(request.back()->mpi_request));}
-
-  std::vector<ep_lib::MPI_Status> status(request.size());
-
-  ep_lib::MPI_Waitall(request.size(), &request[0], &status[0]);
+    sendInfoToClients(itInfo->first, itInfo->second, sendNbInfo[itInfo->first-groupRankBegin], commLevel, request);
+
+  std::vector<MPI_Status> status(request.size());
+  MPI_Waitall(request.size(), &request[0], &status[0]);
 
   Index2VectorInfoTypeMap indexToInfoMapping;
     
@@ -566,28 +518,11 @@
 template<typename T, typename H>
 void CClientClientDHTTemplate<T,H>::sendIndexToClients(int clientDestRank, size_t* indices, size_t indiceSize,
-                                                       const ep_lib::MPI_Comm& clientIntraComm,
-                                                       std::vector<ep_lib::MPI_Request>& requestSendIndex)
-{
-  ep_lib::MPI_Request request;
+                                                       const MPI_Comm& clientIntraComm,
+                                                       std::vector<MPI_Request>& requestSendIndex)
+{
+  MPI_Request request;
   requestSendIndex.push_back(request);
-  ep_lib::MPI_Isend(indices, indiceSize, EP_UNSIGNED_LONG,
+  MPI_Isend(indices, indiceSize, MPI_UNSIGNED_LONG,
             clientDestRank, MPI_DHT_INDEX, clientIntraComm, &(requestSendIndex.back()));
-}
-
-/*!
-  Send message containing index to clients
-  \param [in] clientDestRank rank of destination client
-  \param [in] indices index to send
-  \param [in] indiceSize size of index array to send
-  \param [in] clientIntraComm communication group of client
-  \param [in] requestSendIndex sending request
-*/
-template<typename T, typename H>
-void CClientClientDHTTemplate<T,H>::sendIndexToClients(int clientDestRank, size_t* indices, size_t indiceSize,
-                                                       const ep_lib::MPI_Comm& clientIntraComm,
-                                                       ep_lib::MPI_Request* requestSendIndex)
-{
-  ep_lib::MPI_Isend(indices, indiceSize, EP_UNSIGNED_LONG,
-            clientDestRank, MPI_DHT_INDEX, clientIntraComm, requestSendIndex);
 }
 
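
The revert also deletes the raw-pointer ep_lib::MPI_Request* overloads of the four send/recv helpers, leaving only the vector-based versions. A minimal standalone sketch of the retained helper shape, assuming a hypothetical tag constant TAG_INDEX in place of MPI_DHT_INDEX, and that size_t matches MPI_UNSIGNED_LONG as the original code assumes:

    #include <mpi.h>
    #include <vector>
    #include <cstddef>

    const int TAG_INDEX = 15;  // hypothetical stand-in for MPI_DHT_INDEX

    // Retained helper shape: append a fresh handle, then let MPI_Isend
    // fill it in place via &requests.back(). The address is only read
    // during the call, so later growth of the vector is harmless.
    void sendIndexToClient(int clientDestRank, size_t* indices, size_t indiceSize,
                           const MPI_Comm& clientIntraComm,
                           std::vector<MPI_Request>& requests)
    {
      requests.push_back(MPI_Request());
      MPI_Isend(indices, static_cast<int>(indiceSize), MPI_UNSIGNED_LONG,
                clientDestRank, TAG_INDEX, clientIntraComm, &requests.back());
    }

The caller collects every helper's request in one vector and finishes with a single MPI_Waitall, as in the hunks above.
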
     
@@ -601,27 +536,11 @@
 template<typename T, typename H>
 void CClientClientDHTTemplate<T,H>::recvIndexFromClients(int clientSrcRank, size_t* indices, size_t indiceSize,
-                                                         const ep_lib::MPI_Comm& clientIntraComm,
-                                                         std::vector<ep_lib::MPI_Request>& requestRecvIndex)
-{
-  ep_lib::MPI_Request request;
+                                                         const MPI_Comm& clientIntraComm,
+                                                         std::vector<MPI_Request>& requestRecvIndex)
+{
+  MPI_Request request;
   requestRecvIndex.push_back(request);
-  ep_lib::MPI_Irecv(indices, indiceSize, EP_UNSIGNED_LONG,
+  MPI_Irecv(indices, indiceSize, MPI_UNSIGNED_LONG,
            clientSrcRank, MPI_DHT_INDEX, clientIntraComm, &(requestRecvIndex.back()));
-}
-
-/*!
-  Receive message containing index to clients
-  \param [in] clientDestRank rank of destination client
-  \param [in] indices index to send
-  \param [in] clientIntraComm communication group of client
-  \param [in] requestRecvIndex receiving request
-*/
-template<typename T, typename H>
-void CClientClientDHTTemplate<T,H>::recvIndexFromClients(int clientSrcRank, size_t* indices, size_t indiceSize,
-                                                         const ep_lib::MPI_Comm& clientIntraComm,
-                                                         ep_lib::MPI_Request *requestRecvIndex)
-{
-  ep_lib::MPI_Irecv(indices, indiceSize, EP_UNSIGNED_LONG,
-            clientSrcRank, MPI_DHT_INDEX, clientIntraComm, requestRecvIndex);
 }
 
     
@@ -636,29 +555,12 @@
 template<typename T, typename H>
 void CClientClientDHTTemplate<T,H>::sendInfoToClients(int clientDestRank, unsigned char* info, int infoSize,
-                                                      const ep_lib::MPI_Comm& clientIntraComm,
-                                                      std::vector<ep_lib::MPI_Request>& requestSendInfo)
-{
-  ep_lib::MPI_Request request;
+                                                      const MPI_Comm& clientIntraComm,
+                                                      std::vector<MPI_Request>& requestSendInfo)
+{
+  MPI_Request request;
   requestSendInfo.push_back(request);
 
-  ep_lib::MPI_Isend(info, infoSize, EP_CHAR,
+  MPI_Isend(info, infoSize, MPI_CHAR,
             clientDestRank, MPI_DHT_INFO, clientIntraComm, &(requestSendInfo.back()));
-}
-
-/*!
-  Send message containing information to clients
-  \param [in] clientDestRank rank of destination client
-  \param [in] info info array to send
-  \param [in] infoSize info array size to send
-  \param [in] clientIntraComm communication group of client
-  \param [in] requestSendInfo sending request
-*/
-template<typename T, typename H>
-void CClientClientDHTTemplate<T,H>::sendInfoToClients(int clientDestRank, unsigned char* info, int infoSize,
-                                                      const ep_lib::MPI_Comm& clientIntraComm,
-                                                      ep_lib::MPI_Request *requestSendInfo)
-{
-  ep_lib::MPI_Isend(info, infoSize, EP_CHAR,
-            clientDestRank, MPI_DHT_INFO, clientIntraComm, requestSendInfo);
 }
 
     
@@ -673,29 +575,12 @@
 template<typename T, typename H>
 void CClientClientDHTTemplate<T,H>::recvInfoFromClients(int clientSrcRank, unsigned char* info, int infoSize,
-                                                        const ep_lib::MPI_Comm& clientIntraComm,
-                                                        std::vector<ep_lib::MPI_Request>& requestRecvInfo)
-{
-  ep_lib::MPI_Request request;
+                                                        const MPI_Comm& clientIntraComm,
+                                                        std::vector<MPI_Request>& requestRecvInfo)
+{
+  MPI_Request request;
   requestRecvInfo.push_back(request);
 
-  ep_lib::MPI_Irecv(info, infoSize, EP_CHAR,
+  MPI_Irecv(info, infoSize, MPI_CHAR,
             clientSrcRank, MPI_DHT_INFO, clientIntraComm, &(requestRecvInfo.back()));
-}
-
-/*!
-  Receive message containing information from other clients
-  \param [in] clientDestRank rank of destination client
-  \param [in] info info array to receive
-  \param [in] infoSize info array size to receive
-  \param [in] clientIntraComm communication group of client
-  \param [in] requestRecvInfo list of receiving request
-*/
-template<typename T, typename H>
-void CClientClientDHTTemplate<T,H>::recvInfoFromClients(int clientSrcRank, unsigned char* info, int infoSize,
-                                                        const ep_lib::MPI_Comm& clientIntraComm,
-                                                        ep_lib::MPI_Request* requestRecvInfo)
-{
-  ep_lib::MPI_Irecv(info, infoSize, EP_CHAR,
-            clientSrcRank, MPI_DHT_INFO, clientIntraComm, requestRecvInfo);
 }
 
     
@@ -766,11 +651,11 @@
 {
   recvNbElements.resize(recvNbRank.size());
-  std::vector<ep_lib::MPI_Request> request(sendNbRank.size()+recvNbRank.size());
-  std::vector<ep_lib::MPI_Status> requestStatus(sendNbRank.size()+recvNbRank.size());
+  std::vector<MPI_Request> request(sendNbRank.size()+recvNbRank.size());
+  std::vector<MPI_Status> requestStatus(sendNbRank.size()+recvNbRank.size());
 
   int nRequest = 0;
   for (int idx = 0; idx < recvNbRank.size(); ++idx)
   {
-    ep_lib::MPI_Irecv(&recvNbElements[0]+idx, 1, EP_INT,
+    MPI_Irecv(&recvNbElements[0]+idx, 1, MPI_INT,
               recvNbRank[idx], MPI_DHT_INDEX_1, this->internalComm_, &request[nRequest]);
     ++nRequest;
     
@@ -779,10 +664,10 @@
   for (int idx = 0; idx < sendNbRank.size(); ++idx)
   {
-    ep_lib::MPI_Isend(&sendNbElements[0]+idx, 1, EP_INT,
+    MPI_Isend(&sendNbElements[0]+idx, 1, MPI_INT,
               sendNbRank[idx], MPI_DHT_INDEX_1, this->internalComm_, &request[nRequest]);
     ++nRequest;
   }
 
-  ep_lib::MPI_Waitall(sendNbRank.size()+recvNbRank.size(), &request[0], &requestStatus[0]);
+  MPI_Waitall(sendNbRank.size()+recvNbRank.size(), &request[0], &requestStatus[0]);
 }
 
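
Unlike the hunks above, this routine keeps its pre-sized request array, since the count is simply sendNbRank.size()+recvNbRank.size(). A self-contained sketch of the same exchange-of-counts idiom, assuming a hypothetical tag TAG_NB in place of MPI_DHT_INDEX_1 and a caller-supplied communicator in place of this->internalComm_:

    #include <mpi.h>
    #include <vector>
    #include <cstddef>

    const int TAG_NB = 6;  // hypothetical stand-in for MPI_DHT_INDEX_1

    // Each rank tells its peers how many elements it will send and
    // learns how many it will receive: one Irecv/Isend pair per peer,
    // then a single Waitall over the fixed-size request array.
    void exchangeCounts(const std::vector<int>& sendNbRank,
                        const std::vector<int>& recvNbRank,
                        std::vector<int>& sendNbElements,   // non-const: MPI-2 Isend takes void*
                        std::vector<int>& recvNbElements,
                        MPI_Comm comm)
    {
      recvNbElements.resize(recvNbRank.size());
      std::vector<MPI_Request> request(sendNbRank.size() + recvNbRank.size());
      std::vector<MPI_Status> status(request.size());

      int nRequest = 0;
      for (std::size_t idx = 0; idx < recvNbRank.size(); ++idx)
        MPI_Irecv(&recvNbElements[idx], 1, MPI_INT,
                  recvNbRank[idx], TAG_NB, comm, &request[nRequest++]);
      for (std::size_t idx = 0; idx < sendNbRank.size(); ++idx)
        MPI_Isend(&sendNbElements[idx], 1, MPI_INT,
                  sendNbRank[idx], TAG_NB, comm, &request[nRequest++]);

      MPI_Waitall(static_cast<int>(request.size()), &request[0], &status[0]);
    }
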
     
@@ -811,11 +696,11 @@
   std::vector<int> recvBuff(recvBuffSize*2,0);
 
-  std::vector<ep_lib::MPI_Request> request(sendBuffSize+recvBuffSize);
-  std::vector<ep_lib::MPI_Status> requestStatus(sendBuffSize+recvBuffSize);
+  std::vector<MPI_Request> request(sendBuffSize+recvBuffSize);
+  std::vector<MPI_Status> requestStatus(sendBuffSize+recvBuffSize);
 
   int nRequest = 0;
   for (int idx = 0; idx < recvBuffSize; ++idx)
   {
-    ep_lib::MPI_Irecv(&recvBuff[0]+2*idx, 2, EP_INT,
+    MPI_Irecv(&recvBuff[0]+2*idx, 2, MPI_INT,
               recvRank[idx], MPI_DHT_INDEX_0, this->internalComm_, &request[nRequest]);
     ++nRequest;
     
@@ -831,10 +716,10 @@
   for (int idx = 0; idx < sendBuffSize; ++idx)
   {
-    ep_lib::MPI_Isend(&sendBuff[idx*2], 2, EP_INT,
+    MPI_Isend(&sendBuff[idx*2], 2, MPI_INT,
              sendRank[idx], MPI_DHT_INDEX_0, this->internalComm_, &request[nRequest]);
     ++nRequest;
   }
 
-  ep_lib::MPI_Waitall(sendBuffSize+recvBuffSize, &request[0], &requestStatus[0]);
+  MPI_Waitall(sendBuffSize+recvBuffSize, &request[0], &requestStatus[0]);
   int nbRecvRank = 0, nbRecvElements = 0;
   recvNbRank.clear();