Timestamp: 05/31/17 10:36:33
Author: oabramkina
Message:

Fix a bug in context initialization: context registration is now scheduled by the event scheduler.
Tested on Curie with test_complete and test_xios2_cmip6.

File: 1 edited

Legend:

  (unmarked)  Unmodified
  +           Added (r1148)
  -           Removed (r1130)
  • XIOS/dev/dev_olga/src/client.cpp

    --- r1130
    +++ r1148
    @@ -18,7 +18,7 @@
         MPI_Comm CClient::interComm ;
         std::list<MPI_Comm> CClient::contextInterComms;
    -    vector <int> CClient::serverLeader;
    +    int CClient::serverLeader;
         bool CClient::is_MPI_Initialized ;
    -    int CClient::rank = INVALID_RANK;
    +    int CClient::rank_ = INVALID_RANK;
         StdOFStream CClient::m_infoStream;
         StdOFStream CClient::m_errorStream;
     
    @@ -66,5 +66,5 @@

          MPI_Comm_size(CXios::globalComm,&size) ;
    -     MPI_Comm_rank(CXios::globalComm,&rank);
    +     MPI_Comm_rank(CXios::globalComm,&rank_);

          hashAll=new unsigned long[size] ;
     
    @@ -99,16 +99,17 @@

          myColor=colors[hashClient] ;
    -     MPI_Comm_split(CXios::globalComm,myColor,rank,&intraComm) ;
    +     MPI_Comm_split(CXios::globalComm,myColor,rank_,&intraComm) ;

          if (CXios::usingServer)
          {
            int clientLeader=leaders[hashClient] ;
    -       serverLeader.push_back(leaders[hashServer]) ;
    +       serverLeader=leaders[hashServer] ;
            int intraCommSize, intraCommRank ;
            MPI_Comm_size(intraComm,&intraCommSize) ;
            MPI_Comm_rank(intraComm,&intraCommRank) ;
    -       info(50)<<"intercommCreate::client "<<rank<<" intraCommSize : "<<intraCommSize
    -              <<" intraCommRank :"<<intraCommRank<<"  clientLeader "<< serverLeader.back()<<endl ;
    -       MPI_Intercomm_create(intraComm, 0, CXios::globalComm, serverLeader.back(), 0, &interComm) ;
    +       info(50)<<"intercommCreate::client "<<rank_<<" intraCommSize : "<<intraCommSize
    +              <<" intraCommRank :"<<intraCommRank<<"  clientLeader "<< serverLeader<<endl ;
    +       MPI_Intercomm_create(intraComm, 0, CXios::globalComm, serverLeader, 0, &interComm) ;
    +       rank_ = intraCommRank;
          }
          else
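For readers less familiar with the MPI calls in this hunk: the client splits the global communicator into client and server groups and then bridges them with an intercommunicator rooted at a single server leader, which is why serverLeader can now be a plain int. The following standalone sketch is not XIOS code; the colour assignment and the choice of the last global rank as the server are assumptions made purely for illustration.

  // Minimal sketch (not XIOS code): split MPI_COMM_WORLD into a client group
  // and a server group, then connect them with a single intercommunicator.
  // Assumptions for illustration only: the last global rank is the server,
  // every other rank is a client, and each group's leader is its local rank 0.
  #include <mpi.h>
  #include <cstdio>

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);

    int globalRank, globalSize;
    MPI_Comm_rank(MPI_COMM_WORLD, &globalRank);
    MPI_Comm_size(MPI_COMM_WORLD, &globalSize);
    if (globalSize < 2) MPI_Abort(MPI_COMM_WORLD, 1);   // need both groups

    // Colour 0 = client, colour 1 = server; the key keeps the global ordering.
    int myColor = (globalRank == globalSize - 1) ? 1 : 0;
    MPI_Comm intraComm;
    MPI_Comm_split(MPI_COMM_WORLD, myColor, globalRank, &intraComm);

    // Global rank of the remote group's leader: a single int, as serverLeader
    // is after this changeset.
    int remoteLeader = (myColor == 0) ? globalSize - 1 : 0;

    MPI_Comm interComm;
    MPI_Intercomm_create(intraComm, 0, MPI_COMM_WORLD, remoteLeader, 0, &interComm);

    int rank_;
    MPI_Comm_rank(intraComm, &rank_);   // rank within the group, like rank_ above
    printf("global %d (%s): intra rank %d\n",
           globalRank, myColor ? "server" : "client", rank_);

    MPI_Comm_free(&interComm);
    MPI_Comm_free(&intraComm);
    MPI_Finalize();
    return 0;
  }

Run with at least two MPI processes; global rank 0 plays the client leader and the last rank plays the server leader, roles that leaders[hashClient] and leaders[hashServer] provide in the real code.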
     
    @@ -149,8 +150,8 @@
        {
          MPI_Status status ;
    -     MPI_Comm_rank(intraComm,&rank) ;
    +     MPI_Comm_rank(intraComm,&rank_) ;

          oasis_get_intercomm(interComm,CXios::xiosCodeId) ;
    -     if (rank==0) MPI_Recv(&serverLeader,1, MPI_INT, 0, 0, interComm, &status) ;
    +     if (rank_==0) MPI_Recv(&serverLeader,1, MPI_INT, 0, 0, interComm, &status) ;
          MPI_Bcast(&serverLeader,1,MPI_INT,0,intraComm) ;
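The OASIS branch uses a small leader-exchange pattern: only the local leader receives the server leader rank over the intercommunicator, and an MPI_Bcast then makes the value known to every rank of the intracommunicator. A compact sketch of that pattern, assuming intraComm and interComm already exist (for instance as built in the previous example), could look like this; receiveServerLeader is a made-up name, not an XIOS function.

  // Sketch of the leader-exchange pattern above (not XIOS code).
  // Assumes intraComm and interComm were created beforehand and that the
  // remote group's rank 0 sends exactly one int.
  #include <mpi.h>

  int receiveServerLeader(MPI_Comm intraComm, MPI_Comm interComm)
  {
    int rank_ = 0;
    MPI_Comm_rank(intraComm, &rank_);

    int serverLeader = 0;
    MPI_Status status;

    // Only the local leader talks to the remote group over the intercomm...
    if (rank_ == 0)
      MPI_Recv(&serverLeader, 1, MPI_INT, 0, 0, interComm, &status);

    // ...then the value is shared with every rank of the intracommunicator.
    MPI_Bcast(&serverLeader, 1, MPI_INT, 0, intraComm);
    return serverLeader;
  }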
     
    @@ -216,18 +217,15 @@
          buffer<<msg ;

    -     for (int i = 0; i < serverLeader.size(); ++i)
    -     {
    -       MPI_Send(buff, buffer.count(), MPI_CHAR, serverLeader[i], 1, CXios::globalComm) ;
    -       MPI_Intercomm_create(contextComm, 0, CXios::globalComm, serverLeader[i], 10+globalRank, &contextInterComm) ;
    -       info(10)<<"Register new Context : "<<id<<endl ;
    -       MPI_Comm inter ;
    -       MPI_Intercomm_merge(contextInterComm,0,&inter) ;
    -       MPI_Barrier(inter) ;
    -
    -       context->initClient(contextComm,contextInterComm) ;
    -
    -       contextInterComms.push_back(contextInterComm);
    -       MPI_Comm_free(&inter);
    -     }
    +     MPI_Send(buff, buffer.count(), MPI_CHAR, serverLeader, 1, CXios::globalComm) ;
    +     MPI_Intercomm_create(contextComm, 0, CXios::globalComm, serverLeader, 10+globalRank, &contextInterComm) ;
    +     info(10)<<"Register new Context : "<<id<<endl ;
    +     MPI_Comm inter ;
    +     MPI_Intercomm_merge(contextInterComm,0,&inter) ;
    +     MPI_Barrier(inter) ;
    +
    +     context->initClient(contextComm,contextInterComm) ;
    +
    +     contextInterComms.push_back(contextInterComm);
    +     MPI_Comm_free(&inter);
          delete [] buff ;
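After this change each context is announced to a single server leader: one MPI_Send carries the serialized registration message, a dedicated intercommunicator is created for the context (the tag 10+globalRank presumably keeps concurrent creations distinct), and the merged communicator is used only for a synchronising barrier. A rough sketch of that handshake is shown below; registerContextSketch and msgLen are made-up names, MPI_COMM_WORLD stands in for CXios::globalComm, and the XIOS buffer and context objects are replaced by plain arguments.

  // Rough sketch of the client-side context registration above (not XIOS code).
  #include <mpi.h>
  #include <list>

  static std::list<MPI_Comm> contextInterComms;   // stands in for CClient::contextInterComms

  void registerContextSketch(char* buff, int msgLen, MPI_Comm contextComm,
                             int serverLeader, int globalRank)
  {
    // Announce the new context to the single server leader.
    MPI_Send(buff, msgLen, MPI_CHAR, serverLeader, 1, MPI_COMM_WORLD);

    // One intercommunicator per registered context; the tag separates
    // registrations issued by different global ranks.
    MPI_Comm contextInterComm;
    MPI_Intercomm_create(contextComm, 0, MPI_COMM_WORLD, serverLeader,
                         10 + globalRank, &contextInterComm);

    // Merge the two sides and synchronise before the context is used.
    MPI_Comm inter;
    MPI_Intercomm_merge(contextInterComm, 0, &inter);
    MPI_Barrier(inter);

    // The real code calls context->initClient(contextComm, contextInterComm) here.
    contextInterComms.push_back(contextInterComm);
    MPI_Comm_free(&inter);
  }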
     
    @@ -276,8 +274,10 @@
       }

    -
    +   /*!
    +   * Return rank in model intraComm
    +   */
       int CClient::getRank()
       {
    -     return rank;
    +     return rank_;
       }
