Timestamp: 02/17/17 17:55:37
Author: yushan
Message: ep_lib namespace specified when netcdf involved
Location: XIOS/dev/branch_yushan/src/node
Files: 9 edited

  • XIOS/dev/branch_yushan/src/node/axis.cpp (r1037 → r1053)

    @@ -742,6 +742,6 @@
           CContextServer* server = CContext::getCurrent()->server;
           axis->numberWrittenIndexes_ = axis->indexesToWrite.size();
    -      MPI_Allreduce(&axis->numberWrittenIndexes_, &axis->totalNumberWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm);
    -      MPI_Scan(&axis->numberWrittenIndexes_, &axis->offsetWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm);
    +      ep_lib::MPI_Allreduce(&axis->numberWrittenIndexes_, &axis->totalNumberWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm);
    +      ep_lib::MPI_Scan(&axis->numberWrittenIndexes_, &axis->offsetWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm);
           axis->offsetWrittenIndexes_ -= axis->numberWrittenIndexes_;
         }
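    Note: the hunk above qualifies the collective calls explicitly so that name lookup picks the ep_lib endpoint-MPI wrappers rather than the plain symbols from <mpi.h>. Below is a minimal, self-contained sketch of that lookup point, using hypothetical stand-in types; the real ep_lib signatures mirror the MPI ones and are not reproduced here.

        #include <cstdio>

        // Hypothetical stand-ins for the ep_lib endpoint-MPI wrappers; the real
        // XIOS types and signatures differ. This only illustrates why the call
        // site must say which MPI_Allreduce it means once a wrapper namespace
        // is in play.
        namespace ep_lib
        {
          struct MPI_Comm { int size; };            // wrapper communicator (sketch)

          // Fakes a SUM across 'comm.size' identical ranks so the example runs
          // serially, standing in for the wrapper's real collective.
          int MPI_Allreduce(const int* sendbuf, int* recvbuf, int count, MPI_Comm comm)
          {
            for (int i = 0; i < count; ++i) recvbuf[i] = sendbuf[i] * comm.size;
            return 0;
          }
        }

        int main()
        {
          ep_lib::MPI_Comm intraComm{4};            // pretend intra-communicator of 4 ranks
          int written = 3, total = 0;
          // Qualified as in the hunk above; an unqualified MPI_Allreduce could
          // bind to the global symbol from <mpi.h> when both are visible.
          ep_lib::MPI_Allreduce(&written, &total, 1, intraComm);
          std::printf("totalWrittenIndexes = %d\n", total);   // prints 12
          return 0;
        }

    With both ::MPI_Allreduce and ep_lib::MPI_Allreduce visible, an unqualified call can silently bind to the wrong one; the explicit ep_lib:: prefix rules that out.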
  • XIOS/dev/branch_yushan/src/node/context.cpp (r1037 → r1053)

    @@ -236,5 +236,5 @@

       //! Initialize client side
    -   void CContext::initClient(MPI_Comm intraComm, MPI_Comm interComm, CContext* cxtServer /*= 0*/)
    +   void CContext::initClient(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtServer /*= 0*/)
       {
         hasClient=true;

    @@ -248,5 +248,5 @@
         registryOut->setPath(getId()) ;

    -     MPI_Comm intraCommServer, interCommServer;
    +     ep_lib::MPI_Comm intraCommServer, interCommServer;
         if (cxtServer) // Attached mode
         {

    @@ -311,5 +311,5 @@

       //! Initialize server
    -   void CContext::initServer(MPI_Comm intraComm,MPI_Comm interComm, CContext* cxtClient /*= 0*/)
    +   void CContext::initServer(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtClient /*= 0*/)
       {
         hasServer=true;

    @@ -323,5 +323,5 @@
         registryOut->setPath(getId()) ;

    -     MPI_Comm intraCommClient, interCommClient;
    +     ep_lib::MPI_Comm intraCommClient, interCommClient;
         if (cxtClient) // Attached mode
         {

    @@ -369,8 +369,9 @@
            closeAllFile();
            registryOut->hierarchicalGatherRegistry() ;
    +        //registryOut->gatherRegistry() ;
            if (server->intraCommRank==0) CXios::globalRegistry->mergeRegistry(*registryOut) ;
          }

    -     for (std::list<MPI_Comm>::iterator it = comms.begin(); it != comms.end(); ++it)
    +     for (std::list<ep_lib::MPI_Comm>::iterator it = comms.begin(); it != comms.end(); ++it)
            MPI_Comm_free(&(*it));
          comms.clear();
  • XIOS/dev/branch_yushan/src/node/context.hpp (r1037 → r1053)

    @@ -88,6 +88,6 @@
          public :
             // Initialize server or client
    -        void initServer(MPI_Comm intraComm, MPI_Comm interComm, CContext* cxtClient = 0);
    -        void initClient(MPI_Comm intraComm, MPI_Comm interComm, CContext* cxtServer = 0);
    +        void initServer(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtClient = 0);
    +        void initClient(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtServer = 0);
             bool isInitialized(void);

    @@ -229,5 +229,5 @@
             StdString idServer_;
             CGarbageCollector garbageCollector;
    -        std::list<MPI_Comm> comms; //!< Communicators allocated internally
    +        std::list<ep_lib::MPI_Comm> comms; //!< Communicators allocated internally

          public: // Some function maybe removed in the near future
  • XIOS/dev/branch_yushan/src/node/domain.cpp (r1037 → r1053)

    @@ -475,5 +475,5 @@ (whitespace-only change)
       {
              CContext* context = CContext::getCurrent();
    -      CContextClient* client = context->client;
    +   CContextClient* client = context->client;
              lon_g.resize(ni_glo) ;
              lat_g.resize(nj_glo) ;
  • XIOS/dev/branch_yushan/src/node/field_impl.hpp (r1037 → r1053)

    @@ -20,9 +20,13 @@
        if (clientSourceFilter)
        {
    +      printf("file_impl.hpp : clientSourceFilter->streamData\n");
          clientSourceFilter->streamData(CContext::getCurrent()->getCalendar()->getCurrentDate(), _data);
    +      printf("file_impl.hpp : clientSourceFilter->streamData OKOK\n");
        }
        else if (!field_ref.isEmpty() || !content.empty())
    +    {
          ERROR("void CField::setData(const CArray<double, N>& _data)",
                << "Impossible to receive data from the model for a field [ id = " << getId() << " ] with a reference or an arithmetic operation.");
    +    }
      }

  • XIOS/dev/branch_yushan/src/node/file.cpp (r1037 → r1053)

    @@ -564,6 +564,6 @@

          if (isOpen) data_out->closeFile();
    -      if (time_counter_name.isEmpty()) data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), fileComm, multifile, isCollective));
    -      else data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), fileComm, multifile, isCollective, time_counter_name));
    +      if (time_counter_name.isEmpty()) data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), static_cast<MPI_Comm>(fileComm.mpi_comm), multifile, isCollective));
    +      else data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), static_cast<MPI_Comm>(fileComm.mpi_comm), multifile, isCollective, time_counter_name));
          isOpen = true;
        }
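    Note: this hunk is the "netcdf involved" part of the commit message: the netcdf input layer is compiled against plain MPI, so the raw handle is unwrapped from the ep_lib communicator at that boundary. The mpi_comm member is taken from the diff; everything else in this sketch is a hypothetical stand-in rather than the real CNc4DataInput interface.

        #include <cstdio>

        typedef int MPI_Comm;                        // stand-in for the raw MPI handle type

        namespace ep_lib
        {
          struct MPI_Comm { ::MPI_Comm mpi_comm; };  // wrapper exposing the raw handle
        }

        // Stand-in for a netcdf-facing interface that only understands raw
        // communicators (CNc4DataInput's real constructor is not reproduced here).
        void openParallelNc4(const char* path, ::MPI_Comm comm)
        {
          std::printf("opening %s on raw communicator %d\n", path, comm);
        }

        int main()
        {
          ep_lib::MPI_Comm fileComm{42};
          // The boundary cast from the hunk above: unwrap the raw handle before
          // handing it to the netcdf layer, which knows nothing about ep_lib.
          openParallelNc4("output.nc", static_cast<::MPI_Comm>(fileComm.mpi_comm));
          return 0;
        }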
  • XIOS/dev/branch_yushan/src/node/file.hpp (r1037 → r1053)

    @@ -159,5 +159,5 @@
             bool isOpen;
             bool allDomainEmpty;
    -         MPI_Comm fileComm;
    +         ep_lib::MPI_Comm fileComm;

          private :
  • XIOS/dev/branch_yushan/src/node/mesh.cpp (r1037 → r1053)

    @@ -493,5 +493,5 @@
     * \param [in] bounds_lat Array of boundary latitudes. Its size depends on the element type.
     */
    -  void CMesh::createMeshEpsilon(const MPI_Comm& comm,
    +  void CMesh::createMeshEpsilon(const ep_lib::MPI_Comm& comm,
                                    const CArray<double, 1>& lonvalue, const CArray<double, 1>& latvalue,
                                    const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat)

    @@ -1534,5 +1534,5 @@
       */

    -  void CMesh::getGloNghbFacesNodeType(const MPI_Comm& comm, const CArray<int, 1>& face_idx,
    +  void CMesh::getGloNghbFacesNodeType(const ep_lib::MPI_Comm& comm, const CArray<int, 1>& face_idx,
                                   const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat,
                                   CArray<int, 2>& nghbFaces)

    @@ -1690,5 +1690,5 @@
       */

    -  void CMesh::getGloNghbFacesEdgeType(const MPI_Comm& comm, const CArray<int, 1>& face_idx,
    +  void CMesh::getGloNghbFacesEdgeType(const ep_lib::MPI_Comm& comm, const CArray<int, 1>& face_idx,
                                   const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat,
                                   CArray<int, 2>& nghbFaces)

    @@ -1871,5 +1871,5 @@
       */

    -  void CMesh::getGlobalNghbFaces(const int nghbType, const MPI_Comm& comm,
    +  void CMesh::getGlobalNghbFaces(const int nghbType, const ep_lib::MPI_Comm& comm,
                                     const CArray<int, 1>& face_idx,
                                     const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat,
  • XIOS/dev/branch_yushan/src/node/mesh.hpp (r931 → r1053)

    @@ -60,9 +60,9 @@
                          const CArray<double, 2>&, const CArray<double, 2>& );

    -      void createMeshEpsilon(const MPI_Comm&,
    +      void createMeshEpsilon(const ep_lib::MPI_Comm&,
                                 const CArray<double, 1>&, const CArray<double, 1>&,
                                 const CArray<double, 2>&, const CArray<double, 2>& );

    -      void getGlobalNghbFaces(const int, const MPI_Comm&, const CArray<int, 1>&,
    +      void getGlobalNghbFaces(const int, const ep_lib::MPI_Comm&, const CArray<int, 1>&,
                                  const CArray<double, 2>&, const CArray<double, 2>&,
                                  CArray<int, 2>&);

    @@ -84,6 +84,6 @@
          CClientClientDHTSizet* pNodeGlobalIndex;                    // pointer to a map <nodeHash, nodeIdxGlo>
          CClientClientDHTSizet* pEdgeGlobalIndex;                    // pointer to a map <edgeHash, edgeIdxGlo>
    -      void getGloNghbFacesNodeType(const MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&);
    -      void getGloNghbFacesEdgeType(const MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&);
    +      void getGloNghbFacesNodeType(const ep_lib::MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&);
    +      void getGloNghbFacesEdgeType(const ep_lib::MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&);
          void getLocNghbFacesNodeType(const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&, CArray<int, 1>&);
          void getLocNghbFacesEdgeType(const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&, CArray<int, 1>&);