Changeset 1053 for XIOS/dev/branch_yushan/src/node
- Timestamp:
- 02/17/17 17:55:37 (7 years ago)
- Location:
- XIOS/dev/branch_yushan/src/node
- Files:
-
- 9 edited
Legend:
- Unmodified
- Added
- Removed
-
XIOS/dev/branch_yushan/src/node/axis.cpp
r1037 r1053 742 742 CContextServer* server = CContext::getCurrent()->server; 743 743 axis->numberWrittenIndexes_ = axis->indexesToWrite.size(); 744 MPI_Allreduce(&axis->numberWrittenIndexes_, &axis->totalNumberWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm);745 MPI_Scan(&axis->numberWrittenIndexes_, &axis->offsetWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm);744 ep_lib::MPI_Allreduce(&axis->numberWrittenIndexes_, &axis->totalNumberWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm); 745 ep_lib::MPI_Scan(&axis->numberWrittenIndexes_, &axis->offsetWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm); 746 746 axis->offsetWrittenIndexes_ -= axis->numberWrittenIndexes_; 747 747 } -
XIOS/dev/branch_yushan/src/node/context.cpp
r1037 r1053 236 236 237 237 //! Initialize client side 238 void CContext::initClient( MPI_Comm intraComm,MPI_Comm interComm, CContext* cxtServer /*= 0*/)238 void CContext::initClient(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtServer /*= 0*/) 239 239 { 240 240 hasClient=true; … … 248 248 registryOut->setPath(getId()) ; 249 249 250 MPI_Comm intraCommServer, interCommServer;250 ep_lib::MPI_Comm intraCommServer, interCommServer; 251 251 if (cxtServer) // Attached mode 252 252 { … … 311 311 312 312 //! Initialize server 313 void CContext::initServer( MPI_Comm intraComm,MPI_Comm interComm, CContext* cxtClient /*= 0*/)313 void CContext::initServer(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtClient /*= 0*/) 314 314 { 315 315 hasServer=true; … … 323 323 registryOut->setPath(getId()) ; 324 324 325 MPI_Comm intraCommClient, interCommClient;325 ep_lib::MPI_Comm intraCommClient, interCommClient; 326 326 if (cxtClient) // Attached mode 327 327 { … … 369 369 closeAllFile(); 370 370 registryOut->hierarchicalGatherRegistry() ; 371 //registryOut->gatherRegistry() ; 371 372 if (server->intraCommRank==0) CXios::globalRegistry->mergeRegistry(*registryOut) ; 372 373 } 373 374 374 for (std::list< MPI_Comm>::iterator it = comms.begin(); it != comms.end(); ++it)375 for (std::list<ep_lib::MPI_Comm>::iterator it = comms.begin(); it != comms.end(); ++it) 375 376 MPI_Comm_free(&(*it)); 376 377 comms.clear(); -
XIOS/dev/branch_yushan/src/node/context.hpp
r1037 r1053 88 88 public : 89 89 // Initialize server or client 90 void initServer( MPI_Comm intraComm,MPI_Comm interComm, CContext* cxtClient = 0);91 void initClient( MPI_Comm intraComm,MPI_Comm interComm, CContext* cxtServer = 0);90 void initServer(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtClient = 0); 91 void initClient(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtServer = 0); 92 92 bool isInitialized(void); 93 93 … … 229 229 StdString idServer_; 230 230 CGarbageCollector garbageCollector; 231 std::list< MPI_Comm> comms; //!< Communicators allocated internally231 std::list<ep_lib::MPI_Comm> comms; //!< Communicators allocated internally 232 232 233 233 public: // Some function maybe removed in the near future -
XIOS/dev/branch_yushan/src/node/domain.cpp
r1037 r1053 475 475 { 476 476 CContext* context = CContext::getCurrent(); 477 477 CContextClient* client = context->client; 478 478 lon_g.resize(ni_glo) ; 479 479 lat_g.resize(nj_glo) ; -
XIOS/dev/branch_yushan/src/node/field_impl.hpp
r1037 r1053 20 20 if (clientSourceFilter) 21 21 { 22 printf("file_impl.hpp : clientSourceFilter->streamData\n"); 22 23 clientSourceFilter->streamData(CContext::getCurrent()->getCalendar()->getCurrentDate(), _data); 24 printf("file_impl.hpp : clientSourceFilter->streamData OKOK\n"); 23 25 } 24 26 else if (!field_ref.isEmpty() || !content.empty()) 27 { 25 28 ERROR("void CField::setData(const CArray<double, N>& _data)", 26 29 << "Impossible to receive data from the model for a field [ id = " << getId() << " ] with a reference or an arithmetic operation."); 30 } 27 31 } 28 32 -
XIOS/dev/branch_yushan/src/node/file.cpp
r1037 r1053 564 564 565 565 if (isOpen) data_out->closeFile(); 566 if (time_counter_name.isEmpty()) data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), fileComm, multifile, isCollective));567 else data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), fileComm, multifile, isCollective, time_counter_name));566 if (time_counter_name.isEmpty()) data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), static_cast<MPI_Comm>(fileComm.mpi_comm), multifile, isCollective)); 567 else data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), static_cast<MPI_Comm>(fileComm.mpi_comm), multifile, isCollective, time_counter_name)); 568 568 isOpen = true; 569 569 } -
XIOS/dev/branch_yushan/src/node/file.hpp
r1037 r1053 159 159 bool isOpen; 160 160 bool allDomainEmpty; 161 MPI_Comm fileComm;161 ep_lib::MPI_Comm fileComm; 162 162 163 163 private : -
XIOS/dev/branch_yushan/src/node/mesh.cpp
r1037 r1053 493 493 * \param [in] bounds_lat Array of boundary latitudes. Its size depends on the element type. 494 494 */ 495 void CMesh::createMeshEpsilon(const MPI_Comm& comm,495 void CMesh::createMeshEpsilon(const ep_lib::MPI_Comm& comm, 496 496 const CArray<double, 1>& lonvalue, const CArray<double, 1>& latvalue, 497 497 const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat) … … 1534 1534 */ 1535 1535 1536 void CMesh::getGloNghbFacesNodeType(const MPI_Comm& comm, const CArray<int, 1>& face_idx,1536 void CMesh::getGloNghbFacesNodeType(const ep_lib::MPI_Comm& comm, const CArray<int, 1>& face_idx, 1537 1537 const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat, 1538 1538 CArray<int, 2>& nghbFaces) … … 1690 1690 */ 1691 1691 1692 void CMesh::getGloNghbFacesEdgeType(const MPI_Comm& comm, const CArray<int, 1>& face_idx,1692 void CMesh::getGloNghbFacesEdgeType(const ep_lib::MPI_Comm& comm, const CArray<int, 1>& face_idx, 1693 1693 const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat, 1694 1694 CArray<int, 2>& nghbFaces) … … 1871 1871 */ 1872 1872 1873 void CMesh::getGlobalNghbFaces(const int nghbType, const MPI_Comm& comm,1873 void CMesh::getGlobalNghbFaces(const int nghbType, const ep_lib::MPI_Comm& comm, 1874 1874 const CArray<int, 1>& face_idx, 1875 1875 const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat, -
XIOS/dev/branch_yushan/src/node/mesh.hpp
r931 r1053 60 60 const CArray<double, 2>&, const CArray<double, 2>& ); 61 61 62 void createMeshEpsilon(const MPI_Comm&,62 void createMeshEpsilon(const ep_lib::MPI_Comm&, 63 63 const CArray<double, 1>&, const CArray<double, 1>&, 64 64 const CArray<double, 2>&, const CArray<double, 2>& ); 65 65 66 void getGlobalNghbFaces(const int, const MPI_Comm&, const CArray<int, 1>&,66 void getGlobalNghbFaces(const int, const ep_lib::MPI_Comm&, const CArray<int, 1>&, 67 67 const CArray<double, 2>&, const CArray<double, 2>&, 68 68 CArray<int, 2>&); … … 84 84 CClientClientDHTSizet* pNodeGlobalIndex; // pointer to a map <nodeHash, nodeIdxGlo> 85 85 CClientClientDHTSizet* pEdgeGlobalIndex; // pointer to a map <edgeHash, edgeIdxGlo> 86 void getGloNghbFacesNodeType(const MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&);87 void getGloNghbFacesEdgeType(const MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&);86 void getGloNghbFacesNodeType(const ep_lib::MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&); 87 void getGloNghbFacesEdgeType(const ep_lib::MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&); 88 88 void getLocNghbFacesNodeType(const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&, CArray<int, 1>&); 89 89 void getLocNghbFacesEdgeType(const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&, CArray<int, 1>&);
Note: See TracChangeset for help on using the changeset viewer.