Changeset 2200 for XIOS/trunk/src/node
- Timestamp: 08/19/21 13:28:53 (3 years ago)
- Location: XIOS/trunk/src/node
- Files: 2 edited
Legend:
- Unmodified: no prefix
- Added: prefixed with +
- Removed: prefixed with -
XIOS/trunk/src/node/file.cpp
r1639 → r2200

@@ -867,4 +867,6 @@
   \brief Sorting domains with the same name (= describing the same mesh) in the decreasing order of nvertex for UGRID files.
   This insures that the domain with the highest nvertex is written first and thus all known mesh connectivity is generated at once by this domain.
+  Secondly, we associate the UGRID mesh to the domain in incresing order, i.e. nvertex=1 first, nvertex=2 and then nvertex>2.
+  In this case the connectivity of each component of the mesh (node, edge and face) are fully coherent.
   */
   void CFile::sortEnabledFieldsForUgrid()
@@ -874,4 +876,5 @@
   std::vector<int> domainNvertices;
   std::vector<StdString> domainNames;
+  std::map<string, tuple<CDomain*,CDomain*,CDomain*>> registeredDomains ;

   for (int i = 0; i < size; ++i)
@@ -916,4 +919,56 @@
       domainNvertices.push_back(nvertex);
     }
+
+    if (nvertex==1) std::get<0>(registeredDomains[domainName])=domain[0] ;
+    else if (nvertex==2) std::get<1>(registeredDomains[domainName])=domain[0] ;
+    else std::get<2>(registeredDomains[domainName])=domain[0] ;
+  }
+
+  for(auto& it:registeredDomains)
+  {
+    list<CDomain*> domains ;
+    string domainName=it.first ;
+
+    if (std::get<0>(it.second)!=nullptr) domains.push_back(std::get<0>(it.second)) ;
+    if (std::get<1>(it.second)!=nullptr) domains.push_back(std::get<1>(it.second)) ;
+    if (std::get<2>(it.second)!=nullptr) domains.push_back(std::get<2>(it.second)) ;
+
+    // for each component of a given mesh (i.e. domains with same name but different number of vertices)
+    // associate the UGRID mesh in increasing order
+    for(auto& domain : domains )
+    {
+      domain-> computeWrittenIndex();
+      CArray<int, 1>& indexToWrite = domain->localIndexToWriteOnServer;
+      int nbWritten = indexToWrite.numElements();
+      CArray<double,1> writtenLat, writtenLon;
+      CArray<double,2> writtenBndsLat, writtenBndsLon;
+
+      writtenLat.resize(nbWritten);
+      writtenLon.resize(nbWritten);
+      for (int idx = 0; idx < nbWritten; ++idx)
+      {
+        writtenLat(idx) = domain->latvalue(indexToWrite(idx));
+        writtenLon(idx) = domain->lonvalue(indexToWrite(idx));
+      }
+
+      int nvertex = domain->nvertex, idx;
+      if (nvertex>1)
+      {
+        writtenBndsLat.resize(nvertex, nbWritten);
+        writtenBndsLon.resize(nvertex, nbWritten);
+        CArray<double,2>& boundslat = domain->bounds_latvalue;
+        CArray<double,2>& boundslon = domain->bounds_lonvalue;
+        for (idx = 0; idx < nbWritten; ++idx)
+          for (int nv = 0; nv < nvertex; ++nv)
+          {
+            writtenBndsLat(nv, idx) = boundslat(nv, int(indexToWrite(idx)));
+            writtenBndsLon(nv, idx) = boundslon(nv, int(indexToWrite(idx)));
+          }
+      }
+      domain->assignMesh(domainName, domain->nvertex);
+      CContextServer* server=CContext::getCurrent()->server ;
+      domain->mesh->createMeshEpsilon(server->intraComm, writtenLon, writtenLat, writtenBndsLon, writtenBndsLat);
+    }
+
+  }
 }
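The new logic in sortEnabledFieldsForUgrid() can be read as two passes: first, every enabled domain is registered per mesh name into a tuple slot chosen by its nvertex (1 = node domain, 2 = edge domain, >2 = face domain); second, each mesh is visited slot by slot so its components are associated with the UGRID mesh in increasing nvertex order. Below is a minimal standalone sketch of that registration-and-ordering pattern, assuming nothing from XIOS: the Domain struct, the mesh name and the counts are illustrative stand-ins for CDomain and the real fields.

```cpp
// Standalone sketch (not XIOS code) of the per-mesh registration by nvertex
// and the increasing-order traversal used in the changeset above.
#include <iostream>
#include <list>
#include <map>
#include <string>
#include <tuple>

struct Domain { std::string meshName; int nvertex; };  // illustrative stand-in for CDomain

int main()
{
  // Domains describing the same mesh arrive in arbitrary order.
  std::list<Domain> enabledDomains = { {"ocean_mesh", 4}, {"ocean_mesh", 1}, {"ocean_mesh", 2} };

  // Slot 0: node domain (nvertex==1), slot 1: edge domain (nvertex==2), slot 2: face domain (nvertex>2).
  // operator[] value-initializes the tuple, so unfilled slots stay nullptr.
  std::map<std::string, std::tuple<Domain*, Domain*, Domain*>> registeredDomains;
  for (auto& d : enabledDomains)
  {
    if (d.nvertex == 1)      std::get<0>(registeredDomains[d.meshName]) = &d;
    else if (d.nvertex == 2) std::get<1>(registeredDomains[d.meshName]) = &d;
    else                     std::get<2>(registeredDomains[d.meshName]) = &d;
  }

  // Walk each mesh slot by slot, skipping missing components, so node
  // connectivity is handled before edges and edges before faces.
  for (auto& it : registeredDomains)
  {
    std::list<Domain*> ordered;
    if (std::get<0>(it.second)) ordered.push_back(std::get<0>(it.second));
    if (std::get<1>(it.second)) ordered.push_back(std::get<1>(it.second));
    if (std::get<2>(it.second)) ordered.push_back(std::get<2>(it.second));

    for (auto* d : ordered)
      std::cout << it.first << ": associate mesh component with nvertex=" << d->nvertex << "\n";
  }
}
```

A tuple of pointers keyed by mesh name keeps missing components explicit (they stay nullptr through the map's value initialization), which is what makes the increasing-order traversal a plain slot-by-slot walk.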
XIOS/trunk/src/node/mesh.cpp
r1639 → r2200

@@ -433,4 +433,15 @@
   node_lat = latvalue;

+  unsigned long nodeCount = nbNodes_;
+  unsigned long nodeStart, nbNodes;
+  MPI_Scan(&nodeCount, &nodeStart, 1, MPI_UNSIGNED_LONG, MPI_SUM, comm);
+  int nNodes = nodeStart;
+  MPI_Bcast(&nNodes, 1, MPI_UNSIGNED_LONG, mpiSize-1, comm);
+  nbNodesGlo = nNodes;
+
+  nodeStart -= nodeCount;
+  node_start = nodeStart;
+  node_count = nodeCount;
+
   // Global node indexes
   vector<size_t> hashValues(4);
@@ -441,8 +452,9 @@
     for (size_t nh = 0; nh < 4; ++nh)
     {
-      nodeHash2IdxGlo[hashValues[nh]].push_back( mpiRank*nbNodes_ + nn);
+      nodeHash2IdxGlo[hashValues[nh]].push_back(nodeStart + nn);
     }
   }
   pNodeGlobalIndex = new CClientClientDHTSizet (nodeHash2IdxGlo, comm);
+
   nodesAreWritten = true;
 }
@@ -460,6 +472,11 @@
   unsigned long nbEdgesOnProc = nbEdges_;
   unsigned long nbEdgesAccum;
+  unsigned long nbEdgesGlo;
   MPI_Scan(&nbEdgesOnProc, &nbEdgesAccum, 1, MPI_UNSIGNED_LONG, MPI_SUM, comm);
+  nbEdgesGlo = nbEdgesAccum ;
+  MPI_Bcast(&nbEdgesGlo, 1, MPI_UNSIGNED_LONG, mpiSize-1, comm);
   nbEdgesAccum -= nbEdges_;
+  edge_start = nbEdgesAccum ;
+  edge_count = nbEdgesOnProc ;

   CClientClientDHTSizet::Index2VectorInfoTypeMap edgeHash2IdxGlo;
@@ -670,5 +687,5 @@
   } //nvertex = 2

-  else
+  else // nvertex > 2
   {
     nbFaces_ = bounds_lon.shape()[1];
@@ -1601,4 +1618,5 @@
   } // createMeshEpsilon

+
   ///----------------------------------------------------------------
   /*!
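The node and edge bookkeeping added here uses a standard MPI offset idiom: an inclusive MPI_Scan of the local count gives each rank the cumulative count up to and including itself, broadcasting the last rank's result yields the global total, and subtracting the local count turns the inclusive sum into this rank's start offset, which then replaces the old mpiRank*nbNodes_ global index (only correct when every rank owns the same number of nodes). The sketch below is a standalone illustration of that idiom, not XIOS code; the variable names are made up and a single MPI_UNSIGNED_LONG buffer is used for both the scan and the broadcast.

```cpp
// Standalone sketch of the MPI_Scan + MPI_Bcast offset pattern used in mesh.cpp.
#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);
  MPI_Comm comm = MPI_COMM_WORLD;
  int mpiRank, mpiSize;
  MPI_Comm_rank(comm, &mpiRank);
  MPI_Comm_size(comm, &mpiSize);

  unsigned long localCount = 10 + mpiRank;          // e.g. number of local nodes or edges
  unsigned long inclusiveSum = 0, globalCount = 0;

  // Inclusive prefix sum over ranks 0..mpiRank.
  MPI_Scan(&localCount, &inclusiveSum, 1, MPI_UNSIGNED_LONG, MPI_SUM, comm);

  // The last rank holds the total; share it with every rank.
  globalCount = inclusiveSum;
  MPI_Bcast(&globalCount, 1, MPI_UNSIGNED_LONG, mpiSize - 1, comm);

  // Exclusive offset of this rank's first element in the global numbering.
  unsigned long start = inclusiveSum - localCount;

  std::printf("rank %d: start=%lu count=%lu global=%lu\n",
              mpiRank, start, localCount, globalCount);

  MPI_Finalize();
  return 0;
}
```

Built with mpicxx and run on several ranks, each process prints a disjoint [start, start+count) range and all ranks agree on the global total, which is the property the new node_start/node_count and edge_start/edge_count fields rely on.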