Changeset 2613
- Timestamp:
- 03/08/24 17:05:40 (11 months ago)
- Location:
- XIOS3/trunk/src
- Files:
-
- 6 edited
Legend:
- Unmodified
- Added
- Removed
-
XIOS3/trunk/src/io/nc4_data_output.cpp
r2600 → r2613, lines 411–417 (the lon values are now copied element-by-element instead of via a Range slice):
      start[0]=domain->ibeginValue_;
      count[0]=domain->niValue_;
    - CArray<double,1> lon = domain->lonvalue(Range(0,domain->niValue_-1));
    + CArray<double,1> lon;
    + lon.resize( domain->niValue_);
    + for (int i=0;i<domain->niValue_;i++) lon(i) = domain->lonvalue(i);
      SuperClassWriter::writeData(CArray<double,1>(lon.copy()), lonid, isCollective, 0,&start,&count);
    }
XIOS3/trunk/src/io/onetcdf4.cpp
r2600 → r2613, lines 478–511 (the chunking-ratio computation is now skipped when the normalizing weight is zero; the body below is unchanged apart from being re-indented inside the new guard):
      normalizingWeight = userChunkingWeights[i];
    }
  + if (normalizingWeight!=0) // no chunk for scalar
  + {
      std::vector<double> chunkingRatioPerDims; // will store coefficients used to compute chunk size
      double productRatios = 1; // last_coeff = pow( shrink_ratio / (product of all ratios), 1/countChunkingDims )
      for (int i=0;i<userChunkingWeights.size();i++)
      {
        chunkingRatioPerDims.push_back( userChunkingWeights[i] / normalizingWeight );
        if (chunkingRatioPerDims[i]) productRatios *= chunkingRatioPerDims[i];
      }
      for (int i=0;i<userChunkingWeights.size();i++)
      {
        chunkingRatioPerDims[i] *= pow( chunkingRatio / productRatios, 1./countChunkingDims );
      }

      std::vector<double>::iterator itChunkingRatios = chunkingRatioPerDims.begin();
      //itId = dim.rbegin();
      double correctionFromPreviousDim = 1.;
      for (vector<StdSize>::reverse_iterator itDim = dimsizes.rbegin(); itDim != dimsizes.rend(); ++itDim, ++itChunkingRatios, ++itId)
      {
        *itChunkingRatios *= correctionFromPreviousDim;
        correctionFromPreviousDim = 1;
        if (*itChunkingRatios > 1) // else target larger than size !
        {
          StdSize dimensionSize = *itDim;
          //info(0) << *itId << " " << *itDim << " " << *itChunkingRatios << " " << (*itDim)/(*itChunkingRatios) << endl;
          *itDim = ceil( *itDim / ceil(*itChunkingRatios) );
          correctionFromPreviousDim = *itChunkingRatios/ ((double)dimensionSize/(*itDim));
        }
      }
  + }
    int storageType = (0 == dimSize) ? NC_CONTIGUOUS : NC_CHUNKED;
    CNetCdfInterface::defVarChunking(grpid, varid, storageType, &dimsizes[0]);
XIOS3/trunk/src/manager/contexts_manager.cpp
r2580 → r2613, lines 237–244 (the broadcast now uses a dedicated buffer initialized only on rank 0):
      if (comm!=MPI_COMM_NULL)
      {
    -   MPI_Bcast(&ret,1,MPI_INT,0,comm) ;
    +   int cast_ret = 0;
    +   if (commRank==0) cast_ret = ret;
    +   MPI_Bcast(&cast_ret,1,MPI_INT,0,comm) ;
    +   ret = cast_ret;
        if (ret)
        {
XIOS3/trunk/src/node/axis.cpp
r2606 → r2613, lines 310–338 (hash accumulators switch from size_t to unsigned long long, with abs/modulo folding, and the Allreduce datatype changes to match):
      int sz(1);
      MPI_Comm_size( comm, &sz );
    - size_t distributedHash = 0;
    + unsigned long long distributedHash = 0;
      if (sz!=1) // compute the connector only if the element is distributed
      {
        …
        gridTransformConnector->transfer(this->value, distributedValue );

    -   size_t localHash = 0;
    -   for (int iloc=0; iloc<localSize ; iloc++ ) localHash+=globalIndex(iloc)*distributedValue(iloc);
    +   unsigned long long localHash = 0;
    +   for (int iloc=0; iloc<localSize ; iloc++ ) localHash+=((unsigned long long)(abs(globalIndex(iloc)*distributedValue(iloc))))%LLONG_MAX;
        distributedHash = 0;
    -   MPI_Allreduce( &localHash, &distributedHash, 1, MPI_UNSIGNED_LONG, MPI_SUM, comm );
    +   MPI_Allreduce( &localHash, &distributedHash, 1, MPI_UNSIGNED_LONG_LONG, MPI_SUM, comm );
      }
      else // if the element is not distributed, the local hash is valid
      {
        int globalSize = this->n_glo.getValue();
        int localSize = globalSize;
    -   size_t localHash = 0;
    -   for (int iloc=0; iloc<localSize ; iloc++ ) localHash+=iloc*this->value(iloc);
    +   unsigned long long localHash = 0;
    +   for (int iloc=0; iloc<localSize ; iloc++ ) localHash+=((unsigned long long)(abs(iloc*this->value(iloc))))%LLONG_MAX;
        distributedHash = localHash;
      }
XIOS3/trunk/src/node/domain.cpp
r2606 → r2613, lines 1809–1843 (same hash-type change as axis.cpp, applied to the lon/lat-based domain hash):
      int sz(1);
      MPI_Comm_size( comm, &sz );
    - size_t distributedHash = 0;
    + unsigned long long distributedHash = 0;
      if (sz!=1) // compute the connector only if the element is distributed
      {
        …
        // Compute the distributed hash (v0) of the element
        // it will be associated to the default element name (= map key), and to the name really written
    -   size_t localHash = 0;
    -   for (int iloc=0; iloc<localSize ; iloc++ ) localHash+=globalIndex(iloc)*lon_distributedValue(iloc)*lat_distributedValue(iloc);
    +   unsigned long long localHash = 0;
    +   for (int iloc=0; iloc<localSize ; iloc++ )
    +   {
    +     localHash+=((unsigned long long)(abs(globalIndex(iloc)*lon_distributedValue(iloc)*lat_distributedValue(iloc))))%LLONG_MAX;
    +   }
        distributedHash = 0;
    -   MPI_Allreduce( &localHash, &distributedHash, 1, MPI_UNSIGNED_LONG, MPI_SUM, comm );
    +   MPI_Allreduce( &localHash, &distributedHash, 1, MPI_UNSIGNED_LONG_LONG, MPI_SUM, comm );
      }
      else // if the element is not distributed, the local hash is valid
      {
        int globalSize = this->ni_glo.getValue()*this->nj_glo.getValue();
        int localSize = globalSize;
    -   size_t localHash = 0;
    -   for (int iloc=0; iloc<localSize ; iloc++ ) localHash+=iloc*this->lonvalue(iloc)*this->latvalue(iloc);
    +   unsigned long long localHash = 0;
    +   for (int iloc=0; iloc<localSize ; iloc++ ) localHash+=((unsigned long long)(abs(iloc*this->lonvalue(iloc)*this->latvalue(iloc))))%LLONG_MAX;
        distributedHash = localHash;
      }
XIOS3/trunk/src/transformation/scalar_algorithm/scalar_algorithm_redistribute.cpp
r2507 → r2613, lines 95–134 (the value transfer is moved after checkAttributes/computeAlgorithm, and the mask assignment is guarded against empty arrays):
      CArray<double,1> valSrc, valDst ;
      valSrc.resize(scalarSource->getLocalView(CElementView::FULL)->getSize()) ;
    - valDst.resize(scalarDestination->getLocalView(CElementView::FULL)->getSize()) ;
    -
    - if (scalarSource->hasValue())
    - {
    -   if (valSrc.numElements()>0) valSrc(0)=scalarSource->value ;
    -   transformConnector->transfer(valSrc, valDst) ;
    -   if (valDst.numElements()>0) scalarDestination->value = valDst(0) ;
    - }

      if (scalarSource->hasBounds())
      …
      transformMask->transfer(workflow, mask, false) ;
    - scalarDestination->mask = mask(0) ;
    + if (mask.numElements()>0)
    + {
    +   scalarDestination->mask = mask(0) ;
    + }

      scalarDestination->checkAttributes() ;
    + this->computeAlgorithm(scalarSource->getLocalView(CElementView::WORKFLOW), scalarDestination->getLocalView(CElementView::WORKFLOW)) ;
    +
    + valDst.resize(scalarDestination->getLocalView(CElementView::FULL)->getSize()) ;
    + if (scalarSource->hasValue())
    + {
    +   if (valSrc.numElements()>0) valSrc(0)=scalarSource->value ;
    +   transformConnector->transfer(valSrc, valDst) ;
    +   if (valDst.numElements()>0) scalarDestination->value = valDst(0) ;
    + }
    }
    CATCH
Note: See TracChangeset
for help on using the changeset viewer.