/*!
   \file distribution_client.cpp
   \author Ha NGUYEN
   \since 13 Jan 2015
   \date 09 March 2015

   \brief Index distribution on client side.
*/
---|
9 | #include "distribution_client.hpp" |
---|
10 | |
---|
11 | namespace xios { |
---|
12 | |
---|
//! Build a client-side distribution from an already-computed global index.
//! No grid is read: all per-element containers are left empty and must be
//! filled by the caller.
//! \param [in] rank rank of the client
//! \param [in] dims number of dimensions of the distribution
//! \param [in] globalIndex global indices held by this client
CDistributionClient::CDistributionClient(int rank, int dims, const CArray<size_t,1>& globalIndex)
  : CDistribution(rank, dims, globalIndex)
  , axisDomainOrder_()
  , nLocal_(), nGlob_(), nBeginLocal_(), nBeginGlobal_(),nZoomBegin_(), nZoomEnd_()
  , dataNIndex_(), dataDims_(), dataBegin_(), dataIndex_(), domainMasks_(), axisMasks_()
  , gridMask_(), indexMap_()
  , isDataDistributed_(true), axisNum_(0), domainNum_(0)
  , localDataIndex_(), localMaskIndex_()
  , globalLocalDataSendToServerMap_()
  , infoIndex_(), isComputed_(false)
  , elementLocalIndex_(), elementGlobalIndex_(), elementIndexData_()
  , elementZoomMask_(), elementNLocal_(), elementNGlobal_()
{
}
---|
27 | |
---|
//! Build a client-side distribution directly from a grid definition.
//! Reads the grid's domains, axis, scalars and masks, then creates the
//! (currently empty) global index.
//! \param [in] rank rank of the client
//! \param [in] grid grid whose elements define the distribution
CDistributionClient::CDistributionClient(int rank, CGrid* grid)
  : CDistribution(rank, 0)
  , axisDomainOrder_()
  , nLocal_(), nGlob_(), nBeginLocal_(), nBeginGlobal_(),nZoomBegin_(), nZoomEnd_()
  , dataNIndex_(), dataDims_(), dataBegin_(), dataIndex_(), domainMasks_(), axisMasks_()
  , gridMask_(), indexMap_()
  , isDataDistributed_(true), axisNum_(0), domainNum_(0)
  , localDataIndex_(), localMaskIndex_()
  , globalLocalDataSendToServerMap_()
  , infoIndex_(), isComputed_(false)
  , elementLocalIndex_(), elementGlobalIndex_(), elementIndexData_()
  , elementZoomMask_(), elementNLocal_(), elementNGlobal_()
{
  // Extract sizes, begins, data indices and masks of every grid element
  readDistributionInfo(grid);
  // Base-class hook; currently a no-op (see createGlobalIndex below)
  createGlobalIndex();
}
---|
44 | |
---|
45 | CDistributionClient::~CDistributionClient() |
---|
46 | { /* Nothing to do */ } |
---|
47 | |
---|
48 | /*! |
---|
49 | Read information of a grid to generate distribution. |
---|
50 | Every grid is composed of several axis or/and domain(s). Their information are processed |
---|
51 | stored and used to calculate index distribution between client and server |
---|
52 | \param [in] grid Grid to read |
---|
53 | */ |
---|
54 | void CDistributionClient::readDistributionInfo(CGrid* grid) |
---|
55 | { |
---|
56 | std::vector<CDomain*> domList = grid->getDomains(); |
---|
57 | std::vector<CAxis*> axisList = grid->getAxis(); |
---|
58 | std::vector<CScalar*> scalarList = grid->getScalars(); |
---|
59 | CArray<int,1> axisDomainOrder = grid->axis_domain_order; |
---|
60 | |
---|
61 | readDistributionInfo(domList, axisList, scalarList, axisDomainOrder); |
---|
62 | |
---|
63 | // Then check mask of grid |
---|
64 | int gridDim = domList.size() * 2 + axisList.size(); |
---|
65 | grid->checkMask(); |
---|
66 | switch (gridDim) { |
---|
67 | case 0: |
---|
68 | gridMask_.resize(1); |
---|
69 | gridMask_(0) = true; |
---|
70 | break; |
---|
71 | case 1: |
---|
72 | readGridMaskInfo(grid->mask_1d); |
---|
73 | break; |
---|
74 | case 2: |
---|
75 | readGridMaskInfo(grid->mask_2d); |
---|
76 | break; |
---|
77 | case 3: |
---|
78 | readGridMaskInfo(grid->mask_3d); |
---|
79 | break; |
---|
80 | default: |
---|
81 | break; |
---|
82 | } |
---|
83 | } |
---|
84 | |
---|
/*!
  Read information from domain(s), axis and scalar(s) to generate the distribution.
  All information related to domains (e.g ibegin, jbegin, ni, nj, ni_glo, nj_glo),
  to axis (e.g dataNIndex, dataIndex) and to scalars is flattened into per-dimension
  and per-element vectors used later to compute the client/server index distribution.
  Till now, every data structure of domain has been kept like before
  (e.g: data_n_index) to make sure compatibility; however, it should be changed?
  \param [in] domList List of domains of grid
  \param [in] axisList List of axis of grid
  \param [in] scalarList List of scalars of grid
  \param [in] axisDomainOrder order of axis and domain inside a grid.
              2 if domain, 1 if axis and zero if scalar
*/
void CDistributionClient::readDistributionInfo(const std::vector<CDomain*>& domList,
                                               const std::vector<CAxis*>& axisList,
                                               const std::vector<CScalar*>& scalarList,
                                               const CArray<int,1>& axisDomainOrder)
{
  domainNum_ = domList.size();
  axisNum_ = axisList.size();
  numElement_ = axisDomainOrder.numElements(); // Number of element, e.x: Axis, Domain

  axisDomainOrder_.resize(numElement_);
  axisDomainOrder_ = axisDomainOrder;

  // Each domain or axis carries its own local mask; copy them out.
  domainMasks_.resize(domainNum_);
  for (int i = 0; i < domainNum_;++i)
  {
    domainMasks_[i].resize(domList[i]->mask_1d.numElements());
    domainMasks_[i] = domList[i]->mask_1d;
  }

  axisMasks_.resize(axisNum_);
  for (int i = 0; i < axisNum_; ++i)
  {
    axisMasks_[i].resize(axisList[i]->mask.numElements());
    axisMasks_[i] = axisList[i]->mask;
  }

  // Because domain and axis can come in any order (axis1, domain1, axis2, ...),
  // indexMap_[element] gives the first flat-dimension slot of each element.
  // A domain occupies two consecutive slots (i and j), axis and scalar one.
  int idx = 0;
  indexMap_.resize(numElement_);
  this->dims_ = numElement_;
  for (int i = 0; i < numElement_; ++i)
  {
    indexMap_[i] = idx;
    if (2 == axisDomainOrder(i))
    {
      // A domain adds one extra flat dimension on top of the element count
      ++(this->dims_);
      idx += 2;
    }
    else ++idx;
  }

  // Size of each flat dimension (local and global)
  nLocal_.resize(this->dims_);
  nGlob_.resize(this->dims_);
  nBeginLocal_.resize(this->dims_,0);
  nBeginGlobal_.resize(this->dims_,0);
  nZoomBegin_.resize(this->dims_);
  nZoomEnd_.resize(this->dims_);

  // data_n_index of domain or axis (for now, an axis uses its size as data_n_index)
  dataNIndex_.resize(numElement_);
  dataDims_.resize(numElement_);
  dataBegin_.resize(this->dims_);

  // data_*_index of each flat dimension
  dataIndex_.resize(this->dims_);
  infoIndex_.resize(this->dims_);

  // Running positions of each element kind inside its own list
  int domIndex = 0, axisIndex = 0, scalarIndex = 0;
  idx = 0;

  elementLocalIndex_.resize(numElement_);
  elementGlobalIndex_.resize(numElement_);
  elementIndexData_.resize(numElement_);
  elementZoomMask_.resize(numElement_);
  elementNLocal_.resize(numElement_);
  elementNGlobal_.resize(numElement_);
  elementNLocal_[0] = 1;
  elementNGlobal_[0] = 1;
  // Cumulative local/global sizes: elementN*_[k] is the product of the sizes
  // of all elements before k (stride of element k in the flattened grid)
  size_t localSize = 1, globalSize = 1;

  isDataDistributed_ = false;
  // Fill all the vectors above, one element at a time
  for (idx = 0; idx < numElement_; ++idx)
  {
    int eleDim = axisDomainOrder(idx);
    elementNLocal_[idx] = localSize;
    elementNGlobal_[idx] = globalSize;

    // If this element is a domain (2 dimensions: i and j)
    if (2 == eleDim)
    {
      // On the j axis (second flat slot of the domain)
      nLocal_.at(indexMap_[idx]+1) = domList[domIndex]->nj.getValue();
      nGlob_.at(indexMap_[idx]+1) = domList[domIndex]->nj_glo.getValue();
      nBeginLocal_.at(indexMap_[idx]+1) = 0;
      nBeginGlobal_.at(indexMap_[idx]+1) = domList[domIndex]->jbegin;
      nZoomBegin_.at((indexMap_[idx]+1)) = domList[domIndex]->global_zoom_jbegin;
      nZoomEnd_.at((indexMap_[idx]+1)) = domList[domIndex]->global_zoom_jbegin + domList[domIndex]->global_zoom_nj-1;

      dataBegin_.at(indexMap_[idx]+1) = domList[domIndex]->data_jbegin.getValue();
      // reference(): share the underlying storage, no copy
      dataIndex_.at(indexMap_[idx]+1).reference(domList[domIndex]->data_j_index);
      infoIndex_.at(indexMap_[idx]+1).reference(domList[domIndex]->j_index);

      // On the i axis (first flat slot of the domain)
      nLocal_.at(indexMap_[idx]) = domList[domIndex]->ni.getValue();
      nGlob_.at(indexMap_[idx]) = domList[domIndex]->ni_glo.getValue();
      nBeginLocal_.at(indexMap_[idx]) = 0;
      nBeginGlobal_.at(indexMap_[idx]) = domList[domIndex]->ibegin;
      nZoomBegin_.at((indexMap_[idx])) = domList[domIndex]->global_zoom_ibegin;
      nZoomEnd_.at((indexMap_[idx])) = domList[domIndex]->global_zoom_ibegin + domList[domIndex]->global_zoom_ni-1;

      dataBegin_.at(indexMap_[idx]) = domList[domIndex]->data_ibegin.getValue();
      dataIndex_.at(indexMap_[idx]).reference(domList[domIndex]->data_i_index);
      infoIndex_.at(indexMap_[idx]).reference(domList[domIndex]->i_index);

      dataNIndex_.at(idx) = domList[domIndex]->data_i_index.numElements();
      dataDims_.at(idx) = domList[domIndex]->data_dim.getValue();

      // The whole grid is distributed as soon as one element is
      isDataDistributed_ |= domList[domIndex]->isDistributed();

      localSize *= nLocal_.at(indexMap_[idx]+1)* nLocal_.at(indexMap_[idx]);
      globalSize *= nGlob_.at(indexMap_[idx]+1)* nGlob_.at(indexMap_[idx]);
      ++domIndex;
    }
    else if (1 == eleDim)// So it's an axis
    {
      nLocal_.at(indexMap_[idx]) = axisList[axisIndex]->n.getValue();
      nGlob_.at(indexMap_[idx]) = axisList[axisIndex]->n_glo.getValue();
      nBeginLocal_.at(indexMap_[idx]) = 0;
      nBeginGlobal_.at(indexMap_[idx]) = axisList[axisIndex]->begin.getValue();
      nZoomBegin_.at((indexMap_[idx])) = axisList[axisIndex]->global_zoom_begin;
      nZoomEnd_.at((indexMap_[idx])) = axisList[axisIndex]->global_zoom_begin + axisList[axisIndex]->global_zoom_n-1;

      dataBegin_.at(indexMap_[idx]) = axisList[axisIndex]->data_begin.getValue();
      dataIndex_.at(indexMap_[idx]).reference(axisList[axisIndex]->data_index);
      infoIndex_.at(indexMap_[idx]).reference(axisList[axisIndex]->index);
      dataNIndex_.at(idx) = axisList[axisIndex]->data_index.numElements();
      dataDims_.at(idx) = 1;

      isDataDistributed_ |= axisList[axisIndex]->isDistributed();

      localSize *= nLocal_.at(indexMap_[idx]);
      globalSize *= nGlob_.at(indexMap_[idx]);

      ++axisIndex;
    }
    else // scalar: a degenerate one-point dimension, never distributed
    {
      nLocal_.at(indexMap_[idx]) = 1;
      nGlob_.at(indexMap_[idx]) = 1;
      nBeginLocal_.at(indexMap_[idx]) = 0;
      nBeginGlobal_.at(indexMap_[idx]) = 1;
      nZoomBegin_.at((indexMap_[idx])) = 0;
      nZoomEnd_.at((indexMap_[idx])) = 0;

      dataBegin_.at(indexMap_[idx]) = 0;
      dataIndex_.at(indexMap_[idx]).resize(1); dataIndex_.at(indexMap_[idx])(0) = 0;
      infoIndex_.at(indexMap_[idx]).resize(1); infoIndex_.at(indexMap_[idx])(0) = 0;
      dataNIndex_.at(idx) = 1;
      dataDims_.at(idx) = 1;

      isDataDistributed_ |= false;

      localSize *= nLocal_.at(indexMap_[idx]);
      globalSize *= nGlob_.at(indexMap_[idx]);

      ++scalarIndex;
    }
  }
}
---|
261 | |
---|
/*!
  Create local index of domain(s).
  A domain can have a data index which even contains "ghost" points. Very often,
  these data surround the true data. In order to send correct data to a server,
  a client needs to know the index of the true data.

  Two passes per domain: the first pass counts and flags the valid (in-bounds,
  unmasked) data points; the second pass records their local index, global index
  and zoom mask.
*/
void CDistributionClient::createLocalDomainDataIndex()
{
  int idxDomain = 0;
  for (int i = 0; i < axisDomainOrder_.numElements(); ++i)
  {
    if (2 == axisDomainOrder_(i)) // this element is a domain
    {
      elementIndexData_[i].resize(dataNIndex_[i]);
      elementIndexData_[i] = false;
      int iIdx, jIdx = 0, count = 0, localIndex;
      // Pass 1: flag data points that fall inside the local domain and its mask
      for (int j = 0; j < dataNIndex_[i]; ++j)
      {
        // Convert (possibly 1-D) data index into local (i, j) of the domain
        iIdx = getDomainIndex((dataIndex_[indexMap_[i]])(j), (dataIndex_[indexMap_[i]+1])(j),
                              dataBegin_[indexMap_[i]], dataBegin_[indexMap_[i]+1],
                              dataDims_[i], nLocal_[indexMap_[i]], jIdx);

        if ((iIdx >= nBeginLocal_[indexMap_[i]]) && (iIdx < nLocal_[indexMap_[i]]) &&
            (jIdx >= nBeginLocal_[indexMap_[i]+1]) && (jIdx < nLocal_[indexMap_[i]+1]) &&
            (domainMasks_[idxDomain](iIdx + jIdx*nLocal_[indexMap_[i]])))
        {
          ++count;
          elementIndexData_[i](j) = true;
        }
      }

      // Pass 2: fill local/global indices and zoom mask for the flagged points
      elementLocalIndex_[i].resize(count);
      elementGlobalIndex_[i].resize(count);
      elementZoomMask_[i].resize(count);
      elementZoomMask_[i] = false;
      count = 0;
      CArray<bool,1>& tmpIndexElementData = elementIndexData_[i];
      CArray<bool,1>& tmpZoomMaskElement = elementZoomMask_[i];
      CArray<int,1>& tmpLocalElementIndex = elementLocalIndex_[i];
      CArray<size_t,1>& tmpGlobalElementIndex = elementGlobalIndex_[i];
      for (int j = 0; j < dataNIndex_[i]; ++j)
      {
        if (tmpIndexElementData(j))
        {
          iIdx = getDomainIndex((dataIndex_[indexMap_[i]])(j), (dataIndex_[indexMap_[i]+1])(j),
                                dataBegin_[indexMap_[i]], dataBegin_[indexMap_[i]+1],
                                dataDims_[i], nLocal_[indexMap_[i]], jIdx);
          // Flatten (i, j) into the domain-local linear index
          localIndex = tmpLocalElementIndex(count) = iIdx + jIdx * nLocal_[indexMap_[i]];
          tmpGlobalElementIndex(count) = (infoIndex_[indexMap_[i]])(localIndex) + ((infoIndex_[indexMap_[i]+1])(localIndex))*nGlob_[indexMap_[i]];
          // Zoom mask is true only when the global (i, j) lies inside the zoom window
          if ((((infoIndex_[indexMap_[i]])(localIndex)) <= nZoomEnd_[indexMap_[i]])
             && (nZoomBegin_[indexMap_[i]] <= ((infoIndex_[indexMap_[i]])(localIndex)))
             && (((infoIndex_[indexMap_[i]+1])(localIndex)) <= nZoomEnd_[indexMap_[i]+1])
             && (nZoomBegin_[indexMap_[i]+1] <= ((infoIndex_[indexMap_[i]+1])(localIndex))))
          {
            tmpZoomMaskElement(count) = true;
          }
          ++count;
        }
      }
      ++idxDomain;
    }
  }
}
---|
325 | |
---|
/*!
  Create local index of axis.
  Same two-pass scheme as for domains: first count and flag the valid
  (in-bounds, unmasked) data points of each axis, then record their local
  index, global index and zoom mask.
*/
void CDistributionClient::createLocalAxisDataIndex()
{
  int idxAxis = 0;
  for (int i = 0; i < axisDomainOrder_.numElements(); ++i)
  {
    if (1 == axisDomainOrder_(i)) // this element is an axis
    {
      elementIndexData_[i].resize(dataNIndex_[i]);
      elementIndexData_[i] = false;
      int iIdx = 0, count = 0, localIndex = 0;
      // Pass 1: flag data points inside the local axis extent and its mask
      for (int j = 0; j < dataNIndex_[i]; ++j)
      {
        iIdx = getAxisIndex((dataIndex_[indexMap_[i]])(j), dataBegin_[indexMap_[i]], nLocal_[indexMap_[i]]);
        if ((iIdx >= nBeginLocal_[indexMap_[i]]) &&
           (iIdx < nLocal_[indexMap_[i]]) && (axisMasks_[idxAxis](iIdx)))
        {
          ++count;
          elementIndexData_[i](j) = true;
        }
      }

      // Pass 2: fill local/global indices and zoom mask for the flagged points
      elementLocalIndex_[i].resize(count);
      elementGlobalIndex_[i].resize(count);
      elementZoomMask_[i].resize(count);
      elementZoomMask_[i] = false;
      count = 0;
      CArray<bool,1>& tmpIndexElementData = elementIndexData_[i];
      CArray<bool,1>& tmpZoomMaskElement = elementZoomMask_[i];
      CArray<int,1>& tmpLocalElementIndex = elementLocalIndex_[i];
      CArray<size_t,1>& tmpGlobalElementIndex = elementGlobalIndex_[i];
      for (int j = 0; j < dataNIndex_[i]; ++j)
      {
        if (tmpIndexElementData(j))
        {
          iIdx = tmpLocalElementIndex(count) = getAxisIndex((dataIndex_[indexMap_[i]])(j), dataBegin_[indexMap_[i]], nLocal_[indexMap_[i]]);
          tmpGlobalElementIndex(count) = (infoIndex_[indexMap_[i]])(iIdx);
          // Zoom mask is true only when the global index lies inside the zoom window
          if ((((infoIndex_[indexMap_[i]])(iIdx)) <= nZoomEnd_[indexMap_[i]])
             && (nZoomBegin_[indexMap_[i]] <= ((infoIndex_[indexMap_[i]])(iIdx))))
          {
            tmpZoomMaskElement(count) = true;
          }
          ++count;
        }
      }
      ++idxAxis;
    }
  }
}
---|
377 | |
---|
378 | /*! |
---|
379 | Create local index of scalar. |
---|
380 | */ |
---|
381 | void CDistributionClient::createLocalScalarDataIndex() |
---|
382 | { |
---|
383 | int idxAxis = 0; |
---|
384 | for (int i = 0; i < axisDomainOrder_.numElements(); ++i) |
---|
385 | { |
---|
386 | if (0 == axisDomainOrder_(i)) |
---|
387 | { |
---|
388 | elementIndexData_[i].resize(dataNIndex_[i]); |
---|
389 | elementIndexData_[i] = true; |
---|
390 | int count = 1; |
---|
391 | |
---|
392 | elementLocalIndex_[i].resize(count); |
---|
393 | elementLocalIndex_[i] = 0; |
---|
394 | elementGlobalIndex_[i].resize(count); |
---|
395 | elementGlobalIndex_[i] = 0; |
---|
396 | elementZoomMask_[i].resize(count); |
---|
397 | elementZoomMask_[i] = true; |
---|
398 | } |
---|
399 | } |
---|
400 | } |
---|
401 | |
---|
402 | /*! |
---|
403 | Create global index on client |
---|
404 | In order to do the mapping between client-server, each client creates its own |
---|
405 | global index of sending data. This global index is then used to calculate to which server |
---|
406 | the client needs to send it data as well as which part of data belongs to the server. |
---|
407 | So as to make clients and server coherent in order of index, global index is calculated by |
---|
408 | take into account of C-convention, the rightmost dimension varies faster. |
---|
409 | */ |
---|
410 | void CDistributionClient::createGlobalIndexSendToServer() |
---|
411 | { |
---|
412 | if (isComputed_) return; |
---|
413 | isComputed_ = true; |
---|
414 | createLocalDomainDataIndex(); |
---|
415 | createLocalAxisDataIndex(); |
---|
416 | createLocalScalarDataIndex(); |
---|
417 | |
---|
418 | int idxDomain = 0, idxAxis = 0; |
---|
419 | std::vector<int> eachElementSize(numElement_); |
---|
420 | |
---|
421 | // Precompute size of the loop |
---|
422 | for (int i = 0; i < numElement_; ++i) |
---|
423 | { |
---|
424 | eachElementSize[i] = elementLocalIndex_[i].numElements(); |
---|
425 | } |
---|
426 | |
---|
427 | // Compute size of the global index on client |
---|
428 | std::vector<StdSize> idxLoop(numElement_,0); |
---|
429 | std::vector<StdSize> currentIndex(numElement_,0); |
---|
430 | std::vector<StdSize> currentGlobalIndex(numElement_,0); |
---|
431 | int innerLoopSize = eachElementSize[0]; |
---|
432 | size_t idx = 0, indexLocalDataOnClientCount = 0, indexSend2ServerCount = 0; |
---|
433 | size_t ssize = 1; |
---|
434 | for (int i = 0; i < numElement_; ++i) ssize *= eachElementSize[i]; |
---|
435 | while (idx < ssize) |
---|
436 | { |
---|
437 | for (int i = 0; i < numElement_-1; ++i) |
---|
438 | { |
---|
439 | if (idxLoop[i] == eachElementSize[i]) |
---|
440 | { |
---|
441 | idxLoop[i] = 0; |
---|
442 | ++idxLoop[i+1]; |
---|
443 | } |
---|
444 | } |
---|
445 | |
---|
446 | // Find out outer index |
---|
447 | // Depending the inner-most element is axis or domain, |
---|
448 | // The outer loop index begins correspondingly at one (1) or zero (0) |
---|
449 | for (int i = 1; i < numElement_; ++i) |
---|
450 | { |
---|
451 | currentIndex[i] = elementLocalIndex_[i](idxLoop[i]); |
---|
452 | } |
---|
453 | |
---|
454 | // Inner most index |
---|
455 | for (int i = 0; i < innerLoopSize; ++i) |
---|
456 | { |
---|
457 | int gridMaskIndex = 0; |
---|
458 | currentIndex[0] = elementLocalIndex_[0](i); |
---|
459 | for (int k = 0; k < this->numElement_; ++k) |
---|
460 | { |
---|
461 | gridMaskIndex += (currentIndex[k])*elementNLocal_[k]; |
---|
462 | } |
---|
463 | |
---|
464 | if (gridMask_(gridMaskIndex)) |
---|
465 | { |
---|
466 | ++indexLocalDataOnClientCount; |
---|
467 | bool isIndexOnServer = true; |
---|
468 | |
---|
469 | for (int idxElement = 0; idxElement < this->numElement_; ++idxElement) |
---|
470 | { |
---|
471 | isIndexOnServer = isIndexOnServer && elementZoomMask_[idxElement](idxLoop[idxElement]); |
---|
472 | } |
---|
473 | if (isIndexOnServer) ++indexSend2ServerCount; |
---|
474 | } |
---|
475 | } |
---|
476 | idxLoop[0] += innerLoopSize; |
---|
477 | idx += innerLoopSize; |
---|
478 | } |
---|
479 | |
---|
480 | // Now allocate these arrays |
---|
481 | localDataIndex_.resize(indexLocalDataOnClientCount); |
---|
482 | localMaskIndex_.resize(indexSend2ServerCount); |
---|
483 | globalLocalDataSendToServerMap_.rehash(std::ceil(indexSend2ServerCount/globalLocalDataSendToServerMap_.max_load_factor())); //globalLocalDataSendToServerMap_.reserve(indexSend2ServerCount); |
---|
484 | |
---|
485 | // We need to loop with data index |
---|
486 | idxLoop.assign(numElement_,0); |
---|
487 | idx = indexLocalDataOnClientCount = indexSend2ServerCount = 0; |
---|
488 | ssize = 1; for (int i = 0; i < numElement_; ++i) ssize *= dataNIndex_[i]; |
---|
489 | innerLoopSize = dataNIndex_[0]; |
---|
490 | int countLocalData = 0; |
---|
491 | std::vector<int> correctIndexOfElement(numElement_,0); |
---|
492 | bool isOuterIndexCorrect = true; |
---|
493 | while (idx < ssize) |
---|
494 | { |
---|
495 | for (int i = 0; i < numElement_-1; ++i) |
---|
496 | { |
---|
497 | if (idxLoop[i] == dataNIndex_[i]) |
---|
498 | { |
---|
499 | idxLoop[i] = 0; |
---|
500 | correctIndexOfElement[i] = 0; |
---|
501 | ++idxLoop[i+1]; |
---|
502 | if (isOuterIndexCorrect) ++correctIndexOfElement[i+1]; |
---|
503 | } |
---|
504 | } |
---|
505 | |
---|
506 | // Depending the inner-most element axis or domain, |
---|
507 | // The outer loop index begins correspondingly at one (1) or zero (0) |
---|
508 | bool isIndexElementDataCorrect = true; |
---|
509 | for (int i = 1; i < numElement_; ++i) |
---|
510 | { |
---|
511 | if (elementIndexData_[i](idxLoop[i])) |
---|
512 | { |
---|
513 | currentIndex[i] = elementLocalIndex_[i](correctIndexOfElement[i]); |
---|
514 | currentGlobalIndex[i] = elementGlobalIndex_[i](correctIndexOfElement[i]); |
---|
515 | isIndexElementDataCorrect &= true; |
---|
516 | } |
---|
517 | else isIndexElementDataCorrect = false; |
---|
518 | } |
---|
519 | |
---|
520 | isOuterIndexCorrect = isIndexElementDataCorrect; |
---|
521 | |
---|
522 | if (isOuterIndexCorrect) |
---|
523 | { |
---|
524 | // Inner most index |
---|
525 | int correctIndexInnerElement = 0; |
---|
526 | for (int i = 0; i < innerLoopSize; ++i) |
---|
527 | { |
---|
528 | bool isCurrentIndexDataCorrect = isOuterIndexCorrect; |
---|
529 | if (elementIndexData_[0](i)) |
---|
530 | { |
---|
531 | currentIndex[0] = elementLocalIndex_[0](correctIndexInnerElement); |
---|
532 | currentGlobalIndex[0] = elementGlobalIndex_[0](correctIndexInnerElement); |
---|
533 | isCurrentIndexDataCorrect &= true; |
---|
534 | ++correctIndexInnerElement; |
---|
535 | } |
---|
536 | else isCurrentIndexDataCorrect = false; |
---|
537 | |
---|
538 | if (isCurrentIndexDataCorrect) |
---|
539 | { |
---|
540 | int gridMaskIndex = 0; |
---|
541 | for (int k = 0; k < this->numElement_; ++k) |
---|
542 | { |
---|
543 | gridMaskIndex += (currentIndex[k])*elementNLocal_[k]; |
---|
544 | } |
---|
545 | |
---|
546 | if (gridMask_(gridMaskIndex)) |
---|
547 | { |
---|
548 | localDataIndex_[indexLocalDataOnClientCount] = countLocalData; |
---|
549 | bool isIndexOnServer = true; |
---|
550 | for (int idxElement = 0; idxElement < this->numElement_; ++idxElement) |
---|
551 | { |
---|
552 | isIndexOnServer = isIndexOnServer && elementZoomMask_[idxElement](correctIndexOfElement[idxElement]); |
---|
553 | } |
---|
554 | |
---|
555 | if (isIndexOnServer) |
---|
556 | { |
---|
557 | size_t globalIndex = 0; |
---|
558 | for (int k = 0; k < numElement_; ++k) |
---|
559 | { |
---|
560 | globalIndex += (currentGlobalIndex[k])*elementNGlobal_[k]; |
---|
561 | } |
---|
562 | globalLocalDataSendToServerMap_[globalIndex] = indexLocalDataOnClientCount; |
---|
563 | localMaskIndex_[indexSend2ServerCount] = gridMaskIndex; |
---|
564 | ++indexSend2ServerCount; |
---|
565 | } |
---|
566 | ++indexLocalDataOnClientCount; |
---|
567 | } |
---|
568 | } |
---|
569 | ++countLocalData; |
---|
570 | correctIndexOfElement[0] = correctIndexInnerElement;; |
---|
571 | } |
---|
572 | } |
---|
573 | idxLoop[0] += innerLoopSize; |
---|
574 | idx += innerLoopSize; |
---|
575 | } |
---|
576 | } |
---|
577 | |
---|
//! Override of the base-class hook; intentionally empty on the client side.
//! The real work is done lazily by createGlobalIndexSendToServer().
void CDistributionClient::createGlobalIndex()
{
}
---|
581 | |
---|
582 | /*! |
---|
583 | Retrieve index i and index j of a domain from its data index |
---|
584 | Data contains not only true data, which are sent to servers, but also ghost data, which |
---|
585 | very often play a role of border of each local data, so does data index. Because data of a domain |
---|
586 | can be one dimension, or two dimensions, there is a need to convert data index to domain index |
---|
587 | \param [in] dataIIndex index of i data |
---|
588 | \param [in] dataJIndex index of j data |
---|
589 | \param [in] dataIBegin index begin of i data |
---|
590 | \param [in] dataJBegin index begin of j data |
---|
591 | \param [in] dataDim dimension of data (1 or 2) |
---|
592 | \param [in] ni local size ni of domain |
---|
593 | \param [out] j j index of domain |
---|
594 | \return i index of domain |
---|
595 | */ |
---|
596 | int CDistributionClient::getDomainIndex(const int& dataIIndex, const int& dataJIndex, |
---|
597 | const int& dataIBegin, const int& dataJBegin, |
---|
598 | const int& dataDim, const int& ni, int& j) |
---|
599 | { |
---|
600 | int tempI = dataIIndex + dataIBegin, |
---|
601 | tempJ = (dataJIndex + dataJBegin); |
---|
602 | int i = (dataDim == 1) ? (tempI) % ni |
---|
603 | : (tempI) ; |
---|
604 | j = (dataDim == 1) ? (tempI) / ni |
---|
605 | : (tempJ) ; |
---|
606 | |
---|
607 | return i; |
---|
608 | } |
---|
609 | |
---|
610 | /*! |
---|
611 | Retrieve index of an axis from its data index |
---|
612 | \param [in] dataIndex index of data |
---|
613 | \param [in] dataBegin index begin of data |
---|
614 | \param [in] ni local size of axis |
---|
615 | \return index of domain |
---|
616 | */ |
---|
617 | int CDistributionClient::getAxisIndex(const int& dataIndex, const int& dataBegin, const int& ni) |
---|
618 | { |
---|
619 | int tempI = dataIndex + dataBegin; |
---|
620 | return ((tempI)%ni); |
---|
621 | } |
---|
622 | |
---|
623 | /*! |
---|
624 | Return global local data mapping of client |
---|
625 | */ |
---|
626 | CDistributionClient::GlobalLocalDataMap& CDistributionClient::getGlobalLocalDataSendToServer() |
---|
627 | { |
---|
628 | if (!isComputed_) createGlobalIndexSendToServer(); |
---|
629 | return globalLocalDataSendToServerMap_; |
---|
630 | } |
---|
631 | |
---|
632 | /*! |
---|
633 | Return local data index of client |
---|
634 | */ |
---|
635 | const std::vector<int>& CDistributionClient::getLocalDataIndexOnClient() |
---|
636 | { |
---|
637 | if (!isComputed_) createGlobalIndexSendToServer(); |
---|
638 | return localDataIndex_; |
---|
639 | } |
---|
640 | |
---|
641 | /*! |
---|
642 | Return local mask index of client |
---|
643 | */ |
---|
644 | const std::vector<int>& CDistributionClient::getLocalMaskIndexOnClient() |
---|
645 | { |
---|
646 | if (!isComputed_) createGlobalIndexSendToServer(); |
---|
647 | return localMaskIndex_; |
---|
648 | } |
---|
649 | |
---|
650 | } // namespace xios |
---|