1 | ### =========================================================================== |
---|
2 | ### |
---|
3 | ### Compute calving weights. |
---|
4 | ### |
---|
5 | ### =========================================================================== |
---|
6 | ## |
---|
7 | ## MOSAIX is under CeCILL_V2 licence. See "Licence_CeCILL_V2-en.txt" |
---|
8 | ## file for an english version of the licence and |
---|
9 | ## "Licence_CeCILL_V2-fr.txt" for a french version. |
---|
10 | ## |
---|
11 | ## Permission is hereby granted, free of charge, to any person or |
---|
12 | ## organization obtaining a copy of the software and accompanying |
---|
13 | ## documentation covered by this license (the "Software") to use, |
---|
14 | ## reproduce, display, distribute, execute, and transmit the |
---|
15 | ## Software, and to prepare derivative works of the Software, and to |
---|
16 | ## permit third-parties to whom the Software is furnished to do so, |
---|
17 | ## all subject to the following: |
---|
18 | ## |
---|
19 | ## Warning, to install, configure, run, use any of MOSAIX software or |
---|
20 | ## to read the associated documentation you'll need at least one (1) |
---|
21 | ## brain in a reasonably working order. Lack of this implement will |
---|
22 | ## void any warranties (either express or implied). Authors assumes |
---|
23 | ## no responsibility for errors, omissions, data loss, or any other
---|
24 | ## consequences caused directly or indirectly by the use of this
---|
25 | ## software by incorrectly or partially configured users
---|
26 | ## |
---|
27 | '''Python code to generates weights for calving. Launched by `CreateWeights.bash`. |
---|
28 | |
---|
29 | More information with python `CalvingWeights.py -h`. |
---|
30 | |
---|
31 | - The atmosphere model integrates the calving over specific
---|
32 | regions. Presently the regions are three latitudinal bands with
---|
33 | limits 90°S/40°S/50°N/90°N.
---|
34 | |
---|
35 | - The weights distribute the calving in the ocean in the |
---|
36 | corresponding latitudinal bands. By default, the distribution is |
---|
37 | uniform. |
---|
38 | |
---|
39 | - For the southernmost band, it is possible to use a geographical |
---|
40 | distribution read in a file. These files are currently available |
---|
41 | for eORCA1 and eORCA025. |
---|
42 | |
---|
43 | ## SVN information |
---|
44 | Author = "$Author$" |
---|
45 | Date = "$Date$" |
---|
46 | Revision = "$Revision$" |
---|
47 | Id = "$Id$" |
---|
48 | HeadURL = "$HeadURL$" |
---|
49 | ''' |
---|
50 | import sys |
---|
51 | import os |
---|
52 | import getpass |
---|
53 | import platform |
---|
54 | import argparse |
---|
55 | import time |
---|
56 | import numpy as np |
---|
57 | import xarray as xr |
---|
58 | from scipy import ndimage |
---|
59 | import nemo |
---|
60 | |
---|
## SVN keyword expansion, kept as a module-level constant for provenance
__SVN__ = {
    'Author'   : "$Author$",
    'Date'     : '$Date$',
    'Revision' : '$Revision$',
    'Id'       : '$Id$',
    'HeadURL'  : '$HeadURL$',
    'SVN_Date' : '$SVN_Date: $',
}
---|
70 | ### |
---|
71 | |
---|
### ===== Handling command line parameters ==========================
# Creating a parser
parser = argparse.ArgumentParser (
    description = '''Compute calving weights''',
    epilog='-------- End of the help message --------')

# Adding arguments
parser.add_argument ('--oce' , help='oce model name', type=str, default='eORCA1.2',
                         choices=['ORCA2.3', 'ORCA1.0', 'ORCA1.1', 'ORCA1_CNRM', 'eORCA1.2',
                                  'eORCA1.4', 'eORCA1.4.2', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] )
parser.add_argument ('--atm' , type=str, default='ICO40',
                         help='atm model name (ICO* or LMD*)' )
parser.add_argument ('--type' , help='Type of distribution', type=str,
                         choices=['iceshelf', 'iceberg', 'nosouth', 'full'], default='full' )
parser.add_argument ('--repartition_file', help='Data files with iceberg melting' , type=str,
                         default='./runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc' )
parser.add_argument ('--repartition_var' , type=str, default=None,
                         help='Variable name for iceshelf' )
parser.add_argument ('--grids' , help='grids file name', default='grids.nc' )
# Bug fix: the help strings of --areas and --masks were swapped
parser.add_argument ('--areas' , help='areas file name', default='areas.nc' )
parser.add_argument ('--masks' , help='masks file name', default='masks.nc' )
parser.add_argument ('--o2a'   , help='o2a file name'  , default='o2a.nc'   )
parser.add_argument ('--output', help='output rmp file name',
                         default='rmp_tlmd_to_torc_calving.nc' )
# Bug fix: choices list had '64bit_data' twice, lacked '64bit_offset' and
# misspelled 'netcdf4_classic' — every accepted value is now actually handled
# by the FmtNetcdf translation table further down.
parser.add_argument ('--fmt' , default='netcdf4' ,
                         help='NetCDF file format, using nco syntax',
                         choices=['classic', 'netcdf3', '64bit', '64bit_data',
                                  '64bit_offset', 'netcdf4', 'netcdf4_classic'] )
parser.add_argument ('--ocePerio', type=float, default=0, choices=nemo.NPERIO_VALID_RANGE,
                         help='periodicity of ocean grid' )
parser.add_argument ('--modelName' , help='Name of model', type=str, default=None)

# Parse command line
myargs = parser.parse_args ()
---|
107 | |
---|
# Model Names
src_Name   = myargs.atm
dst_Name   = myargs.oce
model_name = myargs.modelName

# Default variable name in the repartition file, per distribution type.
# Bug fix: the 'else' previously paired with the 'iceberg' test, so an
# unset --repartition_var clobbered the 'iceshelf' default back to None.
if myargs.repartition_var is None :
    if myargs.type == 'iceshelf' :
        # Runoff from Antarctica ice shelves (Depoorter, 2013)
        repartitionVar = 'sornfisf'
    elif myargs.type == 'iceberg' :
        # Runoff from Antarctica icebergs (Depoorter, 2013)
        repartitionVar = 'Icb_Flux'
    else :
        # Uniform distributions ('nosouth', 'full') never read the file
        repartitionVar = None
else :
    repartitionVar = myargs.repartition_var
---|
123 | |
---|
# Latitude limits of each calving zone (south, north) pairs
limit_lat = ( (40.0, 90.0), (-50.0, 40.0), (-90.0, -50.0) )

nb_zone = len (limit_lat)

# Translate the nco-style --fmt value into the xarray/netCDF4 format name
_FMT_TABLE = {
    'classic'         : 'CLASSIC',
    'netcdf3'         : 'CLASSIC',
    '64bit'           : 'NETCDF3_64BIT_OFFSET',
    '64bit_data'      : 'NETCDF3_64BIT_DATA',
    '64bit_offset'    : 'NETCDF3_64BIT_OFFSET',
    'netcdf4'         : 'NETCDF4',
    'netcdf4_classic' : 'NETCDF4_CLASSIC',
}
# Assign only on a known key, exactly like the original if-chain did
if myargs.fmt in _FMT_TABLE :
    FmtNetcdf = _FMT_TABLE[myargs.fmt]
---|
136 | ### ===================================================================== |
---|
137 | |
---|
138 | # Model short names |
---|
139 | src_name = None |
---|
140 | dst_name = None |
---|
141 | if src_Name.count('ICO') != 0 : src_name = 'ico' |
---|
142 | if src_Name.count('LMD') != 0 : src_name = 'lmd' |
---|
143 | if dst_Name.count('ORCA') != 0 : dst_name = 'orc' |
---|
144 | |
---|
145 | CplModel = dst_Name + 'x' + src_Name |
---|
146 | |
---|
147 | print ('Atm names : ' + src_name + ' ' + src_Name ) |
---|
148 | print ('Oce names : ' + dst_name + ' ' + dst_Name ) |
---|
149 | print (' ') |
---|
150 | |
---|
# Reading domains characteristics
# File names of the OASIS grid description files, from the command line
grids = myargs.grids
areas = myargs.areas
masks = myargs.masks
o2a   = myargs.o2a   # NOTE(review): read but never used below — verify it is still needed

print ('Opening ' + areas)
f_areas = xr.open_dataset ( areas )
print ('Opening ' + masks)
f_masks = xr.open_dataset ( masks )
print ('Opening ' + grids)
f_grids = xr.open_dataset ( grids )

# T-point coordinates of source (atmosphere) and destination (ocean) grids;
# OASIS variable names are '<grid>.<field>', e.g. 'tico.lon' or 'torc.lat'
src_lon = f_grids ['t' + src_name + '.lon']
src_lat = f_grids ['t' + src_name + '.lat']
dst_lon = f_grids ['t' + dst_name + '.lon']
dst_lat = f_grids ['t' + dst_name + '.lat']

# Land-sea masks, OASIS convention: 1 on land, 0 on ocean
src_msk = f_masks ['t' + src_name + '.msk']
dst_msk = f_masks ['t' + dst_name + '.msk']
dst_mskutil = 1-dst_msk # Reversing the convention : 0 on continent, 1 on ocean
print ('dst_msk : ', dst_msk.sum().values )
print ('dst_mskutil : ', dst_mskutil.sum().values )
---|
174 | |
---|
# Ocean grid periodicity (NEMO north-fold type), from the command line
nperio_dst=myargs.ocePerio

# Periodicity masking for NEMO
# When not given (0), guess it from the configuration name.
# NOTE(review): eORCA1.4* and ORCA1* names are not in these tables and keep
# nperio_dst == 0 — confirm that is intended for those configurations.
if nperio_dst == 0 :
    if dst_Name in ['ORCA2.3', 'ORCA2.4'] : nperio_dst = 4
    if dst_Name in ['eORCA1.2', 'eORCA1.3'] : nperio_dst = 6
    if dst_Name in ['ORCA025', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] : nperio_dst = 6

print ('nperio_dst: ' + str(nperio_dst) )
# Apply the lateral boundary condition mask (duplicate points set to 0)
dst_mskutil = nemo.lbc_mask (dst_mskutil, nperio=nperio_dst, cd_type='T', sval=0 )
print ('dst_mskutil : ' + str(np.sum(dst_mskutil)))
---|
187 | |
---|
##
## Fill Closed seas and other
## ==================================================

# Preparation by closing some straits
# -----------------------------------
# The (j, i) indices below are hand-picked for each configuration so that
# binary_fill_holes can flood the marginal seas from the land side.
if dst_Name in [ 'eORCA025', 'eORCA025.1', 'eORCA025.4' ] :
    print ('Filling some seas for ' + dst_Name )
    # Set Gibraltar strait to 0 to fill Mediterranean sea
    dst_mskutil[838:841,1125] = 0
    # Set Bal-El-Mandeb to 0 to fill Red Sea
    dst_mskutil[736,1321:1324] = 0
    # Set Stagerak to 0 to fill Baltic Sea
    dst_mskutil[961,1179:1182] = 0
    # Set Ormuz Strait to 0 to fill Arabo-Persian Gulf
    dst_mskutil[794:798,1374] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[997:1012,907] = 0

if dst_Name in [ 'eORCA1.2', 'eORCA1.3', 'eORCA1.4', 'eORCA1.4.2' ] :
    print ('Filling some seas for ' + dst_Name)
    # Set Gibraltar strait to 0 to fill Mediterrean sea
    dst_mskutil[240, 283] = 0
    # Set Bal-El-Mandeb to 0 to fill Red Sea
    dst_mskutil[211:214, 332] = 0
    # Set Stagerak to 0 to fill Baltic Sea
    dst_mskutil[272:276, 293] = 0
    # Set Ormuz Strait to 0 to fill Arabo-Persian Gulf
    dst_mskutil[227:230, 345] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[284,222:225] = 0

if dst_Name in [ 'ORCA2.3', 'ORCA2.4' ] :
    print ('Filling some seas for ' + dst_Name)
    # Set Gibraltar strait to 0 to fill Mediterrean sea
    dst_mskutil[101,139] = 0
    # Set Black Sea to zero. At the edge of the domain : binary_fill_holes fails
    dst_mskutil[ 99:111, 0: 5] = 0
    dst_mskutil[106:111, 173:182] = 0
    # Set Stagerak to 0 to fill Baltic Sea
    dst_mskutil[115,144] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[120:123,110] = 0
    # Set Bal-El-Mandeb to 0 to fill Red Sea
    dst_mskutil[87:89,166] = 0

# Keep the mask with straits closed, written later as a debug field
dst_closed = dst_mskutil

# Fill closed seas with image processing library
# ----------------------------------------------
# Work on the inverted (land=1) mask so the enclosed seas are "holes",
# restore the lateral boundary condition afterwards.
dst_mskutil = nemo.lbc_mask ( 1-ndimage.binary_fill_holes (
    1-nemo.lbc(dst_mskutil, nperio=nperio_dst, cd_type='T')),
    nperio=nperio_dst, cd_type='T', sval=0)
---|
241 | |
---|
# Surfaces
# Cell areas of both grids, and the "useful" ocean area (closed seas removed)
src_srf = f_areas.variables ['t' + src_name + '.srf']
dst_srf = f_areas.variables ['t' + dst_name + '.srf']
dst_srfutil = dst_srf * np.float64 (dst_mskutil)

# Total useful ocean area (diagnostic)
dst_srfutil_sum = np.sum( dst_srfutil)

# Cell corner longitudes (.clo) and latitudes (.cla)
src_clo = f_grids ['t' + src_name + '.clo']
src_cla = f_grids ['t' + src_name + '.cla']
dst_clo = f_grids ['t' + dst_name + '.clo']
dst_cla = f_grids ['t' + dst_name + '.cla']

# Indices
# Grid shapes and flattened sizes
( src_jpj, src_jpi) = src_lat.shape
src_grid_size = src_jpj*src_jpi
( dst_jpj, dst_jpi) = dst_lat.shape
dst_grid_size = dst_jpj*dst_jpi
orc_index = np.arange (dst_jpj*dst_jpi, dtype=np.int32) + 1 # Fortran indices (starting at one)
---|
260 | |
---|
### ===== Reading needed data ========================================
if myargs.type in ['iceberg', 'iceshelf' ]:
    # Reading data file for calving or iceberg geometry around Antarctica
    print ( 'Opening ' + myargs.repartition_file)
    f_repartition = xr.open_dataset ( myargs.repartition_file )
    # Sum over the first axis — presumably time — to get a 2D (j, i)
    # melting pattern; TODO confirm axis 0 is time in the repartition file
    repartition = np.sum ( f_repartition.variables [repartitionVar], axis=0 )
---|
267 | |
---|
## Before loop on basins
### ===== Starting loop on basins=====================================

# Initialise accumulators for the remapping links.
# Bug fix: these three arrays were initialised twice in a row; once is enough.
remap_matrix = np.empty ( shape=(0), dtype=np.float64 )
src_address  = np.empty ( shape=(0), dtype=np.int32   )
dst_address  = np.empty ( shape=(0), dtype=np.int32   )

print (' ')

# Per-basin destination mask and repartition key
basin_msk       = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float32)
key_repartition = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float64)
---|
284 | |
---|
## Loop over the latitude bands
for n_bas in np.arange ( nb_zone ) :
    south  = False
    ok_run = False
    lat_south = np.min(limit_lat[n_bas])
    lat_north = np.max(limit_lat[n_bas])
    # The southernmost band gets the data-driven (iceberg/iceshelf) treatment
    if lat_south <= -60.0 : south = True

    print ( f'basin: {n_bas:2d} -- Latitudes: {lat_south:+.1f} {lat_north:+.1f} --' )
    ##
    # Decide whether this band is processed, depending on the distribution type
    if myargs.type == 'iceberg' and south :
        ok_run = True
        print ('Applying iceberg to south' )
    if myargs.type == 'iceshelf' and south :
        ok_run = True
        print ('Applying iceshelf to south' )
    if myargs.type == 'iceberg' and not south :
        ok_run = False
        print ('Skipping iceberg: not south ')
    if myargs.type == 'iceshelf' and not south :
        ok_run = False
        print ('Skipping iceshelf: not south ')
    if myargs.type == 'nosouth' and south :
        ok_run = False
        print ('Skipping south: nosouth case' )
    if myargs.type == 'nosouth' and not south :
        ok_run = True
        print ('Running: not in south, uniform repartition')
    if myargs.type == 'full' :
        ok_run = True
        print ('Running general trivial case, uniform repartition' )

    if ok_run :
        # Calving integral sent to one point per latitude band
        index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1

        # Basin mask : useful ocean points inside the latitude band
        basin_msk[n_bas] = np.where ( (dst_lat > lat_south ) & (dst_lat <= lat_north ),
                                          dst_mskutil, 0 )

        # Repartition pattern : observed melting around Antarctica, uniform elsewhere
        if myargs.type in ['iceberg', 'iceshelf' ] :
            key_repartition[n_bas] = repartition * basin_msk[n_bas]
        else :
            key_repartition[n_bas] = basin_msk[n_bas]

        # Integral and normalisation.
        # Bug fix: normalise only the current basin — the original divided the
        # whole key_repartition array, re-scaling previously normalised basins.
        sum_repartition = np.sum ( key_repartition[n_bas] * dst_srfutil ).values
        key_repartition[n_bas] = key_repartition[n_bas] / sum_repartition

        print ( 'Sum of repartition key : {:12.3e}'.format (np.sum (key_repartition[n_bas] )) )
        print ( 'Integral (area weighted) of repartition key : {:12.3e}'.format (np.sum (key_repartition[n_bas] * dst_srfutil ).values) )

        # Basin surface (modulated by repartition key)
        basin_srfutil = np.sum ( key_repartition[n_bas] * dst_srfutil )

        # Weights and links
        poids = 1.0 / basin_srfutil
        matrix_local = np.where ( basin_msk[n_bas].ravel() > 0.5,
                                      key_repartition[n_bas].ravel()*poids.values, 0. )
        matrix_local = matrix_local[matrix_local > 0.0] # Keep only non zero values
        num_links = np.count_nonzero (matrix_local)
        # Address on source grid : all links points to the same LMDZ point.
        src_address_local = np.ones(num_links, dtype=np.int32 )*index_src
        # Address on destination grid : each NEMO point with non zero link
        dst_address_local = np.where ( key_repartition[n_bas].ravel() > 0.0, orc_index, 0)
        dst_address_local = dst_address_local[dst_address_local > 0]
        # Append to global tabs
        remap_matrix = np.append ( remap_matrix, matrix_local )
        src_address  = np.append ( src_address , src_address_local )
        dst_address  = np.append ( dst_address , dst_address_local )
        #
        #print ( 'Sum of remap_matrix : {:12.3e}'.format(np.sum(matrix_local)) )
        # Bug fix: this print lacked the f prefix and printed the braces literally
        print ( f'Point in atmospheric grid : {index_src:4d} -- num_links: {num_links:6d}' )
        print (' ')
---|
360 | |
---|
## End of loop
print (' ')

# Total number of links over all bands
num_links = np.count_nonzero (remap_matrix)
print ( f'Total num_links : {num_links:10d}' )

# Diag : interpolate uniform field
# Put the band index on each source point actually used, then apply the
# remapping by hand to produce a sanity-check field on the destination grid.
src_field = np.zeros(shape=(src_grid_size,))
for n_bas in np.arange ( nb_zone ) :
    # Same source-point formula as in the main loop above
    index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1
    src_field[index_src-1] = n_bas

dst_field = np.zeros(shape=(dst_grid_size,))
for link in np.arange (num_links) :
    # Addresses are Fortran-style (1-based), hence the -1
    dst_field [dst_address [link]-1] += remap_matrix[link] * src_field[src_address[link]-1]
---|
377 | |
---|
### ===== Writing the weights file, for OASIS MCT =======================

# Output file
calving = myargs.output

print ('Output file: ' + calving )

# Wrap the links into SCRIP-style variables: the weight matrix has one
# weight per link, addresses are 1-based flat indices on each grid.
remap_matrix = xr.DataArray (np.reshape(remap_matrix, (num_links, 1)),
                                 dims = ['num_links', 'num_wgts'] )
src_address  = xr.DataArray (src_address.astype(np.int32) ,
                                 dims = ['num_links',] )
src_address.attrs['convention'] = 'Fortran style addressing, starting at 1'
dst_address  = xr.DataArray (dst_address.astype(np.int32) ,
                                 dims = ['num_links',] )
dst_address.attrs['convention'] = 'Fortran style addressing, starting at 1'
---|
393 | |
---|
# Source (atmosphere) grid description, SCRIP style.
# Bug fix: grid dims were built from (src_jpi, src_jpi) — jpi twice.
src_grid_dims       = xr.DataArray ( np.array (( src_jpi, src_jpj ), dtype=np.int32),
                                         dims = ['src_grid_rank',] )
src_grid_center_lon = xr.DataArray ( src_lon.values.ravel(), dims = ['src_grid_size',] )
src_grid_center_lat = xr.DataArray ( src_lat.values.ravel(), dims = ['src_grid_size',] )
src_grid_center_lon.attrs.update ( {'units':'degrees_east ', 'long_name':'Longitude',
                                        'standard_name':'longitude',
                                        'bounds':'src_grid_corner_lon'})
src_grid_center_lat.attrs.update ( {'units':'degrees_north', 'long_name':'Latitude' ,
                                        'standard_name':'latitude' ,
                                        'bounds':'src_grid_corner_lat'})
# Cell corners, reshaped to (grid_size, nb_corners)
src_grid_corner_lon = xr.DataArray ( src_clo.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ),
                                         dims = ['src_grid_size', 'src_grid_corners'] )
src_grid_corner_lat = xr.DataArray ( src_cla.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ),
                                         dims = ['src_grid_size', 'src_grid_corners' ] )
src_grid_corner_lon.attrs['units']='degrees_east'
src_grid_corner_lat.attrs['units']='degrees_north'
src_grid_area  = xr.DataArray ( src_srf.values.ravel(), dims = ['src_grid_size',] )
src_grid_area.attrs.update ({'long_name':'Grid area', 'standard_name':'cell_area', 'units':'m2'})
src_grid_imask = xr.DataArray ( src_msk.values.ravel().astype(np.int32) ,
                                    dims = ['src_grid_size',] )
# Bug fix: truncated long_name '-sea mask' completed to 'Land-sea mask'
src_grid_imask.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })
---|
415 | |
---|
416 | # -- |
---|
# Destination (ocean) grid description, SCRIP style.
# Bug fix: grid dims were built from (dst_jpi, dst_jpi) — jpi twice.
dst_grid_dims       = xr.DataArray ( np.array(( dst_jpi, dst_jpj), dtype=np.int32) ,
                                         dims = ['dst_grid_rank', ])
dst_grid_center_lon = xr.DataArray ( dst_lon.values.ravel(), dims = ['dst_grid_size', ])
dst_grid_center_lat = xr.DataArray ( dst_lat.values.ravel(), dims = ['dst_grid_size', ])
dst_grid_center_lon.attrs.update ( {'units':'degrees_east ', 'long_name':'Longitude',
                                        'standard_name':'longitude',
                                        'bounds':'dst_grid_corner_lon'})
dst_grid_center_lat.attrs.update ( {'units':'degrees_north', 'long_name':'Latitude' ,
                                        'standard_name':'latitude' ,
                                        'bounds':'dst_grid_corner_lat'})
# Cell corners, reshaped to (grid_size, nb_corners)
dst_grid_corner_lon = xr.DataArray ( dst_clo.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ),
                                         dims = ['dst_grid_size', 'dst_grid_corners'] )
dst_grid_corner_lat = xr.DataArray ( dst_cla.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ),
                                         dims = ['dst_grid_size', 'dst_grid_corners'] )
dst_grid_corner_lon.attrs['units']='degrees_east'
dst_grid_corner_lat.attrs['units']='degrees_north'
dst_grid_area  = xr.DataArray ( dst_srf.values.ravel(), dims = ['dst_grid_size', ] )
dst_grid_area.attrs.update ({'long_name':'Grid area', 'standard_name':'cell_area', 'units':'m2'})
dst_grid_imask = xr.DataArray ( dst_msk.values.ravel(), dims = ['dst_grid_size',] )
dst_grid_imask.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' } )
---|
437 | |
---|
# Per-band mask and repartition key, flattened for the weights file
dst_bassin      = xr.DataArray ( basin_msk.reshape ( (nb_zone,dst_grid_size) ) ,
                                     dims = ['nb_zone', 'dst_grid_size', ] )
dst_repartition = xr.DataArray ( key_repartition.reshape( (nb_zone,dst_grid_size) ) ,
                                     dims = ['nb_zone', 'dst_grid_size', ] )

# Latitude limits of each band
dst_southLimit = xr.DataArray ( np.min(limit_lat, axis=(1,)).astype(np.float32) ,
                                    dims = ['nb_zone', ] )
# Bug fix: the northern limit is the maximum of each band
# (the original used np.min for both limits)
dst_northLimit = xr.DataArray ( np.max(limit_lat, axis=(1,)).astype(np.float32) ,
                                    dims = ['nb_zone', ] )
---|
447 | |
---|
# Additionnal fields for debugging
# ================================
# Mask after closed-sea filling, and mask with only the straits closed
dst_grid_imaskutil = xr.DataArray ( dst_mskutil.ravel().astype(np.float32),
                                        dims = ['dst_grid_size',] )
dst_grid_imaskutil.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })
dst_grid_imaskclose = xr.DataArray ( dst_closed.values.ravel(), dims = ['dst_grid_size',] )
dst_grid_imaskclose.attrs.update ({'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })

# Destination-grid values gathered on the link list (1-based addresses)
dst_lon_addressed = xr.DataArray ( dst_lon.values.ravel()[dst_address-1].ravel(),
                                       dims = ['num_links',] )
dst_lat_addressed = xr.DataArray ( dst_lat.values.ravel()[dst_address-1].ravel(),
                                       dims = ['num_links',] )
dst_lon_addressed.attrs.update ( {'units':'degrees_east ', 'long_name':'Longitude',
                                      'standard_name':'longitude' })
dst_lat_addressed.attrs.update ( {'units':'degrees_north', 'long_name':'Latitude' ,
                                      'standard_name':'latitude' })
dst_area_addressed = xr.DataArray ( dst_srf.values.ravel()[dst_address-1].ravel(),
                                        dims = ['num_links',] )
dst_area_addressed.attrs.update ( {'long_name':'Grid area0', 'standard_name':'cell_area',
                                       'units':'m2' })
dst_imask_addressed = xr.DataArray ( dst_msk.values.ravel()[dst_address-1].ravel(),
                                         dims = ['num_links', ] )
dst_imask_addressed.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })
dst_imaskutil_addressed = xr.DataArray ( dst_mskutil.ravel()[dst_address-1].astype(np.float32),
                                             dims = ['num_links'] )
dst_imaskutil_addressed.attrs.update ( {'long_name':'Land-sea mask', 'units':'Land:1, Ocean:0' })

# Diagnostic fields computed above (uniform-field interpolation check)
src_field = xr.DataArray ( src_field, dims = ['src_grid_size',] )
dst_field = xr.DataArray ( dst_field, dims = ['dst_grid_size',] )
---|
477 | |
---|
# Assemble the output dataset (SCRIP weights + debug fields).
# Bug fix: 'src_grid_center_lon' was mapped to src_grid_center_lat.
f_calving = xr.Dataset ( {
    'remap_matrix'            : remap_matrix,
    'src_address'             : src_address,
    'dst_address'             : dst_address,
    'src_grid_dims'           : src_grid_dims,
    'src_grid_center_lon'     : src_grid_center_lon,
    'src_grid_center_lat'     : src_grid_center_lat,
    'src_grid_corner_lon'     : src_grid_corner_lon,
    'src_grid_corner_lat'     : src_grid_corner_lat,
    'src_grid_area'           : src_grid_area,
    'src_grid_imask'          : src_grid_imask,
    'dst_grid_dims'           : dst_grid_dims,
    'dst_grid_center_lon'     : dst_grid_center_lon,
    'dst_grid_center_lat'     : dst_grid_center_lat,
    'dst_grid_corner_lon'     : dst_grid_corner_lon,
    'dst_grid_corner_lat'     : dst_grid_corner_lat,
    'dst_grid_area'           : dst_grid_area,
    'dst_grid_imask'          : dst_grid_imask,
    'dst_bassin'              : dst_bassin,
    'dst_repartition'         : dst_repartition,
    'dst_southLimit'          : dst_southLimit,
    'dst_northLimit'          : dst_northLimit,
    'dst_grid_imaskutil'      : dst_grid_imaskutil,
    'dst_grid_imaskclose'     : dst_grid_imaskclose,
    'dst_lon_addressed'       : dst_lon_addressed,
    'dst_lat_addressed'       : dst_lat_addressed,
    'dst_area_addressed'      : dst_area_addressed,
    'dst_imask_addressed'     : dst_imask_addressed,
    'dst_imaskutil_addressed' : dst_imaskutil_addressed,
    'src_field'               : src_field,
    'dst_field'               : dst_field,
} )
---|
510 | |
---|
# Global attributes: provenance and run metadata.
# Bug fix: malformed Institution URL 'https.//' corrected to 'https://'.
f_calving.attrs.update ( {
    'Conventions'     : 'CF-1.6',
    'source'          : 'IPSL Earth system model',
    'group'           : 'ICMC IPSL Climate Modelling Center',
    'Institution'     : 'IPSL https://www.ipsl.fr',
    'Ocean'           : f'{dst_Name} https://www.nemo-ocean.eu',
    'Atmosphere'      : f'{src_Name} http://lmdz.lmd.jussieu.fr',
    'associatedFiles' : f'{grids} {areas} {masks}',
    'description'     : 'Generated with XIOS http://forge.ipsl.jussieu.fr/ioserver and MOSAIX https://forge.ipsl.jussieu.fr/igcmg/browser/TOOLS/MOSAIX',
    'title'           : calving,
    'Program'         : f'Generated by {sys.argv[0]} with flags ' + ' '.join (sys.argv[1:]),
    'repartitionType' : myargs.type,
    'gridsFile'       : grids,
    'areasFile'       : areas,
    'masksFile'       : masks,
    'timeStamp'       : time.asctime(),
    'directory'       : os.getcwd (),
    'HOSTNAME'        : platform.node (),
    'LOGNAME'         : getpass.getuser(),
    'Python'          : 'Python version ' + platform.python_version (),
    'OS'              : platform.system (),
    'release'         : platform.release (),
    'hardware'        : platform.machine (),
    'conventions'     : 'SCRIP',
    'dest_grid'       : 'curvilinear',
    'Model'           : model_name,
    'SVN_Author'      : '$Author$',
    'SVN_Date'        : '$Date$',
    'SVN_Revision'    : '$Revision$',
    'SVN_Id'          : '$Id$',
    'SVN_HeadURL'     : '$HeadURL$',
})
---|
543 | |
---|
# Extra provenance attributes, only meaningful when a repartition file was read
if myargs.type in [ 'iceberg', 'iceshelf' ] :
    f_calving.attrs.update ( {
        'originalFiles' : myargs.repartition_file,
        'repartitionFile' : myargs.repartition_file,
        'repartitionVar' : repartitionVar,
        } )
---|
550 | |
---|
## Write the weights file in the requested NetCDF format, then close it
f_calving.to_netcdf ( calving, mode='w', format=FmtNetcdf )
f_calving.close()

# Bug fix: doubled apostrophe printed literally ("That''s")
print ("That's all folks !")
## ======================================================================================
---|
556 | ## ====================================================================================== |
---|