source: TOOLS/MOSAIX/CalvingWeights.py @ 6066

Last change on this file since 6066 was 6066, checked in by omamce, 2 years ago

O.M. : changes in MOSAIX

  • Property svn:keywords set to Date Revision HeadURL Author Id
File size: 22.6 KB
Line 
1### ===========================================================================
2###
3### Compute calving weights.
4###
5### ===========================================================================
6##
7##  Warning, to install, configure, run, use any of Olivier Marti's
8##  software or to read the associated documentation you'll need at least
9##  one (1) brain in a reasonably working order. Lack of this implement
10##  will void any warranties (either express or implied).
11##  O. Marti assumes no responsability for errors, omissions,
12##  data loss, or any other consequences caused directly or indirectly by
13##  the usage of his software by incorrectly or partially configured
14##  personal.
15##
16import numpy as np, xarray as xr
17import sys, os, platform, argparse, textwrap, time
18from scipy import ndimage
19import nemo
20
21## SVN information
22__Author__   = "$Author$"
23__Date__     = "$Date$"
24__Revision__ = "$Revision$"
25__Id__       = "$Id$"
26__HeadURL__  = "$HeadURL$"
27__SVN_Date__ = "$SVN_Date: $"
28
29###
30
### ===== Handling command line parameters ==================================================
# Creating a parser
parser = argparse.ArgumentParser (
    description = """Compute calving weights""",
    epilog='-------- End of the help message --------')

# Adding arguments
parser.add_argument ('--oce'     , help='oce model name', type=str, default='eORCA1.2',
                         choices=['ORCA2.3', 'eORCA1.2', 'eORCA1.4', 'eORCA1.4.2', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] )
parser.add_argument ('--atm'     , help='atm model name (ICO* or LMD*)', type=str, default='ICO40'    )
parser.add_argument ('--type'    , help='Type of distribution', type=str, choices=['iceshelf', 'iceberg', 'nosouth', 'full'], default='full'  )
parser.add_argument ('--repartition_file', help='Data files with iceberg melting' , type=str, default='./runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc' )
parser.add_argument ('--repartition_var' , help='Variable name for iceshelf'      , type=str, default=None)
# Fixed : the help strings of --areas and --masks were swapped
parser.add_argument ('--grids' , help='grids file name', default='grids.nc' )
parser.add_argument ('--areas' , help='areas file name', default='areas.nc' )
parser.add_argument ('--masks' , help='masks file name', default='masks.nc' )
parser.add_argument ('--o2a'   , help='o2a file name'  , default='o2a.nc'   )
parser.add_argument ('--output'  , help='output rmp file name', default='rmp_tlmd_to_torc_calving.nc' )
# Fixed : removed the duplicated '64bit_data' choice, added the missing
# '64bit_offset' (it is handled below), and corrected the 'netcdf4_classsic'
# typo which made the 'netcdf4_classic' format unreachable from the CLI.
parser.add_argument ('--fmt'     , help='NetCDF file format, using nco syntax', default='netcdf4',
                         choices=['classic', 'netcdf3', '64bit', '64bit_data', '64bit_offset', 'netcdf4', 'netcdf4_classic'] )
parser.add_argument ('--ocePerio', help='periodicity of ocean grid', type=int, default=0 )

# Parse command line
myargs = parser.parse_args()
56
# Model Names
src_Name = myargs.atm ; dst_Name = myargs.oce

# Default variable name holding the repartition pattern.
# Only meaningful for the 'iceberg' and 'iceshelf' types ; stays None
# otherwise (it is never read in that case — previously it was left
# undefined, a latent NameError).
repartitionVar = myargs.repartition_var
if repartitionVar is None :
    # Runoff from Antarctica iceshelves (Depoorter, 2013)
    if myargs.type == 'iceshelf' : repartitionVar = 'sornfisf'
    # Runoff from Antarctica iceberg (Depoorter, 2013)
    if myargs.type == 'iceberg'  : repartitionVar = 'Icb_Flux'
69
# Latitude bounds (south, north) of each calving zone :
# northern hemisphere, tropical/mid-latitude band, southern ocean.
limit_lat = ( (40.0, 90.0), (-50.0, 40.0), ( -90.0, -50.0) )

# Number of calving zones, deduced from the table above
nb_zone = len(limit_lat)
74
# Map the command-line format keyword to the xarray/netCDF library format name.
# A dict lookup replaces the previous if-chain, which left FmtNetcdf undefined
# for any unexpected value (NameError at to_netcdf time).
_FMT_MAP = {
    'classic'         : 'CLASSIC',
    'netcdf3'         : 'CLASSIC',
    '64bit'           : 'NETCDF3_64BIT_OFFSET',
    '64bit_offset'    : 'NETCDF3_64BIT_OFFSET',
    '64bit_data'      : 'NETCDF3_64BIT_DATA',
    'netcdf4'         : 'NETCDF4',
    'netcdf4_classic' : 'NETCDF4_CLASSIC',
}
# argparse restricts myargs.fmt to the keys above ; default defensively to NETCDF4
FmtNetcdf = _FMT_MAP.get (myargs.fmt, 'NETCDF4')
### ==========================================================================================
83
# Model short names, deduced from the long names
src_name = None ; dst_name = None
if 'ICO'  in src_Name : src_name = 'ico'
if 'LMD'  in src_Name : src_name = 'lmd'
if 'ORCA' in dst_Name : dst_name = 'orc'

# Fail early with a clear message ; previously an unrecognised name crashed
# with a TypeError ('str' + None) at the print below
if src_name is None : raise ValueError ('Unknown atm model name: ' + src_Name + ' (expected ICO* or LMD*)')
if dst_name is None : raise ValueError ('Unknown oce model name: ' + dst_Name + ' (expected *ORCA*)')

CplModel = dst_Name + 'x' + src_Name

print ('Atm names : ' + src_name + ' ' + src_Name )
print ('Oce names : ' + dst_name + ' ' + dst_Name )
print (' ')
95
# Reading domains characteristics
grids = myargs.grids
areas = myargs.areas
masks = myargs.masks
o2a   = myargs.o2a

def _open (path) :
    '''Print the file name, then open it as an xarray dataset.'''
    print ('Opening ' + path)
    return xr.open_dataset ( path )

f_areas = _open (areas)
f_masks = _open (masks)
f_grids = _open (grids)

# T-point names are prefixed with 't' + the model short name
t_src = 't' + src_name
t_dst = 't' + dst_name

# Longitudes and latitudes of the T points, on both grids
src_lon = f_grids [t_src + '.lon']
src_lat = f_grids [t_src + '.lat']
dst_lon = f_grids [t_dst + '.lon']
dst_lat = f_grids [t_dst + '.lat']

# Land-sea masks of the T points
src_msk = f_masks [t_src + '.msk']
dst_msk = f_masks [t_dst + '.msk']
dst_mskutil = 1 - dst_msk  # Reversing the convention : 0 on continent, 1 on ocean
print ('dst_msk     : ', dst_msk.sum().values )
print ('dst_mskutil : ', dst_mskutil.sum().values )
119
# Ocean grid periodicity
nperio_dst = myargs.ocePerio

# Default periodicity per ocean configuration, when not given on the command line.
# NOTE(review): eORCA1.4 and eORCA1.4.2 have no default here and keep
# nperio_dst == 0 — confirm this is intended.
if nperio_dst == 0 :
    _nperio_defaults = {
        'ORCA2.3'    : 4, 'ORCA2.4'    : 4,
        'eORCA1.2'   : 6, 'eORCA1.3'   : 6,
        'ORCA025'    : 6, 'eORCA025'   : 6,
        'eORCA025.1' : 6, 'eORCA025.4' : 6,
    }
    nperio_dst = _nperio_defaults.get (dst_Name, 0)

print ('nperio_dst: ' + str(nperio_dst) )
# Mask the duplicated points of the NEMO lateral boundary condition
dst_mskutil = nemo.lbc_mask (dst_mskutil, nperio=nperio_dst, cd_type='T', sval=0 )
print ('dst_mskutil : ' + str(np.sum(dst_mskutil)))
132
##
## Fill Closed seas and other
## ==================================================

# Preparation by closing some straits
# -----------------------------------
# The (j, i) indices below are hard-coded strait positions, specific to each
# grid version. NOTE(review): verify them whenever a new grid version is added.
if dst_Name in [ 'eORCA025', 'eORCA025.1', 'eORCA025.4' ] :
    print ('Filling some seas for ' + dst_Name )
    # Set Gibraltar strait to 0 to fill Mediterranean sea
    dst_mskutil[838:841,1125] = 0
    # Set Bal-El-Mandeb to 0 to fill Red Sea
    dst_mskutil[736,1321:1324] = 0
    # Set Stagerak to 0 to fill Baltic Sea
    dst_mskutil[961,1179:1182] = 0
    # Set Ormuz Strait to 0 to fill Arabo-Persian Gulf
    dst_mskutil[794:798,1374] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[997:1012,907] = 0

if dst_Name in [ 'eORCA1.2', 'eORCA1.3', 'eORCA1.4', 'eORCA1.4.2' ] :
    print ('Filling some seas for ' + dst_Name)
    # Set Gibraltar strait to 0 to fill Mediterrean sea
    dst_mskutil[240, 283] = 0
    # Set Bal-El-Mandeb to 0 to fill Red Sea
    dst_mskutil[211:214, 332] = 0
    # Set Stagerak to 0 to fill Baltic Sea
    dst_mskutil[272:276, 293] = 0
    # Set Ormuz Strait to 0 to fill Arabo-Persian Gulf
    dst_mskutil[227:230, 345] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[284,222:225] = 0

if dst_Name in [ 'ORCA2.3', 'ORCA2.4' ] :
    print ('Filling some seas for ' + dst_Name)
    # Set Gibraltar strait to 0 to fill Mediterrean sea
    dst_mskutil[101,139] = 0
    # Set Black Sea to zero. At the edge of the domain : binary_fill_holes fails
    dst_mskutil[ 99:111,   0:  5] = 0
    dst_mskutil[106:111, 173:182] = 0
    # Set Stagerak to 0 to fill Baltic Sea
    dst_mskutil[115,144] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[120:123,110] = 0
    # Set Bal-El-Mandeb to 0 to fill Red Sea
    dst_mskutil[87:89,166] = 0

# Keep a reference to the strait-closed mask for diagnostics. This is an
# alias, not a copy : it stays valid because dst_mskutil is REBOUND (not
# mutated) by the fill below.
dst_closed = dst_mskutil

# Fill closed seas with image processing library
# ----------------------------------------------
# The mask is inverted (1 on land) so that enclosed seas become "holes"
# that binary_fill_holes fills ; the result is inverted back, and the NEMO
# lateral boundary points are re-masked.
dst_mskutil = nemo.lbc_mask ( 1-ndimage.binary_fill_holes (1-nemo.lbc(dst_mskutil, nperio=nperio_dst, cd_type='T')), nperio=nperio_dst, cd_type='T', sval=0)
184
# Cell surfaces on both grids
src_srf = f_areas.variables ['t' + src_name + '.srf']
dst_srf = f_areas.variables ['t' + dst_name + '.srf']
# Ocean surface restricted to the useful (open ocean) mask
dst_srfutil = dst_srf * np.float64 (dst_mskutil)
dst_srfutil_sum = np.sum( dst_srfutil)

# Cell corner longitudes and latitudes, needed for the SCRIP file
src_clo = f_grids ['t' + src_name + '.clo']
src_cla = f_grids ['t' + src_name + '.cla']
dst_clo = f_grids ['t' + dst_name + '.clo']
dst_cla = f_grids ['t' + dst_name + '.cla']

# Grid dimensions and flattened sizes
src_jpj, src_jpi = src_lat.shape
src_grid_size = src_jpj * src_jpi
dst_jpj, dst_jpi = dst_lat.shape
dst_grid_size = dst_jpj * dst_jpi
# Fortran indices (starting at one) of the destination grid points
orc_index = np.arange (dst_grid_size, dtype=np.int32) + 1
201
### ===== Reading needed data ==================================================
if myargs.type in ['iceberg', 'iceshelf' ]:
    # Reading data file for calving or iceberg geometry around Antarctica
    print ( 'Opening ' + myargs.repartition_file)
    f_repartition = xr.open_dataset ( myargs.repartition_file )
    # Sum over the first (time) axis : 2D repartition pattern
    repartition = np.sum ( f_repartition.variables [repartitionVar], axis=0 )

print (' ')

### ===== Starting loop on basins ==============================================

# Accumulators for the sparse remapping matrix and its addresses.
# (Fixed : these three arrays were previously initialised twice.)
remap_matrix = np.empty ( shape=(0), dtype=np.float64 )
src_address  = np.empty ( shape=(0), dtype=np.int32   )
dst_address  = np.empty ( shape=(0), dtype=np.int32   )

# Per-basin mask and repartition key, on the destination grid
basin_msk       = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float32)
key_repartition = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float64)
225
## Loop over the calving basins (latitude bands)
for n_bas in np.arange ( nb_zone ) :
    south = False ; ok_run = False
    lat_south = np.min(limit_lat[n_bas]) ; lat_north = np.max(limit_lat[n_bas])
    # A band reaching 60S or further south is the 'south' (Antarctic) basin
    if lat_south <= -60.0 : south = True

    print ( 'basin: {:2d} -- Latitudes: {:+.1f} {:+.1f} --'.format(n_bas, lat_south, lat_north) )
    ## Decide whether this basin is active for the requested distribution type
    if myargs.type == 'iceberg'  and     south : ok_run = True  ; print ('Applying iceberg to south' )
    if myargs.type == 'iceshelf' and     south : ok_run = True  ; print ('Applying iceshelf to south' )
    if myargs.type == 'iceberg'  and not south : ok_run = False ; print ('Skipping iceberg: not south ')
    if myargs.type == 'iceshelf' and not south : ok_run = False ; print ('Skipping iceshelf: not south ')
    if myargs.type == 'nosouth'  and     south : ok_run = False ; print ('Skipping south: nosouth case' )
    if myargs.type == 'nosouth'  and not south : ok_run = True  ; print ('Running: not in south, uniform repartition')
    if myargs.type == 'full'                   : ok_run = True  ; print ('Running general trivial case, uniform repartition' )

    if ok_run :
        # Calving integral send to one point per latitude bands
        index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1

        # Basin mask : ocean points inside the latitude band, 0 elsewhere
        basin_msk[n_bas] = np.where ( (dst_lat > lat_south ) & (dst_lat <= lat_north ), dst_mskutil, 0 )

        # Repartition pattern : data driven for iceberg/iceshelf, uniform otherwise
        if myargs.type in ['iceberg', 'iceshelf' ] : key_repartition[n_bas] = repartition * basin_msk[n_bas]
        else :                                       key_repartition[n_bas] =               basin_msk[n_bas]

        # Integral and normalisation
        sum_repartition = np.sum ( key_repartition[n_bas] * dst_srfutil ).values
        # Fixed : normalise the current basin only. The previous code rebound
        # the WHOLE array (key_repartition = key_repartition / sum_repartition),
        # which re-divided the keys of basins already processed when several
        # basins are active (types 'full' and 'nosouth').
        key_repartition[n_bas] = key_repartition[n_bas] / sum_repartition

        print ( 'Sum of repartition key                      : {:12.3e}'.format (np.sum (key_repartition[n_bas]               )) )
        print ( 'Integral (area weighted) of repartition key : {:12.3e}'.format (np.sum (key_repartition[n_bas] * dst_srfutil ).values) )

        # Basin surface (modulated by repartition key) ; equals 1 after normalisation
        basin_srfutil = np.sum ( key_repartition[n_bas] * dst_srfutil )

        # Weights and links
        poids = 1.0 / basin_srfutil
        matrix_local   = np.where ( basin_msk[n_bas].ravel() > 0.5, key_repartition[n_bas].ravel()*poids.values, 0. )
        matrix_local   = matrix_local[matrix_local > 0.0] # Keep only non zero values
        num_links      = np.count_nonzero (matrix_local)
        # Address on source grid : all links points to the same LMDZ point.
        src_address_local = np.ones(num_links, dtype=np.int32 )*index_src
        # Address on destination grid : each NEMO point with non zero link
        dst_address_local = np.where ( key_repartition[n_bas].ravel() > 0.0, orc_index, 0)
        dst_address_local = dst_address_local[dst_address_local > 0]
        # Append to global tabs
        remap_matrix = np.append ( remap_matrix, matrix_local      )
        src_address  = np.append ( src_address , src_address_local )
        dst_address  = np.append ( dst_address , dst_address_local )
        #
        print ( 'Point in atmospheric grid : {:4d} -- num_links: {:6d}'.format(index_src, num_links) )
        print ('  ')
281
282
283
## End of loop
print (' ')

# Total number of non-zero links in the remapping matrix
num_links = np.count_nonzero (remap_matrix)
print ( 'Total num_links : {:10d}'.format(num_links) )

# Diag : interpolate a uniform field (the basin number) through the weights
src_field = np.zeros(shape=(src_grid_size))
for n_bas in range ( nb_zone ) :
    index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1
    src_field[index_src-1] = n_bas

# Apply the sparse matrix link by link (all stored weights are non zero,
# so iterating the full arrays is equivalent to iterating num_links entries)
dst_field = np.zeros(shape=(dst_grid_size,))
for weight, adr_src, adr_dst in zip (remap_matrix, src_address, dst_address) :
    dst_field [adr_dst-1] += weight * src_field[adr_src-1]
300
### ===== Writing the weights file, for OASIS MCT ==========================================

# Output file
calving = myargs.output

print ('Output file: ' + calving )

# Remapping matrix and addresses, SCRIP convention (Fortran, 1-based)
remap_matrix = xr.DataArray (np.reshape(remap_matrix, (num_links, 1)), dims = ['num_links', 'num_wgts'] )
src_address  = xr.DataArray (src_address.astype(np.int32) , dims = ['num_links',] )
src_address.attrs['convention'] = "Fortran style addressing, starting at 1"
dst_address  = xr.DataArray (dst_address.astype(np.int32) , dims = ['num_links',] )
dst_address.attrs['convention'] = "Fortran style addressing, starting at 1"

# -- Source grid description
# Fixed : dims were written as (jpi, jpi) ; the second element is now jpj
src_grid_dims       = xr.DataArray ( np.array (( src_jpi, src_jpj ), dtype=np.int32), dims =  ['src_grid_rank',] )
src_grid_center_lon = xr.DataArray ( src_lon.values.ravel(), dims = ['src_grid_size',] )
src_grid_center_lat = xr.DataArray ( src_lat.values.ravel(), dims = ['src_grid_size',] )
# (Fixed : long_name was assigned twice with different values ; kept the final one)
src_grid_center_lon.attrs['units']='degrees_east'  ; src_grid_center_lon.attrs['long_name']='longitude' ; src_grid_center_lon.attrs['bounds']="src_grid_corner_lon"
src_grid_center_lat.attrs['units']='degrees_north' ; src_grid_center_lat.attrs['long_name']='latitude'  ; src_grid_center_lat.attrs['bounds']="src_grid_corner_lat"
# NOTE(review): assumes the corner arrays are laid out so that a direct
# reshape to (grid_size, corners) is valid — confirm against grids.nc
src_grid_corner_lon = xr.DataArray ( src_clo.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ), dims = ['src_grid_size', 'src_grid_corners'] )
src_grid_corner_lat = xr.DataArray ( src_cla.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ), dims = ['src_grid_size', 'src_grid_corners' ] )
src_grid_corner_lon.attrs['units']="degrees_east"
src_grid_corner_lat.attrs['units']="degrees_north"
src_grid_area       = xr.DataArray ( src_srf.values.ravel(), dims = ['src_grid_size',] )
src_grid_area.attrs['long_name']="Grid area" ; src_grid_area.attrs['standard_name']="cell_area" ; src_grid_area.attrs['units']="m2"
src_grid_imask      = xr.DataArray ( src_msk.values.ravel().astype(np.int32) , dims = ['src_grid_size',] )
src_grid_imask.attrs['long_name']="Land-sea mask" ; src_grid_imask.attrs['units']="Land:1, Ocean:0"

# -- Destination grid description
# Fixed : dims were written as (jpi, jpi) ; the second element is now jpj
dst_grid_dims       = xr.DataArray ( np.array(( dst_jpi, dst_jpj), dtype=np.int32) , dims = ['dst_grid_rank', ])
dst_grid_center_lon = xr.DataArray ( dst_lon.values.ravel(), dims = ['dst_grid_size', ])
dst_grid_center_lat = xr.DataArray ( dst_lat.values.ravel(), dims = ['dst_grid_size', ])
dst_grid_center_lon.attrs['units']='degrees_east'  ; dst_grid_center_lon.attrs['long_name']='longitude' ; dst_grid_center_lon.attrs['bounds']="dst_grid_corner_lon"
dst_grid_center_lat.attrs['units']='degrees_north' ; dst_grid_center_lat.attrs['long_name']='latitude'  ; dst_grid_center_lat.attrs['bounds']="dst_grid_corner_lat"
dst_grid_corner_lon = xr.DataArray ( dst_clo.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ), dims = ['dst_grid_size', 'dst_grid_corners'] )
dst_grid_corner_lat = xr.DataArray ( dst_cla.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ), dims = ['dst_grid_size', 'dst_grid_corners'] )
dst_grid_corner_lon.attrs['units']="degrees_east"
dst_grid_corner_lat.attrs['units']="degrees_north"
dst_grid_area       = xr.DataArray ( dst_srf.values.ravel(), dims = ['dst_grid_size', ] )
dst_grid_area.attrs['long_name']="Grid area" ; dst_grid_area.attrs['standard_name']="cell_area" ; dst_grid_area.attrs['units']="m2"
dst_grid_imask      = xr.DataArray ( dst_msk.values.ravel(), dims =  ['dst_grid_size',]  )
dst_grid_imask.attrs['long_name']="Land-sea mask" ; dst_grid_imask.attrs['units']="Land:1, Ocean:0"

# Per-basin diagnostics
dst_bassin      = xr.DataArray ( basin_msk.reshape ( (nb_zone,dst_grid_size) ) , dims = ['nb_zone', 'dst_grid_size',  ] )
dst_repartition = xr.DataArray ( key_repartition.reshape( (nb_zone,dst_grid_size) ) , dims = ['nb_zone', 'dst_grid_size',  ] )

# Fixed : the north limit previously used np.min, duplicating the south limit
dst_southLimit = xr.DataArray ( np.min(limit_lat, axis=(1,)).astype(np.float32) , dims = ['nb_zone', ] )
dst_northLimit = xr.DataArray ( np.max(limit_lat, axis=(1,)).astype(np.float32) , dims = ['nb_zone', ] )

# Additionnal fields for debugging
# ================================
dst_grid_imaskutil  = xr.DataArray ( dst_mskutil.ravel().astype(np.float32), dims = ['dst_grid_size',] )
dst_grid_imaskutil.attrs['long_name']="Land-sea mask" ; dst_grid_imaskutil.attrs['units']="Land:1, Ocean:0"
dst_grid_imaskclose = xr.DataArray ( dst_closed.values.ravel(), dims = ['dst_grid_size',] )
dst_grid_imaskclose.attrs['long_name']="Land-sea mask" ; dst_grid_imaskclose.attrs['units']="Land:1, Ocean:0"

# Coordinates, areas and masks of the addressed (linked) destination points
dst_lon_addressed = xr.DataArray ( dst_lon.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] )
dst_lat_addressed = xr.DataArray ( dst_lat.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] )
dst_lon_addressed.attrs['long_name']="Longitude" ; dst_lon_addressed.attrs['standard_name']="longitude" ; dst_lon_addressed.attrs['units']="degrees_east"
dst_lat_addressed.attrs['long_name']="Latitude"  ; dst_lat_addressed.attrs['standard_name']="latitude"  ; dst_lat_addressed.attrs['units']="degrees_north"
dst_area_addressed  = xr.DataArray ( dst_srf.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] )
# Fixed : the units attribute was mistakenly set on dst_grid_area
dst_area_addressed.attrs['long_name']="Grid area" ; dst_area_addressed.attrs['standard_name']="cell_area" ; dst_area_addressed.attrs['units']="m2"
dst_imask_addressed = xr.DataArray ( dst_msk.values.ravel()[dst_address-1].ravel(), dims = ['num_links', ] )
dst_imask_addressed.attrs['long_name']="Land-sea mask" ; dst_imask_addressed.attrs['units']="Land:1, Ocean:0"
dst_imaskutil_addressed = xr.DataArray ( dst_mskutil.ravel()[dst_address-1].astype(np.float32), dims = ['num_links'] )
dst_imaskutil_addressed.attrs['long_name']="Land-sea mask" ; dst_imaskutil_addressed.attrs['units']="Land:1, Ocean:0"

# Diagnostic fields computed above
src_field = xr.DataArray ( src_field, dims = ['src_grid_size',] )
dst_field = xr.DataArray ( dst_field, dims = ['dst_grid_size',] )
373
# Gather everything in one dataset, SCRIP-like layout
f_calving =  xr.Dataset ( {
    'remap_matrix'            : remap_matrix,
    'src_address'             : src_address,
    'dst_address'             : dst_address,
    'src_grid_dims'           : src_grid_dims,
    # Fixed : 'src_grid_center_lon' previously pointed to src_grid_center_lat
    'src_grid_center_lon'     : src_grid_center_lon,
    'src_grid_center_lat'     : src_grid_center_lat,
    'src_grid_corner_lon'     : src_grid_corner_lon,
    'src_grid_corner_lat'     : src_grid_corner_lat,
    'src_grid_area'           : src_grid_area,
    'src_grid_imask'          : src_grid_imask,
    'dst_grid_dims'           : dst_grid_dims,
    'dst_grid_center_lon'     : dst_grid_center_lon,
    'dst_grid_center_lat'     : dst_grid_center_lat,
    'dst_grid_corner_lon'     : dst_grid_corner_lon,
    'dst_grid_corner_lat'     : dst_grid_corner_lat,
    'dst_grid_area'           : dst_grid_area,
    'dst_grid_imask'          : dst_grid_imask,
    'dst_bassin'              : dst_bassin,
    'dst_repartition'         : dst_repartition,
    'dst_southLimit'          : dst_southLimit,
    'dst_northLimit'          : dst_northLimit,
    'dst_grid_imaskutil'      : dst_grid_imaskutil,
    'dst_grid_imaskclose'     : dst_grid_imaskclose,
    'dst_lon_addressed'       : dst_lon_addressed,
    'dst_lat_addressed'       : dst_lat_addressed,
    'dst_area_addressed'      : dst_area_addressed,
    'dst_imask_addressed'     : dst_imask_addressed,
    'dst_imaskutil_addressed' : dst_imaskutil_addressed,
    'src_field'               : src_field,
    'dst_field'               : dst_field,
    } )
406
# Global attributes (provenance and traceability)
f_calving.attrs['Conventions']     = "CF-1.6"
f_calving.attrs['source']          = "IPSL Earth system model"
f_calving.attrs['group']           = "ICMC IPSL Climate Modelling Center"
# Fixed : 'https.//' typo in the institution URL
f_calving.attrs['Institution']     = "IPSL https://www.ipsl.fr"
f_calving.attrs['Ocean']           = dst_Name + " https://www.nemo-ocean.eu"
f_calving.attrs['Atmosphere']      = src_Name + " http://lmdz.lmd.jussieu.fr"
if myargs.type in ['iceberg', 'iceshelf' ]: f_calving.attrs['originalFiles'] = myargs.repartition_file
f_calving.attrs['associatedFiles'] = grids + " " + areas + " " + masks
f_calving.attrs['directory']       = os.getcwd ()
f_calving.attrs['description']     = "Generated with XIOS http://forge.ipsl.jussieu.fr/ioserver and MOSAIX https://forge.ipsl.jussieu.fr/igcmg/browser/TOOLS/MOSAIX"
f_calving.attrs['title']           = calving
f_calving.attrs['Program']         = "Generated by " + sys.argv[0] + " with flags " + str(sys.argv[1:])
f_calving.attrs['repartitionType'] = myargs.type
if myargs.type in [ 'iceberg', 'iceshelf' ] :
    f_calving.attrs['repartitionFile'] = myargs.repartition_file
    f_calving.attrs['repartitionVar']  = repartitionVar
f_calving.attrs['gridsFile']       = grids
f_calving.attrs['areasFile']       = areas
f_calving.attrs['masksFile']       = masks
f_calving.attrs['timeStamp']       = time.asctime()
f_calving.attrs['HOSTNAME']        = platform.node()
# os.getlogin () raises OSError when there is no controlling terminal
# (e.g. batch jobs) : fall back to the environment
try :
    f_calving.attrs['LOGNAME'] = os.getlogin()
except OSError :
    f_calving.attrs['LOGNAME'] = os.environ.get ('LOGNAME', 'unknown')
f_calving.attrs['Python']          = "Python version " +  platform.python_version()
f_calving.attrs['OS']              = platform.system()
f_calving.attrs['release']         = platform.release()
f_calving.attrs['hardware']        = platform.machine()
f_calving.attrs['conventions']     = "SCRIP"
if src_name == 'lmd' : f_calving.attrs['source_grid'] = "curvilinear"
if src_name == 'ico' : f_calving.attrs['source_grid'] = "unstructured"
f_calving.attrs['dest_grid']       = "curvilinear"
f_calving.attrs['Model']           = "IPSL CM6"
f_calving.attrs['SVN_Author']      = "$Author$"
f_calving.attrs['SVN_Date']        = "$Date$"
f_calving.attrs['SVN_Revision']    = "$Revision$"
f_calving.attrs['SVN_Id']          = "$Id$"
f_calving.attrs['SVN_HeadURL']     = "$HeadURL$"

## Write the weights file and close it
f_calving.to_netcdf ( calving, mode='w', format=FmtNetcdf )
f_calving.close()

# Fixed : '' inside a single-quoted string is adjacent-string concatenation,
# not an escaped quote -- the message printed as "Thats all folks !"
print ("That's all folks !")
## ======================================================================================
Note: See TracBrowser for help on using the repository browser.