source: TOOLS/MOSAIX/CalvingWeights.py @ 6620

Last change on this file since 6620 was 6190, checked in by omamce, 2 years ago

O.M. : Evolution on MOSAIX

  • Add licensing information
  • Update and beautify README.md
  • Add a few model versions to the known list
  • Property svn:keywords set to Date Revision HeadURL Author Id
File size: 23.4 KB
### ===========================================================================
###
### Compute calving weights.
###
### ===========================================================================
##
##  MOSAIX is under the CeCILL_V2 licence. See the "Licence_CeCILL_V2-en.txt"
##  file for an English version of the licence and
##  "Licence_CeCILL_V2-fr.txt" for a French version.
##
##  Permission is hereby granted, free of charge, to any person or
##  organization obtaining a copy of the software and accompanying
##  documentation covered by this license (the "Software") to use,
##  reproduce, display, distribute, execute, and transmit the
##  Software, and to prepare derivative works of the Software, and to
##  permit third-parties to whom the Software is furnished to do so,
##  all subject to the following:
##
##  Warning : to install, configure, run, use any of the MOSAIX software or
##  to read the associated documentation, you'll need at least one (1)
##  brain in reasonably working order. Lack of this implement will
##  void any warranties (either express or implied). The authors assume
##  no responsibility for errors, omissions, data loss, or any other
##  consequences caused directly or indirectly by the usage of this
##  software by incorrectly or partially configured users.
##
##
import numpy as np, xarray as xr
import sys, os, platform, argparse, textwrap, time
from scipy import ndimage
import nemo

## SVN information
__Author__   = "$Author$"
__Date__     = "$Date$"
__Revision__ = "$Revision$"
__Id__       = "$Id$"
__HeadURL__  = "$HeadURL$"
__SVN_Date__ = "$SVN_Date: $"

###

### ===== Handling command line parameters ==================================================
# Creating a parser
parser = argparse.ArgumentParser (
    description = """Compute calving weights""",
    epilog='-------- End of the help message --------')
48
49# Adding arguments
50parser.add_argument ('--oce'     , help='oce model name', type=str, default='eORCA1.2',
51                         choices=['ORCA2.3', 'ORCA1.0', 'ORCA1.1', 'ORCA1_CNRM', 'eORCA1.2', 'eORCA1.4', 'eORCA1.4.2', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] )
52parser.add_argument ('--atm'     , help='atm model name (ICO* or LMD*)', type=str, default='ICO40'    )
53parser.add_argument ('--type'    , help='Type of distribution', type=str, choices=['iceshelf', 'iceberg', 'nosouth', 'full'], default='full'  )
54## parser.add_argument ('--dir'     , help='Directory of input file', type=str, default='./' )
55parser.add_argument ('--repartition_file', help='Data files with iceberg melting' , type=str, default='./runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc' )
56parser.add_argument ('--repartition_var' , help='Variable name for iceshelf'      , type=str, default=None)
57parser.add_argument ('--grids' , help='grids file name', default='grids.nc' )
58parser.add_argument ('--areas' , help='masks file name', default='areas.nc' )
59parser.add_argument ('--masks' , help='areas file name', default='masks.nc' )
60parser.add_argument ('--o2a'   , help='o2a file name'  , default='o2a.nc'   )
61parser.add_argument ('--output'  , help='output rmp file name', default='rmp_tlmd_to_torc_calving.nc' )
62parser.add_argument ('--fmt'     , help='NetCDF file format, using nco syntax', default='netcdf4',
63                         choices=['classic', 'netcdf3', '64bit', '64bit_data', '64bit_data', 'netcdf4', 'netcdf4_classsic'] )
64parser.add_argument ('--ocePerio', help='periodicity of ocean grid', type=float, default=0, choices=nemo.nperio_valid_range )
65
# Parse command line
myargs = parser.parse_args()

# Model Names
src_Name = myargs.atm ; dst_Name = myargs.oce

# Default vars
if myargs.repartition_var is None :
    # Runoff from Antarctic ice shelves (Depoorter, 2013)
    if myargs.type == 'iceshelf' : repartitionVar = 'sornfisf'

    # Runoff from Antarctic icebergs (Depoorter, 2013)
    if myargs.type == 'iceberg' :  repartitionVar = 'Icb_Flux'

else :                             repartitionVar = myargs.repartition_var

# Latitude limits of each calving zone
limit_lat = ( (40.0, 90.0), (-50.0, 40.0), ( -90.0, -50.0) )

nb_zone = len(limit_lat)

if myargs.fmt == 'classic'         : FmtNetcdf = 'CLASSIC'
if myargs.fmt == 'netcdf3'         : FmtNetcdf = 'CLASSIC'
if myargs.fmt == '64bit'           : FmtNetcdf = 'NETCDF3_64BIT_OFFSET'
if myargs.fmt == '64bit_data'      : FmtNetcdf = 'NETCDF3_64BIT_DATA'
if myargs.fmt == '64bit_offset'    : FmtNetcdf = 'NETCDF3_64BIT_OFFSET'
if myargs.fmt == 'netcdf4'         : FmtNetcdf = 'NETCDF4'
if myargs.fmt == 'netcdf4_classic' : FmtNetcdf = 'NETCDF4_CLASSIC'
### ==========================================================================================

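## For illustration only, a typical invocation could be (the file names below are
## hypothetical, adapt them to your setup) :
##
##   python CalvingWeights.py --oce eORCA1.2 --atm ICO40 --type iceberg \
##          --repartition_file runoff-icb_DaiTrenberth_Depoorter_eORCA1_JD.nc \
##          --grids grids.nc --areas areas.nc --masks masks.nc \
##          --output rmp_tico_to_torc_calving.nc
##
## With --type iceberg or iceshelf the southernmost zone is weighted by the melting
## pattern read from --repartition_file ; with --type full or nosouth the repartition
## is uniform over each latitude band (see the loop on basins below).
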
# Model short names
src_name = None ; dst_name = None
if src_Name.count('ICO')  != 0 : src_name = 'ico'
if src_Name.count('LMD')  != 0 : src_name = 'lmd'
if dst_Name.count('ORCA') != 0 : dst_name = 'orc'

CplModel = dst_Name + 'x' + src_Name

print ('Atm names : ' + src_name + ' ' + src_Name )
print ('Oce names : ' + dst_name + ' ' + dst_Name )
print (' ')

# Reading domain characteristics
grids = myargs.grids
areas = myargs.areas
masks = myargs.masks
o2a   = myargs.o2a

print ('Opening ' + areas)
f_areas = xr.open_dataset ( areas )
print ('Opening ' + masks)
f_masks = xr.open_dataset ( masks )
print ('Opening ' + grids)
f_grids = xr.open_dataset ( grids )

src_lon = f_grids ['t' + src_name + '.lon']
src_lat = f_grids ['t' + src_name + '.lat']
dst_lon = f_grids ['t' + dst_name + '.lon']
dst_lat = f_grids ['t' + dst_name + '.lat']

src_msk = f_masks ['t' + src_name + '.msk']
dst_msk = f_masks ['t' + dst_name + '.msk']
dst_mskutil = 1-dst_msk # Reversing the convention : 0 on continent, 1 on ocean
print ('dst_msk     : ', dst_msk.sum().values )
print ('dst_mskutil : ', dst_mskutil.sum().values )

# Ocean grid periodicity
nperio_dst = myargs.ocePerio

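# nperio_dst encodes the NEMO grid periodicity / north-fold type expected by
# nemo.lbc and nemo.lbc_mask below. When --ocePerio is not given (default 0), it is
# guessed from the configuration name : the hard-coded values used here are 4 for
# ORCA2-type grids and 6 for eORCA1 / eORCA025-type grids.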
# Periodicity masking for NEMO
if nperio_dst == 0 :
    if dst_Name in ['ORCA2.3', 'ORCA2.4']            : nperio_dst = 4
    if dst_Name in ['eORCA1.2', 'eORCA1.3']          : nperio_dst = 6
    if dst_Name in ['ORCA025', 'eORCA025', 'eORCA025.1', 'eORCA025.4'] : nperio_dst = 6

print ('nperio_dst: ' + str(nperio_dst) )
dst_mskutil = nemo.lbc_mask (dst_mskutil, nperio=nperio_dst, cd_type='T', sval=0 )
print ('dst_mskutil : ' + str(np.sum(dst_mskutil)))

##
## Fill closed seas and other enclosed basins
## ==================================================

# Preparation by closing some straits
# -----------------------------------
if dst_Name in [ 'eORCA025', 'eORCA025.1', 'eORCA025.4' ] :
    print ('Filling some seas for ' + dst_Name )
    # Set Gibraltar Strait to 0 to fill the Mediterranean Sea
    dst_mskutil[838:841,1125] = 0
    # Set Bab-el-Mandeb to 0 to fill the Red Sea
    dst_mskutil[736,1321:1324] = 0
    # Set Skagerrak to 0 to fill the Baltic Sea
    dst_mskutil[961,1179:1182] = 0
    # Set the Strait of Hormuz to 0 to fill the Arabo-Persian Gulf
    dst_mskutil[794:798,1374] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[997:1012,907] = 0

if dst_Name in [ 'eORCA1.2', 'eORCA1.3', 'eORCA1.4', 'eORCA1.4.2' ] :
    print ('Filling some seas for ' + dst_Name)
    # Set Gibraltar Strait to 0 to fill the Mediterranean Sea
    dst_mskutil[240, 283] = 0
    # Set Bab-el-Mandeb to 0 to fill the Red Sea
    dst_mskutil[211:214, 332] = 0
    # Set Skagerrak to 0 to fill the Baltic Sea
    dst_mskutil[272:276, 293] = 0
    # Set the Strait of Hormuz to 0 to fill the Arabo-Persian Gulf
    dst_mskutil[227:230, 345] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[284,222:225] = 0

if dst_Name in [ 'ORCA2.3', 'ORCA2.4' ] :
    print ('Filling some seas for ' + dst_Name)
    # Set Gibraltar Strait to 0 to fill the Mediterranean Sea
    dst_mskutil[101,139] = 0
    # Set the Black Sea to zero. At the edge of the domain : binary_fill_holes fails
    dst_mskutil[ 99:111,   0:  5] = 0
    dst_mskutil[106:111, 173:182] = 0
    # Set Skagerrak to 0 to fill the Baltic Sea
    dst_mskutil[115,144] = 0
    # Set Hudson Strait to 0 to fill Hudson Bay
    dst_mskutil[120:123,110] = 0
    # Set Bab-el-Mandeb to 0 to fill the Red Sea
    dst_mskutil[87:89,166] = 0

dst_closed = dst_mskutil

# Fill closed seas with image processing library
# ----------------------------------------------
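# How the fill works : the mask is inverted (land=1, ocean=0) so that
# ndimage.binary_fill_holes fills every ocean region fully enclosed by land,
# i.e. the seas closed by the straits above, then the result is inverted back.
# nemo.lbc / nemo.lbc_mask apply the NEMO lateral boundary conditions before and
# after the fill (presumably so that the cyclic and north-fold edges stay consistent).
# dst_closed, saved above, keeps the pre-fill mask (written later as dst_grid_imaskclose).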
dst_mskutil = nemo.lbc_mask ( 1-ndimage.binary_fill_holes (1-nemo.lbc(dst_mskutil, nperio=nperio_dst, cd_type='T')), nperio=nperio_dst, cd_type='T', sval=0)

# Surfaces
src_srf = f_areas.variables ['t' + src_name + '.srf']
dst_srf = f_areas.variables ['t' + dst_name + '.srf']
dst_srfutil = dst_srf * np.float64 (dst_mskutil)

dst_srfutil_sum = np.sum( dst_srfutil)

src_clo = f_grids ['t' + src_name + '.clo']
src_cla = f_grids ['t' + src_name + '.cla']
dst_clo = f_grids ['t' + dst_name + '.clo']
dst_cla = f_grids ['t' + dst_name + '.cla']

# Indices
( src_jpj, src_jpi) = src_lat.shape ; src_grid_size = src_jpj*src_jpi
( dst_jpj, dst_jpi) = dst_lat.shape ; dst_grid_size = dst_jpj*dst_jpi
orc_index = np.arange (dst_jpj*dst_jpi, dtype=np.int32) + 1 # Fortran indices (starting at one)

### ===== Reading needed data ==================================================
if myargs.type in ['iceberg', 'iceshelf' ]:
    # Reading data file for calving or iceberg geometry around Antarctica
    print ( 'Opening ' + myargs.repartition_file)
    f_repartition = xr.open_dataset ( myargs.repartition_file )
    repartition = np.sum ( f_repartition.variables [repartitionVar], axis=0 )

## Before loop on basins
remap_matrix = np.empty ( shape=(0), dtype=np.float64 )
src_address  = np.empty ( shape=(0), dtype=np.int32   )
dst_address  = np.empty ( shape=(0), dtype=np.int32   )

print (' ')

### ===== Starting loop on basins ==============================================

# Initialise some fields
remap_matrix = np.empty ( shape=(0), dtype=np.float64 )
src_address  = np.empty ( shape=(0), dtype=np.int32   )
dst_address  = np.empty ( shape=(0), dtype=np.int32   )

basin_msk       = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float32)
key_repartition = np.zeros( shape=(nb_zone, dst_jpj, dst_jpi), dtype=np.float64)
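# basin_msk[n]       : 0/1 ocean mask of latitude band n on the destination grid
# key_repartition[n] : repartition pattern over band n, normalised below so that its
#                      area-weighted integral over the band is 1
# remap_matrix, src_address and dst_address accumulate the sparse links (one weight
# plus Fortran-style source/destination indices per link) over the bands.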

## Loop
for n_bas in np.arange ( nb_zone ) :
    south = False ; ok_run = False
    lat_south = np.min(limit_lat[n_bas]) ; lat_north = np.max(limit_lat[n_bas])
    if lat_south <= -60.0 : south = True

    print ( 'basin: {:2d} -- Latitudes: {:+.1f} {:+.1f} --'.format(n_bas, lat_south, lat_north) )
    ##
    if myargs.type == 'iceberg'  and     south : ok_run = True  ; print ('Applying iceberg to south' )
    if myargs.type == 'iceshelf' and     south : ok_run = True  ; print ('Applying iceshelf to south' )
    if myargs.type == 'iceberg'  and not south : ok_run = False ; print ('Skipping iceberg: not south ')
    if myargs.type == 'iceshelf' and not south : ok_run = False ; print ('Skipping iceshelf: not south ')
    if myargs.type == 'nosouth'  and     south : ok_run = False ; print ('Skipping south: nosouth case' )
    if myargs.type == 'nosouth'  and not south : ok_run = True  ; print ('Running: not in south, uniform repartition')
    if myargs.type == 'full'                   : ok_run = True  ; print ('Running general trivial case, uniform repartition' )

    if ok_run :
        # Calving integral sent to one point per latitude band
        index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1
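        # This spreads the nb_zone source points evenly over the atmospheric grid :
        # n_bas=0 maps to point 1 and n_bas=nb_zone-1 to point src_grid_size (1-based).
        # For instance, with src_grid_size=16000 and nb_zone=3 the three latitude
        # bands use points 1, 8000 and 16000.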

        # Basin mask
        basin_msk[n_bas] = np.where ( (dst_lat > lat_south ) & (dst_lat <= lat_north ), dst_mskutil, 0 )

        # Repartition pattern
        if myargs.type in ['iceberg', 'iceshelf' ] : key_repartition[n_bas] = repartition * basin_msk[n_bas]
        else :                                       key_repartition[n_bas] =               basin_msk[n_bas]

        # Integral and normalisation
        sum_repartition = np.sum ( key_repartition[n_bas] * dst_srfutil ).values
        key_repartition[n_bas] = key_repartition[n_bas] / sum_repartition

        print ( 'Sum of repartition key                      : {:12.3e}'.format (np.sum (key_repartition[n_bas]               )) )
        print ( 'Integral (area weighted) of repartition key : {:12.3e}'.format (np.sum (key_repartition[n_bas] * dst_srfutil ).values) )

        # Basin surface (modulated by repartition key)
        basin_srfutil = np.sum ( key_repartition[n_bas] * dst_srfutil )

        # Weights and links
        poids = 1.0 / basin_srfutil
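        # Each link weight is key / sum(key * area), hence over the basin
        # sum_k (weight_k * dst_area_k) = 1 : the area integral of the remapped field
        # equals the value sent from the single atmospheric point.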
        matrix_local   = np.where ( basin_msk[n_bas].ravel() > 0.5, key_repartition[n_bas].ravel()*poids.values, 0. )
        matrix_local   = matrix_local[matrix_local > 0.0] # Keep only non zero values
        num_links      = np.count_nonzero (matrix_local)
        # Address on source grid : all links point to the same atmospheric point
        src_address_local = np.ones(num_links, dtype=np.int32 )*index_src
        # Address on destination grid : each NEMO point with a non zero link
        dst_address_local = np.where ( key_repartition[n_bas].ravel() > 0.0, orc_index, 0)
        dst_address_local = dst_address_local[dst_address_local > 0]
        # Append to global tabs
        remap_matrix = np.append ( remap_matrix, matrix_local      )
        src_address  = np.append ( src_address , src_address_local )
        dst_address  = np.append ( dst_address , dst_address_local )
        #
        #print ( 'Sum of remap_matrix : {:12.3e}'.format(np.sum(matrix_local)) )
        print ( 'Point in atmospheric grid : {:4d} -- num_links: {:6d}'.format(index_src, num_links) )
        print ('  ')


## End of loop
print (' ')

#
num_links = np.count_nonzero (remap_matrix)
print ( 'Total num_links : {:10d}'.format(num_links) )

# Diag : interpolate a test field (basin index at each source point)
src_field = np.zeros(shape=(src_grid_size))
for n_bas in np.arange ( nb_zone ) :
    index_src = ((src_grid_size - 1)*n_bas) // (nb_zone-1) + 1
    src_field[index_src-1] = n_bas

dst_field = np.zeros(shape=(dst_grid_size,))
for link in np.arange (num_links) :
    dst_field [dst_address [link]-1] +=  remap_matrix[link] * src_field[src_address[link]-1]

### ===== Writing the weights file, for OASIS MCT ==========================================
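# The variables below follow the SCRIP remapping-file layout read by OASIS-MCT :
# remap_matrix holds one weight per link, src_address and dst_address the 1-based
# source and destination cell indices of each link, and a field is remapped as in
# the diagnostic loop above : dst[dst_address[k]-1] += remap_matrix[k] * src[src_address[k]-1].
# The src_grid_* / dst_grid_* variables describe both grids (centres, corners, areas, masks).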

# Output file
calving = myargs.output

print ('Output file: ' + calving )

remap_matrix = xr.DataArray (np.reshape(remap_matrix, (num_links, 1)), dims = ['num_links', 'num_wgts'] )
src_address  = xr.DataArray (src_address.astype(np.int32) , dims = ['num_links',] )
src_address.attrs['convention'] = "Fortran style addressing, starting at 1"
dst_address  = xr.DataArray (dst_address.astype(np.int32) , dims = ['num_links',] )
dst_address.attrs['convention'] = "Fortran style addressing, starting at 1"

src_grid_dims       = xr.DataArray ( np.array (( src_jpi, src_jpj ), dtype=np.int32), dims = ['src_grid_rank',] )
src_grid_center_lon = xr.DataArray ( src_lon.values.ravel(), dims = ['src_grid_size',] )
src_grid_center_lat = xr.DataArray ( src_lat.values.ravel(), dims = ['src_grid_size',] )
src_grid_center_lon.attrs['units']='degrees_east'      ; src_grid_center_lon.attrs['long_name']='Longitude'
src_grid_center_lon.attrs['standard_name']='longitude' ; src_grid_center_lon.attrs['bounds']="src_grid_corner_lon"
src_grid_center_lat.attrs['units']='degrees_north'     ; src_grid_center_lat.attrs['long_name']='Latitude'
src_grid_center_lat.attrs['standard_name']='latitude'  ; src_grid_center_lat.attrs['bounds']="src_grid_corner_lat"
src_grid_corner_lon = xr.DataArray ( src_clo.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ), dims = ['src_grid_size', 'src_grid_corners'] )
src_grid_corner_lat = xr.DataArray ( src_cla.values.reshape( (src_jpi*src_jpj, src_clo.shape[0]) ), dims = ['src_grid_size', 'src_grid_corners'] )
src_grid_corner_lon.attrs['units']="degrees_east"
src_grid_corner_lat.attrs['units']="degrees_north"
src_grid_area       = xr.DataArray ( src_srf.values.ravel(), dims = ['src_grid_size',] )
src_grid_area.attrs['long_name']="Grid area" ; src_grid_area.attrs['standard_name']="cell_area" ; src_grid_area.attrs['units']="m2"
src_grid_imask      = xr.DataArray ( src_msk.values.ravel().astype(np.int32) , dims = ['src_grid_size',] )
src_grid_imask.attrs['long_name']="Land-sea mask" ; src_grid_imask.attrs['units']="Land:1, Ocean:0"

# --
dst_grid_dims       = xr.DataArray ( np.array(( dst_jpi, dst_jpj), dtype=np.int32) , dims = ['dst_grid_rank', ])
dst_grid_center_lon = xr.DataArray ( dst_lon.values.ravel(), dims = ['dst_grid_size', ])
dst_grid_center_lat = xr.DataArray ( dst_lat.values.ravel(), dims = ['dst_grid_size', ])
dst_grid_center_lon.attrs['units']='degrees_east'      ; dst_grid_center_lon.attrs['long_name']='Longitude'
dst_grid_center_lon.attrs['standard_name']='longitude' ; dst_grid_center_lon.attrs['bounds']="dst_grid_corner_lon"
dst_grid_center_lat.attrs['units']='degrees_north'     ; dst_grid_center_lat.attrs['long_name']='Latitude'
dst_grid_center_lat.attrs['standard_name']='latitude'  ; dst_grid_center_lat.attrs['bounds']="dst_grid_corner_lat"
dst_grid_corner_lon = xr.DataArray ( dst_clo.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ), dims = ['dst_grid_size', 'dst_grid_corners'] )
dst_grid_corner_lat = xr.DataArray ( dst_cla.values.reshape(dst_jpi*dst_jpj, dst_clo.shape[0] ), dims = ['dst_grid_size', 'dst_grid_corners'] )
dst_grid_corner_lon.attrs['units']="degrees_east"
dst_grid_corner_lat.attrs['units']="degrees_north"
dst_grid_area       = xr.DataArray ( dst_srf.values.ravel(), dims = ['dst_grid_size', ] )
dst_grid_area.attrs['long_name']="Grid area" ; dst_grid_area.attrs['standard_name']="cell_area" ; dst_grid_area.attrs['units']="m2"
dst_grid_imask      = xr.DataArray ( dst_msk.values.ravel(), dims = ['dst_grid_size',]  )
dst_grid_imask.attrs['long_name']="Land-sea mask" ; dst_grid_imask.attrs['units']="Land:1, Ocean:0"

dst_bassin      = xr.DataArray ( basin_msk.reshape ( (nb_zone,dst_grid_size) ) , dims = ['nb_zone', 'dst_grid_size',  ] )
dst_repartition = xr.DataArray ( key_repartition.reshape( (nb_zone,dst_grid_size) ) , dims = ['nb_zone', 'dst_grid_size',  ] )

dst_southLimit = xr.DataArray ( np.min(limit_lat, axis=(1,)).astype(np.float32) , dims = ['nb_zone', ] )
dst_northLimit = xr.DataArray ( np.max(limit_lat, axis=(1,)).astype(np.float32) , dims = ['nb_zone', ] )

# Additional fields for debugging
# ================================
dst_grid_imaskutil  = xr.DataArray ( dst_mskutil.ravel().astype(np.float32), dims = ['dst_grid_size',] )
dst_grid_imaskutil.attrs['long_name']="Land-sea mask" ; dst_grid_imaskutil.attrs['units']="Land:1, Ocean:0"
dst_grid_imaskclose = xr.DataArray ( dst_closed.values.ravel(), dims = ['dst_grid_size',] )
dst_grid_imaskclose.attrs['long_name']="Land-sea mask" ; dst_grid_imaskclose.attrs['units']="Land:1, Ocean:0"

dst_lon_addressed = xr.DataArray ( dst_lon.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] )
dst_lat_addressed = xr.DataArray ( dst_lat.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] )
dst_lon_addressed.attrs['long_name']="Longitude" ; dst_lon_addressed.attrs['standard_name']="longitude" ; dst_lon_addressed.attrs['units']="degrees_east"
dst_lat_addressed.attrs['long_name']="Latitude"  ; dst_lat_addressed.attrs['standard_name']="latitude"  ; dst_lat_addressed.attrs['units']="degrees_north"
dst_area_addressed  = xr.DataArray ( dst_srf.values.ravel()[dst_address-1].ravel(), dims = ['num_links',] )
dst_area_addressed.attrs['long_name']="Grid area" ; dst_area_addressed.attrs['standard_name']="cell_area" ; dst_area_addressed.attrs['units']="m2"
dst_imask_addressed = xr.DataArray ( dst_msk.values.ravel()[dst_address-1].ravel(), dims = ['num_links', ] )
dst_imask_addressed.attrs['long_name']="Land-sea mask" ; dst_imask_addressed.attrs['units']="Land:1, Ocean:0"
dst_imaskutil_addressed = xr.DataArray ( dst_mskutil.ravel()[dst_address-1].astype(np.float32), dims = ['num_links'] )
dst_imaskutil_addressed.attrs['long_name']="Land-sea mask" ; dst_imaskutil_addressed.attrs['units']="Land:1, Ocean:0"

src_field = xr.DataArray ( src_field, dims = ['src_grid_size',] )
dst_field = xr.DataArray ( dst_field, dims = ['dst_grid_size',] )

f_calving =  xr.Dataset ( {
    'remap_matrix'            : remap_matrix,
    'src_address'             : src_address,
    'dst_address'             : dst_address,
    'src_grid_dims'           : src_grid_dims,
    'src_grid_center_lon'     : src_grid_center_lon,
    'src_grid_center_lat'     : src_grid_center_lat,
    'src_grid_corner_lon'     : src_grid_corner_lon,
    'src_grid_corner_lat'     : src_grid_corner_lat,
    'src_grid_area'           : src_grid_area,
    'src_grid_imask'          : src_grid_imask,
    'dst_grid_dims'           : dst_grid_dims,
    'dst_grid_center_lon'     : dst_grid_center_lon,
    'dst_grid_center_lat'     : dst_grid_center_lat,
    'dst_grid_corner_lon'     : dst_grid_corner_lon,
    'dst_grid_corner_lat'     : dst_grid_corner_lat,
    'dst_grid_area'           : dst_grid_area,
    'dst_grid_imask'          : dst_grid_imask,
    'dst_bassin'              : dst_bassin,
    'dst_repartition'         : dst_repartition,
    'dst_southLimit'          : dst_southLimit,
    'dst_northLimit'          : dst_northLimit,
    'dst_grid_imaskutil'      : dst_grid_imaskutil,
    'dst_grid_imaskclose'     : dst_grid_imaskclose,
    'dst_lon_addressed'       : dst_lon_addressed,
    'dst_lat_addressed'       : dst_lat_addressed,
    'dst_area_addressed'      : dst_area_addressed,
    'dst_imask_addressed'     : dst_imask_addressed,
    'dst_imaskutil_addressed' : dst_imaskutil_addressed,
    'src_field'               : src_field,
    'dst_field'               : dst_field,
    } )

f_calving.attrs['Conventions']     = "CF-1.6"
f_calving.attrs['source']          = "IPSL Earth system model"
f_calving.attrs['group']           = "ICMC IPSL Climate Modelling Center"
f_calving.attrs['Institution']     = "IPSL https://www.ipsl.fr"
f_calving.attrs['Ocean']           = dst_Name + " https://www.nemo-ocean.eu"
f_calving.attrs['Atmosphere']      = src_Name + " http://lmdz.lmd.jussieu.fr"
if myargs.type in ['iceberg', 'iceshelf' ]: f_calving.attrs['originalFiles'] = myargs.repartition_file
f_calving.attrs['associatedFiles'] = grids + " " + areas + " " + masks

f_calving.attrs['description']     = "Generated with XIOS http://forge.ipsl.jussieu.fr/ioserver and MOSAIX https://forge.ipsl.jussieu.fr/igcmg/browser/TOOLS/MOSAIX"
f_calving.attrs['title']           = calving
f_calving.attrs['Program']         = "Generated by " + sys.argv[0] + " with flags " + ' '.join (sys.argv[1:])

f_calving.attrs['repartitionType'] = myargs.type
if myargs.type in [ 'iceberg', 'iceshelf' ] :
    f_calving.attrs['repartitionFile'] = myargs.repartition_file
    f_calving.attrs['repartitionVar']  = repartitionVar
f_calving.attrs['gridsFile']       = grids
f_calving.attrs['areasFile']       = areas
f_calving.attrs['masksFile']       = masks
f_calving.attrs['timeStamp']       = time.asctime()
try    : f_calving.attrs['directory'] = os.getcwd ()
except : pass
try    : f_calving.attrs['HOSTNAME'] = platform.node ()
except : pass
try    : f_calving.attrs['LOGNAME']  = os.getlogin ()
except : pass
try    : f_calving.attrs['Python']   = "Python version " +  platform.python_version ()
except : pass
try    : f_calving.attrs['OS']       = platform.system ()
except : pass
try    : f_calving.attrs['release']  = platform.release ()
except : pass
try    : f_calving.attrs['hardware'] = platform.machine ()
except : pass
f_calving.attrs['conventions']     = "SCRIP"
if src_name == 'lmd' : f_calving.attrs['source_grid'] = "curvilinear"
if src_name == 'ico' : f_calving.attrs['source_grid'] = "unstructured"
f_calving.attrs['dest_grid']       = "curvilinear"
f_calving.attrs['Model']           = "IPSL CM6"
f_calving.attrs['SVN_Author']      = "$Author$"
f_calving.attrs['SVN_Date']        = "$Date$"
f_calving.attrs['SVN_Revision']    = "$Revision$"
f_calving.attrs['SVN_Id']          = "$Id$"
f_calving.attrs['SVN_HeadURL']     = "$HeadURL$"

##
f_calving.to_netcdf ( calving, mode='w', format=FmtNetcdf )
f_calving.close()

print ("That's all folks !")
## ======================================================================================