source: TOOLS/CPLRESTART/create_flxat.py @ 6512

Last change on this file since 6512 was 6512, checked in by omamce, 13 months ago

O.M. : CPLRESTART : Update for RedHat 8

File size: 13.0 KB
### ===========================================================================
###
### Part of CPL Restart IPSL packages
###
### ===========================================================================
##
##  Warning, to install, configure, run, use any of Olivier Marti's
##  software or to read the associated documentation you'll need at least
##  one (1) brain in a reasonably working order. Lack of this implement
##  will void any warranties (either express or implied).
##  O. Marti assumes no responsibility for errors, omissions,
##  data loss, or any other consequences caused directly or indirectly by
##  the usage of his software by incorrectly or partially configured
##  personnel.
##
###
### Documentation : https://forge.ipsl.jussieu.fr/igcmg/wiki/IPSLCM6/MOSAIX
###
## SVN information
#  $Author: omamce $
#  $Date: 2020-09-18 17:02:09 +0200 (Fri, 18 Sep 2020) $
#  $Revision: 5157 $
#  $Id: CreateRestartAtm4Oasis.bash 5157 2020-09-18 15:02:09Z omamce $
#  $HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/CPLRESTART/CreateRestartAtm4Oasis.bash $

import numpy as np, xarray as xr
import sys, argparse, textwrap, time, os, platform

## Reading the command line arguments
#
# Creating a parser
# The first step in using argparse is creating an ArgumentParser object:
parser = argparse.ArgumentParser (description = textwrap.dedent ("""
   create_flxat
   """), epilog='-------- This is the end of the help message --------')

# Adding arguments
parser.add_argument ('-v', '--verbose', help='verbosity', action="store_true", default=False )
parser.add_argument ('-l', '--level', help='verbosity level', type=int, default=0, choices=[0, 1, 2] )
parser.add_argument ('-i', '--input'  , metavar='inputFile' , help="input file" , nargs='?', default="flxat_fields_notime.nc"  )
parser.add_argument ('-o', '--output' , metavar='outputFile', help="output file", nargs='?', default="tmp_flxat.nc" )
parser.add_argument ('-f', '--format' , metavar='NetCDF_Format', help="NetCDF format", nargs='?', default="NETCDF4",
                         choices=["NETCDF4","NETCDF4_CLASSIC", "NETCDF3_64BIT", "NETCDF3_CLASSIC", "64bits"])
parser.add_argument ('--IsUnstructured',  choices=['True', 'False'], required=True )

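# Example invocation (illustrative only; the file names are the defaults defined above):
#   python create_flxat.py -i flxat_fields_notime.nc -o tmp_flxat.nc -f NETCDF4 --IsUnstructured False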
# Parse command line
myargs = parser.parse_args ()

IsUnstructured = myargs.IsUnstructured
if IsUnstructured == 'True' : IsUnstructured = True
else :  IsUnstructured = False

NcFormat = myargs.format
if NcFormat == '64bits' : NcFormat = 'NETCDF4'

## Read Data - promote them to 64 bits
d_In = xr.open_dataset (myargs.input)

lon       = d_In.lon.astype(np.float64).values
lat       = d_In.lat.astype(np.float64).values

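# On an unstructured (e.g. DYNAMICO) grid, lon/lat are 1-D lists of cells and
# missing fields are created with that shape; on a regular lon/lat grid they
# are 1-D axes and the default fields are 2-D (nlat, nlon).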
if IsUnstructured :
    dims = lon.shape
else :
    dims = ( lat.shape[0], lon.shape[0] )

# Try to read variables
# Set them to zero if not possible
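# (fract_oce defaults to one, i.e. all open ocean; every other missing field defaults to zero)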
try    : evap_oce  = d_In.evap_oce[0].squeeze().astype(np.float64).values
except : evap_oce  = np.zeros ( dims )
try    : evap_sic  = d_In.evap_sic[0].squeeze().astype(np.float64).values
except : evap_sic  = np.zeros ( dims )
try    : fract_oce = d_In.fract_oce[0].squeeze().astype(np.float64).values
except : fract_oce = np.ones ( dims )
try    : fract_sic = d_In.fract_sic[0].squeeze().astype(np.float64).values
except : fract_sic = np.zeros ( dims )
try    : precip    = d_In.precip[0].squeeze().astype(np.float64).values
except : precip    = np.zeros ( dims )
try    : snow      = d_In.snow[0].squeeze().astype(np.float64).values
except : snow      = np.zeros ( dims )
try    : soll      = d_In.soll[0].squeeze().astype(np.float64).values
except : soll      = np.zeros ( dims )
try    : sols      = d_In.sols[0].squeeze().astype(np.float64).values
except : sols      = np.zeros ( dims )
try    : taux_oce  = d_In.taux_oce[0].squeeze().astype(np.float64).values
except : taux_oce  = np.zeros ( dims )
try    : taux_sic  = d_In.taux_sic[0].squeeze().astype(np.float64).values
except : taux_sic  = np.zeros ( dims )
try    : tauy_oce  = d_In.tauy_oce[0].squeeze().astype(np.float64).values
except : tauy_oce  = np.zeros ( dims )
try    : tauy_sic  = d_In.tauy_sic[0].squeeze().astype(np.float64).values
except : tauy_sic  = np.zeros ( dims )
try    : wind10m   = d_In.wind10m[0].squeeze().astype(np.float64).values
except : wind10m   = np.zeros ( dims )

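# Unstructured case: add a dummy second dimension of length 1 so that every
# field is (ncell, 1). Regular lon/lat case: build 2-D coordinate arrays by
# broadcasting the 1-D lat and lon axes against each other.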
if IsUnstructured :
    lon       = np.expand_dims ( lon       , axis=1 )
    lat       = np.expand_dims ( lat       , axis=1 )
    evap_oce  = np.expand_dims ( evap_oce  , axis=1 )
    evap_sic  = np.expand_dims ( evap_sic  , axis=1 )
    fract_oce = np.expand_dims ( fract_oce , axis=1 )
    fract_sic = np.expand_dims ( fract_sic , axis=1 )
    precip    = np.expand_dims ( precip    , axis=1 )
    snow      = np.expand_dims ( snow      , axis=1 )
    soll      = np.expand_dims ( soll      , axis=1 )
    sols      = np.expand_dims ( sols      , axis=1 )
    taux_oce  = np.expand_dims ( taux_oce  , axis=1 )
    taux_sic  = np.expand_dims ( taux_sic  , axis=1 )
    tauy_oce  = np.expand_dims ( tauy_oce  , axis=1 )
    tauy_sic  = np.expand_dims ( tauy_sic  , axis=1 )
    wind10m   = np.expand_dims ( wind10m   , axis=1 )
else :
    lon2 = lat [:, np.newaxis]*0 + lon [np.newaxis, :]
    lat2 = lat [:, np.newaxis]   + lon [np.newaxis, :]*0
    lon = lon2 ; lat = lat2

##
yxshape = lat.shape
ny, nx = yxshape

## Computations
np.seterr (divide='ignore', invalid='ignore')
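# np.where evaluates both branches, so the divisions below are also performed
# where the total ocean fraction is zero (land); the resulting warnings are
# silenced here.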

fract_oce_plus_sic  = (fract_oce + fract_sic) ; ## total ocean fraction (open ocean + sea ice)
fract_oce_norm = np.where (fract_oce_plus_sic >  0.0, fract_oce/fract_oce_plus_sic, 0.0) # free ocean vs. total ocean fraction
fract_sic_norm = np.where (fract_oce_plus_sic >  0.0, fract_sic/fract_oce_plus_sic, 0.0) # sea ice vs. total ocean fraction
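# fract_oce_norm + fract_sic_norm = 1 wherever there is ocean; these weights are
# used below to merge open-ocean and sea-ice contributions into single fluxes.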
##
COTOTRAI = xr.DataArray (precip-snow, dims=('y', 'x'))
COTOTRAI.attrs['long_name'] = 'Liquid precip'
COTOTRAI.attrs['coordinates'] = "lat lon"

COTOTSNO = xr.DataArray (snow       , dims=('y', 'x'))
COTOTSNO.attrs['long_name'] ='Solid precipitation'
COTOTSNO.attrs['coordinates'] = "lat lon"

COTOTEVA = xr.DataArray (evap_oce*fract_oce_norm + evap_sic*fract_sic_norm, dims=('y', 'x'))
COTOTEVA.attrs['coordinates'] = "lat lon"

COICEVAP = xr.DataArray (evap_sic   , dims=('y', 'x'))
COICEVAP.attrs['long_name'] = 'Evaporation on sea ice'
COICEVAP.attrs['coordinates'] = "lat lon"

COQSRMIX = xr.DataArray (sols       , dims=('y', 'x'))
COQSRMIX.attrs['long_name'] = 'Heat flux short wave'
COQSRMIX.attrs['units'] = 'W/m2'
COQSRMIX.attrs['coordinates'] = "lat lon"

COQNSMIX = xr.DataArray (soll       , dims=('y', 'x'))
COQNSMIX.attrs['long_name'] = 'Heat flux minus short wave'
COQNSMIX.attrs['units'] = 'W/m2'
COQNSMIX.attrs['coordinates'] = "lat lon"

COSHFICE = xr.DataArray (sols       , dims=('y', 'x'))
COSHFICE.attrs['long_name'] = 'Heat flux short wave over sea ice'
COSHFICE.attrs['units'] = 'W/m2'
COSHFICE.attrs['coordinates'] = "lat lon"

CONSFICE = xr.DataArray (soll       , dims=('y', 'x'))
CONSFICE.attrs['long_name'] = 'Heat flux minus short wave over sea ice'
CONSFICE.attrs['units'] = 'W/m2'
CONSFICE.attrs['coordinates'] = "lat lon"

COWINDSP = xr.DataArray (wind10m    , dims=('y', 'x'))
COWINDSP.attrs['long_name'] = 'Wind speed at 10m high'
COWINDSP.attrs['units'] = 'm/s'
COWINDSP.attrs['coordinates'] = "lat lon"

CODFLXDT = xr.DataArray (-20.0*np.ones(yxshape), dims=('y', 'x'))
CODFLXDT.attrs['long_name'] = 'dQ/dT - Derivative over temperature of turbulent heat fluxes'
CODFLXDT.attrs['units'] = 'W/m2/K'
CODFLXDT.attrs['coordinates'] = "lat lon"

COCALVIN = xr.DataArray (  0.0*np.ones(yxshape), dims=('y', 'x'))
COCALVIN.attrs['long_name'] = 'Calving of icebergs, solid'
COCALVIN.attrs['coordinates'] = "lat lon"

COLIQRUN = xr.DataArray (  0.0*np.ones(yxshape), dims=('y', 'x'))
COLIQRUN.attrs['long_name'] = 'River run-off, liquid'
COLIQRUN.attrs['coordinates'] = "lat lon"

## Wind stress
tau_x = (taux_oce*fract_oce_norm + taux_sic*fract_sic_norm)
tau_y = (tauy_oce*fract_oce_norm + tauy_sic*fract_sic_norm)
COTAUMOD = xr.DataArray (np.sqrt ( (tau_x*tau_x + tau_y*tau_y) ) , dims=('y', 'x'))
COTAUMOD.attrs['long_name'] = 'Wind stress modulus'
COTAUMOD.attrs['units'] = 'Pa'
COTAUMOD.attrs['coordinates'] = "lat lon"

## Wind stress, from east/north components to geocentric
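# Rotation from the local (eastward, northward) basis to Earth-centred Cartesian
# axes: east = (-sin lon, cos lon, 0), north = (-sin lat cos lon, -sin lat sin lon, cos lat),
# hence (x, y, z) = tau_x * east + tau_y * north, written out component by component below.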
rad = np.deg2rad (1.0)
COTAUXXU = xr.DataArray (-tau_x * np.sin(rad * lon) - tau_y * np.sin(rad * lat) * np.cos(rad * lon) , dims=('y', 'x'))
COTAUYYU = xr.DataArray ( tau_x * np.cos(rad * lon) - tau_y * np.sin(rad * lat) * np.sin(rad * lon) , dims=('y', 'x'))
COTAUZZU = xr.DataArray ( tau_y * np.cos(rad * lat)                                                 , dims=('y', 'x'))

## Value at North Pole
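# Longitude is ill-defined at the poles, so the rotated components there depend
# on the arbitrary longitude value; pole points are therefore overwritten with
# longitude-independent values.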
if IsUnstructured :
    ## Value at North Pole for DYNAMICO grid
    COTAUXXU = xr.where ( lat >= 89.999, -tau_y, COTAUXXU)
    COTAUYYU = xr.where ( lat >= 89.999,  tau_x, COTAUYYU)
    ## Value at South Pole for DYNAMICO grid ?

else :
    ## Value at North Pole for LMDZ lon/lat grid
    COTAUXXU[0,:] = ( -tau_x [0, 0] )
    COTAUYYU[0,:] = ( -tau_y [0, 0] )
    COTAUZZU[0,:] =  0.0
    ## Value at south Pole for LMDZ lon/lat grid
    COTAUXXU[-1,:] = ( -tau_x [-1, 0] )
    COTAUYYU[-1,:] = ( -tau_y [-1, 0] )

##
COTAUXXU.attrs['long_name'] = 'Wind stress in geocentric referential - x-component'
COTAUYYU.attrs['long_name'] = 'Wind stress in geocentric referential - y-component'
COTAUZZU.attrs['long_name'] = 'Wind stress in geocentric referential - z-component'

COTAUXXU.attrs['units'] = 'Pa'
COTAUYYU.attrs['units'] = 'Pa'
COTAUZZU.attrs['units'] = 'Pa'

COTAUXXU.attrs['coordinates'] = "lat lon"
COTAUYYU.attrs['coordinates'] = "lat lon"
COTAUZZU.attrs['coordinates'] = "lat lon"

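# The V-grid stress fields simply reuse the U-grid arrays (presumably the
# coupler expects the stress under both U- and V-grid names).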
COTAUXXV = COTAUXXU ; COTAUYYV = COTAUYYU ; COTAUZZV = COTAUZZU

## Check whether lon/lat bounds are present; if so they are carried over to the output dataset
ifbnds = ('bounds_lon' in d_In.data_vars) and ('bounds_lat' in d_In.data_vars)

## Creates final Dataset
lon = xr.DataArray (lon, dims=('y', 'x'))
lon.attrs['name']      = 'longitude'
lon.attrs['long_name'] = 'Longitude'
lon.attrs['units']     = 'degrees_east'
if ifbnds: lon.attrs['bounds']    = 'bounds_lon'

lat = xr.DataArray (lat, dims=('y', 'x'))
lat.attrs['name']      = 'latitude'
lat.attrs['long_name'] = 'Latitude'
lat.attrs['units']     = 'degrees_north'
if ifbnds: lat.attrs['bounds']    = 'bounds_lat'

if ifbnds:
    bounds_lon = d_In.bounds_lon.values.astype (np.float64)
    bounds_lat = d_In.bounds_lat.values.astype (np.float64)
    nvertex = bounds_lon.shape[-1]

    bounds_lon = xr.DataArray ( np.reshape (bounds_lon, (ny, nx, nvertex)), dims=('y', 'x', 'nvertex'))
    bounds_lat = xr.DataArray ( np.reshape (bounds_lat, (ny, nx, nvertex)), dims=('y', 'x', 'nvertex'))

    bounds_lon.attrs['units'] = 'degrees_east'
    bounds_lat.attrs['units'] = 'degrees_north'

# Prepare dictionary to export the dataset to a NetCDF file, with or without bounds
dictdataset = {'lat':lat, 'lon':lon }
if ifbnds: dictdataset.update ( {'bounds_lon':bounds_lon, 'bounds_lat':bounds_lat} )
dictdataset.update ( {
    'COTOTRAI':COTOTRAI, 'COTOTSNO':COTOTSNO, 'COTOTEVA':COTOTEVA,
    'COICEVAP':COICEVAP, 'COQSRMIX':COQSRMIX, 'COQNSMIX':COQNSMIX,
    'COSHFICE':COSHFICE, 'CONSFICE':CONSFICE, 'CODFLXDT':CODFLXDT,
    'COCALVIN':COCALVIN, 'COLIQRUN':COLIQRUN, 'COWINDSP':COWINDSP,
    'COTAUMOD':COTAUMOD, 'COTAUXXU':COTAUXXU, 'COTAUYYU':COTAUYYU,
    'COTAUZZU':COTAUZZU, 'COTAUXXV':COTAUXXV, 'COTAUYYV':COTAUYYV,
    'COTAUZZV':COTAUZZV } )

d_Out = xr.Dataset (dictdataset)

d_Out.attrs ['AssociatedFiles']   = myargs.input
d_Out.attrs ['Conventions']       = "CF-1.6"
d_Out.attrs ['source']            = "IPSL Earth system model"
d_Out.attrs ['group']             = "ICMC IPSL Climate Modelling Center"
d_Out.attrs ['Institution']       = "IPSL https://www.ipsl.fr"
d_Out.attrs ['Model']             = "IPSL CM6"
d_Out.attrs ['description']       = "Fields needed by OASIS-MCT"
d_Out.attrs ['timeStamp']         = time.asctime ()
try    : d_Out.attrs['directory'] = os.getcwd ()
except : pass
try    : d_Out.attrs['HOSTNAME']  = platform.node ()
except : pass
try    : d_Out.attrs['LOGNAME']   = os.getlogin ()
except : pass
try    : d_Out.attrs['Python']    = "Python version " +  platform.python_version ()
except : pass
try    : d_Out.attrs['OS']        = platform.system ()
except : pass
try    : d_Out.attrs['release']   = platform.release ()
except : pass
try    : d_Out.attrs['hardware']  = platform.machine ()
except : pass
d_Out.attrs ['SVN_Author']        = "$Author: omamce $"
d_Out.attrs ['SVN_Date']          = "$Date: 2022-07-06 11:06:07 +0200 (Wed, 06 Jul 2022) $"
d_Out.attrs ['SVN_Revision']      = "$Revision: 6190 $"
d_Out.attrs ['SVN_Id']            = "$Id: CalvingWeights.py 6190 2022-07-06 09:06:07Z omamce $"
d_Out.attrs ['SVN_HeadURL']       = "$HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/MOSAIX/CalvingWeights.py $"

d_Out.to_netcdf ( myargs.output, format=NcFormat )
d_Out.close ()
## ===========================================================================
##
##                               That's all folks !!!
##
## ===========================================================================