1 | ### =========================================================================== |
---|
2 | ### |
---|
3 | ### Part of CPL Restart IPSL packages |
---|
4 | ### |
---|
5 | ### =========================================================================== |
---|
6 | ## |
---|
7 | ## Warning, to install, configure, run, use any of Olivier Marti's |
---|
8 | ## software or to read the associated documentation you'll need at least |
---|
9 | ## one (1) brain in a reasonably working order. Lack of this implement |
---|
10 | ## will void any warranties (either express or implied). |
---|
## O. Marti assumes no responsibility for errors, omissions,
## data loss, or any other consequences caused directly or indirectly by
## the usage of his software by incorrectly or partially configured
## personnel.
---|
15 | ## |
---|
16 | ### |
---|
17 | ### Documentation : https://forge.ipsl.jussieu.fr/igcmg/wiki/IPSLCM6/MOSAIX |
---|
18 | ### |
---|
19 | ## SVN information |
---|
20 | # $Author: omamce $ |
---|
21 | # $Date: 2020-09-18 17:02:09 +0200 (Fri, 18 Sep 2020) $ |
---|
22 | # $Revision: 5157 $ |
---|
23 | # $Id: CreateRestartAtm4Oasis.bash 5157 2020-09-18 15:02:09Z omamce $ |
---|
24 | # $HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/CPLRESTART/CreateRestartAtm4Oasis.bash $ |
---|
25 | |
---|
26 | import numpy as np, xarray as xr |
---|
27 | import sys, argparse, textwrap, time, os, platform |
---|
28 | |
---|
## Command-line handling
#
# Build the argument parser : argparse provides the usage/help text,
# default values and choice checking.
parser = argparse.ArgumentParser (description = textwrap.dedent ("""
create_flxat
"""), epilog='-------- This is the end of the help message --------')

# Declare the accepted options, one call per option
parser.add_argument ('-v', '--verbose', action="store_true", default=False,
                     help='verbosity')
parser.add_argument ('-l', '--level', type=int, default=0, choices=[0, 1, 2],
                     help='verbosity level')
parser.add_argument ('-i', '--input', metavar='inputFile', nargs='?',
                     default="flxat_fields_notime.nc", help="input file")
parser.add_argument ('-o', '--output', metavar='outputFile', nargs='?',
                     default="tmp_flxat.nc", help="output file")
parser.add_argument ('-f', '--format', metavar='NetCDF_Format', nargs='?',
                     default="NETCDF4",
                     choices=["NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT", "NETCDF3_CLASSIC", "64bits"],
                     help="NetCDF format")
parser.add_argument ('--IsUnstructured', choices=['True', 'False'], required=True)

# Parse sys.argv
myargs = parser.parse_args ()
---|
48 | |
---|
## Decode the command-line values
#
# --IsUnstructured arrives as the literal string 'True' or 'False'
# (constrained by the parser choices) : turn it into a real boolean.
IsUnstructured = (myargs.IsUnstructured == 'True')

# Normalise the NetCDF format name.
# Bug fix : the parser choice is spelled '64bits' but the original code
# compared against '64bit' and assigned the bare name NETCDF4 (a NameError
# had the branch ever been taken). Map the '64bits' alias to the string
# 'NETCDF4', which is what xarray.to_netcdf expects.
NcFormat = myargs.format
if NcFormat == '64bits' : NcFormat = 'NETCDF4'
---|
55 | |
---|
## Read the input data - coordinates are promoted to 64 bits
d_In = xr.open_dataset (myargs.input)

lon = d_In.lon.astype (np.float64).values
lat = d_In.lat.astype (np.float64).values

# Shape of a 2D field on this grid : 1D for an unstructured mesh,
# (ny, nx) for a regular lon/lat grid
dims = lon.shape if IsUnstructured else (lat.shape[0], lon.shape[0])
---|
66 | |
---|
# Read each physical variable from the input file ; when a variable is
# missing, fall back to a constant field (ones for the free-ocean
# fraction, zeros for everything else).
# Bug fix : in the original code the fallback for 'precip' wrongly
# assigned evap_oce, leaving precip undefined when absent from the file.
def _get_field (name, default_value=0.0) :
    """Return variable `name` of d_In as a float64 numpy array of shape `dims`.

    The first record along the leading (time) axis is selected and
    degenerate axes are squeezed. Any failure (variable absent, wrong
    shape, ...) yields a constant field filled with `default_value`."""
    try :
        return d_In[name][0].squeeze ().astype (np.float64).values
    except Exception :
        # Deliberately broad : any read problem means "use the default"
        return np.full (dims, default_value)

evap_oce  = _get_field ('evap_oce')
evap_sic  = _get_field ('evap_sic')
fract_oce = _get_field ('fract_oce', 1.0)  # default : all free ocean
fract_sic = _get_field ('fract_sic')
precip    = _get_field ('precip')
snow      = _get_field ('snow')
soll      = _get_field ('soll')
sols      = _get_field ('sols')
taux_oce  = _get_field ('taux_oce')
taux_sic  = _get_field ('taux_sic')
tauy_oce  = _get_field ('tauy_oce')
tauy_sic  = _get_field ('tauy_sic')
wind10m   = _get_field ('wind10m')
---|
95 | |
---|
## Bring every field to 2D shape
if IsUnstructured :
    # Unstructured mesh : append a dummy axis of length 1 to each field
    (lon, lat, evap_oce, evap_sic, fract_oce, fract_sic, precip, snow,
     soll, sols, taux_oce, taux_sic, tauy_oce, tauy_sic, wind10m) = [
         np.expand_dims (field, axis=1) for field in
         (lon, lat, evap_oce, evap_sic, fract_oce, fract_sic, precip, snow,
          soll, sols, taux_oce, taux_sic, tauy_oce, tauy_sic, wind10m) ]
else :
    # Regular grid : broadcast the 1D lon/lat axes to 2D (ny, nx) arrays
    row = lon [np.newaxis, :]
    col = lat [:, np.newaxis]
    lon = col*0 + row
    lat = col + row*0
---|
116 | |
---|
## Grid dimensions
yxshape = lat.shape
ny, nx = yxshape

## Computations : silence divide-by-zero / NaN warnings, the 0/0 cells
## are masked out by np.where below
np.seterr (divide='ignore', invalid='ignore')

# Total ocean fraction = free ocean + sea ice
fract_oce_plus_sic = fract_oce + fract_sic
# Relative fractions (free ocean resp. sea ice over total ocean) ;
# zero where the cell contains no ocean at all
has_ocean      = fract_oce_plus_sic > 0.0
fract_oce_norm = np.where (has_ocean, fract_oce/fract_oce_plus_sic, 0.0)
fract_sic_norm = np.where (has_ocean, fract_sic/fract_oce_plus_sic, 0.0)
---|
## Build the coupling fields as DataArrays on dims (y, x).
def _field (values, **attrs) :
    """Wrap a 2D array into a DataArray on dims ('y', 'x').

    Keyword arguments become attributes (in order) ; every field gets
    coordinates = "lat lon" as its last attribute."""
    da = xr.DataArray (values, dims=('y', 'x'))
    for key, val in attrs.items () :
        da.attrs[key] = val
    da.attrs['coordinates'] = "lat lon"
    return da

COTOTRAI = _field (precip - snow, long_name='Liquid precip')
COTOTSNO = _field (snow, long_name='Solid precipitation')
COTOTEVA = _field (evap_oce*fract_oce_norm + evap_sic*fract_sic_norm)
COICEVAP = _field (evap_sic, long_name='Evaporation on sea ice')
COQSRMIX = _field (sols, long_name='Heat flux short wave', units='W/m2')
COQNSMIX = _field (soll, long_name='Heat flux minus short wave', units='W/m2')
COSHFICE = _field (sols, long_name='Heat flux short wave over sea ice', units='W/m2')
CONSFICE = _field (soll, long_name='Heat flux minus short wave over sea ice', units='W/m2')
COWINDSP = _field (wind10m, long_name='Wind speed at 10m high', units='m/s')
# Constant dQ/dT everywhere
CODFLXDT = _field (-20.0*np.ones (yxshape),
                   long_name='dQ/dT - Derivative over temperature of turbulent heat fluxes',
                   units='W/m2/K')
# Calving and runoff are zero : handled elsewhere in the coupled setup
COCALVIN = _field (0.0*np.ones (yxshape), long_name='Calving of icebergs, solid')
COLIQRUN = _field (0.0*np.ones (yxshape), long_name='River run-off, liquid')
---|
180 | |
---|
## Wind stress : blend open-ocean and sea-ice components,
## weighted by their relative fractions
tau_x = taux_oce*fract_oce_norm + taux_sic*fract_sic_norm
tau_y = tauy_oce*fract_oce_norm + tauy_sic*fract_sic_norm

COTAUMOD = xr.DataArray (np.sqrt (tau_x*tau_x + tau_y*tau_y), dims=('y', 'x'))
COTAUMOD.attrs['long_name']   = 'Wind stress modulus'
COTAUMOD.attrs['units']       = 'Pa'
COTAUMOD.attrs['coordinates'] = "lat lon"

## Rotate the stress from the local east/north frame to
## geocentric (x, y, z) components
rad = np.deg2rad (1.0)
sin_lon, cos_lon = np.sin (rad*lon), np.cos (rad*lon)
sin_lat, cos_lat = np.sin (rad*lat), np.cos (rad*lat)
COTAUXXU = xr.DataArray (-tau_x*sin_lon - tau_y*sin_lat*cos_lon, dims=('y', 'x'))
COTAUYYU = xr.DataArray ( tau_x*cos_lon - tau_y*sin_lat*sin_lon, dims=('y', 'x'))
COTAUZZU = xr.DataArray ( tau_y*cos_lat                        , dims=('y', 'x'))
---|
194 | |
---|
## Value at North Pole
# The east/north -> geocentric rotation is singular at the poles :
# overwrite the rotated components there with well-defined values.
if IsUnstructured :
    ## Value at North Pole for DYNAMICO grid
    # Cells within ~0.001 deg of the pole are treated as polar.
    # Note : xr.where rebuilds the whole array (lat is 2D here).
    COTAUXXU = xr.where ( lat >= 89.999, -tau_y, COTAUXXU)
    COTAUYYU = xr.where ( lat >= 89.999, tau_x, COTAUYYU)
    ## Value at South Pole for DYNAMICO grid ?
    # NOTE(review): no south-pole handling here - presumably the DYNAMICO
    # mesh has no cell centre exactly at the south pole ; confirm.

else :
    ## Value at North Pole for LMDZ lon/lat grid
    # Row 0 is the northernmost latitude ; use the first-column value of
    # the unrotated stress for the whole row.
    COTAUXXU[0,:] = ( -tau_x [0, 0] )
    COTAUYYU[0,:] = ( -tau_y [0, 0] )
    COTAUZZU[0,:] = 0.0 ;
    ## Value at south Pole for LMDZ lon/lat grid
    COTAUXXU[-1,:] = ( -tau_x [-1, 0] )
    COTAUYYU[-1,:] = ( -tau_y [-1, 0] )
    # NOTE(review): COTAUZZU[-1,:] is not reset at the south pole -
    # looks asymmetric with the north-pole row ; verify intent.
---|
210 | |
---|
## Attach metadata to the three geocentric stress components
for comp, axis in ((COTAUXXU, 'x'), (COTAUYYU, 'y'), (COTAUZZU, 'z')) :
    comp.attrs['long_name']   = f'Wind stress in geocentric referential - {axis}-component'
    comp.attrs['units']       = 'Pa'
    comp.attrs['coordinates'] = "lat lon"

## The same stress field is used at the U and V points
COTAUXXV = COTAUXXU ; COTAUYYV = COTAUYYU ; COTAUZZV = COTAUZZU
---|
225 | |
---|
## Check whether cell bounds for lon/lat are present in the input file ;
## if so they are propagated to the output dataset.
## (Idiom fix : was `True if cond else False` - the condition already
## is the boolean.)
ifbnds = ('bounds_lon' in d_In.data_vars) and ('bounds_lat' in d_In.data_vars)

## Build the coordinate variables of the final Dataset
lon = xr.DataArray (lon, dims=('y', 'x'))
lon.attrs['name']      = 'longitude'
lon.attrs['long_name'] = 'Longitude'
lon.attrs['units']     = 'degrees_east'
if ifbnds : lon.attrs['bounds'] = 'bounds_lon'

lat = xr.DataArray (lat, dims=('y', 'x'))
lat.attrs['name']      = 'latitude'
lat.attrs['long_name'] = 'Latitude'
lat.attrs['units']     = 'degrees_north'
if ifbnds : lat.attrs['bounds'] = 'bounds_lat'

if ifbnds :
    # Promote bounds to float64 and reshape them on the (y, x) grid
    bounds_lon = d_In.bounds_lon.values.astype (np.float64)
    bounds_lat = d_In.bounds_lat.values.astype (np.float64)
    nvertex = bounds_lon.shape[-1]  # number of corners per cell

    bounds_lon = xr.DataArray (np.reshape (bounds_lon, (ny, nx, nvertex)), dims=('y', 'x', 'nvertex'))
    bounds_lat = xr.DataArray (np.reshape (bounds_lat, (ny, nx, nvertex)), dims=('y', 'x', 'nvertex'))

    bounds_lon.attrs['units'] = 'degrees_east'
    bounds_lat.attrs['units'] = 'degrees_north'
---|
252 | |
---|
# Assemble the variable dictionary for the output dataset :
# coordinates first, optional bounds, then the coupling fields.
dictdataset = {'lat': lat, 'lon': lon}
if ifbnds :
    dictdataset['bounds_lon'] = bounds_lon
    dictdataset['bounds_lat'] = bounds_lat
dictdataset.update ({
    'COTOTRAI': COTOTRAI, 'COTOTSNO': COTOTSNO, 'COTOTEVA': COTOTEVA,
    'COICEVAP': COICEVAP, 'COQSRMIX': COQSRMIX, 'COQNSMIX': COQNSMIX,
    'COSHFICE': COSHFICE, 'CONSFICE': CONSFICE, 'CODFLXDT': CODFLXDT,
    'COCALVIN': COCALVIN, 'COLIQRUN': COLIQRUN, 'COWINDSP': COWINDSP,
    'COTAUMOD': COTAUMOD, 'COTAUXXU': COTAUXXU, 'COTAUYYU': COTAUYYU,
    'COTAUZZU': COTAUZZU, 'COTAUXXV': COTAUXXV, 'COTAUYYV': COTAUYYV,
    'COTAUZZV': COTAUZZV})

d_Out = xr.Dataset (dictdataset)
---|
266 | |
---|
## Global attributes of the output file.
## (Fix : 'source' and 'group' were assigned twice with the same values ;
## duplicates removed, effective attribute order preserved.)
d_Out.attrs ['AssociatedFiles'] = myargs.input
d_Out.attrs ['Conventions']     = "CF-1.6"
d_Out.attrs ['source']          = "IPSL Earth system model"
d_Out.attrs ['group']           = "ICMC IPSL Climate Modelling Center"
d_Out.attrs ['Institution']     = "IPSL https://www.ipsl.fr"
d_Out.attrs ['Model']           = "IPSL CM6"
d_Out.attrs ['description']     = "Fields needed by OASIS-MCT"
d_Out.attrs ['timeStamp']       = time.asctime ()

## Best-effort information about the run environment. Any of these can
## fail (e.g. os.getlogin without a controlling terminal) ; the attribute
## is then simply skipped. `except Exception` (not a bare except) so
## KeyboardInterrupt / SystemExit still propagate.
for _name, _value in (('directory', os.getcwd),
                      ('HOSTNAME',  platform.node),
                      ('LOGNAME',   os.getlogin),
                      ('Python',    lambda : "Python version " + platform.python_version ()),
                      ('OS',        platform.system),
                      ('release',   platform.release),
                      ('hardware',  platform.machine)) :
    try :
        d_Out.attrs[_name] = _value ()
    except Exception :
        pass

d_Out.attrs ['SVN_Author']   = "$Author: omamce $"
d_Out.attrs ['SVN_Date']     = "$Date: 2022-07-06 11:06:07 +0200 (Wed, 06 Jul 2022) $"
d_Out.attrs ['SVN_Revision'] = "$Revision: 6190 $"
d_Out.attrs ['SVN_Id']       = "$Id: CalvingWeights.py 6190 2022-07-06 09:06:07Z omamce $"
d_Out.attrs ['SVN_HeadURL']  = "$HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/MOSAIX/CalvingWeights.py $"

## Write the result and close
d_Out.to_netcdf ( myargs.output, format=NcFormat )
d_Out.close ()
---|
299 | ## =========================================================================== |
---|
300 | ## |
---|
301 | ## That's all folk's !!! |
---|
302 | ## |
---|
303 | ## =========================================================================== |
---|