Changeset 6669 for TOOLS/CPLRESTART/create_flxat.py
- Timestamp:
- 10/27/23 13:32:18 (9 months ago)
- File:
-
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
TOOLS/CPLRESTART/create_flxat.py
r6512 r6669 23 23 # $Id: CreateRestartAtm4Oasis.bash 5157 2020-09-18 15:02:09Z omamce $ 24 24 # $HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/CPLRESTART/CreateRestartAtm4Oasis.bash $ 25 26 import numpy as np, xarray as xr 27 import sys, argparse, textwrap, time, os, platform 25 # 26 '''Create atmospheric restart 27 28 ## SVN information 29 Author = "$Author: omamce $" 30 Date = "$Date: 2023-10-25 17:11:15 +0200 (Wed, 25 Oct 2023) $" 31 Revision = "$Revision: 6666 $" 32 Id = "$Id: RunoffWeights.py 6666 2023-10-25 15:11:15Z omamce $" 33 HeadURL = "$HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/MOSAIX/RunoffWeights.py $" 34 ''' 35 36 # SVN information 37 __SVN__ = ({ 38 'Author' : '$Author: omamce $', 39 'Date' : '$Date: 2023-10-25 17:11:15 +0200 (Wed, 25 Oct 2023) $', 40 'Revision' : '$Revision: 6666 $', 41 'Id' : '$Id: RunoffWeights.py 6666 2023-10-25 15:11:15Z omamce $', 42 'HeadURL' : '$HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/MOSAIX/RunoffWeights.py $', 43 'SVN_Date' : '$SVN_Date: $', 44 }) 45 ## 46 47 import argparse 48 import textwrap 49 import time 50 import os 51 import sys 52 import platform 53 import numpy as np 54 import xarray as xr 28 55 29 56 ## Reading the command line arguments … … 36 63 37 64 # Adding arguments 38 parser.add_argument ('-v', '--verbose', help='verbosity', action="store_true", default=False ) 39 parser.add_argument ('-l', '--level', help='verbosity level', type=int, default=0, choices=[0, 1, 2] ) 40 parser.add_argument ('-i', '--input' , metavar='inputFile' , help="input file" , nargs='?', default="flxat_fields_notime.nc" ) 41 parser.add_argument ('-o', '--output' , metavar='outputFile', help="output file", nargs='?', default="tmp_flxat.nc" ) 42 parser.add_argument ('-f', '--format' , metavar='NetCDF_Format', help="NetCDF format", nargs='?', default="NETCDF4", 43 choices=["NETCDF4","NETCDF4_CLASSIC", "NETCDF3_64BIT", 
"NETCDF3_CLASSIC", "64bits"]) 44 parser.add_argument ('--IsUnstructured', choices=['True', 'False'], required=True ) 65 parser.add_argument ('-v', '--verbose', help='verbosity', action="store_true", 66 default=False ) 67 parser.add_argument ('-l', '--level', type=int, default=0, 68 choices=[0, 1, 2], help='verbosity level') 69 parser.add_argument ('-i', '--input' , metavar='inputFile' , nargs='?', 70 default="flxat_fields_notime.nc", 71 help="input file") 72 parser.add_argument ('-o', '--output' , metavar='outputFile', nargs='?', 73 default="tmp_flxat.nc", 74 help="output file" ) 75 parser.add_argument ('-f', '--format' , metavar='NetCDF_Format', nargs='?', 76 default="NETCDF4", 77 choices=["NETCDF4","NETCDF4_CLASSIC", "NETCDF3_64BIT", 78 "NETCDF3_CLASSIC", "64bits"], 79 help="NetCDF format") 80 parser.add_argument ('--IsUnstructured', choices=['True', 'False'], 81 required=True ) 45 82 46 83 # Parse command line 47 84 myargs = parser.parse_args () 48 85 49 IsUnstructured = myargs.IsUnstructured 50 if IsUnstructured == 'True' : IsUnstructured = True 51 else : IsUnstructured = False 52 86 IsUnstructured = myargs.IsUnstructured in [ True, 'True', 'true', 'TRUE' ] 53 87 NcFormat = myargs.format 54 if NcFormat == '64bit' : NcFormat = NETCDF455 56 ## Read Data - promote them to 64 bits 88 if NcFormat == '64bit' : NcFormat = 'NETCDF4' 89 90 ## Read Data - promote them to 64 bits 57 91 d_In = xr.open_dataset (myargs.input) 58 92 59 lon = d_In.lon.astype(np.float64).values60 lat = d_In.lat.astype(np.float64).values93 lon = d_In.lon.astype(float).values 94 lat = d_In.lat.astype(float).values 61 95 62 96 if IsUnstructured : … … 67 101 # Try to read variables 68 102 # Set them to zero if not possible 69 try : evap_oce = d_In.evap_oce[0].squeeze().astype(np.float64).values 70 except : evap_oce = np.zeros ( dims ) 71 try : evap_sic = d_In.evap_sic[0].squeeze().astype(np.float64).values 72 except : evap_sic = np.zeros ( dims ) 73 try : fract_oce = 
d_In.fract_oce[0].squeeze().astype(np.float64).values 74 except : fract_oce = np.ones ( dims ) 75 try : fract_sic = d_In.fract_sic[0].squeeze().astype(np.float64).values 76 except : fract_sic = np.zeros ( dims ) 77 try : precip = d_In.precip[0].squeeze().astype(np.float64).values 78 except : evap_oce = np.zeros ( dims ) 79 try : snow = d_In.snow[0].squeeze().astype(np.float64).values 80 except : snow = np.zeros ( dims ) 81 try : soll = d_In.soll[0].squeeze().astype(np.float64).values 82 except : soll = np.zeros ( dims ) 83 try : sols = d_In.sols[0].squeeze().astype(np.float64).values 84 except : sols = np.zeros ( dims ) 85 try : taux_oce = d_In.taux_oce[0].squeeze().astype(np.float64).values 86 except : taux_oce = np.zeros ( dims ) 87 try : taux_sic = d_In.taux_sic[0].squeeze().astype(np.float64).values 88 except : taux_sic = np.zeros ( dims ) 89 try : tauy_oce = d_In.tauy_oce[0].squeeze().astype(np.float64).values 90 except : tauy_oce = np.zeros ( dims ) 91 try : tauy_sic = d_In.tauy_sic[0].squeeze().astype(np.float64).values 92 except : tauy_sic = np.zeros ( dims ) 93 try : wind10m = d_In.wind10m[0].squeeze().astype(np.float64).values 94 except : wind10m = np.zeros ( dims ) 103 if 'evap_oce' in d_In.variables : 104 evap_oce = d_In.evap_oce[0].squeeze().astype(float).values 105 else : 106 evap_oce = np.zeros ( dims ) 107 if 'evap_sic' in d_In.variables : 108 evap_sic = d_In.evap_sic[0].squeeze().astype(float).values 109 else : 110 evap_sic = np.zeros ( dims ) 111 if 'fract_oce' in d_In.variables : 112 fract_oce = d_In.fract_oce[0].squeeze().astype(float).values 113 else : 114 fract_oce = np.ones ( dims ) 115 if 'fract_sic' in d_In.variables : 116 fract_sic = d_In.fract_sic[0].squeeze().astype(float).values 117 else : 118 fract_sic = np.zeros ( dims ) 119 if 'precip' in d_In.variables : 120 precip = d_In.precip[0].squeeze().astype(float).values 121 else : 122 precip = np.zeros ( dims ) 123 if 'snow' in d_In.variables : 124 snow = 
d_In.snow[0].squeeze().astype(float).values 125 else : 126 snow = np.zeros ( dims ) 127 if 'soll' in d_In.variables : 128 soll = d_In.soll[0].squeeze().astype(float).values 129 else : 130 soll = np.zeros ( dims ) 131 if 'sols' in d_In.variables : 132 sols = d_In.sols[0].squeeze().astype(float).values 133 else : 134 sols = np.zeros ( dims ) 135 if 'taux_oce' in d_In.variables : 136 taux_oce = d_In.taux_oce[0].squeeze().astype(float).values 137 else : 138 taux_oce = np.zeros ( dims ) 139 if 'taux_sic' in d_In.variables : 140 taux_sic = d_In.taux_sic[0].squeeze().astype(float).values 141 else : 142 taux_sic = np.zeros ( dims ) 143 if 'tauy_oce' in d_In.variables : 144 tauy_oce = d_In.tauy_oce[0].squeeze().astype(float).values 145 else : 146 tauy_oce = np.zeros ( dims ) 147 if 'tauy_sic' in d_In.variables : 148 tauy_sic = d_In.tauy_sic[0].squeeze().astype(float).values 149 else : 150 tauy_sic = np.zeros ( dims ) 151 if 'wind10m' in d_In.variables : 152 wind10m = d_In.wind10m[0].squeeze().astype(float).values 153 else : 154 wind10m = np.zeros ( dims ) 95 155 96 156 if IsUnstructured : … 113 173 lon2 = lat [:, np.newaxis]*0 + lon [np.newaxis, :] 114 174 lat2 = lat [:, np.newaxis] + lon [np.newaxis, :]*0 115 lon = lon2 ; lat = lat2 116 175 lon = lon2 176 lat = lat2 177 117 178 ## 118 179 yxshape = lat.shape … 122 183 np.seterr (divide='ignore', invalid='ignore') 123 184 124 fract_oce_plus_sic = (fract_oce + fract_sic) ; ## ocean fraction 125 fract_oce_norm = np.where (fract_oce_plus_sic > 0.0, fract_oce/fract_oce_plus_sic, 0.0) # free ocean vs. total ocen fraction 126 fract_sic_norm = np.where (fract_oce_plus_sic > 0.0, fract_sic/fract_oce_plus_sic, 0.0) # sea ice vs. total ocean fraction 185 ## ocean fraction 186 fract_oce_plus_sic = fract_oce + fract_sic 187 # free ocean vs. total ocean fraction 188 fract_oce_norm = np.where (fract_oce_plus_sic > 0.0, 189 fract_oce/fract_oce_plus_sic, 0.0) 190 # sea ice vs. 
total ocean fraction 191 fract_sic_norm = np.where (fract_oce_plus_sic > 0.0, 192 fract_sic/fract_oce_plus_sic, 0.0) 127 193 ## 128 194 COTOTRAI = xr.DataArray (precip-snow, dims=('y', 'x')) … … 134 200 COTOTSNO.attrs['coordinates'] = "lat lon" 135 201 136 COTOTEVA = xr.DataArray (evap_oce*fract_oce_norm + evap_sic*fract_sic_norm, dims=('y', 'x')) 202 COTOTEVA = xr.DataArray (evap_oce*fract_oce_norm + evap_sic*fract_sic_norm, 203 dims=('y', 'x')) 137 204 COTOTEVA.attrs['coordinates'] = "lat lon" 138 205 … … 168 235 CODFLXDT = xr.DataArray (-20.0*np.ones(yxshape), dims=('y', 'x')) 169 236 CODFLXDT.attrs['long_name'] = 'dQ/dT - Derivative over temperature of turbulent heat fluxes' 170 CODFLXDT.attrs['units'] = 'W/m2/K' 237 CODFLXDT.attrs['units'] = 'W/m2/K' 171 238 CODFLXDT.attrs['coordinates'] = "lat lon" 172 239 … … 180 247 181 248 ## Wind stress 182 tau_x = (taux_oce*fract_oce_norm + taux_sic*fract_sic_norm)183 tau_y = (tauy_oce*fract_oce_norm + tauy_sic*fract_sic_norm)249 tau_x = taux_oce*fract_oce_norm + taux_sic*fract_sic_norm 250 tau_y = tauy_oce*fract_oce_norm + tauy_sic*fract_sic_norm 184 251 COTAUMOD = xr.DataArray (np.sqrt ( (tau_x*tau_x + tau_y*tau_y) ) , dims=('y', 'x')) 185 COTAUMOD.attrs['long_name'] = 'Wind stress modulus' 186 COTAUMOD.attrs['units'] = 'Pa' 187 COTAUMOD.attrs['coordinates'] = "lat lon" 252 COTAUMOD.attrs.update ( {'long_name':'Wind stress modulus', 'units':'Pa', 253 'coordinates':'lat lon' }) 188 254 189 255 ## Wind stress, from east/north components to geocentric 190 256 rad = np.deg2rad (1.0) 191 COTAUXXU = xr.DataArray (-tau_x * np.sin(rad * lon) - tau_y * np.sin(rad * lat) * np.cos(rad * lon) , dims=('y', 'x')) 192 COTAUYYU = xr.DataArray ( tau_x * np.cos(rad * lon) - tau_y * np.sin(rad * lat) * np.sin(rad * lon) , dims=('y', 'x')) 257 COTAUXXU = xr.DataArray (-tau_x * np.sin(rad * lon) 258 - tau_y * np.sin(rad * lat) * np.cos(rad * lon) , 259 dims=('y', 'x')) 260 COTAUYYU = xr.DataArray ( tau_x * np.cos(rad * lon) 261 - tau_y * 
np.sin(rad * lat) * np.sin(rad * lon) , 262 dims=('y', 'x')) 193 263 COTAUZZU = xr.DataArray ( tau_y * np.cos(rad * lat) , dims=('y', 'x')) 194 264 195 265 ## Value at North Pole 196 266 if IsUnstructured : 197 ## Value at North Pole for DYNAMICO grid 267 ## Value at North Pole for DYNAMICO grid 198 268 COTAUXXU = xr.where ( lat >= 89.999, -tau_y, COTAUXXU) 199 269 COTAUYYU = xr.where ( lat >= 89.999, tau_x, COTAUYYU) 200 270 ## Value at South Pole for DYNAMICO grid ? 201 271 202 272 else : 203 273 ## Value at North Pole for LMDZ lon/lat grid 204 274 COTAUXXU[0,:] = ( -tau_x [0, 0] ) 205 275 COTAUYYU[0,:] = ( -tau_y [0, 0] ) 206 COTAUZZU[0,:] = 0.0 ;276 COTAUZZU[0,:] = 0.0 207 277 ## Value at south Pole for LMDZ lon/lat grid 208 278 COTAUXXU[-1,:] = ( -tau_x [-1, 0] ) … … 210 280 211 281 ## 212 COTAUXXU.attrs['long_name'] = 'Wind stress in geocentric referential - x-component' 213 COTAUYYU.attrs['long_name'] = 'Wind stress in geocentric referential - y-component' 214 COTAUZZU.attrs['long_name'] = 'Wind stress in geocentric referential - z-component' 215 216 COTAUXXU.attrs['units'] = 'Pa' 217 COTAUYYU.attrs['units'] = 'Pa' 218 COTAUZZU.attrs['units'] = 'Pa' 219 220 COTAUXXU.attrs['coordinates'] = "lat lon" 221 COTAUYYU.attrs['coordinates'] = "lat lon" 222 COTAUZZU.attrs['coordinates'] = "lat lon" 223 224 COTAUXXV = COTAUXXU ; COTAUYYV = COTAUYYU ; COTAUZZV = COTAUZZU 282 COTAUXXU.attrs.update ({'long_name':'Wind stress in geocentric referential - x-component', 283 'units':'Pa', 'coordinates':'lat lon', 'Grid':'U' }) 284 COTAUYYU.attrs.update ({'long_name':'Wind stress in geocentric referential - y-component', 285 'units':'Pa', 'coordinates':'lat lon', 'Grid':'U' }) 286 COTAUZZU.attrs.update ({'long_name':'Wind stress in geocentric referential - z-component', 287 'units':'Pa', 'coordinates':'lat lon', 'Grid':'U' }) 288 289 COTAUXXV = COTAUXXU 290 COTAUYYV = COTAUYYU 291 COTAUZZV = COTAUZZU 292 293 COTAUXXV.attrs.update ( {'Grid':'V'} ) 294 COTAUYYV.attrs.update ( 
{'Grid':'V'} ) 295 COTAUZZV.attrs.update ( {'Grid':'V'} ) 225 296 226 297 ## check if bounds for lon lat are present and add them to dataset or drop them 227 ifbnds =True if ('bounds_lon' in d_In.data_vars) and ('bounds_lat' in d_In.data_vars) else False298 ifbnds = 'bounds_lon' in d_In.data_vars and 'bounds_lat' in d_In.data_vars 228 299 229 300 ## Creates final Dataset 230 301 lon = xr.DataArray (lon, dims=('y', 'x')) 231 lon.attrs['name'] = 'longitude' 232 lon.attrs['long_name'] = 'Longitude' 233 lon.attrs['units'] = 'degrees_east' 234 if ifbnds: lon.attrs['bounds'] = 'bounds_lon' 302 lon.attrs.update ({'name':'longitude', 'long_name':'Longitude', 303 'units':'degrees_east', 'standard_name':'longitude' }) 235 304 236 305 lat = xr.DataArray (lat, dims=('y', 'x')) 237 lat.attrs['name'] = 'latitude' 238 lat.attrs['long_name'] = 'Latitude' 239 lat.attrs['units'] = 'degrees_north' 240 if ifbnds: lat.attrs['bounds'] = 'bounds_lat' 241 242 if ifbnds: 243 bounds_lon = d_In.bounds_lon.values.astype (np.float64) 244 bounds_lat = d_In.bounds_lat.values.astype (np.float64) 306 lat.attrs.update ({'name':'latitude', 'long_name':'Latitude', 307 'units':'degrees_north', 'standard_name':'latitude' }) 308 309 if ifbnds : 310 lon.attrs['bounds'] = 'bounds_lon' 311 lat.attrs['bounds'] = 'bounds_lat' 312 313 bounds_lon = d_In.bounds_lon.values.astype (float) 314 bounds_lat = d_In.bounds_lat.values.astype (float) 245 315 nvertex = bounds_lon.shape[-1] 246 316 247 bounds_lon = xr.DataArray ( np.reshape (bounds_lon, (ny, nx, nvertex)), dims=('y', 'x', 'nvertex')) 248 bounds_lat = xr.DataArray ( np.reshape (bounds_lat, (ny, nx, nvertex)), dims=('y', 'x', 'nvertex')) 317 bounds_lon = xr.DataArray ( np.reshape (bounds_lon, (ny, nx, nvertex)), 318 dims=('y', 'x', 'nvertex')) 319 bounds_lat = xr.DataArray ( np.reshape (bounds_lat, (ny, nx, nvertex)), 320 dims=('y', 'x', 'nvertex')) 249 321 250 322 bounds_lon.attrs['units'] = 'degrees_east' 251 323 bounds_lat.attrs['units'] = 'degrees_north' 
252 324 253 # preparedictionnary to export dataset to netcdf file with or without bounds325 # Prepares dictionary to export dataset to netcdf file with or without bounds 254 326 dictdataset = {'lat':lat, 'lon':lon } 255 if ifbnds: dictdataset.update ( {'bounds_lon':bounds_lon, 'bounds_lat':bounds_lat} ) 327 if ifbnds: 328 dictdataset.update ( {'bounds_lon':bounds_lon, 329 'bounds_lat':bounds_lat} ) 256 330 dictdataset.update ( { 257 331 'COTOTRAI':COTOTRAI, 'COTOTSNO':COTOTSNO, 'COTOTEVA':COTOTEVA, … 265 339 d_Out = xr.Dataset (dictdataset) 266 340 267 d_Out.attrs ['AssociatedFiles'] = myargs.input 268 d_Out.attrs ['Conventions'] = "CF-1.6" 269 d_Out.attrs ['source'] = "IPSL Earth system model" 270 d_Out.attrs ['group'] = "ICMC IPSL Climate Modelling Center" 271 d_Out.attrs ['Institution'] = "IPSL https://www.ipsl.fr" 272 d_Out.attrs ['Model'] = "IPSL CM6" 273 d_Out.attrs ['source'] = "IPSL Earth system model" 274 d_Out.attrs ['group'] = "ICMC IPSL Climate Modelling Center" 275 d_Out.attrs ['description'] = "Fields needed by OASIS-MCT" 276 d_Out.attrs ['timeStamp'] = time.asctime () 277 try : d_Out.attrs['directory'] = os.getcwd () 278 except : pass 279 try : d_Out.attrs['HOSTNAME'] = platform.node () 280 except : pass 281 try : d_Out.attrs['LOGNAME'] = os.getlogin () 282 except : pass 283 try : d_Out.attrs['Python'] = "Python version " + platform.python_version () 284 except : pass 285 try : d_Out.attrs['OS'] = platform.system () 286 except : pass 287 try : d_Out.attrs['release'] = platform.release () 288 except : pass 289 try : d_Out.attrs['hardware'] = platform.machine () 290 except : pass 291 d_Out.attrs ['SVN_Author'] = "$Author: omamce $" 292 d_Out.attrs ['SVN_Date'] = "$Date: 2022-07-06 11:06:07 +0200 (Wed, 06 Jul 2022) $" 293 d_Out.attrs ['SVN_Revision'] = "$Revision: 6190 $" 294 d_Out.attrs ['SVN_Id'] = "$Id: CalvingWeights.py 6190 2022-07-06 09:06:07Z omamce $" 295 d_Out.attrs ['SVN_HeadURL'] = "$HeadURL: 
svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/MOSAIX/CalvingWeights.py $" 341 d_Out.attrs.update ( { 342 'AssociatedFiles' : myargs.input, 343 'Conventions' : "CF-1.6", 344 'source' : "IPSL Earth system model", 345 'group' : "ICMC IPSL Climate Modelling Center", 346 'Institution' : "IPSL https://www.ipsl.fr", 347 'Model' : "IPSL CM", 348 'description' : "Fields needed by OASIS-MCT", 349 'Program' : f'Generated by {sys.argv[0]} with flags ' + ' '.join (sys.argv[1:]), 350 'timeStamp' : time.asctime (), 351 'directory' : os.getcwd (), 352 'HOSTNAME' : platform.node (), 353 'LOGNAME' : os.getlogin (), 354 'Python' : "Python version " + platform.python_version (), 355 'OS' : platform.system (), 356 'release' : platform.release (), 357 'hardware' : platform.machine (), 358 'SVN_Author' : "$Author: omamce $", 359 'SVN_Date' : "$Date: 2022-07-06 11:06:07 +0200 (Wed, 06 Jul 2022) $", 360 'SVN_Revision' : "$Revision: 6190 $", 361 'SVN_Id' : "$Id: CalvingWeights.py 6190 2022-07-06 09:06:07Z omamce $", 362 'SVN_HeadURL' : "$HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/MOSAIX/CalvingWeights.py $", 363 } ) 296 364 297 365 d_Out.to_netcdf ( myargs.output, format=NcFormat )
Note: See TracChangeset
for help on using the changeset viewer.