### =========================================================================== ###
### Part of CPL Restart IPSL packages                                          ###
### =========================================================================== ###
##
## Warning, to install, configure, run, use any of Olivier Marti's
## software or to read the associated documentation you'll need at least
## one (1) brain in a reasonably working order. Lack of this implement
## will void any warranties (either express or implied).
## O. Marti assumes no responsibility for errors, omissions,
## data loss, or any other consequences caused directly or indirectly by
## the usage of his software by incorrectly or partially configured
## personnel.
##
### Documentation : https://forge.ipsl.jussieu.fr/igcmg/wiki/IPSLCM6/MOSAIX
##
## SVN information
#  $Author: omamce $
#  $Date: 2020-09-18 17:02:09 +0200 (Fri, 18 Sep 2020) $
#  $Revision: 5157 $
#  $Id: CreateRestartAtm4Oasis.bash 5157 2020-09-18 15:02:09Z omamce $
#  $HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/CPLRESTART/CreateRestartAtm4Oasis.bash $

"""create_flxat : build the atmosphere coupling fields needed by OASIS-MCT.

Reads time-less atmospheric fields from a NetCDF file, normalises the
ocean / sea-ice fractions, combines the fluxes, rotates the wind stress to
the geocentric referential and writes the resulting fields to a NetCDF
file usable as a coupler restart.
"""

import sys                      # kept from the original import list
import argparse
import textwrap
import time
import os
import platform

import numpy as np
import xarray as xr

## ===========================================================================
## Reading the command line arguments
## ===========================================================================
parser = argparse.ArgumentParser (
    description = textwrap.dedent (""" create_flxat """),
    epilog = '-------- This is the end of the help message --------')

parser.add_argument ('-v', '--verbose', help='verbosity', action="store_true", default=False)
parser.add_argument ('-l', '--level', help='verbosity level', type=int, default=0, choices=[0, 1, 2])
parser.add_argument ('-i', '--input', metavar='inputFile', help="input file",
                     nargs='?', default="flxat_fields_notime.nc")
parser.add_argument ('-o', '--output', metavar='outputFile', help="output file",
                     nargs='?', default="tmp_flxat.nc")
parser.add_argument ('-f', '--format', metavar='NetCDF_Format', help="NetCDF format",
                     nargs='?', default="NETCDF4",
                     choices=["NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT",
                              "NETCDF3_CLASSIC", "64bits"])
parser.add_argument ('--IsUnstructured', choices=['True', 'False'], required=True)

# Parse command line
myargs = parser.parse_args ()

# argparse delivers --IsUnstructured as a string : turn it into a real boolean.
IsUnstructured = (myargs.IsUnstructured == 'True')

NcFormat = myargs.format
# BUG FIX: the original compared against '64bit' (a value --format does not
# accept ; the accepted alias is '64bits') and assigned the *undefined name*
# NETCDF4, which would have raised NameError.  Map the alias to the
# corresponding format string instead.
if NcFormat == '64bits' :
    NcFormat = 'NETCDF4'

## ===========================================================================
## Read Data - promote them to 64 bits
## ===========================================================================
d_In = xr.open_dataset (myargs.input)

lon = d_In.lon.astype (np.float64).values
lat = d_In.lat.astype (np.float64).values

if IsUnstructured :
    dims = lon.shape                        # 1D list of cells (e.g. DYNAMICO)
else :
    dims = (lat.shape[0], lon.shape[0])     # regular lat/lon grid


def _read_field (ds, name, default) :
    """Return field *name* (first record, squeezed, float64) from *ds*.

    Falls back to *default* when the variable is absent, mirroring the
    original behaviour : missing fields are zero-filled, except the ocean
    fraction which defaults to one.
    """
    try :
        return ds[name][0].squeeze ().astype (np.float64).values
    except (KeyError, IndexError) :
        return default


# Try to read variables ; use a neutral default if not possible.
evap_oce  = _read_field (d_In, 'evap_oce',  np.zeros (dims))
evap_sic  = _read_field (d_In, 'evap_sic',  np.zeros (dims))
fract_oce = _read_field (d_In, 'fract_oce', np.ones  (dims))   # default : all ocean
fract_sic = _read_field (d_In, 'fract_sic', np.zeros (dims))
# BUG FIX: the original fallback assigned evap_oce instead of precip, which
# left precip undefined (NameError downstream) when the variable was missing
# and silently clobbered the evap_oce value read just above.
precip    = _read_field (d_In, 'precip',    np.zeros (dims))
snow      = _read_field (d_In, 'snow',      np.zeros (dims))
soll      = _read_field (d_In, 'soll',      np.zeros (dims))
sols      = _read_field (d_In, 'sols',      np.zeros (dims))
taux_oce  = _read_field (d_In, 'taux_oce',  np.zeros (dims))
taux_sic  = _read_field (d_In, 'taux_sic',  np.zeros (dims))
tauy_oce  = _read_field (d_In, 'tauy_oce',  np.zeros (dims))
tauy_sic  = _read_field (d_In, 'tauy_sic',  np.zeros (dims))
wind10m   = _read_field (d_In, 'wind10m',   np.zeros (dims))

## Bring every field to a 2D (y, x) layout.
if IsUnstructured :
    # Unstructured mesh : add a dummy x dimension of length 1.
    lon       = np.expand_dims (lon      , axis=1)
    lat       = np.expand_dims (lat      , axis=1)
    evap_oce  = np.expand_dims (evap_oce , axis=1)
    evap_sic  = np.expand_dims (evap_sic , axis=1)
    fract_oce = np.expand_dims (fract_oce, axis=1)
    fract_sic = np.expand_dims (fract_sic, axis=1)
    precip    = np.expand_dims (precip   , axis=1)
    snow      = np.expand_dims (snow     , axis=1)
    soll      = np.expand_dims (soll     , axis=1)
    sols      = np.expand_dims (sols     , axis=1)
    taux_oce  = np.expand_dims (taux_oce , axis=1)
    taux_sic  = np.expand_dims (taux_sic , axis=1)
    tauy_oce  = np.expand_dims (tauy_oce , axis=1)
    tauy_sic  = np.expand_dims (tauy_sic , axis=1)
    wind10m   = np.expand_dims (wind10m  , axis=1)
else :
    # Regular grid : build 2D longitude/latitude arrays by broadcasting.
    lon2 = lat [:, np.newaxis]*0 + lon [np.newaxis, :]
    lat2 = lat [:, np.newaxis] + lon [np.newaxis, :]*0
    lon = lon2
    lat = lat2

yxshape = lat.shape
ny, nx = yxshape

## ===========================================================================
## Computations
## ===========================================================================
# The fraction ratios divide by zero on land points ; silence the warnings,
# np.where selects the 0.0 branch there anyway.
np.seterr (divide='ignore', invalid='ignore')

fract_oce_plus_sic = (fract_oce + fract_sic)   ## total ocean fraction
# Free-ocean and sea-ice fractions relative to the total ocean fraction.
fract_oce_norm = np.where (fract_oce_plus_sic > 0.0, fract_oce/fract_oce_plus_sic, 0.0)
fract_sic_norm = np.where (fract_oce_plus_sic > 0.0, fract_sic/fract_oce_plus_sic, 0.0)

## Coupling fields expected by OASIS-MCT
COTOTRAI = xr.DataArray (precip-snow, dims=('y', 'x'))
COTOTRAI.attrs['long_name']   = 'Liquid precip'
COTOTRAI.attrs['coordinates'] = "lat lon"

COTOTSNO = xr.DataArray (snow, dims=('y', 'x'))
COTOTSNO.attrs['long_name']   = 'Solid precipitation'
COTOTSNO.attrs['coordinates'] = "lat lon"

# Evaporation weighted by the normalised free-ocean / sea-ice fractions.
COTOTEVA = xr.DataArray (evap_oce*fract_oce_norm + evap_sic*fract_sic_norm, dims=('y', 'x'))
# CONSISTENCY: long_name added ; every sibling field carries one.
COTOTEVA.attrs['long_name']   = 'Total evaporation'
COTOTEVA.attrs['coordinates'] = "lat lon"

COICEVAP = xr.DataArray (evap_sic, dims=('y', 'x'))
COICEVAP.attrs['long_name']   = 'Evaporation on sea ice'
COICEVAP.attrs['coordinates'] = "lat lon"

COQSRMIX = xr.DataArray (sols, dims=('y', 'x'))
COQSRMIX.attrs['long_name']   = 'Heat flux short wave'
COQSRMIX.attrs['units']       = 'W/m2'
COQSRMIX.attrs['coordinates'] = "lat lon"

COQNSMIX = xr.DataArray (soll, dims=('y', 'x'))
COQNSMIX.attrs['long_name']   = 'Heat flux minus short wave'
COQNSMIX.attrs['units']       = 'W/m2'
COQNSMIX.attrs['coordinates'] = "lat lon"

COSHFICE = xr.DataArray (sols, dims=('y', 'x'))
COSHFICE.attrs['long_name']   = 'Heat flux short wave over sea ice'
COSHFICE.attrs['units']       = 'W/m2'
COSHFICE.attrs['coordinates'] = "lat lon"

CONSFICE = xr.DataArray (soll, dims=('y', 'x'))
CONSFICE.attrs['long_name']   = 'Heat flux minus short wave over sea ice'
CONSFICE.attrs['units']       = 'W/m2'
CONSFICE.attrs['coordinates'] = "lat lon"

COWINDSP = xr.DataArray (wind10m, dims=('y', 'x'))
COWINDSP.attrs['long_name']   = 'Wind speed at 10m high'
COWINDSP.attrs['units']       = 'm/s'
COWINDSP.attrs['coordinates'] = "lat lon"

# Constant fields : dQ/dT, calving and runoff are not provided by the input.
CODFLXDT = xr.DataArray (-20.0*np.ones(yxshape), dims=('y', 'x'))
CODFLXDT.attrs['long_name']   = 'dQ/dT - Derivative over temperature of turbulent heat fluxes'
CODFLXDT.attrs['units']       = 'W/m2/K'
CODFLXDT.attrs['coordinates'] = "lat lon"

COCALVIN = xr.DataArray (0.0*np.ones(yxshape), dims=('y', 'x'))
COCALVIN.attrs['long_name']   = 'Calving of icebergs, solid'
COCALVIN.attrs['coordinates'] = "lat lon"

COLIQRUN = xr.DataArray (0.0*np.ones(yxshape), dims=('y', 'x'))
COLIQRUN.attrs['long_name']   = 'River run-off, liquid'
COLIQRUN.attrs['coordinates'] = "lat lon"

## Wind stress : free-ocean / sea-ice weighted components.
tau_x = (taux_oce*fract_oce_norm + taux_sic*fract_sic_norm)
tau_y = (tauy_oce*fract_oce_norm + tauy_sic*fract_sic_norm)

COTAUMOD = xr.DataArray (np.sqrt (tau_x*tau_x + tau_y*tau_y), dims=('y', 'x'))
COTAUMOD.attrs['long_name']   = 'Wind stress modulus'
COTAUMOD.attrs['units']       = 'Pa'
COTAUMOD.attrs['coordinates'] = "lat lon"

## Wind stress, from east/north components to geocentric
rad = np.deg2rad (1.0)
COTAUXXU = xr.DataArray (-tau_x * np.sin(rad * lon) - tau_y * np.sin(rad * lat) * np.cos(rad * lon), dims=('y', 'x'))
COTAUYYU = xr.DataArray ( tau_x * np.cos(rad * lon) - tau_y * np.sin(rad * lat) * np.sin(rad * lon), dims=('y', 'x'))
COTAUZZU = xr.DataArray ( tau_y * np.cos(rad * lat), dims=('y', 'x'))

## Value at North Pole
if IsUnstructured :
    ## Value at North Pole for DYNAMICO grid
    COTAUXXU = xr.where (lat >= 89.999, -tau_y, COTAUXXU)
    COTAUYYU = xr.where (lat >= 89.999,  tau_x, COTAUYYU)
    ## Value at South Pole for DYNAMICO grid ?
else :
    ## Value at North Pole for LMDZ lon/lat grid (first row)
    COTAUXXU[0, :] = -tau_x [0, 0]
    COTAUYYU[0, :] = -tau_y [0, 0]
    COTAUZZU[0, :] = 0.0
    ## Value at South Pole for LMDZ lon/lat grid (last row)
    # NOTE(review): COTAUZZU[-1, :] is deliberately left untouched, as in the
    # original -- confirm whether the south pole z component should be zeroed.
    COTAUXXU[-1, :] = -tau_x [-1, 0]
    COTAUYYU[-1, :] = -tau_y [-1, 0]

COTAUXXU.attrs['long_name'] = 'Wind stress in geocentric referential - x-component'
COTAUYYU.attrs['long_name'] = 'Wind stress in geocentric referential - y-component'
COTAUZZU.attrs['long_name'] = 'Wind stress in geocentric referential - z-component'
COTAUXXU.attrs['units'] = 'Pa'
COTAUYYU.attrs['units'] = 'Pa'
COTAUZZU.attrs['units'] = 'Pa'
COTAUXXU.attrs['coordinates'] = "lat lon"
COTAUYYU.attrs['coordinates'] = "lat lon"
COTAUZZU.attrs['coordinates'] = "lat lon"

# Stress on the V grid : same fields as on the U grid (shared objects).
COTAUXXV = COTAUXXU
COTAUYYV = COTAUYYU
COTAUZZV = COTAUZZU

## Check if bounds for lon/lat are present ; add them to the dataset if so.
ifbnds = ('bounds_lon' in d_In.data_vars) and ('bounds_lat' in d_In.data_vars)

## ===========================================================================
## Creates final Dataset
## ===========================================================================
lon = xr.DataArray (lon, dims=('y', 'x'))
lon.attrs['name']      = 'longitude'
lon.attrs['long_name'] = 'Longitude'
lon.attrs['units']     = 'degrees_east'
if ifbnds :
    lon.attrs['bounds'] = 'bounds_lon'

lat = xr.DataArray (lat, dims=('y', 'x'))
lat.attrs['name']      = 'latitude'
lat.attrs['long_name'] = 'Latitude'
lat.attrs['units']     = 'degrees_north'
if ifbnds :
    lat.attrs['bounds'] = 'bounds_lat'

if ifbnds :
    bounds_lon = d_In.bounds_lon.values.astype (np.float64)
    bounds_lat = d_In.bounds_lat.values.astype (np.float64)
    nvertex = bounds_lon.shape[-1]
    bounds_lon = xr.DataArray (np.reshape (bounds_lon, (ny, nx, nvertex)), dims=('y', 'x', 'nvertex'))
    bounds_lat = xr.DataArray (np.reshape (bounds_lat, (ny, nx, nvertex)), dims=('y', 'x', 'nvertex'))
    bounds_lon.attrs['units'] = 'degrees_east'
    bounds_lat.attrs['units'] = 'degrees_north'

# Prepare dictionary to export dataset to NetCDF file, with or without bounds.
dictdataset = {'lat': lat, 'lon': lon}
if ifbnds :
    dictdataset.update ({'bounds_lon': bounds_lon, 'bounds_lat': bounds_lat})
dictdataset.update ({
    'COTOTRAI': COTOTRAI, 'COTOTSNO': COTOTSNO, 'COTOTEVA': COTOTEVA,
    'COICEVAP': COICEVAP, 'COQSRMIX': COQSRMIX, 'COQNSMIX': COQNSMIX,
    'COSHFICE': COSHFICE, 'CONSFICE': CONSFICE, 'CODFLXDT': CODFLXDT,
    'COCALVIN': COCALVIN, 'COLIQRUN': COLIQRUN, 'COWINDSP': COWINDSP,
    'COTAUMOD': COTAUMOD, 'COTAUXXU': COTAUXXU, 'COTAUYYU': COTAUYYU,
    'COTAUZZU': COTAUZZU, 'COTAUXXV': COTAUXXV, 'COTAUYYV': COTAUYYV,
    'COTAUZZV': COTAUZZV})

d_Out = xr.Dataset (dictdataset)

## Global attributes (duplicated 'source' and 'group' assignments removed).
d_Out.attrs ['AssociatedFiles'] = myargs.input
d_Out.attrs ['Conventions']     = "CF-1.6"
d_Out.attrs ['source']          = "IPSL Earth system model"
d_Out.attrs ['group']           = "ICMC IPSL Climate Modelling Center"
d_Out.attrs ['Institution']     = "IPSL https://www.ipsl.fr"
d_Out.attrs ['Model']           = "IPSL CM6"
d_Out.attrs ['description']     = "Fields needed by OASIS-MCT"
d_Out.attrs ['timeStamp']       = time.asctime ()


def _try_attr (key, fn) :
    """Best-effort provenance attribute : ignore any failure (e.g. os.getlogin
    raising OSError when no controlling terminal is available)."""
    try :
        d_Out.attrs[key] = fn ()
    except Exception :
        pass


_try_attr ('directory', os.getcwd)
_try_attr ('HOSTNAME',  platform.node)
_try_attr ('LOGNAME',   os.getlogin)
_try_attr ('Python',    lambda: "Python version " + platform.python_version ())
_try_attr ('OS',        platform.system)
_try_attr ('release',   platform.release)
_try_attr ('hardware',  platform.machine)

d_Out.attrs ['SVN_Author']   = "$Author: omamce $"
d_Out.attrs ['SVN_Date']     = "$Date: 2022-07-06 11:06:07 +0200 (Wed, 06 Jul 2022) $"
d_Out.attrs ['SVN_Revision'] = "$Revision: 6190 $"
d_Out.attrs ['SVN_Id']       = "$Id: CalvingWeights.py 6190 2022-07-06 09:06:07Z omamce $"
d_Out.attrs ['SVN_HeadURL']  = "$HeadURL: svn+ssh://omamce@forge.ipsl.jussieu.fr/ipsl/forge/projets/igcmg/svn/TOOLS/MOSAIX/CalvingWeights.py $"

d_Out.to_netcdf (myargs.output, format=NcFormat)
d_Out.close ()

## ===========================================================================
##
##                               That's all folk's !!!
##
## ===========================================================================