Commit 9206ab28 by Benjamin Richaud

Initial commit for publication

import xarray as xr
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.path as mpath
import matplotlib.patches as mpatches
import cartopy.crs as ccrs
import cartopy.feature as cfeat
from pathlib import Path
from pyproj import CRS, Transformer
def define_sectors(ref="RH"):
"""
Define sectors, based on a reference. This works for the Arctic and the Antarctic regions.
The sectors can be defined according to:
- for Southern Hemisphere:
- ref="RH" or "RaphaelHobbs2014" (recommended, default): Raphael & Hobbs(2014, doi:10.1002/2014GL060365), based on ice variance.
- ref="Z83" or "ZwallyEtAl1983": Zwally et al. (1983, see also doi:10.1029/2000JC000733), based on geography (same definition as Cavalieri & Parkinson [2008], e.g.)
- for Northern Hemisphere:
- ref="A21" or "ArthunEtAl2021": Arthun et al. (2021, doi:10.1029/2020GL090825), only covers high Arctic
- ref="K16" or "KoenigkEtAl2016": Koenigk et al. (2016, doi:10.1007/s00382-015-2586-1), covers sub-Arctic seas
- ref="NSIDC" (recommended): NSIDC new (as of 2023) definition (NSIDC Special report 25, figure 3)
Returns a dictionary with names of the sectors as keys and a 2-items list of the bounding longitudes (E and W) for each sector.
V0.3: Includes the 'nsidc' definition.
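    Example (illustrative; values taken from the Z83 definition below):
        - define_sectors(ref="Z83")['Ross'] returns [[-90, -50], [160, -130]],
          i.e. the [lat_min, lat_max] and [lon_min, lon_max] bounds of the Ross sector (which crosses the dateline).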
"""
# Initialize dictionary
sectors = dict()
##------ Southern Regions -----------
# Raphael & Hobbs [2014]: Ice variance-based definition (recommanded):
if ref in ["RH","RaphaelHobbs2014"]:
# Lon min and max of each sector
sectors['Bellinghausen'] = [[-90,-50],[-110, -70]]
sectors['Weddell'] = [[-90,-50],[-70, -15]]
sectors['KingHakon'] = [[-90,-50],[-15, 70]]
sectors['EAntarctic'] = [[-90,-50],[70, 165]]
sectors['RossAmundsen'] = [[-90,-50],[165, -110]]
# Zwally et al. [1983]: Classic, geography-based definition:
elif ref in ["Z83","ZwallyEtAl1983"]:
# Lon min and max of each sector
sectors['BAS'] = [[-90,-50],[-130, -60]]
sectors['Weddell'] = [[-90,-50],[-60, 20]]
sectors['Indian'] = [[-90,-50],[20, 90]]
sectors['WPacific'] = [[-90,-50],[90, 160]]
sectors['Ross'] = [[-90,-50],[160, -130]]
##------ Northern Regions -----------
# Arthun et al. [2021] definition: High Arctic, excludes Labrador, Greenland Seas
elif ref in ["A21","ArthunEtAl2021"]:
# Lon min and max of each sector
sectors['Central'] = [[80,90],[-180,180]]
sectors['Barents'] = [[65,80],[15, 55]]
sectors['Kara'] = [[65,80],[55, 100]]
sectors['Laptev'] = [[65,80],[100, 150]]
sectors['EastSiberian'] = [[65,80],[150, 180]]
sectors['Chukchi'] = [[65,80],[-180, -160]]
sectors['Beaufort'] = [[65,80],[-160, -120]]
    # Koenigk et al. [2016] definition: Includes the Labrador and Greenland Seas, but fewer sectors in the Siberian Seas.
elif ref in ["K16","KoenigkEtAl2016"]:
# Lon min and max of each sector
sectors['Central'] = [[80,90],[-180,180]]
sectors['LabBaffin'] = [[50,80],[-100, -40]]
sectors['GreenlandSea'] = [[50,80],[-40, 15]]
sectors['BarentsKara'] = [[65,80],[15, 100]]
sectors['SiberianSeas'] = [[65,80],[100, 180]]
sectors['Chukchi'] = [[65,80],[-180,-160]]
sectors['Beaufort'] = [[65,80],[-160,-100]]
# NSIDC's 2023 definition: covers the whole Arctic, many sectors (18!)
elif ref.lower() in ["nsidc"]:
# Different approach: the sectors are based on a mask.
flags = np.arange(1,18+1)
secNames = ['Central', 'Beaufort', 'Chukchi', 'EastSiberian', 'Laptev', 'Kara', 'Barents',
'EastGreenland', 'BaffinLab', 'GoStLawrence', 'Hudson', 'CAA',
'Bering', 'Okhotsk','SoJapan', 'BohaiYellow', 'Baltic', 'GoAlaska']
# Associate the flags to the sector names, to match the mask.
sectors = dict(zip(secNames, flags))
return sectors
def interp_mask2grid(target_grid=None,
maskFile=Path.home().joinpath('shared/NSIDC-0780_SeaIceRegions_PS-N3.125km_v1_wLatLon.nc')):
"""
This function creates a region mask based on the NSIDC definition for the Arctic Ocean.
It loads the NSIDC mask and interpolates it onto the provided grid.
NOTA BENE: this function uses xesmf, which is only available on `coriolis` at the moment.
For the ORCA1 and ORCA025 grids, masks have been processed and saved, for efficiency and ease of access.
If any of those grids are requested by providing a string, the function simply loads the file.
Inputs: - target_grid: the target grid on which to interpolate the NSIDC mask.
            It can be an xr.Dataset containing lat and lon variables defining the grid,
            or a string naming a pre-processed grid:
            if target_grid='ORCA1_nh', load the pre-computed mask for the Northern Hemisphere ORCA1 configuration;
            if target_grid='ORCA025_nh', load the pre-computed mask for the Northern Hemisphere ORCA025 configuration.
#TODO: need to add full ORCA1 and ORCA025, as well as ORCA12 and ORCA12_nh
-maskFile: path and name of file containing the NSIDC mask.
    Output: xr.Dataset containing the NSIDC mask interpolated onto the target grid.
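    Example (illustrative; assumes the pre-processed files are available under ~/shared):
        - interp_mask2grid(target_grid='ORCA1_nh') simply loads the pre-computed ORCA1 Northern Hemisphere mask;
        - interp_mask2grid(target_grid=my_grid), where my_grid is a hypothetical xr.Dataset with lat/lon coordinates,
          interpolates the NSIDC mask onto that grid (requires xesmf).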
"""
# Before calculating weights, check if this is one of the pre-calculated situations
if isinstance(target_grid,str):
if target_grid == 'ORCA1_nh':
ds_mask_interp = xr.open_dataset(Path.home().joinpath('shared/NSIDCRegionsMask_ORCA1_nh.nc'))
elif target_grid == 'ORCA025_nh':
ds_mask_interp = xr.open_dataset(Path.home().joinpath('shared/NSIDCRegionsMask_ORCA025_nh.nc'))
else:
print("ERROR: the target grid could not be recognized.")
return
else:
# First of all, load the mask
ds_mask = xr.open_dataset(maskFile)
# ------------- If no lat-lon in the ds_mask, calculate and add them -------------
if 'lat' not in list(ds_mask.coords):
# Convert from projected coordinates to lat-lon coordinates
# A first issue is that the mask is provided in North Polar Stereographic projection
# Extract the x and y (projection) coordinates and grid them (from 1D to 2D)
x_mask, y_mask = np.meshgrid(ds_mask.x, ds_mask.y)
# Define the projection and geographic systems according to CRS conventions.
orig_crs = CRS.from_epsg(3413) # North Polar Stereographic from NSIDC
target_crs = CRS.from_epsg(4326) # Geographic coordinate system (i.e. lat-lon)
# Use pyproj.Transformer to convert from NSIDC projection to lat-lon coordinates
transformer = Transformer.from_crs(orig_crs, target_crs)
lat_src, lon_src = transformer.transform(x_mask, y_mask)
# Assign the new coordinates to the dataset
ds_mask = ds_mask.assign_coords({'lon': (['y', 'x'], lon_src), 'lat': (['y', 'x'], lat_src)})
# ------------- Bulk of the function: interpolate the mask to the ds grid -------------
        # xesmf is required to build the regridder but is not installed everywhere (e.g. not on `cyclone`);
        # the ImportError is left to propagate so that callers (see plot_sectormaps) can catch it.
        import xesmf as xe # type: ignore
        # Create a bilinear regridder from the NSIDC grid to the target grid
        Regridder = xe.Regridder(ds_mask, target_grid, method='bilinear')
        # Now apply this regridder to the mask
        ds_mask_interp = Regridder(ds_mask)
# return the interpolated mask.
return ds_mask_interp
def groupby_sectors(ds, ref=None, target_gd=None):
"""
    Define the sectors and group a Dataset or DataArray by those sectors.
This can then be used to perform calculations such as ice extent or ice area for each sector.
Inputs: - ds: xr.Dataset or xr.DataArray with **lat** and **lon** coordinates, to group by sector.
This ds should be weighted by the cell areas, if needed.
- ref: string, reference of the sectors to use. Can be any of the following options:
'Z83' or 'ZwallyEtAl1983' (for Zwally et al. (1983), AA),
            'RH' or 'RaphaelHobbs2014' (for Raphael and Hobbs (2014), AA),
            'A21' or 'ArthunEtAl2021' (for Arthun et al. (2021), Arctic),
            'K16' or 'KoenigkEtAl2016' (for Koenigk et al. (2016), Arctic),
'NSIDC' or 'nsidc' (for NSIDC mask, new as of 2023, Arctic).
If None (not recommended), then attempts to figure out if `ds` covers Arctic or AA,
then use either the NSIDC mask for Arctic or the RH ref for the AA.
            - target_gd: string, the target grid ('ORCA1_nh' or 'ORCA025_nh'). Only used if ref='NSIDC'.
              This is optional, but speeds up the function greatly,
              as it loads an already processed mask on the ORCA1_nh or ORCA025_nh grid instead of regridding.
Outputs: A xr.Dataset.groupby object on which computations can be applied.
Example:
- ds_grouped = groupby_sectors(ds_2Bgrouped, ref='RH'):
Simply apply the Raphael & Hobbs (2014) definition on ds_2Bgrouped.
        - ds_grouped = groupby_sectors(ds_2Bgrouped, ref='nsidc', target_gd='ORCA025_nh'):
            Use the precalculated mask on the ORCA025_nh (>50°N) grid to apply the NSIDC sector definition.
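        - sector_area = groupby_sectors(da_icedArea, ref='RH').sum():
            Illustrative sketch only: da_icedArea stands for a hypothetical DataArray of ice-covered grid-cell areas;
            summing within each group then yields one total area per Raphael & Hobbs (2014) sector.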
    V0: allows the use of the NSIDC mask, in combination with the `define_sectors` function.
"""
# ------------------ Some Tests ---------------------
# 1. Test that the correct coordinates are there: lat and lon
if 'lat' not in ds.coords: # if lat is not there, check if it can be latitude
if 'latitude' in ds.coords:
ds = ds.rename({'latitude':'lat'})
else:
raise AttributeError("There is no `lat` coordinate in the input. Please rename the coordinates.")
if 'lon' not in ds.coords: # if lon is not there, check if it can be longitude
if 'longitude' in ds.coords:
ds = ds.rename({'longitude':'lon'})
else:
raise AttributeError("There is no `lon` coordinate in the input. Please rename the coordinates.")
# Extract lat and lon to create the sector coordinate
lat, lon = ds['lat'], ds['lon']
# 2. Test longitudes range: it should be [-180,180]. If [0,360], need to wrap
if np.any((ds.lon>180) & (ds.lon<=360)):
lon = (ds.lon + 180) % 360 - 180
# 3. Test if the reference was provided. If not, attempt to assign it.
if ref is None:
if (ds.lat>=0).all():
print("Use NSIDC definition for Northern hemisphere")
ref='NSIDC'
elif (ds.lat<=0).all():
print("Use Raphael & Hobbs [2014] definition for Southern hemisphere")
ref='RH'
else:
print("Could not determine correct hemisphere, use Raphael & Hobbs [2014] definition for Antarctica")
ref='RH'
    # Create the dictionary mapping sector names to lat-lon bounds (or mask flags for NSIDC).
sectors = define_sectors(ref=ref)
# Initialize a Sector coordinate. Before doing so, need to retrieve names of dimensions
latdn, londn = list(lat.dims), list(lon.dims)
# Check if lat-lon are 1D or 2D, by checking if lat and lon have the same dimensions.
if latdn == londn: # Multidimensional array:
dimnames = latdn
CoordSec = xr.zeros_like(lat) * np.nan
else: # One-dimensional lat and lon coordinates
dimnames = latdn + londn
CoordSec = xr.DataArray(data=np.ones([len(lat),len(lon)]) * np.nan,
dims=dimnames,
coords={'lat':('lat',lat.data), 'lon':('lon',lon.data)})
    # First case: NSIDC. Need to load the mask, then apply it to create the sectors
if ref.lower() in ['nsidc']:
# If no target grid provided, just use the ds.
if target_gd is None:
target_gd=ds # Reminder: in `interp_mask2grid`, target_grid can be a string or a ds.
# Use `interp_mask2grid` to load or calculate the appropriate mask
ds_mask = interp_mask2grid(target_grid=target_gd)
# Then mask the CoordSec appropriately:
# loop over sectors, use the `where` method to replace flag by sector name.
for n, (secName, secFlag) in enumerate(sectors.items()):
CoordSec = xr.where(ds_mask.sea_ice_region==secFlag, secName, CoordSec)
# Second case: all other sectors, defined with hard coded lat and lon.
else:
# Loop over the `sectors`, use the `where` function to replace the NaNs by the name of
# the sector whenever it matches the lat-lon conditions.
for n, (secName, [llat, llon]) in enumerate(sectors.items()):
# Need to also deal with dateline crossing: add a simple if condition
if llon[0] > llon[1]:
CoordSec = xr.where(((lat>=llat[0]) & (lat<llat[1]) & ((lon>=llon[0]) | (lon<llon[1]))).data,
secName, CoordSec)
else:
CoordSec = xr.where(((lat>=llat[0]) & (lat<llat[1]) & (lon>=llon[0]) & (lon<llon[1])).data,
secName, CoordSec)
# We have the CoordSec variable that we can use as a coordinate to groupby.
# Copy initial dataset to avoid changing the input
ds_out = ds.copy()
# Assign the created array as a coordinate to this dataArray
ds_out = ds_out.assign_coords(sector=(dimnames, CoordSec.data))
# Done! Isn't that amazing?
return ds_out.groupby('sector')
def plot_sectormaps():
"""
Plot a 5-panel figure which illustrates the different sector definitions available.
This is called by default when executing the module.
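    Example (illustrative; the output file name is hypothetical):
        fig = plot_sectormaps()
        fig.savefig('sector_definitions.png')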
"""
### Some preparatory stuff: define sectors, prepare the circular frame, etc.
# Prep circular boundary
r_extent = 4651194.319 * 1.005
circle_path = mpath.Path.unit_circle()
circle_path = mpath.Path(circle_path.vertices.copy() * r_extent, circle_path.codes.copy())
# Initialize the figure
fig = plt.figure(figsize=(12,8))
gs = plt.GridSpec(2, 3, figure=fig)
#--------------Create sectors-----------
# First create a dummy DataArray with 1°x1° resolution grid
lat, lon = np.arange(-90,90), np.arange(-180,180) # Lat-lon vectors
latm, lonm = np.meshgrid(lat,lon) # create a grid mesh from the vectors
# Create a DataArray filled with 0s
ds_map = xr.DataArray(data=np.zeros_like(latm),
coords={'lat':(['y','x'], latm), 'lon':(['y','x'], lonm)},dims=['y','x'])
# Second, apply the sector definitions to get a groupby, then ungroup everything to get one grid
    # This is a bit convoluted, but hey, it works!
listRefs = ['K16', 'A21', 'NSIDC', 'Z83', 'RH']
listFullNames = ['Koenigk et al. (2016)', 'Arthun et al. (2021)', 'NSIDC (2023)',
'Zwally et al. (1983)', 'Raphael and Hobbs (2014)']
for n, ref in enumerate(listRefs):
# Get a groupby dataArray, and ungroup it to retrieve a mask
try: # If xesmf exists, create or load the regridder and regrid
groupd = groupby_sectors(ds_map, ref=ref)
except ImportError: # If the import failed, send an error
print("ERROR: xesmf could not be loaded, reference {0:s} skipped.".format(ref))
continue
ungroupd = xr.concat([groupd[sec] for sec in groupd.groups.keys()], dim='stacked_y_x').unstack()
# Extract the list of sectors. Could simply call `define_sectors`, but then I wouldn't be
# sure that the order of the names matches the categories.
sects, cat = np.unique(ungroupd.sector.astype('str'), return_inverse=True)
# Use the categorical output to create a numbered sector mask
sectorsCat = xr.DataArray(cat.reshape(ungroupd.sector.shape), dims=ungroupd.dims, coords=ungroupd.coords)
# Some processing of this dataarray: nan out the nans, extrapolate lat and lon to be able to plot
sectorsCat = sectorsCat.where(~ungroupd.isnull())
sectorsCat['lon'] = sectorsCat.lon.interpolate_na(dim='y',fill_value="extrapolate")
sectorsCat['lat'] = sectorsCat.lat.interpolate_na(dim='y',fill_value="extrapolate")
# Also some cleaning of the sector names to remove the 'nan' sector.
sects_clean = sects[~(sects=='nan')]
nSects = len(sects_clean)
# ------------------- Finally, the map! -----------------
# Before plotting the axes, need to assign the projection and extent
if ref in ['K16', 'A21', 'NSIDC']:
proj = ccrs.NorthPolarStereo()
extent = [-180, 180, 50, 90]
if ref=='NSIDC':
cmap=plt.colormaps['tab20']
else:
cmap = plt.colormaps['tab10']
elif ref in ['Z83', 'RH']:
proj = ccrs.SouthPolarStereo()
extent = [-180, 180, -90, -50]
cmap = plt.colormaps['Dark2']
# Create the axis
ax = fig.add_subplot(gs[n//3,n%3],projection=proj)
# Plot the sectors
c0 = ax.pcolormesh(sectorsCat.lon, sectorsCat.lat, sectorsCat,
transform=ccrs.PlateCarree(),shading='nearest',
cmap=cmap.resampled(nSects), vmin= -0.5, vmax=nSects-0.5)
# Create the colorbar
cbar = plt.colorbar(c0, ax=ax, ticks=np.arange(0, nSects-0.5, 1))
# Assign the right labels to the sectors
cbar.set_ticklabels(sects_clean)
# Some generic formatting of the map
ax.set_extent(extent, ccrs.PlateCarree()) # Restrain extent
ax.add_feature(cfeat.LAND,zorder=3) # Put the land above all
ax.coastlines() # Also draw coastlines in black, and gridlines below
ax.gridlines(draw_labels=True, rotate_labels=False, xlocs=range(-180,180,30), ylocs=range(-90,90,10))
ax.set_boundary(circle_path) # Create a round boundary instead of a square one. Just aesthetic.
ax.set_frame_on(False) # hide the boundary frame
# Now can put the title on top of the panel
ax.set_title(listFullNames[n])
    # Tighten the layout so the panels fit better.
fig.tight_layout()
return fig
if __name__ == "__main__":
    print("Available sector definitions:")
    plot_sectormaps()
    plt.show()