fix 369 #370

Status: Open · wants to merge 2 commits into main

92 changes: 46 additions & 46 deletions docs/postprocessing.ipynb

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions docs/tutorial.ipynb
@@ -5253,7 +5253,7 @@
"<a id=\"units\"></a>\n",
"## Consistent CF bounds\n",
"Many of the CMIP6 models come with 'bound' dataarrays, that describe the extent of the finite grid cells.\n",
"For the longitude and latitude there are two conventions: 2-element 'bounds' (describing the width of a cell along the center) and 4 element 'verticies' (describing the 4 corner coordinates of the cell).\n",
"For the longitude and latitude there are two conventions: 2-element 'bounds' (describing the width of a cell along the center) and 4 element 'vertices' (describing the 4 corner coordinates of the cell).\n",
"`xmip` automatically renames these variables consistently and converts them so that every dataset has both conventions available.\n"
]
},
@@ -5376,11 +5376,11 @@
"Dimensions: (bnds: 2, lev: 60, member_id: 1, time: 1980, vertex: 4, x: 320, y: 384)\n",
"Coordinates:\n",
" lat (y, x) float64 dask.array<chunksize=(384, 320), meta=np.ndarray>\n",
" lat_verticies (y, x, vertex) float32 dask.array<chunksize=(384, 320, 4), meta=np.ndarray>\n",
" lat_vertices (y, x, vertex) float32 dask.array<chunksize=(384, 320, 4), meta=np.ndarray>\n",
" * lev (lev) float64 500.0 1.5e+03 2.5e+03 ... 5.125e+05 5.375e+05\n",
" lev_bounds (lev, bnds) float32 dask.array<chunksize=(60, 2), meta=np.ndarray>\n",
" lon (y, x) float64 dask.array<chunksize=(384, 320), meta=np.ndarray>\n",
" lon_verticies (y, x, vertex) float32 dask.array<chunksize=(384, 320, 4), meta=np.ndarray>\n",
" lon_vertices (y, x, vertex) float32 dask.array<chunksize=(384, 320, 4), meta=np.ndarray>\n",
" * y (y) float64 -79.22 -78.69 -78.15 -77.62 ... 89.11 89.66 89.71\n",
" * x (x) float64 1.062 2.187 3.312 4.437 ... 357.7 358.8 359.9\n",
" * time (time) float64 6.749e+05 6.749e+05 ... 7.351e+05 7.351e+05\n",
@@ -5454,8 +5454,8 @@
" lon (y, x) float32 dask.array<chunksize=(291, 360), meta=np.ndarray>\n",
" * time (time) int64 0 708 1416 2148 ... 1443192 1443924 1444656\n",
" time_bounds (time, bnds) float64 dask.array<chunksize=(1980, 2), meta=np.ndarray>\n",
" lat_verticies (y, x, vertex) float32 dask.array<chunksize=(291, 360, 4), meta=np.ndarray>\n",
" lon_verticies (y, x, vertex) float32 dask.array<chunksize=(291, 360, 4), meta=np.ndarray>\n",
" lat_vertices (y, x, vertex) float32 dask.array<chunksize=(291, 360, 4), meta=np.ndarray>\n",
" lon_vertices (y, x, vertex) float32 dask.array<chunksize=(291, 360, 4), meta=np.ndarray>\n",
" * bnds (bnds) int64 0 1\n",
" * vertex (vertex) int64 0 1 2 3\n",
" lon_bounds (bnds, y, x) float32 dask.array<chunksize=(1, 291, 360), meta=np.ndarray>\n",
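(Aside: a minimal, self-contained sketch of the two conventions the tutorial cell above describes, with toy values only, using the post-rename `lon_vertices` spelling and the dimension layouts shown in the dataset repr.)

```python
import xarray as xr

# One grid cell centered at lon = 0.5 (toy example, not tutorial data).
lon = xr.DataArray([[0.5]], dims=["y", "x"])

# 2-element 'bounds': the two cell edges through the cell center, dims (bnds, y, x).
lon_bounds = xr.DataArray([[[0.0]], [[1.0]]], dims=["bnds", "y", "x"])

# 4-element 'vertices': the four corner coordinates of the cell, dims (y, x, vertex).
lon_vertices = xr.DataArray([[[0.0, 0.0, 1.0, 1.0]]], dims=["y", "x", "vertex"])

ds = xr.Dataset(
    coords={"lon": lon, "lon_bounds": lon_bounds, "lon_vertices": lon_vertices}
)
```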
34 changes: 17 additions & 17 deletions tests/test_grids.py
@@ -68,7 +68,7 @@ def _test_data(grid_label="gn", z_axis=True):
[_add_small_rand(lev - 0.5), _add_small_rand(lev + 0.5)], dim="bnds"
)

- lon_verticies = xr.concat(
+ lon_vertices = xr.concat(
[
_add_small_rand(lon_bounds_e),
_add_small_rand(lon_bounds_e),
@@ -77,7 +77,7 @@ def _test_data(grid_label="gn", z_axis=True):
],
dim="vertex",
)
- lat_verticies = xr.concat(
+ lat_vertices = xr.concat(
[
_add_small_rand(lat_bounds_s),
_add_small_rand(lat_bounds_n),
@@ -94,8 +94,8 @@ def _test_data(grid_label="gn", z_axis=True):
lat=lat,
lon_bounds=lon_bounds,
lat_bounds=lat_bounds,
- lon_verticies=lon_verticies,
- lat_verticies=lat_verticies,
+ lon_vertices=lon_vertices,
+ lat_vertices=lat_vertices,
)

if z_axis:
@@ -115,7 +115,7 @@ def test_parse_bounds_vertex():
data = np.random.rand(4)

da = xr.DataArray(
- data, dims=["vertex"], coords={"lon_verticies": lon_b, "lat_verticies": lat_b}
+ data, dims=["vertex"], coords={"lon_vertices": lon_b, "lat_vertices": lat_b}
)
test = _parse_bounds_vertex(da, "vertex", position=[0, 3])
print(test)
@@ -214,15 +214,15 @@ def test_recreate_metrics(xshift, yshift, z_axis):
if yshift == "left":
# dx
lon0, lon1 = grid.axes["X"]._get_neighbor_data_pairs(
- _interp_vertex_to_bounds(ds_metrics.lon_verticies, "y").isel(bnds=0),
+ _interp_vertex_to_bounds(ds_metrics.lon_vertices, "y").isel(bnds=0),
xshift,
)
lat0, lat1 = grid.axes["X"]._get_neighbor_data_pairs(
ds_metrics.lat_bounds.isel(bnds=0), xshift
)
elif yshift == "right":
lon0, lon1 = grid.axes["X"]._get_neighbor_data_pairs(
- _interp_vertex_to_bounds(ds_metrics.lon_verticies, "y").isel(bnds=1),
+ _interp_vertex_to_bounds(ds_metrics.lon_vertices, "y").isel(bnds=1),
xshift,
)
lat0, lat1 = grid.axes["X"]._get_neighbor_data_pairs(
@@ -234,15 +234,15 @@ def test_recreate_metrics(xshift, yshift, z_axis):
if xshift == "left":
# dx
lat0, lat1 = grid.axes["Y"]._get_neighbor_data_pairs(
- _interp_vertex_to_bounds(ds_metrics.lat_verticies, "x").isel(bnds=0),
+ _interp_vertex_to_bounds(ds_metrics.lat_vertices, "x").isel(bnds=0),
yshift,
)
lon0, lon1 = grid.axes["Y"]._get_neighbor_data_pairs(
ds_metrics.lon_bounds.isel(bnds=0), yshift
)
elif xshift == "right":
lat0, lat1 = grid.axes["Y"]._get_neighbor_data_pairs(
- _interp_vertex_to_bounds(ds_metrics.lat_verticies, "x").isel(bnds=1),
+ _interp_vertex_to_bounds(ds_metrics.lat_vertices, "x").isel(bnds=1),
yshift,
)
lon0, lon1 = grid.axes["Y"]._get_neighbor_data_pairs(
@@ -255,12 +255,12 @@ def test_recreate_metrics(xshift, yshift, z_axis):
else:
vertex_points = [2, 3]
lon0, lon1 = (
- ds_metrics.lon_verticies.isel(vertex=vertex_points[0]),
- ds_metrics.lon_verticies.isel(vertex=vertex_points[1]),
+ ds_metrics.lon_vertices.isel(vertex=vertex_points[0]),
+ ds_metrics.lon_vertices.isel(vertex=vertex_points[1]),
)
lat0, lat1 = (
- ds_metrics.lat_verticies.isel(vertex=vertex_points[0]),
- ds_metrics.lat_verticies.isel(vertex=vertex_points[1]),
+ ds_metrics.lat_vertices.isel(vertex=vertex_points[0]),
+ ds_metrics.lat_vertices.isel(vertex=vertex_points[1]),
)
dy_gx_expected = distance(lon0, lat0, lon1, lat1)

@@ -269,12 +269,12 @@ def test_recreate_metrics(xshift, yshift, z_axis):
else:
vertex_points = [1, 2]
lon0, lon1 = (
- ds_metrics.lon_verticies.isel(vertex=vertex_points[0]),
- ds_metrics.lon_verticies.isel(vertex=vertex_points[1]),
+ ds_metrics.lon_vertices.isel(vertex=vertex_points[0]),
+ ds_metrics.lon_vertices.isel(vertex=vertex_points[1]),
)
lat0, lat1 = (
- ds_metrics.lat_verticies.isel(vertex=vertex_points[0]),
- ds_metrics.lat_verticies.isel(vertex=vertex_points[1]),
+ ds_metrics.lat_vertices.isel(vertex=vertex_points[0]),
+ ds_metrics.lat_vertices.isel(vertex=vertex_points[1]),
)
dx_gy_expected = distance(lon0, lat0, lon1, lat1)

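(Aside: the vertex bookkeeping in `test_recreate_metrics` is easier to follow with the corner layout written out. The index-to-corner mapping is inferred from the vertex offsets and sort assertions in tests/test_preprocessing.py further down; treat it as an illustration, not documented API.)

```python
# Corner layout implied by the tests (0 = SW, 1 = NW, 2 = NE, 3 = SE):
#
#     1 (NW) ----- 2 (NE)
#     |               |
#     0 (SW) ----- 3 (SE)
#
# test_recreate_metrics pairs corners into cell edges. The visible branches use
# vertex_points = [2, 3] (eastern edge) for dy_gx and [1, 2] (northern edge)
# for dx_gy; the collapsed branches presumably use the opposite edges.
EAST_EDGE = (2, 3)   # dy_gx, visible else-branch
NORTH_EDGE = (1, 2)  # dx_gy, visible else-branch
WEST_EDGE = (0, 1)   # dy_gx, collapsed branch (assumption)
SOUTH_EDGE = (0, 3)  # dx_gy, collapsed branch (assumption)
```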
28 changes: 14 additions & 14 deletions tests/test_preprocessing.py
@@ -242,8 +242,8 @@ def _lons_parsed_make_sense(
"lon_bounds",
"lat_bounds",
"time_bounds",
"lat_verticies",
"lon_verticies",
"lat_vertices",
"lon_vertices",
],
)
def test_correct_coordinates(coord):
@@ -293,8 +293,8 @@ def test_parse_lon_lat_bounds():
)

ds_test = parse_lon_lat_bounds(ds)
assert "lon_verticies" in ds_test.coords
assert "lat_verticies" in ds_test.coords
assert "lon_vertices" in ds_test.coords
assert "lat_vertices" in ds_test.coords

# introduce a time dimension
for wrong_coord in ["lon_bounds", "lat_bounds"]:
@@ -386,7 +386,7 @@ def test_maybe_convert_bounds_to_vertex():
lat_v = lat_v.reset_coords(drop=True)

ds_expected = ds.copy()
- ds_expected = ds_expected.assign_coords(lon_verticies=lon_v, lat_verticies=lat_v)
+ ds_expected = ds_expected.assign_coords(lon_vertices=lon_v, lat_vertices=lat_v)

xr.testing.assert_identical(ds_expected, maybe_convert_bounds_to_vertex(ds))
# check that datasets that already conform to this are not changed
@@ -396,7 +396,7 @@


def test_maybe_convert_vertex_to_bounds():
- # create a ds with verticies
+ # create a ds with vertices
lon = np.arange(0, 10)
lat = np.arange(20, 30)
data = np.random.rand(len(lon), len(lat))
@@ -406,10 +406,10 @@ def test_maybe_convert_vertex_to_bounds():
ds.coords["lon"] = ds.x * xr.ones_like(ds.y)
ds.coords["lat"] = xr.ones_like(ds.x) * ds.y

ds.coords["lon_verticies"] = (
ds.coords["lon_vertices"] = (
xr.DataArray([-0.1, -0.1, 0.1, 0.1], dims=["vertex"]) + ds["lon"]
)
ds.coords["lat_verticies"] = (
ds.coords["lat_vertices"] = (
xr.DataArray([-0.1, 0.1, 0.1, -0.1], dims=["vertex"]) + ds["lat"]
)
ds = promote_empty_dims(ds)
@@ -459,24 +459,24 @@ def test_sort_vertex_order():
.expand_dims(["x", "y"])
.to_dataset(name="test")
)
da = da.assign_coords({"lon_verticies": lon_v, "lat_verticies": lat_v})
da = da.assign_coords({"lon_vertices": lon_v, "lat_vertices": lat_v})

da_sorted = sort_vertex_order(da).squeeze()
- new = np.vstack((da_sorted.lon_verticies, da_sorted.lat_verticies)).T
+ new = np.vstack((da_sorted.lon_vertices, da_sorted.lat_vertices)).T

np.testing.assert_allclose(new, ordered_points)

- assert da_sorted.lon_verticies.isel(vertex=0) < da_sorted.lon_verticies.isel(
+ assert da_sorted.lon_vertices.isel(vertex=0) < da_sorted.lon_vertices.isel(
vertex=3
)
- assert da_sorted.lon_verticies.isel(vertex=1) < da_sorted.lon_verticies.isel(
+ assert da_sorted.lon_vertices.isel(vertex=1) < da_sorted.lon_vertices.isel(
vertex=2
)

- assert da_sorted.lat_verticies.isel(vertex=0) < da_sorted.lat_verticies.isel(
+ assert da_sorted.lat_vertices.isel(vertex=0) < da_sorted.lat_vertices.isel(
vertex=1
)
- assert da_sorted.lat_verticies.isel(vertex=3) < da_sorted.lat_verticies.isel(
+ assert da_sorted.lat_vertices.isel(vertex=3) < da_sorted.lat_vertices.isel(
vertex=2
)

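(Aside: a runnable sketch of the bounds/vertex round trip these tests exercise, rebuilt from `test_maybe_convert_vertex_to_bounds` above. The `xmip.preprocessing` import path is an assumption based on this test module's subject; the helper calls themselves appear verbatim in the tests.)

```python
import numpy as np
import xarray as xr

from xmip.preprocessing import (  # assumed import path
    maybe_convert_bounds_to_vertex,
    maybe_convert_vertex_to_bounds,
    promote_empty_dims,
)

# Toy dataset with 4-corner vertices, as built in test_maybe_convert_vertex_to_bounds.
lon = np.arange(0, 10)
lat = np.arange(20, 30)
data = np.random.rand(len(lon), len(lat))
ds = xr.DataArray(data, coords=[("x", lon), ("y", lat)]).to_dataset(name="data")
ds.coords["lon"] = ds.x * xr.ones_like(ds.y)
ds.coords["lat"] = xr.ones_like(ds.x) * ds.y
ds.coords["lon_vertices"] = (
    xr.DataArray([-0.1, -0.1, 0.1, 0.1], dims=["vertex"]) + ds["lon"]
)
ds.coords["lat_vertices"] = (
    xr.DataArray([-0.1, 0.1, 0.1, -0.1], dims=["vertex"]) + ds["lat"]
)
ds = promote_empty_dims(ds)

# vertices -> bounds, then bounds -> vertices; afterwards both conventions
# should be present (that is what the tutorial text above promises).
ds_roundtrip = maybe_convert_bounds_to_vertex(maybe_convert_vertex_to_bounds(ds))
print("lon_bounds" in ds_roundtrip.coords, "lon_vertices" in ds_roundtrip.coords)
```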
34 changes: 17 additions & 17 deletions tests/test_preprocessing_cloud.py
@@ -308,7 +308,7 @@ def test_check_dim_coord_values(

# this fixture has to be redefined every time to account for different fail cases for each test
@pytest.fixture
- def spec_check_bounds_verticies(request, gl, vi, ei, cat):
+ def spec_check_bounds_vertices(request, gl, vi, ei, cat):
expected_failures = (
not_supported_failures
+ intake_concat_failures
@@ -329,15 +329,15 @@ def spec_check_bounds_verticies(request, gl, vi, ei, cat):
return request


@pytest.mark.parametrize("spec_check_bounds_verticies", test_models, indirect=True)
def test_check_bounds_verticies(spec_check_bounds_verticies):
@pytest.mark.parametrize("spec_check_bounds_vertices", test_models, indirect=True)
def test_check_bounds_vertices(spec_check_bounds_vertices):
(
source_id,
variable_id,
experiment_id,
grid_label,
catalog,
- ) = spec_check_bounds_verticies.param
+ ) = spec_check_bounds_vertices.param
ds, cat = data(
source_id, variable_id, experiment_id, grid_label, True, catalog=catalog
)
@@ -350,8 +350,8 @@ def test_check_bounds_verticies(spec_check_bounds_verticies):
if "vertex" in ds.dims:
np.testing.assert_allclose(ds.vertex.data, np.arange(4))

- # Check for existing bounds and verticies
- for co in ["lon_bounds", "lat_bounds", "lon_verticies", "lat_verticies"]:
+ # Check for existing bounds and vertices
+ for co in ["lon_bounds", "lat_bounds", "lon_vertices", "lat_vertices"]:
assert co in ds.coords
# make sure that all other dims are eliminated from the bounds.
assert (set(ds[co].dims) - set(["bnds", "vertex"])) == set(["x", "y"])
@@ -361,18 +361,18 @@
# things are still weird.
test_ds = ds.where(abs(ds.lat) <= 40, drop=True)

-    vertex_lon_diff1 = test_ds.lon_verticies.isel(
-        vertex=3
-    ) - test_ds.lon_verticies.isel(vertex=0)
-    vertex_lon_diff2 = test_ds.lon_verticies.isel(
-        vertex=2
-    ) - test_ds.lon_verticies.isel(vertex=1)
-    vertex_lat_diff1 = test_ds.lat_verticies.isel(
-        vertex=1
-    ) - test_ds.lat_verticies.isel(vertex=0)
-    vertex_lat_diff2 = test_ds.lat_verticies.isel(
-        vertex=2
-    ) - test_ds.lat_verticies.isel(vertex=3)
+    vertex_lon_diff1 = test_ds.lon_vertices.isel(vertex=3) - test_ds.lon_vertices.isel(
+        vertex=0
+    )
+    vertex_lon_diff2 = test_ds.lon_vertices.isel(vertex=2) - test_ds.lon_vertices.isel(
+        vertex=1
+    )
+    vertex_lat_diff1 = test_ds.lat_vertices.isel(vertex=1) - test_ds.lat_vertices.isel(
+        vertex=0
+    )
+    vertex_lat_diff2 = test_ds.lat_vertices.isel(vertex=2) - test_ds.lat_vertices.isel(
+        vertex=3
+    )
for vertex_diff in [vertex_lon_diff1, vertex_lon_diff2]:
assert (vertex_diff <= 0).sum() <= (3 * len(vertex_diff.y))
# allowing for a few rows to be negative
24 changes: 12 additions & 12 deletions xmip/grids.py
@@ -14,7 +14,7 @@


def _parse_bounds_vertex(da, dim="bnds", position=[0, 1]):
"""Convenience function to extract positions from bounds/verticies"""
"""Convenience function to extract positions from bounds/vertices"""
return tuple([da.isel({dim: i}).load().data for i in position])
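(Aside: what this helper returns on toy, numpy-backed data; this mirrors `test_parse_bounds_vertex` in tests/test_grids.py above.)

```python
import numpy as np
import xarray as xr

from xmip.grids import _parse_bounds_vertex  # the private helper defined above

da = xr.DataArray(np.random.rand(4), dims=["vertex"])

# A tuple of eagerly loaded numpy arrays, one entry per requested position.
v0, v3 = _parse_bounds_vertex(da, dim="vertex", position=[0, 3])
```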


@@ -151,30 +151,30 @@ def recreate_metrics(ds, grid):
ew_bound_idx = [1]

# infer dx at tracer points
if "lon_bounds" in ds.coords and "lat_verticies" in ds.coords:
if "lon_bounds" in ds.coords and "lat_vertices" in ds.coords:
lon0, lon1 = _parse_bounds_vertex(ds["lon_bounds"])
lat0, lat1 = _parse_bounds_vertex(
_interp_vertex_to_bounds(ds["lat_verticies"], "x")
_interp_vertex_to_bounds(ds["lat_vertices"], "x")
)
dist = distance(lon0, lat0, lon1, lat1)
ds.coords["dx_t"] = xr.DataArray(dist, coords=ds.lon.coords)

# infer dy at tracer points
if "lat_bounds" in ds.coords and "lon_verticies" in ds.coords:
if "lat_bounds" in ds.coords and "lon_vertices" in ds.coords:
lat0, lat1 = _parse_bounds_vertex(ds["lat_bounds"])
lon0, lon1 = _parse_bounds_vertex(
_interp_vertex_to_bounds(ds["lon_verticies"], "y")
_interp_vertex_to_bounds(ds["lon_vertices"], "y")
)
dist = distance(lon0, lat0, lon1, lat1)
ds.coords["dy_t"] = xr.DataArray(dist, coords=ds.lon.coords)

if "lon_verticies" in ds.coords and "lat_verticies" in ds.coords:
if "lon_vertices" in ds.coords and "lat_vertices" in ds.coords:
# infer dx at the north/south face
lon0, lon1 = _parse_bounds_vertex(
ds["lon_verticies"], dim="vertex", position=ns_vertex_idx
ds["lon_vertices"], dim="vertex", position=ns_vertex_idx
)
lat0, lat1 = _parse_bounds_vertex(
ds["lat_verticies"], dim="vertex", position=ns_vertex_idx
ds["lat_vertices"], dim="vertex", position=ns_vertex_idx
)
dist = distance(lon0, lat0, lon1, lat1)
ds.coords["dx_gy"] = xr.DataArray(
@@ -183,10 +183,10 @@ def recreate_metrics(ds, grid):

# infer dy at the east/west face
lon0, lon1 = _parse_bounds_vertex(
ds["lon_verticies"], dim="vertex", position=ew_vertex_idx
ds["lon_vertices"], dim="vertex", position=ew_vertex_idx
)
lat0, lat1 = _parse_bounds_vertex(
ds["lat_verticies"], dim="vertex", position=ew_vertex_idx
ds["lat_vertices"], dim="vertex", position=ew_vertex_idx
)
dist = distance(lon0, lat0, lon1, lat1)
ds.coords["dy_gx"] = xr.DataArray(
Expand Down Expand Up @@ -223,7 +223,7 @@ def recreate_metrics(ds, grid):

# infer dx at the corner point
lon0, lon1 = grid.axes["X"]._get_neighbor_data_pairs(
- _interp_vertex_to_bounds(ds.lon_verticies.load(), "y")
+ _interp_vertex_to_bounds(ds.lon_vertices.load(), "y")
.isel(bnds=ns_bound_idx)
.squeeze(),
axis_vel_pos["X"],
@@ -241,7 +241,7 @@ def recreate_metrics(ds, grid):

# infer dy at the corner point
lat0, lat1 = grid.axes["Y"]._get_neighbor_data_pairs(
- _interp_vertex_to_bounds(ds.lat_verticies.load(), "x")
+ _interp_vertex_to_bounds(ds.lat_vertices.load(), "x")
.isel(bnds=ew_bound_idx)
.squeeze(),
axis_vel_pos["Y"],
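(Aside: the metric reconstruction above leans on a `distance(lon0, lat0, lon1, lat1)` helper that is outside this diff. A minimal great-circle stand-in, assuming degree inputs and meter outputs, could look like the following; xmip's actual implementation may differ.)

```python
import numpy as np


def distance(lon0, lat0, lon1, lat1, radius=6.371e6):
    """Great-circle (haversine) distance in meters between points in degrees.

    Hypothetical stand-in for the `distance` helper used above, which is not
    part of this changeset.
    """
    lon0, lat0, lon1, lat1 = (
        np.deg2rad(np.asarray(x)) for x in (lon0, lat0, lon1, lat1)
    )
    # Haversine formula.
    a = (
        np.sin((lat1 - lat0) / 2) ** 2
        + np.cos(lat0) * np.cos(lat1) * np.sin((lon1 - lon0) / 2) ** 2
    )
    return 2 * radius * np.arcsin(np.sqrt(a))
```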