Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 11 additions & 2 deletions Changelog.rst
Original file line number Diff line number Diff line change
@@ -1,4 +1,13 @@
Version NEXTVERSION
-------------------

**2026-03-??**

* Write UGRID datasets with `cfdm.write`
(https://github.com/NCAS-CMS/cfdm/issues/271)

Version 1.12.4.0
----------------

**2026-01-??**

Expand All @@ -9,10 +18,10 @@ Version NEXTVERSION
`cfdm.read` (https://github.com/NCAS-CMS/cfdm/issues/355)
* New function `cfdm.dataset_flatten` that replaces the deprecated
`cfdm.netcdf_flatten` (https://github.com/NCAS-CMS/cfdm/issues/355)
* New optional dependency: ``zarr>=3.1.3``
* Removed dependency (now optional): ``zarr>=3.0.8``
* Reduce the time taken to import `cfdm`
(https://github.com/NCAS-CMS/cfdm/issues/361)
* New optional dependency: ``zarr>=3.1.3``
* Removed dependency (now optional): ``zarr>=3.0.8``

----

Expand Down
1 change: 1 addition & 0 deletions cfdm/cellconnectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

class CellConnectivity(
mixin.NetCDFVariable,
mixin.NetCDFConnectivityDimension,
mixin.Topology,
mixin.PropertiesData,
mixin.Files,
Expand Down
69 changes: 47 additions & 22 deletions cfdm/cfdmimplementation.py
Original file line number Diff line number Diff line change
Expand Up @@ -419,6 +419,25 @@ def get_bounds_ncvar(self, parent, default=None):

return self.nc_get_variable(bounds, default=default)

def get_cell_connectivities(self, parent):
    """Return all cell connectivity constructs of a parent.

    .. versionadded:: (cfdm) NEXTVERSION

    :Parameters:

        parent: `Field` or `Domain`
            The parent object from which to retrieve the cell
            connectivity constructs.

    :Returns:

        `dict`
            The cell connectivity constructs, keyed by their
            unique construct identifiers.

    """
    constructs = parent.cell_connectivities(todict=True)
    return constructs

def get_cell_measures(self, field):
"""Return all of the cell measure constructs of a field.

Expand Down Expand Up @@ -1012,6 +1031,25 @@ def get_domain_axis_size(self, field, axis):
"""
return field.domain_axes(todict=True)[axis].get_size()

def get_domain_topologies(self, parent):
    """Return all domain topology constructs of a parent.

    .. versionadded:: (cfdm) NEXTVERSION

    :Parameters:

        parent: `Field` or `Domain`
            The parent object from which to retrieve the domain
            topology constructs.

    :Returns:

        `dict`
            The domain topology constructs, keyed by their unique
            construct identifiers.

    """
    constructs = parent.domain_topologies(todict=True)
    return constructs

def get_sample_dimension_position(self, construct):
"""Returns the position of the compressed data sample dimension.

Expand Down Expand Up @@ -1983,16 +2021,23 @@ def get_tie_points(self, construct, default=None):

return data.source(default)

def initialise_AuxiliaryCoordinate(self, **kwargs):
    """Return an auxiliary coordinate construct.

    :Parameters:

        kwargs: optional
            Parameters with which to initialise the object.

            .. versionadded:: (cfdm) NEXTVERSION

    :Returns:

        `AuxiliaryCoordinate`
            The new auxiliary coordinate construct.

    """
    # Look up the registered implementation class and instantiate
    # it with any initialisation parameters that were provided.
    cls = self.get_class("AuxiliaryCoordinate")
    return cls(**kwargs)

def initialise_Bounds(self):
"""Return a bounds component.
Expand Down Expand Up @@ -3421,26 +3466,6 @@ def set_dependent_tie_points(self, construct, tie_points, dimensions):
)
construct.set_data(data)

def set_mesh_id(self, parent, mesh_id):
    """Set a mesh identifier on a construct.

    .. versionadded:: (cfdm) 1.11.0.0

    :Parameters:

        parent: construct
            The construct on which to set the mesh identifier.

        mesh_id:
            The mesh identifier to set.

    :Returns:

        `None`

    """
    # Delegate to the construct's own setter
    parent.set_mesh_id(mesh_id)

def nc_set_external(self, construct):
"""Set the external status of a construct.

Expand Down
6 changes: 0 additions & 6 deletions cfdm/data/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -737,12 +737,6 @@ def __getitem__(self, indices):
# ------------------------------------------------------------
new._set_dask(dx, clear=self._ALL ^ self._CFA, in_memory=None)

if 0 in new.shape:
raise IndexError(
f"Index [{original_indices}] selects no elements from "
f"data with shape {original_shape}"
)

# ------------------------------------------------------------
# Get the axis identifiers for the subspace
# ------------------------------------------------------------
Expand Down
54 changes: 0 additions & 54 deletions cfdm/data/mixin/compressedarraymixin.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,49 +5,6 @@ class CompressedArrayMixin:

"""

def _lock_file_read(self, array):
    """Try to return an array that doesn't support concurrent reads.

    .. versionadded:: (cfdm) 1.11.2.0

    :Parameters:

        array: array_like
            The array to process.

    :Returns:

        `dask.array.Array` or array_like
            The new `dask` array, or the original array if it
            couldn't be ascertained how to form the `dask` array.

    """
    # If the object can convert itself to a dask array, use that
    # conversion directly.
    try:
        return array.to_dask_array()
    except AttributeError:
        pass

    # Preserve any existing chunking; otherwise let dask choose.
    try:
        chunks = array.chunks
    except AttributeError:
        chunks = "auto"

    # Unwrap to the underlying source array, if there is one.
    try:
        array = array.source()
    except (ValueError, AttributeError):
        pass

    # Only file-backed arrays (those with a 'get_filename' method)
    # are wrapped with lock=True, which serialises reads so that
    # concurrent file access is prevented.
    try:
        array.get_filename()
    except AttributeError:
        pass
    else:
        import dask.array as da

        array = da.from_array(array, chunks=chunks, lock=True)

    return array

def to_dask_array(self, chunks="auto"):
"""Convert the data to a `dask` array.

Expand Down Expand Up @@ -87,18 +44,7 @@ def to_dask_array(self, chunks="auto"):

context = partial(config.set, scheduler="synchronous")

# If possible, convert the compressed data to a dask array
# that doesn't support concurrent reads. This prevents
# "compute called by compute" failures problems at compute
# time.
#
# TODO: This won't be necessary if this is refactored so that
# the compressed data is part of the same dask graph as
# the compressed subarrays.
conformed_data = self.conformed_data()
conformed_data = {
k: self._lock_file_read(v) for k, v in conformed_data.items()
}
subarray_kwargs = {**conformed_data, **self.subarray_parameters()}

# Get the (cfdm) subarray class
Expand Down
10 changes: 6 additions & 4 deletions cfdm/data/netcdfindexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -249,13 +249,15 @@ def __getitem__(self, index):
# E.g. index : (1, np.newaxis, slice(1, 5))
# => index1: (1, slice(1, 5))
# and index2: (slice(None), np.newaxis, slice(None))
except ValueError:
except (ValueError, TypeError):
# Something went wrong, which is indicative of the
# variable not supporting the appropriate slicing method
# (e.g. `h5netcdf` might have returned "ValueError: Step
# must be >= 1 (got -2)"). Therefore we'll just get the
# entire array as a numpy array, and then try indexing
# that.
# must be >= 1 (got -2)" or "TypeError: Indexing elements
# must be in increasing order").
#
# Therefore we'll just get the entire array as a numpy
# array, and then try indexing that.
data = self._index(Ellipsis)
data = self._index(index, data=data)

Expand Down
1 change: 1 addition & 0 deletions cfdm/data/subarray/cellconnectivitysubarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ def __getitem__(self, indices):
stop += 1

data = self._select_data(check_mask=True)

if np.ma.isMA(data):
empty = np.ma.empty
else:
Expand Down
50 changes: 47 additions & 3 deletions cfdm/data/subarray/mixin/pointtopology.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,29 @@ def __getitem__(self, indices):
from cfdm.functions import integer_dtype

start_index = self.start_index
node_connectivity = self._select_data(check_mask=False)
node_connectivity = self._select_data(check_mask=True)

# ------------------------------------------------------------
# E.g. For faces, 'node_connectivity' might be (two
# quadrilaterals and one triangle):
#
# [[3 4 2 1 ]
# [5 6 4 3 ]
# [7 2 4 --]]
#
# E.g. For nine edges of the above faces, 'node_connectivity'
# is
#
# [[2 7]
# [4 7]
# [4 2]
# [1 2]
# [3 1]
# [3 4]
# [3 5]
# [6 5]
# [4 6]]
# ------------------------------------------------------------

masked = np.ma.isMA(node_connectivity)

Expand All @@ -49,8 +71,15 @@ def __getitem__(self, indices):
cols_extend = cols.extend
u_extend = u.extend

unique_nodes = np.unique(node_connectivity)
if masked:
# Remove the missing value from unique nodes
unique_nodes = unique_nodes[:-1]

unique_nodes = unique_nodes.tolist()

# WARNING (TODO): This loop is a potential performance bottleneck.
for node in np.unique(node_connectivity).tolist():
for node in unique_nodes:
# Find the collection of all nodes that are joined to this
# node via links in the mesh, including this node itself
# (which will be at the start of the list).
Expand All @@ -62,10 +91,11 @@ def __getitem__(self, indices):
cols_extend(range(n_nodes))
u_extend(nodes)

del unique_nodes

u = np.array(u, dtype=integer_dtype(largest_node_id))
u = csr_array((u, cols, pointers))
u = u.toarray()

if any(map(isnan, self.shape)):
# Store the shape, now that is it known.
self._set_component("shape", u.shape, copy=False)
Expand All @@ -76,6 +106,20 @@ def __getitem__(self, indices):
# Mask all zeros
u = np.ma.where(u == 0, np.ma.masked, u)

# ------------------------------------------------------------
# E.g. For both of the face and edges examples above, 'u' is
# now
#
# [[1 2 3 -- --]
# [2 1 4 7 --]
# [3 1 4 5 --]
# [4 2 3 6 7]
# [5 3 6 -- --]
# [6 4 5 -- --]
# [7 2 4 -- --]]
#
# ------------------------------------------------------------

if not start_index:
# Subtract 1 to get back to zero-based node identities
u -= 1
Expand Down
Loading